
It looks like the MediaPlayer used in my application has started to hang every 16 or 17 frames: video played on a GLSurfaceView stalls for about 500ms every 16 or 17 frames.

I am using a GLSurfaceView to render the frames played back by a MediaPlayer. Everything used to work fine at a good frame rate. The app ran well for a while, but for the last few days the video hangs for at least 500ms every 16 or 17 frames.

The program looks like the code below, and I am running it on an Xperia Z1. To make sure there was no regression in my own code, I started over from the tutorial, and the lag is still there.

Whether I use the lock (synchronized in the Java original) or not, and whether I use Rendermode.WhenDirty or not, changes absolutely nothing about the playback.

The program is just an Activity and a layout containing this custom view; no other code is involved. (By the way, the demo does not follow standard C# naming conventions because it is a throwaway snippet, so please don't comment on the refactoring.)
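
For context, here is a minimal sketch of what that hosting Activity might look like; the names MainActivity and Resource.Layout.Main are illustrative assumptions, not taken from the post. The custom view itself follows below.

using Android.App; 
using Android.OS; 

[Activity(Label = "CustomVideoViewDemo", MainLauncher = true)] 
public class MainActivity : Activity { 
    protected override void OnCreate(Bundle bundle) { 
     base.OnCreate(bundle); 
     // The layout only declares the CustomVideoView; everything else happens in the view. 
     SetContentView(Resource.Layout.Main); 
    } 
} 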

public class CustomVideoView : GLSurfaceView { 

    VideoRender mRenderer; 
    private MediaPlayer mMediaPlayer = null; 
    private string filePath = ""; 
    private Uri uri = null; 
    private Context _context; 

    public CustomVideoView(Context context, IAttributeSet attrs) : base(context, attrs) { 
     _context = context; 
     init(); 
    } 

    public CustomVideoView(Context context, IAttributeSet attrs, int defStyle) : base(context, attrs) { 
     _context = context; 
     init(); 
    } 

    public CustomVideoView(Context context) : base(context, null) { 
     _context = context; 
     init(); 
    } 


    public void init() { 
     SetEGLContextClientVersion (2); 

     Holder.SetFormat (Format.Translucent); 
     SetEGLConfigChooser (8, 8, 8, 8, 16, 0); 
     filePath = "/storage/sdcard1/download/cat3.mp4"; 
     mRenderer = new VideoRender (_context, mMediaPlayer, filePath, false, this); 
     SetRenderer (mRenderer); 
     //RenderMode = Rendermode.WhenDirty; 
    } 

    public override void OnResume() { 
     base.OnResume(); 
    } 

    public override void OnPause() { 
     base.OnPause(); 
    } 

    protected override void OnDetachedFromWindow() { 
     // TODO Auto-generated method stub 
     base.OnDetachedFromWindow(); 

     if (mMediaPlayer != null) { 
      mMediaPlayer.Stop(); 
      mMediaPlayer.Release(); 
     } 
    } 

    private class VideoRender : Java.Lang.Object, GLSurfaceView.IRenderer, SurfaceTexture.IOnFrameAvailableListener { 

     private string TAG = "VideoRender"; 
     private const int FLOAT_SIZE_BYTES = 4; 
     private const int TRIANGLE_VERTICES_DATA_STRIDE_BYTES = 3 * FLOAT_SIZE_BYTES; 
     private const int TEXTURE_VERTICES_DATA_STRIDE_BYTES = 2 * FLOAT_SIZE_BYTES; 
     private const int TRIANGLE_VERTICES_DATA_POS_OFFSET = 0; 
     private const int TRIANGLE_VERTICES_DATA_UV_OFFSET = 0; 
     private float[] mTriangleVerticesData = { -1.0f, -1.0f, 0, 1.0f, 
      -1.0f, 0, -1.0f, 1.0f, 0, 1.0f, 1.0f, 0, }; 

     private float[] mTextureVerticesData = { 0.0f, 0.0f, 1.0f, 0.0f, 0.0f, 1.0f, 1.0f, 1.0f }; 

     private FloatBuffer mTriangleVertices; 

     // extra 
     private FloatBuffer mTextureVertices; 

     private string mVertexShader = "uniform mat4 uMVPMatrix;\n" 
      + "uniform mat4 uSTMatrix;\n" + "attribute vec4 aPosition;\n" 
      + "attribute vec4 aTextureCoord;\n" 
      + "varying vec2 vTextureCoord;\n" + "void main() {\n" 
      + " gl_Position = uMVPMatrix * aPosition;\n" 
      + " vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n" + "}\n"; 

     private string mFragmentShader = "#extension GL_OES_EGL_image_external : require\n" 
      + "precision mediump float;\n" 
      + "varying vec2 vTextureCoord;\n" 
      + "uniform samplerExternalOES sTexture;\n" 
      + "void main() {\n" 
      + " gl_FragColor = texture2D(sTexture, vTextureCoord);\n" 
      + "}\n"; 

     private float[] mMVPMatrix = new float[16]; 
     private float[] mSTMatrix = new float[16]; 
     private float[] projectionMatrix = new float[16]; 

     private int mProgram; 
     private int mTextureID; 
     private int muMVPMatrixHandle; 
     private int muSTMatrixHandle; 
     private int maPositionHandle; 
     private int maTextureHandle; 

     private SurfaceTexture mSurface; 
     private bool updateSurface = false; 
     private MediaPlayer mMediaPlayer; 
     private string _filePath; 
     private bool _isStreaming = false; 
     private Context _context; 
     private CustomVideoView _customVideoView; 

     private int GL_TEXTURE_EXTERNAL_OES = 0x8D65; 

     public VideoRender(Context context, MediaPlayer mediaPlayer, string filePath, bool isStreaming, CustomVideoView customVideoView) { 
      _customVideoView = customVideoView; 
      _filePath = filePath; 
      _isStreaming = isStreaming; 
      _context = context; 
      mMediaPlayer = mediaPlayer; 

      mTriangleVertices = ByteBuffer 
       .AllocateDirect(
        mTriangleVerticesData.Length * FLOAT_SIZE_BYTES) 
       .Order(ByteOrder.NativeOrder()).AsFloatBuffer(); 
      mTriangleVertices.Put(mTriangleVerticesData).Position(0); 

      // extra 
      mTextureVertices = ByteBuffer 
       .AllocateDirect(mTextureVerticesData.Length * FLOAT_SIZE_BYTES) 
       .Order(ByteOrder.NativeOrder()).AsFloatBuffer(); 

      mTextureVertices.Put(mTextureVerticesData).Position(0); 

      Android.Opengl.Matrix.SetIdentityM(mSTMatrix, 0); 
     } 

     public void OnDrawFrame(Javax.Microedition.Khronos.Opengles.IGL10 glUnused) { 

      lock (syncLock) { 
       if (updateSurface) { 
        mSurface.UpdateTexImage(); 
        mSurface.GetTransformMatrix (mSTMatrix); 
        updateSurface = false; 
       } 
      } 

      GLES20.GlClearColor (1.0f, 1.0f, 1.0f, 1.0f); // clear-colour components are clamped to [0, 1] 
      GLES20.GlClear (GLES20.GlDepthBufferBit 
       | GLES20.GlColorBufferBit); 

      GLES20.GlUseProgram (mProgram); 
      checkGlError ("glUseProgram"); 

      GLES20.GlActiveTexture (GLES20.GlTexture0); 
      GLES20.GlBindTexture (GL_TEXTURE_EXTERNAL_OES, mTextureID); 

      mTriangleVertices.Position (TRIANGLE_VERTICES_DATA_POS_OFFSET); 
      GLES20.GlVertexAttribPointer (maPositionHandle, 3, GLES20.GlFloat, 
       false, TRIANGLE_VERTICES_DATA_STRIDE_BYTES, 
       mTriangleVertices); 
      checkGlError ("glVertexAttribPointer maPosition"); 
      GLES20.GlEnableVertexAttribArray (maPositionHandle); 
      checkGlError ("glEnableVertexAttribArray maPositionHandle"); 

      mTextureVertices.Position (TRIANGLE_VERTICES_DATA_UV_OFFSET); 
      GLES20.GlVertexAttribPointer (maTextureHandle, 2, GLES20.GlFloat, 
       false, TEXTURE_VERTICES_DATA_STRIDE_BYTES, mTextureVertices); 

      checkGlError ("glVertexAttribPointer maTextureHandle"); 
      GLES20.GlEnableVertexAttribArray (maTextureHandle); 
      checkGlError ("glEnableVertexAttribArray maTextureHandle"); 

      Android.Opengl.Matrix.SetIdentityM (mMVPMatrix, 0); 

      GLES20.GlUniformMatrix4fv (muMVPMatrixHandle, 1, false, mMVPMatrix, 
       0); 
      GLES20.GlUniformMatrix4fv (muSTMatrixHandle, 1, false, mSTMatrix, 0); 

      GLES20.GlDrawArrays (GLES20.GlTriangleStrip, 0, 4); 
      checkGlError ("glDrawArrays"); 


      GLES20.GlFinish(); 

     } 

     public void OnSurfaceChanged(Javax.Microedition.Khronos.Opengles.IGL10 glUnused, int width, int height) { 

      GLES20.GlViewport (0, 0, width, height); 

      Android.Opengl.Matrix.FrustumM (projectionMatrix, 0, -1.0f, 1.0f, -1.0f, 1.0f, 
       1.0f, 10.0f); 

     } 


     public void OnSurfaceCreated(Javax.Microedition.Khronos.Opengles.IGL10 gl,Javax.Microedition.Khronos.Egl.EGLConfig config) { 

      mProgram = createProgram (mVertexShader, mFragmentShader); 
      if (mProgram == 0) { 
       return; 
      } 
      maPositionHandle = GLES20 
       .GlGetAttribLocation (mProgram, "aPosition"); 
      checkGlError ("glGetAttribLocation aPosition"); 
      if (maPositionHandle == -1) { 
       throw new RuntimeException (
        "Could not get attrib location for aPosition"); 
      } 
      maTextureHandle = GLES20.GlGetAttribLocation (mProgram, 
       "aTextureCoord"); 
      checkGlError ("glGetAttribLocation aTextureCoord"); 
      if (maTextureHandle == -1) { 
       throw new RuntimeException (
        "Could not get attrib location for aTextureCoord"); 
      } 

      muMVPMatrixHandle = GLES20.GlGetUniformLocation (mProgram, 
       "uMVPMatrix"); 
      checkGlError ("glGetUniformLocation uMVPMatrix"); 
      if (muMVPMatrixHandle == -1) { 
       throw new RuntimeException (
        "Could not get attrib location for uMVPMatrix"); 
      } 

      muSTMatrixHandle = GLES20.GlGetUniformLocation (mProgram, 
       "uSTMatrix"); 
      checkGlError ("glGetUniformLocation uSTMatrix"); 
      if (muSTMatrixHandle == -1) { 
       throw new RuntimeException (
        "Could not get attrib location for uSTMatrix"); 
      } 

      int[] textures = new int[1]; 
      GLES20.GlGenTextures (1, textures, 0); 

      mTextureID = textures [0]; 
      GLES20.GlBindTexture (GL_TEXTURE_EXTERNAL_OES, mTextureID); 
      checkGlError ("glBindTexture mTextureID"); 

      GLES20.GlTexParameterf (GL_TEXTURE_EXTERNAL_OES, 
       GLES20.GlTextureMinFilter, GLES20.GlNearest); 
      GLES20.GlTexParameterf (GL_TEXTURE_EXTERNAL_OES, 
       GLES20.GlTextureMagFilter, GLES20.GlLinear); 

      mSurface = new SurfaceTexture (mTextureID); 
      // mSurface.SetOnFrameAvailableListener (this); 
      mSurface.FrameAvailable += (object sender, SurfaceTexture.FrameAvailableEventArgs e) => { 
       OnFrameAvailable(e.SurfaceTexture); 
      }; 
      Surface surface = new Surface (mSurface); 

      mMediaPlayer = new MediaPlayer(); 

      if (System.IO.File.Exists(_filePath)) { 
       try { 
        if (!_isStreaming) { 
         mMediaPlayer.SetDataSource (_filePath); 
        } else { 
         throw new System.NotImplementedException(); 
         //mMediaPlayer.SetDataSource (_context, new Uri.Builder().AppendPath(_filePath)); 
        } 

       } catch (IllegalArgumentException e) { 
        // TODO Auto-generated catch block 
        e.PrintStackTrace(); 
       } catch (SecurityException e) { 
        // TODO Auto-generated catch block 
        e.PrintStackTrace(); 
       } catch (IllegalStateException e) { 
        // TODO Auto-generated catch block 
        e.PrintStackTrace(); 
       } catch (IOException e) { 
        // TODO Auto-generated catch block 
        e.PrintStackTrace(); 
       } 
      } 

      mMediaPlayer.SetSurface (surface); 
      surface.Release(); 

      try { 
       mMediaPlayer.Prepare(); 
      } catch (IOException t) { 
       Log.Error (TAG, "media player prepare failed"); 
      } 

      lock (syncLock) { 
       updateSurface = false; 
      } 

      mMediaPlayer.Start(); 

     } 

     private readonly object syncLock = new object(); 

     public void OnFrameAvailable(SurfaceTexture surface) { 
      lock (syncLock) { 
       updateSurface = true;    
      } 

      _customVideoView.RequestRender(); 
     } 

     private int loadShader(int shaderType, string source) { 
      int shader = GLES20.GlCreateShader (shaderType); 
      if (shader != 0) { 
       GLES20.GlShaderSource (shader, source); 
       GLES20.GlCompileShader (shader); 
       int[] compiled = new int[1]; 
       GLES20.GlGetShaderiv (shader, GLES20.GlCompileStatus, compiled, 0); 
       if (compiled [0] == 0) { 
        Log.Error (TAG, "Could not compile shader " + shaderType + ":"); 
        Log.Error (TAG, GLES20.GlGetShaderInfoLog (shader)); 
        GLES20.GlDeleteShader (shader); 
        shader = 0; 
       } 
      } 
      return shader; 
     } 

     private int createProgram(string vertexSource, string fragmentSource) { 
      int vertexShader = loadShader (GLES20.GlVertexShader, vertexSource); 
      if (vertexShader == 0) { 
       return 0; 
      } 
      int pixelShader = loadShader (GLES20.GlFragmentShader, 
       fragmentSource); 

      if (pixelShader == 0) { 
       return 0; 
      } 

      int program = GLES20.GlCreateProgram(); 
      if (program != 0) { 
       GLES20.GlAttachShader (program, vertexShader); 
       checkGlError ("glAttachShader"); 
       GLES20.GlAttachShader (program, pixelShader); 
       checkGlError ("glAttachShader"); 
       GLES20.GlLinkProgram (program); 
       int[] linkStatus = new int[1]; 
       GLES20.GlGetProgramiv (program, GLES20.GlLinkStatus, 
        linkStatus, 0); 
       if (linkStatus [0] != GLES20.GlTrue) { 
        Log.Error (TAG, "Could not link program: "); 
        Log.Error (TAG, GLES20.GlGetProgramInfoLog (program)); 
        GLES20.GlDeleteProgram (program); 
        program = 0; 
       } 
      } 
      return program; 
     } 

     private void checkGlError(string op) { 
      int error; 
      while ((error = GLES20.GlGetError()) != GLES20.GlNoError) { 
       Log.Error (TAG, op + ": glError " + error); 
       throw new RuntimeException (op + ": glError " + error); 
      } 
     } 
    } 
} 

While debugging, I computed the overall FPS, how long a frame should last at that frame rate, and how long it actually lasted. You can see the length of the stall, which ends up at around 320ms. (A minimal sketch of how this measurement can be taken follows the log.)

[fps] 15.0627 - norm=66 - cur=44.712 
[fps] 15.09347 - norm=66 - cur=45.017 
[fps] 15.12472 - norm=66 - cur=44.437 
[fps] 15.17346 - norm=65 - cur=32.413 
[fps] 15.20476 - norm=65 - cur=44.01 
[fps] 15.2337 - norm=65 - cur=45.506 
[fps] 15.26154 - norm=65 - cur=46.177 
[fps] 14.8815 - norm=67 - cur=334.503 
[fps] 14.93206 - norm=66 - cur=29.971 
[fps] 14.96286 - norm=66 - cur=44.071 
[fps] 14.99153 - norm=66 - cur=45.445 
[fps] 15.03538 - norm=66 - cur=34.213 
[fps] 15.0695 - norm=66 - cur=41.142 
[fps] 15.09754 - norm=66 - cur=44.468 
[fps] 15.12501 - norm=66 - cur=45.628 
[fps] 15.17139 - norm=65 - cur=31.558 
[fps] 15.20057 - norm=65 - cur=44.01 
[fps] 15.22785 - norm=65 - cur=45.231 
[fps] 15.25471 - norm=65 - cur=45.384 
[fps] 15.30203 - norm=65 - cur=30.093 
[fps] 15.32664 - norm=65 - cur=46.636 
[fps] 15.35203 - norm=65 - cur=45.933 
[fps] 15.37996 - norm=65 - cur=44.041 
[fps] 15.42686 - norm=64 - cur=29.3 
[fps] 15.47278 - norm=64 - cur=30.001 
[fps] 15.49799 - norm=64 - cur=45.384 
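
The helper below is a hypothetical sketch of such a per-frame measurement, not part of the renderer above; the class name FrameTimer and the log format are only meant to mirror the output shown.

// Hypothetical helper class: call Tick() once per frame (e.g. at the end of 
// OnDrawFrame()) to reproduce the log format above. 
class FrameTimer { 
    private long _lastTimestamp; 
    private int _frames; 
    private double _elapsedMs; 

    public void Tick() { 
     long now = System.Diagnostics.Stopwatch.GetTimestamp(); 
     if (_lastTimestamp != 0) { 
      // Actual duration of the frame that just finished, in milliseconds. 
      double curMs = (now - _lastTimestamp) * 1000.0 / System.Diagnostics.Stopwatch.Frequency; 
      _elapsedMs += curMs; 
      _frames++; 
      double fps = _frames * 1000.0 / _elapsedMs;   // overall average fps 
      double normMs = 1000.0 / fps;                 // expected frame duration at that fps 
      Android.Util.Log.Debug("fps", string.Format("[fps] {0:F5} - norm={1:F0} - cur={2:F3}", fps, normMs, curMs)); 
     } 
     _lastTimestamp = now; 
    } 
} 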

[EDIT]

Rebooting the phone solved the problem. So either the phone itself was low on memory or storage, or I did not write the sample correctly and the CPU is being overused.

Answer


It may be easier to use a plain SurfaceView and handle the EGL setup and thread management yourself. Having a dedicated rendering thread adds little value when all you are doing is displaying video frames. (See Grafika for examples.)
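
The answer is pointing at Grafika-style manual EGL on a SurfaceView. As an even simpler, hedged alternative sketch for the case where the GLES pass exists only to show the frames (no shader effects), MediaPlayer can draw straight into a plain SurfaceView, which removes the render thread entirely. Class and member names below are illustrative, not from the post.

using Android.Content; 
using Android.Graphics; 
using Android.Media; 
using Android.Views; 

public class PlainVideoView : SurfaceView, ISurfaceHolderCallback { 
    private MediaPlayer _player; 
    private readonly string _filePath; 

    public PlainVideoView(Context context, string filePath) : base(context) { 
     _filePath = filePath; 
     Holder.AddCallback(this); 
    } 

    public void SurfaceCreated(ISurfaceHolder holder) { 
     _player = new MediaPlayer(); 
     _player.SetDataSource(_filePath); 
     _player.SetDisplay(holder);   // MediaPlayer renders directly into the SurfaceView 
     _player.Prepare(); 
     _player.Start(); 
    } 

    public void SurfaceChanged(ISurfaceHolder holder, Format format, int width, int height) { } 

    public void SurfaceDestroyed(ISurfaceHolder holder) { 
     if (_player != null) { 
      _player.Stop(); 
      _player.Release(); 
      _player = null; 
     } 
    } 
} 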

If you stick with GLSurfaceView, you neither need nor want the glFinish() call at the end of onDrawFrame(). It is a synchronous call that blocks your thread until GLES has finished drawing. GLSurfaceView calls eglSwapBuffers() itself after onDrawFrame() returns.
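
Concretely, the end of OnDrawFrame() from the question can simply drop that call (sketch of the tail of the existing method):

      GLES20.GlDrawArrays (GLES20.GlTriangleStrip, 0, 4); 
      checkGlError ("glDrawArrays"); 
      // GLES20.GlFinish();  // not needed: GLSurfaceView issues eglSwapBuffers() 
      //                     // after OnDrawFrame() returns 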

In any event, a 300ms stall is unlikely to be caused by GLES. Either MediaPlayer is stalling, or something else in the system is waking up and consuming all available CPU. If you want to debug further, try systrace.
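
If you do capture a systrace, one hedged way to make the renderer's own work visible in it (assuming API 18 or later) is to wrap parts of OnDrawFrame() in named sections with Android.OS.Trace. This is only a sketch, not something the answer prescribes.

// Hedged sketch (API 18+): named trace sections show up as labelled blocks 
// in a systrace capture, which helps separate a MediaPlayer stall from time 
// actually spent in GLES. 
public void OnDrawFrame(Javax.Microedition.Khronos.Opengles.IGL10 glUnused) { 
    Android.OS.Trace.BeginSection("OnDrawFrame"); 
    try { 
     Android.OS.Trace.BeginSection("UpdateTexImage"); 
     lock (syncLock) { 
      if (updateSurface) { 
       mSurface.UpdateTexImage(); 
       mSurface.GetTransformMatrix(mSTMatrix); 
       updateSurface = false; 
      } 
     } 
     Android.OS.Trace.EndSection(); 

     // ... the rest of the original drawing code ... 
    } finally { 
     Android.OS.Trace.EndSection(); 
    } 
} 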