Incorrect decoding of an H264 video stream on Nexus devices

I am trying to use the Android MediaCodec class to decode an H.264 video stream from a remote camera. My code is:

public class RemoteCamera { 

public interface OnCameraListener { 
    void onConnected(); 
    void onFailureConnection(); 
    void onDisconnected(); 
    void onReady(); 
} 

private static final int MAX_NAL_LEN = 1024 * 1024; 
private static final String TAG = "RemoteCamera"; 

private OutputThread mOutputThread; 

private WebSocketManager mWebSocketManager; 
private OnCameraListener mOnCameraListener; 

private int mSearchState = 0; 
private byte[] mNalData; 
private int mNalDataPos; 

private MediaCodec mDecoder; 
private MediaFormat mFormat; 
private SurfaceView mSurfaceView; 
private MediaCodec.BufferInfo mInfo = new MediaCodec.BufferInfo(); 
private boolean mIsWaitingForSPS = true; 

public RemoteCamera(final SurfaceView surfaceView, final String wss) { 
    mSurfaceView = surfaceView; 
    mWebSocketManager = new WebSocketManager(wss); 
    mWebSocketManager.setWSListener(new WebSocketManager.OnWSListener() { 
     @Override 
     public void onOpen() { 
      if (mOnCameraListener != null) { 
       mOnCameraListener.onConnected(); 
      } 
     } 

     @Override 
     public void onClosed() { 
      if (mOnCameraListener != null) { 
       mOnCameraListener.onDisconnected(); 
      } 
     } 

     @Override 
     public void onFailure() { 
      if (mOnCameraListener != null) { 
       mOnCameraListener.onFailureConnection(); 
      } 
     } 

     @Override 
     public synchronized void onMessage(final ByteString bytes) { 
      final ByteBuffer bb = ByteBuffer.wrap(bytes.toByteArray()); 

      if (mIsWaitingForSPS) { 
       if (isSPSUnit(bb)) { 
        mIsWaitingForSPS = false; 
        if (mOnCameraListener != null) { 
         mOnCameraListener.onReady(); 
        } 
       } else { 
        return; 
       } 
      } 

      parseDatagram(bb.array(), bytes.size()); 
     } 
    }); 

    mNalData = new byte[MAX_NAL_LEN]; 
    mNalDataPos = 0; 
    try { 
     mDecoder = MediaCodec.createDecoderByType("video/avc"); 
    } catch (Exception e) { 
     Log.d(TAG, e.toString()); 
     return; 
    } 

    mFormat = MediaFormat.createVideoFormat("video/avc", 320, 240); 
} 

public void setOnCameraListener(final OnCameraListener cameraListener) { 
    mOnCameraListener = cameraListener; 
} 

public void startStreaming() { 
    mSurfaceView.getHolder().addCallback(new SurfaceHolder.Callback() { 
     @Override 
     public void surfaceCreated(SurfaceHolder surfaceHolder) { 
      try { 
       mDecoder.configure(mFormat, mSurfaceView.getHolder().getSurface(), null, 0); 
      } catch (Exception e) { 
       Log.d(TAG, e.toString()); 
       return; 
      } 
      mWebSocketManager.wsRegister(); 
      mDecoder.start(); 
      mOutputThread = new OutputThread(); 
      mOutputThread.start(); 
     } 

     @Override 
     public void surfaceChanged(SurfaceHolder surfaceHolder, int i, int i1, int i2) { 

     } 

     @Override 
     public void surfaceDestroyed(SurfaceHolder surfaceHolder) { 

     } 
    }); 
} 


// Busy-waits until a decoder input buffer is available, then copies one
// Annex B NAL unit into it and queues it for decoding.
private void feedDecoder(byte[] n, int len) { 
    for (; ;) { 
     try { 
      int inputBufferIndex = mDecoder.dequeueInputBuffer(0); 
      if (inputBufferIndex >= 0) { 
       final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex); 
       inputBuffer.put(n, 0, len); 
       mDecoder.queueInputBuffer(inputBufferIndex, 0, len, System.currentTimeMillis(), 0); 
       break; 
      } 
     } catch (Exception e) { 
      Log.d(TAG, e.toString()); 
     } 
    } 
} 

// Scans the incoming bytes for Annex B start codes (00 00 00 01); each time a
// new start code is found, the buffered bytes before it are sent to the
// decoder as one complete NAL unit.
private void parseDatagram(byte[] p, int plen) { 
    try { 
     for (int i = 0; i < plen; ++i) { 
      mNalData[mNalDataPos++] = p[i]; 
      if (mNalDataPos == MAX_NAL_LEN - 1) { 
       mNalDataPos = 0; 
      } 
      switch (mSearchState) { 
       case 0: 
       case 1: 
       case 2: 
        if (p[i] == 0) 
         mSearchState++; 
        else 
         mSearchState = 0; 
        break; 
       case 3: 
        if (p[i] == 1) { 
         mNalData[0] = 0; 
         mNalData[1] = 0; 
         mNalData[2] = 0; 
         mNalData[3] = 1; 
         feedDecoder(mNalData, mNalDataPos - 4); 
         mNalDataPos = 4; 
        } 
        mSearchState = 0; 
        break; 
       default: 
        break; 
      } 
     } 
    } catch (Exception e) { 
     Log.d(TAG, e.toString()); 
    } 
} 

// The byte after the 4-byte start code is the NAL header; 0x67 marks an SPS unit.
private boolean isSPSUnit(final ByteBuffer unit) { 
    return unit.get(4) == 0x67; 
} 


// Continuously drains decoded frames and renders them straight to the surface.
private class OutputThread extends Thread { 

    @Override 
    public void run() { 
     while (true) { 
      try { 
       int outputBufferIndex = mDecoder.dequeueOutputBuffer(mInfo, 10); 
       if (outputBufferIndex >= 0) { 
        mDecoder.releaseOutputBuffer(outputBufferIndex, true); 
       } 
      } catch (Exception e) { 
       Log.d(TAG, e.toString()); 
      } 
     } 
    } 
} 

I tested the code on a Sony Xperia Z5 Compact and a Yota Phone 2, and it works correctly on those devices. The picture I get from the Sony is really good. Then I tried streaming on a Nexus 9 and a Nexus 7, and the picture comes out corrupted, as if it were being drawn in strips from top to bottom. The output on the Nexus devices is not correct: nexus results

I know this depends on the device's native media codecs, but what should I do to fix the problem so the video displays correctly on all devices?


Decoders can be quite lenient. My guess is that the Sony and Yota decoders are more forgiving than the Nexus ones. Could you save the stream to a file and share it?
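
For reference, a minimal sketch of such a dump, assuming it is written from the same onMessage callback; the helper name and output file are only illustrative (requires java.io.File, java.io.FileOutputStream and java.io.IOException):

private FileOutputStream mDumpStream;

// Hypothetical debug helper: appends every received message to a raw dump file
// so the elementary stream can be inspected offline.
private void dumpToFile(final byte[] data, final int len) {
    try {
     if (mDumpStream == null) {
      // Illustrative location; any writable path would do.
      mDumpStream = new FileOutputStream(new File(
        mSurfaceView.getContext().getExternalFilesDir(null), "camera.h264"));
     }
     mDumpStream.write(data, 0, len);
     mDumpStream.flush();
    } catch (IOException e) {
     Log.d(TAG, e.toString());
    }
}

// In onMessage(), right before parseDatagram(...):
// dumpToFile(bb.array(), bytes.size());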


Thank you for your answer. I will provide the file as soon as I can.

Answer


Do not pass the 0x00 0x00 0x00 0x01 NALU start code to the decoder.
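
A minimal sketch of what that suggestion could look like in the question's own feedDecoder, assuming only the leading start code is skipped and everything else stays as posted (the extra offset parameter and the adjusted call site are hypothetical, not a verified fix):

// feedDecoder with an extra offset parameter so the 4-byte Annex B start code
// is not copied into the codec input buffer.
private void feedDecoder(byte[] n, int offset, int len) {
    for (;;) {
     try {
      int inputBufferIndex = mDecoder.dequeueInputBuffer(0);
      if (inputBufferIndex >= 0) {
       final ByteBuffer inputBuffer = mDecoder.getInputBuffer(inputBufferIndex);
       inputBuffer.put(n, offset, len); // copy the NAL payload only
       mDecoder.queueInputBuffer(inputBufferIndex, 0, len, System.currentTimeMillis(), 0);
       break;
      }
     } catch (Exception e) {
      Log.d(TAG, e.toString());
     }
    }
}

// Call site in parseDatagram(): skip the 4 leading start-code bytes and drop
// the 4 bytes of the next start code that were already copied into mNalData.
// feedDecoder(mNalData, 4, mNalDataPos - 8);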


Thank you, but it did not work for me; nothing changed. Moreover, when I removed the start code from every NAL unit, the result was bad on all devices, so I have to keep 0x00 0x00 0x00 0x01 for the SPS and PPS units. That brought the good picture back on the Sony and the Yota, but it still does not work on the Nexus 9 and Nexus 7.
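
For completeness, MediaCodec also accepts SPS and PPS out of band through the "csd-0" and "csd-1" MediaFormat keys instead of in-band; a minimal sketch, assuming the placeholder byte arrays below are replaced with the values from the camera's actual stream:

// Placeholder SPS/PPS; real values must be extracted from the camera's stream.
byte[] sps = {0, 0, 0, 1, 0x67 /* ... remaining SPS bytes ... */};
byte[] pps = {0, 0, 0, 1, 0x68 /* ... remaining PPS bytes ... */};
mFormat.setByteBuffer("csd-0", ByteBuffer.wrap(sps));
mFormat.setByteBuffer("csd-1", ByteBuffer.wrap(pps));
// mDecoder.configure(mFormat, ...) is then called as before in startStreaming().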