2014-12-27 106 views
2

我想通過ffmpeg使用MediaCodec API解碼來自PC的實時流屏幕捕獲。使用MediaCodec解碼h.264流,dequeueOutputBuffer總是返回-1

對於發件人(PC FFMPEG)

我用這個命令

ffmpeg -re -f gdigrab -s 1920x1080 -threads 4 -i desktop -vcodec libx264 -pix_fmt yuv420p -tune zerolatency -profile:v baseline -flags global_header -s 1280x720 -an -f rtp rtp://192.168.1.6:1234

和輸出如下所示

Output #0, rtp, to 'rtp://192.168.1.6:1234': 
    Metadata: 
    encoder   : Lavf56.15.104 
    Stream #0:0: Video: h264 (libx264), yuv420p, 1280x720, q=-1--1, 29.97 fps, 90k tbn, 29.97 tbc 
Metadata: 
    encoder   : Lavc56.14.100 libx264 
Stream mapping: 
    Stream #0:0 -> #0:0 (bmp (native) -> h264 (libx264)) 
SDP: 
v=0 
o=- 0 0 IN IP4 127.0.0.1 
s=No Name 
c=IN IP4 192.168.1.6 
t=0 0 
a=tool:libavformat 56.15.104 
m=video 1234 RTP/AVP 96 
a=rtpmap:96 H264/90000 
a=fmtp:96 packetization-mode=1; sprop-parameter-sets=Z0LAH9kAUAW6EAAAPpAADqYI8YMkgA==,aMuDyyA=; profile-level-id=42C01F 

Press [q] to stop, [?] for help 
frame= 19 fps=0.0 q=17.0 size=  141kB time=00:00:00.63 bitrate=1826.0kbits/ 
frame= 34 fps= 32 q=17.0 size=  164kB time=00:00:01.13 bitrate=1181.5kbits/ 
frame= 50 fps= 32 q=18.0 size=  173kB time=00:00:01.66 bitrate= 850.9kbits/ 

對於接收機(的Android MediaCodec)

我創建活動與表面,並實現SurfaceHolder.Callback

在surfaceChanged

@Override 
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { 
    Log.i("sss", "surfaceChanged"); 
    if(playerThread == null) { 
     playerThread = new PlayerThread(holder.getSurface()); 
     playerThread.start(); 
    } 

} 

對於PlayerThread

class PlayerThread extends Thread { 

    MediaCodec decoder; 
    Surface surface; 

    public PlayerThread(Surface surface) { 
     this.surface = surface; 
    } 

    @Override 
    public void run() { 
     running = true; 
     try { 
      MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720); 
      byte[] header = new byte[] {0,0,0,1}; 
      byte[] sps = Base64.decode("Z0LAH9kAUAW6EAAAPpAADqYI8YMkgA==", Base64.DEFAULT); 
      byte[] pps = Base64.decode("aMuDyyA=", Base64.DEFAULT); 

      byte[] header_sps = new byte[sps.length + header.length]; 
      System.arraycopy(header,0,header_sps,0,header.length); 
      System.arraycopy(sps,0,header_sps,header.length, sps.length); 

      byte[] header_pps = new byte[pps.length + header.length]; 
      System.arraycopy(header,0, header_pps, 0, header.length); 
      System.arraycopy(pps, 0, header_pps, header.length, pps.length); 

      format.setByteBuffer("csd-0", ByteBuffer.wrap(header_sps)); 
      format.setByteBuffer("csd-1", ByteBuffer.wrap(header_pps)); 
      format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 1280 * 720); 
//   format.setInteger("durationUs", 63446722); 
//   format.setByteBuffer("csd-2", ByteBuffer.wrap((hexStringToByteArray("42C01E"))));      
//   format.setInteger(MediaFormat.KEY_COLOR_FORMAT ,MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar); 
      Log.i("sss", "Format = " + format); 

      try { 
       decoder = MediaCodec.createDecoderByType("video/avc"); 
       decoder.configure(format, surface, null, 0); 
       decoder.start(); 

      } catch (IOException ioEx) { 
       ioEx.printStackTrace(); 
      } 

      DatagramSocket socket = new DatagramSocket(1234); 
      byte[] bytes = new byte[4096]; 
      DatagramPacket packet = new DatagramPacket(bytes, bytes.length); 

      byte[] data; 

      ByteBuffer[] inputBuffers; 
      ByteBuffer[] outputBuffers; 

      ByteBuffer inputBuffer; 
      ByteBuffer outputBuffer; 

      MediaCodec.BufferInfo bufferInfo; 

      bufferInfo = new MediaCodec.BufferInfo(); 
      int inputBufferIndex; 
      int outputBufferIndex; 
      byte[] outData; 

      inputBuffers = decoder.getInputBuffers(); 
      outputBuffers = decoder.getOutputBuffers(); 

      int minusCount = 0; 
      byte[] prevData = new byte[65535]; 
      List<byte[]> playLoads = new ArrayList<>(); 
      int playloadSize = 0; 
      while (true) { 
       try { 
        socket.receive(packet); 
        data = new byte[packet.getLength()]; 
        System.arraycopy(packet.getData(), packet.getOffset(), data, 0, packet.getLength()); 

       inputBufferIndex = decoder.dequeueInputBuffer(-1); 
        Log.i("sss", "inputBufferIndex = " + inputBufferIndex); 
       if (inputBufferIndex >= 0) 
       { 
        inputBuffer = inputBuffers[inputBufferIndex]; 
        inputBuffer.clear(); 

        inputBuffer.put(data); 


        decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0); 
//     decoder.flush(); 
       } 

       outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000); 
       Log.i("sss", "outputBufferIndex = " + outputBufferIndex); 

       while (outputBufferIndex >= 0) 
       { 
        outputBuffer = outputBuffers[outputBufferIndex]; 

        outputBuffer.position(bufferInfo.offset); 
        outputBuffer.limit(bufferInfo.offset + bufferInfo.size); 

        outData = new byte[bufferInfo.size]; 
        outputBuffer.get(outData); 


        decoder.releaseOutputBuffer(outputBufferIndex, false); 
        outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 0); 

       } 

       } catch (SocketTimeoutException e) { 
        Log.d("thread", "timeout"); 
       } 
      } 
     } catch (Exception e) { 
      e.printStackTrace(); 
     } 
    } 
} 

我覺得從ffmpeg的流是不是因爲我可以打開它的一個問題mxPlayer通過sdp文件。 如果我通過這個流到本地RTSP服務器(由VLC),然後我使用MediaPlayer獲得RTSP流,它的工作原理,但很慢。

我看着該分組後,我意識到

  • 前四個字節是首標和序列號
  • 下四個字節是時間戳
  • 下四個字節被源標識符

因此,我將前12個字節剪出,並將數據包與TimeStamp相同。然後把它放在緩衝區這樣

在一段時間(真)接收的數據包後

   Log.i("sss", "Received = " + data.length + " bytes"); 
       Log.i("sss","prev " + prevData.length + " bytes = " + getBytesStr(prevData)); 
       Log.i("sss","data " + data.length + " bytes = " + getBytesStr(data)); 

         if(data[4] == prevData[4] && data[5] == prevData[5] && data[6] == prevData[6] && data[7] == prevData[7]){ 
          byte[] playload = new byte[prevData.length -12]; 
          System.arraycopy(prevData,12,playload, 0, prevData.length-12); 
          playLoads.add(playload); 
          playloadSize += playload.length; 
          Log.i("sss", "Same timeStamp playload " + playload.length + " bytes = " + getBytesStr(playload)); 
         } else { 
          if(playLoads.size() > 0){ 
           byte[] playload = new byte[prevData.length -12]; 
           System.arraycopy(prevData,12,playload, 0, prevData.length-12); 
           playLoads.add(playload); 
           playloadSize += playload.length; 
           Log.i("sss", "last playload " + playload.length + " bytes = " + getBytesStr(playload)); 

           inputBufferIndex = decoder.dequeueInputBuffer(-1); 
           if (inputBufferIndex >= 0){ 
            inputBuffer = inputBuffers[inputBufferIndex]; 
            inputBuffer.clear(); 
            byte[] allPlayload = new byte[playloadSize]; 
            int curLength = 0; 
            for(byte[] playLoad:playLoads){ 
             System.arraycopy(playLoad,0,allPlayload, curLength, playLoad.length); 
             curLength += playLoad.length; 
            } 
            Log.i("sss", "diff timeStamp AlllayLoad " + allPlayload.length + "bytes = " + getBytesStr(allPlayload)); 
            inputBuffer.put(allPlayload); 

            decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0); 
            decoder.flush(); 
           } 

           bufferInfo = new MediaCodec.BufferInfo(); 
           outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000); 
           if(outputBufferIndex!= -1) 
            Log.i("sss", "outputBufferIndex = " + outputBufferIndex); 

           playLoads = new ArrayList<>(); 
           prevData = new byte[65535]; 
           playloadSize = 0; 
          } 

         } 

        prevData = data.clone(); 

的outputBufferIndex仍返回-1

如果我從10000變更timeoutUS爲-1,它從來沒有去下一行

我搜索了一個星期,但仍沒有運氣T_T

爲什麼dequeueOutputBuffer總是返回-1?

我的代碼有什麼問題?

您能否正確優化我的代碼以正常工作?

感謝您的幫助。

編輯#1

感謝@mstorsjo引導我打包,我發現有用的信息來源

How to process raw UDP packets so that they can be decoded by a decoder filter in a directshow source filter

然後我編輯下面

if((data[12] & 0x1f) == 28){ 
    if((data[13] & 0x80) == 0x80){ //found start bit 
     inputBufferIndex = decoder.dequeueInputBuffer(-1); 
     if (inputBufferIndex >= 0){ 
     inputBuffer = inputBuffers[inputBufferIndex]; 
     inputBuffer.clear(); 
     byte result = (byte)((bytes[12] & 0xe0) + (bytes[13] & 0x1f)); 
     inputBuffer.put(new byte[] {0,0,1}); 
     inputBuffer.put(result); 
     inputBuffer.put(data,14, data.length-14); 
     } 

    } else if((data[13] &0x40) == 0x40){ //found stop bit 
     inputBuffer.put(data, 14, data.length -14); 
     decoder.queueInputBuffer(inputBufferIndex, 0, data.length, 0, 0); 
     bufferInfo = new MediaCodec.BufferInfo(); 
     outputBufferIndex = decoder.dequeueOutputBuffer(bufferInfo, 10000); 

     switch(outputBufferIndex) 
     { 
     case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 
      outputBuffers = decoder.getOutputBuffers(); 
      Log.w("sss", "Output Buffers Changed"); 
      break; 
     case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 
      Log.w("sss", "Output Format Changed"); 
      MediaFormat newFormat = decoder.getOutputFormat(); 
      Log.i("sss","New format : " + newFormat); 

      break; 
     case MediaCodec.INFO_TRY_AGAIN_LATER: 
      Log.w("sss", "Try Again Later"); 
      break; 
     default: 
      outputBuffer = outputBuffers[outputBufferIndex]; 
      outputBuffer.position(bufferInfo.offset); 
      outputBuffer.limit(bufferInfo.offset + bufferInfo.size); 
      decoder.releaseOutputBuffer(outputBufferIndex, true); 

     } 
    } else { 
     inputBuffer.put(data, 14, data.length -14); 
    } 
} 

現在我可以在我的代碼看到一些圖片,但大部分屏幕是灰色的

接下來我該怎麼做?

謝謝。

回答

1

你不能丟棄RTP頭並假裝其餘的包是正常的H264幀 - 事實並非如此。請參閱RFC 6184以獲取H264打包到RTP時使用的格式的解釋。您需要撤銷這個打包,將數據恢復爲普通解碼器可以處理的格式。有關如何執行此操作的示例,可以在libav/ffmpeg中查看libavformat/rtpdec_h264.c

+0

它的工作原理!但是......我的大部分屏幕都是灰色的......就像編輯#1 –

0

這可能會很晚,但我可以看到兩個可能的問題。 1)您只查看NAL類型28(FU-A)的NAL單元,但ffmpeg正在發送類型爲1,24和28的NAL單元。類型24 NAL單元可以毫無風險地被忽略,但類型1的NAL單元不能被忽略(它們有NRI> 0)。

2)rtp流不一定按它們發送的順序到達。因此,幀可能以錯誤的順序重構。爲了確保正確的順序,您必須查看rtp頭文件中的時間戳。

一個好的庫我發現這是否是Anroid Streaming Client。您需要稍微修改它以在MediaFormat中使用正確的csd-0/csd-1,並將輸出緩衝區設置爲任意曲面而不是SurfaceView中的一個曲面。