I'm a bit new when it comes to MediaCodec (and video encoding/decoding in general), so please correct me if anything I say here is wrong. How do I play the raw h264 produced by the MediaCodec encoder?

I want to play the raw h264 output of MediaCodec with VLC/ffplay. I need this to play, because my end goal is to stream some live video to a computer, and MediaMuxer only produces a file on disk rather than something I can stream to a desktop with (very) low latency. (I'm open to other solutions, but I haven't found anything else that fits the latency requirement.)

Here is the code I'm using to encode the video and write it to a file (it's based on the MediaCodec example found here, with only the MediaMuxer part removed):

package com.jackos2500.droidtop; 

import android.media.MediaCodec; 
import android.media.MediaCodecInfo; 
import android.media.MediaFormat; 
import android.opengl.EGL14; 
import android.opengl.EGLConfig; 
import android.opengl.EGLContext; 
import android.opengl.EGLDisplay; 
import android.opengl.EGLExt; 
import android.opengl.EGLSurface; 
import android.opengl.GLES20; 
import android.os.Environment; 
import android.util.Log; 
import android.view.Surface; 

import java.io.BufferedOutputStream; 
import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.nio.ByteBuffer; 

public class StreamH264 { 
    private static final String TAG = "StreamH264"; 
    private static final boolean VERBOSE = true;   // lots of logging 

    // where to put the output file (note: /sdcard requires WRITE_EXTERNAL_STORAGE permission) 
    private static final File OUTPUT_DIR = Environment.getExternalStorageDirectory(); 

    public static int MEGABIT = 1000 * 1000; 
    private static final int IFRAME_INTERVAL = 10; 

    private static final int TEST_R0 = 0; 
    private static final int TEST_G0 = 136; 
    private static final int TEST_B0 = 0; 
    private static final int TEST_R1 = 236; 
    private static final int TEST_G1 = 50; 
    private static final int TEST_B1 = 186; 

    private MediaCodec codec; 
    private CodecInputSurface inputSurface; 
    private BufferedOutputStream out; 

    private MediaCodec.BufferInfo bufferInfo; 
    public StreamH264() { 

    } 

    private void prepareEncoder() throws IOException { 
     bufferInfo = new MediaCodec.BufferInfo(); 

     MediaFormat format = MediaFormat.createVideoFormat("video/avc", 1280, 720); 
     format.setInteger(MediaFormat.KEY_BIT_RATE, 2 * MEGABIT); 
     format.setInteger(MediaFormat.KEY_FRAME_RATE, 30); 
     format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface); 
     format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); 

     codec = MediaCodec.createEncoderByType("video/avc"); 
     codec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE); 
     inputSurface = new CodecInputSurface(codec.createInputSurface()); 
     codec.start(); 

     File dst = new File(OUTPUT_DIR, "test.264"); 
     out = new BufferedOutputStream(new FileOutputStream(dst)); 
    } 
    private void releaseEncoder() throws IOException { 
     if (VERBOSE) Log.d(TAG, "releasing encoder objects"); 
     if (codec != null) { 
      codec.stop(); 
      codec.release(); 
      codec = null; 
     } 
     if (inputSurface != null) { 
      inputSurface.release(); 
      inputSurface = null; 
     } 
     if (out != null) { 
      out.flush(); 
      out.close(); 
      out = null; 
     } 
    } 
    public void stream() throws IOException { 
     try { 
      prepareEncoder(); 
      inputSurface.makeCurrent(); 
      for (int i = 0; i < (30 * 5); i++) { 
       // Feed any pending encoder output into the file. 
       drainEncoder(false); 

       // Generate a new frame of input. 
       generateSurfaceFrame(i); 
       inputSurface.setPresentationTime(computePresentationTimeNsec(i, 30)); 

       // Submit it to the encoder. The eglSwapBuffers call will block if the input 
       // is full, which would be bad if it stayed full until we dequeued an output 
       // buffer (which we can't do, since we're stuck here). So long as we fully drain 
       // the encoder before supplying additional input, the system guarantees that we 
       // can supply another frame without blocking. 
       if (VERBOSE) Log.d(TAG, "sending frame " + i + " to encoder"); 
       inputSurface.swapBuffers(); 
      } 
      // send end-of-stream to encoder, and drain remaining output 
      drainEncoder(true); 
     } finally { 
      // release encoder, muxer, and input Surface 
      releaseEncoder(); 
     } 
    } 

    private void drainEncoder(boolean endOfStream) throws IOException { 
     final int TIMEOUT_USEC = 10000; 
     if (VERBOSE) Log.d(TAG, "drainEncoder(" + endOfStream + ")"); 

     if (endOfStream) { 
      if (VERBOSE) Log.d(TAG, "sending EOS to encoder"); 
      codec.signalEndOfInputStream(); 
     } 
     ByteBuffer[] outputBuffers = codec.getOutputBuffers(); 
     while (true) { 
      int encoderStatus = codec.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC); 
      if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) { 
       // no output available yet 
       if (!endOfStream) { 
        break;  // out of while 
       } else { 
        if (VERBOSE) Log.d(TAG, "no output available, spinning to await EOS"); 
       } 
      } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { 
       // not expected for an encoder 
       outputBuffers = codec.getOutputBuffers(); 
      } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 
       // should happen before receiving buffers, and should only happen once 
       MediaFormat newFormat = codec.getOutputFormat(); 
       Log.d(TAG, "encoder output format changed: " + newFormat); 
      } else if (encoderStatus < 0) { 
       Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus); 
       // let's ignore it 
      } else { 
       ByteBuffer encodedData = outputBuffers[encoderStatus]; 
       if (encodedData == null) { 
        throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null"); 
       } 

       if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 
        // The codec config data was pulled out and fed to the muxer when we got 
        // the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it. 
        if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG"); 
        bufferInfo.size = 0; 
       } 

       if (bufferInfo.size != 0) { 
        // adjust the ByteBuffer values to match BufferInfo (not needed?) 
        encodedData.position(bufferInfo.offset); 
        encodedData.limit(bufferInfo.offset + bufferInfo.size); 

        byte[] data = new byte[bufferInfo.size]; 
        encodedData.get(data); 
        out.write(data); 
        if (VERBOSE) Log.d(TAG, "sent " + bufferInfo.size + " bytes to file"); 
       } 

       codec.releaseOutputBuffer(encoderStatus, false); 

       if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { 
        if (!endOfStream) { 
         Log.w(TAG, "reached end of stream unexpectedly"); 
        } else { 
         if (VERBOSE) Log.d(TAG, "end of stream reached"); 
        } 
        break;  // out of while 
       } 
      } 
     } 
    } 
    private void generateSurfaceFrame(int frameIndex) { 
     frameIndex %= 8; 

     int startX, startY; 
     if (frameIndex < 4) { 
      // (0,0) is bottom-left in GL 
      startX = frameIndex * (1280/4); 
      startY = 720/2; 
     } else { 
      startX = (7 - frameIndex) * (1280/4); 
      startY = 0; 
     } 

     GLES20.glClearColor(TEST_R0/255.0f, TEST_G0/255.0f, TEST_B0/255.0f, 1.0f); 
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 

     GLES20.glEnable(GLES20.GL_SCISSOR_TEST); 
     GLES20.glScissor(startX, startY, 1280/4, 720/2); 
     GLES20.glClearColor(TEST_R1/255.0f, TEST_G1/255.0f, TEST_B1/255.0f, 1.0f); 
     GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); 
     GLES20.glDisable(GLES20.GL_SCISSOR_TEST); 
    } 
    private static long computePresentationTimeNsec(int frameIndex, int frameRate) { 
     final long ONE_BILLION = 1000000000; 
     return frameIndex * ONE_BILLION/frameRate; 
    } 

    /** 
    * Holds state associated with a Surface used for MediaCodec encoder input. 
    * <p> 
    * The constructor takes a Surface obtained from MediaCodec.createInputSurface(), and uses that 
    * to create an EGL window surface. Calls to eglSwapBuffers() cause a frame of data to be sent 
    * to the video encoder. 
    * <p> 
    * This object owns the Surface -- releasing this will release the Surface too. 
    */ 
    private static class CodecInputSurface { 
     private static final int EGL_RECORDABLE_ANDROID = 0x3142; 

     private EGLDisplay mEGLDisplay = EGL14.EGL_NO_DISPLAY; 
     private EGLContext mEGLContext = EGL14.EGL_NO_CONTEXT; 
     private EGLSurface mEGLSurface = EGL14.EGL_NO_SURFACE; 

     private Surface mSurface; 

     /** 
     * Creates a CodecInputSurface from a Surface. 
     */ 
     public CodecInputSurface(Surface surface) { 
      if (surface == null) { 
       throw new NullPointerException(); 
      } 
      mSurface = surface; 

      eglSetup(); 
     } 

     /** 
     * Prepares EGL. We want a GLES 2.0 context and a surface that supports recording. 
     */ 
     private void eglSetup() { 
      mEGLDisplay = EGL14.eglGetDisplay(EGL14.EGL_DEFAULT_DISPLAY); 
      if (mEGLDisplay == EGL14.EGL_NO_DISPLAY) { 
       throw new RuntimeException("unable to get EGL14 display"); 
      } 
      int[] version = new int[2]; 
      if (!EGL14.eglInitialize(mEGLDisplay, version, 0, version, 1)) { 
       throw new RuntimeException("unable to initialize EGL14"); 
      } 

      // Configure EGL for recording and OpenGL ES 2.0. 
      int[] attribList = { 
        EGL14.EGL_RED_SIZE, 8, 
        EGL14.EGL_GREEN_SIZE, 8, 
        EGL14.EGL_BLUE_SIZE, 8, 
        EGL14.EGL_ALPHA_SIZE, 8, 
        EGL14.EGL_RENDERABLE_TYPE, EGL14.EGL_OPENGL_ES2_BIT, 
        EGL_RECORDABLE_ANDROID, 1, 
        EGL14.EGL_NONE 
      }; 
      EGLConfig[] configs = new EGLConfig[1]; 
      int[] numConfigs = new int[1]; 
      EGL14.eglChooseConfig(mEGLDisplay, attribList, 0, configs, 0, configs.length, 
        numConfigs, 0); 
      checkEglError("eglCreateContext RGB888+recordable ES2"); 

      // Configure context for OpenGL ES 2.0. 
      int[] attrib_list = { 
        EGL14.EGL_CONTEXT_CLIENT_VERSION, 2, 
        EGL14.EGL_NONE 
      }; 
      mEGLContext = EGL14.eglCreateContext(mEGLDisplay, configs[0], EGL14.EGL_NO_CONTEXT, 
        attrib_list, 0); 
      checkEglError("eglCreateContext"); 

      // Create a window surface, and attach it to the Surface we received. 
      int[] surfaceAttribs = { 
        EGL14.EGL_NONE 
      }; 
      mEGLSurface = EGL14.eglCreateWindowSurface(mEGLDisplay, configs[0], mSurface, 
        surfaceAttribs, 0); 
      checkEglError("eglCreateWindowSurface"); 
     } 

     /** 
     * Discards all resources held by this class, notably the EGL context. Also releases the 
     * Surface that was passed to our constructor. 
     */ 
     public void release() { 
      if (mEGLDisplay != EGL14.EGL_NO_DISPLAY) { 
       EGL14.eglMakeCurrent(mEGLDisplay, EGL14.EGL_NO_SURFACE, EGL14.EGL_NO_SURFACE, 
         EGL14.EGL_NO_CONTEXT); 
       EGL14.eglDestroySurface(mEGLDisplay, mEGLSurface); 
       EGL14.eglDestroyContext(mEGLDisplay, mEGLContext); 
       EGL14.eglReleaseThread(); 
       EGL14.eglTerminate(mEGLDisplay); 
      } 

      mSurface.release(); 

      mEGLDisplay = EGL14.EGL_NO_DISPLAY; 
      mEGLContext = EGL14.EGL_NO_CONTEXT; 
      mEGLSurface = EGL14.EGL_NO_SURFACE; 

      mSurface = null; 
     } 

     /** 
     * Makes our EGL context and surface current. 
     */ 
     public void makeCurrent() { 
      EGL14.eglMakeCurrent(mEGLDisplay, mEGLSurface, mEGLSurface, mEGLContext); 
      checkEglError("eglMakeCurrent"); 
     } 

     /** 
     * Calls eglSwapBuffers. Use this to "publish" the current frame. 
     */ 
     public boolean swapBuffers() { 
      boolean result = EGL14.eglSwapBuffers(mEGLDisplay, mEGLSurface); 
      checkEglError("eglSwapBuffers"); 
      return result; 
     } 

     /** 
     * Sends the presentation time stamp to EGL. Time is expressed in nanoseconds. 
     */ 
     public void setPresentationTime(long nsecs) { 
      EGLExt.eglPresentationTimeANDROID(mEGLDisplay, mEGLSurface, nsecs); 
      checkEglError("eglPresentationTimeANDROID"); 
     } 

     /** 
     * Checks for EGL errors. Throws an exception if one is found. 
     */ 
     private void checkEglError(String msg) { 
      int error; 
      if ((error = EGL14.eglGetError()) != EGL14.EGL_SUCCESS) { 
       throw new RuntimeException(msg + ": EGL error: 0x" + Integer.toHexString(error)); 
      } 
     } 
    } 
} 

However, the file generated by this code does not play with VLC or ffplay. Can anyone tell me what I'm doing wrong? I believe it's due to an incorrect (or completely missing) header format that's required for playing raw h264, since I've successfully played .264 files downloaded from the internet with ffplay. Also, I'm not sure exactly how I'm going to stream this video to a computer, so if somebody could give me some suggestions as to how I might do that, I'd be very grateful! Thanks!

Answers

You should be able to play back a raw H264 stream as you've written it (as you say, other raw .264 files play back just fine in VLC or ffplay), but you are missing the parameter sets. These are passed in two different ways, and you happen to be missing both of them. First, they are returned in the MediaFormat when you get MediaCodec.INFO_OUTPUT_FORMAT_CHANGED (which you don't handle, you just log a message about it), and secondly they are returned in a buffer with the MediaCodec.BUFFER_FLAG_CODEC_CONFIG flag set (which you ignore by setting the size to 0). The simplest solution here is to remove the special-case handling of MediaCodec.BUFFER_FLAG_CODEC_CONFIG, and it should all work just fine.
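
In the code above that amounts to not zeroing bufferInfo.size; a minimal sketch of the changed block in drainEncoder (the new log text is just illustrative):

    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) { 
     // This buffer carries the SPS/PPS parameter sets with Annex-B start 
     // codes. For a raw .264 stream they must be written to the output, 
     // so don't zero bufferInfo.size here anymore. 
     if (VERBOSE) Log.d(TAG, "writing BUFFER_FLAG_CODEC_CONFIG (SPS/PPS)"); 
    } 

    if (bufferInfo.size != 0) { 
     // unchanged: the parameter sets now flow through this write path too 
     encodedData.position(bufferInfo.offset); 
     encodedData.limit(bufferInfo.offset + bufferInfo.size); 
     byte[] data = new byte[bufferInfo.size]; 
     encodedData.get(data); 
     out.write(data); 
    } 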

The code you based this on does things that way in order to exercise all the different ways of doing things - in the place you copied it from, the parameter sets were carried in the MediaFormat from MediaCodec.INFO_OUTPUT_FORMAT_CHANGED. If you wanted to use that route with your raw H264 bytestream, you could write the byte buffers with the keys csd-0 and csd-1 from that MediaFormat at the start of the stream, and then keep ignoring the buffers that have MediaCodec.BUFFER_FLAG_CODEC_CONFIG set.
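
A sketch of that alternative, replacing the MediaCodec.INFO_OUTPUT_FORMAT_CHANGED branch in drainEncoder; it assumes the encoder publishes the parameter sets under the standard csd-0/csd-1 keys (SPS and PPS respectively, each already prefixed with an Annex-B start code), which is how Android's AVC encoders normally behave:

    } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { 
     MediaFormat newFormat = codec.getOutputFormat(); 
     Log.d(TAG, "encoder output format changed: " + newFormat); 
     // csd-0 holds the SPS NAL unit, csd-1 the PPS; write them once at 
     // the start of the stream, then keep dropping the CODEC_CONFIG 
     // buffers (bufferInfo.size = 0) as the original code already does. 
     ByteBuffer sps = newFormat.getByteBuffer("csd-0"); 
     ByteBuffer pps = newFormat.getByteBuffer("csd-1"); 
     byte[] config = new byte[sps.remaining() + pps.remaining()]; 
     int spsLen = sps.remaining(); 
     sps.get(config, 0, spsLen); 
     pps.get(config, spsLen, pps.remaining()); 
     out.write(config); 
    } 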

Sorry for the late reply, but removing the code that ignored those parameters by setting bufferInfo.size to 0 works perfectly, thanks! – jackos2500 2014-11-02 19:28:13

I had been trying for days to find out how to generate the SPS/PPS headers with MediaCodec. Thanks for this answer. – yorkw 2015-11-03 03:50:55

You can't play pure h264 directly; it doesn't carry any information about its format. You can also find several good examples here. For streaming, you need to implement some streaming protocol, such as RTSP (for real-time streaming) or the more flexible HLS (if real time isn't required).

OK, if I give up on playing the raw video, how could I send a frame to the computer, have it decoded, and have it shown on the screen? (preferably using Java) – jackos2500 2014-10-31 16:23:27

Well, you can send a frame over TCP or UDP (with a description of how many bytes it is), and then feed it to some h264 decoder, telling it the format, such as the bps, width/height, etc. Since you're streaming to a computer, you need your own decoder application, probably using some library to help you decode the video and render it (usually with OpenGL). – 2014-10-31 16:26:52
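
As a rough sketch of that suggestion (the class name, host, and port below are hypothetical, not anything from this thread), the phone side could push each encoded buffer over plain TCP with a 4-byte length prefix:

import java.io.DataOutputStream; 
import java.io.IOException; 
import java.net.Socket; 

public class FrameSender { 
    private final DataOutputStream net; 

    public FrameSender(String host, int port) throws IOException { 
     // Plain TCP; on Android, open the socket off the main thread. 
     net = new DataOutputStream(new Socket(host, port).getOutputStream()); 
    } 

    // Call this where drainEncoder currently does out.write(data). 
    public void sendFrame(byte[] data) throws IOException { 
     net.writeInt(data.length); // 4-byte big-endian size prefix 
     net.write(data);           // the encoded H264 access unit 
     net.flush();               // flush per frame to keep latency low 
    } 
} 

The desktop side would then loop reading the 4-byte length, then that many bytes, and feed each chunk (parameter sets first) to its decoder. Note that VLC/ffplay won't understand this framing directly, so a small receiver program has to strip the prefixes.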
