
I want to stream video captured directly from the camera of an Android device. So far I can grab every frame from the camera's onPreviewFrame(byte[] data, Camera camera) callback, encode the data, and then successfully decode it and render it to a surface. I am using Android's MediaCodec for both encoding and decoding. However, the colors and the orientation of the video are wrong (rotated 90 degrees). After searching for a while I found this YV12toYUV420PackedSemiPlanar function. If I run the raw camera data through it before handing it to the encoder, the colors come out correct, but the video is still rotated 90 degrees:

public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) { 

    final int frameSize = width * height; 
    final int qFrameSize = frameSize/4; 
    byte[] output = new byte[input.length]; 

    // NOTE: this first loop (with its odd "- 32 - 320" offsets) is dead code -- 
    // everything it writes is overwritten by the arraycopy and loop below. 
    System.arraycopy(input, 0, output, 0, frameSize); 
    for (int i = 0; i < qFrameSize; i++) 
    { 
     byte b = (input[frameSize + qFrameSize + i - 32 - 320]); 
     output[frameSize + i*2] = b; 
     output[frameSize + i*2 + 1] = (input[frameSize + i - 32 - 320]); 
    } 

    System.arraycopy(input, 0, output, 0, frameSize); // Y 
    for (int i = 0; i < qFrameSize; i++) { 
     output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U) 
     output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V) 
    } 
    return output; 
} 

[screenshot: decoded output; colors correct but the frame is rotated 90 degrees]
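(Side note: which YUV420 input layout a given encoder actually accepts varies by device, so rather than hard-coding COLOR_FormatYUV420SemiPlanar you can probe for it. A minimal sketch, using only the API 16 MediaCodecList introspection calls; this is an illustration, not part of the original code:)

public static int selectColorFormat(String mime) { 
    // Walk all codecs, find an encoder for the given MIME type, and return 
    // the first common YUV420 color format it advertises. 
    for (int i = 0; i < MediaCodecList.getCodecCount(); i++) { 
     MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i); 
     if (!info.isEncoder()) 
      continue; 
     for (String type : info.getSupportedTypes()) { 
      if (!type.equalsIgnoreCase(mime)) 
       continue; 
      MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime); 
      for (int colorFormat : caps.colorFormats) { 
       if (colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar 
         || colorFormat == MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar) { 
        return colorFormat; 
       } 
      } 
     } 
    } 
    return -1; // no suitable encoder/format found 
} 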

Then I call this function, rotateYUV420Degree90, on the result of YV12toYUV420PackedSemiPlanar. Now the orientation and colors look right, but the output video is badly distorted:

private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight) 
{ 
    // NOTE: the rotated frame is still treated as imageWidth x imageHeight here; 
    // after a 90-degree rotation the two should be swapped (see the answers below). 
    byte [] yuv = new byte[imageWidth*imageHeight*3/2]; 
    // Rotate the Y luma 
    int i = 0; 
    for(int x = 0;x < imageWidth;x++) 
    { 
     for(int y = imageHeight-1;y >= 0;y--)        
     { 
      yuv[i] = data[y*imageWidth+x]; 
      i++; 
     } 
    } 
    // Rotate the U and V color components 
    i = imageWidth*imageHeight*3/2-1; 
    for(int x = imageWidth-1;x > 0;x=x-2) 
    { 
     for(int y = 0;y < imageHeight/2;y++)         
     { 
      yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+x]; 
      i--; 
      yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+(x-1)]; 
      i--; 
     } 
    } 
    return yuv; 
} 

[screenshot: output after rotation; orientation correct but the image is heavily distorted]

I know very little about color formats and camera data, so I can't figure out what I'm doing wrong. Here is my complete code; please take a look and help me find my mistake.

Thanks in advance.

import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.nio.ByteBuffer; 
import java.util.concurrent.ArrayBlockingQueue; 
import java.util.concurrent.BlockingQueue; 

import android.app.Activity; 
import android.content.Context; 
import android.graphics.ImageFormat; 
import android.hardware.Camera; 
import android.hardware.Camera.PreviewCallback; 
import android.media.MediaCodec; 
import android.media.MediaCodec.BufferInfo; 
import android.media.MediaCodecInfo; 
import android.media.MediaFormat; 
import android.os.Bundle; 
import android.os.Handler; 
import android.util.AttributeSet; 
import android.util.Log; 
import android.view.Surface; 
import android.view.SurfaceHolder; 
import android.view.SurfaceView; 
import android.widget.Button; 
import android.widget.LinearLayout; 
import android.widget.RelativeLayout; 
import android.widget.Toast; 

public class MainActivity extends Activity implements SurfaceHolder.Callback { 

    Camera mCamera; 
    FileOutputStream fos; 
    File mVideoFile; 
    MediaCodec mMediaCodec; 
    ByteBuffer[] inputBuffers; 
    ByteBuffer[] outputBuffers; 
    MySurfaceView cameraSurfaceView ; 
    SurfaceView decodedSurfaceView ; 
    LinearLayout ll; 
    RelativeLayout rl; 
    Button btn; 
    boolean mPreviewRunning = false; 
    boolean firstTime = true; 
    boolean isRunning = false; 
    public static final String ENCODING = "h264"; 

    private PlayerThread mPlayer = null; 
    Handler handler = null; 
    public static byte[] SPS = null; 
    public static byte[] PPS = null; 
    public static int frameID = 0; 
    BlockingQueue<Frame> queue = new ArrayBlockingQueue<Frame>(100); 

    private static class Frame 
    { 
     public int id; 
     public byte[] frameData; 

     public Frame(int id) 
     { 
      this.id = id; 
     } 
    } 

    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     super.onCreate(savedInstanceState); 

     ll = new LinearLayout(getApplicationContext()); 
     ll.setOrientation(LinearLayout.VERTICAL); 

     cameraSurfaceView = new MySurfaceView(getApplicationContext()); 
     if(ENCODING.equalsIgnoreCase("h264")) 
     { 
      cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240)); 
     } 
     else if(ENCODING.equalsIgnoreCase("h263")) 
     { 
      cameraSurfaceView.setLayoutParams(new android.widget.FrameLayout.LayoutParams(352, 288)); 
     } 
     ll.addView(cameraSurfaceView); 

     initCodec(); 
     setContentView(ll); 

    } 

    @Override 
    protected void onPause() { 

     super.onPause(); 
     mPreviewRunning = false; 

     if(cameraSurfaceView !=null && cameraSurfaceView.isEnabled()) 
      cameraSurfaceView.setEnabled(false); 
     cameraSurfaceView = null; 

     if(mCamera != null) 
     { 
      mCamera.stopPreview(); 
      mCamera.release(); 
     } 

     mMediaCodec.stop(); 
     mMediaCodec.release(); 
     mMediaCodec = null; 

     // exit only after the codec has been released; with System.exit(0) first, 
     // the stop/release calls above would never run 
     System.exit(0); 
    } 


    private void initCodec() { 

     MediaFormat mediaFormat = null; 

     if(mMediaCodec != null) 
     { 
      mMediaCodec.stop(); 
      mMediaCodec.release(); 
      mMediaCodec = null; 
     } 

     if(ENCODING.equalsIgnoreCase("h264")) 
     { 
      mMediaCodec = MediaCodec.createEncoderByType("video/avc"); 
      mediaFormat = MediaFormat.createVideoFormat("video/avc", 
        320, 
        240); 
     } 
     else if(ENCODING.equalsIgnoreCase("h263")) 
     { 
      mMediaCodec = MediaCodec.createEncoderByType("video/3gpp"); 
      mediaFormat = MediaFormat.createVideoFormat("video/3gpp", 
        352, 
        288); 
     } 

     mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, 125000); 
     mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 15); 
     mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5); 
     // (KEY_SAMPLE_RATE and KEY_CHANNEL_COUNT are audio keys; they have no effect on a video format) 
     mediaFormat.setInteger(MediaFormat.KEY_SAMPLE_RATE, 8000); 
     mediaFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1); 

     try 
     { 
      mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, 
        MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar); 

      mMediaCodec.configure(mediaFormat, 
        null, 
        null, 
        MediaCodec.CONFIGURE_FLAG_ENCODE); 
      frameID = 0; 
      mMediaCodec.start(); 
     } 
     catch(Exception e) 
     { 
      Toast.makeText(getApplicationContext(), "mediaformat error", Toast.LENGTH_LONG).show(); 
      e.printStackTrace(); 
     } 

    } 

    /**========================================================================*/ 
    /** This function gets the starting index of the first appearance of match array in source array. The function will search in source array from startIndex position.*/ 
    public static int find(byte[] source, byte[] match, int startIndex) 
    { 
     if(source == null || match == null) 
     { 
      Log.d("EncodeDecode", "ERROR in find : null"); 
      return -1; 
     } 
     if(source.length == 0 || match.length == 0) 
     { 
      Log.d("EncodeDecode", "ERROR in find : length 0"); 
      return -1; 
     } 

     int ret = -1; 
     int spos = startIndex; 
     int mpos = 0; 
     byte m = match[mpos]; 
     for(; spos < source.length; spos++) 
     { 
      if(m == source[spos]) 
      { 
       // starting match 
       if(mpos == 0) 
        ret = spos; 
       // finishing match 
       else if(mpos == match.length - 1) 
        return ret; 

       mpos++; 
       m = match[mpos]; 
      } 
      else 
      { 
       ret = -1; 
       mpos = 0; 
       m = match[mpos]; 
      } 
     } 
     return ret; 
    } 


    /**========================================================================*/ 
    /** For H264 encoding, this function will retrieve SPS & PPS from the given data and will insert into SPS & PPS global arrays. */ 
    public static void getSPS_PPS(byte[] data, int startingIndex) 
    { 
     byte[] spsHeader = {0x00, 0x00, 0x00, 0x01, 0x67}; 
     byte[] ppsHeader = {0x00, 0x00, 0x00, 0x01, 0x68}; 
     byte[] frameHeader = {0x00, 0x00, 0x00, 0x01}; 

     int spsStartingIndex = -1; 
     int nextFrameStartingIndex = -1; 
     int ppsStartingIndex = -1; 

     spsStartingIndex = find(data, spsHeader, startingIndex); 
     Log.d("EncodeDecode", "spsStartingIndex: " + spsStartingIndex); 
     if(spsStartingIndex >= 0) 
     { 
      nextFrameStartingIndex = find(data, frameHeader, spsStartingIndex+1); 
      int spsLength = 0; 
      if(nextFrameStartingIndex>=0) 
       spsLength = nextFrameStartingIndex - spsStartingIndex; 
      else 
       spsLength = data.length - spsStartingIndex; 
      if(spsLength > 0) 
      { 
       SPS = new byte[spsLength]; 
       System.arraycopy(data, spsStartingIndex, SPS, 0, spsLength); 
      } 
     } 

     ppsStartingIndex = find(data, ppsHeader, startingIndex); 
     Log.d("EncodeDecode", "ppsStartingIndex: " + ppsStartingIndex); 
     if(ppsStartingIndex >= 0) 
     { 
      nextFrameStartingIndex = find(data, frameHeader, ppsStartingIndex+1); 
      int ppsLength = 0; 
      if(nextFrameStartingIndex>=0) 
       ppsLength = nextFrameStartingIndex - ppsStartingIndex; 
      else 
       ppsLength = data.length - ppsStartingIndex; 
      if(ppsLength > 0) 
      { 
       PPS = new byte[ppsLength]; 
       System.arraycopy(data, ppsStartingIndex, PPS, 0, ppsLength); 
      } 
     } 
    } 


    /**========================================================================*/ 
    /** Prints the byte array in hex */ 
    private void printByteArray(byte[] array) 
    { 
     StringBuilder sb1 = new StringBuilder(); 
     for (byte b : array) 
     { 
      sb1.append(String.format("%02X ", b)); 
     } 
     Log.d("EncodeDecode", sb1.toString()); 
    } 

    public static byte[] YV12toYUV420PackedSemiPlanar(final byte[] input, final int width, final int height) { 
     /* 
     * COLOR_TI_FormatYUV420PackedSemiPlanar is NV12 
     * We convert by putting the corresponding U and V bytes together (interleaved). 
     */ 
     final int frameSize = width * height; 
     final int qFrameSize = frameSize/4; 
     byte[] output = new byte[input.length]; 


     // NOTE: this first loop (with its odd "- 32 - 320" offsets) is dead code -- 
     // everything it writes is overwritten by the arraycopy and loop below. 
     System.arraycopy(input, 0, output, 0, frameSize); 
     for (int i = 0; i < (qFrameSize); i++) 
     { 
      byte b = (input[frameSize + qFrameSize + i - 32 - 320]); 
      output[frameSize + i*2] = b; 
      output[frameSize + i*2 + 1] = (input[frameSize + i - 32 - 320]); 
     } 



     System.arraycopy(input, 0, output, 0, frameSize); // Y 

     for (int i = 0; i < qFrameSize; i++) { 
      output[frameSize + i*2] = input[frameSize + i + qFrameSize]; // Cb (U) 
      output[frameSize + i*2 + 1] = input[frameSize + i]; // Cr (V) 
     } 
     return output; 
    } 

    private byte[] rotateYUV420Degree90(byte[] data, int imageWidth, int imageHeight) 
    { 
     // NOTE: the rotated frame is still treated as imageWidth x imageHeight here; 
     // after a 90-degree rotation the two should be swapped (see the answers below). 
     byte [] yuv = new byte[imageWidth*imageHeight*3/2]; 
     // Rotate the Y luma 
     int i = 0; 
     for(int x = 0;x < imageWidth;x++) 
     { 
      for(int y = imageHeight-1;y >= 0;y--)        
      { 
       yuv[i] = data[y*imageWidth+x]; 
       i++; 
      } 
     } 
     // Rotate the U and V color components 
     i = imageWidth*imageHeight*3/2-1; 
     for(int x = imageWidth-1;x > 0;x=x-2) 
     { 
      for(int y = 0;y < imageHeight/2;y++)         
      { 
       yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+x]; 
       i--; 
       yuv[i] = data[(imageWidth*imageHeight)+(y*imageWidth)+(x-1)]; 
       i--; 
      } 
     } 
     return yuv; 
    } 

    /**========================================================================*/ 
    /** When camera receives a frame this function is called with the frame data as its parameter. It encodes the given data and then stores in frameQueue. */ 
    private void encode(byte[] data) 
    { 
     Log.d("EncodeDecode", "ENCODE FUNCTION CALLED"); 
     inputBuffers = mMediaCodec.getInputBuffers(); 
     outputBuffers = mMediaCodec.getOutputBuffers(); 

     int inputBufferIndex = mMediaCodec.dequeueInputBuffer(0); 
     if (inputBufferIndex >= 0) 
     { 
      ByteBuffer inputBuffer = inputBuffers[inputBufferIndex]; 
      inputBuffer.clear(); 

      int size = inputBuffer.limit(); // note: safer to queue the actual frame length (output.length) than the buffer's limit 
      //inputBuffer.put(data); 

      // color right, but rotated 
      byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240); 
      inputBuffer.put(output); 

      // color almost right, orientation ok but distorted 
      /*byte[] output = YV12toYUV420PackedSemiPlanar(data,320,240); 
      output = rotateYUV420Degree90(output,320,240); 
      inputBuffer.put(output);*/ 

      mMediaCodec.queueInputBuffer(inputBufferIndex, 0 /* offset */, size, 0 /* timeUs */, 0); 
      Log.d("EncodeDecode", "InputBuffer queued"); 
     } 
     else 
     { 
      Log.d("EncodeDecode", "inputBufferIndex < 0, returning null"); 
      return ; 
     } 

     MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo(); 
     int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0); 
     Log.d("EncodeDecode", "outputBufferIndex = " + outputBufferIndex); 
     do 
     { 
      if (outputBufferIndex >= 0) 
      { 
       Frame frame = new Frame(frameID); 
       ByteBuffer outBuffer = outputBuffers[outputBufferIndex]; 
       byte[] outData = new byte[bufferInfo.size]; 
       byte idrFrameType = 0x65; 
       int dataLength = 0; 

       outBuffer.get(outData); 

       // If SPS & PPS is not ready then 
       if(ENCODING.equalsIgnoreCase("h264") && ((SPS == null || SPS.length ==0) || (PPS == null || PPS.length == 0))) 
        getSPS_PPS(outData, 0); 

       dataLength = outData.length; 

       // If the frame is an IDR Frame then adding SPS & PPS in front of the actual frame data 
       if(ENCODING.equalsIgnoreCase("h264") && outData[4] == idrFrameType) 
       { 
        int totalDataLength = dataLength + SPS.length + PPS.length; 

        frame.frameData = new byte[totalDataLength]; 

        System.arraycopy(SPS, 0, frame.frameData, 0, SPS.length); 
        System.arraycopy(PPS, 0, frame.frameData, SPS.length, PPS.length); 
        System.arraycopy(outData, 0 , frame.frameData, SPS.length+PPS.length, dataLength); 
       } 
       else 
       { 
        frame.frameData = new byte[dataLength]; 
        System.arraycopy(outData, 0 , frame.frameData, 0, dataLength); 
       } 

       // for testing 
       Log.d("EncodeDecode" , "Frame no :: " + frameID + " :: frameSize:: " + frame.frameData.length + " :: "); 
       printByteArray(frame.frameData); 

       // if encoding type is h264 and sps & pps is ready then, enqueueing the frame in the queue 
       // if encoding type is h263 then, enqueueing the frame in the queue 
       if((ENCODING.equalsIgnoreCase("h264") && SPS != null && PPS != null && SPS.length != 0 && PPS.length != 0) || ENCODING.equalsIgnoreCase("h263")) 
       { 
        Log.d("EncodeDecode", "enqueueing frame no: " + (frameID)); 

        try 
        { 
         queue.put(frame); 
        } 
        catch(InterruptedException e) 
        { 
         Log.e("EncodeDecode", "interrupted while waiting"); 
         e.printStackTrace(); 
        } 
        catch(NullPointerException e) 
        { 
         Log.e("EncodeDecode", "frame is null"); 
         e.printStackTrace(); 
        } 
        catch(IllegalArgumentException e) 
        { 
         Log.e("EncodeDecode", "problem inserting in the queue"); 
         e.printStackTrace(); 
        } 

        Log.d("EncodeDecode", "frame enqueued. queue size now: " + queue.size()); 

        if(firstTime) 
        { 
         Log.d("EncodeDecode", "adding a surface to layout for decoder"); 
         SurfaceView sv = new SurfaceView(getApplicationContext()); 
         handler = new Handler(); 
         sv.getHolder().addCallback(MainActivity.this); 
         sv.setLayoutParams(new android.widget.FrameLayout.LayoutParams(320, 240)); 
         ll.addView(sv,1); 
         MainActivity.this.setContentView(ll); 
         firstTime = false; 
        } 
       } 

       frameID++; 
       mMediaCodec.releaseOutputBuffer(outputBufferIndex, false); 
       outputBufferIndex = mMediaCodec.dequeueOutputBuffer(bufferInfo, 0); 

      } 
      else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) 
      { 
       outputBuffers = mMediaCodec.getOutputBuffers(); 
       Log.e("EncodeDecode","output buffer of encoder : info changed"); 
      } 
      else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) 
      { 
       Log.e("EncodeDecode","output buffer of encoder : format changed"); 
      } 
      else 
      { 
       Log.e("EncodeDecode", "unknown value of outputBufferIndex : " + outputBufferIndex); 
       //printByteArray(data); 
      } 
     } while (outputBufferIndex >= 0); 
    } 

    private class MySurfaceView extends SurfaceView implements SurfaceHolder.Callback 
    { 
     SurfaceHolder holder; 
     public MySurfaceView(Context context) { 
      super(context); 
      holder = this.getHolder(); 
      holder.addCallback(this); 
     } 

     public MySurfaceView(Context context, AttributeSet attrs) { 
      super(context,attrs); 
      holder = this.getHolder(); 
      holder.addCallback(this); 
     } 

     public void surfaceCreated(SurfaceHolder holder) { 
      try 
      { 
       try 
       { 
        if(mCamera == null) 
         mCamera = Camera.open(); 
        mCamera.setDisplayOrientation(90); 
        Log.d("EncodeDecode","Camera opened"); 
       } 
       catch (Exception e) 
       { 
        Log.d("EncodeDecode","Camera open failed"); 
        e.printStackTrace(); 
       } 

       Camera.Parameters p = mCamera.getParameters(); 

       if(ENCODING.equalsIgnoreCase("h264")) 
        p.setPreviewSize(320, 240); 
       else if(ENCODING.equalsIgnoreCase("h263")) 
        p.setPreviewSize(352, 288); 

       mCamera.setParameters(p); 
       mCamera.setPreviewDisplay(holder); 

       mCamera.setPreviewCallback(new PreviewCallback() 
       { 
        @Override 
        public void onPreviewFrame(byte[] data, Camera camera) 
        { 
         Log.d("EncodeDecode", "onPreviewFrame, calling encode function"); 
         encode(data); 
        } 
       }); 
       mCamera.startPreview(); 
       mPreviewRunning = true; 
      } 
      catch (IOException e) 
      { 
       Log.e("EncodeDecode","surfaceCreated():: in setPreviewDisplay(holder) function"); 
       e.printStackTrace(); 
      } 
      catch (NullPointerException e) 
      { 
       Log.e("EncodeDecode","surfaceCreated Nullpointer"); 
       e.printStackTrace(); 
      } 
     } 

     public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) 
     { 
      if (mPreviewRunning) 
      { 
       mCamera.stopPreview(); 
       Log.e("EncodeDecode","preview stopped"); 
      } 
      try 
      { 
       if(mCamera == null) 
       { 
        mCamera = Camera.open(); 
        mCamera.setDisplayOrientation(90); 
       } 

       Camera.Parameters p = mCamera.getParameters(); 
       if(ENCODING.equalsIgnoreCase("h264")) 
        p.setPreviewSize(320, 240); 
       else if(ENCODING.equalsIgnoreCase("h263")) 
        p.setPreviewSize(352, 288); 

       p.setPreviewFormat(ImageFormat.YV12); 
       mCamera.setParameters(p); 
       mCamera.setPreviewDisplay(holder); 
       mCamera.unlock(); 
       mCamera.reconnect(); 
       mCamera.setPreviewCallback(new PreviewCallback() 
       { 
        @Override 
        public void onPreviewFrame(byte[] data, Camera camera) 
        { 
         Log.d("EncodeDecode", "onPreviewFrame, calling encode function"); 
         encode(data); 
        } 
       }); 
       Log.d("EncodeDecode", "previewCallBack set"); 
       mCamera.startPreview(); 
       mPreviewRunning = true; 
      } 
      catch (Exception e) 
      { 
       Log.e("EncodeDecode","surface changed:set preview display failed"); 
       e.printStackTrace(); 
      } 

     } 

     public void surfaceDestroyed(SurfaceHolder holder) 
     { 

     } 
    } 


    @Override 
    public void surfaceCreated(SurfaceHolder holder) 
    { 
     Log.d("EncodeDecode", "mainActivity surfaceCreated"); 
    } 

    @Override 
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) 
    { 
     Log.d("EncodeDecode", "mainActivity surfaceChanged."); 
     if (mPlayer == null) 
     { 
      mPlayer = new PlayerThread(holder.getSurface()); 
      mPlayer.start(); 
      Log.d("EncodeDecode", "PlayerThread started"); 
     } 
    } 

    @Override 
    public void surfaceDestroyed(SurfaceHolder holder) 
    { 
     if (mPlayer != null) 
     { 
      mPlayer.interrupt();  
     } 
    } 

    private class PlayerThread extends Thread 
    { 
     //private MediaExtractor extractor; 
     private MediaCodec decoder; 
     private Surface surface; 

     public PlayerThread(Surface surface) 
     { 
      this.surface = surface; 
     } 

     @Override 
     public void run() 
     { 
      while(SPS == null || PPS == null || SPS.length == 0 || PPS.length == 0) 
      { 
       try 
       { 
        Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps not ready yet"); 
        sleep(1000); 
       } catch (InterruptedException e) { 
        e.printStackTrace(); 

       } 
      } 

      Log.d("EncodeDecode", "DECODER_THREAD:: sps,pps READY"); 

      if(ENCODING.equalsIgnoreCase("h264")) 
      { 
       decoder = MediaCodec.createDecoderByType("video/avc"); 
       MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", 320, 240); 
       mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(SPS)); 
       mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(PPS)); 
       decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */); 
      } 
      else if(ENCODING.equalsIgnoreCase("h263")) 
      { 
       decoder = MediaCodec.createDecoderByType("video/3gpp"); 
       MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/3gpp", 352, 288); 
       decoder.configure(mediaFormat, surface /* surface */, null /* crypto */, 0 /* flags */); 
      } 

      if (decoder == null) 
      { 
       Log.e("DecodeActivity", "DECODER_THREAD:: Can't find video info!"); 
       return; 
      } 

      decoder.start(); 
      Log.d("EncodeDecode", "DECODER_THREAD:: decoder.start() called"); 

      ByteBuffer[] inputBuffers = decoder.getInputBuffers(); 
      ByteBuffer[] outputBuffers = decoder.getOutputBuffers(); 


      int i = 0; 
      while(!Thread.interrupted()) 
      { 
       Frame currentFrame = null; 
       try 
       { 
        Log.d("EncodeDecode", "DECODER_THREAD:: calling queue.take(), if there is no frame in the queue it will wait"); 
        currentFrame = queue.take(); 
       } 
       catch (InterruptedException e) 
       { 
        Log.e("EncodeDecode","DECODER_THREAD:: interrupted while PlayerThread was waiting for the next frame"); 
        e.printStackTrace(); 
       } 

       if(currentFrame == null) 
        Log.e("EncodeDecode","DECODER_THREAD:: null frame dequeued"); 
       else 
        Log.d("EncodeDecode","DECODER_THREAD:: " + currentFrame.id + " no frame dequeued"); 

       if(currentFrame != null && currentFrame.frameData != null && currentFrame.frameData.length != 0) 
       { 
        Log.d("EncodeDecode", "DECODER_THREAD:: decoding frame no: " + i + " , dataLength = " + currentFrame.frameData.length); 

        int inIndex = 0; 
        while ((inIndex = decoder.dequeueInputBuffer(1)) < 0) 
         ; 

        if (inIndex >= 0) 
        { 
         Log.d("EncodeDecode", "DECODER_THREAD:: sample size: " + currentFrame.frameData.length); 

         ByteBuffer buffer = inputBuffers[inIndex]; 
         buffer.clear(); 
         buffer.put(currentFrame.frameData); 
         decoder.queueInputBuffer(inIndex, 0, currentFrame.frameData.length, 0, 0); 

         BufferInfo info = new BufferInfo(); 
         int outIndex = decoder.dequeueOutputBuffer(info, 100000); 

         switch (outIndex) 
         { 
         case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: 
          Log.e("EncodeDecode", "DECODER_THREAD:: INFO_OUTPUT_BUFFERS_CHANGED"); 
          outputBuffers = decoder.getOutputBuffers(); 
          break; 
         case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: 
          Log.e("EncodeDecode", "DECODER_THREAD:: New format " + decoder.getOutputFormat()); 

          break; 
         case MediaCodec.INFO_TRY_AGAIN_LATER: 
          Log.e("EncodeDecode", "DECODER_THREAD:: dequeueOutputBuffer timed out!"); 
          break; 
         default: 
          Log.d("EncodeDecode", "DECODER_THREAD:: decoded SUCCESSFULLY!!!"); 
          ByteBuffer outbuffer = outputBuffers[outIndex]; 
          decoder.releaseOutputBuffer(outIndex, true); 
          break; 
         } 
         i++; 
        } 
       } 
      } 

      decoder.stop(); 
      decoder.release(); 

     } 
    } 
} 

The camera's capture orientation is fixed; it looks "correct" when the device is held in landscape. See the "Show + capture camera" test in Grafika for an example. If you have a Nexus 5 held in portrait, the camera image will come out landscape; the camera app quietly "fixes" this for you behind the scenes. As for the rotation, your "before" and "after" images above appear to have the same dimensions, which seems wrong, since the camera is not capturing square pictures. – fadden
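(In other words, Camera.setDisplayOrientation() only rotates the on-screen preview; the byte[] buffers delivered to onPreviewFrame() stay in the sensor's landscape orientation, so the encoder still sees landscape frames. A small sketch, assuming camera id 0, of how the required rotation could be queried instead of hard-coded:)

// Sketch: ask the (pre-Camera2) Camera API how far the sensor image must be 
// rotated clockwise to appear upright; typically 90 for a back camera on a phone. 
Camera.CameraInfo info = new Camera.CameraInfo(); 
Camera.getCameraInfo(0, info); 
int rotationNeeded = info.orientation; // rotate the preview byte[] by this much before encoding 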

Answers


When you rotate the image, newWidth = oldHeight and newHeight = oldWidth, since the source is not a square picture. So you have a choice: either crop and adjust the rotation loops accordingly, or use a different preview size for display. Right now the smaller side of your frame is being stretched along the bigger side of the SurfaceView (or whatever component you use for display), which is what distorts the picture.
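(To illustrate the point: a hedged sketch of a 90-degree clockwise rotation for the NV12-style data produced by YV12toYUV420PackedSemiPlanar, writing into a frame whose dimensions are swapped. The encoder and decoder would then have to be configured with the swapped size too, e.g. createVideoFormat("video/avc", 240, 320) instead of 320x240. This is not code from the answer:)

public static byte[] rotateNV12Degree90(byte[] input, int width, int height) { 
    // The rotated frame is height x width (e.g. 320x240 becomes 240x320). 
    byte[] output = new byte[width * height * 3 / 2]; 
    // Y plane: pixel (x, y) moves to (height - 1 - y, x); the rotated row length is 'height'. 
    for (int y = 0; y < height; y++) { 
     for (int x = 0; x < width; x++) { 
      output[x * height + (height - 1 - y)] = input[y * width + x]; 
     } 
    } 
    // Chroma plane: interleaved U/V pairs on a (width/2 x height/2) grid, same mapping. 
    int frameSize = width * height; 
    for (int y = 0; y < height / 2; y++) { 
     for (int x = 0; x < width / 2; x++) { 
      int src = frameSize + y * width + x * 2; 
      int dst = frameSize + x * height + (height / 2 - 1 - y) * 2; 
      output[dst] = input[src];     // U 
      output[dst + 1] = input[src + 1]; // V 
     } 
    } 
    return output; 
} 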


I have the same problem. Is it possible to crop the byte data? My video comes out distorted on the server side. – LvN
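(Cropping the raw bytes is possible; each plane is just rows of bytes. A hedged sketch for an NV12 frame, with hypothetical names; left, top, cropW and cropH must all be even so the chroma pairs stay aligned:)

public static byte[] cropNV12(byte[] input, int width, int height, 
        int left, int top, int cropW, int cropH) { 
    byte[] out = new byte[cropW * cropH * 3 / 2]; 
    // Y plane: copy cropH rows of cropW bytes each. 
    for (int y = 0; y < cropH; y++) { 
     System.arraycopy(input, (top + y) * width + left, out, y * cropW, cropW); 
    } 
    // Interleaved UV plane: half as many rows, same byte width per row. 
    int srcUV = width * height; 
    int dstUV = cropW * cropH; 
    for (int y = 0; y < cropH / 2; y++) { 
     System.arraycopy(input, srcUV + (top / 2 + y) * width + left, 
       out, dstUV + y * cropW, cropW); 
    } 
    return out; 
} 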


When I was working on an app that plays camera frames live over RTMP, I had the same problem in portrait mode, but I was able to solve it with a TextureView. First, I did not rotate the frames on the sending side at all. Instead, on the receiving side I rotated and resized the TextureView that the MediaPlayer renders into.

[screenshot: portrait playback rendered correctly through the rotated TextureView]

I coded it as shown below.

<?xml version="1.0" encoding="utf-8"?> 
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent"> 

    <TextureView 
     android:id="@+id/videoView" 
     android:layout_width="match_parent" 
     android:layout_height="match_parent" /> 
</FrameLayout> 

private void updateTextureViewSize(int viewWidth, int viewHeight) { 
     int pivotPointX = viewWidth/2; 
     int pivotPointY = viewHeight/2; 

     Matrix matrix = new Matrix(); 

     if(isLandscapeOrientation) { 
      matrix.preRotate(0); // no-op: setScale() below resets the matrix anyway 
      matrix.setScale(1.0f, 1.0f, pivotPointX, pivotPointY); 
      videoView.setTransform(matrix); 
      videoView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth, viewHeight)); 
     } else { 
      matrix.preRotate(0); // no-op: setScale() below resets the matrix anyway 
      matrix.setScale(1.0f, 1.0f, pivotPointX, pivotPointY); 
      // In portrait, rotate the whole view 90 degrees and enlarge it so the 
      // rotated video still fills the screen. 
      videoView.setRotation(90); 
      videoView.setTranslationX(-viewWidth/2); 
      videoView.setTranslationY(-viewHeight/2); 
      videoView.setLayoutParams(new FrameLayout.LayoutParams(viewWidth * 2, viewHeight * 2)); 
     } 

    } 

private TextureView.SurfaceTextureListener surfaceTextureListener = new TextureView.SurfaceTextureListener() { 
     @Override 
     public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 
      Surface s = new Surface(surface); 
      if(mMediaPlayer != null) { 
       mMediaPlayer.setSurface(s); 

       DisplayMetrics displaymetrics = new DisplayMetrics(); 
       getWindowManager().getDefaultDisplay().getMetrics(displaymetrics); 
       int sh = displaymetrics.heightPixels; 
       int sw = displaymetrics.widthPixels; 

       updateTextureViewSize(sw, sh); 

      } 

     } 

     @Override 
     public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) { 

     } 

     @Override 
     public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) { 
      return false; 
     } 

     @Override 
     public void onSurfaceTextureUpdated(SurfaceTexture surface) { 
     } 
    }; 
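(For completeness: the listener above only takes effect once it is attached to the view, presumably somewhere in onCreate; videoView and surfaceTextureListener are the answer's own fields:)

videoView = (TextureView) findViewById(R.id.videoView); 
videoView.setSurfaceTextureListener(surfaceTextureListener); 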