2016-05-29 57 views
1

我碰到了這個大問題。BebopVideoView to Mat

我正在嘗試BebopVideoView到Mat。 (BebopVideoView是鸚鵡無人機源代碼)

但是我失敗了好幾天。

這是代碼。

package com.hyeonjung.dronecontroll.view; 

import android.content.Context; 
import android.graphics.Bitmap; 
import android.graphics.Canvas; 
import android.graphics.Rect; 
import android.media.MediaCodec; 
import android.media.MediaFormat; 
import android.os.Environment; 
import android.util.AttributeSet; 
import android.util.Log; 
import android.view.SurfaceHolder; 
import android.view.SurfaceView; 

import com.parrot.arsdk.arcontroller.ARCONTROLLER_STREAM_CODEC_TYPE_ENUM; 

import com.parrot.arsdk.arcontroller.ARControllerCodec; 
import com.parrot.arsdk.arcontroller.ARFrame; 

import org.opencv.core.CvType; 
import org.opencv.core.Mat; 
import org.opencv.imgcodecs.Imgcodecs; 
import org.opencv.imgproc.Imgproc; 

import java.io.File; 
import java.io.IOException; 
import java.nio.ByteBuffer; 
import java.util.concurrent.locks.Lock; 
import java.util.concurrent.locks.ReentrantLock; 

/**
 * SurfaceView that decodes the Parrot Bebop drone's H264 video stream with
 * {@link MediaCodec} and renders it onto this view's surface.
 *
 * <p>NOTE(review): the bytes handed to {@link #displayFrame} via
 * {@code frame.getByteData()} are H264-<em>encoded</em> data, not decoded
 * pixels. Copying them into a Mat (see {@link #getMat}) therefore does not
 * produce a viewable image; to grab decoded pixels, render to a TextureView
 * and use {@code getBitmap()} instead.
 */
public class BebopVideoView extends SurfaceView implements SurfaceHolder.Callback {

    private static final String TAG = "BebopVideoView";
    private static final String VIDEO_MIME_TYPE = "video/avc";
    // Timeout (microseconds) for dequeueInputBuffer — roughly one 30fps frame.
    private static final int VIDEO_DEQUEUE_TIMEOUT = 33000;

    private MediaCodec mMediaCodec;
    // Guards codec lifecycle: surface callbacks and displayFrame run on different threads.
    private Lock mReadyLock;

    private boolean mIsCodecConfigured = false;

    // H264 codec-specific data (SPS/PPS) received from the drone.
    private ByteBuffer mSpsBuffer;
    private ByteBuffer mPpsBuffer;

    // Input buffer array, only needed pre-Lollipop (getInputBuffer(int) is API 21+).
    private ByteBuffer[] mBuffers;

    private static final int VIDEO_WIDTH = 640;
    private static final int VIDEO_HEIGHT = 368;

    public byte[] a;
    public Mat k;

    public BebopVideoView(Context context) {
        super(context);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs) {
        super(context, attrs);
        customInit();
    }

    public BebopVideoView(Context context, AttributeSet attrs, int defStyleAttr) {
        super(context, attrs, defStyleAttr);
        customInit();
    }

    /** Shared constructor body: create the lock and register for surface events. */
    private void customInit() {
        mReadyLock = new ReentrantLock();
        getHolder().addCallback(this);
    }

    /**
     * Feeds one encoded frame to the decoder and drains any decoded output
     * onto the surface.
     *
     * @param frame encoded H264 frame (I- or P-frame) from the ARSDK stream
     */
    public void displayFrame(ARFrame frame) {
        mReadyLock.lock();

        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                // Here we have either a good PFrame, or an IFrame
                int index = -1;

                try {
                    index = mMediaCodec.dequeueInputBuffer(VIDEO_DEQUEUE_TIMEOUT);
                } catch (IllegalStateException e) {
                    Log.e(TAG, "Error while dequeue input buffer");
                }
                if (index >= 0) {
                    ByteBuffer b;
                    if (android.os.Build.VERSION.SDK_INT >= android.os.Build.VERSION_CODES.LOLLIPOP) {
                        b = mMediaCodec.getInputBuffer(index); // fill inputBuffer with valid data
                    } else {
                        b = mBuffers[index]; // fill inputBuffer with valid data
                        b.clear();
                    }

                    if (b != null) {
                        b.put(frame.getByteData(), 0, frame.getDataSize()); // write to b.
                        // NOTE(review): this runs once per frame on the streaming
                        // thread and saveMat writes a PNG to storage each time —
                        // expensive; consider moving off this path.
                        getMat(frame);
                        saveMat(k);
                    }

                    try {
                        mMediaCodec.queueInputBuffer(index, 0, frame.getDataSize(), 0, 0);
                    } catch (IllegalStateException e) {
                        Log.e(TAG, "Error while queue input buffer");
                    }
                }
            }

            // Try to display previous frame
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            int outIndex;
            try {
                outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);

                while (outIndex >= 0) {
                    // true = render the decoded frame to the attached surface.
                    mMediaCodec.releaseOutputBuffer(outIndex, true);
                    outIndex = mMediaCodec.dequeueOutputBuffer(info, 0);
                }
            } catch (IllegalStateException e) {
                Log.e(TAG, "Error while dequeue input buffer (outIndex)");
            }
        }

        mReadyLock.unlock();
    }

    /**
     * Stores the stream's SPS/PPS and configures the decoder if it is ready.
     *
     * @param codec codec description delivered by the ARSDK device controller
     */
    public void configureDecoder(ARControllerCodec codec) {
        mReadyLock.lock();

        if (codec.getType() == ARCONTROLLER_STREAM_CODEC_TYPE_ENUM.ARCONTROLLER_STREAM_CODEC_TYPE_H264) {
            ARControllerCodec.H264 codecH264 = codec.getAsH264();

            mSpsBuffer = ByteBuffer.wrap(codecH264.getSps().getByteData());
            mPpsBuffer = ByteBuffer.wrap(codecH264.getPps().getByteData());
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }

        mReadyLock.unlock();
    }

    /** Configures and starts the MediaCodec once both the codec and SPS/PPS exist. */
    private void configureMediaCodec() {
        MediaFormat format = MediaFormat.createVideoFormat(VIDEO_MIME_TYPE, VIDEO_WIDTH, VIDEO_HEIGHT);
        format.setByteBuffer("csd-0", mSpsBuffer);
        format.setByteBuffer("csd-1", mPpsBuffer);

        mMediaCodec.configure(format, getHolder().getSurface(), null, 0);
        mMediaCodec.start();

        if (android.os.Build.VERSION.SDK_INT < android.os.Build.VERSION_CODES.LOLLIPOP) {
            mBuffers = mMediaCodec.getInputBuffers();
        }

        mIsCodecConfigured = true;
    }

    /**
     * Creates the decoder for the given MIME type; configures it immediately
     * when SPS/PPS were already received.
     */
    private void initMediaCodec(String type) {
        try {
            mMediaCodec = MediaCodec.createDecoderByType(type);
        } catch (IOException e) {
            Log.e(TAG, "Exception", e);
        }

        if ((mMediaCodec != null) && (mSpsBuffer != null)) {
            configureMediaCodec();
        }
    }

    /** Stops and releases the decoder; safe to call when nothing was created. */
    private void releaseMediaCodec() {
        if (mMediaCodec != null) {
            if (mIsCodecConfigured) {
                mMediaCodec.stop();
                mMediaCodec.release();
            }
            mIsCodecConfigured = false;
            mMediaCodec = null;
        }
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        mReadyLock.lock();
        initMediaCodec(VIDEO_MIME_TYPE);
        mReadyLock.unlock();
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {}

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
        mReadyLock.lock();
        releaseMediaCodec();
        mReadyLock.unlock();
    }

    /**
     * Copies the frame's raw bytes into {@link #k}.
     *
     * <p>BUG FIX: the original code called {@code get}/{@code put} on an empty
     * {@code new Mat()}. An empty Mat has no allocated data, so both calls were
     * no-ops and {@code k} stayed empty — the "result is NULL" symptom. The Mat
     * must be allocated with a concrete size/type before bytes can be written.
     *
     * <p>NOTE(review): {@code frame.getByteData()} is still H264-encoded data,
     * not decoded pixels, so the Mat content is not a valid grayscale image —
     * TODO: decode first (e.g. TextureView.getBitmap + Utils.bitmapToMat).
     */
    public void getMat(ARFrame frame) {
        // Allocate a sized, single-channel Mat so put() actually has storage.
        k = new Mat(VIDEO_HEIGHT, VIDEO_WIDTH, CvType.CV_8UC1);

        // Mat.put copies at most the Mat's capacity; start at (0,0), not (150,150).
        k.put(0, 0, frame.getByteData());
    }

    /**
     * Converts the given single-channel Mat to BGR and writes it as
     * /data/image.png on external storage.
     *
     * @param mat single-channel (grayscale) Mat; ignored when null/empty
     */
    public void saveMat(Mat mat) {
        // Guard: cvtColor on a null/empty Mat cannot produce an image.
        if (mat == null || mat.empty()) {
            Log.e(TAG, "saveMat called with empty Mat");
            return;
        }

        Mat mIntermediateMat = new Mat(150, 150, CvType.CV_8UC1);
        Imgproc.cvtColor(mat, mIntermediateMat, Imgproc.COLOR_GRAY2BGR);

        File path = new File(Environment.getExternalStorageDirectory() + "/data");
        path.mkdirs();
        File file = new File(path, "image.png");
        String filename = file.toString();
        boolean bool = Imgcodecs.imwrite(filename, mIntermediateMat); // primitive, not boxed Boolean

        if (bool)
            Log.i(TAG, "SUCCESS writing image to external storage");
        else
            Log.i(TAG, "Fail writing image to external storage");
    }

}

我想我可以從 frame.getByteData() 返回的 ByteBuffer 獲得圖像相關的數據。

我確認了 frame.getByteData() 返回的字節緩衝區(ByteBuffer)

其中的 byte 數據類型的取值範圍為 -128 到 127。

所以我檢查了 getMat 和 saveMat 的結果,結果是 NULL(Mat k 是空的)。

出了什麼問題?

請幫我T.T

回答

0

如果使用TextureView你可以簡單地抓住它的位圖,並將其轉換成墊。您需要使用TextureView提供的表面而不是典型的SurfaceView支架。這將需要對mediaCodec生命週期進行一些額外的重構,但這是相當微不足道的變化。

// Answer snippet (incomplete — "..." is a placeholder, not compilable as-is):
// switch the view to TextureView so decoded frames can be grabbed via getBitmap().
public class BebopVideoView extends TextureView implements TextureView.SurfaceTextureListener { 

    @Override 
    public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) { 
     // Wrap the SurfaceTexture in a Surface for MediaCodec.configure(...).
     // NOTE(review): `this.surface` and `surfaceCreated` are fields assumed to be
     // declared in the omitted part of the class — confirm in the full source.
     this.surface = new Surface(surface); 
     surfaceCreated = true; 
    } 

... 

} 

而且在 configureMediaCodec 裏面,使用 onSurfaceTextureAvailable 中捕獲的類級別 surface,而不是 SurfaceView 的 holder……

mediaCodec.configure(format, surface, null, 0); 

隨着其他幾個小的調整,你現在有超過視圖的更多的控制。你可以做一些事情,比如setTransform(),更重要的是你的情況getBitmap:

Mat mat = new Mat(); 
Utils.bitmapToMat(getBitmap(), mat); 
+0

OMG ..非常感謝。我會再試一次。 – kimms

+0

您可以做的最好的方式是upvote和/或選擇我的答案作爲解決方案。謝謝。 – ShellDude