Android: Using OpenCV VideoCapture in a service

I'm using a service that is started when the Android device boots, because I don't need a visible activity. This has worked fine so far. But now I'm trying to open the camera (in MyService.onStart) and do some basic image processing. I understand that the default Android Camera class needs a surface for video preview, which is why I want to use VideoCapture from OpenCV instead.

But I get this error:

No implementation found for native Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J

I don't know whether this is because I'm not using the following line, which the OpenCV examples use in their main activity. The question is how to integrate this into my service, and when to initialize the VideoCapture member.

OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback); 
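
For reference, this is roughly how the OpenCV 2.4 samples use that line from an Activity: all camera work is postponed until the loader callback reports success. A condensed sketch of the sample pattern (SampleActivity is just a placeholder name, not my service code):

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

import android.app.Activity;

public class SampleActivity extends Activity {

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            if (status == LoaderCallbackInterface.SUCCESS) {
                // Native libraries are loaded; only now is VideoCapture usable.
            } else {
                super.onManagerConnected(status);
            }
        }
    };

    @Override
    protected void onResume() {
        super.onResume();
        // Asks the OpenCV Manager app to load the native libs, then calls back.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
    }
}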

Here is my code so far. Most of the OpenCV code is taken from OpenCV's NativeCameraView and CameraBridgeViewBase.

package com.example.boot;

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;

public final class MyService extends Service
{
    private static final String TAG = "MyService";
    private boolean mStopThread;
    private Thread mThread;
    private VideoCapture mCamera;
    private int mFrameWidth;
    private int mFrameHeight;
    private int mCameraIndex = -1;
    private Bitmap mCacheBitmap;

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onDestroy() {
        this.disconnectCamera();

        Toast.makeText(this, "service stopped", Toast.LENGTH_LONG).show();
        Log.d(TAG, "onDestroy");
        super.onDestroy();
    }

    @Override
    public void onStart(Intent intent, int startid)
    {
        Log.d(TAG, "service.onStart: begin");

        try
        {
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
            else
                Log.d(TAG, "Camera successfully connected");
        }
        catch (Exception e)
        {
            Log.e(TAG, "MyService.connectCamera throws an exception: " + e.getMessage());
        }

        Toast.makeText(this, "service started", Toast.LENGTH_LONG).show();
        Log.d(TAG, "service.onStart: end");
    }

    private boolean connectCamera(int width, int height) {
        /* First step - initialize camera connection */
        if (!initializeCamera(width, height))
            return false;

        /* Now we can start the update thread */
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    private boolean initializeCamera(int width, int height) {
        synchronized (this) {

            // CV_CAP_ANDROID selects OpenCV's native Android camera backend;
            // adding an index opens that specific camera.
            if (mCameraIndex == -1)
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            else
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);

            if (mCamera == null)
                return false;

            if (!mCamera.isOpened())
                return false;

            //java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();

            /* Select the size that fits the surface, considering the maximum size allowed */
            Size frameSize = new Size(width, height);

            mFrameWidth = (int) frameSize.width;
            mFrameHeight = (int) frameSize.height;

            allocateCache();

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
        }

        Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");

        return true;
    }

    protected void allocateCache()
    {
        mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
    }

    private void releaseCamera() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
            }
        }
    }

    private void disconnectCamera() {
        /* 1. Stop the thread that is updating the frames
         * 2. Stop the camera and release it
         */
        try {
            mStopThread = true;
            if (mThread != null)
                mThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            mThread = null;
            mStopThread = false;
        }

        /* Now release the camera */
        releaseCamera();
    }

    protected void deliverAndDrawFrame(NativeCameraFrame frame)
    {
        // Convert the current frame to a Bitmap; any image processing would go here.
        Mat modified = frame.rgba();

        boolean bmpValid = true;
        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch (Exception e) {
                Log.e(TAG, "Mat type: " + modified);
                Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
                Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
                bmpValid = false;
            }
        }
    }

    private class NativeCameraFrame
    {
        public Mat rgba() {
            mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            return mRgba;
        }

        public Mat gray() {
            mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
            return mGray;
        }

        public NativeCameraFrame(VideoCapture capture) {
            mCapture = capture;
            mGray = new Mat();
            mRgba = new Mat();
        }

        private VideoCapture mCapture;
        private Mat mRgba;
        private Mat mGray;
    }

    private class CameraWorker implements Runnable
    {
        public void run()
        {
            // Grab/retrieve loop: runs until mStopThread is set or a grab fails.
            do
            {
                if (!mCamera.grab()) {
                    Log.e(TAG, "Camera frame grab failed");
                    break;
                }

                deliverAndDrawFrame(new NativeCameraFrame(mCamera));

            } while (!mStopThread);
        }
    }
}

Answer

The line you mention (initAsync) is actually used to load the OpenCV Manager. This should be the first thing you do, so it should go at the beginning of onStart().
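
A minimal sketch of what that could look like in your service, assuming the BaseLoaderCallback pattern from the samples also works with a Service context (BaseLoaderCallback only needs a Context), with connectCamera() moved into the success callback:

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

// Inside MyService:
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            // The native side is ready; only now is new VideoCapture(...) safe.
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
        } else {
            super.onManagerConnected(status);
        }
    }
};

@Override
public void onStart(Intent intent, int startid) {
    // Load OpenCV first; all camera work moves into the callback above.
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
}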

Yes, I'll give it a try. I found [this post](http://answers.opencv.org/question/14717/using-default-baseloadercallback-in-an-android/) that says the same thing. – Matthias

That didn't work either. Loading OpenCV works that way, but OpenCV internally throws an exception saying it cannot connect to the camera service. I eventually got the default Android camera implementation working inside the service by creating a dummy EGL SurfaceTexture and setting that texture as the camera preview. See [this post](http://stackoverflow.com/questions/2386025/android-camera-without-preview) for details. – Matthias
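
For completeness, the core of that workaround as a minimal sketch (assumes API level 11+ for SurfaceTexture/setPreviewTexture; the texture name 10 is arbitrary, since the texture is never rendered, and startCameraWithoutPreview is a hypothetical helper name):

import java.io.IOException;

import android.graphics.SurfaceTexture;
import android.hardware.Camera;

// Hypothetical helper inside the service, e.g. called from onStart:
private void startCameraWithoutPreview() throws IOException {
    Camera camera = Camera.open(); // default (back-facing) camera
    // A dummy, never-rendered texture satisfies the preview requirement.
    camera.setPreviewTexture(new SurfaceTexture(10));
    camera.setPreviewCallback(new Camera.PreviewCallback() {
        @Override
        public void onPreviewFrame(byte[] data, Camera cam) {
            // Preview frames arrive here (NV21 by default); process as needed.
        }
    });
    camera.startPreview();
}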