
I am running the camera using OpenCV functions and passing a Mat object to a JNI function. It works for a while, and then I get the error "Fatal signal 11 (SIGSEGV), code 1" when the Mat object is passed from Java to the JNI function:

10-10 13:03:17.978: A/libc(28693): Fatal signal 11 (SIGSEGV), code 1, fault addr 0x9 in tid 28791 (Thread-5418) 
This happens while the camera is running and the JNI function is being called.

Java code:

package com.adhamenaya; 

import java.util.ArrayList; 

import org.opencv.android.BaseLoaderCallback; 
import org.opencv.android.CameraBridgeViewBase; 
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; 
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2; 
import org.opencv.android.LoaderCallbackInterface; 
import org.opencv.android.OpenCVLoader; 
import org.opencv.core.Mat; 
//import org.opencv.highgui.Highgui; 
import org.opencv.imgproc.Imgproc; 

import android.app.Activity; 
import android.os.Bundle; 
import android.os.Handler; 
import android.util.Log; 
import android.view.MotionEvent; 
import android.view.SurfaceView; 
import android.view.View; 
import android.view.View.OnTouchListener; 
import android.view.WindowManager; 

public class MainActivity extends Activity implements CvCameraViewListener2, 
     OnTouchListener { 

    private static final String TAG = "OCVSample::Activity"; 
    private Mat mRgba; 
    private Mat mGray; 
    private CameraBridgeViewBase mOpenCvCameraView; 

    private ArrayList<Mat> mats = new ArrayList<Mat>(); 

    private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) { 
     @Override 
     public void onManagerConnected(int status) { 
      switch (status) { 
      case LoaderCallbackInterface.SUCCESS: { 
       Log.i(TAG, "OpenCV loaded successfully"); 
       mOpenCvCameraView.enableView(); 
       mOpenCvCameraView.setOnTouchListener(MainActivity.this); 

      } 
       break; 
      default: { 
       super.onManagerConnected(status); 
      } 
       break; 
      } 
     } 
    }; 

    public MainActivity() { 
     Log.i(TAG, "Instantiated new " + this.getClass()); 
    } 

    @Override 
    protected void onCreate(Bundle savedInstanceState) { 
     Log.i(TAG, "called onCreate"); 
     super.onCreate(savedInstanceState); 

     Native.loadlibs(); 


     mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.cam_view); 
     mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE); 
     mOpenCvCameraView.setCvCameraViewListener(this); 

     // mFaceCascadeFile, mNoseCascadeFile and mLandmarks hold the cascade and
     // landmark file paths (their initialisation is not shown here).
     Native.setup(mFaceCascadeFile, mNoseCascadeFile, mLandmarks); 
    } 

    @Override 
    public void onPause() { 
     super.onPause(); 
     if (mOpenCvCameraView != null) 
      mOpenCvCameraView.disableView(); 
    } 

    @Override 
    public void onResume() { 
     super.onResume(); 
     OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_3, this, 
       mLoaderCallback); 
    } 

    public void onDestroy() { 
     super.onDestroy(); 
     if (mOpenCvCameraView != null) 
      mOpenCvCameraView.disableView(); 
    } 

    public void onCameraViewStarted(int width, int height) { 
     mGray = new Mat(); 
     mRgba = new Mat(); 
    } 

    public void onCameraViewStopped() { 
    } 

    public Mat onCameraFrame(CvCameraViewFrame inputFrame) { 
     mRgba = inputFrame.rgba(); 
     Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_BGRA2GRAY); 
     Native.runJni(mFaceCascadeFile, mNoseCascadeFile, mLandmarks, 
       mRgba.getNativeObjAddr()); 

     return mRgba; 
    } 

    @Override 
    public boolean onTouch(View v, MotionEvent event) { 
     // TODO Auto-generated method stub 
     return false; 
    } 


} 
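The Native wrapper class referenced above is not included in the question. As a rough, hypothetical sketch only, it presumably looks something like the following: the method names and parameters are taken from the calls in MainActivity, while the library names and the use of static methods are assumptions.

package com.adhamenaya;

public class Native {

    // Load the native shared libraries. The library names below are
    // placeholders; the real project's .so names are not shown in the question.
    public static void loadlibs() {
        System.loadLibrary("opencv_java");
        System.loadLibrary("native-lib");
    }

    // One-time setup with the cascade and landmark file paths.
    public static native void setup(String faceCascadeFile, String noseCascadeFile,
            String landmarks);

    // Processes the frame whose native cv::Mat address is passed as a long
    // (mRgba.getNativeObjAddr() on the Java side).
    // Note: the posted JNI implementation declares its second parameter as
    // 'jobject', which matches an instance method; a static native method
    // would receive a 'jclass' instead, so the exact declaration may differ.
    public static native byte[] runJni(String faceCascadeFile, String noseCascadeFile,
            String landmarks, long frameAddr);
}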

JNI function:

JNIEXPORT jbyteArray JNICALL Java_com_adhamenaya_Native_runJni(JNIEnv * env, 
     jobject obj, jstring faceCascadeFile, jstring noseCascadeFile, 
     jstring landmarks, jlong frame) { 

    // faceCascadeFnameStr, noseCascadeFnameStr and landmarksStr below hold
    // std::string copies of the jstring arguments; that conversion, and the
    // declarations of gsys, faces and trigger_hr, are not shown here.
    cv::Mat& inFrame = *(cv::Mat*) frame; 

    if (!gsys.loadFaceCascade(faceCascadeFnameStr)) { 
     LOG("Could not load face cascade"); 
     gsys.loadFaceCascade(faceCascadeFnameStr); 
    } else { 
     LOG("Face cascade: OK"); 

    } 

    if (!gsys.loadNoseCascade(noseCascadeFnameStr)) { 
     LOG("Could not load nose cascade"); 
     gsys.loadNoseCascade(noseCascadeFnameStr); 

    } else { 
     LOG("Nose cascade: OK"); 

    } 

    gsys.setFrameRate(30); 
    gsys.setProgramState(DETECT); 

    clock_t tin, tout = 0; 

    cv::flip(inFrame, inFrame, 0); 
    cv::transpose(inFrame, inFrame); 

    dlib::shape_predictor pose_model; 
    dlib::deserialize(landmarksStr) >> pose_model; 


    gsys.setCurrentFrame(inFrame); 

    tin = clock(); 
    trigger_hr(gsys, faces, pose_model); 

    // Process the frame 
    size_t spm; 
    float motionStrengthX, motionStrengthY; 
    float phiYaw = -0xFFFFFFFF, thetaPitch = -0xFFFFFFFF; 

    if (faces.size()) { 
    faces[0].getSpm(gsys, spm, motionStrengthX, motionStrengthY); 
    faces[0].getFacePose(phiYaw, thetaPitch); 
    } 

    tout = tout + clock() - tin; 
    if ((gsys.getFrameCount() % 30) == 29) { 
    double secs_between_frames = (double) (tout)/(CLOCKS_PER_SEC * 30.0f); 
    printf("FPS = %2.2f\n", 1.0f/secs_between_frames); 
    LOG("FPS = %2.2f ", 1.0f/secs_between_frames); 
    tout = 0; 
    } 

    char spmText[100]; 

    //sprintf(spmText, 
    //  "SPM = %zu, P = %2.2f, T = %2.2f, MS-X = %2.2f, MS-Y = %2.2f", spm, 
    //  phiYaw, thetaPitch, motionStrengthX, motionStrengthY); 

    LOG("SPM = %zu, P = %2.2f, T = %2.2f, MS-X = %2.2f, MS-Y = %2.2f", spm, 
    phiYaw, thetaPitch, motionStrengthX, motionStrengthY); 

    std::string str; 
    str = "SPM="; 

    jbyteArray arr = env->NewByteArray(str.length()); 
    env->SetByteArrayRegion(arr, 0, str.length(), (jbyte*) str.c_str()); 

    return arr; 

} 

Please help me.


Have you identified where the crash happens? The relevant part of the log would be valuable. Once you have identified the line, you can print the data that is fed into that statement. – 18446744073709551615


@18446744073709551615 Thanks for your comment; I was able to find out the problem myself. –

Answer


After two days of searching online, I found that the problem was caused by a memory leak: I was reading frames from the video stream and sending them to the native function without releasing them after they had done their work, so frames kept piling up in memory.

What I did was move the Mat object in the C++ code out of the function scope and make it a class-scope object, so a new object is not created every time the function is called.

Also, I called:

inFrame.release(); 

after it has finished its work, to free the memory.
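As a minimal sketch of that change (not the project's actual code): the reused Mat lives outside the function, here at file scope for brevity, and the incoming frame is released at the end. The entry-point name processFrame and the grayscale conversion are placeholders.

#include <jni.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>

// Kept outside the function so the same buffer is reused on every call,
// instead of constructing a fresh cv::Mat for each frame.
static cv::Mat gGray;

// Hypothetical entry point; in the question the real function is runJni(...).
extern "C" JNIEXPORT void JNICALL Java_com_adhamenaya_Native_processFrame(
        JNIEnv* env, jobject obj, jlong frame) {
    // The Java side passes mRgba.getNativeObjAddr() as a jlong.
    cv::Mat& inFrame = *(cv::Mat*) frame;

    // cvtColor reuses gGray's storage when size and type are unchanged,
    // so no new allocation happens per frame.
    cv::cvtColor(inFrame, gGray, cv::COLOR_RGBA2GRAY);

    // ... detection / processing on gGray ...

    // Release the input frame's data once the work is done, as described above,
    // so frames do not keep piling up in memory.
    inFrame.release();
}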
