import static com.googlecode.javacv.cpp.opencv_core.*; 
    import static com.googlecode.javacv.cpp.opencv_highgui.*; 
    import static com.googlecode.javacv.cpp.opencv_imgproc.*; 
    import static com.googlecode.javacv.cpp.opencv_contrib.createLBPHFaceRecognizer; 
    import static com.googlecode.javacv.cpp.opencv_objdetect.cvHaarDetectObjects; 

    import java.io.File; 
    import java.io.FileInputStream; 
    import java.util.Iterator; 
    import java.util.Properties; 
    import java.util.Set; 
    import com.googlecode.javacpp.Loader; 
    import com.googlecode.javacv.CanvasFrame; 
    import com.googlecode.javacv.FrameGrabber; 
    import com.googlecode.javacv.OpenCVFrameGrabber; 
    import com.googlecode.javacv.cpp.opencv_contrib.FaceRecognizer; 
    import com.googlecode.javacv.cpp.opencv_core.CvMat; 
    import com.googlecode.javacv.cpp.opencv_core.CvMemStorage; 
    import com.googlecode.javacv.cpp.opencv_core.CvRect; 
    import com.googlecode.javacv.cpp.opencv_core.CvSeq; 
    import com.googlecode.javacv.cpp.opencv_core.IplImage; 
    import com.googlecode.javacv.cpp.opencv_core.MatVector; 
    import static com.googlecode.javacv.cpp.opencv_highgui.cvLoadImage; 
    import com.googlecode.javacv.cpp.opencv_objdetect; 
    import static com.googlecode.javacv.cpp.opencv_objdetect.CV_HAAR_DO_CANNY_PRUNING; 
    import com.googlecode.javacv.cpp.opencv_objdetect.CvHaarClassifierCascade; 
    import java.io.OutputStream; 
    import java.io.FileOutputStream; 
    import java.util.logging.Level; 
    import java.util.logging.Logger; 
    import static javax.swing.JFrame.EXIT_ON_CLOSE; 

public class LBPFaceRecognizer { 
private static String faceDataFolder = "F:\\FaceID\\ID\\"; 
    public static String imageDataFolder = faceDataFolder + "images\\"; 
    private static final String CASCADE_FILE = "C:\\Users\\chatur\\Documents\\NetBeansProjects\\NetbeansWorkspace\\DemoFaceRecognize-master\\haarcascade_frontalface_default.xml"; 
public static final String personNameMappingFileName = faceDataFolder + "personNumberMap.properties"; 

    static final CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE)); 
    private static LBPFaceRecognizer instance = new LBPFaceRecognizer(); 
    Properties dataMap = new Properties(); 
    private static final int NUM_IMAGES_PER_PERSON =10; 
    double binaryThreshold = 100; 
    int highConfidenceLevel = 70; 
    private FaceRecognizer fr_binary = null; 
    private LBPFaceRecognizer() { 
      if(cascade!=null) 
      {System.out.println(cascade+"\t"+CASCADE_FILE);} 
    createModels(); 
    loadTrainingData(); 
    } 
    public static LBPFaceRecognizer getInstance() { 
      return instance; 
    } 
    private void createModels() { 
       fr_binary = createLBPHFaceRecognizer(1, 8, 8, 8, binaryThreshold); 
    } 
    protected CvSeq detectFace(IplImage originalImage) { 
     System.out.println("in detection"); 
     CvSeq faces = null; 
     Loader.load(opencv_objdetect.class); 
     try { 
        IplImage grayImage = IplImage.create(originalImage.width(), originalImage.height(), IPL_DEPTH_8U, 1); 
        cvCvtColor(originalImage, grayImage, CV_BGR2GRAY); 
        CvMemStorage storage = CvMemStorage.create(); 
        faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1,3,CV_HAAR_DO_CANNY_PRUNING); 
        for(int i=0;i<faces.total();i++){ 
        CvRect r=new CvRect(cvGetSeqElem(faces,i)); 
        cvRectangle(originalImage, cvPoint(r.x(), r.y()), cvPoint(r.x() + r.width(), r.y() + r.height()), CvScalar.YELLOW, 1, CV_AA, 0); 
        } 
        } catch (Exception e) { 
        e.printStackTrace(); 
      } 
     return faces; 
    } 
      public String identifyFace(IplImage image) { 
      System.err.println("=========================================================="); 
      String personName = ""; 

      Set keys = dataMap.keySet(); 

      if (keys.size() > 0) { 
        int[] ids = new int[1]; 
        double[] distance = new double[1]; 
        int result = -1; 

          fr_binary.predict(image, ids, distance); 
          //just deriving a confidence number against threshold 
          result = ids[0]; 

          if (result > -1 && distance[0]<highConfidenceLevel) { 
            personName = (String) dataMap.get("" + result); 
          } 
      } 

      return personName; 
    } 
     public boolean learnNewFace(String personName, IplImage[] images) throws Exception 
     { 
      System.out.println("in learn new face"); 
      int memberCounter = dataMap.size(); 
      System.out.println(memberCounter); 
      if(dataMap.containsValue(personName)){ 
        Set keys = dataMap.keySet(); 
        Iterator ite = keys.iterator(); 
        while (ite.hasNext()) { 
          String personKeyForTraining = (String) ite.next(); 
          String personNameForTraining = (String) dataMap.getProperty(personKeyForTraining); 
          if(personNameForTraining.equals(personName)){ 
            memberCounter = Integer.parseInt(personKeyForTraining); 
            System.out.println(memberCounter); 
            System.err.println("Person already exist.. re-learning.."); 
          } 
        } 
      } 

      dataMap.put("" + memberCounter, personName); 
      storeTrainingImages(personName, images); 
      retrainAll(); 

      return true; 
    } 
    public IplImage preprocessImage(IplImage image, CvRect r){ 
      IplImage gray = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1); 
      IplImage roi = cvCreateImage(cvGetSize(image), IPL_DEPTH_8U, 1); 
      System.out.println("in preprocessing"); 
      CvRect r1 = new CvRect(r.x()-10, r.y()-10, r.width()+10, r.height()+10); 
      cvCvtColor(image, gray, CV_BGR2GRAY); 
      cvSetImageROI(gray, r1); 
      cvResize(gray, roi, CV_INTER_LINEAR); 
      cvEqualizeHist(roi, roi); 
      return roi; 

    } 
    private void retrainAll() throws Exception { 
      System.out.println("in retrainAll"); 
      Set keys = dataMap.keySet(); 

      if (keys.size() > 0) { 
        MatVector trainImages = new MatVector(keys.size() * NUM_IMAGES_PER_PERSON); 
        CvMat trainLabels = CvMat.create(keys.size() * NUM_IMAGES_PER_PERSON, 1, CV_32SC1); 
        Iterator ite = keys.iterator(); 
        int count = 0; 

        System.err.print("Loading images for training..."); 
        while (ite.hasNext()) { 
          String personKeyForTraining = (String) ite.next(); 
          String personNameForTraining = (String) dataMap.getProperty(personKeyForTraining); 
          IplImage imagesForTraining[]=new IplImage[NUM_IMAGES_PER_PERSON]; 
          imagesForTraining = readImages(personNameForTraining); 
          System.out.println("length\t"+imagesForTraining.length+"\t"+personNameForTraining); 
          IplImage grayImage[]=new IplImage[NUM_IMAGES_PER_PERSON];      

          for(int j=0;j<imagesForTraining.length;j++){   
            grayImage[j] = IplImage.create(imagesForTraining[j].width(), imagesForTraining[j].height(), IPL_DEPTH_8U, 1); 
          } 

          for (int i = 0; i < imagesForTraining.length; i++) { 
            trainLabels.put(count, 0, Integer.parseInt(personKeyForTraining)); 
            cvCvtColor(imagesForTraining[i], grayImage[i], CV_BGR2GRAY); 
            trainImages.put(count,grayImage[i]); 
            count++; 
          } 
    } 
        System.err.println("done."); 
              fr_binary.train(trainImages, trainLabels); 
        System.err.println("done."); 
        storeTrainingData(); 
      } 
      } 
     private void loadTrainingData() { 

      try { 

        File personNameMapFile = new File(personNameMappingFileName); 
        if (personNameMapFile.exists()) { 
         System.out.print("in if "); 
          FileInputStream fis = new FileInputStream(personNameMapFile); 
          dataMap.load(fis); 
          System.out.println("datdataMap"+dataMap); 
          fis.close(); 
          dataMap.list(System.out); 
        } 
       } catch (Exception e) { 
        e.printStackTrace(); 
      } 
     } 
     private void storeTrainingData() throws Exception { 
     System.err.print("Storing training models ...."); 
     File personNameMapFile = new File(personNameMappingFileName); 
      if (personNameMapFile.exists()) { 
        personNameMapFile.delete(); 
      } 
      FileOutputStream fos = new FileOutputStream(personNameMapFile, false); 
      dataMap.store(fos, ""); 
      fos.close(); 

      System.err.println("done."); 
    } 

     public void storeTrainingImages(String personName, IplImage[] images) { 
      System.out.println("in store"); 
      for (int i = 0; i < images.length; i++) { 
        String imageFileName = imageDataFolder + "training\\" + personName + "_" + i + ".bmp"; 
        File imgFile = new File(imageFileName); 
        if (imgFile.exists()) { 
          imgFile.delete(); 
        } 
        cvSaveImage(imageFileName, images[i]); 
      } 
          System.out.println("exit store"); 

     } 
     private IplImage[] readImages(String personName) { 
      File imgFolder = new File(imageDataFolder); 
      IplImage[] images = null; 
      if (imgFolder.isDirectory() && imgFolder.exists()) { 
        images = new IplImage[NUM_IMAGES_PER_PERSON]; 
        for (int i = 0; i < NUM_IMAGES_PER_PERSON; i++) { 
          String imageFileName = imageDataFolder + "training\\" + personName + "_" + i + ".bmp"; 
          IplImage img = cvLoadImage(imageFileName); 
          images[i] = img; 
        } 

      } 
      return images; 
     } 
     public static void main(String ar[]) throws FrameGrabber.Exception, Exception{ 
    try{ 
     //LBPFaceRecognizer fr=new LBPFaceRecognizer(); 
     //fr.getInstance(); 
     LBPFaceRecognizer fr = LBPFaceRecognizer.getInstance(); 

     FrameGrabber grabber = new OpenCVFrameGrabber(0); 
     grabber.start(); 
     IplImage img=null; 

     CvSeq faces=null; 
     CvRect r=null; 
     CanvasFrame canvas=new CanvasFrame("webcam"); 
     canvas.setDefaultCloseOperation(EXIT_ON_CLOSE); 
     canvas.setSize(750,750); 
     boolean flag=true; 
     while(flag){ 
     img = grabber.grab();   
     IplImage snapshot = cvCreateImage(cvGetSize(img), img.depth(), img.nChannels()); 
     cvFlip(img, snapshot, 1);   
     faces = fr.detectFace(img); 
        if(img!=null){ 
     canvas.showImage(img); 
     } 
     r=new CvRect(cvGetSeqElem(faces,0)); 
     int imageCounter = 0; 
     IplImage trainImages[]= new IplImage[1]; 
     trainImages[0]= fr.preprocessImage(img, r); 
     fr.learnNewFace("p", trainImages); 
     fr.identifyFace(fr.preprocessImage(img, r)); 
     } 
     } 
     catch(ExceptionInInitializerError e) 
     { 
      System.out.println(e); 
     } 
     } 
     } 

The above code is for face recognition using OpenCV and JavaCV. It detects faces, but an exception is thrown during training: the retrainAll() method throws a java.lang.NullPointerException at the statement

grayImage[j] = IplImage.create(imagesForTraining[j].width(), imagesForTraining[j].height(), IPL_DEPTH_8U, 1); 

Please help me sort this out.


The first thing you can do is split up the line where you see this exception: grayImage[j] = IplImage.create(imagesForTraining[j].width(), imagesForTraining[j].height(), IPL_DEPTH_8U, 1); – strash


int width = imagesForTraining[j].width(); – strash


int height = imagesForTraining[j].height(); – strash

Answer

IplImage img = cvLoadImage(imageFileName); 

That call can return null, so one of your imagesForTraining[j] entries is null. That is what is causing the problem.

If you change your code to

int width = imagesForTraining[j].width(); 

int height = imagesForTraining[j].height(); 

grayImage[j] = IplImage.create(width, height, IPL_DEPTH_8U, 1); 

the exception will be thrown on the first of these three lines. You should also handle the case where your image was not loaded. A simple way to do that is something like this:

if (imagesForTraining[j] == null) continue; // or do something, e.g. try to load it again
int width = imagesForTraining[j].width(); 

int height = imagesForTraining[j].height(); 

grayImage[j] = IplImage.create(width, height, IPL_DEPTH_8U, 1); 
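
As an illustration only (this is my own sketch, not code from the question; the helper name readImagesSafely is made up), the same null check can be pushed into the image-loading loop so that unreadable files are skipped instead of producing null array entries:

private IplImage[] readImagesSafely(String personName) {
    java.util.List<IplImage> loaded = new java.util.ArrayList<IplImage>();
    for (int i = 0; i < NUM_IMAGES_PER_PERSON; i++) {
        String imageFileName = imageDataFolder + "training\\" + personName + "_" + i + ".bmp";
        IplImage img = cvLoadImage(imageFileName); // returns null if the file cannot be read
        if (img != null) {
            loaded.add(img);
        } else {
            System.err.println("Skipping unreadable image: " + imageFileName);
        }
    }
    return loaded.toArray(new IplImage[loaded.size()]);
}

If images are skipped this way, the trainLabels matrix in retrainAll() should be sized from the number of images actually returned rather than from keys.size() * NUM_IMAGES_PER_PERSON.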

That solved it. The exception was coming from the cvLoadImage() method. But now the last method, identifyFace(), is throwing an exception like –


'OpenCV Error: Assertion failed (rect.width >= 0 && rect.height >= 0 && rect.x < image->width && rect.y < image->height && rect.x + rect.width >= (int)(rect.width > 0) && rect.y + rect.height >= (int)(rect.height > 0)) in cvSetImageROI, file ..\..\..\..\opencv\modules\core\src\array.cpp, line 3006' –


Better to use your debugger; if you don't really understand the NullPointerException you won't be able to solve this. But you should check the size of the image you are passing to that method, and read the third-party API documentation on how the detection is performed. – strash
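
For the cvSetImageROI assertion mentioned in the last comments, here is a minimal sketch (my own addition, not from this thread) of clamping the padded rectangle in preprocessImage() to the image bounds, assuming the same image, gray, roi and r variables:

// Pad the detected rect by 10 px on each side, but keep it inside the image
// so the assertion in cvSetImageROI cannot fail.
int x = Math.max(r.x() - 10, 0);
int y = Math.max(r.y() - 10, 0);
int w = Math.min(r.width() + 20, image.width() - x);
int h = Math.min(r.height() + 20, image.height() - y);
cvSetImageROI(gray, new CvRect(x, y, w, h));
cvResize(gray, roi, CV_INTER_LINEAR);

It also helps to check faces.total() > 0 in main() before building a CvRect from cvGetSeqElem(faces, 0); when nothing is detected, that element does not describe a valid rectangle.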