2016-08-17 75 views
3

我創建了一項記錄視頻的服務,當有人試圖解鎖手機時嘗試失敗。當服務運行並捕獲視頻時,它還會實時捕獲視頻幀並從這些視頻幀中檢測臉部。如何在android中的服務中錄製視頻時實時檢測視頻幀中的人臉?

問題是當服務從視頻中檢測到人臉時,超過150幀被跳過,如日誌貓所示。

如何有效檢測臉部,以避免跳過大量的幀?

這裏是我的代碼

public class Background_Recording extends Service implements SurfaceHolder.Callback,Camera.PreviewCallback { 
    private WindowManager windowManager; 
    public static int MAX_FACES = 5; 
    boolean stopped = false; 
    Timer t; 
    Bitmap bitmaper; 
    Handler handler; 
    ArrayList<Bitmap> bit_collect = new ArrayList<Bitmap>(); 
    private SurfaceView surfaceView; 
    private Camera camera = null; 
    int camera_type = 1; 
    private MediaRecorder mediaRecorder = null; 

    @Override 
    public void onCreate() { 
     windowManager = (WindowManager) this.getSystemService(Context.WINDOW_SERVICE); 
     surfaceView = new SurfaceView(this); 
     WindowManager.LayoutParams layoutParams = new WindowManager.LayoutParams(1, 1, WindowManager.LayoutParams.TYPE_SYSTEM_OVERLAY, 
       WindowManager.LayoutParams.FLAG_WATCH_OUTSIDE_TOUCH, 
       PixelFormat.TRANSLUCENT 
     ); 
     layoutParams.gravity = Gravity.LEFT | Gravity.TOP; 
     windowManager.addView(surfaceView, layoutParams); 
     surfaceView.getHolder().addCallback(this); 
    } 

    @Override 
    public void surfaceCreated(SurfaceHolder surfaceHolder) { 
     camera = Camera.open(camera_type); 
     mediaRecorder = new MediaRecorder(); 
     camera.unlock(); 
     mediaRecorder.setPreviewDisplay(surfaceHolder.getSurface()); 
     mediaRecorder.setCamera(camera); 
     mediaRecorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER); 
     mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA); 
     mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH)); 
     mediaRecorder.setOutputFile("/sdcard/unlock.mp4"); 
     try { 
      mediaRecorder.prepare(); 
      mediaRecorder.start(); 
      camera.setPreviewCallback(this); 
      t = new Timer(); 
      t.schedule(new TimerTask() { 
       @Override 
       public void run() { 
        camera.setPreviewCallback(null); 
        stopSelf(); 
        stopped = true; 
       } 
      }, 8000); 
     } catch (Exception e) { 
      Toast.makeText(getApplicationContext(), "getting exception ", Toast.LENGTH_LONG).show(); 
     } 
    } 

    public int onStartCommand(Intent intent, int flags, int flagID) { 
     handler = new Handler(Looper.getMainLooper()); 
     return super.onStartCommand(intent, flags, flagID); 
    } 

    @Override 
    public void onDestroy() { 
     mediaRecorder.stop(); 
     mediaRecorder.reset(); 
     mediaRecorder.release(); 
     camera.lock(); 
     camera.release(); 
     windowManager.removeView(surfaceView); 
    } 

    @Override 
    public void surfaceChanged(SurfaceHolder surfaceHolder, int format, int width, int height) { 
    } 

    @Override 
    public void surfaceDestroyed(SurfaceHolder surfaceHolder) { 
    } 

    @Override 
    public IBinder onBind(Intent intent) { 
     return null; 
    } 

    @Override 
    public void onPreviewFrame(byte[] data, Camera camera) { 
     try { 
      Camera.Parameters parameters = camera.getParameters(); 
      Camera.Size size = parameters.getPreviewSize(); 
      YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null); 
      ByteArrayOutputStream os = new ByteArrayOutputStream(); 
      image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, os); 
      byte[] jpegByteArray = os.toByteArray(); 
      bitmaper = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length); 
      bit_collect.add(bitmaper); 
      handler.post(new Runnable() { 
       @Override 
       public void run() { 
        processing(bitmaper); 
       } 
      }); 
     } catch (Exception e) { 
      Toast.makeText(getApplicationContext(), "no frames ", Toast.LENGTH_SHORT).show(); 
     } 
    } 

    public void processing(Bitmap final_byte) { 
     if (final_byte != null) { 
      int width = final_byte.getWidth(); 
      int height = final_byte.getHeight(); 
      FaceDetector detector = new FaceDetector(width, height, Background_Recording.MAX_FACES); 
      FaceDetector.Face[] faces = new FaceDetector.Face[Background_Recording.MAX_FACES]; 
      int facesFound = detector.findFaces(final_byte, faces); 
      if (facesFound > 0) { 
       Toast.makeText(getApplicationContext(), "face found", Toast.LENGTH_SHORT).show(); 
      } else { 
       final_byte.recycle(); 
       Toast.makeText(getApplicationContext(), "no face found", Toast.LENGTH_SHORT).show(); 
      } 
     } 
    } 
} 

回答

1

您正在運行在UI線程上的人臉檢測 - 因爲你的handler連接到主線程的Looper。你應該把所有這些移到背景。請嘗試下面的代碼,並查看我的評論以解釋我的更改。它可能無法100%運行,但它應該非常接近。

//at the beginning of your class 
//Worker thread whose Looper backs the frame-processing Handler, keeping 
//face detection off the UI thread. Must be quit() in onDestroy. 
private HandlerThread handlerThread; 

//... 
@Override 
public int onStartCommand(Intent intent, int flags, int flagID) { 
    //Create a dedicated worker thread; the Handler bound to its Looper 
    //processes the image data and runs face detection off the UI thread. 
    handlerThread = new HandlerThread("faceDetectionThread"); 
    handlerThread.start(); 
    //BUG FIX: assign the FIELD. The original declared a shadowing local 
    //(`Handler handler = ...`), so onPreviewFrame kept posting to the old 
    //main-thread handler and detection still ran on the UI thread. 
    handler = new Handler(handlerThread.getLooper()); 
    return super.onStartCommand(intent, flags, flagID); 
} 

@Override 
public void onPreviewFrame(final byte[] data, final Camera camera) { 
    try { 
     //Latest-frame-wins debouncing: discard any detection job still 
     //queued, then schedule this frame 100 ms out. If a newer frame 
     //arrives inside that window, this one is dropped in its turn, so 
     //at most one frame is ever waiting to be processed. 
     handler.removeCallbacksAndMessages(null); 
     Runnable detectJob = new Runnable() { 
      @Override 
      public void run() { 
       processing(data, camera); 
      } 
     }; 
     handler.postDelayed(detectJob, 100); 
    } catch (Exception e) { 
     Toast.makeText(getApplicationContext(), "no frames ", Toast.LENGTH_SHORT).show(); 
    } 
} 

/** 
 * Converts one raw preview frame to a Bitmap and runs the legacy 
 * FaceDetector over it. Runs on the background HandlerThread. 
 */ 
public void processing(final byte[] data, final Camera camera) { 

    // Bitmap conversion must stay on the background thread — 
    // moved here from onPreviewFrame. 
    Camera.Parameters parameters = camera.getParameters(); 
    Camera.Size size = parameters.getPreviewSize(); 
    YuvImage image = new YuvImage(data, parameters.getPreviewFormat(), size.width, size.height, null); 
    ByteArrayOutputStream os = new ByteArrayOutputStream(); 
    image.compressToJpeg(new Rect(0, 0, size.width, size.height), 100, os); 
    byte[] jpegByteArray = os.toByteArray(); 
    // BUG FIX: FaceDetector.findFaces requires an RGB_565 bitmap; without 
    // this config the call throws IllegalArgumentException. 
    BitmapFactory.Options opts = new BitmapFactory.Options(); 
    opts.inPreferredConfig = Bitmap.Config.RGB_565; 
    bitmaper = BitmapFactory.decodeByteArray(jpegByteArray, 0, jpegByteArray.length, opts); 

    if (bitmaper != null) { 
     int width = bitmaper.getWidth(); 
     int height = bitmaper.getHeight(); 
     FaceDetector detector = new FaceDetector(width, height, Background_Recording.MAX_FACES); 
     FaceDetector.Face[] faces = new FaceDetector.Face[Background_Recording.MAX_FACES]; 
     int facesFound = detector.findFaces(bitmaper, faces); 
     if (facesFound > 0) { 
      // BUG FIX: keep only face-bearing frames. The original added EVERY 
      // frame to bit_collect (unbounded growth) yet recycled the no-face 
      // ones afterwards, leaving recycled bitmaps in the list. 
      bit_collect.add(bitmaper); 
      Toast.makeText(getApplicationContext(), "face found", Toast.LENGTH_SHORT).show(); 
     } else { 
      bitmaper.recycle(); // not kept — release pixel memory immediately 
      Toast.makeText(getApplicationContext(), "no face found", Toast.LENGTH_SHORT).show(); 
     } 
    } 
} 

//... 

@Override 
public void onDestroy() { 
    // Guard against the preview surface never having been created — in that 
    // case mediaRecorder and camera are still null and the original crashed. 
    if (mediaRecorder != null) { 
     try { 
      mediaRecorder.stop(); 
     } catch (RuntimeException ignored) { 
      // stop() throws if start() never succeeded; nothing was recorded. 
     } 
     mediaRecorder.reset(); 
     mediaRecorder.release(); 
    } 
    if (camera != null) { 
     camera.lock(); 
     camera.release(); 
    } 
    windowManager.removeView(surfaceView); 
    if (handlerThread != null) { 
     handlerThread.quit(); //Don't forget this — lets the worker thread exit! 
    } 
} 

而且,我想如果你把 MAX_FACES 減少到 1,人臉檢測會更快。

+0

謝謝老兄,這個方法起作用了 –

+0

很高興聽到!既然如此,請採納這個答案。 – theFunkyEngineer