Face recognition login to an Android application

I am developing an Android application that keeps the messages in the inbox private: the user will log in only through face recognition, and after that can read messages and send messages to private contacts. I am stuck at verifying the user with face recognition. I have used a Microsoft subscription key with the sample application and modified it for my app, but I cannot get any result, only a blank screen comes up. My activity class looks like this:
public class Recognition extends AppCompatActivity {
// Background task for face verification.
private class VerificationTask extends AsyncTask<Void, String, VerifyResult> {
// The IDs of the two faces to verify.
private UUID mFaceId0;
private UUID mFaceId1;
VerificationTask (UUID faceId0, UUID faceId1) {
mFaceId0 = faceId0;
mFaceId1 = faceId1;
}
@Override
protected VerifyResult doInBackground(Void... params) {
// Get an instance of face service client to detect faces in image.
FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
try{
publishProgress("Verifying...");
// Start verification.
return faceServiceClient.verify(
mFaceId0, /* The first face ID to verify */
mFaceId1); /* The second face ID to verify */
} catch (Exception e) {
publishProgress(e.getMessage());
addLog(e.getMessage());
return null;
}
}
@Override
protected void onPreExecute() {
progressDialog.show();
addLog("Request: Verifying face " + mFaceId0 + " and face " + mFaceId1);
}
@Override
protected void onProgressUpdate(String... progress) {
progressDialog.setMessage(progress[0]);
setInfo(progress[0]);
}
@Override
protected void onPostExecute(VerifyResult result) {
if (result != null) {
addLog("Response: Success. Face " + mFaceId0 + " and face "
+ mFaceId1 + (result.isIdentical ? " " : " don't ")
+ "belong to the same person");
}
// Show the result on screen when verification is done.
setUiAfterVerification(result);
}
}
// Background task of face detection.
private class DetectionTask extends AsyncTask<InputStream, String, Face[]> {
// Index indicating in which of the two images detection is run.
private int mIndex;
private boolean mSucceed = true;
DetectionTask(int index) {
mIndex = index;
}
@Override
protected Face[] doInBackground(InputStream... params) {
// Get an instance of face service client to detect faces in image.
FaceServiceClient faceServiceClient = SampleApp.getFaceServiceClient();
try{
publishProgress("Detecting...");
// Start detection.
return faceServiceClient.detect(
params[0], /* Input stream of image to detect */
true, /* Whether to return face ID */
false, /* Whether to return face landmarks */
/* Which face attributes to analyze, currently we support:
age,gender,headPose,smile,facialHair */
null);
} catch (Exception e) {
mSucceed = false;
publishProgress(e.getMessage());
addLog(e.getMessage());
return null;
}
}
@Override
protected void onPreExecute() {
progressDialog.show();
addLog("Request: Detecting in image" + mIndex);
}
@Override
protected void onProgressUpdate(String... progress) {
progressDialog.setMessage(progress[0]);
setInfo(progress[0]);
}
@Override
protected void onPostExecute(Face[] result) {
// Show the result on screen when detection is done.
setUiAfterDetection(result, mIndex, mSucceed);
}
}
// Request codes indicating which of the two images is being selected.
private static final int REQUEST_SELECT_IMAGE_0 = 0;
private static final int REQUEST_SELECT_IMAGE_1 = 1;
DataBaseHandler db = new DataBaseHandler(this);
// The IDs of the two faces to be verified.
private UUID mFaceId0;
private UUID mFaceId1;
List<Contact> contacts;
// The two images from where we get the two faces to verify.
private Bitmap mBitmap0;
private Bitmap mBitmap1;
// The adapter of the ListView which contains the detected faces from the two images.
protected FaceListAdapter mFaceListAdapter0;
protected FaceListAdapter mFaceListAdapter1;
// Progress dialog popped up when communicating with server.
ProgressDialog progressDialog;
// When the activity is created, set all the member variables to initial state.
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_recognition);
// Initialize the two ListViews which contain the thumbnails of the detected faces.
progressDialog = new ProgressDialog(this);
progressDialog.setTitle("Recognizing");
take_photo();
Intent intent=new Intent(Recognition.this,Inbox.class);
startActivity(intent);
}
private Bitmap image_saved() {
Bitmap theImage = null;
contacts = db.getAllContacts();
for (Contact cn : contacts) {
String log = "ID:" + cn.getID() + " Name: " + cn.getName()
+ " ,Image: " + cn.getImage();
byte[] outImage = cn._image;
ByteArrayInputStream imageStream = new ByteArrayInputStream(outImage);
theImage = BitmapFactory.decodeStream(imageStream);
// imageView.setImageBitmap(theImage);
// username.setText(cn.getName());
detect(theImage,1);
}
if (theImage != null) {
}
return theImage;
}
public void take_photo() {
Intent intent = new Intent("android.media.action.IMAGE_CAPTURE");
intent.putExtra("android.intent.extras.CAMERA_FACING", android.hardware.Camera.CameraInfo.CAMERA_FACING_FRONT);
intent.putExtra("android.intent.extras.LENS_FACING_FRONT", 1);
intent.putExtra("android.intent.extra.USE_FRONT_CAMERA", true);
startActivityForResult(intent, REQUEST_SELECT_IMAGE_0);
}
// Called when image selection is done. Begin detecting if the image is selected successfully.
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
// Index indicates which of the two images is selected.
if(resultCode == REQUEST_SELECT_IMAGE_0) {
// If image is selected successfully, set the image URI and bitmap.
Bitmap bitmap = ImageHelper.loadSizeLimitedBitmapFromUri(
data.getData(), getContentResolver());
if (bitmap != null) {
// Image is selected but not yet detected; disable the verification button.
int index=0;
// Set the image to detect.
if (index == 0) {
mBitmap0 = bitmap;
mFaceId0 = null;
} else
{
mBitmap1 = bitmap;
mFaceId1 = null;
}
// Add verification log.
addLog("Image" + index + ": " + data.getData() + " resized to " + bitmap.getWidth()
+ "x" + bitmap.getHeight());
detect( image_saved(),1);
// Start detecting in image.
detect(bitmap, index);
next();
Toast.makeText(Recognition.this,"Image Not Match",Toast.LENGTH_SHORT).show();
}
}
}
private boolean next() {
new VerificationTask(mFaceId0, mFaceId1).execute();
return true;
}
private void setUiAfterVerification(VerifyResult result) {
// Verification is done, hide the progress dialog.
progressDialog.dismiss();
// Enable all the buttons.
// Show verification result.
if (result != null) {
DecimalFormat formatter = new DecimalFormat("#0.00");
String verificationResult = (result.isIdentical ? "The same person": "Different persons")
+ ". The confidence is " + formatter.format(result.confidence);
setInfo(verificationResult);
}
}
// Show the result on screen when detection in the image indicated by index is done.
private void setUiAfterDetection(Face[] result, int index, boolean succeed) {
if (succeed) {
addLog("Response: Success. Detected "
+ result.length + " face(s) in image" + index);
setInfo(result.length + " face" + (result.length != 1 ? "s": "") + " detected");
// Show the detailed list of detected faces.
// Set the default face ID to the ID of first face, if one or more faces are detected.
}
if (result != null && result.length == 0) {
setInfo("No face detected!");
}
if ((index == 0 && mBitmap1 == null) || (index == 1 && mBitmap0 == null) || index == 2) {
progressDialog.dismiss();
}
if (mFaceId0 != null && mFaceId1 != null) {
}
}
// Start detecting in image specified by index.
private void detect(Bitmap bitmap, int index) {
// Put the image into an input stream for detection.
ByteArrayOutputStream output = new ByteArrayOutputStream();
bitmap.compress(Bitmap.CompressFormat.JPEG, 100, output);
ByteArrayInputStream inputStream = new ByteArrayInputStream(output.toByteArray());
// Start a background task to detect faces in the image.
new DetectionTask(index).execute(inputStream);
// Set the status to show that detection starts.
setInfo("Detecting...");
}
// Set the information panel on screen.
private void setInfo(String info) {
TextView textView = (TextView) findViewById(R.id.info);
textView.setText(info);
}
// Add a log item.
private void addLog(String log) {
LogHelper.addVerificationLog(log);
}
// The adapter of the GridView which contains the thumbnails of the detected faces.
private abstract class FaceListAdapter extends BaseAdapter {
// The detected faces.
List<Face> faces;
int mIndex;
// The thumbnails of detected faces.
List<Bitmap> faceThumbnails;
// Initialize with the detection result and the index indicating on which image the result was obtained.
FaceListAdapter(Face[] detectionResult, int index) {
faces = new ArrayList<>();
faceThumbnails = new ArrayList<>();
mIndex = index;
if (detectionResult != null) {
faces = Arrays.asList(detectionResult);
for (Face face : faces) {
try {
// Crop face thumbnail without landmarks drawn.
faceThumbnails.add(ImageHelper.generateFaceThumbnail(
index == 0 ? mBitmap0 : mBitmap1, face.faceRectangle));
} catch (IOException e) {
// Show the exception when generating face thumbnail fails.
setInfo(e.getMessage());
}
}
}
}
@Override
public int getCount() {
return faces.size();
}
@Override
public Object getItem(int position) {
return faces.get(position);
}
@Override
public long getItemId(int position) {
return position;
}
}
}
You seem to have forgotten to include a question in your question. – Biffen
Your problem is not exactly trivial, and you will have to rely on a third-party service. My first thought would be to look into Microsoft Cognitive Services and build some kind of authentication service around it. (https://www.microsoft.com/cognitive-services/en-us/face-api) – grmbl
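As a rough illustration of that suggestion: the Face API flow the posted code already uses boils down to detect() on each image to obtain a face ID, then verify() on the two IDs. Below is a minimal sketch of wrapping that into a login check, assuming the SampleApp.getFaceServiceClient() factory from the Microsoft sample project is available; the FaceLoginHelper class name and the 0.6 confidence threshold are placeholders of mine, not part of the SDK.

import com.microsoft.projectoxford.face.FaceServiceClient;
import com.microsoft.projectoxford.face.contract.Face;
import com.microsoft.projectoxford.face.contract.VerifyResult;

import java.io.InputStream;
import java.util.UUID;

// Sketch only: a small helper that answers "do these two photos show the same person?".
public class FaceLoginHelper {

    // Call this off the UI thread (e.g. from an AsyncTask), the same way the
    // sample's DetectionTask and VerificationTask do.
    public static boolean isSamePerson(InputStream storedPhoto, InputStream cameraPhoto)
            throws Exception {
        FaceServiceClient client = SampleApp.getFaceServiceClient();

        // Detect a face in each image and keep the returned face IDs.
        UUID storedFaceId = firstFaceId(client.detect(storedPhoto, true, false, null));
        UUID cameraFaceId = firstFaceId(client.detect(cameraPhoto, true, false, null));
        if (storedFaceId == null || cameraFaceId == null) {
            return false; // No face detected in at least one of the images.
        }

        // Ask the service whether the two faces belong to the same person.
        VerifyResult result = client.verify(storedFaceId, cameraFaceId);
        return result.isIdentical && result.confidence > 0.6; // threshold is an assumption
    }

    private static UUID firstFaceId(Face[] faces) {
        return (faces != null && faces.length > 0) ? faces[0].faceId : null;
    }
}

If this returns true you can let the user into the app; otherwise stay on the login screen and let them retry.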
Yes @grmbl, your link is what I need, but how can I use these libraries in my Android application for login user authentication? I am very new to this; please provide a step-by-step procedure. –
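One possible way to wire this into the activity posted above (Recognition, Inbox, setInfo() and progressDialog all refer to the question's own code; the 0.6 threshold is again an assumption): capture the selfie, detect both images, run the VerificationTask, and only start the Inbox activity once the verification result comes back positive, instead of starting it unconditionally in onCreate(). A sketch of the result handler:

// Sketch only: navigate to the inbox only after the face has been verified.
private void setUiAfterVerification(VerifyResult result) {
    // Verification is done, hide the progress dialog.
    progressDialog.dismiss();
    if (result != null && result.isIdentical && result.confidence > 0.6) { // assumed threshold
        setInfo("Face verified, opening inbox...");
        startActivity(new Intent(Recognition.this, Inbox.class));
        finish(); // Prevent navigating back to the login screen.
    } else {
        setInfo("Face not recognized, please try again.");
    }
}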