Android FaceDetector實現人臉檢測,人臉追蹤(框出人臉)(MVP模式)
阿新 • 發佈:2019-02-15
一 主要流程:
1.通過FaceDetector類來檢測人臉,返回獲取到的人臉資訊,以及人臉的座標,通過人臉座標可以做人臉追蹤的操作。
2.通過兩個surfaceview,一個surfaceview用來做相機的預覽,另外一個surfaceview附著在相機預覽surfaceview上面,用來繪製人臉的相框。
二 核心程式碼:
1.人臉檢測類的使用
import android.graphics.Bitmap; import android.media.FaceDetector; import com.skyee.medicalrobot.common.Config; import com.skyee.medicalrobot.recognitionface.view.CameraView; /** * Created by qiandu on 2017/7/4. */ public class FaceCheckManager { private FaceDetector mFaceDetector; private FaceDetector.Face[] mFace = new FaceDetector.Face[Config.FACE_MAX_NUM]; private FaceCheckManager(Bitmap bitmap){ mFaceDetector = new FaceDetector(bitmap.getWidth(), bitmap.getHeight(), Config.FACE_MAX_NUM); } public static class Builder{ private CameraView mCmView; private Bitmap mBitmap; public Builder setImgData(Bitmap image){ this.mBitmap = image; return this; } public Builder setCmView(CameraView mCmView){ this.mCmView = mCmView; return this; } public FaceCheckManager create(){ return new FaceCheckManager(mBitmap); } } public void findFace(final FaceCheckCallback faceCallback,Bitmap bitmap){ if(bitmap == null){ faceCallback.onError("mBitmap == null"); return; }
//Bitmap.Config.RGB_565 這句很重要,可以提高檢測率
Bitmap mBitmap = bitmap.copy(Bitmap.Config.RGB_565, true); final int faceResult = mFaceDetector.findFaces(mBitmap, mFace);
faceCallback.onResult(faceResult, mFace, mBitmap); bitmap.recycle(); }}
2.繪製人臉相框
/** * 開始預覽 */ public void takePreview() { // Logger.v("takePreview"); if(mCamera != null){ if(isPreviewing){ mCamera.stopPreview(); return; } parameters = mCamera.getParameters(); parameters.setFlashMode("off"); // 無閃光燈 parameters.setWhiteBalance(Camera.Parameters.WHITE_BALANCE_AUTO); parameters.setSceneMode(Camera.Parameters.SCENE_MODE_AUTO); parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO); parameters.setPreviewFormat(ImageFormat.NV21); //mCamera.setDisplayOrientation(90); CamParaUtil.getInstance().printSupportPictureSize(parameters); CamParaUtil.getInstance().printSupportPreviewSize(parameters); Camera.Size pictureSize = CamParaUtil.getInstance().getPropPictureSize( parameters.getSupportedPictureSizes(), previewRate, 800); parameters.setPictureSize(pictureSize.width, pictureSize.height); Camera.Size previewSize = CamParaUtil.getInstance().getPropPreviewSize( parameters.getSupportedPreviewSizes(), previewRate , 800); //這裡的 pictureSize 和 PreviewSize很重要, 一般平板採用上面的CamParaUtil可以獲取到預設支援最低的解析度。偶爾也會出現解析度不正確的情況,建議採用
自己打印出來看看系統支援的解析度,然後根據需要,選擇相應的解析度
} </preparameters.setPictureSize(pictureSize.width, pictureSize.height); parameters.setPreviewSize(previewSize.width, previewSize.height); mCamera.setParameters(parameters); //Logger.v("setPreviewCallback前" + pictureSize.width + "//" + pictureSize.height + " oo "+ previewSize.width + "//" + previewSize.height); mCamera.setPreviewCallback(new Camera.PreviewCallback() { @Override public void onPreviewFrame(byte[] bytes, Camera camera) { // Logger.v("setPreviewCallback"); Camera.Size size = camera.getParameters().getPreviewSize(); YuvImage image = new YuvImage(bytes, ImageFormat.NV21, size.width, size.height, null); if(bytes != null){ ByteArrayOutputStream out = new ByteArrayOutputStream(); image.compressToJpeg(new Rect(0, 0, size.width, size.height), 50, out); byte[] datas = out.toByteArray(); BitmapFactory.Options options = new BitmapFactory.Options(); options.inPreferredConfig = Bitmap.Config.RGB_565; Bitmap mBitmap = BitmapFactory.decodeByteArray(datas, 0, datas.length, options); long l = System.nanoTime(); //FileUtil.saveBitmap("cccc/"+l+"ee.png", mBitmap); Matrix matrix = new Matrix(); //matrix.postRotate((float)90); matrix.postScale(0.4f, 0.3125f); //照片的大小使 1280*960 螢幕的大小使 1024*600 這裡需要注意換算比例 // Logger.v("MyCameraManager faceCheckFlag"); //synchronized (this) { //Logger.v("MyCameraManager synchronized"); //if(faceCheckFlag){ //setFaceCheckFlag(false); //Logger.v("MyCameraManager synchronized" + errornum + mBitmap.getWidth() + "dd " + mBitmap.getHeight()); Bitmap bitmap = Bitmap.createBitmap(mBitmap, 0, 0, mBitmap.getWidth(), mBitmap.getHeight(), matrix, false); FaceCheckManager faceCheckManager = new FaceCheckManager.Builder().setCmView(mCameraView).setImgData(bitmap).create(); faceCheckManager.findFace(MyCameraManager.this, bitmap); //} // } mBitmap.recycle(); } } }); // Logger.v("setPreviewCallback後"); mCamera.startPreview(); isPreviewing = true; } } @Override public void onResult(int faceNum, final FaceDetector.Face[] faceData, final Bitmap bitmap) { 
//Logger.v("MyCameraManager 發現了幾個臉" +faceNum); if(faceNum <= 0){ errornum ++; if(errornum>2){ clear(); errornum = 0; } setFaceCheckFlag(true); return; } if(befmSurfaceHolder != null ){//&& threadNum < 6) //鎖定整個SurfaceView /* new Thread(){ @Override public void run() { super.run();*/ // threadNum ++; Canvas mCanvas = befmSurfaceHolder.lockCanvas(); mCanvas.drawColor(Color.TRANSPARENT, PorterDuff.Mode.CLEAR); for(FaceDetector.Face face : faceData){ if (face == null) { break; } errornum = 0; //mCanvas.drawBitmap(MainApplication.mRotaBitmap, 0, 0 ,paint); PointF pointF = new PointF(); face.getMidPoint(pointF); float eyesDistance = face.eyesDistance(); //Logger.v("onResult"+ pointF.x + "_" + pointF.y); mCanvas.drawRect(new Rect((int)(pointF.x - eyesDistance)*2, (int)(pointF.y- eyesDistance)*2, (int)(pointF.x + eyesDistance)*2, (int)(pointF.y + eyesDistance)*2),paint);//繪製矩形 } befmSurfaceHolder.unlockCanvasAndPost(mCanvas); // threadNum --; //mCameraView.onDetectFace(bitmap); } //}.start(); //}