Android camera2 face recognition


My attempts were on Android 5.0 (API 21). After updating to 5.1 (API 22), it started working without any code changes.

raj

I found this working sample for the front camera with face detection:

https://github.com/rajktariya/Android-Camera2-Front-with-Face-Detection

I found that only in the STATE_PREVIEW case can you process the result to get the number of faces. Change from:

private CameraCaptureSession.CaptureCallback mCaptureCallback
        = new CameraCaptureSession.CaptureCallback() {

    private void process(CaptureResult result) {
        Integer mode = result.get(CaptureResult.STATISTICS_FACE_DETECT_MODE);
        Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
        if(faces != null && mode != null) {
            Log.e("tag", "faces : " + faces.length + " , mode : " + mode);
        }

        switch (mState) {
            case STATE_PREVIEW: {
                // We have nothing to do when the camera preview is working normally.
                break;
            }
...

to

private CameraCaptureSession.CaptureCallback mCaptureCallback
        = new CameraCaptureSession.CaptureCallback() {

    private void process(CaptureResult result) {


        switch (mState) {
            case STATE_PREVIEW: {
                Face[] faces = result.get(CaptureResult.STATISTICS_FACES);
                if (faces != null && faces.length > 0) {
                    Log.e("tag", "faces : " + faces.length);
                }
                break;
            }

Please try this to see if it works.
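
Note that CaptureResult.STATISTICS_FACES is only populated when face detection has been enabled on the capture request itself. A minimal sketch of turning it on for the repeating preview request (field names like mCameraCharacteristics, mPreviewRequestBuilder, mCaptureSession and mBackgroundHandler follow the camera2basic-style code above and are assumptions here):

try {
    // Pick the strongest face-detect mode the device supports (FULL > SIMPLE > OFF).
    int[] modes = mCameraCharacteristics.get(
            CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
    int faceDetectMode = CaptureRequest.STATISTICS_FACE_DETECT_MODE_OFF;
    for (int mode : modes) {
        faceDetectMode = Math.max(faceDetectMode, mode);
    }

    // Enable face detection on the repeating preview request so that
    // CaptureResult.STATISTICS_FACES gets filled in.
    mPreviewRequestBuilder.set(CaptureRequest.STATISTICS_FACE_DETECT_MODE, faceDetectMode);
    mCaptureSession.setRepeatingRequest(
            mPreviewRequestBuilder.build(), mCaptureCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
    e.printStackTrace();
}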

Francisco Durdin Garcia

I think your phone does not work well with Google's face detection. Are you sure that it uses HAL3 and can use API 2?
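
A quick way to see what the device actually reports (a sketch; "characteristics" here is assumed to be the CameraCharacteristics of the camera id you are opening):

// LEGACY hardware level and a face-mode list containing only OFF usually mean
// camera2 face data will never show up on that device.
Integer hwLevel = characteristics.get(CameraCharacteristics.INFO_SUPPORTED_HARDWARE_LEVEL);
int[] faceModes = characteristics.get(CameraCharacteristics.STATISTICS_INFO_AVAILABLE_FACE_DETECT_MODES);
Integer maxFaces = characteristics.get(CameraCharacteristics.STATISTICS_INFO_MAX_FACE_COUNT);
Log.d(TAG, "hwLevel=" + hwLevel
        + " faceModes=" + Arrays.toString(faceModes)
        + " maxFaces=" + maxFaces);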

For example, in my code I'm using face detection without any problems, like this:

 private CameraCaptureSession.CaptureCallback mPhotoCaptureCallback
            = new CameraCaptureSession.CaptureCallback() {
//more code...
  private void process(CaptureResult result) {
            switch (mState) {
                case STATE_PREVIEW: {
                    checkFaces(result.get(CaptureResult.STATISTICS_FACES));
                   //more code....
                    break;
                }
//more code...
}

Here is the checkFaces method:

 private void checkFaces(Face[] faces) {
    if (faces != null) {
        CameraFaceUtil.CustomFace[] mMappedCustomFaces;
        mMappedCustomFaces = computeFacesFromCameraCoordinates(faces);
        if (faces.length > 0) {
            mHandler.sendEmptyMessage(SHOW_FACES_MSG);
            mLastTimeRenderingFaces = System.currentTimeMillis();
        }
    } else {
        if (System.currentTimeMillis() > (mLastTimeRenderingFaces + 100)) {
            mHandler.sendEmptyMessage(HIDE_FACES_MSG);
        }
    }
}
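
The mHandler and the SHOW_FACES_MSG / HIDE_FACES_MSG handling aren't shown in the answer. A minimal sketch of what that side might look like (the FaceOverlayView is an assumption, not part of the original code; the mapped rectangles would be handed to it separately, e.g. via a setFaces(...) call like the one sketched at the end of this answer):

private static final int SHOW_FACES_MSG = 1;
private static final int HIDE_FACES_MSG = 2;

// Main-thread handler so it is safe to touch the overlay view directly.
private final Handler mHandler = new Handler(Looper.getMainLooper()) {
    @Override
    public void handleMessage(Message msg) {
        switch (msg.what) {
            case SHOW_FACES_MSG:
                mFaceOverlayView.setVisibility(View.VISIBLE);
                mFaceOverlayView.invalidate(); // redraw with the latest rectangles
                break;
            case HIDE_FACES_MSG:
                mFaceOverlayView.setVisibility(View.INVISIBLE);
                break;
        }
    }
};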

My custom Face class:

     //    public static class CustomFace extends Camera.CustomFace{
public static class CustomFace {
    private int score = 0;
    private Rect rect = null;

    public CustomFace(Rect rect, int score) {
        this.score = score;
        this.rect = rect;
    }

    public int getScore() {
        return score;
    }

    public Rect getBounds() {
        return rect;
    }
}

Finally, with this method you can draw the faces correctly. (You can use the default Android one, but the rectangles don't work well in 4:3 or 16:9 sizes, or when you rotate the phone.)

  public static RectF rectToRectF(Rect r) {
    return new RectF(r.left, r.top, r.right, r.bottom);
}
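
The inverse helper, CameraUtil.rectFToRect, is used below but isn't shown in the answer; a plausible version would just round back to integer coordinates:

  public static Rect rectFToRect(RectF r) {
    Rect rect = new Rect();
    r.round(rect); // round each edge to the nearest int
    return rect;
}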

     private CameraFaceUtil.CustomFace[] computeFacesFromCameraCoordinates(Face[] faces) {
        CameraFaceUtil.CustomFace[] mappedFacesList = new CameraFaceUtil.CustomFace[faces.length];

        mCameraCharacteristics.get(CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);

        float toStandardAspectRatio = ((float) mPreviewRect.bottom / (float) mPreviewRect.right) / AutoFitTextureView.RATIO_STANDARD;
        for (int i = 0; i < faces.length; i++) {

            RectF mappedRect = new RectF();
            Log.i(TAG, "[computeFacesFromCameraCoordinates] toStandardAspectRatio: " + toStandardAspectRatio);
            Log.i(TAG, "[computeFacesFromCameraCoordinates] preview rect: " + mPreviewRect);
            Log.i(TAG, "[computeFacesFromCameraCoordinates] raw rect: " + faces[i].getBounds());

            mCameraToPreviewMatrix.mapRect(mappedRect, CameraUtil.rectToRectF(faces[i].getBounds()));

            Log.i(TAG, "[computeFacesFromCameraCoordinates] mapped rect: " + mappedRect);

            Rect auxRect = new Rect(CameraUtil.rectFToRect(mappedRect));


            Log.i(TAG, "[computeFacesFromCameraCoordinates] aux rect: " + auxRect);

            int cameraSensorOrientation = mCameraCharacteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
            Log.i(TAG, "[computeFacesFromCameraCoordinates] cameraSensorOrientation: " + cameraSensorOrientation);
            switch (cameraSensorOrientation) {
                case 90:
                    mappedRect.top = auxRect.left;
                    mappedRect.bottom = auxRect.right;
                    mappedRect.left = (mPreviewRect.right - auxRect.bottom);
                    mappedRect.right = (mPreviewRect.right - auxRect.top);
                    break;

                case 180:
                    mappedRect.top = (mPreviewRect.bottom - auxRect.bottom) * toStandardAspectRatio;
                    mappedRect.bottom = (mPreviewRect.bottom - auxRect.top) * toStandardAspectRatio;
                    mappedRect.left = (mPreviewRect.right - auxRect.right) * toStandardAspectRatio;
                    mappedRect.right = (mPreviewRect.right - auxRect.left) * toStandardAspectRatio;
                    break;

                case 270:
                    mappedRect.top = (mPreviewRect.bottom - auxRect.right) * toStandardAspectRatio;
                    mappedRect.bottom = (mPreviewRect.bottom - auxRect.left) * toStandardAspectRatio;
                    mappedRect.left = auxRect.top;
                    mappedRect.right = auxRect.bottom;
                    break;
            }

            Log.i(TAG, "[computeFacesFromCameraCoordinates] rotated by camera driver orientation rect without scale: "
                    + mappedRect + ",  with score: " + faces[i].getScore());

            float topOffset = mappedRect.top;
            float leftOffset = mappedRect.left;

            mappedRect.top = mappedRect.top * toStandardAspectRatio;
            mappedRect.bottom = mappedRect.bottom * toStandardAspectRatio;
            mappedRect.left = mappedRect.left * toStandardAspectRatio;
            mappedRect.right = mappedRect.right * toStandardAspectRatio;


            Log.i(TAG, "[computeFacesFromCameraCoordinates] rotated by camera driver orientation rect with scale: "
                    + mappedRect + ",  with score: " + faces[i].getScore());

            topOffset = mappedRect.top - topOffset;
            leftOffset = mappedRect.left - leftOffset;

            mappedRect.top -= topOffset /*- (mMirror ? mPreviewRect.height() : 0)*/;
            mappedRect.bottom -= topOffset /* - (mMirror ? mPreviewRect.height() : 0)*/;
            mappedRect.left -= leftOffset;
            mappedRect.right -= leftOffset;

            Log.i(TAG, "[computeFacesFromCameraCoordinates] rotated by camera driver orientation rect with offset: "
                    + mappedRect + " topOffset " + topOffset + " leftOffset " + leftOffset);

            // set the new values to the mapping array to get rendered
            mappedFacesList[i] = new CameraFaceUtil.CustomFace(CameraUtil.rectFToRect(mappedRect), faces[i].getScore());
        }

        return mappedFacesList;

    }
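
mCameraToPreviewMatrix isn't shown in the answer either. As a rough sketch of how such a matrix could be set up (an assumption, not the author's code): the face rectangles from STATISTICS_FACES are in the sensor's active-array coordinate system, so one option is to scale from that space into the preview rect and mirror for the front camera:

// Hypothetical setup for mCameraToPreviewMatrix -- not part of the original answer.
Rect activeArray = mCameraCharacteristics.get(
        CameraCharacteristics.SENSOR_INFO_ACTIVE_ARRAY_SIZE);
mCameraToPreviewMatrix.reset();
// Scale sensor (active array) coordinates into the preview rect.
mCameraToPreviewMatrix.setScale(
        (float) mPreviewRect.width() / activeArray.width(),
        (float) mPreviewRect.height() / activeArray.height());
if (mFacingFront) { // hypothetical flag: true when using the front camera
    // Mirror horizontally, since the front camera preview is mirrored.
    mCameraToPreviewMatrix.postScale(-1f, 1f, mPreviewRect.centerX(), mPreviewRect.centerY());
}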

What I'm doing is drawing the faces based on the screen ratio and size. Feel free to ask if you need anything else about the camera2 API.
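
For completeness, the drawing code itself isn't in the answer; a minimal overlay view that paints the mapped rectangles could look like this (the class name and its wiring are assumptions):

public class FaceOverlayView extends View {
    private final Paint mPaint = new Paint();
    private CameraFaceUtil.CustomFace[] mFaces;

    public FaceOverlayView(Context context, AttributeSet attrs) {
        super(context, attrs);
        mPaint.setStyle(Paint.Style.STROKE);
        mPaint.setStrokeWidth(4f);
        mPaint.setColor(Color.GREEN);
    }

    // Called with the result of computeFacesFromCameraCoordinates(...).
    public void setFaces(CameraFaceUtil.CustomFace[] faces) {
        mFaces = faces;
        invalidate();
    }

    @Override
    protected void onDraw(Canvas canvas) {
        super.onDraw(canvas);
        if (mFaces == null) return;
        for (CameraFaceUtil.CustomFace face : mFaces) {
            canvas.drawRect(face.getBounds(), mPaint); // bounds are already in view coordinates
        }
    }
}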
