title: Android Native Face Recognition: Quick Face Tracking with Camera2 + FaceDetector
categories:
- Android
tags:
- Face Recognition
- FaceDetector
- Camera2
date: 2020-05-27 14:02:13
Preface

This post introduces Android's native face detection API, FaceDetector. It can tell you whether a face is on screen, how many faces there are, the 2D coordinates of the midpoint between the eyes, and the distance between the eyes. One pitfall I ran into: it works well for detecting whether a face is present, but if you need to precisely measure face position or distance, expect deviations, since the coordinates are 2D and do not locate things accurately in the real scene. If those capabilities are not enough for what you need to build, skip this approach.
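For context, this is roughly the entire surface that android.media.FaceDetector exposes per face (a sketch against the standard API; the FaceInfo wrapper class is just for illustration):
```
import android.graphics.PointF;
import android.media.FaceDetector;

class FaceInfo {
    // Everything a detected face exposes: eye midpoint, eye distance, confidence
    // (plus pose() for rough head rotation)
    static void inspect(FaceDetector.Face face) {
        PointF midPoint = new PointF();
        face.getMidPoint(midPoint);           // 2D midpoint between the eyes
        float eyeSpan = face.eyesDistance();  // distance between the eyes, in pixels
        float confidence = face.confidence(); // detection confidence, 0..1
        // No face rect and no landmarks: a bounding box has to be estimated
        // from the midpoint and the eye distance
    }
}
```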
Without further ado, let's start implementing.
<!-- more -->
Implementation
1. First, implement a custom View to draw rectangles over the preview:
```
class FaceView : View {
lateinit var mPaint: Paint
private var mColor = "#42ed45"
private var mFaces: ArrayList<RectF>? = null
constructor(context: Context) : super(context) {
init()
}
constructor(context: Context, attrs: AttributeSet?) : super(context, attrs) {
init()
}
constructor(context: Context, attrs: AttributeSet?, defStyleAttr: Int) : super(context, attrs, defStyleAttr) {
init()
}
private fun init() {
mPaint = Paint()
mPaint.color = Color.parseColor(mColor)
mPaint.style = Paint.Style.STROKE
mPaint.strokeWidth = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP, 3f, context.resources.displayMetrics)
mPaint.isAntiAlias = true
}
override fun onDraw(canvas: Canvas) {
super.onDraw(canvas)
mFaces?.let {
for (face in it) {
canvas.drawRect(face, mPaint)
}
}
}
fun setFaces(faces: ArrayList<RectF>) {
this.mFaces = faces
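// invalidate() must run on the UI thread; call setFaces via View.post() from camera threads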
invalidate()
}
}
```
ImageUtil converts the raw planes returned by the camera into NV21:
```
/**
* Author: Sar_Wang
* Date: 2020/5/11 3:40 PM
* Description:
*/
public class ImageUtil {
/**
* Convert Y:U:V == 4:2:2 data to NV21
*
* @param y      Y data
* @param u      U data
* @param v      V data
* @param nv21   output NV21 buffer; must be pre-allocated
* @param stride row stride
* @param height image height
*/
public static void yuv422ToYuv420sp(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
System.arraycopy(y, 0, nv21, 0, y.length);
// Note: computing length as y.length * 3 / 2 risks an out-of-bounds access; use the real plane lengths instead
int length = y.length + u.length / 2 + v.length / 2;
int uIndex = 0, vIndex = 0;
for (int i = stride * height; i < length; i += 2) {
nv21[i] = v[vIndex];
nv21[i + 1] = u[uIndex];
vIndex += 2;
uIndex += 2;
}
}
/**
* Convert Y:U:V == 4:1:1 data to NV21
*
* @param y      Y data
* @param u      U data
* @param v      V data
* @param nv21   output NV21 buffer; must be pre-allocated
* @param stride row stride
* @param height image height
*/
public static void yuv420ToYuv420sp(byte[] y, byte[] u, byte[] v, byte[] nv21, int stride, int height) {
System.arraycopy(y, 0, nv21, 0, y.length);
// Note: computing length as y.length * 3 / 2 risks an out-of-bounds access; use the real plane lengths instead
int length = y.length + u.length + v.length;
int uIndex = 0, vIndex = 0;
for (int i = stride * height; i < length; i += 2) {
nv21[i] = v[vIndex++];
nv21[i + 1] = u[uIndex++];
}
}
}
```
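To see the size math concretely, here is a minimal sketch (the Nv21Sketch wrapper is my own, not from this project) that picks a converter from the relative plane sizes, the same way the preview callback does later in this post:
```
class Nv21Sketch {
    // Choose the converter from the Y:U size ratio, as onPreview does below
    static byte[] planesToNv21(byte[] y, byte[] u, byte[] v, int stride, int height) {
        // NV21 = full Y plane followed by interleaved V/U samples: stride * height * 3 / 2 bytes
        byte[] nv21 = new byte[stride * height * 3 / 2];
        if (y.length / u.length == 2) {
            // U is half the size of Y: treat as 4:2:2
            ImageUtil.yuv422ToYuv420sp(y, u, v, nv21, stride, height);
        } else {
            // U is a quarter of Y: treat as 4:1:1
            ImageUtil.yuv420ToYuv420sp(y, u, v, nv21, stride, height);
        }
        return nv21;
    }
}
```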
Next, the layout for the Activity that drives the camera:
```
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layoutwidth="matchparent"
android:layoutheight="matchparent">
<TextureView
android:id="@+id/textureView"
android:layoutwidth="matchparent"
android:layoutheight="matchparent"/>
<TextView
android:id="@+id/switch_Camera"
android:layout_gravity="end|bottom"
android:layout_marginBottom="90dp"
android:layout_marginEnd="40dp"
android:text="切换摄像头"
android:layoutwidth="wrapcontent"
android:layoutheight="wrapcontent" />
<com.sto.itgspeaker.FaceView
android:id="@+id/faceView"
android:layoutwidth="matchparent"
android:layoutheight="matchparent"/>
</FrameLayout>
public class Camera2Helper {
private static final String TAG = "Camera2Helper";
private Point maxPreviewSize;
private Point minPreviewSize;
public static final String CAMERA_ID_FRONT = "1";
public static final String CAMERA_ID_BACK = "0";
private String mCameraId;
private String specificCameraId;
private Camera2Listener camera2Listener;
private TextureView mTextureView;
private int rotation;
private Point previewViewSize;
private Point specificPreviewSize;
private boolean isMirror;
private Context context;
private boolean mCalibrated;
private boolean mIsVertical = true;
/**
* A {@link CameraCaptureSession } for camera preview.
*/
private CameraCaptureSession mCaptureSession;
/**
* A reference to the opened {@link CameraDevice}.
*/
private CameraDevice mCameraDevice;
private Size mPreviewSize;
private Camera2Helper(Camera2Helper.Builder builder) {
mTextureView = builder.previewDisplayView;
specificCameraId = builder.specificCameraId;
camera2Listener = builder.camera2Listener;
rotation = builder.rotation;
previewViewSize = builder.previewViewSize;
specificPreviewSize = builder.previewSize;
maxPreviewSize = builder.maxPreviewSize;
minPreviewSize = builder.minPreviewSize;
isMirror = builder.isMirror;
context = builder.context;
if (isMirror) {
mTextureView.setScaleX(-1);
}
}
public void setConfiguration(boolean val) {
mIsVertical = val;
}
public void switchCamera() {
if (CAMERA_ID_BACK.equals(mCameraId)) {
specificCameraId = CAMERA_ID_FRONT;
} else if (CAMERA_ID_FRONT.equals(mCameraId)) {
specificCameraId = CAMERA_ID_BACK;
}
}
stop();
start();
}
private int getCameraOri(int rotation, String cameraId) {
int degrees = rotation * 90;
switch (rotation) {
case Surface.ROTATION_0:
degrees = 0;
break;
case Surface.ROTATION_90:
degrees = 90;
break;
case Surface.ROTATION_180:
degrees = 180;
break;
case Surface.ROTATION_270:
degrees = 270;
break;
default:
break;
}
int result;
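// The front camera image is mirrored, so its compensation is inverted.
// Worked example: SENSOR_ORIENTATION = 270 with ROTATION_0 gives (270 + 0) % 360 = 270, then (360 - 270) % 360 = 90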
if (CAMERA_ID_FRONT.equals(cameraId)) {
result = (mSensorOrientation + degrees) % 360;
result = (360 - result) % 360;
} else {
result = (mSensorOrientation - degrees + 360) % 360;
}
Log.i(TAG, "getCameraOri: " + rotation + " " + result + " " + mSensorOrientation);
return result;
}
private final TextureView.SurfaceTextureListener mSurfaceTextureListener
= new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture texture, int width, int height) {
Log.i(TAG, "onSurfaceTextureAvailable: ");
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture texture, int width, int height) {
Log.i(TAG, "onSurfaceTextureSizeChanged: ");
configureTransform(width, height);
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture texture) {
Log.i(TAG, "onSurfaceTextureDestroyed: ");
return true;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture texture) {
}
};
private CameraDevice.StateCallback mDeviceStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(@NonNull CameraDevice cameraDevice) {
Log.i(TAG, "onOpened: ");
// This method is called when the camera is opened. We start camera preview here.
mCameraOpenCloseLock.release();
mCameraDevice = cameraDevice;
createCameraPreviewSession();
if (camera2Listener != null) {
camera2Listener.onCameraOpened(cameraDevice, mCameraId, mPreviewSize, getCameraOri(rotation, mCameraId), isMirror);
}
}
@Override
public void onDisconnected(@NonNull CameraDevice cameraDevice) {
Log.i(TAG, "onDisconnected: ");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (camera2Listener != null) {
camera2Listener.onCameraClosed();
}
}
@Override
public void onError(@NonNull CameraDevice cameraDevice, int error) {
Log.i(TAG, "onError: ");
mCameraOpenCloseLock.release();
cameraDevice.close();
mCameraDevice = null;
if (camera2Listener != null) {
camera2Listener.onCameraError(new Exception("error occurred, code is " + error));
}
}
};
private CameraCaptureSession.StateCallback mCaptureStateCallback = new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigured: ");
// The camera is already closed
if (null == mCameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
mCaptureSession = cameraCaptureSession;
try {
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(),
mCaptureCallBack, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(
@NonNull CameraCaptureSession cameraCaptureSession) {
Log.i(TAG, "onConfigureFailed: ");
if (camera2Listener != null) {
camera2Listener.onCameraError(new Exception("configureFailed"));
}
}
};
private CameraCaptureSession.CaptureCallback mCaptureCallBack = new CameraCaptureSession.CaptureCallback(){
@Override
public void onCaptureCompleted(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
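// Note: CaptureResult.STATISTICS_FACES in this result is only populated when
// CaptureRequest.STATISTICS_FACE_DETECT_MODE has been enabled on the preview request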
if (camera2Listener != null) {
camera2Listener.onHandleFaces(result);
}
}
@Override
public void onCaptureFailed(@NonNull CameraCaptureSession session, @NonNull CaptureRequest request, @NonNull CaptureFailure failure) {
super.onCaptureFailed(session, request, failure);
}
};
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread mBackgroundThread;
/**
* A {@link Handler} for running tasks in the background.
*/
private Handler mBackgroundHandler;
private ImageReader mImageReader;
/**
* {@link CaptureRequest.Builder} for the camera preview
*/
private CaptureRequest.Builder mPreviewRequestBuilder;
/**
* A {@link Semaphore} to prevent the app from exiting before closing the camera.
*/
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
/**
* Orientation of the camera sensor
*/
private int mSensorOrientation;
private Size getBestSupportedSize(List<Size> sizes) {
Size defaultSize = sizes.get(0);
Size[] tempSizes = sizes.toArray(new Size[0]);
Arrays.sort(tempSizes, new Comparator<Size>() {
@Override
public int compare(Size o1, Size o2) {
if (o1.getWidth() > o2.getWidth()) {
return -1;
} else if (o1.getWidth() == o2.getWidth()) {
return o1.getHeight() > o2.getHeight() ? -1 : 1;
} else {
return 1;
}
}
});
sizes = new ArrayList<>(Arrays.asList(tempSizes));
for (int i = sizes.size() - 1; i >= 0; i--) {
if (maxPreviewSize != null) {
if (sizes.get(i).getWidth() > maxPreviewSize.x || sizes.get(i).getHeight() > maxPreviewSize.y) {
sizes.remove(i);
continue;
}
}
if (minPreviewSize != null) {
if (sizes.get(i).getWidth() < minPreviewSize.x || sizes.get(i).getHeight() < minPreviewSize.y) {
sizes.remove(i);
}
}
}
if (sizes.size() == 0) {
String msg = "can not find suitable previewSize, now using default";
if (camera2Listener != null) {
Log.e(TAG, msg);
camera2Listener.onCameraError(new Exception(msg));
}
return defaultSize;
}
Size bestSize = sizes.get(0);
float previewViewRatio;
if (previewViewSize != null) {
previewViewRatio = (float) previewViewSize.x / (float) previewViewSize.y;
} else {
previewViewRatio = (float) bestSize.getWidth() / (float) bestSize.getHeight();
}
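// Normalize the ratio to <= 1 so portrait and landscape dimensions compare consistently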
if (previewViewRatio > 1) {
previewViewRatio = 1 / previewViewRatio;
}
for (Size s : sizes) {
if (specificPreviewSize != null && specificPreviewSize.x == s.getWidth() && specificPreviewSize.y == s.getHeight()) {
return s;
}
if (Math.abs((s.getHeight() / (float) s.getWidth()) - previewViewRatio) < Math.abs(bestSize.getHeight() / (float) bestSize.getWidth() - previewViewRatio)) {
bestSize = s;
}
}
return bestSize;
}
public synchronized void start() {
if (mCameraDevice != null) {
return;
}
startBackgroundThread();
// When the screen is turned off and turned back on, the SurfaceTexture is already
// available, and "onSurfaceTextureAvailable" will not be called. In that case, we can open
// a camera and start preview from here (otherwise, we wait until the surface is ready in
// the SurfaceTextureListener).
if (mTextureView.isAvailable()) {
openCamera();
} else {
mTextureView.setSurfaceTextureListener(mSurfaceTextureListener);
}
}
public synchronized void stop() {
if (mCameraDevice == null) {
return;
}
closeCamera();
stopBackgroundThread();
}
public void release() {
stop();
mTextureView = null;
camera2Listener = null;
context = null;
}
private void setUpCameraOutputs(CameraManager cameraManager) {
try {
if (configCameraParams(cameraManager, specificCameraId)) {
return;
}
for (String cameraId : cameraManager.getCameraIdList()) {
if (configCameraParams(cameraManager, cameraId)) {
return;
}
}
} catch (CameraAccessException e) {
e.printStackTrace();
} catch (NullPointerException e) {
// Currently an NPE is thrown when the Camera2API is used but not supported on the
// device this code runs.
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
}
}
private boolean configCameraParams(CameraManager manager, String cameraId) throws CameraAccessException {
CameraCharacteristics characteristics
= manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(
CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
if (map == null) {
return false;
}
mPreviewSize = getBestSupportedSize(new ArrayList<Size>(Arrays.asList(map.getOutputSizes(SurfaceTexture.class))));
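// YUV_420_888 is a mandatory ImageReader output format on camera2 devices; keep maxImages
// small and always close() acquired Images, otherwise the pipeline stalls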
mImageReader = ImageReader.newInstance(mPreviewSize.getWidth(), mPreviewSize.getHeight(),
ImageFormat.YUV_420_888, 2);
mImageReader.setOnImageAvailableListener(
new OnImageAvailableListenerImpl(), mBackgroundHandler);
mSensorOrientation = characteristics.get(CameraCharacteristics.SENSOR_ORIENTATION);
mCameraId = cameraId;
return true;
}
private void openCamera() {
CameraManager cameraManager = (CameraManager) context.getSystemService(Context.CAMERA_SERVICE);
setUpCameraOutputs(cameraManager);
configureTransform(mTextureView.getWidth(), mTextureView.getHeight());
try {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException("Time out waiting to lock camera opening.");
}
cameraManager.openCamera(mCameraId, mDeviceStateCallback, mBackgroundHandler);
} catch (CameraAccessException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
} catch (InterruptedException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
}
}
/**
* Closes the current {@link CameraDevice}.
*/
private void closeCamera() {
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
if (camera2Listener != null) {
camera2Listener.onCameraClosed();
}
} catch (InterruptedException e) {
if (camera2Listener != null) {
camera2Listener.onCameraError(e);
}
} finally {
mCameraOpenCloseLock.release();
}
}
/**
* Starts a background thread and its {@link Handler}.
*/
private void startBackgroundThread() {
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
/**
* Stops the background thread and its {@link Handler}.
*/
private void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
/**
* Creates a new {@link CameraCaptureSession} for camera preview.
*/
private void createCameraPreviewSession() {
try {
SurfaceTexture texture = mTextureView.getSurfaceTexture();
assert texture != null;
// We configure the size of default buffer to be the size of camera preview we want.
texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());
// This is the output Surface we need to start preview.
Surface surface = new Surface(texture);
// We set up a CaptureRequest.Builder with the output Surface.
mPreviewRequestBuilder
= mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.addTarget(surface);
mPreviewRequestBuilder.addTarget(mImageReader.getSurface());
// Here, we create a CameraCaptureSession for camera preview.
mCameraDevice.createCaptureSession(Arrays.asList(surface, mImageReader.getSurface()),
mCaptureStateCallback, mBackgroundHandler
);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
/**
* Configures the necessary {@link Matrix} transformation to mTextureView.
* This method should be called after the camera preview size is determined in
* setUpCameraOutputs and also the size of mTextureView is fixed.
*
* @param viewWidth The width of mTextureView
* @param viewHeight The height of mTextureView
*/
private void configureTransform(int viewWidth, int viewHeight) {
if (null == mTextureView || null == mPreviewSize) {
return;
}
Matrix matrix = new Matrix();
RectF viewRect = new RectF(0, 0, viewWidth, viewHeight);
RectF bufferRect = new RectF(0, 0, mPreviewSize.getHeight(), mPreviewSize.getWidth());
float centerX = viewRect.centerX();
float centerY = viewRect.centerY();
if (Surface.ROTATION_90 == rotation || Surface.ROTATION_270 == rotation) {
bufferRect.offset(centerX - bufferRect.centerX(), centerY - bufferRect.centerY());
matrix.setRectToRect(viewRect, bufferRect, Matrix.ScaleToFit.FILL);
float scale = Math.max(
(float) viewHeight / mPreviewSize.getHeight(),
(float) viewWidth / mPreviewSize.getWidth());
matrix.postScale(scale, scale, centerX, centerY);
matrix.postRotate((90 * (rotation - 2)) % 360, centerX, centerY);
} else if (Surface.ROTATION_180 == rotation) {
matrix.postRotate(180, centerX, centerY);
}
Log.i(TAG, "configureTransform: " + getCameraOri(rotation, mCameraId) + " " + rotation * 90);
mTextureView.setTransform(matrix);
}
public static final class Builder {
/**
* View on which the preview is displayed; currently only TextureView is supported
*/
private TextureView previewDisplayView;
/**
* Whether to mirror the preview; only supported for TextureView
*/
private boolean isMirror;
/**
* The specific camera ID to open
*/
private String specificCameraId;
/**
* Event callback
*/
private Camera2Listener camera2Listener;
/**
* Screen width and height, used when choosing the best preview ratio
*/
private Point previewViewSize;
/**
* Just pass in the value of getWindowManager().getDefaultDisplay().getRotation()
*/
private int rotation;
/**
* Requested preview size; if the system supports it, the preview uses this size
*/
private Point previewSize;
/**
* Maximum preview resolution
*/
private Point maxPreviewSize;
/**
* Minimum preview resolution
*/
private Point minPreviewSize;
/**
* Context, used to obtain the CameraManager
*/
private Context context;
public Builder() {
}
public Builder previewOn(TextureView val) {
previewDisplayView = val;
return this;
}
public Builder isMirror(boolean val) {
isMirror = val;
return this;
}
public Builder previewSize(Point val) {
previewSize = val;
return this;
}
public Builder maxPreviewSize(Point val) {
maxPreviewSize = val;
return this;
}
public Builder minPreviewSize(Point val) {
minPreviewSize = val;
return this;
}
public Builder previewViewSize(Point val) {
previewViewSize = val;
return this;
}
public Builder rotation(int val) {
rotation = val;
return this;
}
public Builder specificCameraId(String val) {
specificCameraId = val;
return this;
}
public Builder cameraListener(Camera2Listener val) {
camera2Listener = val;
return this;
}
public Builder context(Context val) {
context = val;
return this;
}
public Camera2Helper build() {
if (previewViewSize == null) {
Log.e(TAG, "previewViewSize is null, now use default previewSize");
}
if (camera2Listener == null) {
Log.e(TAG, "camera2Listener is null, callback will not be called");
}
if (previewDisplayView == null) {
throw new NullPointerException("you must preview on a textureView or a surfaceView");
}
if (maxPreviewSize != null && minPreviewSize != null) {
if (maxPreviewSize.x < minPreviewSize.x || maxPreviewSize.y < minPreviewSize.y) {
throw new IllegalArgumentException("maxPreviewSize must greater than minPreviewSize");
}
}
return new Camera2Helper(this);
}
}
private class OnImageAvailableListenerImpl implements ImageReader.OnImageAvailableListener {
private byte[] y;
private byte[] u;
private byte[] v;
private ReentrantLock lock = new ReentrantLock();
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireNextImage();
// Depending on the planes' pixel stride, the copied Y:U:V sizes come out as 4:2:2 or 4:1:1
if (camera2Listener != null && image.getFormat() == ImageFormat.YUV_420_888) {
Image.Plane[] planes = image.getPlanes();
// Lock to ensure y, u and v come from the same Image
lock.lock();
// Reuse the same byte arrays to reduce GC churn
if (y == null) {
y = new byte[planes[0].getBuffer().limit() - planes[0].getBuffer().position()];
u = new byte[planes[1].getBuffer().limit() - planes[1].getBuffer().position()];
v = new byte[planes[2].getBuffer().limit() - planes[2].getBuffer().position()];
}
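// If the plane sizes change (e.g., after switching preview size), remaining() no longer matches and the frame is skipped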
if (image.getPlanes()[0].getBuffer().remaining() == y.length) {
planes[0].getBuffer().get(y);
planes[1].getBuffer().get(u);
planes[2].getBuffer().get(v);
camera2Listener.onPreview(y, u, v, mPreviewSize, planes[0].getRowStride());
}
lock.unlock();
}
image.close();
}
}
}
```

In the Activity, wait for the first layout pass, then check permissions before initializing the camera:
```
texture_preview.viewTreeObserver.addOnGlobalLayoutListener(this)
override fun onGlobalLayout() {
texture_preview.viewTreeObserver.removeOnGlobalLayoutListener(this)
if (!checkPermissions(NEEDED_PERMISSIONS)) {
ActivityCompat.requestPermissions(this, NEEDED_PERMISSIONS, ACTION_REQUEST_PERMISSIONS)
} else {
initCamera()
}
}
```

initCamera() then builds a Camera2Helper and starts the preview:
```
camera2Helper = Camera2Helper.Builder()
.cameraListener(this)
.maxPreviewSize(Point(1920, 1080))
.minPreviewSize(Point(1280, 720))
.specificCameraId(CAMERA_ID)
.context(applicationContext)
.previewOn(texture_preview)
.previewViewSize(Point(texture_preview.width,
texture_preview.height))
.rotation(windowManager.defaultDisplay.rotation)
.build()
camera2Helper.start()
```

The Camera2Listener callbacks deliver camera events and preview data. onCameraOpened records the display orientation and mirror state:
```
override fun onCameraOpened(
cameraDevice: CameraDevice?,
cameraId: String?,
previewSize: Size?,
displayOrientation: Int,
isMirror: Boolean
) {
Log.i("Wzz", "onCameraOpened: previewSize = ${previewSize?.width} x ${previewSize?.height}")
mDisplayOrientation = displayOrientation
isMirrorPreview = isMirror
openedCameraId = cameraId
}
```

In onPreview, merge the returned Y/U/V arrays into an NV21 buffer and wrap it as a YuvImage:
```
if (!this::nv21.isInitialized) {
// NV21 needs stride * height * 3 / 2 bytes
nv21 = ByteArray(stride * previewSize!!.height * 3 / 2)
}
// The returned data is YUV 4:2:2
if (y!!.size / u!!.size == 2) {
ImageUtil.yuv422ToYuv420sp(y, u, v, nv21, stride, previewSize!!.height)
} else if (y.size / u.size == 4) {
ImageUtil.yuv420ToYuv420sp(y, u, v, nv21, stride, previewSize!!.height)
}
val yuvImage = YuvImage(nv21, ImageFormat.NV21, stride, previewSize!!.height, null)
```

The NV21-to-Bitmap conversion itself, via JPEG compression (a Java snippet; _data is the NV21 byte array from the step above):
```
YuvImage yuvimage = new YuvImage(_data, ImageFormat.NV21,
previewSize.getWidth(), previewSize.getHeight(), null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
// Without this compress step, baos stays empty and decodeStream returns null
yuvimage.compressToJpeg(new Rect(0, 0, previewSize.getWidth(), previewSize.getHeight()), 100, baos);
BitmapFactory.Options bfo = new BitmapFactory.Options();
// FaceDetector requires an RGB_565 bitmap
bfo.inPreferredConfig = Bitmap.Config.RGB_565;
Bitmap _currentFrame = BitmapFactory.decodeStream(new ByteArrayInputStream(baos.toByteArray()), null, bfo);
Matrix matrix = new Matrix();
if (mIsVertical) {
matrix.postRotate(90);
matrix.preScale(-1, 1); // Android's built-in face detection needs the head upright, so rotate (and un-mirror) the image
// We rotate the same Bitmap
_currentFrame = Bitmap.createBitmap(_currentFrame, 0, 0,
previewSize.getWidth(), previewSize.getHeight(), matrix, false);
}
```

With the upright bitmap ready, you can run the detector and handle the resulting faces however your app needs.
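A minimal sketch of that step, assuming a MAX_FACES constant and the rotated RGB_565 bitmap from above (the DetectSketch wrapper and the box-sizing factors are my own, not from the original post):
```
import android.graphics.Bitmap;
import android.graphics.PointF;
import android.graphics.RectF;
import android.media.FaceDetector;
import java.util.ArrayList;

class DetectSketch {
    private static final int MAX_FACES = 5; // assumed upper bound

    // Returns rough face boxes in bitmap coordinates. findFaces() requires an
    // RGB_565 bitmap with an even width.
    static ArrayList<RectF> detect(Bitmap frame) {
        FaceDetector detector = new FaceDetector(frame.getWidth(), frame.getHeight(), MAX_FACES);
        FaceDetector.Face[] faces = new FaceDetector.Face[MAX_FACES];
        int found = detector.findFaces(frame, faces);
        ArrayList<RectF> boxes = new ArrayList<>();
        PointF mid = new PointF();
        for (int i = 0; i < found; i++) {
            faces[i].getMidPoint(mid);
            float eyes = faces[i].eyesDistance();
            // Heuristic box around the eye midpoint; tune the factors to taste
            boxes.add(new RectF(mid.x - eyes, mid.y - eyes, mid.x + eyes, mid.y + eyes * 1.5f));
        }
        return boxes;
    }
}
```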
As for drawing the face boxes: scale the detected rectangles into view coordinates and hand them to the FaceView.
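A rough sketch of that mapping (it assumes the view and bitmap aspect ratios already match, which you may need to refine; the DrawSketch helper is illustrative):
```
import android.graphics.RectF;
import java.util.ArrayList;

class DrawSketch {
    // faceView is the FaceView from the layout; bmpW/bmpH are the size of the
    // bitmap that was fed to the detector
    static void show(FaceView faceView, ArrayList<RectF> boxes, int bmpW, int bmpH) {
        float sx = faceView.getWidth() / (float) bmpW;
        float sy = faceView.getHeight() / (float) bmpH;
        ArrayList<RectF> mapped = new ArrayList<>();
        for (RectF r : boxes) {
            mapped.add(new RectF(r.left * sx, r.top * sy, r.right * sx, r.bottom * sy));
        }
        // setFaces() calls invalidate(), so hop to the UI thread
        faceView.post(() -> faceView.setFaces(mapped));
    }
}
```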
From there you can tweak the parameters and experiment.
Follow-up posts will cover an OpenCV-based approach to face recognition:
1. Converting an OpenCV 2D face model to 3D
2. Face augmentation with ARCore
If you have any questions, discuss in the comments or contact me directly.