使用前置摄像头持续跟踪面部

Posted

技术标签:

【中文标题】使用前置摄像头持续跟踪面部【英文标题】:Keep tracking faces by using the front camera 【发布时间】:2014-10-10 12:09:06 【问题描述】:

我尝试制作一个应用程序来跟踪相机检测到的人脸数量,但是通过logcat,我发现程序从未调用onPreviewFrame方法,我不知道原因。

public class MainActivity extends Activity 
Context context = null;
BackGround bg=new BackGround();
FDR fdr = new FDR();

@Override
protected void onCreate(Bundle savedInstanceState) 
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    context=this.getApplicationContext();
    bg.execute();



private class BackGround extends  AsyncTask <Void, Integer, Void> 

    @Override
    protected Void doInBackground(Void...params)   
        while(!isCancelled())  
            fdr.startFaceDetection(context);
            Log.d("result", String.valueOf(fdr.getnumberOfFaceDetected()));
             try 
                    Thread.sleep(1000);
                 catch (InterruptedException e) 
                    // TODO Auto-generated catch block
                    e.printStackTrace();
                
        
        fdr.FreeCamera();
        return null;
           
    @Override
    protected void onProgressUpdate( Integer ...progress )        
    

@Override
public void onDestroy()
    super.onDestroy();
    bg.cancel(true);
    fdr.FreeCamera();
   
 

以下类的代码基本上是来自另一个网站的示例代码。

public class FDR       
protected Camera mCameraDevice = null;
private long mScanBeginTime = 0;   
private long mScanEndTime = 0;   
private long mSpecPreviewTime = 0;  
private long mSpecStopTime = 0;
private long mSpecCameraTime = 0;
private static final String TAG = null;
private int orientionOfCamera ;   
private Context con=null;
private int numberOfFaceDetected;    

public void startFaceDetection(Context cont)   
    con=cont;
    try   
            FreeCamera();           
           mCameraDevice = Camera.open(1); 
            if (mCameraDevice != null)  
                Log.i(TAG, "open cameradevice success! ");  
         catch (Exception e)            
            mCameraDevice = null;  
            Log.w(TAG, "open cameraFail");  

            return;  
       

    Log.i(TAG, "startFaceDetection");  
    Camera.Parameters parameters = mCameraDevice.getParameters();  
    setCameraDisplayOrientation(1,mCameraDevice);              

    mCameraDevice.setPreviewCallback(new PreviewCallback()  
        public void onPreviewFrame(byte[] data, Camera camera)  
            mScanEndTime = System.currentTimeMillis();   
            mSpecPreviewTime = mScanEndTime - mScanBeginTime;    
            Log.i(TAG, "onPreviewFrame and mSpecPreviewTime = " + String.valueOf(mSpecPreviewTime));  
            Camera.Size localSize = camera.getParameters().getPreviewSize(); 
            YuvImage localYuvImage = new YuvImage(data, 17, localSize.width, localSize.height, null);  
            ByteArrayOutputStream localByteArrayOutputStream = new ByteArrayOutputStream();  
            localYuvImage.compressToJpeg(new Rect(0, 0, localSize.width, localSize.height), 80, localByteArrayOutputStream);   
            byte[] arrayOfByte = localByteArrayOutputStream.toByteArray();  
            FreeCamera();   
            StoreByteImage(arrayOfByte);  
          
    );  

    mCameraDevice.startPreview();   
    Log.i(TAG,"StartPreviewed");  
    mScanBeginTime = System.currentTimeMillis();
  


public void setCameraDisplayOrientation(int paramInt, Camera paramCamera)  
    CameraInfo info = new CameraInfo();  
    Camera.getCameraInfo(paramInt, info);  
    int rotation = ((WindowManager)con.getSystemService("window")).getDefaultDisplay().getRotation();  
    int degrees = 0;  
    Log.i(TAG,"getRotation's rotation is " + String.valueOf(rotation));  
    switch (rotation)   
        case Surface.ROTATION_0: degrees = 0; break;  
        case Surface.ROTATION_90: degrees = 90; break;  
        case Surface.ROTATION_180: degrees = 180; break;  
        case Surface.ROTATION_270: degrees = 270; break;  
      

    orientionOfCamera = info.orientation;    
    int result;  
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT)   
        result = (info.orientation + degrees) % 360;  
        result = (360 - result) % 360;  // compensate the mirror  
     else   // back-facing  
        result = (info.orientation - degrees + 360) % 360;  
      
    paramCamera.setDisplayOrientation(result);    
  
public void StoreByteImage(byte[] paramArrayOfByte)  
    mSpecStopTime = System.currentTimeMillis();  
    mSpecCameraTime = mSpecStopTime - mScanBeginTime;  

    Log.i(TAG, "StoreByteImage and mSpecCameraTime is " + String.valueOf(mSpecCameraTime));  

    BitmapFactory.Options localOptions = new BitmapFactory.Options();  
        Bitmap localBitmap1 = BitmapFactory.decodeByteArray(paramArrayOfByte, 0, paramArrayOfByte.length, localOptions);  
        int i = localBitmap1.getWidth();  
        int j = localBitmap1.getHeight(); 
        Matrix localMatrix = new Matrix();  
        //int k = cameraResOr;  
        Bitmap localBitmap2 = null;  
        FaceDetector localFaceDetector = null;  

    switch(orientionOfCamera)  
        case 0:  
            localFaceDetector = new FaceDetector(i, j, 1);  
                    localMatrix.postRotate(0.0F, i / 2, j / 2);  
                    localBitmap2 = Bitmap.createBitmap(i, j, Bitmap.Config.RGB_565);  
            break;  
        case 90:  
            localFaceDetector = new FaceDetector(j, i, 1);   
                    localMatrix.postRotate(-270.0F, j / 2, i / 2);  
                    localBitmap2 = Bitmap.createBitmap(i, j, Bitmap.Config.RGB_565);  
            break;                        
        case 180:  
            localFaceDetector = new FaceDetector(i, j, 1);  
                    localMatrix.postRotate(-180.0F, i / 2, j / 2);  
                    localBitmap2 = Bitmap.createBitmap(i, j, Bitmap.Config.RGB_565);  
            break;  
        case 270:  
            localFaceDetector = new FaceDetector(j, i, 1);  
                    localMatrix.postRotate(-90.0F, j / 2, i / 2);  
                    localBitmap2 = Bitmap.createBitmap(j, i, Bitmap.Config.RGB_565); 
            break;  
      

    FaceDetector.Face[] arrayOfFace = new FaceDetector.Face[1];  
        Paint localPaint1 = new Paint();  
        Paint localPaint2 = new Paint();  
    localPaint1.setDither(true);  
        localPaint2.setColor(-65536);  
        localPaint2.setStyle(Paint.Style.STROKE);  
        localPaint2.setStrokeWidth(2.0F);  
        Canvas localCanvas = new Canvas();  
        localCanvas.setBitmap(localBitmap2);  
        localCanvas.setMatrix(localMatrix);  
        localCanvas.drawBitmap(localBitmap1, 0.0F, 0.0F, localPaint1); 

    numberOfFaceDetected = localFaceDetector.findFaces(localBitmap2, arrayOfFace); 
        localBitmap2.recycle();  
        localBitmap1.recycle();   
  

public int getnumberOfFaceDetected()
    return numberOfFaceDetected;

public void FreeCamera() 
    if (mCameraDevice != null) 
        // Call stopPreview() to stop updating the preview surface.
        // Important: Call release() to release the camera for use by other
        // applications. Applications should release the camera immediately
        // during onPause() and re-open() it during onResume()).
        mCameraDevice.stopPreview();
        mCameraDevice.release();    
        mCameraDevice = null;
    

【问题讨论】:

【参考方案1】:

    要接收回调,必须先setPreviewDisplay()

    考虑在循环之前打开相机一次,并使用 setOneShotPreviewCallback() 运行人脸检测

    相比先把预览帧压缩成 JPEG 再解码的做法，存在高效得多的方式可以把 YUV 预览帧直接转换为 RGB 位图。

    android 相机具有相当快速和强大的内置人脸检测器,您可以通过调用 startFaceDetection() 启动它,然后您将收到作为回调的结果。

【讨论】:

以上是关于使用前置摄像头持续跟踪面部的主要内容,如果未能解决你的问题,请参考以下文章

ARKit3 如何使用 TrueDepth 相机进行人脸跟踪和其他人的面部网格?

使用前置摄像头录制视频时分别获取视频和音频缓冲区

htc vive 前置摄像头怎么打开

中国手机持续创新,全面屏再进一步,引领手机创新

opencv for android中使用照相机前置摄像头是照片是颠倒的?

android Camera如何判断当前使用的摄像头是前置还是后置