Android:在服务中使用 OpenCV VideoCapture
Posted
技术标签:
【中文标题】Android:在服务中使用 OpenCV VideoCapture【英文标题】:Android: using OpenCV VideoCapture in service 【发布时间】:2013-10-27 00:57:38 【问题描述】:我正在使用在 android 设备启动时启动的服务。这是因为我不需要可见的活动。到目前为止工作正常。但现在我正在尝试打开相机(在 MyService.onStart 中)并进行一些基本的图像处理。我知道默认的 Android 相机类需要一个用于视频预览的表面。这就是我想使用 OpenCV 的 VideoCapture 的原因。
但我收到此错误:
没有找到原生实现（UnsatisfiedLinkError）：Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J
我想知道这是否是因为我没有使用主 Activity 的 OpenCV 示例中使用的以下行。问题是,如何将它集成到我的服务中,以及何时初始化 VideoCapture 成员。
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
到目前为止,这是我的代码。大部分OpenCV代码取自OpenCV的NativeCameraView和CameraBridgeViewBase
package com.example.boot;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
public final class MyService extends Service
private static final String TAG = "MyService";
private boolean mStopThread;
private Thread mThread;
private VideoCapture mCamera;
private int mFrameWidth;
private int mFrameHeight;
private int mCameraIndex = -1;
private Bitmap mCacheBitmap;
@Override
public IBinder onBind(Intent intent)
return null;
public void onDestroy()
this.disconnectCamera();
Toast.makeText(this, "service stopped", Toast.LENGTH_LONG).show();
Log.d(TAG, "onDestroy");
@Override
public void onStart(Intent intent, int startid)
Log.d(TAG, "service.onStart: begin");
try
if (!connectCamera(640, 480))
Log.e(TAG, "Could not connect camera");
else
Log.d(TAG, "Camera successfully connected");
catch(Exception e)
Log.e(TAG, "MyServer.connectCamera throws an exception: " + e.getMessage());
Toast.makeText(this, "service started", Toast.LENGTH_LONG).show();
Log.d(TAG, "service.onStart: end");
private boolean connectCamera(int width, int height)
/* First step - initialize camera connection */
if (!initializeCamera(width, height))
return false;
/* now we can start update thread */
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
private boolean initializeCamera(int width, int height)
synchronized (this)
if (mCameraIndex == -1)
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
else
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);
if (mCamera == null)
return false;
if (mCamera.isOpened() == false)
return false;
//java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = new Size(width, height);
mFrameWidth = (int)frameSize.width;
mFrameHeight = (int)frameSize.height;
AllocateCache();
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");
return true;
protected void AllocateCache()
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
private void releaseCamera()
synchronized (this)
if (mCamera != null)
mCamera.release();
private void disconnectCamera()
/* 1. We need to stop thread which updating the frames
* 2. Stop camera and release it
*/
try
mStopThread = true;
mThread.join();
catch (InterruptedException e)
e.printStackTrace();
finally
mThread = null;
mStopThread = false;
/* Now release camera */
releaseCamera();
protected void deliverAndDrawFrame(NativeCameraFrame frame)
Mat modified = frame.rgba();
boolean bmpValid = true;
if (modified != null)
try
Utils.matToBitmap(modified, mCacheBitmap);
catch(Exception e)
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
private class NativeCameraFrame
public Mat rgba()
mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
return mRgba;
public Mat gray()
mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
return mGray;
public NativeCameraFrame(VideoCapture capture)
mCapture = capture;
mGray = new Mat();
mRgba = new Mat();
private VideoCapture mCapture;
private Mat mRgba;
private Mat mGray;
;
private class CameraWorker implements Runnable
public void run()
do
if (!mCamera.grab())
Log.e(TAG, "Camera frame grab failed");
break;
deliverAndDrawFrame(new NativeCameraFrame(mCamera));
while (!mStopThread);
【问题讨论】:
【参考方案1】：你提到的那一行（`OpenCVLoader.initAsync`）实际上是用来异步加载 OpenCV 管理器（OpenCV Manager）的。这应该是你做的第一件事，因此它应该放在 `onStart()` 的开头执行，并且只有在其回调成功后才能创建 `VideoCapture`。
【讨论】:
是的，我会试试的。我发现 this article 也这么说。 这样做也没有用：以这种方式加载 OpenCV 本身是成功的，但 OpenCV 内部会抛出"无法连接到相机服务"的异常。我最终通过创建一个虚拟的 EGL SurfaceTexture 并将该纹理设置为相机的预览目标，在服务中使用了默认的 Android 相机实现。有关详细信息，请参阅 this article。
以上是关于 Android：在服务中使用 OpenCV VideoCapture 的主要内容，如果未能解决你的问题，请参考以下文章
Android:在服务中使用 OpenCV VideoCapture
如何在 android 中提高 OpenCV 人脸检测性能?