Android: using OpenCV VideoCapture

I'm using a service that is started when the Android device boots, because I don't need a visible activity. So far this works fine. But now I'm trying to open the camera (in MyService.onStart) and do some basic image processing. I understand that the default Android camera class requires a surface for the video preview, which is why I want to use OpenCV's VideoCapture instead.
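
For context, this is roughly how the service gets started at boot (my own illustration, not part of the original code; it assumes the RECEIVE_BOOT_COMPLETED permission and a matching <receiver> entry in the manifest):

package com.example.boot;

import android.content.BroadcastReceiver;
import android.content.Context;
import android.content.Intent;

// Hypothetical receiver: starts MyService once the device has finished booting.
public class BootReceiver extends BroadcastReceiver {
    @Override
    public void onReceive(Context context, Intent intent) {
        if (Intent.ACTION_BOOT_COMPLETED.equals(intent.getAction())) {
            context.startService(new Intent(context, MyService.class));
        }
    }
}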

But I get this error:

No implementation found for native Lorg/opencv/highgui/VideoCapture;.n_VideoCapture:(I)J

I suspect this is because I'm missing the following line, which the OpenCV samples call from their main activity. The question is how to integrate this into my service, and when to initialize the VideoCapture member.

OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
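
Here is a minimal sketch of how I imagine this could look (my assumption, modeled on the OpenCV activity samples): since a Service is a Context, initAsync can be called from it directly, and all VideoCapture work is deferred until the loader callback reports success, because the native library is not available before that:

import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;

// Hypothetical integration: defer camera setup until the native libs are loaded.
private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            // Native library is loaded now; safe to create VideoCapture.
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
        } else {
            super.onManagerConnected(status);
        }
    }
};

@Override
public void onStart(Intent intent, int startId) {
    // Do not touch VideoCapture here; only kick off the async loader.
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_5, this, mLoaderCallback);
}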

Here is my code so far. Most of the OpenCV-related code is taken from OpenCV's NativeCameraView and CameraBridgeViewBase.

package com.example.boot;

import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;

import android.app.Service;
import android.content.Intent;
import android.graphics.Bitmap;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;

public final class MyService extends Service
{
    private static final String TAG = "MyService";
    private boolean mStopThread;
    private Thread mThread;
    private VideoCapture mCamera;
    private int mFrameWidth;
    private int mFrameHeight;
    private int mCameraIndex = -1;
    private Bitmap mCacheBitmap;

    @Override
    public IBinder onBind(Intent intent) {
        return null;
    }

    @Override
    public void onDestroy() {
        // Stop the worker thread and release the camera before the service dies
        this.disconnectCamera();

        Toast.makeText(this, "service stopped", Toast.LENGTH_LONG).show();
        Log.d(TAG, "onDestroy");
        super.onDestroy();
    }

    @Override
    public void onStart(Intent intent, int startid)
    {           
        Log.d(TAG, "service.onStart: begin");

        try
        {
            if (!connectCamera(640, 480))
                Log.e(TAG, "Could not connect camera");
            else
                Log.d(TAG, "Camera successfully connected");
        }
        catch(Exception e)
        {
            Log.e(TAG, "MyServer.connectCamera throws an exception: " + e.getMessage());
        }

        Toast.makeText(this, "service started", Toast.LENGTH_LONG).show();
        Log.d(TAG, "service.onStart: end");
    }

    private boolean connectCamera(int width, int height) {
        /* First step - initialize camera connection */
        if (!initializeCamera(width, height))
            return false;

        /* now we can start update thread */
        mThread = new Thread(new CameraWorker());
        mThread.start();

        return true;
    }

    private boolean initializeCamera(int width, int height) {
        synchronized (this) {

            if (mCameraIndex == -1)
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
            else
                mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID + mCameraIndex);

            // the VideoCapture constructor never returns null in Java;
            // isOpened() is the check that actually matters
            if (!mCamera.isOpened())
                return false;

            //java.util.List<Size> sizes = mCamera.getSupportedPreviewSizes();

            /* Use the requested frame size (there is no preview surface to fit in a service) */
            Size frameSize = new Size(width, height);

            mFrameWidth = (int)frameSize.width;
            mFrameHeight = (int)frameSize.height;

            AllocateCache();

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameSize.width);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameSize.height);
        }

        Log.i(TAG, "Selected camera frame size = (" + mFrameWidth + ", " + mFrameHeight + ")");

        return true;
    }

    protected void AllocateCache()
    {
        mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
    }

    private void releaseCamera() {
        synchronized (this) {
            if (mCamera != null) {
                mCamera.release();
            }
        }
    }

    private void disconnectCamera() {
        /* 1. Stop the thread that updates the frames
         * 2. Stop the camera and release it
         */
        try {
            mStopThread = true;
            mThread.join();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } finally {
            mThread = null;
            mStopThread = false;
        }

        /* Now release camera */
        releaseCamera();
    }

    protected void deliverAndDrawFrame(NativeCameraFrame frame)
    {
        // There is no surface to draw to in a service, so this is where the
        // actual image processing would go; for now the frame is only
        // converted to a Bitmap.
        Mat modified = frame.rgba();

        if (modified != null) {
            try {
                Utils.matToBitmap(modified, mCacheBitmap);
            } catch(Exception e) {
                Log.e(TAG, "Mat type: " + modified);
                Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
                Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            }
        }
    }

    private class NativeCameraFrame 
    {
        public Mat rgba() {
            mCapture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
            return mRgba;
        }

        public Mat gray() {
            mCapture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
            return mGray;
        }

        public NativeCameraFrame(VideoCapture capture) {
            mCapture = capture;
            mGray = new Mat();
            mRgba = new Mat();
        }

        private VideoCapture mCapture;
        private Mat mRgba;
        private Mat mGray;
    }

    private class CameraWorker implements Runnable 
    {
        public void run() 
        {
            do 
            {
                if (!mCamera.grab()) {
                    Log.e(TAG, "Camera frame grab failed");
                    break;
                }

                deliverAndDrawFrame(new NativeCameraFrame(mCamera));

            } while (!mStopThread);
        }
    }
}
