My SurfaceView flashes once and then disappears when I start it together with the camera view

I am currently working on eye-tracking methods, and as my application's output I need a chart that traces the movement of the eye center. Using Fabian Timm's algorithm I have successfully implemented the eye-center localization. But when I try to add the SurfaceView to my layout, it flashes for a second and then suddenly disappears. My Java code and .xml file are given below.

package org.opencv.samples.facedetect; 

import org.opencv.android.BaseLoaderCallback; 
import org.opencv.android.CameraBridgeViewBase; 
import org.opencv.android.CameraBridgeViewBase.CvCameraViewFrame; 
import org.opencv.android.CameraBridgeViewBase.CvCameraViewListener2; 
import org.opencv.android.LoaderCallbackInterface; 
import org.opencv.android.OpenCVLoader; 
import org.opencv.core.*; 
import org.opencv.objdetect.CascadeClassifier; 
import org.opencv.imgproc.Imgproc; 

import java.io.File; 
import java.io.FileOutputStream; 
import java.io.IOException; 
import java.io.InputStream; 
import java.util.ArrayList; 
import java.util.List; 

import android.os.Bundle; 
import android.app.Activity; 
import android.content.Context; 
import android.graphics.Canvas; 
import android.graphics.Color; 
import android.graphics.Paint; 
import android.graphics.Path; 
import android.util.Log; 
import android.view.Display; 
import android.view.SurfaceHolder; 
import android.view.SurfaceView; 
import android.view.View; 
import android.view.View.OnTouchListener; 
import android.view.WindowManager; 
import android.widget.Button; 

public class EyeTrackingActivity extends Activity implements CvCameraViewListener2, SurfaceHolder.Callback { 

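// JNI method from the native library loaded below ("example"): takes the native 
// address of the face-ROI Mat plus an {x, y, width, height} eye region, and 
// returns the located eye center as {x, y}. 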
public native int[] findEyeCenter(long mFace, int[] mEye); 


private static final Scalar  FACE_RECT_COLOR  = new Scalar(255, 255, 255, 255); 
private Mat      mRgba; 
private Mat      mGray; 
private Mat      mGrayNew; 
private Mat      mretVal; 
private Mat      scaledMatrix; 
private Mat      tempMatrix; 
private Mat      invertcolormatrix; 

private File     mCascadeFile; 
private CascadeClassifier  face_cascade; 
private CameraBridgeViewBase mOpenCvCameraView; 

private float     mRelativeFaceSize = 0.5f; 
private int      mAbsoluteFaceSize = 0; 

int leftEyePoint [] = new int[2]; 
int rightEyePoint [] = new int[2]; 

Point[] calibrationArray = new Point[4]; 

int screen_width, screen_height; 
static double scale_factor; 
Point leftPupil, rightPupil; 

private static final String TAG = "OCVSample::NDK"; 

private BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) { 
    @Override 
    public void onManagerConnected(int status) { 
     switch (status) { 
      case LoaderCallbackInterface.SUCCESS: 
      { 
       System.loadLibrary("example"); 
       Log.i(TAG, "OpenCV loaded successfully"); 

       final InputStream is; 
       FileOutputStream os; 

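        // Copy the Haar cascade from res/raw into app-private storage, since 
        // CascadeClassifier can only load a classifier from a real file path. 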
       try { 
        is = getResources().openRawResource(R.raw.haarcascade_frontalface_alt); 
        File cascadeDir = getDir("cascade", Context.MODE_PRIVATE); 
        mCascadeFile = new File(cascadeDir, "haarcascade_frontalface_alt.xml"); 

        os = new FileOutputStream(mCascadeFile); 

        byte[] buffer = new byte[4096]; 
        int bytesRead; 
        while ((bytesRead = is.read(buffer)) != -1) { 
         os.write(buffer, 0, bytesRead); 
        } 

        is.close(); 
        os.close(); 

        face_cascade = new CascadeClassifier(mCascadeFile.getAbsolutePath()); 
        if (face_cascade.empty()) { 
         Log.e(TAG, "Failed to load cascade classifier"); 
         face_cascade = null; 
        } else 
         Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath()); 

        cascadeDir.delete(); 
       } catch (IOException e) { 
        Log.i(TAG, "face cascade not found"); 
       } 

       mOpenCvCameraView.enableView(); 
      } break; 
      default: 
      { 
       super.onManagerConnected(status); 
      } break; 
     } 
    } 
}; 

public EyeTrackingActivity() { 
    Log.i(TAG, "Instantiated new " + this.getClass());} 


@Override 
protected void onCreate(Bundle savedInstanceState) { 
    Log.i(TAG, "called onCreate"); 
    super.onCreate(savedInstanceState); 
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON); 

    setContentView(R.layout.face_detect_surface_view); 

    Display display = getWindowManager().getDefaultDisplay(); 
    android.graphics.Point size = new android.graphics.Point(); 
    display.getSize(size); 
    screen_width = size.x; 
    screen_height = size.y; 
    Log.i(TAG, "W: " + String.valueOf(screen_width) + " - H: " + String.valueOf(screen_height)); 

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view); 


    mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE); 
    mOpenCvCameraView.setCameraIndex(CameraBridgeViewBase.CAMERA_ID_FRONT); 
    mOpenCvCameraView.enableFpsMeter(); 
    mOpenCvCameraView.setCvCameraViewListener(this); 


    SurfaceView surfaceView = (SurfaceView)findViewById(R.id.surface_view); 
    surfaceView.getHolder().addCallback(this); 

} 

@Override 
public void onPause() 
{ 
    super.onPause(); 
    if (mOpenCvCameraView != null) 
     mOpenCvCameraView.disableView(); 
} 

@Override 
public void onResume() 
{ 
    super.onResume(); 
    OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_9, this, mLoaderCallback); 
} 

public void onDestroy() { 
    super.onDestroy(); 
    if (mOpenCvCameraView != null) 
     mOpenCvCameraView.disableView(); 
} 

public void onCameraViewStarted(int width, int height) { 
    mRgba = new Mat(); 
    mGray = new Mat(); 
    mGrayNew = new Mat(); 
    scaledMatrix = new Mat(); 
    tempMatrix = new Mat(); 
    invertcolormatrix= new Mat(); 

} 

public void onCameraViewStopped() { 
    mGray.release(); 
    mGrayNew.release(); 
    scaledMatrix.release(); 
    tempMatrix.release(); 
    invertcolormatrix.release(); 
} 

public Mat onCameraFrame(CvCameraViewFrame inputFrame) { 
    mGray = inputFrame.gray(); 


    MatOfPoint pointsMat = new MatOfPoint(); 

    if (mAbsoluteFaceSize == 0) { 
     int height = mGray.rows(); 
     if (Math.round(height * mRelativeFaceSize) > 0) { 
      mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize); 
     } 
    } 

    MatOfRect faces = new MatOfRect(); 

    if (face_cascade != null) 
     face_cascade.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE 
        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size()); 

    Rect[] facesArray = faces.toArray(); 
    if (facesArray.length < 1) 
     return mGray; // keep the preview updating even when no face is detected 
    for (int i = 0; i < facesArray.length; i++){ 
     Core.rectangle(mGray, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3); 

     /*scale_factor = screen_width/(double)facesArray[0].width; 

     facesArray[0].height = (int) (screen_height/scale_factor); 

     facesArray[0].y += 50; 


     scaledMatrix = mGray.submat(facesArray[0]); 

     Imgproc.resize(scaledMatrix, tempMatrix, new Size(screen_width,screen_height)); 
     Rect qwer = new Rect(0,0,tempMatrix.width(), tempMatrix.height());*/ 
     findEyes(mGray, facesArray[0]); 

    } 

    return mGray; 

} 

private Mat findEyes(Mat frame_gray, Rect face) { 

    Mat faceROI = frame_gray.submat(face); 

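     // The eye search regions are fixed fractions of the detected face rectangle; 
     // the native Timm-style code then locates the eye center inside each region. 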
     int eye_region_width = (int) (face.width * 0.35); 
     int eye_region_height = (int) (face.width * 0.30); 
     int eye_region_top = (int) (face.height * 0.25); 
     int leftEyeRegion_x = (int) (face.width * 0.13); 
     Rect leftEyeRegion = new Rect(leftEyeRegion_x,eye_region_top,eye_region_width,eye_region_height); 
     int [] leftEyeArray = {leftEyeRegion_x,eye_region_top,eye_region_width,eye_region_height}; 
     Rect rightEyeRegion = new Rect(face.width - eye_region_width - leftEyeRegion_x, 
       eye_region_top,eye_region_width,eye_region_height); 
     int [] rightEyeArray = {face.width - eye_region_width - leftEyeRegion_x, 
       eye_region_top,eye_region_width,eye_region_height}; 


     // TODO: error when loading the native function 
     leftEyePoint = findEyeCenter(faceROI.getNativeObjAddr(), leftEyeArray); 
     rightEyePoint = findEyeCenter(faceROI.getNativeObjAddr(), rightEyeArray); 
     leftPupil = new Point(leftEyePoint[0], leftEyePoint[1]); 
     rightPupil = new Point(rightEyePoint[0], rightEyePoint[1]); 
     //-- Find Eye Centers 

     rightPupil.x += Math.round(rightEyeRegion.x + face.x); 
     rightPupil.y += Math.round(rightEyeRegion.y + face.y) ; 
     leftPupil.x += Math.round(leftEyeRegion.x + face.x); 
     leftPupil.y += Math.round(leftEyeRegion.y + face.y); 

     //rightPupil = Math.round(rightPupil); 
     //leftPupil = unscalePoint(leftPupil); 


     // draw eye centers 
     Core.circle(mGray, rightPupil, 3, FACE_RECT_COLOR); 
     Core.circle(mGray, leftPupil, 3, FACE_RECT_COLOR); 
     return mGray; 



    } 

private static Point unscalePoint(Point p) { 

    int x = (int)(p.x); 
    int y = (int) Math.round(p.y/scale_factor); 

    return new Point(x, y - 50); 
} 


@Override 
public void surfaceCreated(SurfaceHolder holder) { 
    // Nothing to initialize here; the overlay is drawn in surfaceChanged(). 

} 


@Override 
public void surfaceChanged(SurfaceHolder holder, int format, int width, 
     int height) { 
    // Draw a simple test pattern so it is easy to see whether the overlay surface is visible. 
    Paint paint = new Paint(); 
    paint.setStyle(Paint.Style.STROKE); 
    paint.setStrokeWidth(3); 
    paint.setColor(Color.WHITE); 
    Path path = new Path(); 
    path.moveTo(0, 0); 
    path.lineTo(150, 150); 

    Canvas canvas = holder.lockCanvas(); 
    if (canvas != null) { // lockCanvas() can return null if the surface is not yet available 
     canvas.drawRGB(255, 128, 128); 
     canvas.drawPath(path, paint); 
     holder.unlockCanvasAndPost(canvas); 
    } 

} 


@Override 
public void surfaceDestroyed(SurfaceHolder holder) { 
    // Nothing to clean up. 

} 

} 

And my .xml file is as below:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:tools="http://schemas.android.com/tools" 
android:layout_width="match_parent" 
android:layout_height="match_parent" 
tools:context=".EyeTrackingActivity" > 

<org.opencv.android.JavaCameraView 
    android:id="@+id/fd_activity_surface_view" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent" /> 

<SurfaceView 
    android:id="@+id/surface_view" 
    android:layout_width="fill_parent" 
    android:layout_height="100dip" 
    android:layout_alignParentBottom="true"/> 

</RelativeLayout> 


What does your logcat output show? – Ivan


The problem has been solved, but now I have a new task with the same code: I need to plot my X-Y coordinates in real time using Canvas. Can you help me with that? –


Comments are not for asking a different question. Please read [comments](http://stackoverflow.com/help/privileges/comment). If the problem has been solved, please post an answer and accept it so that people can see that no further help is needed on this particular question. – Ivan

Answer


I found the answer in the XML file. In the original layout the full-screen camera view overlapped the bottom SurfaceView, which appears to be why it disappeared; adding layout_above keeps the two surfaces separate. The modified .xml is as below:

<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:tools="http://schemas.android.com/tools" 
android:layout_width="match_parent" 
android:layout_height="match_parent" 
android:gravity="fill" 
tools:context=".EyeTrackingActivity" > 

<org.opencv.android.JavaCameraView 
    android:id="@+id/fd_activity_surface_view" 
    android:layout_width="fill_parent" 
    android:layout_height="match_parent" 
    android:layout_above="@+id/surface_view" /> 

<SurfaceView 
    android:id="@+id/surface_view" 
    android:layout_width="fill_parent" 
    android:layout_height="100dip" 
    android:layout_alignParentBottom="true"/> 

</RelativeLayout> 
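As for the follow-up question about plotting the X-Y coordinates in real time: one possible approach (a minimal sketch, not part of the original post) is to keep the SurfaceHolder of the bottom SurfaceView and redraw it on every camera frame. The helper below is hypothetical; it assumes a field mPlotHolder set in onCreate() via mPlotHolder = surfaceView.getHolder(), and that findEyes() has already filled in leftPupil. All the android.graphics classes it uses are already imported in the activity.

// Hypothetical helper, to be called at the end of onCameraFrame() after findEyes(). 
// It appends the newest eye center to a Path and redraws the trail on the overlay. 
private final Path mTrail = new Path(); 
private boolean mTrailStarted = false; 

private void plotPupil(SurfaceHolder holder, Point pupil) { 
    // Extend the trail with the newest eye-center coordinate. 
    if (!mTrailStarted) { 
        mTrail.moveTo((float) pupil.x, (float) pupil.y); 
        mTrailStarted = true; 
    } else { 
        mTrail.lineTo((float) pupil.x, (float) pupil.y); 
    } 

    Canvas canvas = holder.lockCanvas(); 
    if (canvas == null) return; // surface not yet available 

    Paint paint = new Paint(); 
    paint.setStyle(Paint.Style.STROKE); 
    paint.setStrokeWidth(3); 
    paint.setColor(Color.WHITE); 

    canvas.drawRGB(0, 0, 0);        // clear the plot strip 
    canvas.drawPath(mTrail, paint); // draw the accumulated eye-center trail 
    holder.unlockCanvasAndPost(canvas); 
} 

Calling plotPupil(mPlotHolder, leftPupil) from onCameraFrame() should be safe, since SurfaceView is explicitly designed to be drawn from a non-UI thread; the coordinates would still need to be scaled from face-ROI pixels to the 100dip strip.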
