Commit 2fd8b51a authored by Alexander Smorkalov, committed by Andrey Kamaev

Face detection sample ported to the new framework.

parent d9ea16ce
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent" >

    <org.opencv.framework.OpenCvJavaCameraView
        android:layout_width="match_parent"
        android:layout_height="match_parent"
        android:id="@+id/fd_activity_surface_view" />

</LinearLayout>
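The layout above is meant to be inflated by the sample's Activity, which this excerpt does not show. A minimal sketch of the wiring it implies follows; the layout resource name (R.layout.fd) and the class name FdActivity are assumptions, only the view class and the fd_activity_surface_view id come from the XML:

package org.opencv.samples.fd;

import org.opencv.framework.OpenCvJavaCameraView;
import android.app.Activity;
import android.os.Bundle;

// Hypothetical Activity wiring for the layout above; R.layout.fd and this
// class name are assumptions, not part of this commit.
public class FdActivity extends Activity {
    private OpenCvJavaCameraView mCameraView;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.fd);
        mCameraView = (OpenCvJavaCameraView) findViewById(R.id.fd_activity_surface_view);
    }
}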
package org.opencv.samples.fd;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.highgui.Highgui;
import org.opencv.highgui.VideoCapture;
import org.opencv.objdetect.CascadeClassifier;
import android.content.Context;
import android.graphics.Bitmap;
import android.util.Log;
import android.view.SurfaceHolder;
class FdView extends SampleCvViewBase {
    private static final String TAG = "OCVSample::View";

    private Mat                   mRgba;
    private Mat                   mGray;
    private File                  mCascadeFile;
    private CascadeClassifier     mJavaDetector;
    private DetectionBasedTracker mNativeDetector;

    private static final Scalar FACE_RECT_COLOR = new Scalar(0, 255, 0, 255);

    public static final int JAVA_DETECTOR   = 0;
    public static final int NATIVE_DETECTOR = 1;

    private int mDetectorType = JAVA_DETECTOR;

    private float mRelativeFaceSize = 0;
    private int   mAbsoluteFaceSize = 0;

    public void setMinFaceSize(float faceSize) {
        mRelativeFaceSize = faceSize;
        mAbsoluteFaceSize = 0; // recomputed from the frame height on the next frame
    }
    public void setDetectorType(int type) {
        if (mDetectorType != type) {
            mDetectorType = type;

            if (type == NATIVE_DETECTOR) {
                Log.i(TAG, "Detection Based Tracker enabled");
                mNativeDetector.start();
            } else {
                Log.i(TAG, "Cascade detector enabled");
                mNativeDetector.stop();
            }
        }
    }
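    // Usage note (not part of the original sample): a caller such as the
    // sample's Activity would drive the two setters above like this; the
    // mView field name is illustrative.
    //
    //     mView.setMinFaceSize(0.2f);                    // ignore faces smaller than 20% of the frame height
    //     mView.setDetectorType(FdView.NATIVE_DETECTOR); // switch from CascadeClassifier to DetectionBasedTracker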
    public FdView(Context context) {
        super(context);

        try {
            // Copy the cascade from the application resources to a plain file,
            // since CascadeClassifier cannot read from a raw resource directly
            InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface);
            File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
            mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
            FileOutputStream os = new FileOutputStream(mCascadeFile);

            byte[] buffer = new byte[4096];
            int bytesRead;
            while ((bytesRead = is.read(buffer)) != -1) {
                os.write(buffer, 0, bytesRead);
            }
            is.close();
            os.close();

            mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
            if (mJavaDetector.empty()) {
                Log.e(TAG, "Failed to load cascade classifier");
                mJavaDetector = null;
            } else {
                Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
            }

            mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);

            cascadeDir.delete();
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
        }

        Log.i(TAG, "Instantiated new " + this.getClass());
    }
    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "called surfaceCreated");
        synchronized (this) {
            // initialize Mats before usage
            mGray = new Mat();
            mRgba = new Mat();
        }
        super.surfaceCreated(holder);
    }
    @Override
    protected Bitmap processFrame(VideoCapture capture) {
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
        capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

        if (mAbsoluteFaceSize == 0) {
            int height = mGray.rows();
            if (Math.round(height * mRelativeFaceSize) > 0) {
                mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
            }
            if (mNativeDetector != null) // guard against a failed cascade load in the constructor
                mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
        }

        MatOfRect faces = new MatOfRect();

        if (mDetectorType == JAVA_DETECTOR) {
            if (mJavaDetector != null)
                mJavaDetector.detectMultiScale(mGray, faces, 1.1, 2, 2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
                        new Size(mAbsoluteFaceSize, mAbsoluteFaceSize), new Size());
        } else if (mDetectorType == NATIVE_DETECTOR) {
            if (mNativeDetector != null)
                mNativeDetector.detect(mGray, faces);
        } else {
            Log.e(TAG, "Detection method is not selected!");
        }

        Rect[] facesArray = faces.toArray();
        for (int i = 0; i < facesArray.length; i++)
            Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

        Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
        try {
            Utils.matToBitmap(mRgba, bmp);
        } catch (Exception e) {
            Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
            bmp.recycle();
            bmp = null;
        }
        return bmp;
    }
    @Override
    public void run() {
        super.run();

        synchronized (this) {
            // Explicitly deallocate Mats
            if (mRgba != null)
                mRgba.release();
            if (mGray != null)
                mGray.release();
            if (mCascadeFile != null)
                mCascadeFile.delete();
            if (mNativeDetector != null)
                mNativeDetector.release();

            mRgba = null;
            mGray = null;
            mCascadeFile = null;
        }
    }
}
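FdView relies on a DetectionBasedTracker Java wrapper that is not part of this diff. Below is a minimal sketch of the shape that wrapper must have, inferred from the calls made above; the native method names and the native library name are assumptions, not part of this commit:

package org.opencv.samples.fd;

import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;

// Sketch of the JNI wrapper used by FdView. The public methods match the
// calls made from FdView; the native entry points and library name below
// are assumptions.
public class DetectionBasedTracker {
    private long mNativeObj = 0;

    public DetectionBasedTracker(String cascadeName, int minFaceSize) {
        mNativeObj = nativeCreateObject(cascadeName, minFaceSize);
    }

    public void start() { nativeStart(mNativeObj); }
    public void stop()  { nativeStop(mNativeObj); }

    public void setMinFaceSize(int size) { nativeSetFaceSize(mNativeObj, size); }

    public void detect(Mat imageGray, MatOfRect faces) {
        nativeDetect(mNativeObj, imageGray.getNativeObjAddr(), faces.getNativeObjAddr());
    }

    public void release() {
        nativeDestroyObject(mNativeObj);
        mNativeObj = 0;
    }

    private static native long nativeCreateObject(String cascadeName, int minFaceSize);
    private static native void nativeStart(long thiz);
    private static native void nativeStop(long thiz);
    private static native void nativeSetFaceSize(long thiz, int size);
    private static native void nativeDetect(long thiz, long inputImage, long faces);
    private static native void nativeDestroyObject(long thiz);

    static {
        System.loadLibrary("detection_based_tracker"); // assumed library name
    }
}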
package org.opencv.samples.fd;
import java.util.List;
import org.opencv.core.Size;
import org.opencv.highgui.VideoCapture;
import org.opencv.highgui.Highgui;
import android.content.Context;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHolder.Callback, Runnable {
    private static final String TAG = "OCVSample::BaseView";

    private SurfaceHolder mHolder;
    private VideoCapture  mCamera;
    private FpsMeter      mFps;

    public SampleCvViewBase(Context context) {
        super(context);
        mHolder = getHolder();
        mHolder.addCallback(this);
        mFps = new FpsMeter();
        Log.i(TAG, "Instantiated new " + this.getClass());
    }
    public synchronized boolean openCamera() {
        Log.i(TAG, "Opening Camera");
        mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
        if (!mCamera.isOpened()) {
            releaseCamera();
            Log.e(TAG, "Can't open native camera");
            return false;
        }
        return true;
    }

    public synchronized void releaseCamera() {
        Log.i(TAG, "Releasing Camera");
        if (mCamera != null) {
            mCamera.release();
            mCamera = null;
        }
    }
    public synchronized void setupCamera(int width, int height) {
        if (mCamera != null && mCamera.isOpened()) {
            Log.i(TAG, "Setup Camera - " + width + "x" + height);
            List<Size> sizes = mCamera.getSupportedPreviewSizes();
            int frameWidth = width;
            int frameHeight = height;

            // Select the supported preview size whose height is closest to the surface height
            {
                double minDiff = Double.MAX_VALUE;
                for (Size size : sizes) {
                    if (Math.abs(size.height - height) < minDiff) {
                        frameWidth = (int) size.width;
                        frameHeight = (int) size.height;
                        minDiff = Math.abs(size.height - height);
                    }
                }
            }

            mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, frameWidth);
            mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, frameHeight);
        }
    }
    public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
        Log.i(TAG, "called surfaceChanged");
        setupCamera(width, height);
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Log.i(TAG, "called surfaceCreated");
        (new Thread(this)).start();
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        Log.i(TAG, "called surfaceDestroyed");
    }

    protected abstract Bitmap processFrame(VideoCapture capture);
    public void run() {
        Log.i(TAG, "Started processing thread");
        mFps.init();

        while (true) {
            Bitmap bmp = null;

            synchronized (this) {
                if (mCamera == null)
                    break;

                if (!mCamera.grab()) {
                    Log.e(TAG, "mCamera.grab() failed");
                    break;
                }

                bmp = processFrame(mCamera);
                mFps.measure();
            }

            if (bmp != null) {
                Canvas canvas = mHolder.lockCanvas();
                if (canvas != null) {
                    canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
                    canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
                    mFps.draw(canvas, (canvas.getWidth() - bmp.getWidth()) / 2, 0);
                    mHolder.unlockCanvasAndPost(canvas);
                }
                bmp.recycle();
            }
        }

        Log.i(TAG, "Finished processing thread");
    }
}
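SampleCvViewBase also depends on an FpsMeter helper that this commit does not include. A minimal sketch consistent with the three calls made above (init(), measure(), and draw(Canvas, float, float)); the averaging window and text styling are assumptions:

package org.opencv.samples.fd;

import java.text.DecimalFormat;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;

// Sketch of the FpsMeter helper used by SampleCvViewBase. Only the three
// methods called above are required; everything else here is an assumption.
public class FpsMeter {
    private static final int STEP = 20; // assumed: update the reading every 20 frames

    private final Paint mPaint = new Paint();
    private final DecimalFormat mFormat = new DecimalFormat("0.00");
    private int mFramesCounter = 0;
    private long mPrevFrameTime = 0;
    private String mStrfps = "";

    public void init() {
        mFramesCounter = 0;
        mPrevFrameTime = System.nanoTime();
        mStrfps = "";
        mPaint.setColor(Color.BLUE);
        mPaint.setTextSize(50);
    }

    public void measure() {
        mFramesCounter++;
        if (mFramesCounter % STEP == 0) {
            long time = System.nanoTime();
            double fps = STEP * 1e9 / (time - mPrevFrameTime);
            mPrevFrameTime = time;
            mStrfps = mFormat.format(fps) + " FPS";
        }
    }

    public void draw(Canvas canvas, float offsetx, float offsety) {
        canvas.drawText(mStrfps, offsetx, offsety + 50, mPaint);
    }
}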