Commit a0411054 authored by Andrey Pavlenko's avatar Andrey Pavlenko

refactoring Tutorial-4 using new OpenCV CameraGLSurfaceView

parent 15db8243
...@@ -5,6 +5,7 @@ ...@@ -5,6 +5,7 @@
<storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="com.android.toolchain.gcc.2119826334" moduleId="org.eclipse.cdt.core.settings" name="Default"> <storageModule buildSystemId="org.eclipse.cdt.managedbuilder.core.configurationDataProvider" id="com.android.toolchain.gcc.2119826334" moduleId="org.eclipse.cdt.core.settings" name="Default">
<externalSettings/> <externalSettings/>
<extensions> <extensions>
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
<extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GASErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GmakeErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GLDErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
...@@ -12,7 +13,6 @@ ...@@ -12,7 +13,6 @@
<extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.VCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.CWDLocator" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/> <extension id="org.eclipse.cdt.core.GCCErrorParser" point="org.eclipse.cdt.core.ErrorParser"/>
<extension id="org.eclipse.cdt.core.ELF" point="org.eclipse.cdt.core.BinaryParser"/>
</extensions> </extensions>
</storageModule> </storageModule>
<storageModule moduleId="cdtBuildSystem" version="4.0.0"> <storageModule moduleId="cdtBuildSystem" version="4.0.0">
...@@ -28,7 +28,7 @@ ...@@ -28,7 +28,7 @@
</builder> </builder>
<tool id="com.android.gcc.compiler.1725706653" name="Android GCC Compiler" superClass="com.android.gcc.compiler"> <tool id="com.android.gcc.compiler.1725706653" name="Android GCC Compiler" superClass="com.android.gcc.compiler">
<option id="com.android.gcc.option.includePath.1852635009" superClass="com.android.gcc.option.includePath" valueType="includePath"> <option id="com.android.gcc.option.includePath.1852635009" superClass="com.android.gcc.option.includePath" valueType="includePath">
<listOptionValue builtIn="false" value="../$(O4A_SDK_ROOT)/sdk/native/jni/include"/> <listOptionValue builtIn="false" value="&quot;${OPENCV_ANDROID_SDK}/sdk/native/jni/include&quot;"/>
</option> </option>
<inputType id="com.android.gcc.inputType.193477776" superClass="com.android.gcc.inputType"/> <inputType id="com.android.gcc.inputType.193477776" superClass="com.android.gcc.inputType"/>
</tool> </tool>
......
...@@ -21,7 +21,7 @@ endif ...@@ -21,7 +21,7 @@ endif
LOCAL_C_INCLUDES += $(OPENCL_SDK)/include LOCAL_C_INCLUDES += $(OPENCL_SDK)/include
LOCAL_LDLIBS += -L$(OPENCL_SDK)/lib/$(TARGET_ARCH_ABI) -lOpenCL LOCAL_LDLIBS += -L$(OPENCL_SDK)/lib/$(TARGET_ARCH_ABI) -lOpenCL
LOCAL_MODULE := JNIrender LOCAL_MODULE := JNIpart
LOCAL_SRC_FILES := jni.c GLrender.cpp CLprocessor.cpp LOCAL_SRC_FILES := jni.c CLprocessor.cpp
LOCAL_LDLIBS += -llog -lGLESv2 -lEGL LOCAL_LDLIBS += -llog -lGLESv2 -lEGL
include $(BUILD_SHARED_LIBRARY) include $(BUILD_SHARED_LIBRARY)
\ No newline at end of file
...@@ -2,6 +2,7 @@ ...@@ -2,6 +2,7 @@
#define CL_USE_DEPRECATED_OPENCL_1_1_APIS /*let's give a chance for OpenCL 1.1 devices*/ #define CL_USE_DEPRECATED_OPENCL_1_1_APIS /*let's give a chance for OpenCL 1.1 devices*/
#include <CL/cl.hpp> #include <CL/cl.hpp>
#include <GLES2/gl2.h>
#include <EGL/egl.h> #include <EGL/egl.h>
#include <opencv2/core.hpp> #include <opencv2/core.hpp>
...@@ -82,7 +83,7 @@ cl::CommandQueue theQueue; ...@@ -82,7 +83,7 @@ cl::CommandQueue theQueue;
cl::Program theProgB2B, theProgI2B, theProgI2I; cl::Program theProgB2B, theProgI2B, theProgI2I;
bool haveOpenCL = false; bool haveOpenCL = false;
void initCL() extern "C" void initCL()
{ {
dumpCLinfo(); dumpCLinfo();
...@@ -144,14 +145,19 @@ void initCL() ...@@ -144,14 +145,19 @@ void initCL()
LOGD("initCL completed"); LOGD("initCL completed");
} }
void closeCL() extern "C" void closeCL()
{ {
} }
#define GL_TEXTURE_2D 0x0DE1 #define GL_TEXTURE_2D 0x0DE1
void procOCL_I2I(int texIn, int texOut, int w, int h) void procOCL_I2I(int texIn, int texOut, int w, int h)
{ {
if(!haveOpenCL) return; LOGD("Processing OpenCL Direct (image2d)");
if(!haveOpenCL)
{
LOGE("OpenCL isn't initialized");
return;
}
LOGD("procOCL_I2I(%d, %d, %d, %d)", texIn, texOut, w, h); LOGD("procOCL_I2I(%d, %d, %d, %d)", texIn, texOut, w, h);
cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn); cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn);
...@@ -185,7 +191,12 @@ void procOCL_I2I(int texIn, int texOut, int w, int h) ...@@ -185,7 +191,12 @@ void procOCL_I2I(int texIn, int texOut, int w, int h)
void procOCL_OCV(int texIn, int texOut, int w, int h) void procOCL_OCV(int texIn, int texOut, int w, int h)
{ {
if(!haveOpenCL) return; LOGD("Processing OpenCL via OpenCV");
if(!haveOpenCL)
{
LOGE("OpenCL isn't initialized");
return;
}
int64_t t = getTimeMs(); int64_t t = getTimeMs();
cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn); cl::ImageGL imgIn (theContext, CL_MEM_READ_ONLY, GL_TEXTURE_2D, 0, texIn);
...@@ -219,3 +230,60 @@ void procOCL_OCV(int texIn, int texOut, int w, int h) ...@@ -219,3 +230,60 @@ void procOCL_OCV(int texIn, int texOut, int w, int h)
cv::ocl::finish(); cv::ocl::finish();
LOGD("uploading results to texture costs %d ms", getTimeInterval(t)); LOGD("uploading results to texture costs %d ms", getTimeInterval(t));
} }
// CPU processing path: read the current FBO back to host memory, run an
// OpenCV Laplacian filter in place, and upload the result into texOut.
// Expects the source FBO to be bound and runs entirely on the GL thread.
void drawFrameProcCPU(int w, int h, int texOut)
{
    LOGD("Processing on CPU");
    int64_t t;

    // Static scratch buffer: 16 MB, i.e. up to 2048*2048 RGBA pixels.
    const int BUFF_SIZE = 1<<24;//2k*2k*4;
    static char tmpBuff[BUFF_SIZE];

    // glReadPixels(GL_RGBA, GL_UNSIGNED_BYTE) writes 4 bytes per pixel, so the
    // capacity check must account for the pixel size. The previous check
    // (`w*h > BUFF_SIZE`) allowed frames up to 4x the buffer size and could
    // overflow tmpBuff. Use 64-bit math so w*h*4 itself cannot overflow int.
    if(w <= 0 || h <= 0 || (int64_t)w * h * 4 > (int64_t)BUFF_SIZE)
    {
        LOGE("Internal temp buffer is too small, can't make CPU frame processing");
        return;
    }

    // read — expecting FBO to be bound
    t = getTimeMs();
    glReadPixels(0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff);
    LOGD("glReadPixels() costs %d ms", getTimeInterval(t));

    // modify in place via a zero-copy cv::Mat wrapper over tmpBuff
    t = getTimeMs();
    cv::Mat m(h, w, CV_8UC4, tmpBuff);
    cv::Laplacian(m, m, CV_8U);
    m *= 10; // amplify edges so the effect is clearly visible
    LOGD("Laplacian() costs %d ms", getTimeInterval(t));

    // write the processed pixels back into the output texture
    glActiveTexture(GL_TEXTURE0);
    glBindTexture(GL_TEXTURE_2D, texOut);
    t = getTimeMs();
    glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, w, h, GL_RGBA, GL_UNSIGNED_BYTE, tmpBuff);
    LOGD("glTexSubImage2D() costs %d ms", getTimeInterval(t));
}
// Processing-mode IDs; must stay in sync with the PROCESSING_MODE_* constants
// on the Java side (NativePart).
enum ProcMode {PROC_MODE_NO_PROC=0, PROC_MODE_CPU=1, PROC_MODE_OCL_DIRECT=2, PROC_MODE_OCL_OCV=3};

// Dispatch one captured camera frame to the processing path selected by `mode`.
// tex1 is the input texture, tex2 the output texture, w/h the frame size.
extern "C" void processFrame(int tex1, int tex2, int w, int h, int mode)
{
    if(mode == PROC_MODE_CPU)
    {
        drawFrameProcCPU(w, h, tex2);
    }
    else if(mode == PROC_MODE_OCL_DIRECT)
    {
        procOCL_I2I(tex1, tex2, w, h);
    }
    else if(mode == PROC_MODE_OCL_OCV)
    {
        procOCL_OCV(tex1, tex2, w, h);
    }
    else
    {
        // PROC_MODE_NO_PROC is filtered out by the caller, so reaching here
        // (with it or any other value) is unexpected and only logged.
        LOGE("Unexpected processing mode: %d", mode);
    }
}
This diff is collapsed.
#include <android/log.h> #include <android/log.h>
#define LOG_TAG "JNIRenderer" #define LOG_TAG "JNIpart"
//#define LOGD(...) //#define LOGD(...)
#define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__)) #define LOGD(...) ((void)__android_log_print(ANDROID_LOG_DEBUG, LOG_TAG, __VA_ARGS__))
#define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__)) #define LOGE(...) ((void)__android_log_print(ANDROID_LOG_ERROR, LOG_TAG, __VA_ARGS__))
......
#include <jni.h> #include <jni.h>
int initGL(); int initCL();
void closeGL(); void closeCL();
void changeSize(int width, int height); void processFrame(int tex1, int tex2, int w, int h, int mode);
void drawFrame();
void setProcessingMode(int mode);
JNIEXPORT jint JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_initGL(JNIEnv * env, jclass cls) JNIEXPORT jint JNICALL Java_org_opencv_samples_tutorial4_NativePart_initCL(JNIEnv * env, jclass cls)
{ {
return initGL(); return initCL();
} }
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_closeGL(JNIEnv * env, jclass cls) JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativePart_closeCL(JNIEnv * env, jclass cls)
{ {
closeGL(); closeCL();
} }
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_changeSize(JNIEnv * env, jclass cls, jint width, jint height) JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativePart_processFrame(JNIEnv * env, jclass cls, jint tex1, jint tex2, jint w, jint h, jint mode)
{ {
changeSize(width, height); processFrame(tex1, tex2, w, h, mode);
}
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_drawFrame(JNIEnv * env, jclass cls)
{
drawFrame();
}
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial4_NativeGLRenderer_setProcessingMode(JNIEnv * env, jclass cls, jint mode)
{
setProcessingMode(mode);
} }
package org.opencv.samples.tutorial4;
import java.io.IOException;
import java.util.List;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
@SuppressWarnings("deprecation")
public class CameraRenderer extends MyGLRendererBase {
    protected final String LOGTAG = "CameraRenderer";

    // Deprecated android.hardware.Camera handle; null whenever the camera is closed.
    private Camera mCamera;
    // True between startPreview() and the matching stopPreview().
    boolean mPreviewStarted = false;

    CameraRenderer(MyGLSurfaceView view) {
        super(view);
    }

    // Stops the preview (if running) and releases the camera.
    // Safe to call repeatedly: does nothing when the camera is already closed.
    protected void closeCamera() {
        Log.i(LOGTAG, "closeCamera");
        if(mCamera != null) {
            mCamera.stopPreview();
            mPreviewStarted = false;
            mCamera.release();
            mCamera = null;
        }
    }

    // Opens the default camera and routes its frames into the SurfaceTexture
    // (mSTex) owned by the renderer base class.
    protected void openCamera() {
        Log.i(LOGTAG, "openCamera");
        closeCamera(); // release any previous instance first
        mCamera = Camera.open();
        try {
            mCamera.setPreviewTexture(mSTex);
        } catch (IOException ioe) {
            Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage());
        }
    }

    // Picks the largest supported preview size that fits within width x height
    // and whose aspect ratio differs from the surface's by less than 0.2,
    // then (re)starts the preview with that size.
    public void setCameraPreviewSize(int width, int height) {
        Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height);
        if(mCamera == null)
            return;
        // Preview size may only be changed while the preview is stopped.
        if(mPreviewStarted) {
            mCamera.stopPreview();
            mPreviewStarted = false;
        }
        Camera.Parameters param = mCamera.getParameters();
        List<Size> psize = param.getSupportedPreviewSizes();
        int bestWidth = 0, bestHeight = 0;
        if (psize.size() > 0) {
            float aspect = (float)width / height;
            for (Size size : psize) {
                int w = size.width, h = size.height;
                Log.d("Renderer", "checking camera preview size: "+w+"x"+h);
                if ( w <= width && h <= height &&
                     w >= bestWidth && h >= bestHeight &&
                     Math.abs(aspect - (float)w/h) < 0.2 ) {
                    bestWidth = w;
                    bestHeight = h;
                }
            }
            if(bestWidth > 0 && bestHeight > 0) {
                param.setPreviewSize(bestWidth, bestHeight);
                Log.i(LOGTAG, "size: "+bestWidth+" x "+bestHeight);
            }
        }
        // Legacy parameter key: hints the driver to deliver landscape frames.
        param.set("orientation", "landscape");
        mCamera.setParameters(param);
        mCamera.startPreview();
        mPreviewStarted = true;
    }
}
package org.opencv.samples.tutorial4;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import android.graphics.SurfaceTexture;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.os.Handler;
import android.os.Looper;
import android.util.Log;
import android.widget.TextView;
// Base GLSurfaceView renderer: owns the camera SurfaceTexture, forwards GL
// lifecycle events to the native renderer (NativeGLRenderer) and to the
// camera hooks implemented by subclasses, and logs/displays FPS.
public abstract class MyGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
    protected final String LOGTAG = "MyGLRendererBase";

    // Counter and timestamp used only for the FPS measurement in onDrawFrame().
    protected int frameCounter;
    protected long lastNanoTime;

    // Texture the camera preview is streamed into; created in onSurfaceCreated().
    protected SurfaceTexture mSTex;
    protected MyGLSurfaceView mView;
    protected TextView mFpsText;

    // Set once GL + camera are initialized; cleared in onPause().
    protected boolean mGLInit = false;
    // Set by onFrameAvailable() (camera thread), consumed by onDrawFrame() (GL thread).
    protected boolean mTexUpdate = false;

    MyGLRendererBase(MyGLSurfaceView view) {
        mView = view;
    }

    // Camera-specific lifecycle hooks implemented by subclasses.
    protected abstract void openCamera();
    protected abstract void closeCamera();
    protected abstract void setCameraPreviewSize(int width, int height);

    public void setFpsTextView(TextView fpsTV)
    {
        mFpsText = fpsTV;
    }

    public void onResume() {
        Log.i(LOGTAG, "onResume");
        frameCounter = 0;
        lastNanoTime = System.nanoTime();
    }

    // Tears down the camera, the surface texture, and the native GL state.
    public void onPause() {
        Log.i(LOGTAG, "onPause");
        mGLInit = false;
        mTexUpdate = false;
        closeCamera();
        if(mSTex != null) {
            mSTex.release();
            mSTex = null;
            NativeGLRenderer.closeGL();
        }
    }

    // Called on an arbitrary thread when the camera produced a new frame;
    // flags the pending update and schedules a render pass.
    @Override
    public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
        //Log.i(LOGTAG, "onFrameAvailable");
        mTexUpdate = true;
        mView.requestRender();
    }

    @Override
    public void onDrawFrame(GL10 gl) {
        //Log.i(LOGTAG, "onDrawFrame");
        if (!mGLInit)
            return;
        // Latch the newest camera frame into the GL texture; synchronized
        // against onFrameAvailable(), which runs on another thread.
        synchronized (this) {
            if (mTexUpdate) {
                mSTex.updateTexImage();
                mTexUpdate = false;
            }
        }
        NativeGLRenderer.drawFrame();

        // log FPS every 10 frames, mirroring it into the optional TextView
        frameCounter++;
        if(frameCounter >= 10)
        {
            final int fps = (int) (frameCounter * 1e9 / (System.nanoTime() - lastNanoTime));
            Log.i(LOGTAG, "drawFrame() FPS: "+fps);
            if(mFpsText != null) {
                Runnable fpsUpdater = new Runnable() {
                    public void run() {
                        mFpsText.setText("FPS: " + fps);
                    }
                };
                // Views may only be touched on the UI thread.
                new Handler(Looper.getMainLooper()).post(fpsUpdater);
            }
            frameCounter = 0;
            lastNanoTime = System.nanoTime();
        }
    }

    @Override
    public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) {
        Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")");
        NativeGLRenderer.changeSize(surfaceWidth, surfaceHeight);
        setCameraPreviewSize(surfaceWidth, surfaceHeight);
    }

    @Override
    public void onSurfaceCreated(GL10 gl, EGLConfig config) {
        Log.i(LOGTAG, "onSurfaceCreated");
        String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION);
        if (strGLVersion != null)
            Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion);
        // The native side creates the camera texture and returns its GL handle.
        int hTex = NativeGLRenderer.initGL();
        mSTex = new SurfaceTexture(hTex);
        mSTex.setOnFrameAvailableListener(this);
        openCamera();
        mGLInit = true;
    }
}
package org.opencv.samples.tutorial4; package org.opencv.samples.tutorial4;
import org.opencv.android.CameraGLSurfaceView;
import android.app.Activity; import android.app.Activity;
import android.content.Context; import android.content.Context;
import android.opengl.GLSurfaceView; import android.os.Handler;
import android.os.Looper;
import android.util.AttributeSet; import android.util.AttributeSet;
import android.util.Log;
import android.view.MotionEvent; import android.view.MotionEvent;
import android.view.SurfaceHolder; import android.view.SurfaceHolder;
import android.widget.TextView; import android.widget.TextView;
import android.widget.Toast;
public class MyGLSurfaceView extends GLSurfaceView { public class MyGLSurfaceView extends CameraGLSurfaceView implements CameraGLSurfaceView.CameraTextureListener {
MyGLRendererBase mRenderer; static final String LOGTAG = "MyGLSurfaceView";
protected int procMode = NativePart.PROCESSING_MODE_NO_PROCESSING;
static final String[] procModeName = new String[] {"No Processing", "CPU", "OpenCL Direct", "OpenCL via OpenCV"};
protected int frameCounter;
protected long lastNanoTime;
TextView mFpsText = null;
public MyGLSurfaceView(Context context, AttributeSet attrs) { public MyGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs); super(context, attrs);
if(android.os.Build.VERSION.SDK_INT >= 21)
mRenderer = new Camera2Renderer(this);
else
mRenderer = new CameraRenderer(this);
setEGLContextClientVersion(2);
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
} }
public void setFpsTextView(TextView tv) { @Override
mRenderer.setFpsTextView(tv); public boolean onTouchEvent(MotionEvent e) {
if(e.getAction() == MotionEvent.ACTION_DOWN)
((Activity)getContext()).openOptionsMenu();
return true;
} }
@Override @Override
public void surfaceCreated(SurfaceHolder holder) { public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder); super.surfaceCreated(holder);
//NativePart.initCL();
} }
@Override @Override
public void surfaceDestroyed(SurfaceHolder holder) { public void surfaceDestroyed(SurfaceHolder holder) {
//NativePart.closeCL();
super.surfaceDestroyed(holder); super.surfaceDestroyed(holder);
} }
@Override public void setProcessingMode(int newMode) {
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) { if(newMode>=0 && newMode<procModeName.length)
super.surfaceChanged(holder, format, w, h); procMode = newMode;
else
Log.e(LOGTAG, "Ignoring invalid processing mode: " + newMode);
((Activity) getContext()).runOnUiThread(new Runnable() {
public void run() {
Toast.makeText(getContext(), "Selected mode: " + procModeName[procMode], Toast.LENGTH_LONG).show();
}
});
} }
@Override @Override
public void onResume() { public void onCameraViewStarted(int width, int height) {
super.onResume(); ((Activity) getContext()).runOnUiThread(new Runnable() {
mRenderer.onResume(); public void run() {
Toast.makeText(getContext(), "onCameraViewStarted", Toast.LENGTH_SHORT).show();
}
});
NativePart.initCL();
frameCounter = 0;
lastNanoTime = System.nanoTime();
} }
@Override @Override
public void onPause() { public void onCameraViewStopped() {
mRenderer.onPause(); ((Activity) getContext()).runOnUiThread(new Runnable() {
super.onPause(); public void run() {
Toast.makeText(getContext(), "onCameraViewStopped", Toast.LENGTH_SHORT).show();
}
});
} }
@Override @Override
public boolean onTouchEvent(MotionEvent e) { public boolean onCameraTexture(int texIn, int texOut, int width, int height) {
if(e.getAction() == MotionEvent.ACTION_DOWN) // FPS
((Activity)getContext()).openOptionsMenu(); frameCounter++;
if(frameCounter >= 30)
{
final int fps = (int) (frameCounter * 1e9 / (System.nanoTime() - lastNanoTime));
Log.i(LOGTAG, "drawFrame() FPS: "+fps);
if(mFpsText != null) {
Runnable fpsUpdater = new Runnable() {
public void run() {
mFpsText.setText("FPS: " + fps);
}
};
new Handler(Looper.getMainLooper()).post(fpsUpdater);
} else {
Log.d(LOGTAG, "mFpsText == null");
mFpsText = (TextView)((Activity) getContext()).findViewById(R.id.fps_text_view);
}
frameCounter = 0;
lastNanoTime = System.nanoTime();
}
if(procMode == NativePart.PROCESSING_MODE_NO_PROCESSING)
return false;
NativePart.processFrame(texIn, texOut, width, height, procMode);
return true; return true;
} }
} }
package org.opencv.samples.tutorial4; package org.opencv.samples.tutorial4;
public class NativeGLRenderer { public class NativePart {
static static
{ {
System.loadLibrary("opencv_java3"); System.loadLibrary("opencv_java3");
System.loadLibrary("JNIrender"); System.loadLibrary("JNIpart");
} }
public static final int PROCESSING_MODE_NO_PROCESSING = 0; public static final int PROCESSING_MODE_NO_PROCESSING = 0;
...@@ -12,9 +12,7 @@ public class NativeGLRenderer { ...@@ -12,9 +12,7 @@ public class NativeGLRenderer {
public static final int PROCESSING_MODE_OCL_DIRECT = 2; public static final int PROCESSING_MODE_OCL_DIRECT = 2;
public static final int PROCESSING_MODE_OCL_OCV = 3; public static final int PROCESSING_MODE_OCL_OCV = 3;
public static native int initGL(); public static native int initCL();
public static native void closeGL(); public static native void closeCL();
public static native void drawFrame(); public static native void processFrame(int tex1, int tex2, int w, int h, int mode);
public static native void changeSize(int width, int height);
public static native void setProcessingMode(int mode);
} }
...@@ -29,8 +29,8 @@ public class Tutorial4Activity extends Activity { ...@@ -29,8 +29,8 @@ public class Tutorial4Activity extends Activity {
//setContentView(mView); //setContentView(mView);
setContentView(R.layout.activity); setContentView(R.layout.activity);
mView = (MyGLSurfaceView) findViewById(R.id.my_gl_surface_view); mView = (MyGLSurfaceView) findViewById(R.id.my_gl_surface_view);
mView.setCameraTextureListener(mView);
TextView tv = (TextView)findViewById(R.id.fps_text_view); TextView tv = (TextView)findViewById(R.id.fps_text_view);
mView.setFpsTextView(tv);
mProcMode = (TextView)findViewById(R.id.proc_mode_text_view); mProcMode = (TextView)findViewById(R.id.proc_mode_text_view);
runOnUiThread(new Runnable() { runOnUiThread(new Runnable() {
public void run() { public void run() {
...@@ -38,7 +38,8 @@ public class Tutorial4Activity extends Activity { ...@@ -38,7 +38,8 @@ public class Tutorial4Activity extends Activity {
} }
}); });
NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_NO_PROCESSING); } mView.setProcessingMode(NativePart.PROCESSING_MODE_NO_PROCESSING);
}
@Override @Override
protected void onPause() { protected void onPause() {
...@@ -68,7 +69,7 @@ public class Tutorial4Activity extends Activity { ...@@ -68,7 +69,7 @@ public class Tutorial4Activity extends Activity {
mProcMode.setText("Processing mode: No Processing"); mProcMode.setText("Processing mode: No Processing");
} }
}); });
NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_NO_PROCESSING); mView.setProcessingMode(NativePart.PROCESSING_MODE_NO_PROCESSING);
return true; return true;
case R.id.cpu: case R.id.cpu:
runOnUiThread(new Runnable() { runOnUiThread(new Runnable() {
...@@ -76,7 +77,7 @@ public class Tutorial4Activity extends Activity { ...@@ -76,7 +77,7 @@ public class Tutorial4Activity extends Activity {
mProcMode.setText("Processing mode: CPU"); mProcMode.setText("Processing mode: CPU");
} }
}); });
NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_CPU); mView.setProcessingMode(NativePart.PROCESSING_MODE_CPU);
return true; return true;
case R.id.ocl_direct: case R.id.ocl_direct:
runOnUiThread(new Runnable() { runOnUiThread(new Runnable() {
...@@ -84,7 +85,7 @@ public class Tutorial4Activity extends Activity { ...@@ -84,7 +85,7 @@ public class Tutorial4Activity extends Activity {
mProcMode.setText("Processing mode: OpenCL direct"); mProcMode.setText("Processing mode: OpenCL direct");
} }
}); });
NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_OCL_DIRECT); mView.setProcessingMode(NativePart.PROCESSING_MODE_OCL_DIRECT);
return true; return true;
case R.id.ocl_ocv: case R.id.ocl_ocv:
runOnUiThread(new Runnable() { runOnUiThread(new Runnable() {
...@@ -92,7 +93,7 @@ public class Tutorial4Activity extends Activity { ...@@ -92,7 +93,7 @@ public class Tutorial4Activity extends Activity {
mProcMode.setText("Processing mode: OpenCL via OpenCV (TAPI)"); mProcMode.setText("Processing mode: OpenCL via OpenCV (TAPI)");
} }
}); });
NativeGLRenderer.setProcessingMode(NativeGLRenderer.PROCESSING_MODE_OCL_OCV); mView.setProcessingMode(NativePart.PROCESSING_MODE_OCL_OCV);
return true; return true;
default: default:
return false; return false;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment