Commit 3a932b0f authored by Ethan Rublee

Refactoring the image_pool for android, and adding some common utils for camera configuration. Also experimenting with optimization - grayscale preview is way faster than color right now.

parent 077dd777
@@ -11,6 +11,10 @@
       regular Android project.
  -->
  <activity android:name="com.opencv.OpenCV" />
  <activity android:name="com.opencv.calibration.ChessBoardChooser"/>
  <activity android:name="com.opencv.calibration.CameraConfig"/>
  <activity android:name="com.opencv.calibration.CalibrationViewer"/>
  <service android:name="com.opencv.calibration.services.CalibrationService"/>
  </application>
  <!-- set the opengl version
  <uses-feature android:glEsVersion="0x00020000" />-->
...
@@ -12,6 +12,10 @@ $(info gedit $(LOCAL_ENV_MK))
$(error Please setup the $(LOCAL_ENV_MK) - the default was just created')
endif

ifndef ARM_TARGETS
ARM_TARGETS=armeabi armeabi-v7a
endif

ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)

$(info OPENCV_CONFIG = $(OPENCV_CONFIG))
@@ -44,7 +48,7 @@ all: $(LIB) nogdb
#calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR
$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
	$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
	PROJECT_PATH=$(PROJECT_PATH) ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS)

#this creates the swig wrappers
@@ -70,5 +74,5 @@ clean-swig:
#does clean-swig and then uses the ndk-build clean
clean: clean-swig
	$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
	PROJECT_PATH=$(PROJECT_PATH) clean ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS)

APP_ABI := $(ARM_TARGETS)
APP_MODULES := android-opencv
@@ -14,8 +14,6 @@
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>

#include "image_pool.h"
@@ -24,22 +22,10 @@
#define DETECT_STAR 1
#define DETECT_SURF 2

class Calibration
{
public:
  Calibration();
  virtual ~Calibration();
@@ -52,8 +38,17 @@ public:
  void calibrate(const char* filename);
  void drawText(int idx, image_pool* pool, const char* text);

  cv::Size patternsize;

private:
  std::vector<cv::KeyPoint> keypoints;
  std::vector<std::vector<cv::Point2f> > imagepoints;
  cv::Mat K;
  cv::Mat distortion;
  cv::Size imgsize;
};
#endif /* PROCESSOR_H_ */
@@ -37,13 +37,16 @@ using namespace cv;
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

static void printGLString(const char *name, GLenum s)
{
  const char *v = (const char *)glGetString(s);
  LOGI("GL %s = %s\n", name, v);
}

static void checkGlError(const char* op)
{
  for (GLint error = glGetError(); error; error = glGetError())
  {
    LOGI("after %s() glError (0x%x)\n", op, error);
  }
}
@@ -57,8 +60,7 @@ static const char gVertexShader[] = "attribute vec4 a_position; \n"
    "  v_texCoord = a_texCoord; \n"
    "} \n";

static const char gFragmentShader[] = "precision mediump float; \n"
    "varying vec2 v_texCoord; \n"
    "uniform sampler2D s_texture; \n"
    "void main() \n"
@@ -66,25 +68,26 @@
    "  gl_FragColor = texture2D( s_texture, v_texCoord );\n"
    "} \n";

const GLfloat gTriangleVertices[] = {0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f};

GLubyte testpixels[4 * 3] = {255, 0, 0, // Red
                             0, 255, 0, // Green
                             0, 0, 255, // Blue
                             255, 255, 0 // Yellow
                            };

GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels)
{
  // Bind the texture
  glActiveTexture(GL_TEXTURE0);
  checkGlError("glActiveTexture");
  // Bind the texture object
  glBindTexture(GL_TEXTURE_2D, _textureid);
  checkGlError("glBindTexture");

  GLenum format;
  switch (channels)
  {
    case 3:
      format = GL_RGB;
      break;
@@ -96,31 +99,35 @@
      break;
  }

  // Load the texture
  glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, pixels);
  checkGlError("glTexImage2D");

  // Set the filtering mode
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
  glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);

  return _textureid;
}

GLuint glcamera::loadShader(GLenum shaderType, const char* pSource)
{
  GLuint shader = glCreateShader(shaderType);
  if (shader)
  {
    glShaderSource(shader, 1, &pSource, NULL);
    glCompileShader(shader);
    GLint compiled = 0;
    glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
    if (!compiled)
    {
      GLint infoLen = 0;
      glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
      if (infoLen)
      {
        char* buf = (char*)malloc(infoLen);
        if (buf)
        {
          glGetShaderInfoLog(shader, infoLen, NULL, buf);
          LOGE("Could not compile shader %d:\n%s\n", shaderType, buf);
@@ -134,20 +141,23 @@
  return shader;
}

GLuint glcamera::createProgram(const char* pVertexSource, const char* pFragmentSource)
{
  GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
  if (!vertexShader)
  {
    return 0;
  }

  GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
  if (!pixelShader)
  {
    return 0;
  }

  GLuint program = glCreateProgram();
  if (program)
  {
    glAttachShader(program, vertexShader);
    checkGlError("glAttachShader");
    glAttachShader(program, pixelShader);
@@ -155,12 +165,15 @@
    glLinkProgram(program);
    GLint linkStatus = GL_FALSE;
    glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
    if (linkStatus != GL_TRUE)
    {
      GLint bufLength = 0;
      glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
      if (bufLength)
      {
        char* buf = (char*)malloc(bufLength);
        if (buf)
        {
          glGetProgramInfoLog(program, bufLength, NULL, buf);
          LOGE("Could not link program:\n%s\n", buf);
          free(buf);
@@ -175,7 +188,8 @@
//GLuint textureID;

bool glcamera::setupGraphics(int w, int h)
{
  printGLString("Version", GL_VERSION);
  printGLString("Vendor", GL_VENDOR);
  printGLString("Renderer", GL_RENDERER);
@@ -183,7 +197,8 @@
  LOGI("setupGraphics(%d, %d)", w, h);
  gProgram = createProgram(gVertexShader, gFragmentShader);
  if (!gProgram)
  {
    LOGE("Could not create program.");
    return false;
  }
@@ -208,9 +223,10 @@
  return true;
}

void glcamera::renderFrame()
{
  GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, // Position 0
                         0.0f, 0.0f, // TexCoord 0
                         -1.0f, -1.0f, 0.0f, // Position 1
                         0.0f, 1.0f, // TexCoord 1
@@ -219,7 +235,7 @@
                         1.0f, 1.0f, 0.0f, // Position 3
                         1.0f, 0.0f // TexCoord 3
                        };
  GLushort indices[] = {0, 1, 2, 0, 2, 3};
  GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture

  glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
@@ -232,17 +248,15 @@
  checkGlError("glUseProgram");

  // Load the vertex position
  glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, vVertices);
  // Load the texture coordinate
  glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, &vVertices[3]);

  glEnableVertexAttribArray(gvPositionHandle);
  glEnableVertexAttribArray(gvTexCoordHandle);

  // Bind the texture
  glActiveTexture(GL_TEXTURE0);
  glBindTexture(GL_TEXTURE_2D, textureID);

  // Set the sampler texture unit to 0
@@ -257,40 +271,37 @@
  //checkGlError("glDrawArrays");
}

void glcamera::init(int width, int height)
{
  newimage = false;
  nimg = Mat();
  setupGraphics(width, height);
}

void glcamera::step()
{
  if (newimage && !nimg.empty())
  {
    textureID = createSimpleTexture2D(textureID, nimg.ptr<unsigned char> (0), nimg.rows, nimg.cols, nimg.channels());
    newimage = false;
  }
  renderFrame();
}
void glcamera::setTextureImage(const Mat& img)
{
  Size size(256, 256);
  resize(img, nimg, size, 0, 0, cv::INTER_NEAREST);
  newimage = true;
}
void glcamera::drawMatToGL(int idx, image_pool* pool)
{
  Mat img = pool->getImage(idx);

  if (img.empty())
    return; //no image at input_idx!
@@ -299,11 +310,13 @@
}

glcamera::glcamera() :
  newimage(false)
{
  LOGI("glcamera constructor");
}

glcamera::~glcamera()
{
  LOGI("glcamera destructor");
}
@@ -6,17 +6,9 @@
#include <GLES2/gl2ext.h>

#include "image_pool.h"

class glcamera
{
public:
  glcamera();
@@ -25,16 +17,23 @@
  void step();

  void drawMatToGL(int idx, image_pool* pool);
  void setTextureImage(const cv::Mat& img);

private:
  GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels);
  GLuint loadShader(GLenum shaderType, const char* pSource);
  GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
  bool setupGraphics(int w, int h);
  void renderFrame();

  cv::Mat nimg;
  bool newimage;
  GLuint textureID;
  GLuint gProgram;
  GLuint gvPositionHandle;
  GLuint gvTexCoordHandle;
  GLuint gvSamplerHandle;
};

#endif
@@ -5,92 +5,97 @@
#include <android/log.h>
#include <opencv2/imgproc/imgproc.hpp>

using namespace cv;

#define LOG_TAG "libandroid-opencv"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
  JNIEnv *env;
  LOGI("JNI_OnLoad called for opencv");
  return JNI_VERSION_1_4;
}

JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env,
    jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer,
    jint jidx, jint jwidth, jint jheight, jboolean jgrey)
{
  int buff_height = jheight + (jheight/2);
  Size buff_size(jwidth, buff_height);
  image_pool *pool = (image_pool *)ppool;
  Mat mat = pool->getYUV(jidx);
  if (mat.empty() || mat.size() != buff_size)
  {
    mat.create(buff_size, CV_8UC1);
  }
  jsize sz = env->GetArrayLength(jbuffer);
  uchar* buff = mat.ptr<uchar> (0);
  env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*)buff);
  pool->addYUVMat(jidx, mat);

  Mat color = pool->getImage(jidx);
  if (!jgrey)
  {
    if (color.cols != jwidth || color.rows != jheight || color.channels() != 3)
    {
      color.create(jheight, jwidth, CV_8UC3);
    }
    //doesn't work unfortunately..
    //TODO cvtColor(mat,color, CV_YCrCb2RGB);
    color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight,
        color.ptr<uchar> (0), false);
  }
  if (jgrey)
  {
    Mat grey = pool->getGrey(jidx);
    color = grey;
  }
  pool->addImage(jidx, color);
}

image_pool::image_pool()
{
}

image_pool::~image_pool()
{
  __android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called");
}

Mat image_pool::getImage(int i)
{
  return imagesmap[i];
}

Mat image_pool::getGrey(int i)
{
  Mat tm = yuvImagesMap[i];
  if (tm.empty())
    return tm;
  return tm(Range(0, tm.rows * (2.0f/3)), Range::all());
}

Mat image_pool::getYUV(int i)
{
  return yuvImagesMap[i];
}

void image_pool::addYUVMat(int i, Mat mat)
{
  yuvImagesMap[i] = mat;
}

void image_pool::addImage(int i, Mat mat)
{
  imagesmap[i] = mat;
}
#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ
#define IMAGE_POOL_H_ANDROID_KDJFKJ
#include <opencv2/core/core.hpp>
#include <map>

#if ANDROID
#include <jni.h>

#ifdef __cplusplus
extern "C"
{
#endif

JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
@@ -15,34 +17,33 @@ JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
//      JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);

JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint,
                                                                  jint, jint, jboolean);

#ifdef __cplusplus
}
#endif
#endif

class image_pool
{
public:
  image_pool();
  ~image_pool();

  cv::Mat getImage(int i);
  cv::Mat getGrey(int i);
  cv::Mat getYUV(int i);

  int getCount()
  {
    return imagesmap.size();
  }

  /** Adds a mat at the given index - will not do a deep copy, just
   * images[i] = mat
   */
  void addImage(int i, cv::Mat mat);

  /** This function stores the given matrix in the yuvImagesMap. Also,
   * after this call getGrey will work, as the grey image is just the top
   * two thirds of the YUV mat (the Y plane).
@@ -50,13 +51,14 @@
   * \param i index to store yuv image at
   * \param mat the yuv matrix to store
   */
  void addYUVMat(int i, cv::Mat mat);

  // int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
  //
  // void getBitmap(int * outintarray, int size, int idx);

private:
  std::map<int, cv::Mat> imagesmap;
  std::map<int, cv::Mat> yuvImagesMap;
};
#endif
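For orientation, the Java side reaches this pool through the SWIG-generated com.opencv.jni bindings; a minimal sketch of the flow (hypothetical snippet, not part of this commit, assuming the generated wrappers mirror the declarations above and the opencv.addYUVtoPool call used in NativeProcessor below):

	image_pool pool = new image_pool();
	// decode a camera YUV buffer into slot 0; 'true' takes the grey-only path
	opencv.addYUVtoPool(pool, yuvBuffer, 0, width, height, true);
	Mat frame = pool.getImage(0); // single channel in grey mode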
@@ -46,10 +46,8 @@ public:
  ~image_pool();

  Mat getImage(int i);

  void addImage(int i, Mat mat);
...
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>
/*
 YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
@@ -21,41 +14,39 @@
 V (Cr) Sample Period   2   2
 */
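/*
 For a width x height frame this layout works out to:
   Y plane:  width * height bytes (one byte per pixel)
   VU plane: width * height / 2 bytes (interleaved, quarter-resolution V and U)
   total:    width * height * 3 / 2 bytes
 which matches the buff_height = jheight + jheight/2 buffer allocated in
 image_pool.cpp, and is why the grey image is simply the first two thirds of
 the YUV mat's rows.
 */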
/*
 size of a char:
 find . -name limits.h -exec grep CHAR_BIT {} \;
 */

#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif
const int bytes_per_pixel = 2;

void color_convert_common(unsigned char *pY, unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
{
  int i, j;
  int nR, nG, nB;
  int nY, nU, nV;
  unsigned char *out = buffer;
  int offset = 0;

  if (grey)
  {
    memcpy(out, pY, width * height * sizeof(unsigned char));
  }
  else
    // YUV 4:2:0
    for (i = 0; i < height; i++)
    {
      for (j = 0; j < width; j++)
      {
        nY = *(pY + i * width + j);
        nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
        nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);
@@ -65,10 +56,6 @@ void color_convert_common(
        // Yuv Convert
        nY -= 16;

        if (nY < 0)
          nY = 0;

        nB = (int)(1192 * nY + 2066 * nU);
        nG = (int)(1192 * nY - 833 * nV - 400 * nU);
        nR = (int)(1192 * nY + 1634 * nV);
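        /* The constants above are the usual YCbCr-to-RGB coefficients
           (roughly 1.164, 2.018, 0.813, 0.391, 1.596) scaled by 1024 for
           integer fixed-point math, e.g. 1192 ~= 1.164 * 1024; the >> 10
           below undoes that scaling. */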
@@ -77,22 +64,17 @@ void color_convert_common(
        nG = min(262143, max(0, nG));
        nB = min(262143, max(0, nB));

        nR >>= 10;
        nR &= 0xff;
        nG >>= 10;
        nG &= 0xff;
        nB >>= 10;
        nB &= 0xff;

        out[offset++] = (unsigned char)nR;
        out[offset++] = (unsigned char)nG;
        out[offset++] = (unsigned char)nB;
      }
    }
}
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:scrollbars="vertical" android:id="@+id/calibtext" android:text="" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/settings_text" android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/image_size_prompt"/>
<Spinner android:id="@+id/image_size"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/image_size_prompt"
android:entries="@array/image_sizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/camera_mode_prompt"/>
<Spinner android:id="@+id/camera_mode"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/camera_mode_prompt"
android:entries="@array/camera_mode">
</Spinner>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/patterntext" android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Corners in width direction:"/>
<Spinner android:id="@+id/rows"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chesspromptx"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="Corners in height direction:"/>
<Spinner android:id="@+id/cols"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chessprompty"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="CameraParams">
<attr name="preview_width" format="integer"/>
<attr name="preview_height" format="integer"/>
</declare-styleable>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="chesssizes">
<item>3</item>
<item>4</item>
<item>5</item>
<item>6</item>
<item>7</item>
<item>8</item>
<item>9</item>
<item>10</item>
<item>11</item>
<item>12</item>
<item>13</item>
</string-array>
<string name="chesspromptx">
Choose the width:</string>
<string name="chessprompty">
Choose the height:</string>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="image_sizes">
<item>320x240</item>
<item>400x300</item>
<item>600x400</item>
<item>800x600</item>
<item>1000x800</item>
</string-array>
<string-array name="camera_mode">
<item>color</item>
<item>BW</item>
</string-array>
<string name="image_size_prompt">
Image Size:\n(may not be exact)
</string>
<string name="camera_mode_prompt">
Camera Mode:
</string>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Calibration</string>
<string name="patternsize">Pattern Size</string>
<string name="patterntext">Please choose the width and height (number of inside corners) of the checker
board pattern you will be using for calibration. Default is 6 by 8 corners. You may find a checkerboard pattern at
http://opencv.willowgarage.com/pattern</string>
<string name="patternlink">http://opencv.willowgarage.com/pattern</string>
<string name="camera_settings_label">Camera Settings</string>
<string name="settings_text">Change the camera settings</string>
<string name="calibration_service_started">Calibration calculations have started...</string>
<string name="calibration_service_stopped">Calibration calculations has stopped.</string>
<string name="calibration_service_finished">Calibration finished, you camera is calibrated.</string>
<string name="calibration_service_label">Calibration</string>
<string name="calibration_not_enough">Please capture atleast 10 images of the pattern!</string>
</resources>
@@ -6,3 +6,4 @@ OPENCV_CONFIG=../build/android-opencv.mk
#you can download the ndk from http://www.crystax.net/android/ndk-r4.php
ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax
ARM_TARGETS=armeabi armeabi-v7a
\ No newline at end of file
package com.opencv.calibration;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import android.app.Activity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.widget.TextView;
import com.opencv.R;
public class CalibrationViewer extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.calibrationviewer);
Bundle extras = getIntent().getExtras();
String filename = extras.getString("calibfile");
if (filename != null) {
TextView text = (TextView) findViewById(R.id.calibtext);
text.setMovementMethod(new ScrollingMovementMethod());
try {
BufferedReader reader = new BufferedReader(new FileReader(
filename));
while (reader.ready()) {
text.append(reader.readLine() +"\n");
}
} catch (FileNotFoundException e) {
Log.e("opencv", "could not open calibration file at:"
+ filename);
} catch (IOException e) {
Log.e("opencv", "error reading file: "
+ filename);
}
}
}
}
package com.opencv.calibration;
import com.opencv.R;
import com.opencv.jni.Size;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class ChessBoardChooser extends Activity {
public static final String CHESS_SIZE = "chess_size";
public static final int DEFAULT_WIDTH = 6;
public static final int DEFAULT_HEIGHT = 8;
public static final int LOWEST = 3;
class DimChooser implements OnItemSelectedListener {
private String dim;
public DimChooser(String dim) {
this.dim = dim;
}
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int pos,
long arg3) {
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
Editor editor = settings.edit();
editor.putInt(dim, pos + LOWEST);
editor.commit();
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.chesssizer);
// Restore preferences
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
Spinner wspin, hspin;
wspin = (Spinner) findViewById(R.id.rows);
hspin = (Spinner) findViewById(R.id.cols);
wspin.setSelection(width - LOWEST);
hspin.setSelection(height - LOWEST);
wspin.setOnItemSelectedListener(new DimChooser("width"));
hspin.setOnItemSelectedListener(new DimChooser("height"));
}
public static Size getPatternSize(Context ctx) {
SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
return new Size(width, height);
}
}
package com.opencv.calibration.services;
import java.io.File;
import java.io.IOException;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
import com.opencv.R;
import com.opencv.calibration.CalibrationViewer;
import com.opencv.calibration.Calibrator;
import com.opencv.calibration.Calibrator.CalibrationCallback;
public class CalibrationService extends Service implements CalibrationCallback {
Class<?> activity;
int icon;
File calibration_file;
public void startCalibrating(Class<?> activitycaller,int icon_id, Calibrator calibrator, File calibration_file)
throws IOException {
activity = activitycaller;
icon = icon_id;
// Display a notification about us starting. We put an icon in the
// status bar.
showNotification();
this.calibration_file = calibration_file;
calibrator.setCallback(this);
calibrator.calibrate(calibration_file);
}
private NotificationManager mNM;
/**
* Class for clients to access. Because we know this service always runs in
* the same process as its clients, we don't need to deal with IPC.
*/
public class CalibrationServiceBinder extends Binder {
public CalibrationService getService() {
return CalibrationService.this;
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i("LocalService", "Received start id " + startId + ": " + intent);
		// We don't want the system to recreate this service after its
		// process is killed, so return not sticky.
		return START_NOT_STICKY;
}
@Override
public void onCreate() {
mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
}
@Override
public void onDestroy() {
// Cancel the persistent notification.
// mNM.cancel(R.string.calibration_service_started);
// Tell the user we stopped.
Toast.makeText(this, R.string.calibration_service_finished,
Toast.LENGTH_SHORT).show();
}
private final IBinder mBinder = new CalibrationServiceBinder();
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
/**
* Show a notification while this service is running.
*/
private void showNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_started);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, activity), 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a layout id because it is a unique number. We use it later to
// cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
	/**
	 * Show a notification when calibration has finished.
	 */
private void doneNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_finished);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
Intent intent = new Intent(this,CalibrationViewer.class);
intent.putExtra("calibfile", calibration_file.getAbsolutePath());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
intent, 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a layout id because it is a unique number. We use it later to
// cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
@Override
public void onFoundChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
@Override
public void onDoneCalibration(Calibrator calibration, File calibfile) {
doneNotification();
stopSelf();
}
@Override
public void onFailedChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
}
package com.opencv.camera;
import com.opencv.R;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class CameraConfig extends Activity {
public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS";
public static final String CAMERA_MODE = "camera_mode";
public static final String IMAGE_WIDTH = "IMAGE_WIDTH";
public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT";
public static final int CAMERA_MODE_BW = 0;
public static final int CAMERA_MODE_COLOR = 1;
public static int readCameraMode(Context ctx) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW);
return mode;
}
static public void setCameraMode(Context context, String mode) {
int m = 0;
if (mode.equals("BW")) {
m = CAMERA_MODE_BW;
} else if (mode.equals("color"))
m = CAMERA_MODE_COLOR;
setCameraMode(context, m);
}
private static String sizeToString(int[] size) {
return size[0] + "x" + size[1];
}
private static void parseStrToSize(String ssize, int[] size) {
String sz[] = ssize.split("x");
size[0] = Integer.valueOf(sz[0]);
size[1] = Integer.valueOf(sz[1]);
}
public static void readImageSize(Context ctx, int[] size) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
size[0] = settings.getInt(IMAGE_WIDTH, 600);
size[1] = settings.getInt(IMAGE_HEIGHT, 600);
}
public static void setCameraMode(Context ctx, int mode) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(CAMERA_MODE, mode);
editor.commit();
}
public static void setImageSize(Context ctx, String strsize) {
int size[] = { 0, 0 };
parseStrToSize(strsize, size);
setImageSize(ctx, size[0], size[1]);
}
public static void setImageSize(Context ctx, int width, int height) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(IMAGE_WIDTH, width);
editor.putInt(IMAGE_HEIGHT, height);
editor.commit();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.camerasettings);
int mode = readCameraMode(this);
int size[] = { 0, 0 };
readImageSize(this, size);
final Spinner size_spinner;
final Spinner mode_spinner;
size_spinner = (Spinner) findViewById(R.id.image_size);
mode_spinner = (Spinner) findViewById(R.id.camera_mode);
String strsize = sizeToString(size);
String strmode = modeToString(mode);
String sizes[] = getResources().getStringArray(R.array.image_sizes);
int i = 1;
for (String x : sizes) {
if (x.equals(strsize))
break;
i++;
}
if(i <= sizes.length)
size_spinner.setSelection(i-1);
i = 1;
String modes[] = getResources().getStringArray(R.array.camera_mode);
for (String x :modes) {
if (x.equals(strmode))
break;
i++;
}
if(i <= modes.length)
mode_spinner.setSelection(i-1);
size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = size_spinner.getItemAtPosition(position);
if (o != null)
setImageSize(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = mode_spinner.getItemAtPosition(position);
if (o != null)
setCameraMode(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
private String modeToString(int mode) {
switch (mode) {
case CAMERA_MODE_BW:
return "BW";
case CAMERA_MODE_COLOR:
return "color";
default:
return "";
}
}
}
@@ -11,7 +11,152 @@ import android.util.Log;
import com.opencv.jni.image_pool;
import com.opencv.jni.opencv;
/** The NativeProcessor is a native processing stack engine.
 *
 * What this means is that the NativeProcessor handles loading
 * live camera frames into native memory space, i.e. the image_pool,
 * and then calls a stack of PoolCallbacks, passing them the
 * image_pool.
 *
 * The image_pool index 0 is populated with the live video image.
 *
 * Any modifications to the pool are made in place, so you may
 * pass on changes to the pool to the next PoolCallback in the stack.
 *
 */
public class NativeProcessor {
	/** Users that would like access to live video frames
	 * should implement a PoolCallback.
	 * The idx and pool contain the images; specifically, idx == 0 is the
	 * live video frame.
	 */
	static public interface PoolCallback {
		void process(int idx, image_pool pool, long timestamp,
				NativeProcessor nativeProcessor);
	}
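	For illustration, a minimal callback might look like this (hypothetical example, not part of this commit; it assumes the SWIG-generated image_pool wrapper exposes getCount() as declared in image_pool.h, and android.util.Log is imported):

		static class FrameLogger implements PoolCallback {
			@Override
			public void process(int idx, image_pool pool, long timestamp,
					NativeProcessor nativeProcessor) {
				// idx == 0 is the live frame; edits to the pool are in place
				// and visible to the next PoolCallback in the stack.
				Log.i("FrameLogger", "frame at " + timestamp + ", pool size "
						+ pool.getCount());
			}
		}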
	/** At every frame, each PoolCallback is called in order and is passed
	 * the same pool and index.
	 *
	 * @param stack A list of PoolCallback objects that will be called in order
	 */
public void addCallbackStack(LinkedList<PoolCallback> stack) {
try {
while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
}
try {
nextStack = stack;
} finally {
stacklock.unlock();
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
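	A client would then hand the processor an ordered stack, e.g. (hypothetical usage sketch; 'processor' is a NativeProcessor instance and java.util.LinkedList is imported):

		LinkedList<NativeProcessor.PoolCallback> stack =
				new LinkedList<NativeProcessor.PoolCallback>();
		stack.add(new FrameLogger());
		processor.addCallbackStack(stack); // swapped in before the next frame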
	/**
	 * Create a NativeProcessor. The processor will not start running until
	 * start is called, at which point it will operate in its own thread and
	 * sleep until a post is called. The processor should not be started until
	 * an onSurfaceChange event, and should be shut down when the surface is
	 * destroyed by calling interrupt.
	 *
	 */
public NativeProcessor() {
gray_scale_only = false;
}
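	A plausible lifecycle wiring, from a SurfaceHolder.Callback in the same package (sketch only; the NativePreviewer referenced elsewhere in these docs is not part of this diff):

		public void surfaceChanged(SurfaceHolder holder, int fmt, int w, int h) {
			processor.start();
		}

		public void surfaceDestroyed(SurfaceHolder holder) {
			processor.stop();
		}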
	/** Grayscale only is much faster because the yuv does not get decoded, and grayscale is only one
	 * byte per pixel - giving fast opengl texture loading.
	 *
	 * You still have access to the whole yuv image, but grayscale is the only thing immediately
	 * available to use without further effort.
	 *
	 * Suggestion - use grayscale only and save your yuv images to disk if you would like color images
	 *
	 * Also, in grayscale mode, the images in the pool are only single channel, so please keep this in mind
	 * when accessing the color images - check cv::Mat::channels() or cv::Mat::type() if you're messing
	 * with color channels
	 *
	 * @param grayscale true if you want to only process grayscale images
	 */
public void setGrayscale(boolean grayscale){
gray_scale_only = grayscale;
}
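	So a client that only needs luminance, e.g. for chessboard detection, would simply call (hypothetical usage):

		processor.setGrayscale(true); // pool image at idx 0 becomes single-channel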
	/**
	 * A callback that allows the NativeProcessor to pass back the buffer when
	 * it has completed processing a frame.
	 */
	static protected interface NativeProcessorCallback {
		/**
		 * Called after processing; meant to be received by the NativePreviewer,
		 * which reuses the byte buffer for the camera preview...
		 *
		 * @param buffer
		 *            the buffer passed to the NativeProcessor with post.
		 */
		void onDoneNativeProcessing(byte[] buffer);
	}
	protected void stop() {
		mthread.interrupt();
		try {
			mthread.join();
		} catch (InterruptedException e) {
			Log.w("NativeProcessor",
					"interrupted while stopping " + e.getMessage());
		}
		mthread = null;
	}
protected void start() {
mthread = new ProcessorThread();
mthread.start();
}
	/**
	 * post is used to notify the processor that a preview frame is ready; this
	 * will return almost immediately. If the processor is busy, it returns
	 * false and the call is essentially a nop.
	 *
	 * @param buffer
	 *            a preview frame from the Android Camera onPreviewFrame
	 *            callback
	 * @param width
	 *            of preview frame
	 * @param height
	 *            of preview frame
	 * @param format
	 *            of preview frame
	 * @return true if the processor wasn't busy and accepted the post, false if
	 *         the processor is still processing.
	 */
protected boolean post(byte[] buffer, int width, int height, int format,
long timestamp, NativeProcessorCallback callback) {
lock.lock();
try {
NPPostObject pobj = new NPPostObject(buffer, width, height, format,
timestamp, callback);
postobjects.addFirst(pobj);
} finally {
lock.unlock();
}
return true;
}
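	A hypothetical producer, e.g. the camera preview callback, might feed frames like this (sketch only; android.hardware.Camera and android.graphics.PixelFormat assumed, and 'this' implements NativeProcessorCallback):

		public void onPreviewFrame(byte[] data, Camera camera) {
			Camera.Size s = camera.getParameters().getPreviewSize();
			processor.post(data, s.width, s.height, PixelFormat.YCbCr_420_SP,
					System.currentTimeMillis(), this);
		}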
	private class ProcessorThread extends Thread {
@@ -20,7 +165,7 @@
			if (pobj.format == PixelFormat.YCbCr_420_SP) {
				// add as color image, because we know how to decode this
				opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
						pobj.height, gray_scale_only);
			} else if (pobj.format == PixelFormat.YCbCr_422_SP) {
				// add as gray image, because this format is not coded
@@ -31,7 +176,6 @@
			} else
				throw new Exception("bad pixel format!");

			for (PoolCallback x : stack) {
				if (interrupted()) {
					throw new InterruptedException(
@@ -40,11 +184,9 @@
				x.process(0, pool, pobj.timestamp, NativeProcessor.this);
			}

			pobj.done(); // tell the postobject that we're done doing
							// all the processing.
		}

		@Override
@@ -54,7 +196,7 @@
			while (true) {
				yield();

				while (!stacklock.tryLock(5, TimeUnit.MILLISECONDS)) {
				}
				try {
					if (nextStack != null) {
@@ -67,10 +209,11 @@
				NPPostObject pobj = null;

				while (!lock.tryLock(5, TimeUnit.MILLISECONDS)) {
				}
				try {
					if (postobjects.isEmpty())
						continue;
					pobj = postobjects.removeLast();
				} finally {
@@ -78,12 +221,12 @@
				}

				if (interrupted())
					throw new InterruptedException();

				if (stack != null && pobj != null)
					process(pobj);
			}
		} catch (InterruptedException e) {
@@ -100,101 +243,9 @@
		}
	static private class NPPostObject {
		public NPPostObject(byte[] buffer, int width, int height, int format,
				long timestamp, NativeProcessorCallback callback) {
			this.buffer = buffer;
			this.width = width;
			this.height = height;
@@ -215,6 +266,7 @@
		NativeProcessorCallback callback;
	}

	private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();

	private image_pool pool = new image_pool();
@@ -222,20 +274,12 @@
	private final Lock lock = new ReentrantLock();

	private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();

	private boolean gray_scale_only;

	private Lock stacklock = new ReentrantLock();

	private LinkedList<PoolCallback> nextStack;

	private ProcessorThread mthread;
}
\ No newline at end of file