Commit 3a932b0f authored by Ethan Rublee

Refactoring the image_pool for Android, and adding some common utils for camera configuration. Also experimenting with optimization - grayscale preview is much faster than color right now.
parent 077dd777
@@ -11,6 +11,10 @@
         regular Android project.
    -->
    <activity android:name="com.opencv.OpenCV" />
    <activity android:name="com.opencv.calibration.ChessBoardChooser"/>
    <activity android:name="com.opencv.calibration.CameraConfig"/>
    <activity android:name="com.opencv.calibration.CalibrationViewer"/>
    <service android:name="com.opencv.calibration.services.CalibrationService"/>
  </application>
  <!-- set the opengl version
  <uses-feature android:glEsVersion="0x00020000" />-->
...
@@ -12,6 +12,10 @@ $(info gedit $(LOCAL_ENV_MK))
$(error Please setup the $(LOCAL_ENV_MK) - the default was just created')
endif

ifndef ARM_TARGETS
ARM_TARGETS=armeabi armeabi-v7a
endif

ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)
$(info OPENCV_CONFIG = $(OPENCV_CONFIG))
@@ -44,7 +48,7 @@ all: $(LIB) nogdb
#calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR
$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
	$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
	PROJECT_PATH=$(PROJECT_PATH) ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS)

#this creates the swig wrappers
@@ -70,5 +74,5 @@ clean-swig:
#does clean-swig and then uses the ndk-build clean
clean: clean-swig
	$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
	PROJECT_PATH=$(PROJECT_PATH) clean ARM_TARGETS=$(ARM_TARGETS) V=$(V) $(NDK_FLAGS)

APP_ABI := $(ARM_TARGETS)
APP_MODULES := android-opencv
@@ -14,8 +14,6 @@
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
@@ -24,36 +22,33 @@
#define DETECT_STAR 1
#define DETECT_SURF 2

class Calibration
{
public:
  Calibration();
  virtual ~Calibration();

  bool detectAndDrawChessboard(int idx, image_pool* pool);
  void resetChess();
  int getNumberDetectedChessboards();
  void calibrate(const char* filename);
  void drawText(int idx, image_pool* pool, const char* text);

  cv::Size patternsize;

private:
  std::vector<cv::KeyPoint> keypoints;
  std::vector<std::vector<cv::Point2f> > imagepoints;
  cv::Mat K;
  cv::Mat distortion;
  cv::Size imgsize;
};
#endif /* PROCESSOR_H_ */
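
For orientation, a minimal sketch (not part of the commit) of how this reorganized Calibration API might be driven once per frame; the output path, the helper name, and the 10-view threshold are assumptions, the threshold being borrowed from the calibration_not_enough string further down:

  #include "image_pool.h"
  // Assumes the Calibration declaration from the header above is also included.

  // Hypothetical per-frame driver: collect chessboard views, then calibrate.
  void process_frame(Calibration& calib, image_pool* pool, int idx)
  {
    calib.patternsize = cv::Size(6, 8);              // inner corners; matches the ChessBoardChooser defaults
    if (calib.detectAndDrawChessboard(idx, pool))
      calib.drawText(idx, pool, "chessboard found");

    if (calib.getNumberDetectedChessboards() >= 10)  // "capture at least 10 images of the pattern"
    {
      calib.calibrate("/sdcard/calibration.yml");    // output path is an assumption
      calib.resetChess();
    }
  }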
@@ -6,35 +6,34 @@
#include <GLES2/gl2ext.h>
#include "image_pool.h"

class glcamera
{
public:
  glcamera();
  ~glcamera();
  void init(int width, int height);
  void step();
  void drawMatToGL(int idx, image_pool* pool);
  void setTextureImage(const cv::Mat& img);

private:
  GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels);
  GLuint loadShader(GLenum shaderType, const char* pSource);
  GLuint createProgram(const char* pVertexSource, const char* pFragmentSource);
  bool setupGraphics(int w, int h);
  void renderFrame();

  cv::Mat nimg;
  bool newimage;
  GLuint textureID;
  GLuint gProgram;
  GLuint gvPositionHandle;
  GLuint gvTexCoordHandle;
  GLuint gvSamplerHandle;
};
#endif
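
A rough sketch (not part of the commit) of how glcamera is presumably meant to be used from the native side of a GL render loop, assuming a current GL context and an image_pool that already holds the frame at idx; the callback names are placeholders:

  #include "image_pool.h"
  // Assumes the glcamera declaration from the header above is also included.

  glcamera cam;

  // Hypothetical native hook for GLSurfaceView.Renderer.onSurfaceChanged.
  void on_surface_changed(int width, int height)
  {
    cam.init(width, height);      // compile shaders, create the texture
  }

  // Hypothetical native hook for GLSurfaceView.Renderer.onDrawFrame.
  void on_draw_frame(image_pool* pool, int idx)
  {
    cam.drawMatToGL(idx, pool);   // upload the pool's cv::Mat at idx as the GL texture
    cam.step();                   // draw the textured quad
  }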
@@ -5,92 +5,97 @@
#include <android/log.h>
#include <opencv2/imgproc/imgproc.hpp>

using namespace cv;

#define LOG_TAG "libandroid-opencv"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)

JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved)
{
  JNIEnv *env;
  LOGI("JNI_OnLoad called for opencv");
  return JNI_VERSION_1_4;
}

JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env,
    jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer,
    jint jidx, jint jwidth, jint jheight, jboolean jgrey)
{
  int buff_height = jheight + (jheight / 2);
  Size buff_size(jwidth, buff_height);
  image_pool *pool = (image_pool *) ppool;

  Mat mat = pool->getYUV(jidx);
  if (mat.empty() || mat.size() != buff_size)
  {
    mat.create(buff_size, CV_8UC1);
  }

  jsize sz = env->GetArrayLength(jbuffer);
  uchar* buff = mat.ptr<uchar> (0);
  env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*) buff);

  pool->addYUVMat(jidx, mat);

  Mat color = pool->getImage(jidx);
  if (!jgrey)
  {
    if (color.cols != jwidth || color.rows != jheight || color.channels() != 3)
    {
      color.create(jheight, jwidth, CV_8UC3);
    }
    //doesn't work unfortunately..
    //TODO cvtColor(mat,color, CV_YCrCb2RGB);
    color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight,
        color.ptr<uchar> (0), false);
  }
  if (jgrey)
  {
    Mat grey = pool->getGrey(jidx);
    color = grey;
  }

  pool->addImage(jidx, color);
}

image_pool::image_pool()
{
}

image_pool::~image_pool()
{
  __android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called");
}

Mat image_pool::getImage(int i)
{
  return imagesmap[i];
}

Mat image_pool::getGrey(int i)
{
  Mat tm = yuvImagesMap[i];
  if (tm.empty())
    return tm;
  return tm(Range(0, tm.rows * (2.0f / 3)), Range::all());
}

Mat image_pool::getYUV(int i)
{
  return yuvImagesMap[i];
}

void image_pool::addYUVMat(int i, Mat mat)
{
  yuvImagesMap[i] = mat;
}

void image_pool::addImage(int i, Mat mat)
{
  imagesmap[i] = mat;
}
#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ
#define IMAGE_POOL_H_ANDROID_KDJFKJ

#include <opencv2/core/core.hpp>
#include <map>

#if ANDROID
#include <jni.h>

#ifdef __cplusplus
extern "C"
{
#endif

JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
@@ -15,48 +17,48 @@ JNIEXPORT jint JNI_OnLoad(JavaVM* vm, void* reserved);
//    JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);

JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint,
                                                                  jint, jint, jboolean);

#ifdef __cplusplus
}
#endif
#endif

class image_pool
{
public:
  image_pool();
  ~image_pool();

  cv::Mat getImage(int i);
  cv::Mat getGrey(int i);
  cv::Mat getYUV(int i);

  int getCount()
  {
    return imagesmap.size();
  }

  /** Adds a mat at the given index - will not do a deep copy, just images[i] = mat
   */
  void addImage(int i, cv::Mat mat);

  /** This function stores the given matrix in the yuvImagesMap. Also,
   * after this call getGrey will work, as the grey image is just the
   * Y plane (the top two thirds) of the YUV mat.
   *
   * \param i index to store yuv image at
   * \param mat the yuv matrix to store
   */
  void addYUVMat(int i, cv::Mat mat);

  // int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
  //
  // void getBitmap(int * outintarray, int size, int idx);

private:
  std::map<int, cv::Mat> imagesmap;
  std::map<int, cv::Mat> yuvImagesMap;
};
#endif
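
To make the YUV bookkeeping concrete, a small sketch (not part of the commit) of what the JNI addYUVtoPool path boils down to in plain OpenCV calls. It assumes an NV21 (YUV420sp) preview buffer of width x (height + height/2) bytes, whose top two thirds are the Y plane, which is exactly the view that getGrey() returns:

  #include <cstring>
  #include <opencv2/core/core.hpp>
  // Assumes the image_pool declaration from the header above is also included.

  void pool_yuv_sketch(image_pool& pool, const unsigned char* nv21, int width, int height, int idx)
  {
    // YUV420sp: Y plane (height rows) followed by interleaved VU (height/2 rows).
    cv::Mat yuv(height + height / 2, width, CV_8UC1);
    std::memcpy(yuv.data, nv21, yuv.total());

    pool.addYUVMat(idx, yuv);

    // getGrey() is a no-copy view of the top two thirds of the YUV mat, i.e. the Y plane.
    cv::Mat grey = pool.getGrey(idx);   // grey.rows == height
  }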
@@ -46,10 +46,8 @@ public:
  ~image_pool();

  Mat getImage(int i);
  void addImage(int i, Mat mat);
...
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>

#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif

/*
 YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
 U/V plane containing 8 bit 2x2 subsampled chroma samples,
 except the interleave order of U and V is reversed.

                       H V
 Y Sample Period       1 1
 U (Cb) Sample Period  2 2
 V (Cr) Sample Period  2 2
 */

/*
 size of a char:
 find . -name limits.h -exec grep CHAR_BIT {} \;
 */

const int bytes_per_pixel = 2;

void color_convert_common(unsigned char *pY, unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
{
  int i, j;
  int nR, nG, nB;
  int nY, nU, nV;
  unsigned char *out = buffer;
  int offset = 0;

  if (grey)
  {
    memcpy(out, pY, width * height * sizeof(unsigned char));
  }
  else
  {
    // YUV 4:2:0
    for (i = 0; i < height; i++)
    {
      for (j = 0; j < width; j++)
      {
        nY = *(pY + i * width + j);
        nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
        nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);

        // Yuv Convert
        nY -= 16;
        nU -= 128;
        nV -= 128;

        if (nY < 0)
          nY = 0;

        nB = (int)(1192 * nY + 2066 * nU);
        nG = (int)(1192 * nY - 833 * nV - 400 * nU);
        nR = (int)(1192 * nY + 1634 * nV);

        nR = min(262143, max(0, nR));
        nG = min(262143, max(0, nG));
        nB = min(262143, max(0, nB));

        nR >>= 10;
        nR &= 0xff;
        nG >>= 10;
        nG &= 0xff;
        nB >>= 10;
        nB &= 0xff;

        out[offset++] = (unsigned char)nR;
        out[offset++] = (unsigned char)nG;
        out[offset++] = (unsigned char)nB;
      }
    }
  }
}
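
The integer coefficients in color_convert_common are the usual BT.601 YCbCr-to-RGB constants scaled by 1024 (1192 ≈ 1.164*1024, 1634 ≈ 1.596*1024, 833 ≈ 0.813*1024, 400 ≈ 0.391*1024, 2066 ≈ 2.018*1024); the >>= 10 shifts that scale factor back out. For reference, a floating-point sketch of the same per-pixel conversion (not part of the commit):

  #include <stdint.h>

  // Floating-point equivalent of one pixel of color_convert_common (reference sketch only).
  static void yuv_pixel_to_rgb(int y, int u, int v, uint8_t* r, uint8_t* g, uint8_t* b)
  {
    double fy = y - 16;
    double fu = u - 128;   // Cb
    double fv = v - 128;   // Cr
    if (fy < 0)
      fy = 0;

    double fr = 1.164 * fy + 1.596 * fv;
    double fg = 1.164 * fy - 0.813 * fv - 0.391 * fu;
    double fb = 1.164 * fy + 2.018 * fu;

    // Clamp to [0, 255], as the fixed-point version does with min/max and the masks.
    *r = (uint8_t)(fr < 0 ? 0 : (fr > 255 ? 255 : fr));
    *g = (uint8_t)(fg < 0 ? 0 : (fg > 255 ? 255 : fg));
    *b = (uint8_t)(fb < 0 ? 0 : (fb > 255 ? 255 : fb));
  }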
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:scrollbars="vertical" android:id="@+id/calibtext" android:text="" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/settings_text" android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="@string/image_size_prompt"/>
<Spinner android:id="@+id/image_size"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/image_size_prompt"
android:entries="@array/image_sizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/camera_mode_prompt"/>
<Spinner android:id="@+id/camera_mode"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/camera_mode_prompt"
android:entries="@array/camera_mode">
</Spinner>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/patterntext" android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Corners in width direction:"/>
<Spinner android:id="@+id/rows"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chesspromptx"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="Corners in height direction:"/>
<Spinner android:id="@+id/cols"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chessprompty"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="CameraParams">
<attr name="preview_width" format="integer"/>
<attr name="preview_height" format="integer"/>
</declare-styleable>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="chesssizes">
<item>3</item>
<item>4</item>
<item>5</item>
<item>6</item>
<item>7</item>
<item>8</item>
<item>9</item>
<item>10</item>
<item>11</item>
<item>12</item>
<item>13</item>
</string-array>
<string name="chesspromptx">
Choose the width:</string>
<string name="chessprompty">
Choose the height:</string>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="image_sizes">
<item>320x240</item>
<item>400x300</item>
<item>600x400</item>
<item>800x600</item>
<item>1000x800</item>
</string-array>
<string-array name="camera_mode">
<item>color</item>
<item>BW</item>
</string-array>
<string name="image_size_prompt">
Image Size:\n(may not be exact)
</string>
<string name="camera_mode_prompt">
Camera Mode:
</string>
</resources>
\ No newline at end of file
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Calibration</string>
<string name="patternsize">Pattern Size</string>
<string name="patterntext">Please choose the width and height (number of inside corners) of the checker
board pattern you will be using for calibration. Default is 6 by 8 corners. You may find a checkerboard pattern at
http://opencv.willowgarage.com/pattern</string>
<string name="patternlink">http://opencv.willowgarage.com/pattern</string>
<string name="camera_settings_label">Camera Settings</string>
<string name="settings_text">Change the camera settings</string>
<string name="calibration_service_started">Calibration calculations have started...</string>
<string name="calibration_service_stopped">Calibration calculations has stopped.</string>
<string name="calibration_service_finished">Calibration finished, you camera is calibrated.</string>
<string name="calibration_service_label">Calibration</string>
<string name="calibration_not_enough">Please capture atleast 10 images of the pattern!</string>
</resources>
@@ -6,3 +6,4 @@ OPENCV_CONFIG=../build/android-opencv.mk
#you can download the ndk from http://www.crystax.net/android/ndk-r4.php
ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax
ARM_TARGETS=armeabi armeabi-v7a
\ No newline at end of file
package com.opencv.calibration;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import android.app.Activity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.widget.TextView;
import com.opencv.R;
public class CalibrationViewer extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.calibrationviewer);
Bundle extras = getIntent().getExtras();
String filename = extras.getString("calibfile");
if (filename != null) {
TextView text = (TextView) findViewById(R.id.calibtext);
text.setMovementMethod(new ScrollingMovementMethod());
try {
BufferedReader reader = new BufferedReader(new FileReader(
filename));
while (reader.ready()) {
text.append(reader.readLine() +"\n");
}
} catch (FileNotFoundException e) {
Log.e("opencv", "could not open calibration file at:"
+ filename);
} catch (IOException e) {
Log.e("opencv", "error reading file: "
+ filename);
}
}
}
}
package com.opencv.calibration;
import com.opencv.R;
import com.opencv.jni.Size;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class ChessBoardChooser extends Activity {
public static final String CHESS_SIZE = "chess_size";
public static final int DEFAULT_WIDTH = 6;
public static final int DEFAULT_HEIGHT = 8;
public static final int LOWEST = 3;
class DimChooser implements OnItemSelectedListener {
private String dim;
public DimChooser(String dim) {
this.dim = dim;
}
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int pos,
long arg3) {
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
Editor editor = settings.edit();
editor.putInt(dim, pos + LOWEST);
editor.commit();
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.chesssizer);
// Restore preferences
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
Spinner wspin, hspin;
wspin = (Spinner) findViewById(R.id.rows);
hspin = (Spinner) findViewById(R.id.cols);
wspin.setSelection(width - LOWEST);
hspin.setSelection(height - LOWEST);
wspin.setOnItemSelectedListener(new DimChooser("width"));
hspin.setOnItemSelectedListener(new DimChooser("height"));
}
public static Size getPatternSize(Context ctx) {
SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
return new Size(width, height);
}
}
package com.opencv.calibration.services;
import java.io.File;
import java.io.IOException;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
import com.opencv.R;
import com.opencv.calibration.CalibrationViewer;
import com.opencv.calibration.Calibrator;
import com.opencv.calibration.Calibrator.CalibrationCallback;
public class CalibrationService extends Service implements CalibrationCallback {
Class<?> activity;
int icon;
File calibration_file;
public void startCalibrating(Class<?> activitycaller,int icon_id, Calibrator calibrator, File calibration_file)
throws IOException {
activity = activitycaller;
icon = icon_id;
// Display a notification about us starting. We put an icon in the
// status bar.
showNotification();
this.calibration_file = calibration_file;
calibrator.setCallback(this);
calibrator.calibrate(calibration_file);
}
private NotificationManager mNM;
/**
* Class for clients to access. Because we know this service always runs in
* the same process as its clients, we don't need to deal with IPC.
*/
public class CalibrationServiceBinder extends Binder {
public CalibrationService getService() {
return CalibrationService.this;
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i("LocalService", "Received start id " + startId + ": " + intent);
// The calibration work is started explicitly via startCalibrating(), so the
// service does not need to be restarted automatically if it is killed.
return START_NOT_STICKY;
}
@Override
public void onCreate() {
mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
}
@Override
public void onDestroy() {
// Cancel the persistent notification.
// mNM.cancel(R.string.calibration_service_started);
// Tell the user we stopped.
Toast.makeText(this, R.string.calibration_service_finished,
Toast.LENGTH_SHORT).show();
}
private final IBinder mBinder = new CalibrationServiceBinder();
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
/**
* Show a notification while this service is running.
*/
private void showNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_started);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, activity), 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a layout id because it is a unique number. We use it later to
// cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
/**
* Show a notification when calibration has finished.
*/
private void doneNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_finished);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
Intent intent = new Intent(this,CalibrationViewer.class);
intent.putExtra("calibfile", calibration_file.getAbsolutePath());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
intent, 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a layout id because it is a unique number. We use it later to
// cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
@Override
public void onFoundChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
@Override
public void onDoneCalibration(Calibrator calibration, File calibfile) {
doneNotification();
stopSelf();
}
@Override
public void onFailedChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
}
package com.opencv.camera;
import com.opencv.R;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class CameraConfig extends Activity {
public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS";
public static final String CAMERA_MODE = "camera_mode";
public static final String IMAGE_WIDTH = "IMAGE_WIDTH";
public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT";
public static final int CAMERA_MODE_BW = 0;
public static final int CAMERA_MODE_COLOR = 1;
public static int readCameraMode(Context ctx) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW);
return mode;
}
static public void setCameraMode(Context context, String mode) {
int m = 0;
if (mode.equals("BW")) {
m = CAMERA_MODE_BW;
} else if (mode.equals("color"))
m = CAMERA_MODE_COLOR;
setCameraMode(context, m);
}
private static String sizeToString(int[] size) {
return size[0] + "x" + size[1];
}
private static void parseStrToSize(String ssize, int[] size) {
String sz[] = ssize.split("x");
size[0] = Integer.valueOf(sz[0]);
size[1] = Integer.valueOf(sz[1]);
}
public static void readImageSize(Context ctx, int[] size) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
size[0] = settings.getInt(IMAGE_WIDTH, 600);
size[1] = settings.getInt(IMAGE_HEIGHT, 600);
}
public static void setCameraMode(Context ctx, int mode) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(CAMERA_MODE, mode);
editor.commit();
}
public static void setImageSize(Context ctx, String strsize) {
int size[] = { 0, 0 };
parseStrToSize(strsize, size);
setImageSize(ctx, size[0], size[1]);
}
public static void setImageSize(Context ctx, int width, int height) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(IMAGE_WIDTH, width);
editor.putInt(IMAGE_HEIGHT, height);
editor.commit();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.camerasettings);
int mode = readCameraMode(this);
int size[] = { 0, 0 };
readImageSize(this, size);
final Spinner size_spinner;
final Spinner mode_spinner;
size_spinner = (Spinner) findViewById(R.id.image_size);
mode_spinner = (Spinner) findViewById(R.id.camera_mode);
String strsize = sizeToString(size);
String strmode = modeToString(mode);
String sizes[] = getResources().getStringArray(R.array.image_sizes);
int i = 1;
for (String x : sizes) {
if (x.equals(strsize))
break;
i++;
}
if(i <= sizes.length)
size_spinner.setSelection(i-1);
i = 1;
String modes[] = getResources().getStringArray(R.array.camera_mode);
for (String x :modes) {
if (x.equals(strmode))
break;
i++;
}
if(i <= modes.length)
mode_spinner.setSelection(i-1);
size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = size_spinner.getItemAtPosition(position);
if (o != null)
setImageSize(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = mode_spinner.getItemAtPosition(position);
if (o != null)
setCameraMode(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
private String modeToString(int mode) {
switch (mode) {
case CAMERA_MODE_BW:
return "BW";
case CAMERA_MODE_COLOR:
return "color";
default:
return "";
}
}
}