Commit e553a37f authored by Andrey Kamaev

Removed outdated android-opencv subproject

parent d9d74678
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv" android:versionCode="1" android:versionName="0.1">
<application android:debuggable="true">
<!-- The activity tag here is currently not used. The main project TicTacToeMain
must currently redefine the activities to be used from the libraries. However
later the tools will pick up the activities from here and merge them automatically,
so it's best to define your activities here like for any regular Android
project. -->
<activity android:name="com.opencv.OpenCV">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="com.opencv.calibration.ChessBoardChooser" />
<activity android:name="com.opencv.calibration.CameraConfig" />
<activity android:name="com.opencv.calibration.CalibrationViewer" />
<service android:name="com.opencv.calibration.services.CalibrationService" />
</application>
<uses-sdk android:minSdkVersion="7" />
<!-- set the opengl version -->
<uses-feature android:glEsVersion="0x00020000" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
</manifest>
# ============================================================================
# The AndroidOpenCV CMake configuration file
#
# ** File generated automatically, do not modify **
#
# Usage from an external project:
# In your CMakeLists.txt, add these lines:
#
# FIND_PACKAGE(AndroidOpenCV REQUIRED )
# TARGET_LINK_LIBRARIES(MY_TARGET_NAME ${AndroidOpenCV_LIBS})
#
# This file will define the following variables:
# - AndroidOpenCV_LIBS : The list of libraries to link against.
# - AndroidOpenCV_LIB_DIR : The directory where lib files are.
# Calling LINK_DIRECTORIES with this path
# is NOT needed.
# - AndroidOpenCV_INCLUDE_DIRS : The AndroidOpenCV include directories.
# - AndroidOpenCV_SWIG_DIR : The SWIG path
#
# ===========================================================================
# ======================================================
# Include directories to add to the user project:
# ======================================================
# Provide the include directories to the caller
SET(AndroidOpenCV_INCLUDE_DIRS @CMAKE_INCLUDE_DIRS_CONFIGCMAKE@)
INCLUDE_DIRECTORIES(${AndroidOpenCV_INCLUDE_DIRS})
# ======================================================
# Link directories to add to the user project:
# ======================================================
# Provide the libs directory anyway; it may be needed in some cases.
SET(AndroidOpenCV_LIB_DIR @CMAKE_LIB_DIRS_CONFIGCMAKE@)
LINK_DIRECTORIES(${AndroidOpenCV_LIB_DIR})
# ======================================================
# Libraries to add to the user project:
# ======================================================
SET(AndroidOpenCV_LIBS @CMAKE_LIBS_CONFIGCMAKE@)
SET(AndroidOpenCV_SWIG_DIR @CMAKE_SWIG_DIR_CONFIGCMAKE@)
cmake_minimum_required(VERSION 2.8)
project(android-jni)
add_subdirectory(jni)
=========================================
CMake Build
=========================================
#path to the android build of opencv
opencv_dir=`pwd`/../build
mkdir build
cd build
cmake -DOpenCV_DIR=$opencv_dir -DCMAKE_TOOLCHAIN_FILE=$ANDTOOLCHAIN ..
=========================================
Android Build
=========================================
sh project_create.sh
ant compile
@ECHO OFF
SETLOCAL
PUSHD %~dp0
SET PROJECT_NAME=android-opencv
CALL ..\scripts\build.cmd %*
POPD
ENDLOCAL
#!/bin/sh
cd `dirname $0`
opencv_build_dir=`pwd`/../build
mkdir -p build
cd build
cmake -DOpenCV_DIR=$opencv_build_dir -DCMAKE_TOOLCHAIN_FILE=../../android.toolchain.cmake ..
#!/bin/sh
cd `dirname $0`
opencv_build_dir=`pwd`/../build_neon
mkdir -p build_neon
cd build_neon
cmake -DOpenCV_DIR=$opencv_build_dir -DARM_TARGET="armeabi-v7a with NEON" -DCMAKE_TOOLCHAIN_FILE=../../android.toolchain.cmake ..
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system, use
# "build.properties" and override values to adapt the script to your
# project structure.
# Project target.
target=android-7
#########################################################
# Find opencv and android-opencv
#########################################################
set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../build
CACHE PATH "The path where you built opencv for android")
find_package(OpenCV REQUIRED)
#########################################################
#c flags, included, and lib dependencies
#########################################################
#notice the "recycling" of CMAKE_C_FLAGS
#this is necessary to pick up android flags
set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR}
${CMAKE_CURRENT_SOURCE_DIR}/include)
set( LIBRARY_DEPS ${OpenCV_LIBS} )
if(ANDROID)
set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl GLESv2)
endif(ANDROID)
#########################################################
#SWIG STUFF
#########################################################
#the java package to place swig generated java files in
set(MY_PACKAGE com.opencv.jni)
if(NOT ANDROID)
#non android swig and jni
#jni is available by default on android
find_package(JNI REQUIRED)
include_directories(${JNI_INCLUDE_DIRS})
FIND_PACKAGE(SWIG)
endif()
INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
if(ANDROID)
#this will set the output path for the java package
#and properly create the package declarations in generated java sources
SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
endif(ANDROID)
SET_SOURCE_FILES_PROPERTIES(android-cv.i PROPERTIES CPLUSPLUS ON)
SWIG_ADD_MODULE(android-opencv java
android-cv.i
Calibration.cpp
gl_code.cpp
image_pool.cpp
yuv420sp2rgb.c
#yuv420rgb888c.c
#yuv420rgb888.s
yuv2rgb16tab.c
)
target_link_libraries(android-opencv ${LIBRARY_DEPS} )
###################################################################
# Set up the config file for others to link against.
###################################################################
set(CMAKE_INCLUDE_DIRS_CONFIGCMAKE ${CMAKE_CURRENT_SOURCE_DIR}/include)
set(CMAKE_LIB_DIRS_CONFIGCMAKE ${LIBRARY_OUTPUT_PATH})
set(CMAKE_LIBS_CONFIGCMAKE android-opencv)
set(CMAKE_SWIG_DIR_CONFIGCMAKE ${CMAKE_CURRENT_SOURCE_DIR})
configure_file( "${CMAKE_SOURCE_DIR}/AndroidOpenCVConfig.cmake.in"
"${CMAKE_BINARY_DIR}/AndroidOpenCVConfig.cmake"
IMMEDIATE @ONLY)
/*
* Processor.cpp
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#include "Calibration.h"
#include <sys/stat.h>
using namespace cv;
Calibration::Calibration() :
patternsize(6, 8)
{
}
Calibration::~Calibration()
{
}
namespace
{
double computeReprojectionErrors(const vector<vector<Point3f> >& objectPoints,
                                 const vector<vector<Point2f> >& imagePoints,
                                 const vector<Mat>& rvecs, const vector<Mat>& tvecs,
                                 const Mat& cameraMatrix, const Mat& distCoeffs,
                                 vector<float>& perViewErrors)
{
vector<Point2f> imagePoints2;
int i, totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for (i = 0; i < (int)objectPoints.size(); i++)
{
projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix, distCoeffs, imagePoints2);
err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1);
int n = (int)objectPoints[i].size();
perViewErrors[i] = err / n;
totalErr += err;
totalPoints += n;
}
return totalErr / totalPoints;
}
void calcChessboardCorners(Size boardSize, float squareSize, vector<Point3f>& corners)
{
corners.resize(0);
for (int i = 0; i < boardSize.height; i++)
for (int j = 0; j < boardSize.width; j++)
corners.push_back(Point3f(float(j * squareSize), float(i * squareSize), 0));
}
/**from opencv/samples/cpp/calibration.cpp
*
*/
bool runCalibration(vector<vector<Point2f> > imagePoints, Size imageSize, Size boardSize, float squareSize,
float aspectRatio, int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr)
{
cameraMatrix = Mat::eye(3, 3, CV_64F);
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
cameraMatrix.at<double> (0, 0) = aspectRatio;
distCoeffs = Mat::zeros(4, 1, CV_64F);
vector<vector<Point3f> > objectPoints(1);
calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
for (size_t i = 1; i < imagePoints.size(); i++)
objectPoints.push_back(objectPoints[0]);
calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix, distCoeffs, rvecs, tvecs, flags);
bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET) && checkRange(distCoeffs, CV_CHECK_QUIET);
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs, tvecs, cameraMatrix, distCoeffs, reprojErrs);
return ok;
}
void saveCameraParams(const string& filename, Size imageSize, Size boardSize, float squareSize, float aspectRatio,
int flags, const Mat& cameraMatrix, const Mat& distCoeffs, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const vector<float>& reprojErrs,
const vector<vector<Point2f> >& imagePoints, double totalAvgErr)
{
FileStorage fs(filename, FileStorage::WRITE);
time_t t;
time(&t);
struct tm *t2 = localtime(&t);
char buf[1024];
strftime(buf, sizeof(buf) - 1, "%c", t2);
fs << "calibration_time" << buf;
if (!rvecs.empty() || !reprojErrs.empty())
fs << "nframes" << (int)std::max(rvecs.size(), reprojErrs.size());
fs << "image_width" << imageSize.width;
fs << "image_height" << imageSize.height;
fs << "board_width" << boardSize.width;
fs << "board_height" << boardSize.height;
fs << "squareSize" << squareSize;
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
fs << "aspectRatio" << aspectRatio;
if (flags != 0)
{
sprintf(buf, "flags: %s%s%s%s",
        flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess" : "",
        flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "",
        flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point" : "",
        flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
cvWriteComment(*fs, buf, 0);
}
fs << "flags" << flags;
fs << "camera_matrix" << cameraMatrix;
fs << "distortion_coefficients" << distCoeffs;
fs << "avg_reprojection_error" << totalAvgErr;
if (!reprojErrs.empty())
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
if (!rvecs.empty() && !tvecs.empty())
{
Mat bigmat(rvecs.size(), 6, CV_32F);
for (size_t i = 0; i < rvecs.size(); i++)
{
Mat r = bigmat(Range(i, i + 1), Range(0, 3));
Mat t = bigmat(Range(i, i + 1), Range(3, 6));
rvecs[i].copyTo(r);
tvecs[i].copyTo(t);
}
cvWriteComment(*fs, "a set of 6-tuples (rotation vector + translation vector) for each view", 0);
fs << "extrinsic_parameters" << bigmat;
}
if (!imagePoints.empty())
{
Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
for (size_t i = 0; i < imagePoints.size(); i++)
{
Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
Mat(imagePoints[i]).copyTo(r);
}
fs << "image_points" << imagePtMat;
}
}
}//anon namespace
bool Calibration::detectAndDrawChessboard(int idx, image_pool* pool)
{
bool patternfound = false;
Mat grey = pool->getGrey(idx);
if (grey.empty())
return false;
vector<Point2f> corners;
patternfound = findChessboardCorners(grey, patternsize, corners,
                                     CALIB_CB_FILTER_QUADS + CALIB_CB_ADAPTIVE_THRESH +
                                     CALIB_CB_NORMALIZE_IMAGE + CALIB_CB_FAST_CHECK);
Mat img = pool->getImage(idx);
if (corners.size() < 1)
return false;
if (patternfound)
{
cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
imagepoints.push_back(corners);
}
drawChessboardCorners(img, patternsize, Mat(corners), patternfound);
imgsize = grey.size();
return patternfound;
}
void Calibration::drawText(int i, image_pool* pool, const char* ctext)
{
// Use "y" to show that the baseLine is about
string text = ctext;
int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
double fontScale = .8;
int thickness = 1;
Mat img = pool->getImage(i);
int baseline = 0;
Size textSize = getTextSize(text, fontFace, fontScale, thickness, &baseline);
baseline += thickness;
// center the text horizontally, near the bottom of the image
Point textOrg((img.cols - textSize.width) / 2, (img.rows - textSize.height * 2));
// draw the box
rectangle(img, textOrg + Point(0, baseline), textOrg + Point(textSize.width, -textSize.height), Scalar(0, 0, 255),
CV_FILLED);
// ... and the baseline first
line(img, textOrg + Point(0, thickness), textOrg + Point(textSize.width, thickness), Scalar(0, 0, 255));
// then put the text itself
putText(img, text, textOrg, fontFace, fontScale, Scalar::all(255), thickness, 8);
}
void Calibration::resetChess()
{
imagepoints.clear();
}
void Calibration::calibrate(const char* filename)
{
vector<Mat> rvecs, tvecs;
vector<float> reprojErrs;
double totalAvgErr = 0;
int flags = 0;
flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
bool writeExtrinsics = true;
bool writePoints = true;
bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f, flags, K, distortion, rvecs, tvecs, reprojErrs,
totalAvgErr);
if (ok)
{
saveCameraParams(filename, imgsize, patternsize, 1.f, 1.f, flags, K, distortion,
                 writeExtrinsics ? rvecs : vector<Mat> (),
                 writeExtrinsics ? tvecs : vector<Mat> (),
                 writeExtrinsics ? reprojErrs : vector<float> (),
                 writePoints ? imagepoints : vector<vector<Point2f> > (),
                 totalAvgErr);
}
}
int Calibration::getNumberDetectedChessboards()
{
return imagepoints.size();
}
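/* A minimal native-side driver for the flow above (a sketch under assumptions:
 * the function name and the output path are hypothetical, and in the app these
 * calls are actually made from Java through the SWIG wrapper that follows). */
void calibrateFromPool(image_pool* pool, int nframes, const char* outfile)
{
  Calibration calib;                        // patternsize defaults to 6x8
  for (int i = 0; i < nframes; i++)
    calib.detectAndDrawChessboard(i, pool); // accumulates imagepoints on success
  if (calib.getNumberDetectedChessboards() > 0)
    calib.calibrate(outfile);               // runCalibration + saveCameraParams
}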
/*
* include the headers required by the generated cpp code
*/
%{
#include "Calibration.h"
#include "image_pool.h"
using namespace cv;
%}
class Calibration {
public:
Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};
%feature("director") Mat;
%feature("director") glcamera;
%feature("director") image_pool;
%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";
/* File : android-cv.i
import this file, and make sure to add the System.loadLibrary("android-opencv")
before loading any lib that depends on this.
*/
%module opencv
%{
#include "image_pool.h"
#include "glcamera.h"
using namespace cv;
%}
#ifndef SWIGIMPORTED
%include "various.i"
%include "typemaps.i"
%include "arrays_java.i"
#endif
/**
* Make all the swig pointers public, so that
* external libraries can refer to these, otherwise they default to
* protected...
*/
%typemap(javabody) SWIGTYPE %{
private long swigCPtr;
protected boolean swigCMemOwn;
public $javaclassname(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr($javaclassname obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
%}
%pragma(java) jniclasscode=%{
static {
try {
//load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
//so that android sdk automatically installs it along with the app.
System.loadLibrary("android-opencv");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
%include "cv.i"
%include "glcamera.i"
%include "image_pool.i"
%include "Calibration.i"
/*
* These typemaps provide support for sharing data between JNI and JVM code
* using NIO direct buffers. It is the responsibility of the JVM code to
* allocate a direct buffer of the appropriate size.
*
* Example use:
* Wrapping:
* %include "buffers.i"
* %apply int* BUFF {int* buffer}
* int read_foo_int(int* buffer);
*
* Java:
 * IntBuffer buffer = ByteBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
* Example.read_foo_int(buffer);
*
* The following typemaps are defined:
 * void* BUFF <--> java.nio.Buffer
 * char* BUFF <--> java.nio.ByteBuffer
 * char* CBUFF <--> java.nio.CharBuffer
 * unsigned char* INBUFF/OUTBUFF <--> java.nio.ShortBuffer
 * short* BUFF <--> java.nio.ShortBuffer
 * unsigned short* INBUFF/OUTBUFF <--> java.nio.IntBuffer
 * int* BUFF <--> java.nio.IntBuffer
 * unsigned int* INBUFF/OUTBUFF <--> java.nio.LongBuffer
 * long* BUFF <--> java.nio.IntBuffer
 * unsigned long* INBUFF/OUTBUFF <--> java.nio.LongBuffer
 * long long* BUFF <--> java.nio.LongBuffer
 * float* BUFF <--> java.nio.FloatBuffer
 * double* BUFF <--> java.nio.DoubleBuffer
*
* Note the potential for data loss in the conversion from
* the C type 'unsigned long' to the signed Java long type.
* Hopefully, I can implement a workaround with BigNumber in the future.
*
* The use of ByteBuffer vs CharBuffer for the char* type should
* depend on the type of data. In general you'll probably
* want to use CharBuffer for actual text data.
*/
/*
* This macro is used to define the nio buffers for primitive types.
*/
%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
%typemap(jni) CTYPE* LABEL "jobject"
%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
%typemap(javain,
pre=" assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
%typemap(javaout) CTYPE* LABEL {
return $jnicall;
}
%typemap(in) CTYPE* LABEL {
$1 = (CTYPE*)(jenv)->GetDirectBufferAddress( $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* LABEL {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* LABEL ""
%enddef
NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
#undef NIO_BUFFER_TYPEMAP
%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
%typemap(jni) CTYPE* INBUFF "jobject"
%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
%typemap(javaout) CTYPE* INBUFF {
return $jnicall;
}
%typemap(in) CTYPE* INBUFF {
$1 = (jenv)->GetDirectBufferAddress($input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* INBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* INBUFF ""
%typemap(jni) CTYPE* OUTBUFF "jobject"
%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
post=" UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
%typemap(javaout) CTYPE* OUTBUFF {
return $jnicall;
}
%typemap(in) CTYPE* OUTBUFF {
$1 = (jenv)->GetDirectBufferAddress( $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* OUTBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* OUTBUFF ""
%enddef
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
/*
%typemap(jni) unsigned char* BUFF "jobject"
%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
post=" permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
%typemap(javaout) unsigned char* BUFF {
return $jnicall;
}
%typemap(in) unsigned char* BUFF {
$1 = (const char*)(jenv)->GetDirectBufferAddress( $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) unsigned char* BUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) unsigned char* BUFF ""
*/
#undef UNSIGNED_NIO_BUFFER_TYPEMAP
%typemap(javaimports) Mat "
/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
*/"
%typemap(javaimports) Size "
/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
*/"
class Size{
public:
Size();
Size(int width,int height);
~Size();
int width;
int height;
};
#define CV_CN_MAX 512
#define CV_CN_SHIFT 3
#define CV_DEPTH_MAX (1 << CV_CN_SHIFT)
#define CV_8U 0
#define CV_8S 1
#define CV_16U 2
#define CV_16S 3
#define CV_32S 4
#define CV_32F 5
#define CV_64F 6
#define CV_USRTYPE1 7
#define CV_MAT_DEPTH_MASK (CV_DEPTH_MAX - 1)
#define CV_MAT_DEPTH(flags) ((flags) & CV_MAT_DEPTH_MASK)
#define CV_MAKETYPE(depth,cn) (CV_MAT_DEPTH(depth) + (((cn)-1) << CV_CN_SHIFT))
#define CV_MAKE_TYPE CV_MAKETYPE
#define CV_8UC1 CV_MAKETYPE(CV_8U,1)
#define CV_8UC2 CV_MAKETYPE(CV_8U,2)
#define CV_8UC3 CV_MAKETYPE(CV_8U,3)
#define CV_8UC4 CV_MAKETYPE(CV_8U,4)
#define CV_8UC(n) CV_MAKETYPE(CV_8U,(n))
#define CV_8SC1 CV_MAKETYPE(CV_8S,1)
#define CV_8SC2 CV_MAKETYPE(CV_8S,2)
#define CV_8SC3 CV_MAKETYPE(CV_8S,3)
#define CV_8SC4 CV_MAKETYPE(CV_8S,4)
#define CV_8SC(n) CV_MAKETYPE(CV_8S,(n))
#define CV_16UC1 CV_MAKETYPE(CV_16U,1)
#define CV_16UC2 CV_MAKETYPE(CV_16U,2)
#define CV_16UC3 CV_MAKETYPE(CV_16U,3)
#define CV_16UC4 CV_MAKETYPE(CV_16U,4)
#define CV_16UC(n) CV_MAKETYPE(CV_16U,(n))
#define CV_16SC1 CV_MAKETYPE(CV_16S,1)
#define CV_16SC2 CV_MAKETYPE(CV_16S,2)
#define CV_16SC3 CV_MAKETYPE(CV_16S,3)
#define CV_16SC4 CV_MAKETYPE(CV_16S,4)
#define CV_16SC(n) CV_MAKETYPE(CV_16S,(n))
#define CV_32SC1 CV_MAKETYPE(CV_32S,1)
#define CV_32SC2 CV_MAKETYPE(CV_32S,2)
#define CV_32SC3 CV_MAKETYPE(CV_32S,3)
#define CV_32SC4 CV_MAKETYPE(CV_32S,4)
#define CV_32SC(n) CV_MAKETYPE(CV_32S,(n))
#define CV_32FC1 CV_MAKETYPE(CV_32F,1)
#define CV_32FC2 CV_MAKETYPE(CV_32F,2)
#define CV_32FC3 CV_MAKETYPE(CV_32F,3)
#define CV_32FC4 CV_MAKETYPE(CV_32F,4)
#define CV_32FC(n) CV_MAKETYPE(CV_32F,(n))
#define CV_64FC1 CV_MAKETYPE(CV_64F,1)
#define CV_64FC2 CV_MAKETYPE(CV_64F,2)
#define CV_64FC3 CV_MAKETYPE(CV_64F,3)
#define CV_64FC4 CV_MAKETYPE(CV_64F,4)
#define CV_64FC(n) CV_MAKETYPE(CV_64F,(n))
#define CV_AUTO_STEP 0x7fffffff
#define CV_WHOLE_ARR cvSlice( 0, 0x3fffffff )
#define CV_MAT_CN_MASK ((CV_CN_MAX - 1) << CV_CN_SHIFT)
#define CV_MAT_CN(flags) ((((flags) & CV_MAT_CN_MASK) >> CV_CN_SHIFT) + 1)
#define CV_MAT_TYPE_MASK (CV_DEPTH_MAX*CV_CN_MAX - 1)
#define CV_MAT_TYPE(flags) ((flags) & CV_MAT_TYPE_MASK)
#define CV_MAT_CONT_FLAG_SHIFT 14
#define CV_MAT_CONT_FLAG (1 << CV_MAT_CONT_FLAG_SHIFT)
#define CV_IS_MAT_CONT(flags) ((flags) & CV_MAT_CONT_FLAG)
#define CV_IS_CONT_MAT CV_IS_MAT_CONT
#define CV_SUBMAT_FLAG_SHIFT 15
#define CV_SUBMAT_FLAG (1 << CV_SUBMAT_FLAG_SHIFT)
#define CV_IS_SUBMAT(flags) ((flags) & CV_SUBMAT_FLAG)
#define CV_MAGIC_MASK 0xFFFF0000
#define CV_MAT_MAGIC_VAL 0x42420000
#define CV_TYPE_NAME_MAT "opencv-matrix"
class Mat {
public:
Mat();
~Mat();
void create(Size size, int type);
int channels() const;
%immutable;
int rows;
int cols;
};
template<class _Tp> class Ptr
{
public:
//! empty constructor
Ptr();
//! take ownership of the pointer. The associated reference counter is allocated and set to 1
Ptr(_Tp* _obj);
//! calls release()
~Ptr();
//! copy constructor. Copies the members and calls addref()
Ptr(const Ptr& ptr);
//! copy operator. Calls ptr.addref() and release() before copying the members
// Ptr& operator = (const Ptr& ptr);
//! increments the reference counter
void addref();
//! decrements the reference counter. If it reaches 0, delete_obj() is called
void release();
//! deletes the object. Override if needed
void delete_obj();
//! returns true iff obj==NULL
bool empty() const;
//! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
_Tp* operator -> ();
// const _Tp* operator -> () const;
// operator _Tp* ();
// operator const _Tp*() const;
protected:
_Tp* obj; //< the object pointer.
int* refcount; //< the associated reference counter
};
%template(PtrMat) Ptr<Mat>;
void imwrite(const char* image_name, const Mat& image);
Mat imread(const char* image_name);
%include "buffers.i"
%apply char* BUFF {const char* buffer}
%apply char* BUFF {char* buffer}
void copyMatToBuffer(char* buffer, const Mat& mat);
void copyBufferToMat(Mat& mat, const char* buffer);
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// OpenGL ES 2.0 code
#include <jni.h>
#if __ANDROID__
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#else
#include <GL/gl.h>
#endif
#include "android_logger.h"
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <stdint.h>
#include "glcamera.h"
#include "image_pool.h"
using namespace cv;
static void printGLString(const char *name, GLenum s)
{
const char *v = (const char *)glGetString(s);
LOGI("GL %s = %s\n", name, v);
}
static void checkGlError(const char* op)
{
for (GLint error = glGetError(); error; error = glGetError())
{
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
static const char gVertexShader[] = "attribute vec4 a_position; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
static const char gFragmentShader[] = "precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D s_texture; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D( s_texture, v_texCoord );\n"
"} \n";
GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels)
{
// Bind the texture
glActiveTexture(GL_TEXTURE0);
checkGlError("glActiveTexture");
// Bind the texture object
glBindTexture(GL_TEXTURE_2D, _textureid);
checkGlError("glBindTexture");
GLenum format;
switch (channels)
{
case 3:
#if ANDROID
format = GL_RGB;
#else
format = GL_BGR;
#endif
break;
case 1:
format = GL_LUMINANCE;
break;
case 4:
format = GL_RGBA;
break;
default:
format = GL_RGBA; // fallback so 'format' is never read uninitialized
break;
}
// Load the texture
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format, GL_UNSIGNED_BYTE, pixels);
checkGlError("glTexImage2D");
#if ANDROID
// Set the filtering mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
#else
/* Linear Filtering */
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
#endif
return _textureid;
}
GLuint glcamera::loadShader(GLenum shaderType, const char* pSource)
{
GLuint shader = 0;
#if __ANDROID__
shader = glCreateShader(shaderType);
if (shader)
{
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled)
{
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen)
{
char* buf = (char*)malloc(infoLen);
if (buf)
{
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
#endif
return shader;
}
GLuint glcamera::createProgram(const char* pVertexSource, const char* pFragmentSource)
{
#if __ANDROID__
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader)
{
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader)
{
return 0;
}
GLuint program = glCreateProgram();
if (program)
{
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE)
{
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength)
{
char* buf = (char*)malloc(bufLength);
if (buf)
{
glGetProgramInfoLog(program, bufLength, NULL, buf);
LOGE("Could not link program:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
#else
return 0;
#endif
}
void glcamera::clear()
{
nimg = Mat();
}
//GLuint textureID;
bool glcamera::setupGraphics(int w, int h)
{
// printGLString("Version", GL_VERSION);
// printGLString("Vendor", GL_VENDOR);
// printGLString("Renderer", GL_RENDERER);
// printGLString("Extensions", GL_EXTENSIONS);
#if __ANDROID__
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram)
{
LOGE("Could not create program.");
return false;
}
gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
gvSamplerHandle = glGetUniformLocation(gProgram, "s_texture"); // samplers are uniforms, not attributes
// Use tightly packed data
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Generate a texture object
glGenTextures(1, &textureID);
glViewport(0, 0, w, h);
#endif
return true;
}
void glcamera::renderFrame()
{
#if __ANDROID__
GLfloat vVertices[] = {-1.0f, 1.0f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.0f, -1.0f, 0.0f, // Position 1
0.0f, img_h, // TexCoord 1
1.0f, -1.0f, 0.0f, // Position 2
img_w, img_h, // TexCoord 2
1.0f, 1.0f, 0.0f, // Position 3
img_w, 0.0f // TexCoord 3
};
GLushort indices[] = {0, 1, 2, 0, 2, 3};
GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError("glClearColor");
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
checkGlError("glClear");
if (nimg.empty()) return;
glUseProgram(gProgram);
checkGlError("glUseProgram");
// Load the vertex position
glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride, vVertices);
// Load the texture coordinate
glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride, &vVertices[3]);
glEnableVertexAttribArray(gvPositionHandle);
glEnableVertexAttribArray(gvTexCoordHandle);
// Bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
// Set the sampler texture unit to 0
glUniform1i(gvSamplerHandle, 0);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
#endif
//checkGlError("glVertexAttribPointer");
//glEnableVertexAttribArray(gvPositionHandle);
//checkGlError("glEnableVertexAttribArray");
//glDrawArrays(GL_TRIANGLES, 0, 3);
//checkGlError("glDrawArrays");
}
void glcamera::init(int width, int height)
{
newimage = false;
nimg = Mat();
setupGraphics(width, height);
}
void glcamera::step()
{
if (newimage && !nimg.empty())
{
textureID = createSimpleTexture2D(textureID, nimg.ptr<unsigned char> (0), nimg.cols, nimg.rows, nimg.channels());
newimage = false;
}
renderFrame();
}
#define NEAREST_POW2(x)( std::ceil(std::log(x)/0.69315) ) // smallest exponent p with 2^p >= x (log2 via ln(x)/ln 2)
void glcamera::setTextureImage(const Mat& img)
{
int p = NEAREST_POW2(img.cols/2); //subsample by 2
//int sz = std::pow(2, p);
// Size size(sz, sz);
Size size(256, 256);
img_w = 1;
img_h = 1;
if (nimg.cols != size.width)
LOGI_STREAM( "using texture of size: (" << size.width << " , " << size.height << ") image size is: (" << img.cols << " , " << img.rows << ")");
nimg.create(size, img.type());
#if SUBREGION_NPO2
cv::Rect roi(0, 0, img.cols/2, img.rows/2);
cv::Mat nimg_sub = nimg(roi);
//img.copyTo(nimg_sub);
img_w = (img.cols/2)/float(size.width);
img_h = (img.rows/2)/float(size.height);
cv::resize(img,nimg_sub,nimg_sub.size(),0,0,CV_INTER_NN);
#else
cv::resize(img, nimg, nimg.size(), 0, 0, CV_INTER_NN);
#endif
newimage = true;
}
void glcamera::drawMatToGL(int idx, image_pool* pool)
{
Mat img = pool->getImage(idx);
if (img.empty())
return; //no image at input_idx!
setTextureImage(img);
}
glcamera::glcamera() :
newimage(false)
{
LOGI("glcamera constructor");
}
glcamera::~glcamera()
{
LOGI("glcamera destructor");
}
%typemap(javaimports) glcamera "
/** A class for native rendering of images using OpenGL ES 2.0 (the NDK's GL2 bindings).
This class is used by the GL2CameraViewer to do the rendering,
and is inspired by the gl2 example in the NDK samples.
*/"
%javamethodmodifiers glcamera::init"
/** Should be called from onSurfaceChanged by the GLSurfaceView that is using this
 * as the drawing engine.
 * @param width the width of the surface view that this will be drawing to
 * @param height the height of the surface view that this will be drawing to
*
*/
public";
%javamethodmodifiers glcamera::step"
/** should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
handles the rendering of the opengl scene, and requires that the opengl context be
valid.
*
*/
public";
%javamethodmodifiers glcamera::drawMatToGL"
/** copies an image from a pool and queues it for drawing in opengl.
* this does transformation into power of two texture sizes
* @param idx the image index to copy
* @param pool the image_pool to look up the image from
*
*/
public";
class glcamera {
public:
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void clear();
};
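// A C++ sketch of the call protocol documented above (assumption: illustrative
// only; on Android these calls come from a GLSurfaceView.Renderer through the
// generated Java wrapper, and the function name here is hypothetical).
#include "glcamera.h"
#include "image_pool.h"

void renderOneFrame(glcamera& cam, image_pool* pool, bool surfaceChanged, int w, int h)
{
  if (surfaceChanged)
    cam.init(w, h);         // onSurfaceChanged: compile shaders, set the viewport
  cam.drawMatToGL(0, pool); // queue image 0 from the pool as the next texture
  cam.step();               // onDrawFrame: upload the texture if new, draw the quad
}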
#include "image_pool.h"
#include "yuv420sp2rgb.h"
#include "android_logger.h"
#include <opencv2/imgproc/imgproc.hpp>
#include <cstdlib>
#include <cstring>
#include <jni.h>
#ifdef __cplusplus
extern "C"
{
#endif
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved);
//
//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
// JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint,
jint, jint, jboolean);
#ifdef __cplusplus
}
#endif
using namespace cv;
JNIEXPORT jint JNICALL JNI_OnLoad(JavaVM* vm, void* reserved)
{
LOGI("JNI_OnLoad called for opencv");
return JNI_VERSION_1_4;
}
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env, jclass thiz, jlong ppool,
jobject _jpool, jbyteArray jbuffer, jint jidx,
jint jwidth, jint jheight, jboolean jgrey)
{
int buff_height = jheight + (jheight / 2); // NV21: jheight rows of Y plus jheight/2 rows of interleaved VU
Size buff_size(jwidth, buff_height);
image_pool *pool = (image_pool *)ppool;
Mat mat = pool->getYUV(jidx);
//create is smart and only copies if the buffer size is different
mat.create(buff_size, CV_8UC1);
{
uchar* buff = mat.ptr<uchar> (0);
jsize sz = env->GetArrayLength(jbuffer);
//http://elliotth.blogspot.com/2007/03/optimizing-jni-array-access.html
env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*)buff);
}
pool->addYUVMat(jidx, mat);
Mat color;
if (jgrey)
{
Mat grey = pool->getGrey(jidx);
color = grey;
}
else
{
color = pool->getImage(jidx);
pool->convertYUVtoColor(jidx, color);
}
pool->addImage(jidx, color);
}
image_pool::image_pool()
{
}
image_pool::~image_pool()
{
}
Mat image_pool::getImage(int i)
{
return imagesmap[i];
}
Mat image_pool::getGrey(int i)
{
Mat tm = yuvImagesMap[i];
if (tm.empty())
return tm;
return tm(Range(0, tm.rows * (2.0f / 3)), Range::all());
}
Mat image_pool::getYUV(int i)
{
return yuvImagesMap[i];
}
void image_pool::addYUVMat(int i, Mat mat)
{
yuvImagesMap[i] = mat;
}
void image_pool::addImage(int i, Mat mat)
{
imagesmap[i] = mat;
}
void image_pool::convertYUVtoColor(int i, cv::Mat& out)
{
Mat yuv = getYUV(i);
if (yuv.empty())
return;
int width = yuv.cols;
int height = yuv.rows * (2.0f / 3);
out.create(height, width, CV_8UC3);
const unsigned char* buff = yuv.ptr<unsigned char> (0);
unsigned char* out_buff = out.ptr<unsigned char> (0);
color_convert_common(buff, buff + width * height, width, height, out_buff, false);
}
void copyMatToBuffer(char* buffer, const cv::Mat& mat)
{
memcpy(buffer, mat.data, mat.total() * mat.elemSize());
}
void copyBufferToMat(cv::Mat& mat, const char* buffer)
{
memcpy(mat.data, buffer, mat.total() * mat.elemSize());
}
void RGB2BGR(const Mat& in, Mat& out)
{
cvtColor(in, out, CV_RGB2BGR);
}
%typemap(javaimports) image_pool "
/** image_pool is used for keeping track of a pool of native images. It stores images as cv::Mat's and
references them by an index. It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
%javamethodmodifiers image_pool::getImage"
/** gets a pointer to a stored image, by an index. If the index is new, returns a null pointer
* @param idx the index in the pool that is associated with a cv::Mat
* @return the pointer to a cv::Mat, null pointer if the given idx is novel
*/
public";
%javamethodmodifiers image_pool::deleteImage"
/** deletes the image from the pool
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%javamethodmodifiers addYUVtoPool"
/** adds a YUV frame to the pool at the given index
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%include "various.i"
%apply (char* BYTE) { (char *data)}; //byte[] to char*
%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
%feature("director") image_pool;
class image_pool {
public:
Mat getGrey(int i);
Mat getImage(int i);
void addImage(int i, Mat mat);
void convertYUVtoColor(int i, Mat& out);
};
void RGB2BGR(const Mat& in, Mat& out);
/*
* Processor.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Calibration
{
public:
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
cv::Size patternsize;
private:
std::vector<cv::KeyPoint> keypoints;
std::vector<std::vector<cv::Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
};
#endif /* PROCESSOR_H_ */
#pragma once
#include <iostream>
#include <sstream>
#define LOG_TAG "libopencv"
#if ANDROID
#include <android/log.h>
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
#else
#include <cstdio>
#define LOGI(...) do { fprintf(stdout, "info:%s:", LOG_TAG); fprintf(stdout, __VA_ARGS__); fprintf(stdout, "\n"); } while (0)
#define LOGE(...) do { fprintf(stderr, "error:%s:", LOG_TAG); fprintf(stderr, __VA_ARGS__); fprintf(stderr, "\n"); } while (0)
#endif
#ifndef LOGI_STREAM
#define LOGI_STREAM(x) {std::stringstream ss; ss << x; LOGI("%s",ss.str().c_str());}
#endif
#define LOGE_STREAM(x) {std::stringstream ss; ss << x; LOGE("%s",ss.str().c_str());}
#ifndef GLCAMERA_H_
#define GLCAMERA_H_
#include <opencv2/core/core.hpp>
#ifdef __ANDROID__
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#else
#include <GL/gl.h>
#include <GL/glu.h>
#endif
#include "image_pool.h"
class glcamera
{
public:
glcamera();
~glcamera();
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void drawMatToGL(const cv::Mat& img);
void setTextureImage(const cv::Mat& img);
void clear();
private:
GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width, int height, int channels);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint
createProgram(const char* pVertexSource, const char* pFragmentSource);
bool setupGraphics(int w, int h);
void renderFrame();
cv::Mat nimg;
bool newimage;
GLuint textureID;
GLuint gProgram;
GLuint gvPositionHandle;
GLuint gvTexCoordHandle;
GLuint gvSamplerHandle;
float img_w, img_h;
};
#endif
#ifndef IMAGE_POOL_H_ANDROID_KDJFKJ
#define IMAGE_POOL_H_ANDROID_KDJFKJ
#include <opencv2/core/core.hpp>
#include <map>
class image_pool
{
public:
image_pool();
~image_pool();
cv::Mat getImage(int i);
cv::Mat getGrey(int i);
cv::Mat getYUV(int i);
int getCount()
{
return imagesmap.size();
}
/** Adds a mat at the given index - will not do a deep copy, just imagesmap[i] = mat
*
*/
void addImage(int i, cv::Mat mat);
/** this function stores the given matrix in the yuvImagesMap. Also,
 * after this call getGrey will work, as the grey image is just the top
 * two thirds of the YUV mat (the luma plane).
*
* \param i index to store yuv image at
* \param mat the yuv matrix to store
*/
void addYUVMat(int i, cv::Mat mat);
void convertYUVtoColor(int i, cv::Mat& out);
// int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
//
// void getBitmap(int * outintarray, int size, int idx);
private:
std::map<int, cv::Mat> imagesmap;
std::map<int, cv::Mat> yuvImagesMap;
};
void copyMatToBuffer(char* buffer, const cv::Mat& mat);
void copyBufferToMat(cv::Mat& mat, const char* buffer);
void RGB2BGR(const cv::Mat& in, cv::Mat& out);
#endif
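// A minimal usage sketch for the pool (an assumption-labeled illustration, not
// part of the original sources).
#include "image_pool.h"
#include <opencv2/core/core.hpp>

static void imagePoolExample()
{
  image_pool pool;
  pool.addImage(0, cv::Mat::zeros(480, 640, CV_8UC3)); // shallow store: imagesmap[0] = mat
  cv::Mat img = pool.getImage(0);  // shares data with the stored Mat, no deep copy
  cv::Mat grey = pool.getGrey(0);  // empty until addYUVMat(0, ...) has been called
}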
/* YUV-> RGB conversion code.
*
* Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
* Productions Ltd.
*
* Licensed under the GNU GPL. If you need it under another license, contact
* me and ask.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
#ifndef YUV2RGB_H
#define YUV2RGB_H
/* Define these to something appropriate in your build */
typedef unsigned int uint32_t;
typedef signed int int32_t;
typedef unsigned short uint16_t;
typedef unsigned char uint8_t;
extern const uint32_t yuv2rgb565_table[];
extern const uint32_t yuv2bgr565_table[];
void yuv420_2_rgb565(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv422_2_rgb565(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv444_2_rgb565(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv420_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv422_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv444_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv420_2_rgb8888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv422_2_rgb8888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
void yuv444_2_rgb8888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither);
#endif /* YUV2RGB_H */
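/* A usage sketch for the 888 path (assumptions: a fully planar I420 frame, and
 * reuse of yuv2rgb565_table for the 888 conversion, which is what the JNI code
 * later in this commit does; the function name is hypothetical). */
static void convertI420Frame(const uint8_t* y, const uint8_t* u, const uint8_t* v,
                             uint8_t* rgb, int32_t w, int32_t h)
{
    yuv420_2_rgb888(rgb, y, u, v, w, h,
                    w,     /* y_span: one Y sample per pixel */
                    w / 2, /* uv_span: chroma is 2x2 subsampled */
                    w * 3, /* dst_span: 3 output bytes per pixel */
                    yuv2rgb565_table, 0 /* no dithering */);
}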
#ifndef YUV420SP2RGB_H
#define YUV420SP2RGB_H
#ifdef __cplusplus
extern "C" {
#endif
void color_convert_common(
const unsigned char *pY, const unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey);
#ifdef __cplusplus
}
#endif
#endif
/*
* int *INTARRAY typemaps.
* These are input typemaps for mapping a Java int[] array to a C int array.
 * Note that as a Java array is used and thus passed by reference, the C routine
* can return data to Java via the parameter.
*
* Example usage wrapping:
 * void foo(int *INTARRAY, int INTARRAYSIZE);
*
* Java usage:
 * int b[] = new int[20];
* modulename.foo(b);
*/
%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
$1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0);
jsize sz = JCALL1(GetArrayLength, jenv, $input);
$2 = (int)sz;
}
%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0);
}
/* Prevent default freearg typemap from being used */
%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"
/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
#include "yuv420sp2rgb.h"
#include <arm_neon.h>
#include <stdlib.h>
/* this source file should only be compiled by Android.mk when targeting
* the armeabi-v7a ABI, and should be built in NEON mode
*/
void fir_filter_neon_intrinsics(short *output, const short* input, const short* kernel, int width, int kernelSize)
{
#if 1
int nn, offset = -kernelSize / 2;
for (nn = 0; nn < width; nn++)
{
int mm, sum = 0;
int32x4_t sum_vec = vdupq_n_s32(0);
for (mm = 0; mm < kernelSize / 4; mm++)
{
int16x4_t kernel_vec = vld1_s16(kernel + mm * 4);
int16x4_t input_vec = vld1_s16(input + (nn + offset + mm * 4));
sum_vec = vmlal_s16(sum_vec, kernel_vec, input_vec);
}
sum += vgetq_lane_s32(sum_vec, 0);
sum += vgetq_lane_s32(sum_vec, 1);
sum += vgetq_lane_s32(sum_vec, 2);
sum += vgetq_lane_s32(sum_vec, 3);
if (kernelSize & 3)
{
for (mm = kernelSize - (kernelSize & 3); mm < kernelSize; mm++)
sum += kernel[mm] * input[nn + offset + mm];
}
output[nn] = (short)((sum + 0x8000) >> 16);
}
#else /* for comparison purposes only */
int nn, offset = -kernelSize/2;
for (nn = 0; nn < width; nn++)
{
int sum = 0;
int mm;
for (mm = 0; mm < kernelSize; mm++)
{
sum += kernel[mm]*input[nn+offset+mm];
}
output[nn] = (short)((sum + 0x8000) >> 16);
}
#endif
}
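/* A usage sketch (assumptions: hypothetical wrapper; kernel coefficients are in
 * Q16 fixed point, since the accumulator is rounded with +0x8000 and shifted
 * right by 16, and the caller must provide kernelSize/2 samples of padding on
 * each side of the input, because the filter reads
 * input[nn - kernelSize/2 .. nn + kernelSize/2 - 1]). */
static void fir_example(short *output, const short *padded_input, int width)
{
  /* ~[0.25, 0.5, 0.25, 0] smoothing kernel in Q16 fixed point */
  static const short kernel[4] = {16384, 32767, 16384, 0};
  /* padded_input must point at least kernelSize/2 = 2 samples into a valid buffer */
  fir_filter_neon_intrinsics(output, padded_input, kernel, width, 4);
}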
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples.
except the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif
#define bytes_per_pixel 2
#define LOAD_Y(i,j) (pY + i * width + j)
#define LOAD_V(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2))
#define LOAD_U(i,j) (pUV + (i / 2) * width + bytes_per_pixel * (j / 2)+1)
const uint8_t ZEROS[8] = {220,220, 220, 220, 220, 220, 220, 220}; /* luma clamp threshold (220), despite the name */
const uint8_t Y_SUBS[8] = {16, 16, 16, 16, 16, 16, 16, 16};
const uint8_t UV_SUBS[8] = {128, 128, 128, 128, 128, 128, 128, 128};
const uint32_t UV_MULS[] = {833, 400, 833, 400};
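/* A scalar reference for the addressing macros and fixed-point constants used
 * below (an illustrative sketch only; the real path is the NEON code that
 * follows, and this helper is not part of the original sources). */
static inline void nv21_pixel_rgb(const unsigned char *pY, const unsigned char *pUV,
                                  int width, int i, int j, unsigned char rgb[3])
{
  int y = *LOAD_Y(i, j) - 16;  /* luma, offset by 16            */
  int v = *LOAD_V(i, j) - 128; /* V comes first in the VU plane */
  int u = *LOAD_U(i, j) - 128;
  if (y < 0)
    y = 0;
  int y1192 = 1192 * y; /* the same Q10 constants the vector code uses */
  int r = (y1192 + 1634 * v) >> 10;
  int g = (y1192 - 833 * v - 400 * u) >> 10;
  int b = (y1192 + 2066 * u) >> 10;
  rgb[0] = (unsigned char)(r < 0 ? 0 : (r > 255 ? 255 : r));
  rgb[1] = (unsigned char)(g < 0 ? 0 : (g > 255 ? 255 : g));
  rgb[2] = (unsigned char)(b < 0 ? 0 : (b > 255 ? 255 : b));
}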
void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height, unsigned char *buffer, int grey)
{
int i, j;
int nR, nG, nB;
int nY, nU, nV;
unsigned char *out = buffer;
int offset = 0;
uint8x8_t Y_SUBvec = vld1_u8(Y_SUBS);
uint8x8_t UV_SUBvec = vld1_u8(UV_SUBS); // v,u,v,u v,u,v,u
uint32x4_t UV_MULSvec = vld1q_u32(UV_MULS);
uint8x8_t ZEROSvec =vld1_u8(ZEROS);
uint32_t UVvec_int[8];
if (grey)
{
memcpy(out, pY, width * height * sizeof(unsigned char));
}
else
// YUV 4:2:0
for (i = 0; i < height; i++)
{
for (j = 0; j < width; j += 8)
{
// nY = *(pY + i * width + j);
// nV = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2));
// nU = *(pUV + (i / 2) * width + bytes_per_pixel * (j / 2) + 1);
uint8x8_t nYvec = vld1_u8(LOAD_Y(i,j));
uint8x8_t nUVvec = vld1_u8(LOAD_V(i,j)); // v,u,v,u v,u,v,u
nYvec = vand_u8(nYvec, vcle_u8(nYvec, ZEROSvec)); /* mask: keep luma only where it is <= 220 */
// Yuv Convert
// nY -= 16;
// nU -= 128;
// nV -= 128;
// nYvec = vsub_u8(nYvec, Y_SUBvec);
// nUVvec = vsub_u8(nYvec, UV_SUBvec);
uint16x8_t nYvec16 = vmovl_u8(vsub_u8(nYvec, Y_SUBvec));
uint16x8_t nUVvec16 = vmovl_u8(vsub_u8(nUVvec, UV_SUBvec));
uint16x4_t Y_low4 = vget_low_u16(nYvec16);
uint16x4_t Y_high4 = vget_high_u16(nYvec16);
uint16x4_t UV_low4 = vget_low_u16(nUVvec16);
uint16x4_t UV_high4 = vget_high_u16(nUVvec16);
uint32x4_t UV_low4_int = vmovl_u16(UV_low4);
uint32x4_t UV_high4_int = vmovl_u16(UV_high4);
uint32x4_t Y_low4_int = vmull_n_u16(Y_low4, 1192);
uint32x4_t Y_high4_int = vmull_n_u16(Y_high4, 1192);
uint32x4x2_t UV_uzp = vuzpq_u32(UV_low4_int, UV_high4_int);
uint32x2_t Vl = vget_low_u32(UV_uzp.val[0]);// vld1_u32(UVvec_int);
uint32x2_t Vh = vget_high_u32(UV_uzp.val[0]);//vld1_u32(UVvec_int + 2);
uint32x2x2_t Vll_ = vzip_u32(Vl, Vl);
uint32x4_t* Vll = (uint32x4_t*)(&Vll_);
uint32x2x2_t Vhh_ = vzip_u32(Vh, Vh);
uint32x4_t* Vhh = (uint32x4_t*)(&Vhh_);
uint32x2_t Ul = vget_low_u32(UV_uzp.val[1]);
uint32x2_t Uh = vget_high_u32(UV_uzp.val[1]);
uint32x2x2_t Ull_ = vzip_u32(Ul, Ul);
uint32x4_t* Ull = (uint32x4_t*)(&Ull_);
uint32x2x2_t Uhh_ = vzip_u32(Uh, Uh);
uint32x4_t* Uhh = (uint32x4_t*)(&Uhh_);
uint32x4_t B_int_low = vmlaq_n_u32(Y_low4_int, *Ull, 2066); //multiply by scalar accum
uint32x4_t B_int_high = vmlaq_n_u32(Y_high4_int, *Uhh, 2066); //multiply by scalar accum
uint32x4_t G_int_low = vsubq_u32(Y_low4_int, vmlaq_n_u32(vmulq_n_u32(*Vll, 833), *Ull, 400));
uint32x4_t G_int_high = vsubq_u32(Y_high4_int, vmlaq_n_u32(vmulq_n_u32(*Vhh, 833), *Uhh, 400));
uint32x4_t R_int_low = vmlaq_n_u32(Y_low4_int, *Vll, 1634); //multiply by scalar accum
uint32x4_t R_int_high = vmlaq_n_u32(Y_high4_int, *Vhh, 1634); //multiply by scalar accum
B_int_low = vshrq_n_u32 (B_int_low, 10);
B_int_high = vshrq_n_u32 (B_int_high, 10);
G_int_low = vshrq_n_u32 (G_int_low, 10);
G_int_high = vshrq_n_u32 (G_int_high, 10);
R_int_low = vshrq_n_u32 (R_int_low, 10);
R_int_high = vshrq_n_u32 (R_int_high, 10);
uint8x8x3_t RGB;
RGB.val[0] = vmovn_u16(vcombine_u16(vqmovn_u32 (R_int_low),vqmovn_u32 (R_int_high)));
RGB.val[1] = vmovn_u16(vcombine_u16(vqmovn_u32 (G_int_low),vqmovn_u32 (G_int_high)));
RGB.val[2] = vmovn_u16(vcombine_u16(vqmovn_u32 (B_int_low),vqmovn_u32 (B_int_high)));
vst3_u8 (out+i*width*3 + j*3, RGB);
}
}
}
/* YUV-> RGB conversion code. (YUV420 to RGB565)
*
* Copyright (C) 2008-9 Robin Watts (robin@wss.co.uk) for Pinknoise
* Productions Ltd.
*
* Licensed under the GNU GPL. If you need it under another license, contact
* me and ask.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program ; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*
* The algorithm used here is based heavily on one created by Sophie Wilson
* of Acorn/e-14/Broadcomm. Many thanks.
*
* Additional tweaks (in the fast fixup code) are from Paul Gardiner.
*
* The old implementation of YUV -> RGB did:
*
* R = CLAMP((Y-16)*1.164 + 1.596*V)
* G = CLAMP((Y-16)*1.164 - 0.391*U - 0.813*V)
* B = CLAMP((Y-16)*1.164 + 2.018*U )
*
* We're going to bend that here as follows:
*
* R = CLAMP(y + 1.596*V)
* G = CLAMP(y - 0.383*U - 0.813*V)
* B = CLAMP(y + 1.976*U )
*
* where y = 0 for Y <= 16,
* y = ( Y-16)*1.164, for 16 < Y <= 239,
* y = (239-16)*1.164, for 239 < Y
*
* i.e. We clamp Y to the 16 to 239 range (which it is supposed to be in
* anyway). We then pick the B_U factor so that B never exceeds 511. We then
* shrink the G_U factor in line with that to avoid a colour shift as much as
* possible.
*
 * We're going to use tables to do it faster, but rather than doing it using
 * 5 tables as the above suggests, we're going to do it using just 3.
*
* We do this by working in parallel within a 32 bit word, and using one
* table each for Y U and V.
*
* Source Y values are 0 to 255, so 0.. 260 after scaling
* Source U values are -128 to 127, so -49.. 49(G), -253..251(B) after
* Source V values are -128 to 127, so -204..203(R), -104..103(G) after
*
* So total summed values:
* -223 <= R <= 481, -173 <= G <= 431, -253 <= B < 511
*
* We need to pack R G and B into a 32 bit word, and because of Bs range we
* need 2 bits above the valid range of B to detect overflow, and another one
* to detect the sense of the overflow. We therefore adopt the following
* representation:
*
* osGGGGGgggggosBBBBBbbbosRRRRRrrr
*
* Each such word breaks down into 3 ranges.
*
* osGGGGGggggg osBBBBBbbb osRRRRRrrr
*
 * Thus we have 8 bits for each B and R table entry, and 10 bits for G (good
 * as G is the most noticeable one). The s bit for each represents the sign,
 * and o represents the overflow.
*
* For R and B we pack the table by taking the 11 bit representation of their
* values, and toggling bit 10 in the U and V tables.
*
* For the green case we calculate 4*G (thus effectively using 10 bits for the
* valid range) truncate to 12 bits. We toggle bit 11 in the Y table.
*/
#include "yuv2rgb.h"
enum
{
FLAGS = 0x40080100
};
#define READUV(U,V) (tables[256 + (U)] + tables[512 + (V)])
#define READY(Y) tables[Y]
#define FIXUP(Y) \
do { \
int tmp = (Y) & FLAGS; \
if (tmp != 0) \
{ \
tmp -= tmp>>8; \
(Y) |= tmp; \
tmp = FLAGS & ~(Y>>1); \
(Y) += tmp>>8; \
} \
} while (0 == 1)
#define STORE(Y,DSTPTR) \
do { \
uint32_t Y2 = (Y); \
uint8_t *DSTPTR2 = (DSTPTR); \
(DSTPTR2)[0] = (Y2); \
(DSTPTR2)[1] = (Y2)>>22; \
(DSTPTR2)[2] = (Y2)>>11; \
} while (0 == 1)
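/* A compact restatement of what STORE() extracts (illustrative only; not part
 * of the original file). FLAGS = 0x40080100 is exactly the three per-channel
 * flag bits that FIXUP() tests. */
static inline void unpack_word(uint32_t w, uint8_t *r, uint8_t *g, uint8_t *b)
{
    *r = (uint8_t)w;         /* value bits 0..7,   flag bit 8  */
    *b = (uint8_t)(w >> 11); /* value bits 11..18, flag bit 19 */
    *g = (uint8_t)(w >> 22); /* value bits 22..29, flag bit 30 */
}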
void yuv420_2_rgb888(uint8_t *dst_ptr,
const uint8_t *y_ptr,
const uint8_t *u_ptr,
const uint8_t *v_ptr,
int32_t width,
int32_t height,
int32_t y_span,
int32_t uv_span,
int32_t dst_span,
const uint32_t *tables,
int32_t dither)
{
height -= 1;
while (height > 0)
{
height -= width<<16;
height += 1<<16;
while (height < 0)
{
/* Do 2 column pairs */
uint32_t uv, y0, y1;
uv = READUV(*u_ptr++,*v_ptr++);
y1 = uv + READY(y_ptr[y_span]);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &dst_ptr[dst_span]);
STORE(y0, dst_ptr);
dst_ptr += 3;
y1 = uv + READY(y_ptr[y_span]);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &dst_ptr[dst_span]);
STORE(y0, dst_ptr);
dst_ptr += 3;
height += (2<<16);
}
if ((height>>16) == 0)
{
/* Trailing column pair */
uint32_t uv, y0, y1;
uv = READUV(*u_ptr,*v_ptr);
y1 = uv + READY(y_ptr[y_span]);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &dst_ptr[dst_span]);
STORE(y0, dst_ptr);
dst_ptr += 3;
}
dst_ptr += dst_span*2-width*3;
y_ptr += y_span*2-width;
u_ptr += uv_span-(width>>1);
v_ptr += uv_span-(width>>1);
height = (height<<16)>>16;
height -= 2;
}
if (height == 0)
{
/* Trail row */
height -= width<<16;
height += 1<<16;
while (height < 0)
{
/* Do a row pair */
uint32_t uv, y0, y1;
uv = READUV(*u_ptr++,*v_ptr++);
y1 = uv + READY(*y_ptr++);
y0 = uv + READY(*y_ptr++);
FIXUP(y1);
FIXUP(y0);
STORE(y1, dst_ptr);
dst_ptr += 3;
STORE(y0, dst_ptr);
dst_ptr += 3;
height += (2<<16);
}
if ((height>>16) == 0)
{
/* Trailing pix */
uint32_t uv, y0;
uv = READUV(*u_ptr++,*v_ptr++);
y0 = uv + READY(*y_ptr++);
FIXUP(y0);
STORE(y0, dst_ptr);
dst_ptr += 3;
}
}
}
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>
#include <yuv2rgb.h>
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples,
except that the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
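/*
 * Index arithmetic implied by this layout (an illustrative helper
 * mirroring the pointer math in color_convert_common below): for pixel
 * (x, y) the luma sample is pY[y*width + x], and the chroma pair shared
 * by its 2x2 block starts at pUV[(y/2)*width + 2*(x/2)], V first then U.
 */
static void sample_yuv420sp(const unsigned char *pY, const unsigned char *pUV,
                            int width, int x, int y,
                            unsigned char *Y, unsigned char *U, unsigned char *V)
{
    const unsigned char *uv = pUV + (y / 2) * width + 2 * (x / 2);
    *Y = pY[y * width + x];
    *V = uv[0]; /* V comes first: the interleave order is reversed */
    *U = uv[1];
}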
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
#ifndef max
#define max(a,b) ((a) > (b) ? (a) : (b))
#define min(a,b) ((a) < (b) ? (a) : (b))
#endif
enum
{
FLAGS = 0x40080100
};
#define READUV(U,V) (tables[256 + (U)] + tables[512 + (V)])
#define READY(Y) tables[Y]
#define FIXUP(Y) \
do { \
int tmp = (Y) & FLAGS; \
if (tmp != 0) \
{ \
tmp -= tmp>>8; \
(Y) |= tmp; \
tmp = FLAGS & ~(Y>>1); \
(Y) += tmp>>8; \
} \
} while (0 == 1)
#define STORE(Y,DSTPTR) \
do { \
uint32_t Y2 = (Y); \
uint8_t *DSTPTR2 = (DSTPTR); \
(DSTPTR2)[2] = (Y2); \
(DSTPTR2)[1] = (Y2)>>22; \
(DSTPTR2)[0] = (Y2)>>11; \
} while (0 == 1)
typedef unsigned char byte;
const int bytes_per_pixel = 2;
void color_convert_common(const unsigned char *pY, const unsigned char *pUV, int width, int height,
unsigned char *buffer, int grey)
{
#define LOOKUP 1
#if ! LOOKUP
int nR, nG, nB;
#endif
int dest_span = 3 * width;
unsigned char *out = buffer;
if (grey)
{
memcpy(out, pY, width * height * sizeof(unsigned char));
}
else
{
#if LOOKUP
const uint32_t* tables = yuv2rgb565_table;
const byte* nY = pY;
const byte* nUV = pUV;
int idx = 0;
while (nY+width < pUV)
{
int y = (idx / width);
int x = (idx % width);
byte Y = *nY;
byte Y2 = nY[width];
byte V = *nUV;
byte U = *(nUV + 1);
/* Do 2 row pairs */
uint32_t uv, y0, y1;
uv = READUV(U,V);
y1 = uv + READY(Y);
y0 = uv + READY(Y2);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &out[dest_span]);
STORE(y0, out);
out += 3;
Y = *(++nY);
Y2 = nY[width];
y1 = uv + READY(Y);
y0 = uv + READY(Y2);
FIXUP(y1);
FIXUP(y0);
STORE(y1, &out[dest_span]);
STORE(y0, out);
out += 3;
++nY;
nUV = pUV + (y / 2) * width + 2 * (x / 2);
idx+=2;
}
#else
const byte* nY = pY;
const byte* nUV = pUV;
int idx = 0;
while (nY < pUV)
{
int y = (idx / width);
int x = (idx % width);
int Y = *nY;
int V = *nUV;
int U = *(nUV + 1);
Y -= 16;
V -= 128;
U -= 128;
if (Y < 0)
Y = 0;
nB = (int)(1192 * Y + 2066 * U);
nG = (int)(1192 * Y - 833 * V - 400 * U);
nR = (int)(1192 * Y + 1634 * V);
nR = min(262143, max(0, nR));
nG = min(262143, max(0, nG));
nB = min(262143, max(0, nB));
nR >>= 10;
nR &= 0xff;
nG >>= 10;
nG &= 0xff;
nB >>= 10;
nB &= 0xff;
*(out++) = (unsigned char)nR;
*(out++) = (unsigned char)nG;
*(out++) = (unsigned char)nB;
nY += 1;
nUV = pUV + (y / 2) * width + 2 * (x / 2);
++idx;
}
#endif
}
}
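/*
 * Usage sketch (illustrative, not from the original sources): the output
 * buffer must hold 3 bytes per pixel, and pUV is expected to follow the
 * luma plane directly, e.g.
 *
 *   unsigned char *rgb = (unsigned char *)malloc(width * height * 3);
 *   color_convert_common(yuv, yuv + width * height,
 *                        width, height, rgb, 0);
 *
 * where yuv points to width*height Y bytes followed by the interleaved
 * V/U plane. With grey != 0 only width*height bytes are written.
 */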
#!/bin/sh
# this generates an Ant-based CLI build of the android-jni project
android update project --name android-opencv \
--path .
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:scrollbars="vertical" android:id="@+id/calibtext" android:text="" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent" android:layout_height="fill_parent"
android:background="@drawable/cameraback">
<!--<SurfaceView-->
<com.opencv.camera.NativePreviewer
android:id="@+id/nativepreviewer" android:layout_width="400dip"
android:layout_height="300dip" android:layout_alignParentLeft="true"
android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
android:layout_marginRight="20dip" />
<LinearLayout android:id="@+id/glview_layout"
android:layout_width="400dip" android:layout_height="300dip"
android:layout_alignParentLeft="true" android:layout_margin="20dip"
android:gravity="center_horizontal|center_vertical"
android:layout_marginRight="20dip">
</LinearLayout>
<LinearLayout android:layout_width="wrap_content"
android:layout_height="fill_parent" android:orientation="vertical"
android:layout_margin="20dip" android:gravity="center_horizontal|center_vertical"
android:layout_alignParentRight="true">
<ImageButton android:src="@android:drawable/ic_menu_camera"
android:id="@+id/button_capture" android:layout_width="60dip"
android:layout_height="60dip" android:layout_marginBottom="10dip"></ImageButton>
<ImageButton android:src="@android:drawable/ic_menu_preferences"
android:id="@+id/button_camera_settings" android:layout_width="60dip"
android:layout_height="60dip" android:layout_marginBottom="10dip"></ImageButton>
</LinearLayout>
</RelativeLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent" android:layout_height="fill_parent"
android:orientation="vertical" android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/settings_text"
android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip" />
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/image_size_prompt" />
<Spinner android:id="@+id/image_size" android:layout_width="fill_parent"
android:layout_height="wrap_content" android:saveEnabled="true"
android:prompt="@string/image_size_prompt" android:entries="@array/image_sizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/camera_mode_prompt" />
<Spinner android:id="@+id/camera_mode" android:layout_width="fill_parent"
android:layout_height="wrap_content" android:saveEnabled="true"
android:prompt="@string/camera_mode_prompt" android:entries="@array/camera_mode">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="@string/whitebalance_prompt" />
<Spinner android:id="@+id/whitebalance" android:layout_width="fill_parent"
android:layout_height="wrap_content" android:saveEnabled="true"
android:prompt="@string/whitebalance_prompt" android:entries="@array/whitebalance">
</Spinner>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
android:orientation="vertical"
android:gravity="center_vertical|center_horizontal">
<TextView android:text="@string/patterntext" android:autoLink="web" android:layout_width="wrap_content"
android:layout_height="wrap_content" android:padding="20dip"/>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Corners in width direction:"/>
<Spinner android:id="@+id/rows"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chesspromptx"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
<LinearLayout android:id="@+id/LinearLayout01"
android:layout_width="wrap_content" android:layout_height="wrap_content"
android:gravity="center_vertical">
<TextView android:layout_width="wrap_content"
android:layout_height="wrap_content" android:text="Corners in height direction:"/>
<Spinner android:id="@+id/cols"
android:layout_width="fill_parent"
android:layout_height="wrap_content"
android:saveEnabled="true"
android:prompt="@string/chessprompty"
android:entries="@array/chesssizes">
</Spinner>
</LinearLayout>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<declare-styleable name="CameraParams">
<attr name="preview_width" format="integer"/>
<attr name="preview_height" format="integer"/>
</declare-styleable>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="chesssizes">
<item>2</item>
<item>3</item>
<item>4</item>
<item>5</item>
<item>6</item>
<item>7</item>
<item>8</item>
<item>9</item>
<item>10</item>
<item>11</item>
<item>12</item>
<item>13</item>
</string-array>
<string name="chesspromptx">
Choose the width:</string>
<string name="chessprompty">
Choose the height:</string>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string-array name="image_sizes">
<item>320x240</item>
<item>400x300</item>
<item>640x480</item>
<item>800x600</item>
<item>1000x800</item>
</string-array>
<string-array name="camera_mode">
<item>color</item>
<item>BW</item>
</string-array>
<string name="image_size_prompt">
Image Size:\n(may not be exact)
</string>
<string name="camera_mode_prompt">
Camera Mode:
</string>
<string-array name="whitebalance">
<item>auto</item>
<item>incandescent</item>
<item>fluorescent</item>
<item>daylight</item>
<item>cloudy-daylight</item>
</string-array>
<string name="whitebalance_prompt">
Whitebalance:
</string>
</resources>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">Calibration</string>
<string name="patternsize">Pattern Size</string>
<string name="patterntext">Please choose the width and height (number of inside corners) of the checker
board pattern you will be using for calibration. Default is 6 by 8 corners. You may find a checkerboard pattern at
http://opencv.willowgarage.com/pattern</string>
<string name="patternlink">http://opencv.willowgarage.com/pattern</string>
<string name="camera_settings_label">Camera Settings</string>
<string name="settings_text">Change the camera settings. Be aware that BW is much faster for previewing, than color. Also, if you change the image size, you should
rerun calibration. Default values: BW and 640x480 are a good start.</string>
<string name="calibration_service_started">Calibration calculations have started...</string>
<string name="calibration_service_stopped">Calibration calculations has stopped.</string>
<string name="calibration_service_finished">Calibration finished, you camera is calibrated.</string>
<string name="calibration_service_label">Calibration</string>
<string name="calibration_not_enough">Please capture atleast 10 images of the pattern!</string>
</resources>
package com.opencv;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.opengl.GL2CameraViewer;
public class OpenCV extends Activity {
private NativePreviewer mPreview;
private GL2CameraViewer glview;
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
return super.onKeyUp(keyCode, event);
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return super.onKeyLongPress(keyCode, event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// menu.add("Sample");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// if(item.getTitle().equals("Sample")){
// //do stuff...
// }
return true;
}
@Override
public void onOptionsMenuClosed(Menu menu) {
// TODO Auto-generated method stub
super.onOptionsMenuClosed(menu);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
FrameLayout frame = new FrameLayout(getApplication());
// Create our Preview view and set it as the content of our activity.
mPreview = new NativePreviewer(getApplication(), 640, 480);
LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.height = getWindowManager().getDefaultDisplay().getHeight();
params.width = (int) (params.height * 4.0 / 2.88);
LinearLayout vidlay = new LinearLayout(getApplication());
vidlay.setGravity(Gravity.CENTER);
vidlay.addView(mPreview, params);
frame.addView(vidlay);
// make the glview overlay on top of the video preview
mPreview.setZOrderMediaOverlay(false);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview.setZOrderMediaOverlay(true);
glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
LayoutParams.FILL_PARENT));
frame.addView(glview);
setContentView(frame);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
glview.onResume();
LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
callbackstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(callbackstack);
mPreview.onResume();
}
}
package com.opencv.calibration;
import java.io.BufferedReader;
import java.io.FileNotFoundException;
import java.io.FileReader;
import java.io.IOException;
import android.app.Activity;
import android.os.Bundle;
import android.text.method.ScrollingMovementMethod;
import android.util.Log;
import android.widget.TextView;
import com.opencv.R;
public class CalibrationViewer extends Activity {
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.calibrationviewer);
Bundle extras = getIntent().getExtras();
String filename = extras.getString("calibfile");
if (filename != null) {
TextView text = (TextView) findViewById(R.id.calibtext);
text.setMovementMethod(new ScrollingMovementMethod());
try {
BufferedReader reader = new BufferedReader(new FileReader(
filename));
while (reader.ready()) {
text.append(reader.readLine() +"\n");
}
} catch (FileNotFoundException e) {
Log.e("opencv", "could not open calibration file at:"
+ filename);
} catch (IOException e) {
Log.e("opencv", "error reading file: "
+ filename);
}
}
}
}
package com.opencv.calibration;
import java.io.File;
import java.io.IOException;
import java.util.concurrent.locks.ReentrantLock;
import android.os.AsyncTask;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.Calibration;
import com.opencv.jni.Size;
import com.opencv.jni.image_pool;
public class Calibrator implements PoolCallback {
private Calibration calibration;
static public interface CalibrationCallback{
public void onFoundChessboard(Calibrator calibrator);
public void onDoneCalibration(Calibrator calibration, File calibfile);
public void onFailedChessboard(Calibrator calibrator);
}
private CalibrationCallback callback;
public Calibrator(CalibrationCallback callback) {
calibration = new Calibration();
this.callback = callback;
}
public void resetCalibration(){
calibration.resetChess();
}
public void setPatternSize(Size size){
Size csize = calibration.getPatternsize();
if(size.getWidth() == csize.getWidth()&&
size.getHeight() == csize.getHeight())
return;
calibration.setPatternsize(size);
resetCalibration();
}
public void setPatternSize(int width, int height){
Size patternsize = new Size(width,height);
setPatternSize(patternsize);
}
private boolean capture_chess;
ReentrantLock lock = new ReentrantLock();
public void calibrate(File calibration_file) throws IOException{
if(getNumberPatternsDetected() < 3){
return;
}
CalibrationTask calibtask = new CalibrationTask(calibration_file);
calibtask.execute((Object[])null);
}
public void queueChessCapture(){
capture_chess = true;
}
private class CalibrationTask extends AsyncTask<Object, Object, Object> {
File calibfile;
public CalibrationTask(File calib) throws IOException{
super();
calibfile = calib;
calibfile.createNewFile();
}
@Override
protected Object doInBackground(Object... params) {
lock.lock();
try{
calibration.calibrate(calibfile.getAbsolutePath());
}
finally{
lock.unlock();
}
return null;
}
@Override
protected void onPostExecute(Object result) {
callback.onDoneCalibration(Calibrator.this, calibfile);
}
}
//@Override
public void process(int idx, image_pool pool, long timestamp,
NativeProcessor nativeProcessor) {
if(lock.tryLock()){
try{
if(capture_chess){
if(calibration.detectAndDrawChessboard(idx, pool)){
callback.onFoundChessboard(this);
}else
callback.onFailedChessboard(this);
capture_chess = false;
}
}finally{
lock.unlock();
}
}
}
public int getNumberPatternsDetected(){
return calibration.getNumberDetectedChessboards();
}
public void setCallback(CalibrationCallback callback) {
this.callback = callback;
}
}
package com.opencv.calibration;
import com.opencv.R;
import com.opencv.jni.Size;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class ChessBoardChooser extends Activity {
public static final String CHESS_SIZE = "chess_size";
public static final int DEFAULT_WIDTH = 6;
public static final int DEFAULT_HEIGHT = 8;
public static final int LOWEST = 2;
class DimChooser implements OnItemSelectedListener {
private String dim;
public DimChooser(String dim) {
this.dim = dim;
}
@Override
public void onItemSelected(AdapterView<?> arg0, View arg1, int pos,
long arg3) {
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
Editor editor = settings.edit();
editor.putInt(dim, pos + LOWEST);
editor.commit();
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.chesssizer);
// Restore preferences
SharedPreferences settings = getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
Spinner wspin, hspin;
wspin = (Spinner) findViewById(R.id.rows);
hspin = (Spinner) findViewById(R.id.cols);
wspin.setSelection(width - LOWEST);
hspin.setSelection(height - LOWEST);
wspin.setOnItemSelectedListener(new DimChooser("width"));
hspin.setOnItemSelectedListener(new DimChooser("height"));
}
public static Size getPatternSize(Context ctx) {
SharedPreferences settings = ctx.getSharedPreferences(CHESS_SIZE, 0);
int width = settings.getInt("width", 6);
int height = settings.getInt("height", 8);
return new Size(width, height);
}
}
package com.opencv.calibration.services;
import java.io.File;
import java.io.IOException;
import android.app.Notification;
import android.app.NotificationManager;
import android.app.PendingIntent;
import android.app.Service;
import android.content.Intent;
import android.os.Binder;
import android.os.IBinder;
import android.util.Log;
import android.widget.Toast;
import com.opencv.R;
import com.opencv.calibration.CalibrationViewer;
import com.opencv.calibration.Calibrator;
import com.opencv.calibration.Calibrator.CalibrationCallback;
public class CalibrationService extends Service implements CalibrationCallback {
Class<?> activity;
int icon;
File calibration_file;
public void startCalibrating(Class<?> activitycaller,int icon_id, Calibrator calibrator, File calibration_file)
throws IOException {
activity = activitycaller;
icon = icon_id;
// Display a notification about us starting. We put an icon in the
// status bar.
showNotification();
this.calibration_file = calibration_file;
calibrator.setCallback(this);
calibrator.calibrate(calibration_file);
}
private NotificationManager mNM;
/**
* Class for clients to access. Because we know this service always runs in
* the same process as its clients, we don't need to deal with IPC.
*/
public class CalibrationServiceBinder extends Binder {
public CalibrationService getService() {
return CalibrationService.this;
}
}
@Override
public int onStartCommand(Intent intent, int flags, int startId) {
Log.i("LocalService", "Received start id " + startId + ": " + intent);
// We don't want this service to keep running once calibration is
// done, so return not sticky.
return START_NOT_STICKY;
}
@Override
public void onCreate() {
mNM = (NotificationManager) getSystemService(NOTIFICATION_SERVICE);
}
@Override
public void onDestroy() {
// Cancel the persistent notification.
// mNM.cancel(R.string.calibration_service_started);
// Tell the user we stopped.
Toast.makeText(this, R.string.calibration_service_finished,
Toast.LENGTH_SHORT).show();
}
private final IBinder mBinder = new CalibrationServiceBinder();
@Override
public IBinder onBind(Intent intent) {
return mBinder;
}
/**
* Show a notification while this service is running.
*/
private void showNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_started);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
new Intent(this, activity), 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a string resource id because it is a unique number. We use it
// later to cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
/**
* Show a notification when calibration has finished.
*/
private void doneNotification() {
// In this sample, we'll use the same text for the ticker and the
// expanded notification
CharSequence text = getText(R.string.calibration_service_finished);
// Set the icon, scrolling text and timestamp
Notification notification = new Notification(icon, text,
System.currentTimeMillis());
Intent intent = new Intent(this,CalibrationViewer.class);
intent.putExtra("calibfile", calibration_file.getAbsolutePath());
// The PendingIntent to launch our activity if the user selects this
// notification
PendingIntent contentIntent = PendingIntent.getActivity(this, 0,
intent, 0);
// Set the info for the views that show in the notification panel.
notification.setLatestEventInfo(this,
getText(R.string.calibration_service_label), text,
contentIntent);
notification.defaults |= Notification.DEFAULT_SOUND;
// Send the notification.
// We use a string resource id because it is a unique number. We use it
// later to cancel.
mNM.notify(R.string.calibration_service_started, notification);
}
@Override
public void onFoundChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
@Override
public void onDoneCalibration(Calibrator calibration, File calibfile) {
doneNotification();
stopSelf();
}
@Override
public void onFailedChessboard(Calibrator calibrator) {
// TODO Auto-generated method stub
}
}
package com.opencv.camera;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.KeyEvent;
import android.view.MotionEvent;
import android.view.Window;
import android.view.WindowManager;
import android.widget.LinearLayout;
import com.opencv.camera.CameraButtonsHandler.CaptureListener;
import com.opencv.opengl.GL2CameraViewer;
public abstract class CameraActivity extends Activity implements CaptureListener {
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
setOrientation();
disableScreenTurnOff();
setContentView(com.opencv.R.layout.camera);
cameraButtonHandler = new CameraButtonsHandler(this,this);
mPreview = (NativePreviewer) findViewById(com.opencv.R.id.nativepreviewer);
LinearLayout glview_layout = (LinearLayout) findViewById(com.opencv.R.id.glview_layout);
glview = new GL2CameraViewer(getApplication(), true, 0, 0);
glview_layout.addView(glview);
}
/**
* Handle the capture button as follows...
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
switch (keyCode) {
case KeyEvent.KEYCODE_CAMERA:
case KeyEvent.KEYCODE_SPACE:
case KeyEvent.KEYCODE_DPAD_CENTER:
cameraButtonHandler.setIsCapture(true);
return true;
default:
return super.onKeyUp(keyCode, event);
}
}
/**
* Handle the capture button as follows... On some phones there is no
* capture button, only a trackball.
*/
@Override
public boolean onTrackballEvent(MotionEvent event) {
if (event.getAction() == MotionEvent.ACTION_UP) {
cameraButtonHandler.setIsCapture(true);
return true;
}
return super.onTrackballEvent(event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
mPreview.setParamsFromPrefs(getApplicationContext());
glview.onResume();
mPreview.onResume();
setCallbackStack();
}
protected void setCallbackStack() {
LinkedList<NativeProcessor.PoolCallback> callbackstack = getCallBackStack();
if (callbackstack == null){
callbackstack = new LinkedList<NativeProcessor.PoolCallback>();
callbackstack.add(glview.getDrawCallback());
}
mPreview.addCallbackStack(callbackstack);
}
/**
* Override this and provide your processors to the camera
*
* @return null for default drawing
*/
protected abstract LinkedList<NativeProcessor.PoolCallback> getCallBackStack();
public void onCapture(){
}
protected NativePreviewer mPreview;
protected GL2CameraViewer glview;
protected CameraButtonsHandler cameraButtonHandler;
}
package com.opencv.camera;
import android.app.Activity;
import android.content.Context;
import android.content.Intent;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.ImageButton;
public class CameraButtonsHandler {
/** Constructs a buttons handler that registers with the capture button
* and the camera settings button.
* @param a The activity that has inflated the com.opencv.R.layout.camera
* as its layout.
*/
public CameraButtonsHandler(Activity a, CaptureListener l) {
ImageButton capture = (ImageButton) a
.findViewById(com.opencv.R.id.button_capture);
ImageButton settings = (ImageButton) a
.findViewById(com.opencv.R.id.button_camera_settings);
capture.setOnClickListener(capture_listener);
settings.setOnClickListener(settings_listener);
captureListener = l;
ctx = a;
}
public CameraButtonsHandler(Activity a) {
ImageButton capture = (ImageButton) a
.findViewById(com.opencv.R.id.button_capture);
ImageButton settings = (ImageButton) a
.findViewById(com.opencv.R.id.button_camera_settings);
capture.setOnClickListener(capture_listener);
settings.setOnClickListener(settings_listener);
ctx = a;
}
/** Check if the capture button has been pressed
* @return true if the capture button has been pressed
*/
synchronized public boolean isCapture(){
return capture_flag;
}
/** Reset the capture flag
*/
synchronized public void resetIsCapture(){
capture_flag = false;
}
/** Manually set the flag - call this on any event that should trigger
* a capture
* @param isCapture true if a capture should take place
*/
synchronized public void setIsCapture(boolean isCapture){
capture_flag = isCapture;
if(capture_flag && captureListener != null){
captureListener.onCapture();
}
}
private OnClickListener capture_listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
setIsCapture(true);
}
};
private OnClickListener settings_listener = new View.OnClickListener() {
@Override
public void onClick(View v) {
Intent configurer = new Intent(ctx,
CameraConfig.class);
ctx.startActivity(configurer);
}
};
interface CaptureListener{
public void onCapture();
}
private CaptureListener captureListener;
private Context ctx;
private boolean capture_flag = false;
}
package com.opencv.camera;
import com.opencv.R;
import android.app.Activity;
import android.content.Context;
import android.content.SharedPreferences;
import android.content.SharedPreferences.Editor;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.AdapterView.OnItemSelectedListener;
import android.widget.Spinner;
public class CameraConfig extends Activity {
public static final String CAMERA_SETTINGS = "CAMERA_SETTINGS";
public static final String CAMERA_MODE = "camera_mode";
public static final String IMAGE_WIDTH = "IMAGE_WIDTH";
public static final String IMAGE_HEIGHT = "IMAGE_HEIGHT";
public static final int CAMERA_MODE_BW = 0;
public static final int CAMERA_MODE_COLOR = 1;
private static final String WHITEBALANCE = "WHITEBALANCE";
public static int readCameraMode(Context ctx) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
int mode = settings.getInt(CAMERA_MODE, CAMERA_MODE_BW);
return mode;
}
public static String readWhitebalance(Context ctx) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
return settings.getString(WHITEBALANCE, "auto");
}
static public void setCameraMode(Context context, String mode) {
int m = 0;
if (mode.equals("BW")) {
m = CAMERA_MODE_BW;
} else if (mode.equals("color"))
m = CAMERA_MODE_COLOR;
setCameraMode(context, m);
}
private static String sizeToString(int[] size) {
return size[0] + "x" + size[1];
}
private static void parseStrToSize(String ssize, int[] size) {
String sz[] = ssize.split("x");
size[0] = Integer.valueOf(sz[0]);
size[1] = Integer.valueOf(sz[1]);
}
public static void readImageSize(Context ctx, int[] size) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
size[0] = settings.getInt(IMAGE_WIDTH, 640);
size[1] = settings.getInt(IMAGE_HEIGHT, 480);
}
public static void setCameraMode(Context ctx, int mode) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(CAMERA_MODE, mode);
editor.commit();
}
public static void setImageSize(Context ctx, String strsize) {
int size[] = { 0, 0 };
parseStrToSize(strsize, size);
setImageSize(ctx, size[0], size[1]);
}
public static void setImageSize(Context ctx, int width, int height) {
// Restore preferences
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putInt(IMAGE_WIDTH, width);
editor.putInt(IMAGE_HEIGHT, height);
editor.commit();
}
@Override
protected void onCreate(Bundle savedInstanceState) {
// TODO Auto-generated method stub
super.onCreate(savedInstanceState);
setContentView(R.layout.camerasettings);
int mode = readCameraMode(this);
int size[] = { 0, 0 };
readImageSize(this, size);
final Spinner size_spinner;
final Spinner mode_spinner;
final Spinner whitebalance_spinner;
size_spinner = (Spinner) findViewById(R.id.image_size);
mode_spinner = (Spinner) findViewById(R.id.camera_mode);
whitebalance_spinner = (Spinner) findViewById(R.id.whitebalance);
String strsize = sizeToString(size);
String strmode = modeToString(mode);
String wbmode = readWhitebalance(getApplicationContext());
String sizes[] = getResources().getStringArray(R.array.image_sizes);
int i = 1;
for (String x : sizes) {
if (x.equals(strsize))
break;
i++;
}
if(i <= sizes.length)
size_spinner.setSelection(i-1);
i = 1;
String modes[] = getResources().getStringArray(R.array.camera_mode);
for (String x :modes) {
if (x.equals(strmode))
break;
i++;
}
if(i <= modes.length)
mode_spinner.setSelection(i-1);
i = 1;
String wbmodes[] = getResources().getStringArray(R.array.whitebalance);
for (String x :wbmodes) {
if (x.equals(wbmode))
break;
i++;
}
if(i <= wbmodes.length)
whitebalance_spinner.setSelection(i-1);
size_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = size_spinner.getItemAtPosition(position);
if (o != null)
setImageSize(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
mode_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = mode_spinner.getItemAtPosition(position);
if (o != null)
setCameraMode(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
whitebalance_spinner.setOnItemSelectedListener(new OnItemSelectedListener() {
@Override
public void onItemSelected(AdapterView<?> arg0, View spinner,
int position, long arg3) {
Object o = whitebalance_spinner.getItemAtPosition(position);
if (o != null)
setWhitebalance(spinner.getContext(), (String) o);
}
@Override
public void onNothingSelected(AdapterView<?> arg0) {
}
});
}
public static void setWhitebalance(Context ctx, String o) {
SharedPreferences settings = ctx.getSharedPreferences(CAMERA_SETTINGS,
0);
Editor editor = settings.edit();
editor.putString(WHITEBALANCE, o);
editor.commit();
}
private String modeToString(int mode) {
switch (mode) {
case CAMERA_MODE_BW:
return "BW";
case CAMERA_MODE_COLOR:
return "color";
default:
return "";
}
}
}
package com.opencv.utils;
import java.nio.ByteBuffer;
import com.opencv.jni.Mat;
import com.opencv.jni.Size;
import com.opencv.jni.opencv;
import android.graphics.Bitmap;
import android.graphics.Bitmap.Config;
public class BitmapBridge {
static void copyBitmap(Bitmap bmap, Mat mat) throws Exception {
if (bmap.getConfig() == null || bmap.getConfig() != Config.ARGB_8888)
throw new Exception("bad config");
Size sz = new Size(bmap.getWidth(), bmap.getHeight());
mat.create(sz, opencv.CV_8UC4);
ByteBuffer buffer = ByteBuffer.allocate(4 * bmap.getWidth()
* bmap.getHeight());
bmap.copyPixelsToBuffer(buffer);
opencv.copyBufferToMat(mat, buffer);
}
static Bitmap matToBitmap(Mat mat) {
Bitmap bmap = Bitmap.createBitmap(mat.getCols(), mat.getRows(),
Config.ARGB_8888);
ByteBuffer buffer = ByteBuffer.allocate(4 * bmap.getWidth()
* bmap.getHeight());
opencv.copyMatToBuffer(buffer, mat);
bmap.copyPixelsFromBuffer(buffer);
return bmap;
}
}
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.theveganrobot.cvcamera" android:versionCode="7" android:versionName="7.0"
>
<application android:debuggable="false" android:icon="@drawable/icon">
<activity android:name=".CVCamera" android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard"
android:label="@string/app_name"
>
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
<activity android:name="com.opencv.camera.CameraConfig" android:label="@string/app_name"
android:screenOrientation="landscape"
android:configChanges="orientation|keyboardHidden|keyboard">
</activity>
</application>
<uses-feature android:glEsVersion="0x00020000" android:required="true"/>
<uses-feature android:name="android.hardware.camera" android:required="true"/>
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
<uses-sdk android:minSdkVersion="7" android:targetSdkVersion="7" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE"></uses-permission>
</manifest>
cmake_minimum_required(VERSION 2.8)
project(CVCamera)
add_subdirectory(jni)
see http://code.google.com/p/android-opencv/wiki/CVCamera
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
android.library.reference.1=../../android-opencv
# Project target.
target=android-7
#########################################################
# Find opencv and android-opencv
#########################################################
set(OpenCV_DIR ${CMAKE_SOURCE_DIR}/../../build
CACHE PATH "The path where you built opencv for android")
set(AndroidOpenCV_DIR ${CMAKE_SOURCE_DIR}/../../android-opencv/build
CACHE PATH "The path where you built android-opencv")
find_package(OpenCV REQUIRED)
FIND_PACKAGE(AndroidOpenCV REQUIRED )
#########################################################
#c flags, included, and lib dependencies
#########################################################
#notice the "recycling" of CMAKE_C_FLAGS
#this is necessary to pick up android flags
set( CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wall -pedantic -fPIC" )
INCLUDE_DIRECTORIES(${CMAKE_CURRENT_SOURCE_DIR})
set( LIBRARY_DEPS ${AndroidOpenCV_LIBS} ${OpenCV_LIBS} )
if(ANDROID)
set( LIBRARY_DEPS ${LIBRARY_DEPS} log dl)
endif(ANDROID)
#########################################################
#SWIG STUFF
#########################################################
#the java package to place swig generated java files in
set(MY_PACKAGE com.theveganrobot.cvcamera.jni)
if(NOT ANDROID)
#non android swig and jni
#jni is available by default on android
find_package(JNI REQUIRED)
include_directories(${JNI_INCLUDE_DIRS})
FIND_PACKAGE(SWIG)
endif()
INCLUDE(${SWIG_USE_FILE}) #on android this is found by the cmake toolchain
if(ANDROID)
#this will set the output path for the java package
#and properly create the package declarations in generated java sources
SET_SWIG_JAVA_PACKAGE( ${MY_PACKAGE} ) #defined in the android toolchain
endif(ANDROID)
#this adds the swig path for the opencv wrappers
SET(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} "-I${AndroidOpenCV_SWIG_DIR}" )
SET_SOURCE_FILES_PROPERTIES(cvcamera.i PROPERTIES CPLUSPLUS ON)
#add the swig module, giving it the name, java, and then all of the source files
SWIG_ADD_MODULE(cvcamera java
cvcamera.i #swig file
Processor.cpp #cpp files can be compiled too
)
#link the module like any other
target_link_libraries(cvcamera ${LIBRARY_DEPS} )
/*
* Processor.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Processor
{
public:
Processor();
virtual ~Processor();
void detectAndDrawFeatures(int idx, image_pool* pool, int feature_type);
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
private:
cv::StarFeatureDetector stard;
cv::FastFeatureDetector fastd;
cv::SurfFeatureDetector surfd;
std::vector<cv::KeyPoint> keypoints;
std::vector<std::vector<cv::Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
};
#endif /* PROCESSOR_H_ */
android update project --name CVCamera \
--path .
<?xml version="1.0" encoding="utf-8"?>
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:orientation="vertical"
android:layout_width="fill_parent"
android:layout_height="fill_parent"
>
</LinearLayout>
<?xml version="1.0" encoding="utf-8"?>
<resources>
<string name="app_name">CVCamera</string>
<string name="app_description">app to demo using android camera and passing data to opencv layer.</string>
<string name="Changes">Release 0.0.1 - first demo of using the OpenCV library with camera data</string>
</resources>
#!/bin/bash
echo uninstalling CVCamera from phone
adb uninstall com.theveganrobot.cvcamera
android update project --name Calibration \
--path .