Commit fbac2027 authored by Ethan Rublee

adding cmake based android scripts and a reusable android library - samples to follow - haven't tested yet completely
parent bd829b8f
LOCAL_PATH := ${CMAKE_CURRENT_SOURCE_DIR}
include $(CLEAR_VARS)
LOCAL_MODULE := ${android_module_name}
LOCAL_SRC_FILES := ${android_srcs}
LOCAL_CFLAGS := ${android_defs}
LOCAL_C_INCLUDES := ${include_dirs} $(LOCAL_PATH)
include $(BUILD_STATIC_LIBRARY)
include android-opencv.mk
include modules/Android.mk
include 3rdparty/Android.mk
include zlib-android/Android.mk
include $(call all-subdir-makefiles)
macro(define_android_manual name lib_srcs includes)
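# name:     module name substituted into Android.mk.in as android_module_name
# lib_srcs: absolute paths to the sources; rewritten below relative to CMAKE_CURRENT_SOURCE_DIR
# includes: include directories passed through to LOCAL_C_INCLUDES via include_dirs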
set(android_module_name ${name})
set(android_srcs "")
set(include_dirs "${includes}")
foreach(f ${lib_srcs})
string(REPLACE "${CMAKE_CURRENT_SOURCE_DIR}/" "" n_f ${f})
set(android_srcs "${android_srcs} ${n_f}")
endforeach()
configure_file("${CMAKE_SOURCE_DIR}/Android.mk.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
endmacro()
macro(define_3rdparty_module name)
file(GLOB lib_srcs "*.c" "*.cpp")
file(GLOB lib_int_hdrs "*.h*")
define_android_manual(${name} "${lib_srcs}" "$(LOCAL_PATH)/../include")
endmacro()
macro(define_opencv_module name)
file(GLOB lib_srcs "src/*.cpp")
file(GLOB lib_int_hdrs "src/*.h*")
define_android_manual(${name} "${lib_srcs}" "$(LOCAL_PATH)/src $(OPENCV_INCLUDES)")
endmacro()
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv"
android:versionCode="1"
android:versionName="1.0">
</manifest>
APP_BUILD_SCRIPT := $(call my-dir)/Android.mk
APP_PROJECT_PATH := $(call my-dir)
# The ARMv7 is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_MODULES := png jpeg jasper zlib opencv_lapack core imgproc ml highgui features2d \
legacy objdetect calib3d video contrib flann
# ----------------------------------------------------------------------------
# Root CMake file for Android Opencv Build
#
# To build with cmake
# $ mkdir build
# $ cd build
# $ cmake ..
# $ make
# Make sure to set the path to the crystax ndk in the cache; the ndk is
# available here:
# http://www.crystax.net/android/ndk-r4.php
#
# - initial version August 2010 Ethan Rublee ethan.rublee@gmail.com
#
# ----------------------------------------------------------------------------
project(android-opencv)
cmake_minimum_required(VERSION 2.8)
include(AndroidCVModule.cmake)
set(opencv_root "${CMAKE_SOURCE_DIR}/.." CACHE STRING "opencv source root directory")
if(NOT EXISTS ${opencv_root})
message(FATAL_ERROR "Cannot find your opencv root directory!" )
endif()
set(ANDROID true)
set(WITH_JASPER true)
set(JASPER_FOUND false)
set(WITH_PNG true)
set(WITH_JPEG true)
file(GLOB module_includes "${opencv_root}/modules/[a-zA-Z]*")
list(REMOVE_ITEM module_includes ${opencv_root}/modules/CMakeLists.txt)
set(module_includes ${module_includes} ${CMAKE_SOURCE_DIR}/../3rdparty $(OPENCV_BUILD_ROOT) )
foreach(mdir ${module_includes})
string(REPLACE "${opencv_root}" "$(OPENCV_ROOT)" n_f ${mdir})
set(android_module_include_dirs "${android_module_include_dirs} ${n_f}/include")
endforeach()
configure_file("${CMAKE_SOURCE_DIR}/Android.mk.master.in" "${CMAKE_BINARY_DIR}/Android.mk")
configure_file("${CMAKE_SOURCE_DIR}/Application.mk.in" "${CMAKE_BINARY_DIR}/Application.mk")
configure_file("${CMAKE_SOURCE_DIR}/AndroidManifest.xml.in" "${CMAKE_BINARY_DIR}/AndroidManifest.xml")
configure_file("${CMAKE_SOURCE_DIR}/default.properties.in" "${CMAKE_BINARY_DIR}/default.properties")
configure_file("${CMAKE_SOURCE_DIR}/cvconfig.h.in" "${CMAKE_BINARY_DIR}/include/cvconfig.h")
add_subdirectory(${CMAKE_SOURCE_DIR}/zlib-android)
add_subdirectory(${opencv_root}/modules "${CMAKE_BINARY_DIR}/modules")
add_subdirectory(${opencv_root}/3rdparty "${CMAKE_BINARY_DIR}/3rdparty")
set(NDK_ROOT "$ENV{HOME}/android-ndk-r4-crystax" CACHE STRING "the crystax ndk directory")
if(NOT EXISTS ${NDK_ROOT})
message(FATAL_ERROR "Cannot find your ndk root directory! please download and
unzip the android ndk from crystax to the directory specified by NDK_ROOT
You may download the crystax ndk from:
http://www.crystax.net/android/ndk-r4.php" )
endif()
set(J "2" CACHE STRING "how many processes for make -j <J>")
ADD_CUSTOM_COMMAND(
OUTPUT android-opencv
DEPENDS ${CMAKE_BINARY_DIR}/Android.mk
COMMAND "${NDK_ROOT}/ndk-build"
ARGS --directory=${CMAKE_BINARY_DIR} NDK_APPLICATION_MK=Application.mk -j${J}
)
configure_file("${CMAKE_SOURCE_DIR}/android-opencv.mk.in" "${CMAKE_BINARY_DIR}/android-opencv.mk")
ADD_CUSTOM_TARGET(ndk ALL echo
DEPENDS android-opencv
)
message(STATUS "Make will use make -j${J} - for speeding up build - you may change this in the cache")
message(STATUS "The NDK directory is ${NDK_ROOT}")
message(STATUS "OpenCV source root is ${opencv_root}")
message(STATUS "just run make - and grab some coffee or tea ;)")
message(STATUS "The android opencv libs will be located in ${CMAKE_BINARY_DIR}/obj/local/armeabi*/")
Author: Ethan Rublee
email: ethan.rublee@gmail.com
To build with cmake:
mkdir build
cd build
cmake ..
make
Make sure to set the path to the crystax ndk in the cache; the ndk is
available here:
http://www.crystax.net/android/ndk-r4.php
To include in an android project,
just include the generated android-opencv.mk in your android ndk project
with:
include android-opencv.mk
this defines OPENCV_INCLUDES and OPENCV_LIBS - which you should add to your
makefiles like:
#define OPENCV_INCLUDES and OPENCV_LIBS
include $(PATH_TO_OPENCV_ANDROID_BUILD)/android-opencv.mk
LOCAL_LDLIBS += $(OPENCV_LIBS)
LOCAL_C_INCLUDES += $(OPENCV_INCLUDES)
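
For example, mirroring the jni/Android.mk used by this library, a minimal
jni/Android.mk for your own project could look like the following (the module
name and source file here are just placeholders):

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#define OPENCV_INCLUDES and OPENCV_LIBS
include $(PATH_TO_OPENCV_ANDROID_BUILD)/android-opencv.mk
LOCAL_LDLIBS += $(OPENCV_LIBS)
LOCAL_C_INCLUDES += $(OPENCV_INCLUDES)
LOCAL_MODULE := my_opencv_app
LOCAL_SRC_FILES := my_opencv_app.cpp
include $(BUILD_SHARED_LIBRARY)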
# ----------------------------------------------------------------------------
# Root CMake file for Android Opencv Build
#
# To build with cmake
# $ mkdir build
# $ cd build
# $ cmake ..
# $ make
# Make sure to set the path to the crystax ndk in the cache; the ndk is
# available here:
# http://www.crystax.net/android/ndk-r4.php
#
# - initial version August 2010 Ethan Rublee ethan.rublee@gmail.com
#
# ----------------------------------------------------------------------------
project(android-opencv-shared)
cmake_minimum_required(VERSION 2.8)
set(opencv_root "$ENV{HOME}/opencv" CACHE PATH "opencv source root directory")
if(NOT EXISTS ${opencv_root})
message(FATAL_ERROR "Cannot find your opencv root directory!" )
endif()
set(android_opencv_mk "${opencv_root}/android/build/android-opencv.mk" CACHE FILEPATH "the generated android-opencv.mk file")
if(NOT EXISTS ${android_opencv_mk})
message(FATAL_ERROR "please locate the cmake generated android-opencv.mk file, usually in the android/build directory...")
endif()
set(ANDROID true)
file(GLOB module_includes "${opencv_root}/modules/[a-zA-Z]*")
list(REMOVE_ITEM module_includes ${opencv_root}/modules/CMakeLists.txt)
set(module_includes ${module_includes} ${CMAKE_SOURCE_DIR}/../3rdparty $(OPENCV_BUILD_ROOT) )
foreach(mdir ${module_includes})
string(REPLACE "${opencv_root}" "$(OPENCV_ROOT)" n_f ${mdir})
set(android_module_include_dirs "${android_module_include_dirs} ${n_f}/include")
endforeach()
configure_file("${CMAKE_SOURCE_DIR}/Android.mk.master.in" "${CMAKE_BINARY_DIR}/Android.mk")
configure_file("${CMAKE_SOURCE_DIR}/Application.mk.in" "${CMAKE_BINARY_DIR}/Application.mk")
configure_file("${CMAKE_SOURCE_DIR}/AndroidManifest.xml.in" "${CMAKE_BINARY_DIR}/AndroidManifest.xml")
configure_file("${CMAKE_SOURCE_DIR}/default.properties.in" "${CMAKE_BINARY_DIR}/default.properties")
set(NDK_ROOT "$ENV{HOME}/android-ndk-r4-crystax" CACHE STRING "the crystax ndk directory")
if(NOT EXISTS ${NDK_ROOT})
message(FATAL_ERROR "Cannot find your ndk root directory! please download and
unzip the android ndk from crystax to the directory specified by NDK_ROOT
You may download the crystax ndk from:
http://www.crystax.net/android/ndk-r4.php" )
endif()
set(J "2" CACHE STRING "how many processes for make -j <J>")
ADD_CUSTOM_COMMAND(
OUTPUT android-ndk
DEPENDS ${CMAKE_BINARY_DIR}/Android.mk
COMMAND "${NDK_ROOT}/ndk-build"
ARGS --directory=${CMAKE_BINARY_DIR} NDK_APPLICATION_MK=Application.mk -j${J}
)
ADD_CUSTOM_COMMAND(
OUTPUT android-swig
DEPENDS ${SWIG_MAIN}
COMMAND "swig"
ARGS --directory=${CMAKE_BINARY_DIR} NDK_APPLICATION_MK=Application.mk -j${J}
)
configure_file("${CMAKE_SOURCE_DIR}/android-opencv.mk.in" "${CMAKE_BINARY_DIR}/android-opencv.mk")
ADD_CUSTOM_TARGET(ndk ALL echo
DEPENDS android-ndk
)
message(STATUS "Make will use make -j${J} - for speeding up build - you may change this in the cache")
message(STATUS "The NDK directory is ${NDK_ROOT}")
message(STATUS "OpenCV source root is ${opencv_root}")
message(STATUS "just run make - and grab some coffee or tea ;)")
message(STATUS "The android opencv libs will be located in ${CMAKE_BINARY_DIR}/obj/local/armeabi*/")
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.opencv" android:versionCode="1"
android:versionName="0.1">
<application android:debuggable="true">
<!-- The activity tag here is currently not used. The main project TicTacToeMain
must currently redefine the activities to be used from the libraries.
However later the tools will pick up the activities from here and merge them
automatically, so it's best to define your activities here like for any
regular Android project.
-->
<activity android:name="com.opencv.OpenCV" />
</application>
<!-- set the opengl version
<uses-feature android:glEsVersion="0x00020000" />-->
<uses-sdk android:minSdkVersion="7" />
<uses-permission android:name="android.permission.CAMERA"></uses-permission>
</manifest>
# The path to the NDK; requires the crystax version r4 for now, due to its support
# for the standard library
#load environment from local make file
LOCAL_ENV_MK=local.env.mk
ifneq "$(wildcard $(LOCAL_ENV_MK))" ""
include $(LOCAL_ENV_MK)
else
$(shell cp sample.$(LOCAL_ENV_MK) $(LOCAL_ENV_MK))
$(info ERROR local environment not set up! try:)
$(info gedit $(LOCAL_ENV_MK))
$(info Please set up the $(LOCAL_ENV_MK) - a default was just created)
include $(LOCAL_ENV_MK)
endif
ANDROID_NDK_BASE = $(ANDROID_NDK_ROOT)
$(info OPENCV_CONFIG = $(OPENCV_CONFIG))
# The name of the native library
LIBNAME = libandroid-opencv.so
# Find all the C++ sources in the native folder
SOURCES = $(wildcard jni/*.cpp)
HEADERS = $(wildcard jni/*.h)
SWIG_IS = $(wildcard jni/*.i)
ANDROID_MKS = $(wildcard jni/*.mk)
SWIG_MAIN = jni/android-cv.i
SWIG_JAVA_DIR = src/com/opencv/jni
SWIG_JAVA_OUT = $(wildcard $(SWIG_JAVA_DIR)/*.java)
SWIG_C_DIR = jni/gen
SWIG_C_OUT = $(SWIG_C_DIR)/android_cv_wrap.cpp
# The real native library stripped of symbols
LIB = libs/armeabi-v7a/$(LIBNAME) libs/armeabi/$(LIBNAME)
all: $(LIB)
#calls the ndk-build script, passing it OPENCV_ROOT and OPENCV_LIBS_DIR
$(LIB): $(SWIG_C_OUT) $(SOURCES) $(HEADERS) $(ANDROID_MKS)
$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
PROJECT_PATH=$(PROJECT_PATH) V=$(V) $(NDK_FLAGS)
#this creates the swig wrappers
$(SWIG_C_OUT): $(SWIG_IS)
make clean-swig &&\
mkdir -p $(SWIG_C_DIR) &&\
mkdir -p $(SWIG_JAVA_DIR) &&\
swig -java -c++ -package "com.opencv.jni" \
-outdir $(SWIG_JAVA_DIR) \
-o $(SWIG_C_OUT) $(SWIG_MAIN)
#clean targets
.PHONY: clean clean-swig cleanall nogdb
nogdb: $(LIB)
rm -f libs/armeabi*/gdb*
#this deletes the generated swig java and the generated c wrapper
clean-swig:
rm -f $(SWIG_JAVA_OUT) $(SWIG_C_OUT)
#does clean-swig and then uses the ndk-build clean
clean: clean-swig
$(ANDROID_NDK_BASE)/ndk-build OPENCV_CONFIG=$(OPENCV_CONFIG) \
PROJECT_PATH=$(PROJECT_PATH) clean V=$(V) $(NDK_FLAGS)
android-opencv
This is an example of an android library project that has some reusable
code that exposes part of OpenCV to android. In particular it provides a
native camera interface for loading live video frames from the android camera
into native opencv functions (as cv::Mat's).
To build, make sure you have swig and the crystax ndk in your path, then:
cp sample.local.env.mk local.env.mk
make
That should work...
More later on how to build an actual project for android -
see code.google.com/p/android-opencv for details.
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
android.library=true
# Project target.
target=android-7
# date: Summer, 2010
# author: Ethan Rublee
# contact: ethan.rublee@gmail.com
#
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
#define OPENCV_INCLUDES and OPENCV_LIBS
include $(OPENCV_CONFIG)
LOCAL_LDLIBS += $(OPENCV_LIBS) -llog -lGLESv2
LOCAL_C_INCLUDES += $(OPENCV_INCLUDES)
LOCAL_MODULE := android-opencv
LOCAL_SRC_FILES := gen/android_cv_wrap.cpp image_pool.cpp \
yuv420sp2rgb.c gl_code.cpp Calibration.cpp
include $(BUILD_SHARED_LIBRARY)
# The ARMv7 is significantly faster due to the use of the hardware FPU
APP_ABI := armeabi armeabi-v7a
APP_BUILD_SCRIPT := $(call my-dir)/Android.mk
APP_PROJECT_PATH := $(PROJECT_PATH)
/*
* Processor.cpp
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#include "Calibration.h"
#include <sys/stat.h>
using namespace cv;
Calibration::Calibration():patternsize(6,8)
{
}
Calibration::~Calibration() {
}
namespace
{
double computeReprojectionErrors(
const vector<vector<Point3f> >& objectPoints, const vector<vector<
Point2f> >& imagePoints, const vector<Mat>& rvecs,
const vector<Mat>& tvecs, const Mat& cameraMatrix,
const Mat& distCoeffs, vector<float>& perViewErrors) {
vector<Point2f> imagePoints2;
int i, totalPoints = 0;
double totalErr = 0, err;
perViewErrors.resize(objectPoints.size());
for (i = 0; i < (int) objectPoints.size(); i++) {
projectPoints(Mat(objectPoints[i]), rvecs[i], tvecs[i], cameraMatrix,
distCoeffs, imagePoints2);
err = norm(Mat(imagePoints[i]), Mat(imagePoints2), CV_L1 );
int n = (int) objectPoints[i].size();
perViewErrors[i] = err / n;
totalErr += err;
totalPoints += n;
}
return totalErr / totalPoints;
}
void calcChessboardCorners(Size boardSize, float squareSize, vector<
Point3f>& corners) {
corners.resize(0);
for (int i = 0; i < boardSize.height; i++)
for (int j = 0; j < boardSize.width; j++)
corners.push_back(Point3f(float(j * squareSize), float(i
* squareSize), 0));
}
/**from opencv/samples/cpp/calibration.cpp
*
*/
bool runCalibration(vector<vector<Point2f> > imagePoints,
Size imageSize, Size boardSize, float squareSize, float aspectRatio,
int flags, Mat& cameraMatrix, Mat& distCoeffs, vector<Mat>& rvecs,
vector<Mat>& tvecs, vector<float>& reprojErrs, double& totalAvgErr) {
cameraMatrix = Mat::eye(3, 3, CV_64F);
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
cameraMatrix.at<double> (0, 0) = aspectRatio;
distCoeffs = Mat::zeros(5, 1, CV_64F);
vector<vector<Point3f> > objectPoints(1);
calcChessboardCorners(boardSize, squareSize, objectPoints[0]);
for (size_t i = 1; i < imagePoints.size(); i++)
objectPoints.push_back(objectPoints[0]);
calibrateCamera(objectPoints, imagePoints, imageSize, cameraMatrix,
distCoeffs, rvecs, tvecs, flags);
bool ok = checkRange(cameraMatrix, CV_CHECK_QUIET ) && checkRange(
distCoeffs, CV_CHECK_QUIET );
totalAvgErr = computeReprojectionErrors(objectPoints, imagePoints, rvecs,
tvecs, cameraMatrix, distCoeffs, reprojErrs);
return ok;
}
void saveCameraParams(const string& filename, Size imageSize, Size boardSize,
float squareSize, float aspectRatio, int flags,
const Mat& cameraMatrix, const Mat& distCoeffs,
const vector<Mat>& rvecs, const vector<Mat>& tvecs,
const vector<float>& reprojErrs,
const vector<vector<Point2f> >& imagePoints, double totalAvgErr) {
FileStorage fs(filename, FileStorage::WRITE);
time_t t;
time(&t);
struct tm *t2 = localtime(&t);
char buf[1024];
strftime(buf, sizeof(buf) - 1, "%c", t2);
fs << "calibration_time" << buf;
if (!rvecs.empty() || !reprojErrs.empty())
fs << "nframes" << (int) std::max(rvecs.size(), reprojErrs.size());
fs << "image_width" << imageSize.width;
fs << "image_height" << imageSize.height;
fs << "board_width" << boardSize.width;
fs << "board_height" << boardSize.height;
fs << "squareSize" << squareSize;
if (flags & CV_CALIB_FIX_ASPECT_RATIO)
fs << "aspectRatio" << aspectRatio;
if (flags != 0) {
sprintf(buf, "flags: %s%s%s%s",
flags & CV_CALIB_USE_INTRINSIC_GUESS ? "+use_intrinsic_guess"
: "",
flags & CV_CALIB_FIX_ASPECT_RATIO ? "+fix_aspectRatio" : "",
flags & CV_CALIB_FIX_PRINCIPAL_POINT ? "+fix_principal_point"
: "",
flags & CV_CALIB_ZERO_TANGENT_DIST ? "+zero_tangent_dist" : "");
cvWriteComment(*fs, buf, 0);
}
fs << "flags" << flags;
fs << "camera_matrix" << cameraMatrix;
fs << "distortion_coefficients" << distCoeffs;
fs << "avg_reprojection_error" << totalAvgErr;
if (!reprojErrs.empty())
fs << "per_view_reprojection_errors" << Mat(reprojErrs);
if (!rvecs.empty() && !tvecs.empty()) {
Mat bigmat(rvecs.size(), 6, CV_32F);
for (size_t i = 0; i < rvecs.size(); i++) {
Mat r = bigmat(Range(i, i + 1), Range(0, 3));
Mat t = bigmat(Range(i, i + 1), Range(3, 6));
rvecs[i].copyTo(r);
tvecs[i].copyTo(t);
}
cvWriteComment(
*fs,
"a set of 6-tuples (rotation vector + translation vector) for each view",
0);
fs << "extrinsic_parameters" << bigmat;
}
if (!imagePoints.empty()) {
Mat imagePtMat(imagePoints.size(), imagePoints[0].size(), CV_32FC2);
for (size_t i = 0; i < imagePoints.size(); i++) {
Mat r = imagePtMat.row(i).reshape(2, imagePtMat.cols);
Mat(imagePoints[i]).copyTo(r);
}
fs << "image_points" << imagePtMat;
}
}
}//anon namespace
bool Calibration::detectAndDrawChessboard(int idx,image_pool* pool) {
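// Grab the grey image for this pool index, cheaply reject frames with no
// board via cvCheckChessboard, then locate and refine the corners and draw
// them on the colour image; corners are stored only when the full pattern is found.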
Mat grey;
pool->getGrey(idx, grey);
if (grey.empty())
return false;
vector<Point2f> corners;
IplImage iplgrey = grey;
if (!cvCheckChessboard(&iplgrey, patternsize))
return false;
bool patternfound = findChessboardCorners(grey, patternsize, corners);
Mat * img = pool->getImage(idx);
if (corners.size() < 1)
return false;
cornerSubPix(grey, corners, Size(11, 11), Size(-1, -1), TermCriteria(
CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 30, 0.1));
if(patternfound)
imagepoints.push_back(corners);
drawChessboardCorners(*img, patternsize, Mat(corners), patternfound);
imgsize = grey.size();
return patternfound;
}
void Calibration::drawText(int i, image_pool* pool, const char* ctext){
// Use "y" to show that the baseLine is about
string text = ctext;
int fontFace = FONT_HERSHEY_COMPLEX_SMALL;
double fontScale = .8;
int thickness = .5;
Mat img = *pool->getImage(i);
int baseline=0;
Size textSize = getTextSize(text, fontFace,
fontScale, thickness, &baseline);
baseline += thickness;
// center the text
Point textOrg((img.cols - textSize.width)/2,
(img.rows - textSize.height *2));
// draw the box
rectangle(img, textOrg + Point(0, baseline),
textOrg + Point(textSize.width, -textSize.height),
Scalar(0,0,255),CV_FILLED);
// ... and the baseline first
line(img, textOrg + Point(0, thickness),
textOrg + Point(textSize.width, thickness),
Scalar(0, 0, 255));
// then put the text itself
putText(img, text, textOrg, fontFace, fontScale,
Scalar::all(255), thickness, 8);
}
void Calibration::resetChess() {
imagepoints.clear();
}
void Calibration::calibrate(const char* filename) {
vector<Mat> rvecs, tvecs;
vector<float> reprojErrs;
double totalAvgErr = 0;
int flags = 0;
flags |= CV_CALIB_FIX_PRINCIPAL_POINT | CV_CALIB_FIX_ASPECT_RATIO;
bool writeExtrinsics = true;
bool writePoints = true;
bool ok = runCalibration(imagepoints, imgsize, patternsize, 1.f, 1.f,
flags, K, distortion, rvecs, tvecs, reprojErrs, totalAvgErr);
if (ok){
saveCameraParams(filename, imgsize, patternsize, 1.f,
1.f, flags, K, distortion, writeExtrinsics ? rvecs
: vector<Mat> (), writeExtrinsics ? tvecs
: vector<Mat> (), writeExtrinsics ? reprojErrs
: vector<float> (), writePoints ? imagepoints : vector<
vector<Point2f> > (), totalAvgErr);
}
}
int Calibration::getNumberDetectedChessboards() {
return imagepoints.size();
}
/*
* Processor.h
*
* Created on: Jun 13, 2010
* Author: ethan
*/
#ifndef PROCESSOR_H_
#define PROCESSOR_H_
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/calib3d/calib3d.hpp>
#include <vector>
#include "image_pool.h"
#define DETECT_FAST 0
#define DETECT_STAR 1
#define DETECT_SURF 2
class Calibration {
std::vector<cv::KeyPoint> keypoints;
vector<vector<Point2f> > imagepoints;
cv::Mat K;
cv::Mat distortion;
cv::Size imgsize;
public:
cv::Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};
#endif /* PROCESSOR_H_ */
/*
* include the headers required by the generated cpp code
*/
%{
#include "Calibration.h"
#include "image_pool.h"
using namespace cv;
%}
class Calibration {
public:
Size patternsize;
Calibration();
virtual ~Calibration();
bool detectAndDrawChessboard(int idx, image_pool* pool);
void resetChess();
int getNumberDetectedChessboards();
void calibrate(const char* filename);
void drawText(int idx, image_pool* pool, const char* text);
};
%feature("director") Mat;
%feature("director") glcamera;
%feature("director") image_pool;
%typemap("javapackage") Mat, Mat *, Mat & "com.opencv.jni";
%typemap("javapackage") glcamera, glcamera *, glcamera & "com.opencv.jni";
%typemap("javapackage") image_pool, image_pool *, image_pool & "com.opencv.jni";
/* File : android-cv.i
import this file, and make sure to add the System.loadlibrary("android-opencv")
before loading any lib that depends on this.
*/
%module opencv
%{
#include "image_pool.h"
#include "glcamera.h"
using namespace cv;
%}
#ifndef SWIGIMPORTED
%include "various.i"
%include "typemaps.i"
%include "arrays_java.i"
#endif
/**
* Make all the swig pointers public, so that
* external libraries can refer to these, otherwise they default to
* protected...
*/
%typemap(javabody) SWIGTYPE %{
private long swigCPtr;
protected boolean swigCMemOwn;
public $javaclassname(long cPtr, boolean cMemoryOwn) {
swigCMemOwn = cMemoryOwn;
swigCPtr = cPtr;
}
public static long getCPtr($javaclassname obj) {
return (obj == null) ? 0 : obj.swigCPtr;
}
%}
%pragma(java) jniclasscode=%{
static {
try {
//load the library, make sure that libandroid-opencv.so is in your <project>/libs/armeabi directory
//so that android sdk automatically installs it along with the app.
System.loadLibrary("android-opencv");
} catch (UnsatisfiedLinkError e) {
//badness
throw e;
}
}
%}
%include "cv.i"
%include "glcamera.i"
%include "image_pool.i"
%include "Calibration.i"
/*
* These typemaps provide support for sharing data between JNI and JVM code
* using NIO direct buffers. It is the responsibility of the JVM code to
* allocate a direct buffer of the appropriate size.
*
* Example use:
* Wrapping:
* %include "buffers.i"
* %apply int* BUFF {int* buffer}
* int read_foo_int(int* buffer);
*
* Java:
* IntBuffer buffer = IntBuffer.allocateDirect(nInts*4).order(ByteOrder.nativeOrder()).asIntBuffer();
* Example.read_foo_int(buffer);
*
* The following typemaps are defined:
* void* BUFF <--> javax.nio.Buffer
* char* BUFF <--> javax.nio.ByteBuffer
* char* CBUFF <--> javax.nio.CharBuffer
* unsigned char* INBUFF/OUTBUFF <--> javax.nio.ShortBuffer
* short* BUFF <--> javax.nio.ShortBuffer
* unsigned short* INBUFF/OUTBUFF <--> javax.nio.IntBuffer
* int* BUFF <--> javax.nio.IntBuffer
* unsigned int* INBUFF/OUTBUFF <--> javax.nio.LongBuffer
* long* BUFF <--> javax.nio.IntBuffer
* unsigned long* INBUFF/OUTBUF <--> javax.nio.LongBuffer
* long long* BUFF <--> javax.nio.LongBuffer
* float* BUFF <--> javax.nio.FloatBuffer
* double* BUFF <--> javax.nio.DoubleBuffer
*
* Note the potential for data loss in the conversion from
* the C type 'unsigned long' to the signed Java long type.
* Hopefully, I can implement a workaround with BigNumber in the future.
*
* The use of ByteBuffer vs CharBuffer for the char* type should
* depend on the type of data. In general you'll probably
* want to use CharBuffer for actual text data.
*/
/*
* This macro is used to define the nio buffers for primitive types.
*/
%define NIO_BUFFER_TYPEMAP(CTYPE, LABEL, BUFFERTYPE)
%typemap(jni) CTYPE* LABEL "jobject"
%typemap(jtype) CTYPE* LABEL "BUFFERTYPE"
%typemap(jstype) CTYPE* LABEL "BUFFERTYPE"
%typemap(javain,
pre=" assert $javainput.isDirect() : \"Buffer must be allocated direct.\";") CTYPE* LABEL "$javainput"
%typemap(javaout) CTYPE* LABEL {
return $jnicall;
}
%typemap(in) CTYPE* LABEL {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* LABEL {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* LABEL ""
%enddef
NIO_BUFFER_TYPEMAP(void, BUFF, java.nio.Buffer);
NIO_BUFFER_TYPEMAP(char, BUFF, java.nio.ByteBuffer);
NIO_BUFFER_TYPEMAP(char, CBUFF, java.nio.CharBuffer);
/*NIO_BUFFER_TYPEMAP(unsigned char, BUFF, java.nio.ShortBuffer);*/
NIO_BUFFER_TYPEMAP(short, BUFF, java.nio.ShortBuffer);
NIO_BUFFER_TYPEMAP(unsigned short, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(int, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned int, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long, BUFF, java.nio.IntBuffer);
NIO_BUFFER_TYPEMAP(unsigned long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(long long, BUFF, java.nio.LongBuffer);
NIO_BUFFER_TYPEMAP(float, BUFF, java.nio.FloatBuffer);
NIO_BUFFER_TYPEMAP(double, BUFF, java.nio.DoubleBuffer);
#undef NIO_BUFFER_TYPEMAP
%define UNSIGNED_NIO_BUFFER_TYPEMAP(CTYPE, BSIZE, BUFFERTYPE, PACKFCN, UNPACKFCN)
%typemap(jni) CTYPE* INBUFF "jobject"
%typemap(jtype) CTYPE* INBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* INBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = PACKFCN($javainput, true);") CTYPE* INBUFF "tmp$javainput"
%typemap(javaout) CTYPE* INBUFF {
return $jnicall;
}
%typemap(in) CTYPE* INBUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* INBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* INBUFF ""
%typemap(jni) CTYPE* OUTBUFF "jobject"
%typemap(jtype) CTYPE* OUTBUFF "java.nio.ByteBuffer"
%typemap(jstype) CTYPE* OUTBUFF "BUFFERTYPE"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = java.nio.ByteBuffer.allocateDirect($javainput.capacity()*BSIZE).order($javainput.order());",
post=" UNPACKFCN(tmp$javainput, $javainput);") CTYPE* OUTBUFF "tmp$javainput"
%typemap(javaout) CTYPE* OUTBUFF {
return $jnicall;
}
%typemap(in) CTYPE* OUTBUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) CTYPE* OUTBUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) CTYPE* OUTBUFF ""
%enddef
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned char, 1, java.nio.ShortBuffer, permafrost.hdf.libhdf.BufferUtils.packUChar, permafrost.hdf.libhdf.BufferUtils.unpackUChar);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned short, 2, java.nio.IntBuffer, permafrost.hdf.libhdf.BufferUtils.packUShort, permafrost.hdf.libhdf.BufferUtils.unpackUShort);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned int, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
UNSIGNED_NIO_BUFFER_TYPEMAP(unsigned long, 4, java.nio.LongBuffer, permafrost.hdf.libhdf.BufferUtils.packUInt, permafrost.hdf.libhdf.BufferUtils.unpackUInt);
/*
%typemap(jni) unsigned char* BUFF "jobject"
%typemap(jtype) unsigned char* BUFF "java.nio.ByteBuffer"
%typemap(jstype) unsigned char* BUFF "java.nio.ShortBuffer"
%typemap(javain,
pre=" java.nio.ByteBuffer tmp$javainput = permafrost.hdf.libhdf.BufferUtils.packUChar($javainput, true);",
post=" permafrost.hdf.libhdf.BufferUtils.unpackUChar(tmp$javainput, $javainput);") unsigned char* BUFF "tmp$javainput"
%typemap(javaout) unsigned char* BUFF {
return $jnicall;
}
%typemap(in) unsigned char* BUFF {
$1 = (*jenv)->GetDirectBufferAddress(jenv, $input);
if ($1 == NULL) {
SWIG_JavaThrowException(jenv, SWIG_JavaRuntimeException, "Unable to get address of direct buffer. Buffer must be allocated direct.");
}
}
%typemap(memberin) unsigned char* BUFF {
if ($input) {
$1 = $input;
} else {
$1 = 0;
}
}
%typemap(freearg) unsigned char* BUFF ""
*/
#undef UNSIGNED_NIO_BUFFER_TYPEMAP
%typemap(javaimports) Mat "
/** Wrapper for the OpenCV Mat object. Good for passing around as a pointer to a Mat.
*/"
%typemap(javaimports) Size "
/** Wrapper for the OpenCV Size object. Good for setting dimensions of cv::Mat...
*/"
class Mat {
public:
%immutable;
int rows;
int cols;
};
class Size{
public:
Size(int width,int height);
int width;
int height;
};
template<class _Tp> class Ptr
{
public:
//! empty constructor
Ptr();
//! take ownership of the pointer. The associated reference counter is allocated and set to 1
Ptr(_Tp* _obj);
//! calls release()
~Ptr();
//! copy constructor. Copies the members and calls addref()
Ptr(const Ptr& ptr);
//! copy operator. Calls ptr.addref() and release() before copying the members
// Ptr& operator = (const Ptr& ptr);
//! increments the reference counter
void addref();
//! decrements the reference counter. If it reaches 0, delete_obj() is called
void release();
//! deletes the object. Override if needed
void delete_obj();
//! returns true iff obj==NULL
bool empty() const;
//! helper operators making "Ptr<T> ptr" use very similar to "T* ptr".
_Tp* operator -> ();
// const _Tp* operator -> () const;
// operator _Tp* ();
// operator const _Tp*() const;
protected:
_Tp* obj; //< the object pointer.
int* refcount; //< the associated reference counter
};
%template(PtrMat) Ptr<Mat>;
/*
* Copyright (C) 2009 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// OpenGL ES 2.0 code
#include <jni.h>
#include <android/log.h>
#include <opencv2/core/core.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include <stdio.h>
#include <stdlib.h>
#include <math.h>
#include <stdint.h>
#include "glcamera.h"
#include "image_pool.h"
using namespace cv;
#define LOG_TAG "libandroid-opencv"
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
static void printGLString(const char *name, GLenum s) {
const char *v = (const char *) glGetString(s);
LOGI("GL %s = %s\n", name, v);
}
static void checkGlError(const char* op) {
for (GLint error = glGetError(); error; error = glGetError()) {
LOGI("after %s() glError (0x%x)\n", op, error);
}
}
static const char gVertexShader[] = "attribute vec4 a_position; \n"
"attribute vec2 a_texCoord; \n"
"varying vec2 v_texCoord; \n"
"void main() \n"
"{ \n"
" gl_Position = a_position; \n"
" v_texCoord = a_texCoord; \n"
"} \n";
static const char gFragmentShader[] =
"precision mediump float; \n"
"varying vec2 v_texCoord; \n"
"uniform sampler2D s_texture; \n"
"void main() \n"
"{ \n"
" gl_FragColor = texture2D( s_texture, v_texCoord );\n"
"} \n";
const GLfloat gTriangleVertices[] = { 0.0f, 0.5f, -0.5f, -0.5f, 0.5f, -0.5f };
GLubyte testpixels[4 * 3] = { 255, 0, 0, // Red
0, 255, 0, // Green
0, 0, 255, // Blue
255, 255, 0 // Yellow
};
GLuint glcamera::createSimpleTexture2D(GLuint _textureid, GLubyte* pixels,
int width, int height, int channels) {
// Bind the texture
glActiveTexture(GL_TEXTURE0);
checkGlError("glActiveTexture");
// Bind the texture object
glBindTexture(GL_TEXTURE_2D, _textureid);
checkGlError("glBindTexture");
GLenum format;
switch (channels) {
case 3:
format = GL_RGB;
break;
case 1:
format = GL_LUMINANCE;
break;
case 4:
format = GL_RGBA;
break;
}
// Load the texture
glTexImage2D(GL_TEXTURE_2D, 0, format, width, height, 0, format,
GL_UNSIGNED_BYTE, pixels);
checkGlError("glTexImage2D");
// Set the filtering mode
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST );
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST );
return _textureid;
}
GLuint glcamera::loadShader(GLenum shaderType, const char* pSource) {
GLuint shader = glCreateShader(shaderType);
if (shader) {
glShaderSource(shader, 1, &pSource, NULL);
glCompileShader(shader);
GLint compiled = 0;
glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
if (!compiled) {
GLint infoLen = 0;
glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &infoLen);
if (infoLen) {
char* buf = (char*) malloc(infoLen);
if (buf) {
glGetShaderInfoLog(shader, infoLen, NULL, buf);
LOGE("Could not compile shader %d:\n%s\n",
shaderType, buf);
free(buf);
}
glDeleteShader(shader);
shader = 0;
}
}
}
return shader;
}
GLuint glcamera::createProgram(const char* pVertexSource,
const char* pFragmentSource) {
GLuint vertexShader = loadShader(GL_VERTEX_SHADER, pVertexSource);
if (!vertexShader) {
return 0;
}
GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pFragmentSource);
if (!pixelShader) {
return 0;
}
GLuint program = glCreateProgram();
if (program) {
glAttachShader(program, vertexShader);
checkGlError("glAttachShader");
glAttachShader(program, pixelShader);
checkGlError("glAttachShader");
glLinkProgram(program);
GLint linkStatus = GL_FALSE;
glGetProgramiv(program, GL_LINK_STATUS, &linkStatus);
if (linkStatus != GL_TRUE) {
GLint bufLength = 0;
glGetProgramiv(program, GL_INFO_LOG_LENGTH, &bufLength);
if (bufLength) {
char* buf = (char*) malloc(bufLength);
if (buf) {
glGetProgramInfoLog(program, bufLength, NULL, buf);
LOGE("Could not link program:\n%s\n", buf);
free(buf);
}
}
glDeleteProgram(program);
program = 0;
}
}
return program;
}
//GLuint textureID;
bool glcamera::setupGraphics(int w, int h) {
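// Compile and link the shader program, look up the position/texcoord/sampler
// handles, and create an initial 2x2 test texture; real camera frames replace
// it later via setTextureImage()/step().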
printGLString("Version", GL_VERSION);
printGLString("Vendor", GL_VENDOR);
printGLString("Renderer", GL_RENDERER);
printGLString("Extensions", GL_EXTENSIONS);
LOGI("setupGraphics(%d, %d)", w, h);
gProgram = createProgram(gVertexShader, gFragmentShader);
if (!gProgram) {
LOGE("Could not create program.");
return false;
}
gvPositionHandle = glGetAttribLocation(gProgram, "a_position");
gvTexCoordHandle = glGetAttribLocation(gProgram, "a_texCoord");
gvSamplerHandle = glGetUniformLocation(gProgram, "s_texture");
// Use tightly packed data
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
// Generate a texture object
glGenTextures(1, &textureID);
textureID = createSimpleTexture2D(textureID, testpixels, 2, 2, 3);
checkGlError("glGetAttribLocation");
LOGI("glGetAttribLocation(\"vPosition\") = %d\n",
gvPositionHandle);
glViewport(0, 0, w, h);
checkGlError("glViewport");
return true;
}
void glcamera::renderFrame() {
GLfloat vVertices[] = { -1.0f, 1.0f, 0.0f, // Position 0
0.0f, 0.0f, // TexCoord 0
-1.0f, -1.0f, 0.0f, // Position 1
0.0f, 1.0f, // TexCoord 1
1.0f, -1.0f, 0.0f, // Position 2
1.0f, 1.0f, // TexCoord 2
1.0f, 1.0f, 0.0f, // Position 3
1.0f, 0.0f // TexCoord 3
};
GLushort indices[] = { 0, 1, 2, 0, 2, 3 };
GLsizei stride = 5 * sizeof(GLfloat); // 3 for position, 2 for texture
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
checkGlError("glClearColor");
glClear(GL_DEPTH_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
checkGlError("glClear");
glUseProgram(gProgram);
checkGlError("glUseProgram");
// Load the vertex position
glVertexAttribPointer(gvPositionHandle, 3, GL_FLOAT, GL_FALSE, stride,
vVertices);
// Load the texture coordinate
glVertexAttribPointer(gvTexCoordHandle, 2, GL_FLOAT, GL_FALSE, stride,
&vVertices[3]);
glEnableVertexAttribArray(gvPositionHandle);
glEnableVertexAttribArray(gvTexCoordHandle);
// Bind the texture
glActiveTexture(GL_TEXTURE0);
glBindTexture(GL_TEXTURE_2D, textureID);
// Set the sampler texture unit to 0
glUniform1i(gvSamplerHandle, 0);
glDrawElements(GL_TRIANGLES, 6, GL_UNSIGNED_SHORT, indices);
//checkGlError("glVertexAttribPointer");
//glEnableVertexAttribArray(gvPositionHandle);
//checkGlError("glEnableVertexAttribArray");
//glDrawArrays(GL_TRIANGLES, 0, 3);
//checkGlError("glDrawArrays");
}
void glcamera::init(int width, int height) {
newimage = false;
nimg = Mat();
setupGraphics(width, height);
}
void glcamera::step() {
if (newimage && !nimg.empty()) {
textureID = createSimpleTexture2D(textureID,
nimg.ptr<unsigned char> (0), nimg.rows, nimg.cols,
nimg.channels());
newimage = false;
}
renderFrame();
}
void glcamera::setTextureImage(Ptr<Mat> img) {
//int p2 = (int)(std::log(img->size().width)/0.69315);
int sz = 256;//std::pow(2,p2);
Size size(sz, sz);
resize(*img, nimg, size, 0, 0, cv::INTER_NEAREST);
newimage = true;
}
void glcamera::drawMatToGL(int idx, image_pool* pool) {
Ptr<Mat> img = pool->getImage(idx);
if (img.empty())
return; //no image at input_idx!
setTextureImage(img);
}
glcamera::glcamera():newimage(false) {
LOGI("glcamera constructor");
}
glcamera::~glcamera() {
LOGI("glcamera destructor");
}
#ifndef GLCAMERA_H_
#define GLCAMERA_H_
#include <opencv2/core/core.hpp>
#include <GLES2/gl2.h>
#include <GLES2/gl2ext.h>
#include "image_pool.h"
class glcamera {
Mat nimg;
bool newimage;
GLuint textureID;
GLuint gProgram;
GLuint gvPositionHandle;
GLuint gvTexCoordHandle;
GLuint gvSamplerHandle;
public:
glcamera();
~glcamera();
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
void setTextureImage(Ptr<Mat> img);
private:
GLuint createSimpleTexture2D(GLuint _textureid, GLubyte* pixels, int width,
int height, int channels);
GLuint loadShader(GLenum shaderType, const char* pSource);
GLuint
createProgram(const char* pVertexSource,
const char* pFragmentSource);
bool setupGraphics(int w, int h);
void renderFrame();
};
#endif
%typemap(javaimports) glcamera "
/** A class for doing the native rendering of images.
This class renders using OpenGL ES 2.0 through the native ndk.
It is used by the GL2CameraViewer to do the rendering,
and is inspired by the gl2 example in the ndk samples.
*/"
%javamethodmodifiers glcamera::init"
/** should be called onSurfaceChanged by the GLSurfaceView that is using this
* as the drawing engine
* @param width the width of the surface view that this will be drawing to
* @param height the height of the surface view that this will be drawing to
*
*/
public";
%javamethodmodifiers glcamera::step"
/** should be called by GLSurfaceView.Renderer in the onDrawFrame method, as it
handles the rendering of the opengl scene, and requires that the opengl context be
valid.
*
*/
public";
%javamethodmodifiers glcamera::drawMatToGL"
/** copies an image from a pool and queues it for drawing in opengl.
* this does transformation into power of two texture sizes
* @param idx the image index to copy
* @param pool the image_pool to look up the image from
*
*/
public";
class glcamera {
public:
void init(int width, int height);
void step();
void drawMatToGL(int idx, image_pool* pool);
};
#include "image_pool.h"
#include "yuv420sp2rgb.h"
#include <android/log.h>
#include <opencv2/imgproc/imgproc.hpp>
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool(JNIEnv * env,
jclass thiz, jlong ppool, jobject _jpool, jbyteArray jbuffer,
jint jidx, jint jwidth, jint jheight, jboolean jgrey) {
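// Copy the Java byte[] YUV420sp buffer into a pooled Mat (allocated with
// height*2 rows so its top half doubles as the grey image), then convert it
// into the colour (or grey) Mat stored at the same index.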
image_pool *pool = (image_pool *) ppool;
Ptr<Mat> mat = pool->getYUV(jidx);
if (mat.empty() || mat->cols != jwidth || mat->rows != jheight * 2) {
//pool->deleteGrey(jidx);
mat = new Mat(jheight * 2, jwidth, CV_8UC1);
}
jsize sz = env->GetArrayLength(jbuffer);
uchar* buff = mat->ptr<uchar> (0);
env->GetByteArrayRegion(jbuffer, 0, sz, (jbyte*) buff);
pool->addYUVMat(jidx, mat);
Ptr<Mat> color = pool->getImage(jidx);
if (color.empty() || color->cols != jwidth || color->rows != jheight) {
//pool->deleteImage(jidx);
color = new Mat(jheight, jwidth, CV_8UC3);
}
if (!jgrey) {
//doesn't work unfortunately..
//cvtColor(*mat,*color, CV_YCrCb2RGB);
color_convert_common(buff, buff + jwidth * jheight, jwidth, jheight,
color->ptr<uchar> (0), false);
}
if (jgrey) {
Mat grey;
pool->getGrey(jidx, grey);
cvtColor(grey, *color, CV_GRAY2RGB);
}
pool->addImage(jidx, color);
}
image_pool::image_pool() {
}
image_pool::~image_pool() {
__android_log_print(ANDROID_LOG_INFO, "image_pool", "destructor called");
}
cv::Ptr<Mat> image_pool::getImage(int i) {
return imagesmap[i];
}
void image_pool::getGrey(int i, Mat & grey) {
cv::Ptr<Mat> tm = yuvImagesMap[i];
if (tm.empty())
return;
grey = (*tm)(Range(0, tm->rows / 2), Range::all());
}
cv::Ptr<Mat> image_pool::getYUV(int i) {
return yuvImagesMap[i];
}
void image_pool::addYUVMat(int i, cv::Ptr<Mat> mat) {
yuvImagesMap[i] = mat;
}
void image_pool::addImage(int i, cv::Ptr<Mat> mat) {
imagesmap[i] = mat;
}
#ifndef IMAGE_POOL_H
#define IMAGE_POOL_H
#include <opencv2/core/core.hpp>
#include <jni.h>
#include <map>
using namespace cv;
#ifdef __cplusplus
extern "C" {
#endif
//
//JNIEXPORT jobject JNICALL Java_com_opencv_jni_opencvJNI_getBitmapBuffer(
// JNIEnv *jenv, jclass jcls, jlong jarg1, jobject jarg1_);
JNIEXPORT void JNICALL Java_com_opencv_jni_opencvJNI_addYUVtoPool
(JNIEnv *, jclass, jlong, jobject, jbyteArray, jint, jint, jint, jboolean);
#ifdef __cplusplus
}
#endif
//bool yuv2mat2(char *data, int size, int width, int height, bool grey, Mat& mat);
class image_pool {
std::map<int, Ptr< Mat> > imagesmap;
std::map<int, Ptr< Mat> > yuvImagesMap;
//uchar * mbuffer;
//int length;
public:
image_pool();
~image_pool();
cv::Ptr<Mat> getImage(int i);
void getGrey(int i, Mat & grey);
cv::Ptr<Mat> getYUV(int i);
int getCount(){
return imagesmap.size();
}
void addImage(int i, Ptr< Mat> mat);
/** this function stores the given matrix in the yuvImagesMap. Also,
* after this call getGrey will work, as the grey image is just the top
* half of the YUV mat.
*
* \param i index to store yuv image at
* \param mat the yuv matrix to store
*/
void addYUVMat(int i, Ptr< Mat> mat);
int addYUV(uchar* buffer, int size, int width, int height, bool grey,int idx);
void getBitmap(int * outintarray, int size, int idx);
};
#endif
%typemap(javaimports) image_pool "
/** image_pool is used for keeping track of a pool of native images. It stores images as cv::Mat's and
references them by an index. It allows one to get a pointer to an underlying mat, and handles memory deletion.*/"
%javamethodmodifiers image_pool::getImage"
/** gets a pointer to a stored image, by an index. If the index is new, returns a null pointer
* @param idx the index in the pool that is associated with a cv::Mat
* @return the pointer to a cv::Mat, null pointer if the given idx is novel
*/
public";
%javamethodmodifiers image_pool::deleteImage"
/** deletes the image from the pool
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%javamethodmodifiers addYUVtoPool"
/** adds a YUV frame to the pool and converts it into the colour (or grey) image at the same index
* @param idx the index in the pool that is associated with a cv::Mat
*/
public";
%include "various.i"
%apply (char* BYTE) { (char *data)}; //byte[] to char*
%native (addYUVtoPool) void addYUVtoPool(image_pool* pool, char* data,int idx, int width, int height, bool grey);
%feature("director") image_pool;
class image_pool {
public:
image_pool();
~image_pool();
Ptr<Mat> getImage(int i);
void addImage(int i, Ptr< Mat> mat);
};
/*
* int *INTARRAY typemaps.
* These are input typemaps for mapping a Java int[] array to a C int array.
* Note that as a Java array is used and thus passed by reference, the C routine
* can return data to Java via the parameter.
*
* Example usage wrapping:
* void foo(int *INTARRAY, int INTARRAYSIZE);
*
* Java usage:
* int b[] = new int[20];
* modulename.foo(b);
*/
%typemap(in) (int *INTARRAY, int INTARRAYSIZE) {
$1 = (int *) JCALL2(GetIntArrayElements, jenv, $input, 0);
jsize sz = JCALL1(GetArrayLength, jenv, $input);
$2 = (int)sz;
}
%typemap(argout) (int *INTARRAY, int INTARRAYSIZE) {
JCALL3(ReleaseIntArrayElements, jenv, $input, (jint *) $1, 0);
}
/* Prevent default freearg typemap from being used */
%typemap(freearg) (int *INTARRAY, int INTARRAYSIZE) ""
%typemap(jni) (int *INTARRAY, int INTARRAYSIZE) "jintArray"
%typemap(jtype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(jstype) (int *INTARRAY, int INTARRAYSIZE) "int[]"
%typemap(javain) (int *INTARRAY, int INTARRAYSIZE) "$javainput"
#include <string.h>
#include <jni.h>
#include <yuv420sp2rgb.h>
#ifndef max
#define max(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a > _b ? _a : _b; })
#define min(a,b) ({typeof(a) _a = (a); typeof(b) _b = (b); _a < _b ? _a : _b; })
#endif
/*
YUV 4:2:0 image with a plane of 8 bit Y samples followed by an interleaved
U/V plane containing 8 bit 2x2 subsampled chroma samples.
except the interleave order of U and V is reversed.
H V
Y Sample Period 1 1
U (Cb) Sample Period 2 2
V (Cr) Sample Period 2 2
*/
/*
size of a char:
find . -name limits.h -exec grep CHAR_BIT {} \;
*/
const int bytes_per_pixel = 2;
void color_convert_common(
unsigned char *pY, unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey)
{
int i, j;
int nR, nG, nB;
int nY, nU, nV;
unsigned char *out = buffer;
int offset = 0;
if(grey){
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
unsigned char nB = *(pY + i * width + j);
out[offset++] = (unsigned char)nB;
// out[offset++] = (unsigned char)nB;
// out[offset++] = (unsigned char)nB;
}
}
}else
// YUV 4:2:0
for (i = 0; i < height; i++) {
for (j = 0; j < width; j++) {
nY = *(pY + i * width + j);
nV = *(pUV + (i/2) * width + bytes_per_pixel * (j/2));
nU = *(pUV + (i/2) * width + bytes_per_pixel * (j/2) + 1);
// Yuv Convert
nY -= 16;
nU -= 128;
nV -= 128;
if (nY < 0)
nY = 0;
// nR = (int)(1.164 * nY + 2.018 * nU);
// nG = (int)(1.164 * nY - 0.813 * nV - 0.391 * nU);
// nB = (int)(1.164 * nY + 1.596 * nV);
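// Same conversion in 10-bit fixed point: the coefficients are scaled by 1024
// (1192 ~ 1.164*1024, 2066 ~ 2.018*1024, 1634 ~ 1.596*1024, 833 ~ 0.813*1024,
// 400 ~ 0.391*1024); results are clamped and then shifted right by 10 bits.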
nB = (int)(1192 * nY + 2066 * nU);
nG = (int)(1192 * nY - 833 * nV - 400 * nU);
nR = (int)(1192 * nY + 1634 * nV);
nR = min(262143, max(0, nR));
nG = min(262143, max(0, nG));
nB = min(262143, max(0, nB));
nR >>= 10; nR &= 0xff;
nG >>= 10; nG &= 0xff;
nB >>= 10; nB &= 0xff;
out[offset++] = (unsigned char)nR;
out[offset++] = (unsigned char)nG;
out[offset++] = (unsigned char)nB;
//out[offset++] = 0xff; //set alpha for ARGB 8888 format
}
//offset = i * width * 3; //non power of two
//offset = i * texture_size + j;//power of two
//offset *= 3; //3 byte per pixel
//out = buffer + offset;
}
}
//yuv420sp2rgb.h
#ifndef YUV420SP2RGB_H
#define YUV420SP2RGB_H
#ifdef __cplusplus
extern "C" {
#endif
void color_convert_common(
unsigned char *pY, unsigned char *pUV,
int width, int height, unsigned char *buffer,
int grey);
#ifdef __cplusplus
}
#endif
#endif
#location of android-opencv port of OpenCV to android
OPENCV_CONFIG=../build/android-opencv.mk
ANDROID_NDK_ROOT=$(HOME)/android-ndk-r4-crystax
package com.opencv;
import java.util.LinkedList;
import android.app.Activity;
import android.content.pm.ActivityInfo;
import android.os.Bundle;
import android.view.Gravity;
import android.view.KeyEvent;
import android.view.Menu;
import android.view.MenuItem;
import android.view.Window;
import android.view.WindowManager;
import android.view.ViewGroup.LayoutParams;
import android.widget.FrameLayout;
import android.widget.LinearLayout;
import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.opengl.GL2CameraViewer;
public class OpenCV extends Activity {
private NativePreviewer mPreview;
private GL2CameraViewer glview;
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyUp(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyUp(int keyCode, KeyEvent event) {
return super.onKeyUp(keyCode, event);
}
/*
* (non-Javadoc)
*
* @see android.app.Activity#onKeyLongPress(int, android.view.KeyEvent)
*/
@Override
public boolean onKeyLongPress(int keyCode, KeyEvent event) {
return super.onKeyLongPress(keyCode, event);
}
/**
* Prevent the screen from being turned off by the system.
*/
public void disableScreenTurnOff() {
getWindow().setFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON,
WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
}
/**
* Sets the orientation to landscape, as this is needed by AndAR.
*/
public void setOrientation() {
setRequestedOrientation(ActivityInfo.SCREEN_ORIENTATION_LANDSCAPE);
}
/**
* Maximize the application.
*/
public void setFullscreen() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
WindowManager.LayoutParams.FLAG_FULLSCREEN);
}
public void setNoTitle() {
requestWindowFeature(Window.FEATURE_NO_TITLE);
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// menu.add("Sample");
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// if(item.getTitle().equals("Sample")){
// //do stuff...
// }
return true;
}
@Override
public void onOptionsMenuClosed(Menu menu) {
// TODO Auto-generated method stub
super.onOptionsMenuClosed(menu);
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setFullscreen();
disableScreenTurnOff();
FrameLayout frame = new FrameLayout(getApplication());
// Create our Preview view and set it as the content of our activity.
mPreview = new NativePreviewer(getApplication(), 400, 300);
LayoutParams params = new LayoutParams(LayoutParams.WRAP_CONTENT,
LayoutParams.WRAP_CONTENT);
params.height = getWindowManager().getDefaultDisplay().getHeight();
params.width = (int) (params.height * 4.0 / 2.88);
LinearLayout vidlay = new LinearLayout(getApplication());
vidlay.setGravity(Gravity.CENTER);
vidlay.addView(mPreview, params);
frame.addView(vidlay);
// make the glview overlay on top of the video preview
mPreview.setZOrderMediaOverlay(false);
glview = new GL2CameraViewer(getApplication(), false, 0, 0);
glview.setZOrderMediaOverlay(true);
glview.setLayoutParams(new LayoutParams(LayoutParams.FILL_PARENT,
LayoutParams.FILL_PARENT));
frame.addView(glview);
setContentView(frame);
}
@Override
protected void onPause() {
super.onPause();
mPreview.onPause();
glview.onPause();
}
@Override
protected void onResume() {
super.onResume();
glview.onResume();
LinkedList<NativeProcessor.PoolCallback> callbackstack = new LinkedList<PoolCallback>();
callbackstack.add(glview.getDrawCallback());
mPreview.addCallbackStack(callbackstack);
mPreview.onResume();
}
}
package com.opencv.camera;
import java.io.IOException;
import java.lang.reflect.Method;
import java.util.Date;
import java.util.LinkedList;
import java.util.List;
import android.content.Context;
import android.graphics.PixelFormat;
import android.hardware.Camera;
import android.hardware.Camera.Parameters;
import android.hardware.Camera.PreviewCallback;
import android.hardware.Camera.Size;
import android.os.Handler;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
import com.opencv.camera.NativeProcessor.PoolCallback;
public class NativePreviewer extends SurfaceView implements
SurfaceHolder.Callback, Camera.PreviewCallback, NativeProcessorCallback {
SurfaceHolder mHolder;
Camera mCamera;
private NativeProcessor processor;
private int preview_width, preview_height;
private int pixelformat;
private PixelFormat pixelinfo;
public NativePreviewer(Context context, int preview_width,
int preview_height) {
super(context);
listAllCameraMethods();
// Install a SurfaceHolder.Callback so we get notified when the
// underlying surface is created and destroyed.
mHolder = getHolder();
mHolder.addCallback(this);
mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
this.preview_width = preview_width;
this.preview_height = preview_height;
processor = new NativeProcessor();
}
public void surfaceCreated(SurfaceHolder holder) {
// The Surface has been created, acquire the camera and tell it where
// to draw.
mCamera = Camera.open();
try {
mCamera.setPreviewDisplay(holder);
} catch (IOException exception) {
mCamera.release();
mCamera = null;
}
}
public void surfaceDestroyed(SurfaceHolder holder) {
// Surface will be destroyed when we return, so stop the preview.
// Because the CameraDevice object is not a shared resource, it's very
// important to release it when the activity is paused.
mCamera.stopPreview();
mCamera.release();
// processor = null;
mCamera = null;
mAcb = null;
mPCWB = null;
}
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
// Now that the size is known, set up the camera parameters and begin
// the preview.
Camera.Parameters parameters = mCamera.getParameters();
List<Camera.Size> pvsizes = mCamera.getParameters().getSupportedPreviewSizes();
int best_width = 1000000;
int best_height = 1000000;
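// Pick the smallest supported preview size whose width is at least the
// requested preview_width.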
for(Size x: pvsizes){
if(x.width - preview_width >= 0 && x.width <= best_width){
best_width = x.width;
best_height = x.height;
}
}
preview_width = best_width;
preview_height = best_height;
parameters.setPreviewSize(preview_width, preview_height);
mCamera.setParameters(parameters);
pixelinfo = new PixelFormat();
pixelformat = mCamera.getParameters().getPreviewFormat();
PixelFormat.getPixelFormatInfo(pixelformat, pixelinfo);
Size preview_size = mCamera.getParameters().getPreviewSize();
preview_width = preview_size.width;
preview_height = preview_size.height;
int bufSize = preview_width * preview_height * pixelinfo.bitsPerPixel
/ 8;
// Must call this before calling addCallbackBuffer to get all the
// reflection variables setup
initForACB();
initForPCWB();
// Use only one buffer, so that we don't preview too many frames and bog
// down the system
byte[] buffer = new byte[bufSize];
addCallbackBuffer(buffer);
setPreviewCallbackWithBuffer();
mCamera.startPreview();
postautofocus(0);
}
public void postautofocus(int delay) {
handler.postDelayed(autofocusrunner, delay);
}
Runnable autofocusrunner = new Runnable() {
@Override
public void run() {
mCamera.autoFocus(autocallback);
}
};
Camera.AutoFocusCallback autocallback = new Camera.AutoFocusCallback() {
@Override
public void onAutoFocus(boolean success, Camera camera) {
if(!success)
postautofocus(1000);
else{
Parameters params = camera.getParameters();
params.setSceneMode(Parameters.SCENE_MODE_AUTO);
camera.setParameters(params);
}
}
};
Handler handler = new Handler();
/**
* This method will list all methods of the android.hardware.Camera class,
* even the hidden ones. With the information it provides, you can use the
* same approach I took below to expose methods that were written but hidden
* in eclair
*/
private void listAllCameraMethods() {
try {
Class<?> c = Class.forName("android.hardware.Camera");
Method[] m = c.getMethods();
for (int i = 0; i < m.length; i++) {
Log.d("NativePreviewer", " method:" + m[i].toString());
}
} catch (Exception e) {
// TODO Auto-generated catch block
Log.e("NativePreviewer", e.toString());
}
}
/**
* These variables are re-used over and over by addCallbackBuffer
*/
Method mAcb;
private void initForACB() {
try {
mAcb = Class.forName("android.hardware.Camera").getMethod(
"addCallbackBuffer", byte[].class);
} catch (Exception e) {
Log
.e("NativePreviewer",
"Problem setting up for addCallbackBuffer: "
+ e.toString());
}
}
/**
* This method allows you to add a byte buffer to the queue of buffers to be
* used by preview. See:
* http://android.git.kernel.org/?p=platform/frameworks
* /base.git;a=blob;f=core/java/android/hardware/Camera.java;hb=9d
* b3d07b9620b4269ab33f78604a36327e536ce1
*
* @param b
* The buffer to register. Size should be width * height *
* bitsPerPixel / 8.
*/
private void addCallbackBuffer(byte[] b) {
try {
mAcb.invoke(mCamera, b);
} catch (Exception e) {
Log.e("NativePreviewer", "invoking addCallbackBuffer failed: "
+ e.toString());
}
}
Method mPCWB;
private void initForPCWB() {
try {
mPCWB = Class.forName("android.hardware.Camera").getMethod(
"setPreviewCallbackWithBuffer", PreviewCallback.class);
} catch (Exception e) {
Log.e("NativePreviewer",
"Problem setting up for setPreviewCallbackWithBuffer: "
+ e.toString());
}
}
/**
* Use this method instead of setPreviewCallback if you want to use manually
* allocated buffers. Assumes that "this" implements Camera.PreviewCallback
*/
private void setPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method of
// Camera, we can now invoke it on our Camera instance, setting 'this'
// to be the callback handler
mPCWB.invoke(mCamera, this);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: Called method");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
protected void clearPreviewCallbackWithBuffer() {
// mCamera.setPreviewCallback(this);
// return;
try {
// If we were able to find the setPreviewCallbackWithBuffer method of
// Camera, we can now invoke it with a null argument to clear the
// callback handler
mPCWB.invoke(mCamera, (PreviewCallback) null);
// Log.d("NativePrevier","setPreviewCallbackWithBuffer: cleared");
} catch (Exception e) {
Log.e("NativePreviewer", e.toString());
}
}
Date start;
int fcount = 0;
boolean processing = false;
/**
* Demonstration of how to use onPreviewFrame: the frame is posted to the
* NativeProcessor, and the buffer is added back to the preview buffer queue
* for re-use once native processing completes (see onDoneNativeProcessing).
*/
public void onPreviewFrame(byte[] data, Camera camera) {
if (start == null) {
start = new Date();
}
processor.post(data, preview_width, preview_height, pixelformat, System.nanoTime(),
this);
fcount++;
if (fcount % 100 == 0) {
double ms = (new Date()).getTime() - start.getTime();
Log.i("NativePreviewer", "fps:" + fcount / (ms / 1000.0));
start = new Date();
fcount = 0;
}
}
@Override
public void onDoneNativeProcessing(byte[] buffer) {
addCallbackBuffer(buffer);
}
public void addCallbackStack(LinkedList<PoolCallback> callbackstack) {
processor.addCallbackStack(callbackstack);
}
/**
 * This must be called when the activity pauses, in Activity.onPause().
 * It has the side effect of clearing the callback stack; see the activity
 * sketch after this class for example wiring.
 */
public void onPause() {
addCallbackStack(null);
processor.stop();
mCamera.stopPreview();
}
public void onResume() {
processor.start();
}
}
\ No newline at end of file
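To make the lifecycle notes above concrete, here is a minimal Activity sketch showing how a NativePreviewer might be driven. It is illustrative only: the NativePreviewer(Context, width, height) constructor, the sample package name, and the use of the previewer as the content view are assumptions; only onResume(), onPause() and addCallbackStack() come from the class above.
package com.opencv.samples; // hypothetical sample package

import java.util.LinkedList;

import android.app.Activity;
import android.os.Bundle;

import com.opencv.camera.NativePreviewer;
import com.opencv.camera.NativeProcessor;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;

public class PreviewActivitySketch extends Activity {
	private NativePreviewer preview;

	@Override
	protected void onCreate(Bundle savedInstanceState) {
		super.onCreate(savedInstanceState);
		// Assumed constructor and view usage - adjust to the real NativePreviewer API.
		preview = new NativePreviewer(getApplication(), 640, 480);
		setContentView(preview);
	}

	@Override
	protected void onResume() {
		super.onResume();
		// Restart the native processor thread, then register per-frame callbacks.
		preview.onResume();
		LinkedList<PoolCallback> callbacks = new LinkedList<PoolCallback>();
		callbacks.add(new PoolCallback() {
			public void process(int idx, image_pool pool, long timestamp,
					NativeProcessor nativeProcessor) {
				// native/JNI processing of the pooled frame at index idx would go here
			}
		});
		preview.addCallbackStack(callbacks);
	}

	@Override
	protected void onPause() {
		super.onPause();
		// Clears the callback stack, stops the processor thread and the camera preview.
		preview.onPause();
	}
}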
package com.opencv.camera;
import java.util.LinkedList;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import android.graphics.PixelFormat;
import android.util.Log;
import com.opencv.jni.image_pool;
import com.opencv.jni.opencv;
public class NativeProcessor {
private class ProcessorThread extends Thread {
private void process(NPPostObject pobj) throws Exception {
if (pobj.format == PixelFormat.YCbCr_420_SP) {
// add as color image, because we know how to decode this
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, false);
} else if (pobj.format == PixelFormat.YCbCr_422_SP) {
// add as a gray image, because color decoding for this format
// is not implemented // TODO: figure out how to decode this format
opencv.addYUVtoPool(pool, pobj.buffer, 0, pobj.width,
pobj.height, true);
} else
throw new Exception("bad pixel format!");
for (PoolCallback x : stack) {
if (interrupted()) {
throw new InterruptedException(
"Native Processor interupted while processing");
}
x.process(0, pool, pobj.timestamp, NativeProcessor.this);
}
pobj.done(); // tell the postobject that we're done doing
// all the processing.
}
@Override
public void run() {
try {
while (true) {
yield();
while(!stacklock.tryLock(5, TimeUnit.MILLISECONDS)){
}
try {
if (nextStack != null) {
stack = nextStack;
nextStack = null;
}
} finally {
stacklock.unlock();
}
NPPostObject pobj = null;
while(!lock.tryLock(5, TimeUnit.MILLISECONDS)){
}
try {
if(postobjects.isEmpty()) continue;
pobj = postobjects.removeLast();
} finally {
lock.unlock();
}
if(interrupted())throw new InterruptedException();
if(stack != null && pobj != null)
process(pobj);
}
} catch (InterruptedException e) {
Log.i("NativeProcessor",
"native processor interupted, ending now");
} catch (Exception e) {
e.printStackTrace();
} finally {
}
}
}
ProcessorThread mthread;
static public interface PoolCallback {
void process(int idx, image_pool pool,long timestamp, NativeProcessor nativeProcessor);
}
Lock stacklock = new ReentrantLock();
LinkedList<PoolCallback> nextStack;
void addCallbackStack(LinkedList<PoolCallback> stack) {
try {
while (!stacklock.tryLock(10, TimeUnit.MILLISECONDS)) {
}
try {
nextStack = stack;
} finally {
stacklock.unlock();
}
} catch (InterruptedException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
/**
* A callback that allows the NativeProcessor to pass back the buffer when
* it has completed processing a frame.
*
* @author ethan
*
*/
static public interface NativeProcessorCallback {
/**
* Called after processing, meant to be received by the NativePreviewer,
* which reuses the byte buffer for the camera preview...
*
* @param buffer
* the buffer passed to the NativeProcessor with post.
*/
void onDoneNativeProcessing(byte[] buffer);
}
/**
* Create a NativeProcessor. The processor will not start running until
* start() is called, at which point it operates in its own thread and
* sleeps until a post is called. The processor should not be started until
* an onSurfaceChange event, and should be shut down when the surface is
* destroyed by calling stop(), which interrupts the processing thread.
* See the usage sketch after this class.
*
*/
public NativeProcessor() {
}
/**
* post is used to notify the processor that a preview frame is ready; it
* returns almost immediately, simply queueing the frame for the processing
* thread.
*
* @param buffer
* a preview frame from the Android Camera onPreviewFrame
* callback
* @param width
* of preview frame
* @param height
* of preview frame
* @param format
* of preview frame
* @param timestamp
* of the preview frame capture, in nanoseconds
* @return true once the frame has been queued; the current implementation
* always accepts the post.
*/
public boolean post(byte[] buffer, int width, int height, int format,long timestamp,
NativeProcessorCallback callback) {
lock.lock();
try {
NPPostObject pobj = new NPPostObject(buffer, width, height,
format,timestamp, callback);
postobjects.addFirst(pobj);
} finally {
lock.unlock();
}
return true;
}
static private class NPPostObject {
public NPPostObject(byte[] buffer, int width, int height, int format, long timestamp,
NativeProcessorCallback callback) {
this.buffer = buffer;
this.width = width;
this.height = height;
this.format = format;
this.timestamp = timestamp;
this.callback = callback;
}
public void done() {
callback.onDoneNativeProcessing(buffer);
}
int width, height;
byte[] buffer;
int format;
long timestamp;
NativeProcessorCallback callback;
}
private LinkedList<NPPostObject> postobjects = new LinkedList<NPPostObject>();
private image_pool pool = new image_pool();
private final Lock lock = new ReentrantLock();
private LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
void stop() {
mthread.interrupt();
try {
mthread.join();
} catch (InterruptedException e) {
Log.w("NativeProcessor","interupted while stoping " + e.getMessage());
}
mthread = null;
}
void start() {
mthread = new ProcessorThread();
mthread.start();
}
}
\ No newline at end of file
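The javadoc above describes the NativeProcessor contract: start it once a surface exists, feed frames through post(), get each buffer back through onDoneNativeProcessing(), and stop it when the surface goes away. A minimal sketch of that flow follows; it lives in the com.opencv.camera package because start(), stop() and addCallbackStack() are package-private, and the ProcessorHarness name, the frame source, and the pixel format choice are assumptions for illustration.
package com.opencv.camera; // same package: start()/stop()/addCallbackStack() are package-private

import java.util.LinkedList;

import android.graphics.PixelFormat;

import com.opencv.camera.NativeProcessor.NativeProcessorCallback;
import com.opencv.camera.NativeProcessor.PoolCallback;
import com.opencv.jni.image_pool;

public class ProcessorHarness implements NativeProcessorCallback {
	private final NativeProcessor processor = new NativeProcessor();

	public void onSurfaceReady() {
		// Register the per-frame callback stack, then start the processing thread.
		LinkedList<PoolCallback> stack = new LinkedList<PoolCallback>();
		stack.add(new PoolCallback() {
			public void process(int idx, image_pool pool, long timestamp,
					NativeProcessor nativeProcessor) {
				// native/JNI processing of the pooled image would go here
			}
		});
		processor.addCallbackStack(stack);
		processor.start();
	}

	public void onFrame(byte[] yuvFrame, int width, int height) {
		// Queue a preview buffer for the processing thread; the same buffer comes
		// back through onDoneNativeProcessing once it is free for reuse.
		processor.post(yuvFrame, width, height, PixelFormat.YCbCr_420_SP,
				System.nanoTime(), this);
	}

	@Override
	public void onDoneNativeProcessing(byte[] buffer) {
		// Hand the buffer back to Camera.addCallbackBuffer() for the next frame.
	}

	public void onSurfaceDestroyed() {
		processor.stop(); // interrupts and joins the processing thread
	}
}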
#You may override this if you move the build:
#just define it before including this file, on the command line, or with
#an environment variable.
#This points to the root of the opencv trunk - where the original opencv
#sources are - with modules, 3rdparty, ...
ifndef OPENCV_ROOT
OPENCV_ROOT := ${opencv_root}
endif
#You may override this the same way as above.
#This points to the actual directory that you built opencv for android from,
#e.g. under opencv/android/build
ifndef OPENCV_BUILD_ROOT
OPENCV_BUILD_ROOT := ${CMAKE_BINARY_DIR}
endif
OPENCV_INCLUDES := ${android_module_include_dirs} $(OPENCV_ROOT)/android/android-jni/jni
ANDROID_OPENCV_INCLUDES := $(OPENCV_ROOT)/android/android-jni/jni
ARMOBJS := obj/local/armeabi
ARMOBJS_V7A := obj/local/armeabi-v7a
OPENCV_LIB_DIRS := -L$(OPENCV_BUILD_ROOT)/$(ARMOBJS_V7A) -L$(OPENCV_BUILD_ROOT)/$(ARMOBJS)
ANDROID_OPENCV_LIB_DIRS := -L$(OPENCV_ROOT)/android/android-jni/$(ARMOBJS_V7A) -L$(OPENCV_ROOT)/android/android-jni/$(ARMOBJS)
#The order of linking is very important: modules that depend on others must
#come first (some entries here may still be out of order).
OPENCV_LIBS := $(OPENCV_LIB_DIRS) -lfeatures2d -lcalib3d -limgproc -lobjdetect \
-lvideo -lhighgui -lml -llegacy -lcore -lopencv_lapack -lflann \
-lzlib -lpng -ljpeg -ljasper
ANDROID_OPENCV_LIBS := $(ANDROID_OPENCV_LIB_DIRS) -landroid-opencv
changes
Added some specific CMakeLists.txt changes; see changes.diff for the details.
mbstowcs is not supported on Android, so the following had to be wrapped:
#ifndef ANDROID
string fromUtf16(const WString& str)
WString toUtf16(const string& str)
#endif
ANDROID is always defined when building with the NDK.
_S is a bad variable name for Android (the NDK defines it), so it is #undef'd.
Added zlib-android because I couldn't figure out how to configure the
existing zlib in 3rdparty.
Index: modules/highgui/CMakeLists.txt
===================================================================
--- modules/highgui/CMakeLists.txt (revision 3454)
+++ modules/highgui/CMakeLists.txt (working copy)
@@ -1,3 +1,21 @@
+if(ANDROID)
+
+set(high_gui_android_srcs src/bitstrm.cpp
+ src/cap.cpp
+ src/grfmt_base.cpp
+ src/grfmt_bmp.cpp
+ src/grfmt_jpeg.cpp
+ src/grfmt_png.cpp
+ src/grfmt_tiff.cpp
+ src/loadsave.cpp
+ src/precomp.cpp
+ src/utils.cpp
+ src/grfmt_sunras.cpp
+ src/grfmt_pxm.cpp
+ src/window.cpp )
+define_android_manual(highgui "${high_gui_android_srcs}" "$(LOCAL_PATH)/src $(OPENCV_INCLUDES)")
+
+else()
# ----------------------------------------------------------------------------
# CMake file for highgui. See root CMakeLists.txt
# Some parts taken from version of Hartmut Seichter, HIT Lab NZ.
@@ -332,3 +350,5 @@
install(FILES ${highgui_ext_hdrs}
DESTINATION include/opencv2/highgui
COMPONENT main)
+
+endif()#android
Index: modules/core/src/persistence.cpp
===================================================================
--- modules/core/src/persistence.cpp (revision 3454)
+++ modules/core/src/persistence.cpp (working copy)
@@ -114,12 +114,12 @@
namespace cv
{
-
+#ifndef ANDROID
string fromUtf16(const WString& str)
{
cv::AutoBuffer<char> _buf(str.size()*4 + 1);
char* buf = _buf;
-
+
size_t sz = wcstombs(buf, str.c_str(), str.size());
if( sz == (size_t)-1 )
return string();
@@ -131,14 +131,14 @@
{
cv::AutoBuffer<wchar_t> _buf(str.size() + 1);
wchar_t* buf = _buf;
-
+
size_t sz = mbstowcs(buf, str.c_str(), str.size());
if( sz == (size_t)-1 )
return WString();
buf[sz] = '\0';
return WString(buf);
}
-
+#endif
}
Index: modules/features2d/src/sift.cpp
===================================================================
--- modules/features2d/src/sift.cpp (revision 3454)
+++ modules/features2d/src/sift.cpp (working copy)
@@ -172,6 +172,7 @@
typedef Keypoints::iterator KeypointsIter ; ///< Keypoint list iter datatype
typedef Keypoints::const_iterator KeypointsConstIter ; ///< Keypoint list const iter datatype
+#undef _S
/** @brief Constructors and destructors */
/*@{*/
Sift(const pixel_t* _im_pt, int _width, int _height,
Index: modules/features2d/CMakeLists.txt
===================================================================
--- modules/features2d/CMakeLists.txt (revision 3454)
+++ modules/features2d/CMakeLists.txt (working copy)
@@ -1 +1,2 @@
-define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
\ No newline at end of file
+define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
+
Index: modules/CMakeLists.txt
===================================================================
--- modules/CMakeLists.txt (revision 3454)
+++ modules/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
+endif()
+
add_subdirectory(calib3d)
add_subdirectory(core)
add_subdirectory(features2d)
@@ -20,8 +24,11 @@
endif()
add_subdirectory(video)
+
+if(!ANDROID)
add_subdirectory(haartraining)
add_subdirectory(traincascade)
+endif()
#add_subdirectory(gpu)
Index: 3rdparty/zlib/CMakeLists.txt
===================================================================
--- 3rdparty/zlib/CMakeLists.txt (revision 3503)
+++ 3rdparty/zlib/CMakeLists.txt (working copy)
@@ -35,3 +35,5 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
)
+
+endif()
Index: 3rdparty/lapack/CMakeLists.txt
===================================================================
--- 3rdparty/lapack/CMakeLists.txt (revision 3503)
+++ 3rdparty/lapack/CMakeLists.txt (working copy)
@@ -2,6 +2,10 @@
# CMake file for opencv_lapack. See root CMakeLists.txt
#
# ----------------------------------------------------------------------------
+if(ANDROID)
+define_3rdparty_module(opencv_lapack)
+else()
+
project(opencv_lapack)
# List of C++ files:
@@ -57,3 +61,4 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
)
+endif() #android
Index: 3rdparty/libjasper/CMakeLists.txt
===================================================================
--- 3rdparty/libjasper/CMakeLists.txt (revision 3503)
+++ 3rdparty/libjasper/CMakeLists.txt (working copy)
@@ -1,3 +1,8 @@
+if(ANDROID)
+define_3rdparty_module(jasper)
+else()
+
+
# ----------------------------------------------------------------------------
# CMake file for libjasper. See root CMakeLists.txt
#
@@ -4,6 +9,7 @@
# ----------------------------------------------------------------------------
project(libjasper)
+
add_definitions(-DEXCLUDE_MIF_SUPPORT -DEXCLUDE_PNM_SUPPORT -DEXCLUDE_BMP_SUPPORT -DEXCLUDE_RAS_SUPPORT -DEXCLUDE_JPG_SUPPORT -DEXCLUDE_PGX_SUPPORT)
# List of C++ files:
@@ -41,6 +47,8 @@
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-implicit-function-declaration -Wno-unused")
endif()
+endif()#!android
+
set_target_properties(${the_target}
PROPERTIES
OUTPUT_NAME "${the_target}"
Index: 3rdparty/libpng/CMakeLists.txt
===================================================================
--- 3rdparty/libpng/CMakeLists.txt (revision 3503)
+++ 3rdparty/libpng/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+define_3rdparty_module(png)
+else()
+#endif()#android
# ----------------------------------------------------------------------------
# CMake file for libpng. See root CMakeLists.txt
#
@@ -38,3 +42,4 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
)
+endif()#android
Index: 3rdparty/libjpeg/CMakeLists.txt
===================================================================
--- 3rdparty/libjpeg/CMakeLists.txt (revision 3503)
+++ 3rdparty/libjpeg/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+define_3rdparty_module(jpeg)
+else()
+#endif()#android
# ----------------------------------------------------------------------------
# CMake file for libjpeg. See root CMakeLists.txt
#
@@ -39,3 +43,4 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
)
+endif()#android
Index: 3rdparty/CMakeLists.txt
===================================================================
--- 3rdparty/CMakeLists.txt (revision 3503)
+++ 3rdparty/CMakeLists.txt (working copy)
@@ -1,6 +1,22 @@
+if(ANDROID)
+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
+ add_subdirectory(libpng)
+ add_subdirectory(libjpeg)
+ add_subdirectory(libjasper)
+ add_subdirectory(flann)
+ add_subdirectory(lapack)
+
+ #zlib is special? look in zlib-android
+ #couldn't get the other one to compile for some reason...
+ #config issue
+ #add_subdirectory(zlib-android)
+
+else()
+
add_subdirectory(flann)
add_subdirectory(lapack)
add_subdirectory(zlib)
+
if(WITH_JASPER AND NOT JASPER_FOUND)
add_subdirectory(libjasper)
endif()
@@ -13,3 +29,5 @@
if(WITH_TIFF AND NOT TIFF_FOUND)
add_subdirectory(libtiff)
endif()
+
+endif()#android
Index: 3rdparty/flann/CMakeLists.txt
===================================================================
--- 3rdparty/flann/CMakeLists.txt (revision 3503)
+++ 3rdparty/flann/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+file(GLOB_RECURSE flann_sources_cpp *.cpp)
+define_android_manual(flann "${flann_sources_cpp}" "$(LOCAL_PATH)/../include $(LOCAL_PATH)/../include/flann $(LOCAL_PATH)/nn $(LOCAL_PATH)/algorithms $(LOCAL_PATH)/util")
+else()
if (DEFINED OPENCV_VERSION)
# ----------------------------------------------------------------------------
@@ -105,3 +109,4 @@
)
ENDIF()
+endif()#android
/* Define to one of `_getb67', `GETB67', `getb67' for Cray-2 and Cray-YMP
systems. This function is required for `alloca.c' support on those systems.
*/
/* #undef CRAY_STACKSEG_END */
/* Define to 1 if using `alloca.c'. */
/* #undef C_ALLOCA */
/* Define to 1 if you have `alloca', as a function or macro. */
/* #undef HAVE_ALLOCA */
/* Define to 1 if you have <alloca.h> and it should be used (not on Ultrix).
*/
#define HAVE_ALLOCA_H 1
/* V4L capturing support */
//#define HAVE_CAMV4L
/* V4L2 capturing support */
//#define HAVE_CAMV4L2
/* Carbon windowing environment */
/* #undef HAVE_CARBON */
/* IEEE1394 capturing support */
/* #undef HAVE_DC1394 */
/* libdc1394 0.9.4 or 0.9.5 */
/* #undef HAVE_DC1394_095 */
/* IEEE1394 capturing support - libdc1394 v2.x */
//#define HAVE_DC1394_2
/* ffmpeg in Gentoo */
/* #undef HAVE_GENTOO_FFMPEG */
/* FFMpeg video library */
/* #undef HAVE_FFMPEG */
/* ffmpeg's libswscale */
/* #undef HAVE_FFMPEG_SWSCALE */
/* GStreamer multimedia framework */
/* #undef HAVE_GSTREAMER */
/* GStreamer with gstappsink & gstappsrc */
/* #undef HAVE_GSTREAMER_APP */
/* GTK+ 2.0 Thread support */
//#define HAVE_GTHREAD
/* GTK+ 2.x toolkit */
//#define HAVE_GTK
/* OpenEXR codec */
/* #undef HAVE_ILMIMF */
/* Apple ImageIO Framework */
/* #undef HAVE_IMAGEIO */
/* Define to 1 if you have the <inttypes.h> header file. */
/* #undef HAVE_INTTYPES_H */
/* JPEG-2000 codec */
#define HAVE_JASPER
/* IJG JPEG codec */
#define HAVE_JPEG
/* Define to 1 if you have the `dl' library (-ldl). */
/* #undef HAVE_LIBDL */
/* Define to 1 if you have the `gomp' library (-lgomp). */
/* #undef HAVE_LIBGOMP */
/* Define to 1 if you have the `m' library (-lm). */
/* #undef HAVE_LIBM */
/* libpng/png.h needs to be included */
#undef HAVE_LIBPNG_PNG_H
/* Define to 1 if you have the `pthread' library (-lpthread). */
//#define HAVE_LIBPTHREAD 1
/* Define to 1 if you have the `lrint' function. */
/* #undef HAVE_LRINT */
/* PNG codec */
#define HAVE_PNG
/* Define to 1 if you have the `png_get_valid' function. */
/* #undef HAVE_PNG_GET_VALID */
/* png.h needs to be included */
#define HAVE_PNG_H
/* Define to 1 if you have the `png_set_tRNS_to_alpha' function. */
/* #undef HAVE_PNG_SET_TRNS_TO_ALPHA */
/* QuickTime video libraries */
/* #undef HAVE_QUICKTIME */
/* TIFF codec */
/* #undef HAVE_TIFF */
/* Unicap video capture library */
/* #undef HAVE_UNICAP */
/* Define to 1 if you have the <unistd.h> header file. */
#define HAVE_UNISTD_H 1
/* Xine video library */
/* #undef HAVE_XINE */
/* LZ77 compression/decompression library (used for PNG) */
/* #undef HAVE_ZLIB */
/* Intel Integrated Performance Primitives */
/* #undef HAVE_IPP */
/* OpenCV compiled as static or dynamic libs */
//#define OPENCV_BUILD_SHARED_LIB
/* Name of package */
#define PACKAGE "opencv"
/* Define to the address where bug reports for this package should be sent. */
//#define PACKAGE_BUGREPORT "opencvlibrary-devel@lists.sourceforge.net"
/* Define to the full name of this package. */
#define PACKAGE_NAME "opencv"
/* Define to the full name and version of this package. */
#define PACKAGE_STRING "opencv 2.1.0"
/* Define to the one symbol short name of this package. */
#define PACKAGE_TARNAME "opencv"
/* Define to the version of this package. */
#define PACKAGE_VERSION "2.1.0"
/* If using the C implementation of alloca, define if you know the
direction of stack growth for your system; otherwise it will be
automatically deduced at runtime.
STACK_DIRECTION > 0 => grows toward higher addresses
STACK_DIRECTION < 0 => grows toward lower addresses
STACK_DIRECTION = 0 => direction of growth unknown */
/* #undef STACK_DIRECTION */
/* Version number of package */
#define VERSION "2.1.0"
/* Define to 1 if your processor stores words with the most significant byte
first (like Motorola and SPARC, unlike Intel and VAX). */
/* #undef WORDS_BIGENDIAN */
/* Intel Threading Building Blocks */
/* #undef HAVE_TBB */
/* The Android NDK defines this somewhere and it messes with some variables */
#undef _S
# This file is automatically generated by Android Tools.
# Do not modify this file -- YOUR CHANGES WILL BE ERASED!
#
# This file must be checked in Version Control Systems.
#
# To customize properties used by the Ant build system use,
# "build.properties", and override values to adapt the script to your
# project structure.
# Project target.
target=android-7
Index: modules/highgui/CMakeLists.txt
===================================================================
--- modules/highgui/CMakeLists.txt (revision 3454)
+++ modules/highgui/CMakeLists.txt (working copy)
@@ -1,3 +1,21 @@
+if(ANDROID)
+
+set(high_gui_android_srcs src/bitstrm.cpp
+ src/cap.cpp
+ src/grfmt_base.cpp
+ src/grfmt_bmp.cpp
+ src/grfmt_jpeg.cpp
+ src/grfmt_png.cpp
+ src/grfmt_tiff.cpp
+ src/loadsave.cpp
+ src/precomp.cpp
+ src/utils.cpp
+ src/grfmt_sunras.cpp
+ src/grfmt_pxm.cpp
+ src/window.cpp )
+define_android_manual(highgui "${high_gui_android_srcs}" "$(OpenCVInclude) $(LOCAL_PATH)/include")
+
+else()
# ----------------------------------------------------------------------------
# CMake file for highgui. See root CMakeLists.txt
# Some parts taken from version of Hartmut Seichter, HIT Lab NZ.
@@ -332,3 +350,5 @@
install(FILES ${highgui_ext_hdrs}
DESTINATION include/opencv2/highgui
COMPONENT main)
+
+endif()#android
Index: modules/features2d/src/sift.cpp
===================================================================
--- modules/features2d/src/sift.cpp (revision 3454)
+++ modules/features2d/src/sift.cpp (working copy)
@@ -172,6 +172,7 @@
typedef Keypoints::iterator KeypointsIter ; ///< Keypoint list iter datatype
typedef Keypoints::const_iterator KeypointsConstIter ; ///< Keypoint list const iter datatype
+#undef _S
/** @brief Constructors and destructors */
/*@{*/
Sift(const pixel_t* _im_pt, int _width, int _height,
Index: modules/features2d/CMakeLists.txt
===================================================================
--- modules/features2d/CMakeLists.txt (revision 3454)
+++ modules/features2d/CMakeLists.txt (working copy)
@@ -1 +1,2 @@
-define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
\ No newline at end of file
+define_opencv_module(features2d opencv_core opencv_imgproc opencv_calib3d opencv_highgui)
+
Index: modules/CMakeLists.txt
===================================================================
--- modules/CMakeLists.txt (revision 3454)
+++ modules/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
+endif()
+
add_subdirectory(calib3d)
add_subdirectory(core)
add_subdirectory(features2d)
@@ -20,8 +24,11 @@
endif()
add_subdirectory(video)
+
+if(!ANDROID)
add_subdirectory(haartraining)
add_subdirectory(traincascade)
+endif()
#add_subdirectory(gpu)
Index: 3rdparty/zlib/CMakeLists.txt
===================================================================
--- 3rdparty/zlib/CMakeLists.txt (revision 3454)
+++ 3rdparty/zlib/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+define_3rdparty_module(zlib)
+else()
+
# ----------------------------------------------------------------------------
# CMake file for zlib. See root CMakeLists.txt
#
@@ -35,3 +39,5 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
)
+
+endif()
Index: 3rdparty/lapack/CMakeLists.txt
===================================================================
--- 3rdparty/lapack/CMakeLists.txt (revision 3454)
+++ 3rdparty/lapack/CMakeLists.txt (working copy)
@@ -2,6 +2,10 @@
# CMake file for opencv_lapack. See root CMakeLists.txt
#
# ----------------------------------------------------------------------------
+if(ANDROID)
+define_3rdparty_module(opencv_lapack)
+else()
+
project(opencv_lapack)
# List of C++ files:
@@ -57,3 +61,4 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
)
+endif() #android
Index: 3rdparty/libjasper/CMakeLists.txt
===================================================================
--- 3rdparty/libjasper/CMakeLists.txt (revision 3454)
+++ 3rdparty/libjasper/CMakeLists.txt (working copy)
@@ -1,3 +1,8 @@
+if(ANDROID)
+define_3rdparty_module(jasper)
+else()
+
+
# ----------------------------------------------------------------------------
# CMake file for libjasper. See root CMakeLists.txt
#
@@ -4,6 +9,7 @@
# ----------------------------------------------------------------------------
project(libjasper)
+
add_definitions(-DEXCLUDE_MIF_SUPPORT -DEXCLUDE_PNM_SUPPORT -DEXCLUDE_BMP_SUPPORT -DEXCLUDE_RAS_SUPPORT -DEXCLUDE_JPG_SUPPORT -DEXCLUDE_PGX_SUPPORT)
# List of C++ files:
@@ -41,6 +47,8 @@
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -Wno-implicit-function-declaration -Wno-unused")
endif()
+endif()#!android
+
set_target_properties(${the_target}
PROPERTIES
OUTPUT_NAME "${the_target}"
Index: 3rdparty/libpng/CMakeLists.txt
===================================================================
--- 3rdparty/libpng/CMakeLists.txt (revision 3454)
+++ 3rdparty/libpng/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+define_3rdparty_module(png)
+else()
+#endif()#android
# ----------------------------------------------------------------------------
# CMake file for libpng. See root CMakeLists.txt
#
@@ -38,3 +42,4 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/3rdparty/lib"
)
+endif()#android
Index: 3rdparty/libjpeg/CMakeLists.txt
===================================================================
--- 3rdparty/libjpeg/CMakeLists.txt (revision 3454)
+++ 3rdparty/libjpeg/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+define_3rdparty_module(jpeg)
+else()
+#endif()#android
# ----------------------------------------------------------------------------
# CMake file for libjpeg. See root CMakeLists.txt
#
@@ -39,3 +43,4 @@
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
ARCHIVE_OUTPUT_DIRECTORY ${CMAKE_BINARY_DIR}/3rdparty/lib
)
+endif()#android
Index: 3rdparty/CMakeLists.txt
===================================================================
--- 3rdparty/CMakeLists.txt (revision 3454)
+++ 3rdparty/CMakeLists.txt (working copy)
@@ -1,3 +1,10 @@
+if(ANDROID)
+ configure_file("${CMAKE_SOURCE_DIR}/Android.mk.modules.in" "${CMAKE_CURRENT_BINARY_DIR}/Android.mk")
+ add_subdirectory(libpng)
+ add_subdirectory(libjpeg)
+ add_subdirectory(libjasper)
+endif()
+
add_subdirectory(flann)
add_subdirectory(lapack)
add_subdirectory(zlib)
Index: 3rdparty/flann/CMakeLists.txt
===================================================================
--- 3rdparty/flann/CMakeLists.txt (revision 3454)
+++ 3rdparty/flann/CMakeLists.txt (working copy)
@@ -1,3 +1,7 @@
+if(ANDROID)
+file(GLOB_RECURSE flann_sources_cpp *.cpp)
+define_android_manual(flann "${flann_sources_cpp}" "$(LOCAL_PATH)/../include $(LOCAL_PATH)/../include/flann $(LOCAL_PATH)/nn $(LOCAL_PATH)/algorithms $(LOCAL_PATH)/util")
+else()
if (DEFINED OPENCV_VERSION)
# ----------------------------------------------------------------------------
@@ -105,3 +109,4 @@
)
ENDIF()
+endif()#android
*.aps *.plg *.o *.lo Makefile .libs *.user
if(ANDROID)
define_android_manual(zlib "adler32.c compress.c crc32.c deflate.c example.c gzio.c infback.c inffast.c inflate.c inftrees.c trees.c uncompr.c zutil.c" "")
endif()
ZLIB DATA COMPRESSION LIBRARY
zlib 1.2.3 is a general purpose data compression library. All the code is
thread safe. The data format used by the zlib library is described by RFCs
(Request for Comments) 1950 to 1952 in the files
http://www.ietf.org/rfc/rfc1950.txt (zlib format), rfc1951.txt (deflate format)
and rfc1952.txt (gzip format). These documents are also available in other
formats from ftp://ftp.uu.net/graphics/png/documents/zlib/zdoc-index.html
All functions of the compression library are documented in the file zlib.h
(volunteer to write man pages welcome, contact zlib@gzip.org). A usage example
of the library is given in the file example.c which also tests that the library
is working correctly. Another example is given in the file minigzip.c. The
compression library itself is composed of all source files except example.c and
minigzip.c.
To compile all files and run the test program, follow the instructions given at
the top of Makefile. In short "make test; make install" should work for most
machines. For Unix: "./configure; make test; make install". For MSDOS, use one
of the special makefiles such as Makefile.msc. For VMS, use make_vms.com.
Questions about zlib should be sent to <zlib@gzip.org>, or to Gilles Vollant
<info@winimage.com> for the Windows DLL version. The zlib home page is
http://www.zlib.org or http://www.gzip.org/zlib/ Before reporting a problem,
please check this site to verify that you have the latest version of zlib;
otherwise get the latest version and check whether the problem still exists or
not.
PLEASE read the zlib FAQ http://www.gzip.org/zlib/zlib_faq.html before asking
for help.
Mark Nelson <markn@ieee.org> wrote an article about zlib for the Jan. 1997
issue of Dr. Dobb's Journal; a copy of the article is available in
http://dogma.net/markn/articles/zlibtool/zlibtool.htm
The changes made in version 1.2.3 are documented in the file ChangeLog.
Unsupported third party contributions are provided in directory "contrib".
A Java implementation of zlib is available in the Java Development Kit
http://java.sun.com/j2se/1.4.2/docs/api/java/util/zip/package-summary.html
See the zlib home page http://www.zlib.org for details.
A Perl interface to zlib written by Paul Marquess <pmqs@cpan.org> is in the
CPAN (Comprehensive Perl Archive Network) sites
http://www.cpan.org/modules/by-module/Compress/
A Python interface to zlib written by A.M. Kuchling <amk@amk.ca> is
available in Python 1.5 and later versions, see
http://www.python.org/doc/lib/module-zlib.html
A zlib binding for TCL written by Andreas Kupries <a.kupries@westend.com> is
availlable at http://www.oche.de/~akupries/soft/trf/trf_zip.html
An experimental package to read and write files in .zip format, written on top
of zlib by Gilles Vollant <info@winimage.com>, is available in the
contrib/minizip directory of zlib.
Notes for some targets:
- For Windows DLL versions, please see win32/DLL_FAQ.txt
- For 64-bit Irix, deflate.c must be compiled without any optimization. With
-O, one libpng test fails. The test works in 32 bit mode (with the -n32
compiler flag). The compiler bug has been reported to SGI.
- zlib doesn't work with gcc 2.6.3 on a DEC 3000/300LX under OSF/1 2.1 it works
when compiled with cc.
- On Digital Unix 4.0D (formely OSF/1) on AlphaServer, the cc option -std1 is
necessary to get gzprintf working correctly. This is done by configure.
- zlib doesn't work on HP-UX 9.05 with some versions of /bin/cc. It works with
other compilers. Use "make test" to check your compiler.
- gzdopen is not supported on RISCOS, BEOS and by some Mac compilers.
- For PalmOs, see http://palmzlib.sourceforge.net/
- When building a shared, i.e. dynamic library on Mac OS X, the library must be
installed before testing (do "make install" before "make test"), since the
library location is specified in the library.
Acknowledgments:
The deflate format used by zlib was defined by Phil Katz. The deflate
and zlib specifications were written by L. Peter Deutsch. Thanks to all the
people who reported problems and suggested various improvements in zlib;
they are too numerous to cite here.
Copyright notice:
(C) 1995-2004 Jean-loup Gailly and Mark Adler
This software is provided 'as-is', without any express or implied
warranty. In no event will the authors be held liable for any damages
arising from the use of this software.
Permission is granted to anyone to use this software for any purpose,
including commercial applications, and to alter it and redistribute it
freely, subject to the following restrictions:
1. The origin of this software must not be misrepresented; you must not
claim that you wrote the original software. If you use this software
in a product, an acknowledgment in the product documentation would be
appreciated but is not required.
2. Altered source versions must be plainly marked as such, and must not be
misrepresented as being the original software.
3. This notice may not be removed or altered from any source distribution.
Jean-loup Gailly Mark Adler
jloup@gzip.org madler@alumni.caltech.edu
If you use the zlib library in a product, we would appreciate *not*
receiving lengthy legal documents to sign. The sources are provided
for free but without warranty of any kind. The library has been
entirely written by Jean-loup Gailly and Mark Adler; it does not
include third-party code.
If you redistribute modified sources, we would appreciate that you include
in the file ChangeLog history information documenting your changes. Please
read the FAQ for more information on the distribution of modified source
versions.
/* adler32.c -- compute the Adler-32 checksum of a data stream
* Copyright (C) 1995-2004 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id: adler32.c,v 1.2 2008-05-26 19:08:10 vp153 Exp $ */
#define ZLIB_INTERNAL
#include "zlib.h"
#define BASE 65521UL /* largest prime smaller than 65536 */
#define NMAX 5552
/* NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1 */
#define DO1(buf,i) {adler += (buf)[i]; sum2 += adler;}
#define DO2(buf,i) DO1(buf,i); DO1(buf,i+1);
#define DO4(buf,i) DO2(buf,i); DO2(buf,i+2);
#define DO8(buf,i) DO4(buf,i); DO4(buf,i+4);
#define DO16(buf) DO8(buf,0); DO8(buf,8);
/* use NO_DIVIDE if your processor does not do division in hardware */
#ifdef NO_DIVIDE
# define MOD(a) \
do { \
if (a >= (BASE << 16)) a -= (BASE << 16); \
if (a >= (BASE << 15)) a -= (BASE << 15); \
if (a >= (BASE << 14)) a -= (BASE << 14); \
if (a >= (BASE << 13)) a -= (BASE << 13); \
if (a >= (BASE << 12)) a -= (BASE << 12); \
if (a >= (BASE << 11)) a -= (BASE << 11); \
if (a >= (BASE << 10)) a -= (BASE << 10); \
if (a >= (BASE << 9)) a -= (BASE << 9); \
if (a >= (BASE << 8)) a -= (BASE << 8); \
if (a >= (BASE << 7)) a -= (BASE << 7); \
if (a >= (BASE << 6)) a -= (BASE << 6); \
if (a >= (BASE << 5)) a -= (BASE << 5); \
if (a >= (BASE << 4)) a -= (BASE << 4); \
if (a >= (BASE << 3)) a -= (BASE << 3); \
if (a >= (BASE << 2)) a -= (BASE << 2); \
if (a >= (BASE << 1)) a -= (BASE << 1); \
if (a >= BASE) a -= BASE; \
} while (0)
# define MOD4(a) \
do { \
if (a >= (BASE << 4)) a -= (BASE << 4); \
if (a >= (BASE << 3)) a -= (BASE << 3); \
if (a >= (BASE << 2)) a -= (BASE << 2); \
if (a >= (BASE << 1)) a -= (BASE << 1); \
if (a >= BASE) a -= BASE; \
} while (0)
#else
# define MOD(a) a %= BASE
# define MOD4(a) a %= BASE
#endif
/* ========================================================================= */
uLong ZEXPORT adler32(adler, buf, len)
uLong adler;
const Bytef *buf;
uInt len;
{
unsigned long sum2;
unsigned n;
/* split Adler-32 into component sums */
sum2 = (adler >> 16) & 0xffff;
adler &= 0xffff;
/* in case user likes doing a byte at a time, keep it fast */
if (len == 1) {
adler += buf[0];
if (adler >= BASE)
adler -= BASE;
sum2 += adler;
if (sum2 >= BASE)
sum2 -= BASE;
return adler | (sum2 << 16);
}
/* initial Adler-32 value (deferred check for len == 1 speed) */
if (buf == Z_NULL)
return 1L;
/* in case short lengths are provided, keep it somewhat fast */
if (len < 16) {
while (len--) {
adler += *buf++;
sum2 += adler;
}
if (adler >= BASE)
adler -= BASE;
MOD4(sum2); /* only added so many BASE's */
return adler | (sum2 << 16);
}
/* do length NMAX blocks -- requires just one modulo operation */
while (len >= NMAX) {
len -= NMAX;
n = NMAX / 16; /* NMAX is divisible by 16 */
do {
DO16(buf); /* 16 sums unrolled */
buf += 16;
} while (--n);
MOD(adler);
MOD(sum2);
}
/* do remaining bytes (less than NMAX, still just one modulo) */
if (len) { /* avoid modulos if none remaining */
while (len >= 16) {
len -= 16;
DO16(buf);
buf += 16;
}
while (len--) {
adler += *buf++;
sum2 += adler;
}
MOD(adler);
MOD(sum2);
}
/* return recombined sums */
return adler | (sum2 << 16);
}
/* ========================================================================= */
uLong ZEXPORT adler32_combine(adler1, adler2, len2)
uLong adler1;
uLong adler2;
z_off_t len2;
{
unsigned long sum1;
unsigned long sum2;
unsigned rem;
/* the derivation of this formula is left as an exercise for the reader */
rem = (unsigned)(len2 % BASE);
sum1 = adler1 & 0xffff;
sum2 = rem * sum1;
MOD(sum2);
sum1 += (adler2 & 0xffff) + BASE - 1;
sum2 += ((adler1 >> 16) & 0xffff) + ((adler2 >> 16) & 0xffff) + BASE - rem;
if (sum1 > BASE) sum1 -= BASE;
if (sum1 > BASE) sum1 -= BASE;
if (sum2 > (BASE << 1)) sum2 -= (BASE << 1);
if (sum2 > BASE) sum2 -= BASE;
return sum1 | (sum2 << 16);
}
/* compress.c -- compress a memory buffer
* Copyright (C) 1995-2003 Jean-loup Gailly.
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* @(#) $Id: compress.c,v 1.2 2008-05-26 19:08:10 vp153 Exp $ */
#define ZLIB_INTERNAL
#include "zlib.h"
/* ===========================================================================
Compresses the source buffer into the destination buffer. The level
parameter has the same meaning as in deflateInit. sourceLen is the byte
length of the source buffer. Upon entry, destLen is the total size of the
destination buffer, which must be at least 0.1% larger than sourceLen plus
12 bytes. Upon exit, destLen is the actual size of the compressed buffer.
compress2 returns Z_OK if success, Z_MEM_ERROR if there was not enough
memory, Z_BUF_ERROR if there was not enough room in the output buffer,
Z_STREAM_ERROR if the level parameter is invalid.
*/
int ZEXPORT compress2 (dest, destLen, source, sourceLen, level)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
uLong sourceLen;
int level;
{
z_stream stream;
int err;
stream.next_in = (Bytef*)source;
stream.avail_in = (uInt)sourceLen;
#ifdef MAXSEG_64K
/* Check for source > 64K on 16-bit machine: */
if ((uLong)stream.avail_in != sourceLen) return Z_BUF_ERROR;
#endif
stream.next_out = dest;
stream.avail_out = (uInt)*destLen;
if ((uLong)stream.avail_out != *destLen) return Z_BUF_ERROR;
stream.zalloc = (alloc_func)0;
stream.zfree = (free_func)0;
stream.opaque = (voidpf)0;
err = deflateInit(&stream, level);
if (err != Z_OK) return err;
err = deflate(&stream, Z_FINISH);
if (err != Z_STREAM_END) {
deflateEnd(&stream);
return err == Z_OK ? Z_BUF_ERROR : err;
}
*destLen = stream.total_out;
err = deflateEnd(&stream);
return err;
}
/* ===========================================================================
*/
int ZEXPORT compress (dest, destLen, source, sourceLen)
Bytef *dest;
uLongf *destLen;
const Bytef *source;
uLong sourceLen;
{
return compress2(dest, destLen, source, sourceLen, Z_DEFAULT_COMPRESSION);
}
/* ===========================================================================
If the default memLevel or windowBits for deflateInit() is changed, then
this function needs to be updated.
*/
uLong ZEXPORT compressBound (sourceLen)
uLong sourceLen;
{
return sourceLen + (sourceLen >> 12) + (sourceLen >> 14) + 11;
}
/* inffast.h -- header to use inffast.c
* Copyright (C) 1995-2003 Mark Adler
* For conditions of distribution and use, see copyright notice in zlib.h
*/
/* WARNING: this file should *not* be used by applications. It is
part of the implementation of the compression library and is
subject to change. Applications should only use zlib.h.
*/
void inflate_fast OF((z_streamp strm, unsigned start));
/* inffixed.h -- table for decoding fixed codes
* Generated automatically by makefixed().
*/
/* WARNING: this file should *not* be used by applications. It
is part of the implementation of the compression library and
is subject to change. Applications should only use zlib.h.
*/
static const code lenfix[512] = {
{96,7,0},{0,8,80},{0,8,16},{20,8,115},{18,7,31},{0,8,112},{0,8,48},
{0,9,192},{16,7,10},{0,8,96},{0,8,32},{0,9,160},{0,8,0},{0,8,128},
{0,8,64},{0,9,224},{16,7,6},{0,8,88},{0,8,24},{0,9,144},{19,7,59},
{0,8,120},{0,8,56},{0,9,208},{17,7,17},{0,8,104},{0,8,40},{0,9,176},
{0,8,8},{0,8,136},{0,8,72},{0,9,240},{16,7,4},{0,8,84},{0,8,20},
{21,8,227},{19,7,43},{0,8,116},{0,8,52},{0,9,200},{17,7,13},{0,8,100},
{0,8,36},{0,9,168},{0,8,4},{0,8,132},{0,8,68},{0,9,232},{16,7,8},
{0,8,92},{0,8,28},{0,9,152},{20,7,83},{0,8,124},{0,8,60},{0,9,216},
{18,7,23},{0,8,108},{0,8,44},{0,9,184},{0,8,12},{0,8,140},{0,8,76},
{0,9,248},{16,7,3},{0,8,82},{0,8,18},{21,8,163},{19,7,35},{0,8,114},
{0,8,50},{0,9,196},{17,7,11},{0,8,98},{0,8,34},{0,9,164},{0,8,2},
{0,8,130},{0,8,66},{0,9,228},{16,7,7},{0,8,90},{0,8,26},{0,9,148},
{20,7,67},{0,8,122},{0,8,58},{0,9,212},{18,7,19},{0,8,106},{0,8,42},
{0,9,180},{0,8,10},{0,8,138},{0,8,74},{0,9,244},{16,7,5},{0,8,86},
{0,8,22},{64,8,0},{19,7,51},{0,8,118},{0,8,54},{0,9,204},{17,7,15},
{0,8,102},{0,8,38},{0,9,172},{0,8,6},{0,8,134},{0,8,70},{0,9,236},
{16,7,9},{0,8,94},{0,8,30},{0,9,156},{20,7,99},{0,8,126},{0,8,62},
{0,9,220},{18,7,27},{0,8,110},{0,8,46},{0,9,188},{0,8,14},{0,8,142},
{0,8,78},{0,9,252},{96,7,0},{0,8,81},{0,8,17},{21,8,131},{18,7,31},
{0,8,113},{0,8,49},{0,9,194},{16,7,10},{0,8,97},{0,8,33},{0,9,162},
{0,8,1},{0,8,129},{0,8,65},{0,9,226},{16,7,6},{0,8,89},{0,8,25},
{0,9,146},{19,7,59},{0,8,121},{0,8,57},{0,9,210},{17,7,17},{0,8,105},
{0,8,41},{0,9,178},{0,8,9},{0,8,137},{0,8,73},{0,9,242},{16,7,4},
{0,8,85},{0,8,21},{16,8,258},{19,7,43},{0,8,117},{0,8,53},{0,9,202},
{17,7,13},{0,8,101},{0,8,37},{0,9,170},{0,8,5},{0,8,133},{0,8,69},
{0,9,234},{16,7,8},{0,8,93},{0,8,29},{0,9,154},{20,7,83},{0,8,125},
{0,8,61},{0,9,218},{18,7,23},{0,8,109},{0,8,45},{0,9,186},{0,8,13},
{0,8,141},{0,8,77},{0,9,250},{16,7,3},{0,8,83},{0,8,19},{21,8,195},
{19,7,35},{0,8,115},{0,8,51},{0,9,198},{17,7,11},{0,8,99},{0,8,35},
{0,9,166},{0,8,3},{0,8,131},{0,8,67},{0,9,230},{16,7,7},{0,8,91},
{0,8,27},{0,9,150},{20,7,67},{0,8,123},{0,8,59},{0,9,214},{18,7,19},
{0,8,107},{0,8,43},{0,9,182},{0,8,11},{0,8,139},{0,8,75},{0,9,246},
{16,7,5},{0,8,87},{0,8,23},{64,8,0},{19,7,51},{0,8,119},{0,8,55},
{0,9,206},{17,7,15},{0,8,103},{0,8,39},{0,9,174},{0,8,7},{0,8,135},
{0,8,71},{0,9,238},{16,7,9},{0,8,95},{0,8,31},{0,9,158},{20,7,99},
{0,8,127},{0,8,63},{0,9,222},{18,7,27},{0,8,111},{0,8,47},{0,9,190},
{0,8,15},{0,8,143},{0,8,79},{0,9,254},{96,7,0},{0,8,80},{0,8,16},
{20,8,115},{18,7,31},{0,8,112},{0,8,48},{0,9,193},{16,7,10},{0,8,96},
{0,8,32},{0,9,161},{0,8,0},{0,8,128},{0,8,64},{0,9,225},{16,7,6},
{0,8,88},{0,8,24},{0,9,145},{19,7,59},{0,8,120},{0,8,56},{0,9,209},
{17,7,17},{0,8,104},{0,8,40},{0,9,177},{0,8,8},{0,8,136},{0,8,72},
{0,9,241},{16,7,4},{0,8,84},{0,8,20},{21,8,227},{19,7,43},{0,8,116},
{0,8,52},{0,9,201},{17,7,13},{0,8,100},{0,8,36},{0,9,169},{0,8,4},
{0,8,132},{0,8,68},{0,9,233},{16,7,8},{0,8,92},{0,8,28},{0,9,153},
{20,7,83},{0,8,124},{0,8,60},{0,9,217},{18,7,23},{0,8,108},{0,8,44},
{0,9,185},{0,8,12},{0,8,140},{0,8,76},{0,9,249},{16,7,3},{0,8,82},
{0,8,18},{21,8,163},{19,7,35},{0,8,114},{0,8,50},{0,9,197},{17,7,11},
{0,8,98},{0,8,34},{0,9,165},{0,8,2},{0,8,130},{0,8,66},{0,9,229},
{16,7,7},{0,8,90},{0,8,26},{0,9,149},{20,7,67},{0,8,122},{0,8,58},
{0,9,213},{18,7,19},{0,8,106},{0,8,42},{0,9,181},{0,8,10},{0,8,138},
{0,8,74},{0,9,245},{16,7,5},{0,8,86},{0,8,22},{64,8,0},{19,7,51},
{0,8,118},{0,8,54},{0,9,205},{17,7,15},{0,8,102},{0,8,38},{0,9,173},
{0,8,6},{0,8,134},{0,8,70},{0,9,237},{16,7,9},{0,8,94},{0,8,30},
{0,9,157},{20,7,99},{0,8,126},{0,8,62},{0,9,221},{18,7,27},{0,8,110},
{0,8,46},{0,9,189},{0,8,14},{0,8,142},{0,8,78},{0,9,253},{96,7,0},
{0,8,81},{0,8,17},{21,8,131},{18,7,31},{0,8,113},{0,8,49},{0,9,195},
{16,7,10},{0,8,97},{0,8,33},{0,9,163},{0,8,1},{0,8,129},{0,8,65},
{0,9,227},{16,7,6},{0,8,89},{0,8,25},{0,9,147},{19,7,59},{0,8,121},
{0,8,57},{0,9,211},{17,7,17},{0,8,105},{0,8,41},{0,9,179},{0,8,9},
{0,8,137},{0,8,73},{0,9,243},{16,7,4},{0,8,85},{0,8,21},{16,8,258},
{19,7,43},{0,8,117},{0,8,53},{0,9,203},{17,7,13},{0,8,101},{0,8,37},
{0,9,171},{0,8,5},{0,8,133},{0,8,69},{0,9,235},{16,7,8},{0,8,93},
{0,8,29},{0,9,155},{20,7,83},{0,8,125},{0,8,61},{0,9,219},{18,7,23},
{0,8,109},{0,8,45},{0,9,187},{0,8,13},{0,8,141},{0,8,77},{0,9,251},
{16,7,3},{0,8,83},{0,8,19},{21,8,195},{19,7,35},{0,8,115},{0,8,51},
{0,9,199},{17,7,11},{0,8,99},{0,8,35},{0,9,167},{0,8,3},{0,8,131},
{0,8,67},{0,9,231},{16,7,7},{0,8,91},{0,8,27},{0,9,151},{20,7,67},
{0,8,123},{0,8,59},{0,9,215},{18,7,19},{0,8,107},{0,8,43},{0,9,183},
{0,8,11},{0,8,139},{0,8,75},{0,9,247},{16,7,5},{0,8,87},{0,8,23},
{64,8,0},{19,7,51},{0,8,119},{0,8,55},{0,9,207},{17,7,15},{0,8,103},
{0,8,39},{0,9,175},{0,8,7},{0,8,135},{0,8,71},{0,9,239},{16,7,9},
{0,8,95},{0,8,31},{0,9,159},{20,7,99},{0,8,127},{0,8,63},{0,9,223},
{18,7,27},{0,8,111},{0,8,47},{0,9,191},{0,8,15},{0,8,143},{0,8,79},
{0,9,255}
};
static const code distfix[32] = {
{16,5,1},{23,5,257},{19,5,17},{27,5,4097},{17,5,5},{25,5,1025},
{21,5,65},{29,5,16385},{16,5,3},{24,5,513},{20,5,33},{28,5,8193},
{18,5,9},{26,5,2049},{22,5,129},{64,5,0},{16,5,2},{23,5,385},
{19,5,25},{27,5,6145},{17,5,7},{25,5,1537},{21,5,97},{29,5,24577},
{16,5,4},{24,5,769},{20,5,49},{28,5,12289},{18,5,13},{26,5,3073},
{22,5,193},{64,5,0}
};