Commit a21df069 authored by Andrey Kamaev's avatar Andrey Kamaev

Cherry pick Android-related changes from master till 2012-08-23

* service
* tutorials
* samples
* gitignore
parents c42c3ebc dd3b0944
*.pyc
.DS_Store
refman.rst
OpenCV4Tegra/
*.user
tbb*.tgz
\ No newline at end of file
#!/bin/sh
cd `dirname $0`/..
ANDROID_DIR=`pwd`
rm -rf package
mkdir -p package
cd package
PRG_DIR=`pwd`
mkdir opencv
# neon-enabled build
#cd $PRG_DIR
#mkdir build-neon
#cd build-neon
#cmake -DANDROID_ABI="armeabi-v7a with NEON" -DBUILD_DOCS=OFF -DBUILD_TESTS=OFF -DBUILD_EXAMPLES=OFF -DBUILD_ANDROID_EXAMPLES=OFF -DCMAKE_TOOLCHAIN_FILE="$ANDROID_DIR/android.toolchain.cmake" -DCMAKE_INSTALL_PREFIX="$PRG_DIR/opencv" "$ANDROID_DIR/.." || exit 1
#make -j8 install/strip || exit 1
#cd "$PRG_DIR/opencv"
#rm -rf doc include src .classpath .project AndroidManifest.xml default.properties share/OpenCV/haarcascades share/OpenCV/lbpcascades share/OpenCV/*.cmake share/OpenCV/OpenCV.mk
#mv libs/armeabi-v7a libs/armeabi-v7a-neon
#mv share/OpenCV/3rdparty/libs/armeabi-v7a share/OpenCV/3rdparty/libs/armeabi-v7a-neon
# armeabi-v7a build
cd "$PRG_DIR"
mkdir build
cd build
cmake -DANDROID_ABI="armeabi-v7a" -DBUILD_DOCS=OFF -DBUILD_TESTS=ON -DBUILD_EXAMPLES=OFF -DBUILD_ANDROID_EXAMPLES=ON -DCMAKE_TOOLCHAIN_FILE="$ANDROID_DIR/android.toolchain.cmake" -DCMAKE_INSTALL_PREFIX="$PRG_DIR/opencv" "$ANDROID_DIR/.." || exit 1
make -j8 install/strip || exit 1
cd "$PRG_DIR/opencv"
rm -rf doc include src .classpath .project AndroidManifest.xml default.properties project.properties share/OpenCV/haarcascades share/OpenCV/lbpcascades share/OpenCV/*.cmake share/OpenCV/OpenCV.mk
# armeabi build
cd "$PRG_DIR/build"
rm -rf CMakeCache.txt
cmake -DANDROID_ABI="armeabi" -DBUILD_DOCS=ON -DBUILD_TESTS=ON -DBUILD_EXAMPLES=OFF -DBUILD_ANDROID_EXAMPLES=ON -DINSTALL_ANDROID_EXAMPLES=ON -DCMAKE_TOOLCHAIN_FILE="$ANDROID_DIR/android.toolchain.cmake" -DCMAKE_INSTALL_PREFIX="$PRG_DIR/opencv" "$ANDROID_DIR/.." || exit 1
make -j8 install/strip docs || exit 1
find doc -name "*.pdf" -exec cp {} $PRG_DIR/opencv/doc \;
cd $PRG_DIR
rm -rf opencv/doc/CMakeLists.txt
cp "$ANDROID_DIR/README.android" opencv/
cp "$ANDROID_DIR/../README" opencv/
# get opencv version
CV_VERSION=`grep -o "[0-9]\+\.[0-9]\+\.[0-9]\+" opencv/share/OpenCV/OpenCVConfig-version.cmake`
OPENCV_NAME=OpenCV-$CV_VERSION
mv opencv $OPENCV_NAME
#samples
cp -r "$ANDROID_DIR/../samples/android" "$PRG_DIR/samples"
cd "$PRG_DIR/samples"
#enable for loops over items with spaces in their name
IFS="
"
for dir in `ls -1`
do
    if [ -f "$dir/project.properties" ]
    then
        HAS_REFERENCE=`grep -c android.library.reference.1 "$dir/project.properties"`
        if [ "$HAS_REFERENCE" = 1 ]
        then
            echo -n > "$dir/project.properties"
            android update project --name "$dir" --target "android-8" --library "../../$OPENCV_NAME" --path "$dir"
            #echo 'android update project --name "$dir" --target "android-8" --library "../opencv$CV_VERSION" --path "$dir"'
        fi
    else
        if [ -f "$dir/default.properties" ]
        then
            HAS_REFERENCE=`grep -c android.library.reference.1 "$dir/default.properties"`
            if [ "$HAS_REFERENCE" = 1 ]
            then
                echo -n > "$dir/default.properties"
                android update project --name "$dir" --target "android-8" --library "../../$OPENCV_NAME" --path "$dir"
                #echo 'android update project --name "$dir" --target "android-8" --library "../opencv$CV_VERSION" --path "$dir"'
            fi
        else
            # neither project.properties nor default.properties: not an Android project, drop it
            rm -rf "$dir"
        fi
    fi
done
echo "OPENCV_MK_PATH:=../../$OPENCV_NAME/share/OpenCV/OpenCV.mk" > includeOpenCV.mk
#clean samples
cd "$PRG_DIR/samples"
#remove ignored files/folders
svn status --no-ignore | grep ^I | cut -c9- | xargs -d \\n rm -rf
#remove unversioned files/folders
svn status | grep ^\? | cut -c9- | xargs -d \\n rm -rf
#generate "gen" folders to eliminate eclipse warnings
cd "$PRG_DIR/samples"
for dir in `ls -1`
do
if [ -d "$dir" ]
then
mkdir "$dir/gen"
fi
done
#generate folders "gen" and "res" for opencv (dummy eclipse stuff)
cd $PRG_DIR
mkdir "$OPENCV_NAME/gen"
mkdir "$OPENCV_NAME/res"
# pack all files
cd $PRG_DIR
PRG_NAME=OpenCV-$CV_VERSION-tp-android-bin.tar.bz2
tar cjpf $PRG_NAME --exclude-vcs $OPENCV_NAME samples || exit -1
echo
echo "Package $PRG_NAME is successfully created"
......@@ -219,7 +219,6 @@ macro(add_android_project target path)
# get project sources
file(GLOB_RECURSE android_proj_files RELATIVE "${path}" "${path}/res/*" "${path}/src/*")
ocv_list_filterout(android_proj_files "\\\\.svn")
# copy sources out from the build tree
set(android_proj_file_deps "")
......@@ -277,9 +276,11 @@ macro(add_android_project target path)
)
get_target_property(android_proj_jni_location "${JNI_LIB_NAME}" LOCATION)
if (NOT (CMAKE_BUILD_TYPE MATCHES "debug"))
add_custom_command(TARGET ${JNI_LIB_NAME} POST_BUILD COMMAND ${CMAKE_STRIP} --strip-unneeded "${android_proj_jni_location}")
endif()
endif()
endif()
# build java part
if(android_proj_IGNORE_JAVA)
......
......@@ -26,11 +26,11 @@ if(BUILD_PACKAGE)
set(TAR_TRANSFORM "\"s,^,${TARBALL_NAME}/,\"")
add_custom_target(package_source
#TODO: maybe we should not remove dll's
COMMAND ${TAR_CMD} --transform ${TAR_TRANSFORM} -cjpf ${CMAKE_CURRENT_BINARY_DIR}/${TARBALL_NAME}.tar.bz2 --exclude=".svn" --exclude="*.pyc" --exclude="*.vcproj" --exclude="*/lib/*" --exclude="*.dll" ./
COMMAND ${TAR_CMD} --transform ${TAR_TRANSFORM} -cjpf ${CMAKE_CURRENT_BINARY_DIR}/${TARBALL_NAME}.tar.bz2 --exclude=".svn" --exclude=".git" --exclude="*.pyc" --exclude="*.vcproj" --exclude="*/lib/*" --exclude="*.dll" ./
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
else()
add_custom_target(package_source
COMMAND zip -9 -r ${CMAKE_CURRENT_BINARY_DIR}/${TARBALL_NAME}.zip . -x '*/.svn/*' '*.vcproj' '*.pyc'
COMMAND zip -9 -r ${CMAKE_CURRENT_BINARY_DIR}/${TARBALL_NAME}.zip . -x '*/.svn/*' '*/.git/*' '*.vcproj' '*.pyc'
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
endif()
if(ENABLE_SOLUTION_FOLDERS)
......
......@@ -175,6 +175,8 @@ a:hover {
div.body p, div.body dd, div.body li {
text-align: justify;
line-height: 130%;
margin-top: 1em;
margin-bottom: 1em;
}
div.body h1,
......@@ -327,9 +329,9 @@ table.field-list {
margin-top: 20px;
}
ul.simple {
/*ul.simple {
list-style: none;
}
}*/
em.menuselection, em.guilabel {
font-family: {{ theme_guifont }};
......@@ -384,3 +386,8 @@ margin-top: 0px;
div.body ul.search li {
text-align: left;
}
div.linenodiv {
min-width: 1em;
text-align: right;
}
\ No newline at end of file
......@@ -148,22 +148,22 @@
%\texttt{\href{http://www.ros.org/wiki/Stack Manifest}{stack manifest}} & Description of a ROS stack.
%\end{tabular}
\emph{The OpenCV C++ reference manual is here: \url{http://opencv.itseez.com}. Use \textbf{Quick Search} to find descriptions of the particular functions and classes}
\emph{The OpenCV C++ reference manual is here: \url{http://docs.opencv.org}. Use \textbf{Quick Search} to find descriptions of the particular functions and classes}
\section{Key OpenCV Classes}
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Point_}{Point\_}} & Template 2D point class \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Point3_}{Point3\_}} & Template 3D point class \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Size_}{Size\_}} & Template size (width, height) class \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Vec}{Vec}} & Template short vector class \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Matx}{Matx}} & Template small matrix class \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Scalar_}{Scalar}} & 4-element vector \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Rect_}{Rect}} & Rectangle \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Range}{Range}} & Integer value range \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Mat}{Mat}} & 2D or multi-dimensional dense array (can be used to store matrices, images, histograms, feature descriptors, voxel volumes etc.)\\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#sparsemat}{SparseMat}} & Multi-dimensional sparse array \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Ptr}{Ptr}} & Template smart pointer class
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Point_}{Point\_}} & Template 2D point class \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Point3_}{Point3\_}} & Template 3D point class \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Size_}{Size\_}} & Template size (width, height) class \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Vec}{Vec}} & Template short vector class \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Matx}{Matx}} & Template small matrix class \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Scalar_}{Scalar}} & 4-element vector \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Rect_}{Rect}} & Rectangle \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Range}{Range}} & Integer value range \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Mat}{Mat}} & 2D or multi-dimensional dense array (can be used to store matrices, images, histograms, feature descriptors, voxel volumes etc.)\\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#sparsemat}{SparseMat}} & Multi-dimensional sparse array \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Ptr}{Ptr}} & Template smart pointer class
\end{tabular}
\section{Matrix Basics}
......@@ -173,7 +173,7 @@
\> \texttt{Mat image(240, 320, CV\_8UC3);} \\
\textbf{[Re]allocate a pre-declared matrix}\\
\> \texttt{image.\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-create}{create}(480, 640, CV\_8UC3);}\\
\> \texttt{image.\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-create}{create}(480, 640, CV\_8UC3);}\\
\textbf{Create a matrix initialized with a constant}\\
\> \texttt{Mat A33(3, 3, CV\_32F, Scalar(5));} \\
......@@ -189,8 +189,8 @@
\> \texttt{Mat B22 = Mat(2, 2, CV\_32F, B22data).clone();}\\
\textbf{Initialize a random matrix}\\
\> \texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#randu}{randu}(image, Scalar(0), Scalar(256)); }\textit{// uniform dist}\\
\> \texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#randn}{randn}(image, Scalar(128), Scalar(10)); }\textit{// Gaussian dist}\\
\> \texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#randu}{randu}(image, Scalar(0), Scalar(256)); }\textit{// uniform dist}\\
\> \texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#randn}{randn}(image, Scalar(128), Scalar(10)); }\textit{// Gaussian dist}\\
\textbf{Convert matrix to/from other structures}\\
\>\textbf{(without copying the data)}\\
......@@ -230,32 +230,32 @@
\section{Matrix Manipulations: Copying, Shuffling, Part Access}
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-copyto}{src.copyTo(dst)}} & Copy matrix to another one \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-convertto}{src.convertTo(dst,type,scale,shift)}} & \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ Scale and convert to another datatype \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-clone}{m.clone()}} & Make deep copy of a matrix \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-reshape}{m.reshape(nch,nrows)}} & Change matrix dimensions and/or number of channels without copying data \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-copyto}{src.copyTo(dst)}} & Copy matrix to another one \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-convertto}{src.convertTo(dst,type,scale,shift)}} & \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ \ Scale and convert to another datatype \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-clone}{m.clone()}} & Make deep copy of a matrix \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-reshape}{m.reshape(nch,nrows)}} & Change matrix dimensions and/or number of channels without copying data \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-row}{m.row(i)}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-col}{m.col(i)}} & Take a matrix row/column \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-row}{m.row(i)}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-col}{m.col(i)}} & Take a matrix row/column \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-rowrange}{m.rowRange(Range(i1,i2))}}
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-colrange}{m.colRange(Range(j1,j2))}} & \ \ \ \ \ \ \ Take a matrix row/column span \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-rowrange}{m.rowRange(Range(i1,i2))}}
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-colrange}{m.colRange(Range(j1,j2))}} & \ \ \ \ \ \ \ Take a matrix row/column span \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#mat-diag}{m.diag(i)}} & Take a matrix diagonal \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#mat-diag}{m.diag(i)}} & Take a matrix diagonal \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#Mat}{m(Range(i1,i2),Range(j1,j2)), m(roi)}} & \ \ \ \ \ \ \ \ \ \ \ \ \ Take a submatrix \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#Mat}{m(Range(i1,i2),Range(j1,j2)), m(roi)}} & \ \ \ \ \ \ \ \ \ \ \ \ \ Take a submatrix \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#repeat}{m.repeat(ny,nx)}} & Make a bigger matrix from a smaller one \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#repeat}{m.repeat(ny,nx)}} & Make a bigger matrix from a smaller one \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#flip}{flip(src,dst,dir)}} & Reverse the order of matrix rows and/or columns \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#flip}{flip(src,dst,dir)}} & Reverse the order of matrix rows and/or columns \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#split}{split(...)}} & Split multi-channel matrix into separate channels \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#split}{split(...)}} & Split multi-channel matrix into separate channels \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#merge}{merge(...)}} & Make a multi-channel matrix out of the separate channels \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#merge}{merge(...)}} & Make a multi-channel matrix out of the separate channels \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#mixchannels}{mixChannels(...)}} & Generalized form of split() and merge() \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#mixchannels}{mixChannels(...)}} & Generalized form of split() and merge() \\
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#randshuffle}{randShuffle(...)}} & Randomly shuffle matrix elements \\
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#randshuffle}{randShuffle(...)}} & Randomly shuffle matrix elements \\
\end{tabular}
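% Editor's illustrative sketch (not from the original cheat sheet): a couple of the routines
% above in use, assuming `img' is an 8-bit 3-channel Mat.
\begin{tabbing}
Exa\=mple. Copy a region of interest and split it into planes\\
\> \texttt{Mat patch; img(Rect(10, 10, 100, 100)).copyTo(patch);}\\
\> \texttt{vector<Mat> planes; split(patch, planes); }\textit{// planes[0..2]}\\
\end{tabbing}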
......@@ -278,17 +278,17 @@ other matrix operations, such as
\begin{itemize}
\item
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#add}{add()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#subtract}{subtract()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#multiply}{multiply()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#divide}{divide()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#absdiff}{absdiff()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#bitwise-and}{bitwise\_and()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#bitwise-or}{bitwise\_or()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#bitwise-xor}{bitwise\_xor()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#max}{max()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#min}{min()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#compare}{compare()}}
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#add}{add()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#subtract}{subtract()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#multiply}{multiply()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#divide}{divide()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#absdiff}{absdiff()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#bitwise-and}{bitwise\_and()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#bitwise-or}{bitwise\_or()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#bitwise-xor}{bitwise\_xor()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#max}{max()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#min}{min()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#compare}{compare()}}
-- correspondingly, addition, subtraction, element-wise multiplication ... comparison of two matrices or a matrix and a scalar.
......@@ -314,49 +314,49 @@ Exa\=mple. \href{http://en.wikipedia.org/wiki/Alpha_compositing}{Alpha compositi
\item
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#sum}{sum()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#mean}{mean()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#meanstddev}{meanStdDev()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#norm}{norm()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#countnonzero}{countNonZero()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#minmaxloc}{minMaxLoc()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#sum}{sum()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#mean}{mean()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#meanstddev}{meanStdDev()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#norm}{norm()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#countnonzero}{countNonZero()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#minmaxloc}{minMaxLoc()}},
-- various statistics of matrix elements.
\item
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#exp}{exp()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#log}{log()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#pow}{pow()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#sqrt}{sqrt()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#carttopolar}{cartToPolar()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#polartocart}{polarToCart()}}
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#exp}{exp()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#log}{log()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#pow}{pow()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#sqrt}{sqrt()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#carttopolar}{cartToPolar()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#polartocart}{polarToCart()}}
-- the classical math functions.
\item
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#scaleadd}{scaleAdd()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#transpose}{transpose()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#gemm}{gemm()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#invert}{invert()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#solve}{solve()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#determinant}{determinant()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#trace}{trace()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#eigen}{eigen()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#SVD}{SVD}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#scaleadd}{scaleAdd()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#transpose}{transpose()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#gemm}{gemm()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#invert}{invert()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#solve}{solve()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#determinant}{determinant()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#trace}{trace()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#eigen}{eigen()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#SVD}{SVD}},
-- the algebraic functions + SVD class.
\item
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#dft}{dft()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#idft}{idft()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#dct}{dct()}},
\texttt{\href{http://opencv.itseez.com/modules/core/doc/operations_on_arrays.html\#idct}{idct()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#dft}{dft()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#idft}{idft()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#dct}{dct()}},
\texttt{\href{http://docs.opencv.org/modules/core/doc/operations_on_arrays.html\#idct}{idct()}},
-- discrete Fourier and cosine transformations
\end{itemize}
For some operations a more convenient \href{http://opencv.itseez.com/modules/core/doc/basic_structures.html\#matrix-expressions}{algebraic notation} can be used, for example:
For some operations a more convenient \href{http://docs.opencv.org/modules/core/doc/basic_structures.html\#matrix-expressions}{algebraic notation} can be used, for example:
\begin{tabbing}
\texttt{Mat}\={} \texttt{delta = (J.t()*J + lambda*}\\
\>\texttt{Mat::eye(J.cols, J.cols, J.type()))}\\
......@@ -370,20 +370,20 @@ implements the core of Levenberg-Marquardt optimization algorithm.
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#filter2d}{filter2D()}} & Non-separable linear filter \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#filter2d}{filter2D()}} & Non-separable linear filter \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#sepfilter2d}{sepFilter2D()}} & Separable linear filter \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#sepfilter2d}{sepFilter2D()}} & Separable linear filter \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#blur}{boxFilter()}}, \texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#gaussianblur}{GaussianBlur()}},
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#medianblur}{medianBlur()}},
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#bilateralfilter}{bilateralFilter()}}
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#blur}{boxFilter()}}, \texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#gaussianblur}{GaussianBlur()}},
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#medianblur}{medianBlur()}},
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#bilateralfilter}{bilateralFilter()}}
& Smooth the image with one of the linear or non-linear filters \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#sobel}{Sobel()}}, \texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#scharr}{Scharr()}}
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#sobel}{Sobel()}}, \texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#scharr}{Scharr()}}
& Compute the spatial image derivatives \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#laplacian}{Laplacian()}} & compute Laplacian: $\Delta I = \frac{\partial ^ 2 I}{\partial x^2} + \frac{\partial ^ 2 I}{\partial y^2}$ \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#laplacian}{Laplacian()}} & compute Laplacian: $\Delta I = \frac{\partial ^ 2 I}{\partial x^2} + \frac{\partial ^ 2 I}{\partial y^2}$ \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#erode}{erode()}}, \texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/filtering.html\#dilate}{dilate()}} & Morphological operations \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#erode}{erode()}}, \texttt{\href{http://docs.opencv.org/modules/imgproc/doc/filtering.html\#dilate}{dilate()}} & Morphological operations \\
\end{tabular}
......@@ -398,17 +398,17 @@ Exa\=mple. Filter image in-place with a 3x3 high-pass kernel\\
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#resize}{resize()}} & Resize image \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#resize}{resize()}} & Resize image \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#getrectsubpix}{getRectSubPix()}} & Extract an image patch \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#getrectsubpix}{getRectSubPix()}} & Extract an image patch \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#warpaffine}{warpAffine()}} & Warp image affinely\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#warpaffine}{warpAffine()}} & Warp image affinely\\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#warpperspective}{warpPerspective()}} & Warp image perspectively\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#warpperspective}{warpPerspective()}} & Warp image perspectively\\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#remap}{remap()}} & Generic image warping\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#remap}{remap()}} & Generic image warping\\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#convertmaps}{convertMaps()}} & Optimize maps for a faster remap() execution\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#convertmaps}{convertMaps()}} & Optimize maps for a faster remap() execution\\
\end{tabular}
......@@ -422,21 +422,21 @@ Example. Decimate image by factor of $\sqrt{2}$:\\
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#cvtcolor}{cvtColor()}} & Convert image from one color space to another \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#cvtcolor}{cvtColor()}} & Convert image from one color space to another \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#threshold}{threshold()}}, \texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#adaptivethreshold}{adaptiveThreshold()}} & Convert grayscale image to binary image using a fixed or a variable threshold \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#threshold}{threshold()}}, \texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#adaptivethreshold}{adaptiveThreshold()}} & Convert grayscale image to binary image using a fixed or a variable threshold \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#floodfill}{floodFill()}} & Find a connected component using region growing algorithm\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#floodfill}{floodFill()}} & Find a connected component using region growing algorithm\\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#integral}{integral()}} & Compute integral image \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#integral}{integral()}} & Compute integral image \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#distancetransform}{distanceTransform()}}
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#distancetransform}{distanceTransform()}}
& build distance map or discrete Voronoi diagram for a binary image. \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#watershed}{watershed()}},
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/miscellaneous_transformations.html\#grabcut}{grabCut()}}
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#watershed}{watershed()}},
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/miscellaneous_transformations.html\#grabcut}{grabCut()}}
& marker-based image segmentation algorithms.
See the samples \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/watershed.cpp}{watershed.cpp}} and \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/grabcut.cpp}{grabcut.cpp}}.
See the samples \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/watershed.cpp}{watershed.cpp}} and \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/grabcut.cpp}{grabcut.cpp}}.
\end{tabular}
......@@ -445,13 +445,13 @@ Example. Decimate image by factor of $\sqrt{2}$:\\
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/histograms.html\#calchist}{calcHist()}} & Compute image(s) histogram \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/histograms.html\#calchist}{calcHist()}} & Compute image(s) histogram \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/histograms.html\#calcbackproject}{calcBackProject()}} & Back-project the histogram \\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/histograms.html\#calcbackproject}{calcBackProject()}} & Back-project the histogram \\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/histograms.html\#equalizehist}{equalizeHist()}} & Normalize image brightness and contrast\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/histograms.html\#equalizehist}{equalizeHist()}} & Normalize image brightness and contrast\\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/histograms.html\#comparehist}{compareHist()}} & Compare two histograms\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/histograms.html\#comparehist}{compareHist()}} & Compare two histograms\\
\end{tabular}
......@@ -464,12 +464,12 @@ Example. Compute Hue-Saturation histogram of an image:\\
\end{tabbing}
\subsection{Contours}
See \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/contours2.cpp}{contours2.cpp}} and \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/squares.cpp}{squares.cpp}}
See \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/contours2.cpp}{contours2.cpp}} and \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/squares.cpp}{squares.cpp}}
samples to learn what contours are and how to use them.
\section{Data I/O}
\href{http://opencv.itseez.com/modules/core/doc/xml_yaml_persistence.html\#xml-yaml-file-storages-writing-to-a-file-storage}{XML/YAML storages} are collections (possibly nested) of scalar values, structures and heterogeneous lists.
\href{http://docs.opencv.org/modules/core/doc/xml_yaml_persistence.html\#xml-yaml-file-storages-writing-to-a-file-storage}{XML/YAML storages} are collections (possibly nested) of scalar values, structures and heterogeneous lists.
\begin{tabbing}
\textbf{Wr}\=\textbf{iting data to YAML (or XML)}\\
......@@ -521,9 +521,9 @@ samples on what are the contours and how to use them.
\begin{tabbing}
\textbf{Wr}\=\textbf{iting and reading raster images}\\
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/reading_and_writing_images_and_video.html\#imwrite}{imwrite}("myimage.jpg", image);}\\
\texttt{Mat image\_color\_copy = \href{http://opencv.itseez.com/modules/highgui/doc/reading_and_writing_images_and_video.html\#imread}{imread}("myimage.jpg", 1);}\\
\texttt{Mat image\_grayscale\_copy = \href{http://opencv.itseez.com/modules/highgui/doc/reading_and_writing_images_and_video.html\#imread}{imread}("myimage.jpg", 0);}\\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/reading_and_writing_images_and_video.html\#imwrite}{imwrite}("myimage.jpg", image);}\\
\texttt{Mat image\_color\_copy = \href{http://docs.opencv.org/modules/highgui/doc/reading_and_writing_images_and_video.html\#imread}{imread}("myimage.jpg", 1);}\\
\texttt{Mat image\_grayscale\_copy = \href{http://docs.opencv.org/modules/highgui/doc/reading_and_writing_images_and_video.html\#imread}{imread}("myimage.jpg", 0);}\\
\end{tabbing}
\emph{The functions can read/write images in the following formats: \textbf{BMP (.bmp), JPEG (.jpg, .jpeg), TIFF (.tif, .tiff), PNG (.png), PBM/PGM/PPM (.p?m), Sun Raster (.sr), JPEG 2000 (.jp2)}. Every format supports 8-bit, 1- or 3-channel images. Some formats (PNG, JPEG 2000) support 16 bits per channel.}
......@@ -544,61 +544,61 @@ samples on what are the contours and how to use them.
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/user_interface.html\#namedwindow}{namedWindow(winname,flags)}} & \ \ \ \ \ \ \ \ \ \ Create named highgui window \\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/user_interface.html\#namedwindow}{namedWindow(winname,flags)}} & \ \ \ \ \ \ \ \ \ \ Create named highgui window \\
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/user_interface.html\#destroywindow}{destroyWindow(winname)}} & \ \ \ Destroy the specified window \\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/user_interface.html\#destroywindow}{destroyWindow(winname)}} & \ \ \ Destroy the specified window \\
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/user_interface.html\#imshow}{imshow(winname, mtx)}} & Show image in the window \\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/user_interface.html\#imshow}{imshow(winname, mtx)}} & Show image in the window \\
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/user_interface.html\#waitkey}{waitKey(delay)}} & Wait for a key press during the specified time interval (or forever). Process events while waiting. \emph{Do not forget to call this function several times a second in your code.} \\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/user_interface.html\#waitkey}{waitKey(delay)}} & Wait for a key press during the specified time interval (or forever). Process events while waiting. \emph{Do not forget to call this function several times a second in your code.} \\
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/user_interface.html\#createtrackbar}{createTrackbar(...)}} & Add trackbar (slider) to the specified window \\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/user_interface.html\#createtrackbar}{createTrackbar(...)}} & Add trackbar (slider) to the specified window \\
\texttt{\href{http://opencv.itseez.com/modules/highgui/doc/user_interface.html\#setmousecallback}{setMouseCallback(...)}} & \ \ Set the callback on mouse clicks and movements in the specified window \\
\texttt{\href{http://docs.opencv.org/modules/highgui/doc/user_interface.html\#setmousecallback}{setMouseCallback(...)}} & \ \ Set the callback on mouse clicks and movements in the specified window \\
\end{tabular}
See \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/camshiftdemo.cpp}{camshiftdemo.cpp}} and other \href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/}{OpenCV samples} on how to use the GUI functions.
See \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/camshiftdemo.cpp}{camshiftdemo.cpp}} and other \href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/}{OpenCV samples} on how to use the GUI functions.
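% Editor's illustrative sketch (not from the original cheat sheet): a minimal display loop,
% assuming `cap' is an already opened VideoCapture.
\begin{tabbing}
Exa\=mple. Show frames until ESC is pressed\\
\> \texttt{namedWindow("video"); Mat frame;}\\
\> \texttt{while (cap.read(frame)) \{ imshow("video", frame);}\\
\> \texttt{\ \ if (waitKey(30) == 27) break; \}}\\
\end{tabbing}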
\section{Camera Calibration, Pose Estimation and Depth Estimation}
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#calibratecamera}{calibrateCamera()}} & Calibrate camera from several views of a calibration pattern. \\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#calibratecamera}{calibrateCamera()}} & Calibrate camera from several views of a calibration pattern. \\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#findchessboardcorners}{findChessboardCorners()}} & \ \ \ \ \ \ Find feature points on the checkerboard calibration pattern. \\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#findchessboardcorners}{findChessboardCorners()}} & \ \ \ \ \ \ Find feature points on the checkerboard calibration pattern. \\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#solvepnp}{solvePnP()}} & Find the object pose from the known projections of its feature points. \\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#solvepnp}{solvePnP()}} & Find the object pose from the known projections of its feature points. \\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#stereocalibrate}{stereoCalibrate()}} & Calibrate stereo camera. \\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#stereocalibrate}{stereoCalibrate()}} & Calibrate stereo camera. \\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#stereorectify}{stereoRectify()}} & Compute the rectification transforms for a calibrated stereo camera.\\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#stereorectify}{stereoRectify()}} & Compute the rectification transforms for a calibrated stereo camera.\\
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/geometric_transformations.html\#initundistortrectifymap}{initUndistortRectifyMap()}} & \ \ \ \ \ \ Compute rectification map (for \texttt{remap()}) for each stereo camera head.\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/geometric_transformations.html\#initundistortrectifymap}{initUndistortRectifyMap()}} & \ \ \ \ \ \ Compute rectification map (for \texttt{remap()}) for each stereo camera head.\\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#StereoBM}{StereoBM}}, \texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#StereoSGBM}{StereoSGBM}} & The stereo correspondence engines to be run on rectified stereo pairs.\\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#StereoBM}{StereoBM}}, \texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#StereoSGBM}{StereoSGBM}} & The stereo correspondence engines to be run on rectified stereo pairs.\\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#reprojectimageto3d}{reprojectImageTo3D()}} & Convert disparity map to 3D point cloud.\\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#reprojectimageto3d}{reprojectImageTo3D()}} & Convert disparity map to 3D point cloud.\\
\texttt{\href{http://opencv.itseez.com/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#findhomography}{findHomography()}} & Find best-fit perspective transformation between two 2D point sets. \\
\texttt{\href{http://docs.opencv.org/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html\#findhomography}{findHomography()}} & Find best-fit perspective transformation between two 2D point sets. \\
\end{tabular}
To calibrate a camera, you can use \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/calibration.cpp}{calibration.cpp}} or
\texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/stereo\_calib.cpp}{stereo\_calib.cpp}} samples.
To calibrate a camera, you can use \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/calibration.cpp}{calibration.cpp}} or
\texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/stereo\_calib.cpp}{stereo\_calib.cpp}} samples.
To get the disparity maps and the point clouds, use
\texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/stereo\_match.cpp}{stereo\_match.cpp}} sample.
\texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/stereo\_match.cpp}{stereo\_match.cpp}} sample.
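% Editor's illustrative sketch (not from the original cheat sheet): calibrating from collected
% pattern views; objPts, imgPts and img are assumed to be filled in beforehand.
\begin{tabbing}
Exa\=mple. Calibrate a camera and undistort an image\\
\> \texttt{Mat K, dist; vector<Mat> rvecs, tvecs;}\\
\> \texttt{calibrateCamera(objPts, imgPts, img.size(), K, dist, rvecs, tvecs);}\\
\> \texttt{Mat rect; undistort(img, rect, K, dist);}\\
\end{tabbing}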
\section{Object Detection}
\begin{tabular}{@{}p{\the\MyLen}%
@{}p{\linewidth-\the\MyLen}@{}}
\texttt{\href{http://opencv.itseez.com/modules/imgproc/doc/object_detection.html\#matchtemplate}{matchTemplate}} & Compute proximity map for given template.\\
\texttt{\href{http://docs.opencv.org/modules/imgproc/doc/object_detection.html\#matchtemplate}{matchTemplate}} & Compute proximity map for given template.\\
\texttt{\href{http://opencv.itseez.com/modules/objdetect/doc/cascade_classification.html\#cascadeclassifier}{CascadeClassifier}} & Viola's Cascade of Boosted classifiers using Haar or LBP features. Suitable for detecting faces, facial features and some other objects without diverse textures. See \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/facedetect.cpp}{facedetect.cpp}}\\
\texttt{\href{http://docs.opencv.org/modules/objdetect/doc/cascade_classification.html\#cascadeclassifier}{CascadeClassifier}} & Viola's Cascade of Boosted classifiers using Haar or LBP features. Suitable for detecting faces, facial features and some other objects without diverse textures. See \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/c/facedetect.cpp}{facedetect.cpp}}\\
\texttt{{HOGDescriptor}} & N. Dalal's object detector using Histogram-of-Oriented-Gradients (HOG) features. Suitable for detecting people, cars and other objects with well-defined silhouettes. See \texttt{\href{http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/peopledetect.cpp}{peopledetect.cpp}}\\
\texttt{{HOGDescriptor}} & N. Dalal's object detector using Histogram-of-Oriented-Gradients (HOG) features. Suitable for detecting people, cars and other objects with well-defined silhouettes. See \texttt{\href{http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/peopledetect.cpp}{peopledetect.cpp}}\\
\end{tabular}
......
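% Editor's illustrative sketch (not from the original cheat sheet): running a cascade detector
% from the Object Detection table above, assuming `gray' is a grayscale Mat and the cascade
% XML file is available on disk.
\begin{tabbing}
Exa\=mple. Detect faces with a trained cascade\\
\> \texttt{CascadeClassifier cc("haarcascade\_frontalface\_alt.xml");}\\
\> \texttt{vector<Rect> faces; cc.detectMultiScale(gray, faces);}\\
\end{tabbing}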
......@@ -56,7 +56,7 @@ Scalar
Code
=====
* This code is in your OpenCV sample folder. Otherwise you can grab it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/core/Matrix/Drawing_1.cpp>`_
* This code is in your OpenCV sample folder. Otherwise you can grab it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/core/Matrix/Drawing_1.cpp>`_
Explanation
=============
......
......@@ -22,7 +22,7 @@ Code
* In this tutorial, we intend to use *random* values for the drawing parameters. Also, we intend to populate our image with a large number of geometric figures. Since we will be initializing them in a random fashion, this process will be automated by using *loops*.
* This code is in your OpenCV sample folder. Otherwise you can grab it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/core/Matrix/Drawing_2.cpp>`_ .
* This code is in your OpenCV sample folder. Otherwise you can grab it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/core/Matrix/Drawing_2.cpp>`_ .
Explanation
============
......
......@@ -2,6 +2,7 @@
.. |Author_BernatG| unicode:: Bern U+00E1 t U+0020 G U+00E1 bor
.. |Author_AndreyK| unicode:: Andrey U+0020 Kamaev
.. |Author_LeonidBLB| unicode:: Leonid U+0020 Beynenson
.. |Author_VsevolodG| unicode:: Vsevolod U+0020 Glumov
.. |Author_VictorE| unicode:: Victor U+0020 Eruhimov
.. |Author_ArtemM| unicode:: Artem U+0020 Myagkov
.. |Author_FernandoI| unicode:: Fernando U+0020 Iglesias U+0020 Garc U+00ED a
......
......@@ -23,7 +23,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/features2D/SURF_descriptor.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/features2D/SURF_descriptor.cpp>`_
.. code-block:: cpp
......
......@@ -22,7 +22,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/features2D/SURF_detector.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/features2D/SURF_detector.cpp>`_
.. code-block:: cpp
......
......@@ -19,7 +19,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/features2D/SURF_FlannMatcher.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/features2D/SURF_FlannMatcher.cpp>`_
.. code-block:: cpp
......
......@@ -20,7 +20,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/features2D/SURF_Homography.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/features2D/SURF_Homography.cpp>`_
.. code-block:: cpp
......
......@@ -19,7 +19,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/TrackingMotion/cornerSubPix_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -20,7 +20,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/TrackingMotion/cornerDetector_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -18,7 +18,7 @@ Theory
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/TrackingMotion/goodFeaturesToTrack_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -151,7 +151,7 @@ How does it work?
Code
====
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/TrackingMotion/cornerHarris_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -70,7 +70,7 @@ Erosion
Code
======
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgProc/Morphology_1.cpp>`_
This tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgProc/Morphology_1.cpp>`_
.. code-block:: cpp
......
......@@ -115,7 +115,7 @@ Code
* Applies 4 different kinds of filters (explained in Theory) and shows the filtered images sequentially
* **Downloadable code**:
Click `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgProc/Smoothing.cpp>`_
Click `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgProc/Smoothing.cpp>`_
* **Code at glance:**
......
......@@ -99,9 +99,9 @@ Code
* **Downloadable code**:
a. Click `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo1.cpp>`_ for the basic version (explained in this tutorial).
b. For stuff slightly fancier (using H-S histograms and floodFill to define a mask for the skin area) you can check the `improved demo <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo2.cpp>`_
c. ...or you can always check out the classical `camshiftdemo <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/camshiftdemo.cpp>`_ in samples.
a. Click `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo1.cpp>`_ for the basic version (explained in this tutorial).
b. For stuff slightly fancier (using H-S histograms and floodFill to define a mask for the skin area) you can check the `improved demo <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/Histograms_Matching/calcBackProject_Demo2.cpp>`_
c. ...or you can always check out the classical `camshiftdemo <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/camshiftdemo.cpp>`_ in samples.
* **Code at glance:**
......
......@@ -82,7 +82,7 @@ Code
* Plot the three histograms in a window
* **Downloadable code**:
Click `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/Histograms_Matching/calcHist_Demo.cpp>`_
Click `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/Histograms_Matching/calcHist_Demo.cpp>`_
* **Code at glance:**
......
......@@ -80,7 +80,7 @@ Code
* Display the numerical matching parameters obtained.
* **Downloadable code**:
Click `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/Histograms_Matching/compareHist_Demo.cpp>`_
Click `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/Histograms_Matching/compareHist_Demo.cpp>`_
* **Code at glance:**
......
......@@ -83,7 +83,7 @@ Code
* Display the source and equalized images in a window.
* **Downloadable code**:
Click `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/Histograms_Matching/EqualizeHist_Demo.cpp>`_
Click `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/Histograms_Matching/EqualizeHist_Demo.cpp>`_
* **Code at glance:**
......
......@@ -125,7 +125,7 @@ Code
* Draw a rectangle around the area corresponding to the highest match
* **Downloadable code**:
Click `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/Histograms_Matching/MatchTemplate_Demo.cpp>`_
Click `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/Histograms_Matching/MatchTemplate_Demo.cpp>`_
* **Code at glance:**
......
......@@ -86,7 +86,7 @@ Code
* Applies the *Canny Detector* and generates a **mask** (bright lines representing the edges on a black background).
* Applies the obtained mask to the original image and displays it in a window.
#. The tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp>`_
#. The tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/CannyDetector_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -47,7 +47,7 @@ Code
The user chooses either option by pressing 'c' (constant) or 'r' (replicate)
* The program finishes when the user presses 'ESC'
#. The tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/copyMakeBorder_demo.cpp>`_
#. The tutorial's code is shown in the lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/copyMakeBorder_demo.cpp>`_
.. code-block:: cpp
......
......@@ -72,7 +72,7 @@ Code
* The filter output (with each kernel) will be shown during 500 milliseconds
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/filter2D_demo.cpp>`_
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/filter2D_demo.cpp>`_
.. code-block:: cpp
......
......@@ -40,9 +40,9 @@ Code
* Display the detected circle in a window.
.. |TutorialHoughCirclesSimpleDownload| replace:: here
.. _TutorialHoughCirclesSimpleDownload: http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/houghlines.cpp
.. _TutorialHoughCirclesSimpleDownload: http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/houghlines.cpp
.. |TutorialHoughCirclesFancyDownload| replace:: here
.. _TutorialHoughCirclesFancyDownload: http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/HoughCircle_Demo.cpp
.. _TutorialHoughCirclesFancyDownload: http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/HoughCircle_Demo.cpp
#. The sample code that we will explain can be downloaded from |TutorialHoughCirclesSimpleDownload|_. A slightly fancier version (which shows both Hough standard and probabilistic with trackbars for changing the threshold values) can be found |TutorialHoughCirclesFancyDownload|_.
......
......@@ -89,9 +89,9 @@ Code
======
.. |TutorialHoughLinesSimpleDownload| replace:: here
.. _TutorialHoughLinesSimpleDownload: http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/houghlines.cpp
.. _TutorialHoughLinesSimpleDownload: http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/houghlines.cpp
.. |TutorialHoughLinesFancyDownload| replace:: here
.. _TutorialHoughLinesFancyDownload: http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/HoughLines_Demo.cpp
.. _TutorialHoughLinesFancyDownload: http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/HoughLines_Demo.cpp
#. **What does this program do?**
......
......@@ -55,7 +55,7 @@ Code
* Applies a Laplacian operator to the grayscale image and stores the output image
* Display the result in a window
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/Laplace_Demo.cpp>`_
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/Laplace_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -59,7 +59,7 @@ Code
* Each second, apply 1 of 4 different remapping processes to the image and display them indefinitely in a window.
* Wait for the user to exit the program
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp>`_
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/Remap_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -121,7 +121,7 @@ Code
* Applies the *Sobel Operator* and generates as output an image with the detected *edges* bright on a darker background.
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/Sobel_Demo.cpp>`_
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/Sobel_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -93,7 +93,7 @@ Code
* Applies a Rotation to the image after being transformed. This rotation is with respect to the image center
* Waits until the user exits the program
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgTrans/Geometric_Transforms_Demo.cpp>`_
#. The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgTrans/Geometric_Transforms_Demo.cpp>`_
.. code-block:: cpp
......
......@@ -111,7 +111,7 @@ Black Hat
Code
======
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgProc/Morphology_2.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgProc/Morphology_2.cpp>`_
.. code-block:: cpp
......
......@@ -80,7 +80,7 @@ Gaussian Pyramid
Code
======
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgProc/Pyramids.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgProc/Pyramids.cpp>`_
.. code-block:: cpp
......
......@@ -21,7 +21,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo1.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo1.cpp>`_
.. code-block:: cpp
......
......@@ -21,7 +21,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo2.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ShapeDescriptors/generalContours_demo2.cpp>`_
.. code-block:: cpp
......
......@@ -19,7 +19,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ShapeDescriptors/findContours_demo.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ShapeDescriptors/findContours_demo.cpp>`_
.. code-block:: cpp
......
......@@ -19,7 +19,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ShapeDescriptors/hull_demo.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ShapeDescriptors/hull_demo.cpp>`_
.. code-block:: cpp
......
......@@ -21,7 +21,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ShapeDescriptors/moments_demo.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ShapeDescriptors/moments_demo.cpp>`_
.. code-block:: cpp
......
......@@ -19,7 +19,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ShapeDescriptors/pointPolygonTest_demo.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ShapeDescriptors/pointPolygonTest_demo.cpp>`_
.. code-block:: cpp
......
......@@ -130,7 +130,7 @@ Threshold to Zero, Inverted
Code
======
The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/ImgProc/Threshold.cpp>`_
The tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/ImgProc/Threshold.cpp>`_
.. code-block:: cpp
......
.. _O4A_SDK:
OpenCV4Android SDK
******************
This tutorial was designed to help you with installation and configuration of OpenCV4Android SDK.
This guide was written with MS Windows 7 in mind, though it should work with GNU Linux and Apple MacOS as well.
This tutorial assumes you have the following installed and configured:
* JDK
* Android SDK and NDK
* Eclipse IDE
* ADT and CDT plugins for Eclipse
..
If you need help with any of the above, you may refer to our :ref:`android_dev_intro` guide.
If you encounter any error after thoroughly following these steps, feel free to contact us via the `OpenCV4Android <https://groups.google.com/group/android-opencv/>`_ discussion group or the OpenCV `Q&A forum <http://answers.opencv.org>`_. We'll do our best to help you out.
General info
============
The OpenCV4Android SDK package enables development of Android applications with the use of the OpenCV library.
The package contents are structured as follows:
::
OpenCV-2.4.2-android-sdk
|_ apk
| |_ OpenCV_2.4.2_binary_pack_XXX.apk
| |_ OpenCV_2.4.2_Manager.apk
|
|_ doc
|_ samples
|_ sdk
| |_ etc
| |_ java
| |_ native
| |_ 3rdparty
| |_ jni
| |_ libs
| |_ armeabi
| |_ armeabi-v7a
| |_ x86
|
|_ license.txt
|_ README.android
* :file:`sdk` folder contains OpenCV API and libraries for Android:
* :file:`sdk/java` folder contains an Android library Eclipse project providing OpenCV Java API that can be imported into developer's workspace;
* :file:`sdk/native` folder contains OpenCV C++ headers (for JNI code) and native Android libraries (\*\.so and \*\.a) for ARM-v5, ARM-v7a and x86 architectures;
* :file:`sdk/etc` folder contains Haar and LBP cascades distributed with OpenCV.
* :file:`apk` folder contains Android packages that should be installed on the target Android device to enable OpenCV library access via the OpenCV Manager API (see details below).
On production devices that have access to the Google Play Market (and internet) these packages will be installed from the Market on the first start of an application using the OpenCV Manager API.
But development kits without Market or internet access require these packages to be installed manually.
(Install the `Manager.apk` and the corresponding `binary_pack.apk` for the device CPU; the Manager GUI provides this info.)
**Note**: installation from the internet is the preferable way, since we may publish updated versions of these packages on the Market.
* :file:`samples` folder contains sample application projects and their prebuilt packages (APKs).
Import them into the Eclipse workspace (as described below) and browse the code to learn possible ways of using OpenCV on Android.
* :file:`doc` folder contains various OpenCV documentation in PDF format.
It's also available online at http://docs.opencv.org.
**Note**: the most recent docs (nightly build) are at http://docs.opencv.org/trunk/.
They are generally more up-to-date, but may refer to not-yet-released functionality.
Starting with version 2.4.2, `OpenCV4Android SDK` uses the `OpenCV Manager` API for library initialization (a minimal initialization sketch is shown at the end of this section). `OpenCV Manager` is an Android service-based solution providing the following benefits for OpenCV application developers:
* Compact APK size, since all applications use the same binaries from the Manager and do not store native libs within themselves;
* Hardware-specific optimizations are automatically enabled on all supported platforms;
* Automatic updates and bug fixes;
* Trusted OpenCV library source. All packages with OpenCV are published on Google Play;
..
For additional information on OpenCV Manager see the:
* |OpenCV4Android_Slides|_
* |OpenCV4Android_Reference|_
..
.. |OpenCV4Android_Slides| replace:: Slides
.. _OpenCV4Android_Slides: https://docs.google.com/a/itseez.com/presentation/d/1EO_1kijgBg_BsjNp2ymk-aarg-0K279_1VZRcPplSuk/present#slide=id.p
.. |OpenCV4Android_Reference| replace:: Reference Manual
.. _OpenCV4Android_Reference: http://docs.opencv.org/android/refman.html
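To give a first impression of what the Manager-based initialization mentioned above looks like in application code, here is a minimal Java sketch. The OpenCV classes and constants (``BaseLoaderCallback``, ``LoaderCallbackInterface``, ``OpenCVLoader``) are the real OpenCV Java API; the activity name and ``TAG`` are placeholders, and imports are omitted for brevity. A complete example is discussed in the :ref:`Android_Binary_Package` tutorial.
.. code-block:: java

    public class MyActivity extends Activity
    {
        private static final String TAG = "MyActivity";

        private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
            @Override
            public void onManagerConnected(int status) {
                if (status == LoaderCallbackInterface.SUCCESS) {
                    // OpenCV Manager has delivered the library;
                    // it is now safe to call OpenCV methods.
                    Log.i(TAG, "OpenCV loaded successfully");
                } else {
                    super.onManagerConnected(status);
                }
            }
        };

        @Override
        public void onCreate(Bundle savedInstanceState)
        {
            super.onCreate(savedInstanceState);
            // Request asynchronous initialization of OpenCV 2.4.2
            // via the OpenCV Manager service.
            OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack);
        }
    }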
Tegra Android Development Pack users
====================================
You may have used `Tegra Android Development Pack <http://developer.nvidia.com/tegra-android-development-pack>`_
(**TADP**) released by **NVIDIA** for Android development environment setup.
Besides the Android development tools, TADP 2.0 includes OpenCV4Android SDK 2.4.2, so it may already be installed on your system and you can skip to running the ``face-detection`` sample.
More details regarding TADP can be found in the :ref:`android_dev_intro` guide.
Manual OpenCV4Android SDK setup
===============================
Get the OpenCV4Android SDK
--------------------------
#. Go to the `OpenCV dowload page on SourceForge <http://sourceforge.net/projects/opencvlibrary/files/opencv-android/>`_ and download the latest available version. Currently it's |opencv_android_bin_pack_url|_
#. Create a new folder for Android with OpenCV development. For this tutorial I have unpacked OpenCV to the :file:`C:\\Work\\OpenCV4Android\\` directory.
.. note:: Better to use a path without spaces in it. Otherwise you may have problems with :command:`ndk-build`.
#. Unpack the OpenCV package into the chosen directory.
You can unpack it using any popular archiver (e.g. with |seven_zip|_):
.. image:: images/android_package_7zip.png
:alt: Exploring OpenCV package with 7-Zip
:align: center
On Unix you can use the following command:
.. code-block:: bash
unzip ~/Downloads/OpenCV-2.4.2-android-sdk.zip
.. |opencv_android_bin_pack| replace:: OpenCV-2.4.2-android-sdk.zip
.. _opencv_android_bin_pack_url: http://sourceforge.net/projects/opencvlibrary/files/opencv-android/2.4.2/OpenCV-2.4.2-android-sdk.zip/download
.. |opencv_android_bin_pack_url| replace:: |opencv_android_bin_pack|
.. |seven_zip| replace:: 7-Zip
.. _seven_zip: http://www.7-zip.org/
Open OpenCV library and samples in Eclipse
------------------------------------------
#. Start *Eclipse* and choose your workspace location.
We recommend starting to work with OpenCV for Android from a new, clean workspace. A new Eclipse workspace can, for example, be created in the folder where you have unpacked the OpenCV4Android SDK package:
.. image:: images/eclipse_1_choose_workspace.png
:alt: Choosing C:\Work\android-opencv\ as workspace location
:align: center
#. Import OpenCV library and samples into workspace.
OpenCV library is packed as a ready-for-use `Android Library Project
<http://developer.android.com/guide/developing/projects/index.html#LibraryProjects>`_. You can simply reference it in your projects.
Each sample included into the |opencv_android_bin_pack| is a regular Android project that already references OpenCV library.
Follow the steps below to import OpenCV and samples into the workspace:
* Right click on the :guilabel:`Package Explorer` window and choose :guilabel:`Import...` option from the context menu:
.. image:: images/eclipse_5_import_command.png
:alt: Select Import... from context menu
:align: center
* In the main panel select :menuselection:`General --> Existing Projects into Workspace` and press :guilabel:`Next` button:
.. image:: images/eclipse_6_import_existing_projects.png
:alt: General > Existing Projects into Workspace
:align: center
* In the :guilabel:`Select root directory` field locate your OpenCV package folder. Eclipse should automatically locate OpenCV library and samples:
.. image:: images/eclipse_7_select_projects.png
:alt: Locate OpenCV library and samples
:align: center
* Click :guilabel:`Finish` button to complete the import operation.
After clicking the :guilabel:`Finish` button, Eclipse will load all selected projects into the workspace. Numerous errors will be indicated:
.. image:: images/eclipse_8_false_alarm.png
:alt: Confusing Eclipse screen with numerous errors
:align: center
However, **all these errors are only false alarms**!
Just give Eclipse a minute to complete initialization.
In some cases these errors disappear after :menuselection:`Project --> Clean... --> Clean all --> OK`
or after pressing :kbd:`F5` (for the Refresh action) with the error-marked projects selected in :guilabel:`Package Explorer`.
Sometimes more advanced manipulations are required:
The provided projects are configured for the ``API 11`` target (and ``API 9`` for the library), which may be a missing platform in your Android SDK.
Right-click on any project, select :guilabel:`Properties` and then :guilabel:`Android` on the left pane.
Select some target with `API Level` 11 or higher:
.. image:: images/eclipse_8a_target.png
:alt: Updating target
:align: center
Eclipse will rebuild your workspace and error icons will disappear one by one:
.. image:: images/eclipse_9_errors_dissapearing.png
:alt: After small help Eclipse removes error icons!
:align: center
Once Eclipse completes the build, you will have a clean workspace without any build errors:
.. image:: images/eclipse_10_crystal_clean.png
:alt: OpenCV package imported into Eclipse
:align: center
.. _Running_OpenCV_Samples:
Running OpenCV Samples
----------------------
At this point you should be able to build and run the samples. Keep in mind that ``face-detection``, ``Tutorial 3`` and ``Tutorial 4`` include some native code and require the Android NDK and the CDT plugin for Eclipse to build working applications.
If you haven't installed these tools, see the corresponding section of :ref:`Android_Dev_Intro`.
Also, please note that the ``Tutorial 0`` and ``Tutorial 1`` samples use the Java Camera API, which is definitely accessible on an emulator from the Android SDK.
Other samples use the OpenCV Native Camera, which may not work with an emulator.
.. note:: Recent *Android SDK tools, revision 19+* can run ARM v7a OS images, but such images are not available for all Android versions.
Well, running samples from Eclipse is very simple:
* Connect your device with :command:`adb` tool from Android SDK or create an emulator with camera support.
* See `Managing Virtual Devices
<http://developer.android.com/guide/developing/devices/index.html>`_ document for help with Android Emulator.
* See `Using Hardware Devices
<http://developer.android.com/guide/developing/device.html>`_ for help with real devices (not emulators).
* Select the project you want to start in :guilabel:`Package Explorer` and just press :kbd:`Ctrl + F11`, or select :menuselection:`Run --> Run` from the main menu, or click the :guilabel:`Run` button on the toolbar.
.. note:: The Android Emulator can take several minutes to start. So, please, be patient.
* On the first run Eclipse will ask you about the running mode for your application:
.. image:: images/eclipse_11_run_as.png
:alt: Run sample as Android Application
:align: center
* Select the :guilabel:`Android Application` option and click :guilabel:`OK` button. Eclipse will install and run the sample.
Chances are that on the first launch you will not have the `OpenCV Manager <https://docs.google.com/a/itseez.com/presentation/d/1EO_1kijgBg_BsjNp2ymk-aarg-0K279_1VZRcPplSuk/present#slide=id.p>`_ package installed.
In this case you will see the following message:
.. image:: images/android_emulator_opencv_manager_fail.png
:alt: You will see this message if you have no OpenCV Manager installed
:align: center
To get rid of the message you will need to install `OpenCV Manager` and the appropriate `OpenCV binary pack`.
Simply tap :menuselection:`Yes` if you have *Google Play Market* installed on your device/emulator. It will redirect you to the corresponding page on *Google Play Market*.
If you have no access to the *Market*, which is often the case with emulators, you will need to install the packages from the OpenCV4Android SDK folder manually. Open a console/terminal and type the following two commands:
.. code-block:: sh
:linenos:
<Android SDK path>/platform-tools/adb install <OpenCV4Android SDK path>/apk/OpenCV_2.4.2_Manager.apk
<Android SDK path>/platform-tools/adb install <OpenCV4Android SDK path>/apk/OpenCV_2.4.2_binary_pack_armv7a.apk
If you're running Windows, that will probably look like this:
.. image:: images/install_opencv_manager_with_adb.png
:alt: Run these commands in the console to install OpenCV Manager
:align: center
When done, you will be able to run OpenCV samples on your device/emulator seamlessly.
* Here is ``Tutorial 2 - Use OpenCV Camera`` sample, running on top of stock camera-preview of the emulator.
.. image:: images/emulator_canny.png
:height: 600px
:alt: Tutorial 1 Basic - 1. Add OpenCV - running Canny
:align: center
What's next
===========
Now that you have your instance of the OpenCV4Android SDK set up and configured, you may want to proceed to using OpenCV in your own application. You can learn how to do that in a separate :ref:`dev_with_OCV_on_Android` tutorial.
\ No newline at end of file
.. _Android_Binary_Package:
Using Android binary package with Eclipse
*****************************************
This tutorial was tested using Ubuntu 10.04 and Windows 7 SP1 operating systems.
Nevertheless, it should also work on any other **OS**\ es supported by Android SDK (including Mac OS X).
If you encounter errors after following the steps described here, feel free to contact us via `OpenCV4Android <https://groups.google.com/group/android-opencv/>`_ discussion group or OpenCV `Q&A forum <http://answers.opencv.org>`_ and we will try to help you.
Quick environment setup for Android development
===============================================
If you are making a clean environment installation then you can try `Tegra Android Development Pack <http://developer.nvidia.com/tegra-android-development-pack>`_
(**TADP**) released by **NVIDIA**:
It will cover all of the environment setup automatically, and you can go to the next step, :ref:`Get_the_OpenCV_package_for_Android_development`, right after the automatic setup.
If you are a beginner in Android development then we recommend you start with TADP.
.. note:: *NVIDIA*\ 's Tegra Android Development Pack includes some special features for |Nvidia_Tegra_Platform|_ but it is not just for *Tegra* devices.
+ You need at least *1.6 GB* of free disk space for the installation.
+ TADP will download Android SDK platforms and Android NDK from Google's server, so you need an Internet connection for the installation.
+ TADP can ask you to flash your development kit at the end of the installation process. Just skip this step if you have no |Tegra_Ventana_Development_Kit|_\ .
+ (``UNIX``) TADP will ask you for *root* in the middle of the installation, so you need to be a member of the *sudo* group.
..
.. |Nvidia_Tegra_Platform| replace:: *NVIDIA*\ ’s Tegra platform
.. _Nvidia_Tegra_Platform: http://developer.nvidia.com/node/19071
.. |Tegra_Ventana_Development_Kit| replace:: Tegra Ventana Development Kit
.. _Tegra_Ventana_Development_Kit: http://developer.nvidia.com/tegra-ventana-development-kit
.. _Android_Environment_Setup_Lite:
Manual environment setup for Android Development
================================================
You need the following tools to be installed:
#. **Sun JDK 6**
Visit `Java SE Downloads page <http://www.oracle.com/technetwork/java/javase/downloads/>`_ and download installer for your OS.
Here is a detailed :abbr:`JDK (Java Development Kit)` `installation guide <http://source.android.com/source/initializing.html#installing-the-jdk>`_
for Ubuntu and Mac OS (only JDK sections are applicable for OpenCV)
.. note:: OpenJDK is not usable for Android development because the Android SDK supports only the Sun JDK.
If you use Ubuntu, after installing the Sun JDK you should run the following command to set the Sun Java environment:
.. code-block:: bash
sudo update-java-alternatives --set java-6-sun
#. **Android SDK**
Get the latest ``Android SDK`` from http://developer.android.com/sdk/index.html
Here is Google's `install guide <http://developer.android.com/sdk/installing.html>`_ for SDK.
.. note:: If you choose the SDK packed into a Windows installer, then you should have a 32-bit JRE installed. It is not needed for Android development, but the installer is an x86 application and requires a 32-bit Java runtime.
.. note:: If you are running the x64 version of Ubuntu Linux, then you need the ia32 shared libraries (for use on amd64 and ia64 systems) installed. You can install them with the following command:
.. code-block:: bash
sudo apt-get install ia32-libs
For Red Hat based systems the following command might be helpful:
.. code-block:: bash
sudo yum install libXtst.i386
#. **Android SDK components**
You need the following SDK components to be installed:
* *Android SDK Tools, revision 14* or newer
Older revisions should also work, but they are not recommended.
* *SDK Platform Android 3.0, API 11* (also known as *android-11*)
The minimal platform supported by OpenCV Java API is **Android 2.2** (API 8). This is also the minimum API Level required for the provided samples to run.
See the ``<uses-sdk android:minSdkVersion="8"/>`` tag in their **AndroidManifest.xml** files.
But for successful compilation of some samples the **target** platform should be set to Android 3.0 (API 11) or higher. It will not block them from running on Android 2.2+.
.. image:: images/android_sdk_and_avd_manager.png
:height: 500px
:alt: Android SDK Manager
:align: center
See `Adding SDK Components <http://developer.android.com/sdk/adding-components.html>`_ for help with installing/updating SDK components.
#. **Eclipse IDE**
Check the `Android SDK System Requirements <http://developer.android.com/sdk/requirements.html>`_ document for a list of Eclipse versions that are compatible with the Android SDK.
For OpenCV 2.4.x we recommend Eclipse 3.7 (Indigo) or later versions. They work well for OpenCV under both Windows and Linux.
If you have no Eclipse installed, you can get it from the `download page <http://www.eclipse.org/downloads/>`_.
#. **ADT plugin for Eclipse**
These instructions are copied from the `Android Developers site <http://developer.android.com/sdk/eclipse-adt.html>`_.
Please visit `that page <http://developer.android.com/sdk/eclipse-adt.html#downloading>`_ if you have any trouble with the :abbr:`ADT(Android Development Tools)` plugin installation.
Assuming that you have Eclipse IDE installed, as described above, follow these steps to download and install the ADT plugin:
#. Start Eclipse, then select :menuselection:`Help --> Install New Software...`
#. Click :guilabel:`Add` (in the top-right corner).
#. In the :guilabel:`Add Repository` dialog that appears, enter "ADT Plugin" for the Name and the following URL for the Location:
https://dl-ssl.google.com/android/eclipse/
#. Click :guilabel:`OK`
.. note:: If you have trouble acquiring the plugin, try using "http" in the Location URL, instead of "https" (https is preferred for security reasons).
#. In the :guilabel:`Available Software` dialog, select the checkbox next to :guilabel:`Developer Tools` and click :guilabel:`Next`.
#. In the next window, you'll see a list of the tools to be downloaded. Click :guilabel:`Next`.
#. Read and accept the license agreements, then click :guilabel:`Finish`.
.. note:: If you get a security warning saying that the authenticity or validity of the software can't be established, click :guilabel:`OK`.
#. When the installation completes, restart Eclipse.
.. _Get_the_OpenCV_package_for_Android_development:
Get the OpenCV package for Android development
==============================================
#. Go to the `OpenCV download page on SourceForge <http://sourceforge.net/projects/opencvlibrary/files/opencv-android/>`_ and download the latest available version. Currently it is |opencv_android_bin_pack_url|_
#. Create a new folder for Android+OpenCV development. For this tutorial I have unpacked OpenCV to the :file:`C:\\Work\\android-opencv\\` directory.
.. note:: It is better to use a path without spaces in it. Otherwise you will probably have problems with :command:`ndk-build`.
#. Unpack the OpenCV package into that directory.
You can unpack it using any popular archiver (for example with |seven_zip|_):
.. image:: images/android_package_7zip.png
:alt: Exploring OpenCV package with 7-Zip
:align: center
On Unix you can use the following command:
.. code-block:: bash
unzip ~/Downloads/OpenCV-2.4.2-android-sdk.zip
.. |opencv_android_bin_pack| replace:: OpenCV-2.4.2-android-sdk.zip
.. _opencv_android_bin_pack_url: http://sourceforge.net/projects/opencvlibrary/files/opencv-android/2.4.2/OpenCV-2.4.2-android-sdk.zip/download
.. |opencv_android_bin_pack_url| replace:: |opencv_android_bin_pack|
.. |seven_zip| replace:: 7-Zip
.. _seven_zip: http://www.7-zip.org/
Open OpenCV library and samples in Eclipse
==========================================
#. Start *Eclipse* and choose your workspace location.
I recommend starting to familiarize yourself with OpenCV for Android from a new, clean workspace, so I have chosen my OpenCV package directory for the new workspace:
.. image:: images/eclipse_1_choose_workspace.png
:alt: Choosing C:\Work\android-opencv\ as workspace location
:align: center
#. Configure your ADT plugin (if needed)
.. important:: In most cases the ADT plugin finds Android SDK automatically, but sometimes it fails and shows the following prompt:
.. image:: images/eclipse_1a_locate_sdk.png
:alt: Locating Android SDK
:align: center
Select :guilabel:`Use existing SDKs` option, browse for Android SDK folder and click :guilabel:`Finish`.
To make sure the SDK folder is set correctly do the following step taken from `Configuring the ADT Plugin <http://developer.android.com/sdk/eclipse-adt.html#configuring>`_ document from *Google*:
* Select :menuselection:`Window --> Preferences...` to open the Preferences panel (Mac OS X: :menuselection:`Eclipse --> Preferences`):
.. image:: images/eclipse_2_window_preferences.png
:alt: Select Window > Preferences...
:align: center
* Select :guilabel:`Android` from the left panel.
You may see a dialog asking whether you want to send usage statistics to *Google*. If so, make your choice and click :guilabel:`Proceed`. You cannot continue with this procedure until you click :guilabel:`Proceed`.
If the SDK folder isn't set you'll see the following:
.. image:: images/eclipse_3_preferences_android.png
:alt: Select Android from the left panel
:align: center
* For the SDK Location in the main panel, click :guilabel:`Browse...` and locate your Android SDK directory.
* Click :guilabel:`Apply` button at the bottom right corner of main panel.
If the SDK folder is already set correctly you'll see something like this:
.. image:: images/eclipse_4_locate_sdk.png
:alt: Locate Android SDK
:align: center
* Click :guilabel:`OK` to close preferences dialog.
#. Import OpenCV and samples into workspace.
OpenCV library is packed as a ready-for-use `Android Library Project
<http://developer.android.com/guide/developing/projects/index.html#LibraryProjects>`_. You can simply reference it in your projects.
Each sample included into the |opencv_android_bin_pack| is a regular Android project that already references OpenCV library.
Follow the steps below to import OpenCV and samples into the workspace:
* Right click on the :guilabel:`Package Explorer` window and choose :guilabel:`Import...` option from the context menu:
.. image:: images/eclipse_5_import_command.png
:alt: Select Import... from context menu
:align: center
* In the main panel select :menuselection:`General --> Existing Projects into Workspace` and press :guilabel:`Next` button:
.. image:: images/eclipse_6_import_existing_projects.png
:alt: General > Existing Projects into Workspace
:align: center
* For :guilabel:`Select root directory` in the main panel, locate your OpenCV package folder. (If you have created the workspace in the package directory, just click the :guilabel:`Browse...` button and immediately close the directory chooser dialog with the :guilabel:`OK` button!) Eclipse should automatically locate the OpenCV library and samples:
.. image:: images/eclipse_7_select_projects.png
:alt: Locate OpenCV library and samples
:align: center
* Click :guilabel:`Finish` button to complete the import operation.
After clicking the :guilabel:`Finish` button, Eclipse will load all selected projects into the workspace. And... will indicate numerous errors:
.. image:: images/eclipse_8_false_alarm.png
:alt: Confusing Eclipse screen with numerous errors
:align: center
However, **all these errors are only false alarms**!
To help Eclipse understand that there are no errors, select the OpenCV library in :guilabel:`Package Explorer` (left mouse click) and press the :kbd:`F5` key. Then select any sample (except the first samples in *Tutorial Base* and *Tutorial Advanced*) and press :kbd:`F5` again.
In some cases these errors disappear after :menuselection:`Project --> Clean... --> Clean all --> OK`.
Sometimes more advanced manipulations are needed:
* The provided projects are configured for the `android-11` target, which may be a missing platform in your Android SDK. Right-click on any project, select :guilabel:`Properties` and then :guilabel:`Android` on the left pane. Select some target with `API Level` 11 or higher:
.. image:: images/eclipse_8a_target.png
:alt: Updating target
:align: center
After this manipulation Eclipse will rebuild your workspace and error icons will disappear one after another:
.. image:: images/eclipse_9_errors_dissapearing.png
:alt: After small help Eclipse removes error icons!
:align: center
Once Eclipse completes the build, you will have a clean workspace without any build errors:
.. image:: images/eclipse_10_crystal_clean.png
:alt: OpenCV package imported into Eclipse
:align: center
Running OpenCV Samples
======================
At this point you should be able to build and run all samples except the last two (Tutorial 3 and 4). These samples include native code and require the Android NDK to build working applications; see the next tutorial, :ref:`Android_Binary_Package_with_NDK`, to learn how to compile them.
Also note that only the ``Tutorial 0 - Android Camera`` and ``Tutorial 1 - Add OpenCV`` samples are able to run on the Emulator from the Android SDK. The other samples use the OpenCV Native Camera, which does not work with the emulator.
.. note:: The latest *Android SDK tools, revision 19* can run ARM v7a OS images, but *Google* provides such images for Android 4.x only.
Well, running samples from Eclipse is very simple:
* Connect your device with :command:`adb` tool from Android SDK or create Emulator with camera support.
* See `Managing Virtual Devices
<http://developer.android.com/guide/developing/devices/index.html>`_ document for help with Android Emulator.
* See `Using Hardware Devices
<http://developer.android.com/guide/developing/device.html>`_ for help with real devices (not emulators).
* Select the project you want to start in :guilabel:`Package Explorer` and just press :kbd:`Ctrl + F11`, or select :menuselection:`Run --> Run` from the main menu, or click the :guilabel:`Run` button on the toolbar.
.. note:: The Android Emulator can take several minutes to start. So, please, be patient.
* On the first run Eclipse will ask you about the running mode for your application:
.. image:: images/eclipse_11_run_as.png
:alt: Run sample as Android Application
:align: center
* Select the :guilabel:`Android Application` option and click :guilabel:`OK` button. Eclipse will install and run the sample.
Here is ``Tutorial 1 - Add OpenCV`` sample detecting edges using Canny algorithm from OpenCV:
.. image:: images/emulator_canny.png
:height: 600px
:alt: Tutorial 1 Basic - 1. Add OpenCV - running Canny
:align: center
How to use OpenCV library project in your application
=====================================================
In this section we will explain how to make an existing application use OpenCV.
Starting from the 2.4.2 release on Android, the so-called "OpenCV Manager" is used to provide applications with the best available version of OpenCV.
You can get more information here: :ref:`Android_OpenCV_Manager`.
Application development with async initialization
-------------------------------------------------
Using async initialization is the preferred way for application development. It uses the OpenCV Manager to access the OpenCV libraries.
#. Add OpenCV library project to your workspace. Use menu :guilabel:`File –> Import –> Existing project in your workspace`,
push :guilabel:`Browse` button and select OpenCV SDK path (:file:`OpenCV-2.4.2-android-sdk/sdk`).
.. image:: images/eclipse_opencv_dependency0.png
:alt: Add dependency from OpenCV library
:align: center
#. In the application project add a reference to the OpenCV Java SDK: in :guilabel:`Project –> Properties –> Android –> Library –> Add` select ``OpenCV Library - 2.4.2``.
.. image:: images/eclipse_opencv_dependency1.png
:alt: Add dependency from OpenCV library
:align: center
To use the OpenCV Manager-based approach you need to install the packages with the `Manager` and the `OpenCV binary package` for your platform.
You can do it using the Google Play Market or manually with the ``adb`` tool:
.. code-block:: sh
:linenos:
adb install ./org.opencv.engine.apk
adb install ./org.opencv.lib_v24_<hardware version>.apk
Below is a very basic code snippet implementing the async initialization. It shows the basic principles. See the "15-puzzle" OpenCV sample for details.
.. code-block:: java
:linenos:
public class MyActivity extends Activity implements HelperCallbackInterface
{
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState)
{
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
// ...
}
In this case the application works with OpenCV Manager in an asynchronous fashion. The ``onManagerConnected`` callback will be called in the UI thread when the initialization finishes.
Please note that it is not allowed to use OpenCV calls or load OpenCV-dependent native libs before this callback is invoked.
Load your own native libraries that depend on OpenCV only after a successful OpenCV initialization, as sketched below.
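For instance, a hypothetical native library ``my_jni_lib1`` (the name is just an example, as in the static initialization snippet below) could be loaded in the ``SUCCESS`` branch of the callback; this is a minimal sketch, not taken from the SDK samples:
.. code-block:: java

    private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
        @Override
        public void onManagerConnected(int status) {
            switch (status) {
                case LoaderCallbackInterface.SUCCESS:
                {
                    // OpenCV is initialized; only now load your own
                    // OpenCV-dependent native libraries.
                    System.loadLibrary("my_jni_lib1");
                } break;
                default:
                {
                    super.onManagerConnected(status);
                } break;
            }
        }
    };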
Application development with static initialization
--------------------------------------------------
With this approach all OpenCV binaries are included in your application package. It is designed mostly for development purposes.
This approach is deprecated for production code; a release package is recommended to communicate with OpenCV Manager via the async initialization described above.
#. Add the OpenCV library project to your workspace the same way as for the async initialization above.
Use menu :guilabel:`File –> Import –> Existing project in your workspace`, push :guilabel:`Browse` button and select OpenCV SDK path (:file:`OpenCV-2.4.2-android-sdk/sdk`).
.. image:: images/eclipse_opencv_dependency0.png
:alt: Add dependency from OpenCV library
:align: center
#. In the application project add a reference to the OpenCV Java SDK: in :guilabel:`Project –> Properties –> Android –> Library –> Add` select ``OpenCV Library - 2.4.2``;
.. image:: images/eclipse_opencv_dependency1.png
:alt: Add dependency from OpenCV library
:align: center
#. If your application project **doesn't have a JNI part**, just copy the OpenCV native libs to the :file:`libs/target_arch/` folder of your project directory.
If your application project **has a JNI part**, instead of copying the libraries manually you need to modify your ``Android.mk`` file:
add the following two lines after ``"include $(CLEAR_VARS)"`` and before ``"include path_to_OpenCV-2.4.2-android-sdk/sdk/native/jni/OpenCV.mk"``:
.. code-block:: make
:linenos:
OPENCV_CAMERA_MODULES:=on
OPENCV_INSTALL_MODULES:=on
The result should look like the following:
.. code-block:: make
:linenos:
include $(CLEAR_VARS)
# OpenCV
OPENCV_CAMERA_MODULES:=on
OPENCV_INSTALL_MODULES:=on
include ../../sdk/native/jni/OpenCV.mk
After that the OpenCV libraries will be copied to your application's :file:`libs` folder during the JNI part of the build.
Eclipse will automatically include all the libraries from the :file:`libs` folder in the application package (APK).
#. The last step of enabling OpenCV in your application is adding Java initialization code before any call to the OpenCV API.
It can be done, for example, in the static section of the ``Activity`` class:
.. code-block:: java
:linenos:
static {
if (!OpenCVLoader.initDebug()) {
// Handle initialization error
}
}
If your application includes other OpenCV-dependent native libraries, you should load them **after** OpenCV initialization:
.. code-block:: java
:linenos:
static {
if (!OpenCVLoader.initDebug()) {
// Handle initialization error
} else {
System.loadLibrary("my_jni_lib1");
System.loadLibrary("my_jni_lib2");
}
}
What's next?
============
Read the :ref:`Android_Binary_Package_with_NDK` tutorial to learn how to add native OpenCV code to your Android project.
.. _Android_Binary_Package_with_NDK:
Using C++ OpenCV code with Android binary package
*************************************************
The Android way is to write all your code in Java. But sometimes this is not enough, and you need to go down to the native level and write some parts of your application in C/C++.
This is especially important when you already have some computer vision code which is written in C++ and uses OpenCV, and you want to reuse it in your Android application,
but do not want to rewrite the C++ code in Java.
In this case the only way is to use JNI, a Java framework for interaction with native code.
It means that you should add a Java class with native methods exposing your C++ functionality to the Java part of your Android application, as sketched below.
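As an illustration only (the class, method and library names here are placeholders, not taken from the SDK samples), such a wrapper class might look like the following minimal sketch: a native method is declared in Java and implemented in C++ in the :file:`jni` folder, and the compiled library is loaded with ``System.loadLibrary``:
.. code-block:: java

    // Hypothetical example: "my_jni_lib1" must match the LOCAL_MODULE
    // name of the library built by ndk-build (see Android.mk below).
    public class NativeProcessor
    {
        static {
            System.loadLibrary("my_jni_lib1");
        }

        // Implemented in C++; for example it could run an OpenCV
        // algorithm on the image data referenced by matAddr.
        public static native void processFrame(long matAddr);
    }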
This tutorial describes a fast way to create and build Android applications containing OpenCV code written in C++. It shows how to build an application which uses OpenCV inside its JNI calls. Tutorials 3 and 4 from the OpenCV for Android SDK can be used as examples. The OpenCV sample "face-detect" also contains a call to a C++ class.
Please note that before starting this tutorial you should complete all the steps described in the :ref:`Android_Binary_Package` tutorial.
This tutorial was tested using Ubuntu 10.04 and Windows 7 SP1 operating systems.
Nevertheless, it should also work on Mac OS X.
If you encounter errors after following the steps described here, feel free to contact us via
`OpenCV4Android <https://groups.google.com/group/android-opencv/>`_ discussion group or
OpenCV `Q&A forum <http://answers.opencv.org>`_ and we will try to help you.
Prerequisites: Setup Android NDK
================================
To compile C++ code for the Android platform you need the ``Android Native Development Kit`` (*NDK*).
You can get the latest version of the NDK from the `download page <http://developer.android.com/sdk/ndk/index.html>`_. To install the Android NDK, just extract the archive to some folder on your computer. Here are the `installation instructions <http://developer.android.com/sdk/ndk/index.html#installing>`_.
.. note:: Before you start, you can read the official Android NDK documentation, which is in the Android NDK archive, in the folder :file:`docs/`.
The main article about using the Android NDK build system is in the :file:`ANDROID-MK.html` file.
Some additional information can be found in the :file:`APPLICATION-MK.html` and :file:`NDK-BUILD.html` files, and in :file:`CPU-ARM-NEON.html`, :file:`CPLUSPLUS-SUPPORT.html` and :file:`PREBUILTS.html`.
Theory: Android application structure
=====================================
Usually the code of an Android application has the following structure:
+ :file:`root folder of the project/`
- :file:`jni/`
- :file:`libs/`
- :file:`res/`
- :file:`src/`
- :file:`AndroidManifest.xml`
- :file:`project.properties`
- :file:`... other files ...`
where
+ the :file:`src` folder contains Java code of the application,
+ the :file:`res` folder contains resources of the application (images, XML files describing the UI layout, etc.),
+ the :file:`libs` folder will contain native libraries after successful build,
+ and the :file:`jni` folder contains C/C++ application source code and NDK's build scripts :file:`Android.mk` and :file:`Application.mk`.
These scripts control the C++ build process (they are written in Makefile language).
Also the root folder should contain the following files:
* The :file:`AndroidManifest.xml` file presents essential information about the application to the Android system
(name of the application, name of the main application package, components of the application, required permissions, etc.).
It can be created using the Eclipse wizard or the :command:`android` tool from the Android SDK.
* :file:`project.properties` is a text file containing information about the target Android platform and other build details.
This file is generated by Eclipse or can be created with the :command:`android` tool from the Android SDK.
.. note:: Both files (:file:`AndroidManifest.xml` and :file:`project.properties`) are required to compile the C++ part of the application (NDK build system uses information from these files). If any of these files does not exist, compile the Java part of the project before the C++ part.
.. _NDK_build_cli:
Theory: Building application with C++ native part from command line
===================================================================
Here is the standard way to compile the C++ part of an Android application:
#. Open a console and go to the root folder of the Android application:
.. code-block:: bash
cd <root folder of the project>/
.. note:: Alternatively you can go to the :file:`jni` folder of the Android project. But the samples from the OpenCV binary package are configured to be built from the project root level (because of the relative path to the OpenCV library).
#. Run the following command:
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build
.. note:: On Windows we recommend using ``ndk-build.cmd`` in the standard Windows console (``cmd.exe``) rather than the similar ``bash`` script in a ``Cygwin`` shell.
.. image:: images/ndk_build.png
:alt: NDK build
:align: center
#. After executing this command the C++ part of the source code is compiled.
After that the Java part of the application can be (re)compiled (using either *Eclipse* or the :command:`ant` build tool).
.. note:: Some parameters can be passed to :command:`ndk-build`:
**Example 1**: Verbose compilation
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build V=1
**Example 2**: Rebuild all
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build -B
.. _Android_NDK_integration_with_Eclipse:
Theory: Building application with C++ native part from *Eclipse*
================================================================
There are several possible ways to integrate the compilation of C++ code by the Android NDK into the Eclipse build process.
We recommend the approach based on the Eclipse :abbr:`CDT(C/C++ Development Tooling)` Builder.
.. important:: Make sure your Eclipse IDE has the :abbr:`CDT(C/C++ Development Tooling)` plugin installed. Open the menu :guilabel:`Help -> About Eclipse SDK` and push the :guilabel:`Installation Details` button.
.. image:: images/eclipse_inst_details.png
:alt: Configure builders
:align: center
To install the `CDT plugin <http://eclipse.org/cdt/>`_ use menu :guilabel:`Help -> Install New Software...`,
then paste the CDT 8.0 repository URL http://download.eclipse.org/tools/cdt/releases/indigo as shown in the picture below and click :guilabel:`Add...`, name it *CDT* and click :guilabel:`OK`.
.. image:: images/eclipse_inst_cdt.png
:alt: Configure builders
:align: center
``CDT Main Features`` should be enough:
.. image:: images/eclipse_inst_cdt_2.png
:alt: Configure builders
:align: center
.. important:: The OpenCV for Android 2.4.2 package contains sample projects pre-configured to use the CDT Builder. It automatically builds the JNI part via ``ndk-build``.
#. Define the ``NDKROOT`` environment variable containing the path to Android NDK in your system (e.g. **"X:\\Apps\\android-ndk-r8"** or **"/opt/android-ndk-r8"**).
#. | CDT Builder is already configured for **Windows** hosts,
| on **Linux** or **MacOS** a small modification is required:
Open `Project Properties` of the projects having JNI part (`face-detection`, `Tutorial 3` and `Tutorial 4`),
select :guilabel:`C/C++ Build` in the left pane,
remove **".cmd"** and leave ``"${NDKROOT}/ndk-build"`` in the :guilabel:`Build command` edit box and click :guilabel:`OK`.
.. image:: images/eclipse_cdt_cfg4.png
:alt: Configure CDT
:align: center
#. Use menu :guilabel:`Project` -> :guilabel:`Clean...` to make sure that NDK build is invoked on the project build:
.. image:: images/eclipse_ndk_build.png
:alt: Select resources folder to refresh automatically
:align: center
Theory: The structure of :file:`Android.mk` and :file:`Application.mk` scripts
==============================================================================
The script :file:`Android.mk` usually has the following structure:
.. code-block:: make
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := <module_name>
LOCAL_SRC_FILES := <list of .c and .cpp project files>
<some variable name> := <some variable value>
...
<some variable name> := <some variable value>
include $(BUILD_SHARED_LIBRARY)
This is a minimal :file:`Android.mk` file, which builds the C++ source code of an Android application. Note that the first two lines and the last line are mandatory for any :file:`Android.mk`.
Usually the file :file:`Application.mk` is optional, but for a project using OpenCV, where STL and exceptions are used in C++, it should also be written. Example of the file :file:`Application.mk`:
.. code-block:: make
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a
Practice: Build samples from OpenCV binary package
==================================================
The OpenCV binary package includes three samples having JNI resources:
* *Tutorial 3 (Advanced) - Add Native OpenCV*
This sample illustrates how you can use OpenCV in C++ but without OpenCV Java API.
* *Tutorial 4 (Advanced) - Mix Java+Native OpenCV*
This sample shows how you can mix OpenCV Java API and native C++ code.
* *Sample - face-detection*
This sample illustrates the usage of both the simple OpenCV face detector via the Java API and an advanced detection-based face tracker via JNI and C++.
.. important:: Before OpenCV **2.4.2** for Android these projects were not configured to use CDT for building their native part, so you can do it yourself.
Practice: Create an Android application, which uses OpenCV
==========================================================
To build your own Android application that uses OpenCV from its native part, the following steps should be done:
#. The archive with the OpenCV binary package should be downloaded and extracted to some folder (e.g. ``C:\Work\android-opencv\OpenCV-2.4.0``).
#. You can use an environment variable to specify the location of the OpenCV package or just hardcode a full or relative path in the :file:`jni/Android.mk` of your projects.
#. The file :file:`jni/Android.mk` should be written for the current application using the common rules for the file.
For detailed information see the Android NDK documentation from the Android NDK archive, in the file
:file:`<path_where_NDK_is_placed>/docs/ANDROID-MK.html`
#. The line
.. code-block:: make
include C:\Work\android-opencv\OpenCV-2.4.0\share\OpenCV\OpenCV.mk
should be inserted into the :file:`jni/Android.mk` file **after** the line
.. code-block:: make
include $(CLEAR_VARS)
#. Several variables can be used to customize the OpenCV-related build, but you **don't need** to use them when your application uses `async initialization` via the `OpenCV Manager` API.
Note: these variables should be set **before** the ``"include .../OpenCV.mk"`` line:
.. code-block:: make
OPENCV_INSTALL_MODULES:=on
Copies necessary OpenCV dynamic libs to the project ``libs`` folder in order to include them into the APK.
.. code-block:: make
OPENCV_CAMERA_MODULES:=off
Skips copying the native OpenCV camera-related libs to the project ``libs`` folder.
.. code-block:: make
OPENCV_LIB_TYPE:=STATIC
Performs a static link with OpenCV. By default a dynamic link is used and the project JNI lib depends on ``libopencv_java.so``.
#. The file :file:`Application.mk` should exist and should contain the following lines:
.. code-block:: make
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
Also the line
.. code-block:: make
APP_ABI := armeabi-v7a
is recommended for applications targeting modern ARM processors.
#. Either use a :ref:`manual <NDK_build_cli>` ``ndk-build`` invocation or :ref:`set up the Eclipse CDT Builder <Android_NDK_integration_with_Eclipse>` to build the native JNI lib before the Java part is [re]built and the APK is created.
.. _Android_Dev_Intro:
Introduction into Android Development
*************************************
This guide was designed to help you learn Android development basics and set up your working environment quickly.
This guide was written with Windows 7 in mind, though it should also work with Linux (Ubuntu), Mac OS X and any other OS supported by the Android SDK.
If you encounter any error after thoroughly following these steps, feel free to contact us via `OpenCV4Android <https://groups.google.com/group/android-opencv/>`_ discussion group or OpenCV `Q&A forum <http://answers.opencv.org>`_. We'll do our best to help you out.
Preface
=======
Android is a Linux-based, open source mobile operating system developed by Open Handset Alliance led by Google. See the `Android home site <http://www.android.com/about/>`_ for general details.
Development for Android significantly differs from development for other platforms.
So before starting to program for Android, we recommend you make sure that you are familiar with the following key topics:
#. The `Java <http://en.wikipedia.org/wiki/Java_(programming_language)>`_ programming language, which is the primary development technology for the Android OS. Also, you can find the `Oracle docs on Java <http://docs.oracle.com/javase/>`_ useful.
#. The `Java Native Interface (JNI) <http://en.wikipedia.org/wiki/Java_Native_Interface>`_, a technology for running native code in the Java virtual machine. Also, you can find the `Oracle docs on JNI <http://docs.oracle.com/javase/7/docs/technotes/guides/jni/>`_ useful.
#. The `Android Activity <http://developer.android.com/training/basics/activity-lifecycle/starting.html>`_ class and its lifecycle, an essential part of the Android API.
#. OpenCV development will certainly require some knowledge of the `Android Camera <http://developer.android.com/guide/topics/media/camera.html>`_ specifics.
Quick environment setup for Android development
===============================================
If you are making a clean environment install, then you can try `Tegra Android Development Pack <http://developer.nvidia.com/mobile/tegra-android-development-pack>`_
(**TADP**) released by **NVIDIA**.
When unpacked, TADP will cover all of the environment setup automatically and you can skip the rest of the guide.
If you are a beginner in Android development then we also recommend you start with TADP.
.. note:: *NVIDIA*\ 's Tegra Android Development Pack includes some special features for |Nvidia_Tegra_Platform|_ but its use is not limited to *Tegra* devices only.
* You need at least *1.6 GB* of free disk space for the install.
* TADP will download Android SDK platforms and Android NDK from Google's server, so Internet connection is required for the installation.
* TADP may ask you to flash your development kit at the end of the installation process. Just skip this step if you have no |Tegra_Development_Kit|_\ .
* (``UNIX``) TADP will ask you for *root* in the middle of the installation, so you need to be a member of the *sudo* group.
..
.. |Nvidia_Tegra_Platform| replace:: *NVIDIA*\ ’s Tegra platform
.. _Nvidia_Tegra_Platform: http://www.nvidia.com/object/tegra-3-processor.html
.. |Tegra_Development_Kit| replace:: Tegra Development Kit
.. _Tegra_Development_Kit: http://developer.nvidia.com/mobile/tegra-hardware-sales-inquiries
.. _Android_Environment_Setup_Lite:
Manual environment setup for Android development
================================================
Development in Java
-------------------
You need the following software to be installed in order to develop for Android in Java:
#. **Sun JDK 6**
Visit `Java SE Downloads page <http://www.oracle.com/technetwork/java/javase/downloads/>`_ and download an installer for your OS.
Here is a detailed :abbr:`JDK (Java Development Kit)` `installation guide <http://source.android.com/source/initializing.html#installing-the-jdk>`_
for Ubuntu and Mac OS (only the JDK sections are relevant for OpenCV).
.. note:: OpenJDK is not suitable for Android development, since the Android SDK supports only the Sun JDK.
If you use Ubuntu, run the following command after installing the Sun JDK to make it the default Java environment:
.. code-block:: bash
sudo update-java-alternatives --set java-6-sun
.. **TODO:** add a note on Sun/Oracle Java installation on Ubuntu 12.
#. **Android SDK**
Get the latest ``Android SDK`` from http://developer.android.com/sdk/index.html
Here is Google's `install guide <http://developer.android.com/sdk/installing.html>`_ for the SDK.
.. note:: If you choose the SDK packaged as a Windows installer, you should have a 32-bit JRE installed. It is not a prerequisite for Android development, but the installer is an x86 application and requires a 32-bit Java runtime.
.. note:: If you are running a 64-bit version of Ubuntu Linux, you need the ia32 shared libraries (32-bit compatibility libraries) installed. You can install them with the following command:
.. code-block:: bash
sudo apt-get install ia32-libs
For Red Hat based systems the following command might be helpful:
.. code-block:: bash
sudo yum install libXtst.i386
#. **Android SDK components**
You need the following SDK components to be installed:
* *Android SDK Tools, revision 14* or newer.
Older revisions should also work, but they are not recommended.
* *SDK Platform Android 3.0*, ``API 11`` and *Android 2.3.1*, ``API 9``.
The minimum platform supported by the OpenCV Java API is **Android 2.2** (``API 8``). This is also the minimum API level required for the provided samples to run.
See the ``<uses-sdk android:minSdkVersion="8"/>`` tag in their **AndroidManifest.xml** files.
However, for successful compilation of some samples the **target** platform should be set to Android 3.0 (API 11) or higher. This will not prevent them from running on Android 2.2.
.. image:: images/android_sdk_and_avd_manager.png
:height: 500px
:alt: Android SDK Manager
:align: center
See `Adding Platforms and Packages <http://developer.android.com/sdk/installing/adding-packages.html>`_ for help with installing/updating SDK components.
#. **Eclipse IDE**
Check the `Android SDK System Requirements <http://developer.android.com/sdk/requirements.html>`_ document for a list of Eclipse versions that are compatible with the Android SDK.
For OpenCV 2.4.x we recommend **Eclipse 3.7 (Indigo)** or later. These versions work well for OpenCV under both Windows and Linux.
If you do not have Eclipse installed, you can get it from the `official site <http://www.eclipse.org/downloads/>`_.
#. **ADT plugin for Eclipse**
These instructions are copied from the `Android Developers site <http://developer.android.com/sdk/installing/installing-adt.html>`_; refer to it in case of any ADT-related problems.
Assuming that you have Eclipse IDE installed, as described above, follow these steps to download and install the ADT plugin:
#. Start Eclipse, then select :menuselection:`Help --> Install New Software...`
#. Click :guilabel:`Add` (in the top-right corner).
#. In the :guilabel:`Add Repository` dialog that appears, enter "ADT Plugin" for the Name and the following URL for the Location:
https://dl-ssl.google.com/android/eclipse/
#. Click :guilabel:`OK`
.. note:: If you have trouble acquiring the plugin, try using "http" in the Location URL, instead of "https" (https is preferred for security reasons).
#. In the :guilabel:`Available Software` dialog, select the checkbox next to :guilabel:`Developer Tools` and click :guilabel:`Next`.
#. In the next window, you'll see a list of the tools to be downloaded. Click :guilabel:`Next`.
#. Read and accept the license agreements, then click :guilabel:`Finish`.
.. note:: If you get a security warning saying that the authenticity or validity of the software can't be established, click :guilabel:`OK`.
#. When the installation completes, restart Eclipse.
Native development in C++
-------------------------
You need the following software to be installed in order to develop for Android in C++:
#. **Android NDK**
To compile C++ code for the Android platform you need the ``Android Native Development Kit`` (*NDK*).
You can get the latest version of the NDK from the `download page <http://developer.android.com/tools/sdk/ndk/index.html>`_. To install the Android NDK, just extract the archive to a folder on your computer. Here are the `installation instructions <http://developer.android.com/tools/sdk/ndk/index.html#Installing>`_.
.. note:: Before you start, you can read the official Android NDK documentation, located in the :file:`docs/` folder of the Android NDK archive.
The main article about the Android NDK build system is the :file:`ANDROID-MK.html` file.
Additional information can be found in :file:`APPLICATION-MK.html`, :file:`NDK-BUILD.html`, :file:`CPU-ARM-NEON.html`, :file:`CPLUSPLUS-SUPPORT.html`, and :file:`PREBUILTS.html`.
#. **CDT plugin for Eclipse**
There are several possible ways to integrate compilation of C++ code with the Android NDK into the Eclipse build process.
We recommend the approach based on the Eclipse :abbr:`CDT(C/C++ Development Tooling)` Builder.
.. important:: Make sure your Eclipse IDE has the :abbr:`CDT(C/C++ Development Tooling)` plugin installed. To check, open :guilabel:`Help -> About Eclipse SDK` and press the :guilabel:`Installation Details` button.
.. image:: images/eclipse_inst_details.png
:alt: Configure builders
:align: center
To install the `CDT plugin <http://eclipse.org/cdt/>`_, use the menu :guilabel:`Help -> Install New Software...`,
click :guilabel:`Add...`, paste the CDT 8.0 repository URL http://download.eclipse.org/tools/cdt/releases/indigo as shown in the picture below, name it *CDT*, and click :guilabel:`OK`.
.. image:: images/eclipse_inst_cdt.png
:alt: Configure builders
:align: center
Installing ``CDT Main Features`` should be enough:
.. image:: images/eclipse_inst_cdt_2.png
:alt: Configure builders
:align: center
That's it. Compilation of C++ code is now fully integrated into the Eclipse build process.
Android application structure
=============================
Usually, the source code of an Android application has the following structure:
+ :file:`root folder of the project/`
- :file:`jni/`
- :file:`libs/`
- :file:`res/`
- :file:`src/`
- :file:`AndroidManifest.xml`
- :file:`project.properties`
- :file:`... other files ...`
where:
* the :file:`src` folder contains the Java code of the application,
* the :file:`res` folder contains the resources of the application (images, XML files describing the UI layout, etc.),
* the :file:`libs` folder will contain the native libraries after a successful build,
* the :file:`jni` folder contains the C/C++ source code of the application and the NDK build scripts :file:`Android.mk` and :file:`Application.mk`
that produce the native libraries,
* the :file:`AndroidManifest.xml` file presents essential information about the application to the Android system
(application name, name of the main application package, application components, required permissions, etc.).
It can be created with the Eclipse wizard or the :command:`android` tool from the Android SDK.
* :file:`project.properties` is a text file containing information about the target Android platform and other build details.
This file is generated by Eclipse, or can be created with the :command:`android` tool included in the Android SDK.
.. note:: Both files (:file:`AndroidManifest.xml` and :file:`project.properties`) are required to compile the C++ part of the application,
since the Android NDK build system relies on them. If either of these files does not exist, compile the Java part of the project before the C++ part.
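For reference, a minimal :file:`project.properties` generated by the :command:`android` tool might look like the following sketch (the target level and the library path are just examples; use the values matching your project):
.. code-block:: ini
# Project target.
target=android-11
# Reference to the OpenCV library project (path is an example)
android.library.reference.1=../OpenCV-2.4.2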
:file:`Android.mk` and :file:`Application.mk` scripts
==================================================================
The script :file:`Android.mk` usually has the following structure:
.. code-block:: make
:linenos:
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
LOCAL_MODULE := <module_name>
LOCAL_SRC_FILES := <list of .c and .cpp project files>
<some variable name> := <some variable value>
...
<some variable name> := <some variable value>
include $(BUILD_SHARED_LIBRARY)
This is a minimal :file:`Android.mk` that builds the C++ source code of an Android application. Note that the first two lines and the last line are mandatory for any :file:`Android.mk`.
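For illustration, here is a hypothetical filled-in version of such a script for a module called ``my_jni_sample`` built from two source files (all names are examples only):
.. code-block:: make
LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# name of the resulting shared library (lib<name>.so)
LOCAL_MODULE    := my_jni_sample
# C++ sources of the JNI part
LOCAL_SRC_FILES := jni_part.cpp image_processing.cpp
# link against the Android logging library
LOCAL_LDLIBS    += -llog
include $(BUILD_SHARED_LIBRARY)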
Usually the :file:`Application.mk` file is optional, but for a project using OpenCV, where STL and exceptions are used in C++, it should also be created. An example :file:`Application.mk`:
.. code-block:: make
:linenos:
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI := armeabi-v7a
.. _NDK_build_cli:
Building application native part from command line
==================================================
Here is the standard way to compile the C++ part of an Android application:
#. Open a console and go to the root folder of the Android application:
.. code-block:: bash
cd <root folder of the project>/
#. Run the following command:
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build
.. note:: On Windows we recommend using ``ndk-build.cmd`` in the standard Windows console (``cmd.exe``) rather than the similar ``bash`` script in a ``Cygwin`` shell.
.. image:: images/ndk_build.png
:alt: NDK build
:align: center
#. After executing this command the C++ part of the source code is compiled.
After that, the Java part of the application can be (re)compiled using either *Eclipse* or the *Ant* build tool.
.. note:: Some parameters can be passed to :command:`ndk-build`:
**Example 1**: Verbose compilation
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build V=1
**Example 2**: Rebuild all
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build -B
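**Example 3**: Parallel build. ``ndk-build`` is a wrapper around GNU Make, so the usual ``-j`` flag can be passed to build with several jobs (the job count below is an example):
.. code-block:: bash
<path_where_NDK_is_placed>/ndk-build -j4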
.. _CDT_Builder:
Building application native part from *Eclipse* (CDT Builder)
=============================================================
There are several possible ways to integrate compilation of native C++ code with the Android NDK into the Eclipse build process.
We recommend the approach based on the Eclipse :abbr:`CDT(C/C++ Development Tooling)` Builder.
.. important:: Make sure your Eclipse IDE has the :abbr:`CDT(C/C++ Development Tooling)` plugin installed. To check, open :guilabel:`Help -> About Eclipse SDK -> Installation Details`.
.. image:: images/eclipse_inst_details.png
:alt: Eclipse About
:align: center
.. important:: The OpenCV for Android 2.4.2 package contains sample projects with pre-configured CDT Builders. For your own projects, follow the steps below.
#. Define the ``NDKROOT`` environment variable containing the path to the Android NDK in your system (e.g. ``"X:\\Apps\\android-ndk-r8"`` or ``"/opt/android-ndk-r8"``).
**On Windows** an environment variable can be set via :guilabel:`My Computer -> Properties -> Advanced -> Environment variables`; restart Eclipse afterwards.
On Windows 7 it is also possible to use the `setx <http://ss64.com/nt/setx.html>`_ command in a console session.
**On Linux** and **MacOS** an environment variable can be set by appending an ``"export VAR_NAME=VAR_VALUE"`` line to the :file:`"~/.bashrc"` file and then logging out and back in.
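For example, assuming the NDK was unpacked to :file:`/opt/android-ndk-r8` (adjust the path to your actual location), the line to append would be:
.. code-block:: bash
export NDKROOT=/opt/android-ndk-r8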
#. Open Eclipse and load the Android app project to configure.
#. Add C/C++ Nature to the project via Eclipse menu :guilabel:`New -> Other -> C/C++ -> Convert to a C/C++ Project`.
.. image:: images/eclipse_cdt_cfg1.png
:alt: Configure CDT
:align: center
And:
.. image:: images/eclipse_cdt_cfg2.png
:alt: Configure CDT
:align: center
#. Select the project(s) to convert. Specify "Project type" = ``Makefile project``, "Toolchains" = ``Other Toolchain``.
.. image:: images/eclipse_cdt_cfg3.png
:alt: Configure CDT
:align: center
#. Open :guilabel:`Project Properties -> C/C++ Build`, uncheck ``Use default build command``, and replace the "Build command" text ``"make"`` with
``"${NDKROOT}/ndk-build.cmd"`` on Windows, or
``"${NDKROOT}/ndk-build"`` on Linux and MacOS.
.. image:: images/eclipse_cdt_cfg4.png
:alt: Configure CDT
:align: center
#. Go to the :guilabel:`Behaviour` tab and change the "Workbench build type" section as shown below:
.. image:: images/eclipse_cdt_cfg5.png
:alt: Configure CDT
:align: center
#. Press :guilabel:`OK` and make sure ``ndk-build`` is successfully invoked when building the project.
.. image:: images/eclipse_cdt_cfg6.png
:alt: Configure CDT
:align: center
#. If you open your C++ source file in the Eclipse editor, you'll see syntax error notifications. These are not real errors; some additional CDT configuration is required.
.. image:: images/eclipse_cdt_cfg7.png
:alt: Configure CDT
:align: center
#. Open :guilabel:`Project Properties -> C/C++ General -> Paths and Symbols` and add the following **Include** paths for **C++**:
::
${NDKROOT}/platforms/android-9/arch-arm/usr/include
${NDKROOT}/sources/cxx-stl/gnu-libstdc++/include
${NDKROOT}/sources/cxx-stl/gnu-libstdc++/libs/armeabi-v7a/include
${ProjDirPath}/../../sdk/native/jni/include
The last path should be changed to the correct absolute or relative path to the OpenCV4Android SDK location.
This should clear the syntax error notifications in the Eclipse C++ editor.
.. image:: images/eclipse_cdt_cfg8.png
:alt: Configure CDT
:align: center
.. note:: The latest Android NDK **r8b** uses a different STL headers path. So if you use this NDK release, add the following **Include** paths instead:
::
${NDKROOT}/platforms/android-9/arch-arm/usr/include
${NDKROOT}/sources/cxx-stl/gnu-libstdc++/4.6/include
${NDKROOT}/sources/cxx-stl/gnu-libstdc++/4.6/libs/armeabi-v7a/include
${ProjDirPath}/../../sdk/native/jni/include
Debugging and Testing
=====================
In this section we will give you some easy-to-follow instructions on how to set up an emulator or hardware device for testing and debugging an Android project.
AVD
---
AVD (*Android Virtual Device*) is probably not the most convenient way to test an OpenCV-dependent application, but it is surely the easiest one to configure.
#. Assuming you already have the *Android SDK* and *Eclipse IDE* installed, in Eclipse go to :guilabel:`Window -> AVD Manager`.
.. **TBD:** how to start AVD Manager without Eclipse...
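.. note:: The AVD Manager can also be started without Eclipse by running the ``android`` tool from the SDK tools folder with the ``avd`` argument (the path below is an example):
.. code-block:: bash
<Android SDK path>/tools/android avd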
#. Press the :guilabel:`New` button in :guilabel:`AVD Manager` window.
#. The :guilabel:`Create new Android Virtual Device` window will let you select properties for your new device, such as the target API level, the size of the SD card, and others.
.. image:: images/AVD_create.png
:alt: Configure builders
:align: center
#. When you click the :guilabel:`Create AVD` button, your new AVD will be available in the :guilabel:`AVD Manager`.
#. Press :guilabel:`Start` to launch the device. Be aware that any AVD (a.k.a. emulator) is usually much slower than a hardware Android device, so it may take up to several minutes to start.
#. Go to :guilabel:`Run -> Run/Debug` in the Eclipse IDE to run your application in regular or debugging mode. The :guilabel:`Device Chooser` will let you choose among the running devices or start a new one.
Hardware Device
---------------
If you have an Android device, you can use it to test and debug your applications. This way is more authentic, though a little bit harder to set up. On Windows and Linux some extra steps are required before the OS can work with Android devices; no extra actions are needed on Mac OS. See the detailed information on configuring hardware devices in the subsections below.
You may also consult the official `Android Developers site instructions <http://developer.android.com/tools/device.html>`_ for more information.
Windows host computer
^^^^^^^^^^^^^^^^^^^^^
#. Enable USB debugging on the Android device (via :guilabel:`Settings` menu).
#. Attach the Android device to your PC with a USB cable.
#. Go to :guilabel:`Start Menu` and **right-click** on :guilabel:`Computer`. Select :guilabel:`Manage` in the context menu. You may be asked for Administrative permissions.
#. Select :guilabel:`Device Manager` in the left pane and find an unknown device in the list. You may try unplugging and replugging it to check whether it is your device that appears in the list.
.. image:: images/usb_device_connect_01.png
:alt: Unknown device
:align: center
#. Try your luck installing the `Google USB drivers` without any modifications: **right-click** the unknown device, select the :guilabel:`Properties` menu item, then the :guilabel:`Details` tab, and press the :guilabel:`Update Driver` button.
.. image:: images/usb_device_connect_05.png
:alt: Device properties
:align: center
#. Select :guilabel:`Browse computer for driver software`.
.. image:: images/usb_device_connect_06.png
:alt: Browse for driver
:align: center
#. Specify the path to :file:`<Android SDK folder>/extras/google/usb_driver/` folder.
.. image:: images/usb_device_connect_07.png
:alt: Browse for driver
:align: center
#. If you get a prompt to install unverified drivers, followed by a report of success, you are done with the USB driver installation.
.. image:: images/usb_device_connect_08.png
:alt: Install prompt
:align: center
` `
.. image:: images/usb_device_connect_09.png
:alt: Installed OK
:align: center
#. Otherwise (if you get a failure like the one shown below), follow the next steps.
.. image:: images/usb_device_connect_12.png
:alt: No driver
:align: center
#. Again **right-click** the unknown device, select :guilabel:`Properties --> Details --> Hardware Ids` and copy a line that looks like ``USB\VID_XXXX&PID_XXXX&MI_XX``.
.. image:: images/usb_device_connect_02.png
:alt: Device properties details
:align: center
#. Now open the file :file:`<Android SDK folder>/extras/google/usb_driver/android_winusb.inf` and select either the ``Google.NTx86`` or ``Google.NTamd64`` section, depending on your host system architecture.
.. image:: images/usb_device_connect_03.png
:alt: "android_winusb.inf"
:align: center
#. A record for your device, similar to the existing ones, should be present there; you need to add it manually.
.. image:: images/usb_device_connect_04.png
:alt: "android_winusb.inf"
:align: center
#. Save the :file:`android_winusb.inf` file and try to install the USB driver again.
.. image:: images/usb_device_connect_05.png
:alt: Device properties
:align: center
` `
.. image:: images/usb_device_connect_06.png
:alt: Browse for driver
:align: center
` `
.. image:: images/usb_device_connect_07.png
:alt: Browse for driver
:align: center
#. This time the installation should succeed.
.. image:: images/usb_device_connect_08.png
:alt: Install prompt
:align: center
` `
.. image:: images/usb_device_connect_09.png
:alt: Installed OK
:align: center
#. The formerly unknown device is now recognized as an Android phone.
.. image:: images/usb_device_connect_10.png
:alt: "Known" device
:align: center
#. A successful USB connection to the device can be verified in the console with the ``adb devices`` command.
.. image:: images/usb_device_connect_11.png
:alt: "adb devices"
:align: center
#. Now, in Eclipse go to :guilabel:`Run -> Run/Debug` to run your application in regular or debugging mode. The :guilabel:`Device Chooser` will let you choose among the devices.
Linux host computer
^^^^^^^^^^^^^^^^^^^
By default, Linux does not recognize Android devices, but it is easy to fix this. On Ubuntu Linux you have to create a new **/etc/udev/rules.d/51-android.rules** configuration file that contains information about your Android device. You can find some vendor IDs `here <http://developer.android.com/tools/device.html#VendorIds>`_ or run the :command:`lsusb` command to see the vendor ID of the plugged-in Android device. Here is an example of such a file for an LG device:
.. code-block:: guess
SUBSYSTEM=="usb", ATTR{idVendor}=="1004", MODE="0666", GROUP="plugdev"
Then restart your adb server (or, even better, restart the system), plug in your Android device and execute the :command:`adb devices` command. You will see the list of attached devices:
.. image:: images/usb_device_connect_ubuntu.png
:alt: List of attached devices
:align: center
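A minimal command sequence for this check, assuming ``adb`` and ``lsusb`` are available in your ``PATH``, might look like this:
.. code-block:: bash
# look up the vendor ID of the attached device
lsusb
# restart the adb server and list the attached devices
adb kill-server
adb start-server
adb devices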
MacOS host computer
^^^^^^^^^^^^^^^^^^^
No actions are required; just connect your device via USB and run ``adb devices`` to check the connection.
What's next
===========
Now that you have your development environment set up and configured, you may want to proceed to installing the OpenCV4Android SDK. You can learn how to do that in the separate :ref:`O4A_SDK` tutorial.
.. _dev_with_OCV_on_Android:
Android development with OpenCV
*******************************
This tutorial was created to help you use the OpenCV library within your Android project.
This guide was written with Windows 7 in mind, though it should work with any other OS supported by the OpenCV4Android SDK.
This tutorial assumes you have the following installed and configured:
* JDK
* Android SDK and NDK
* Eclipse IDE
* ADT and CDT plugins for Eclipse
..
If you need help with any of the above, you may refer to our :ref:`android_dev_intro` guide.
This tutorial also assumes that you already have the OpenCV4Android SDK installed on your development machine and OpenCV Manager installed on your testing device. If you need help with any of these, you may consult our :ref:`O4A_SDK` tutorial.
If you encounter any error after thoroughly following these steps, feel free to contact us via `OpenCV4Android <https://groups.google.com/group/android-opencv/>`_ discussion group or OpenCV `Q&A forum <http://answers.opencv.org>`_ . We'll do our best to help you out.
Using OpenCV library within your Android project
================================================
In this section we explain how to make an existing project use OpenCV.
Starting with the 2.4.2 release for Android, *OpenCV Manager* is used to provide apps with the best available version of OpenCV.
You can get more information here: :ref:`Android_OpenCV_Manager` and in these `slides <https://docs.google.com/a/itseez.com/presentation/d/1EO_1kijgBg_BsjNp2ymk-aarg-0K279_1VZRcPplSuk/present#slide=id.p>`_.
Java
----
Application development with async initialization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Using async initialization is the **recommended** way for application development. It uses OpenCV Manager to access OpenCV libraries installed externally on the target system.
#. Add the OpenCV library project to your workspace. Use the menu :guilabel:`File -> Import -> Existing project in your workspace`,
press the :guilabel:`Browse` button and locate the OpenCV4Android SDK (:file:`OpenCV-2.4.2-android-sdk/sdk`).
.. image:: images/eclipse_opencv_dependency0.png
:alt: Add dependency from OpenCV library
:align: center
#. In the application project add a reference to the OpenCV Java SDK: in :guilabel:`Project -> Properties -> Android -> Library -> Add` select ``OpenCV Library - 2.4.2``.
.. image:: images/eclipse_opencv_dependency1.png
:alt: Add dependency from OpenCV library
:align: center
To run an OpenCV Manager-based application for the first time, you need to install the `OpenCV Manager` and `OpenCV binary pack` packages for your platform.
You can do this using the Google Play Market or manually with the ``adb`` tool:
.. code-block:: sh
:linenos:
<Android SDK path>/platform-tools/adb install <OpenCV4Android SDK path>/apk/OpenCV_2.4.2_Manager.apk
<Android SDK path>/platform-tools/adb install <OpenCV4Android SDK path>/apk/OpenCV_2.4.2_binary_pack_armv7a.apk
Below is a very basic code snippet implementing the async initialization. It shows the basic principles. See the "15-puzzle" OpenCV sample for details.
.. code-block:: java
:linenos:
public class MyActivity extends Activity implements HelperCallbackInterface
{
private BaseLoaderCallback mOpenCVCallBack = new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status) {
case LoaderCallbackInterface.SUCCESS:
{
Log.i(TAG, "OpenCV loaded successfully");
// Create and set View
mView = new puzzle15View(mAppContext);
setContentView(mView);
} break;
default:
{
super.onManagerConnected(status);
} break;
}
}
};
/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState)
{
Log.i(TAG, "onCreate");
super.onCreate(savedInstanceState);
Log.i(TAG, "Trying to load OpenCV library");
if (!OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_2_4_2, this, mOpenCVCallBack))
{
Log.e(TAG, "Cannot connect to OpenCV Manager");
}
}
// ...
}
In this case the application works with OpenCV Manager in an asynchronous fashion. The ``onManagerConnected`` callback will be called in the UI thread when initialization finishes.
Please note that it is not allowed to use OpenCV calls or load OpenCV-dependent native libraries before this callback is invoked.
Load your own native libraries that depend on OpenCV after the successful OpenCV initialization.
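For example, assuming your project ships an OpenCV-dependent native library called ``my_jni_lib1`` (the name is illustrative), the ``SUCCESS`` branch of the callback above could be extended like this:
.. code-block:: java
case LoaderCallbackInterface.SUCCESS:
{
    Log.i(TAG, "OpenCV loaded successfully");
    // Load the application's own OpenCV-dependent native library
    // only after OpenCV Manager reports success
    System.loadLibrary("my_jni_lib1");
    // Create and set View
    mView = new puzzle15View(mAppContext);
    setContentView(mView);
} break;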
Application development with static initialization
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
According to this approach all OpenCV binaries are included in your application package. It is designed mostly for development purposes.
This approach is deprecated for production code; a release package is recommended to communicate with OpenCV Manager via the async initialization described above.
#. Add the OpenCV library project to your workspace the same way as for the async initialization above.
Use the menu :guilabel:`File -> Import -> Existing project in your workspace`, press the :guilabel:`Browse` button and select the OpenCV SDK path (:file:`OpenCV-2.4.2-android-sdk/sdk`).
.. image:: images/eclipse_opencv_dependency0.png
:alt: Add dependency from OpenCV library
:align: center
#. In the application project add a reference to the OpenCV4Android SDK: in :guilabel:`Project -> Properties -> Android -> Library -> Add` select ``OpenCV Library - 2.4.2``.
.. image:: images/eclipse_opencv_dependency1.png
:alt: Add dependency from OpenCV library
:align: center
#. If your application project **doesn't have a JNI part**, just copy the corresponding OpenCV native libs from :file:`<OpenCV-2.4.2-android-sdk>/sdk/native/libs/<target_arch>` to the :file:`libs/<target_arch>` folder of your project directory.
If the application project **has a JNI part**, instead of copying the libraries manually you need to modify your ``Android.mk`` file:
add the following two lines after ``"include $(CLEAR_VARS)"`` and before ``"include path_to_OpenCV-2.4.2-android-sdk/sdk/native/jni/OpenCV.mk"``:
.. code-block:: make
:linenos:
OPENCV_CAMERA_MODULES:=on
OPENCV_INSTALL_MODULES:=on
The result should look like the following:
.. code-block:: make
:linenos:
include $(CLEAR_VARS)
# OpenCV
OPENCV_CAMERA_MODULES:=on
OPENCV_INSTALL_MODULES:=on
include ../../sdk/native/jni/OpenCV.mk
After that, the OpenCV libraries will be copied to your application's :file:`libs` folder during the JNI build.
Eclipse will automatically include all libraries from the :file:`libs` folder in the application package (APK).
#. The last step to enable OpenCV in your application is Java initialization code before the first call to the OpenCV API.
It can be done, for example, in a static block of the ``Activity`` class:
.. code-block:: java
:linenos:
static {
if (!OpenCVLoader.initDebug()) {
// Handle initialization error
}
}
If your application includes other OpenCV-dependent native libraries, you should load them **after** OpenCV initialization:
.. code-block:: java
:linenos:
static {
if (!OpenCVLoader.initDebug()) {
// Handle initialization error
} else {
System.loadLibrary("my_jni_lib1");
System.loadLibrary("my_jni_lib2");
}
}
Native/C++
----------
To build your own Android application that uses OpenCV from its native part, take the following steps:
#. You can use an environment variable to specify the location of the OpenCV package, or just hardcode an absolute or relative path in the :file:`jni/Android.mk` of your project.
#. The file :file:`jni/Android.mk` should be written for the current application using the common rules for this file.
For detailed information see the Android NDK documentation from the Android NDK archive, in the file
:file:`<path_where_NDK_is_placed>/docs/ANDROID-MK.html`
#. The line
.. code-block:: make
include C:\Work\OpenCV4Android\OpenCV-2.4.2-android-sdk\sdk\native\jni\OpenCV.mk
should be inserted into the :file:`jni/Android.mk` file **after** the line
.. code-block:: make
include $(CLEAR_VARS)
#. Several variables can be used to customize the OpenCV part of the build, but you **don't need** to use them when your application uses `async initialization` via the `OpenCV Manager` API.
Note: these variables should be set **before** the ``"include .../OpenCV.mk"`` line:
.. code-block:: make
OPENCV_INSTALL_MODULES:=on
Copies necessary OpenCV dynamic libs to the project ``libs`` folder in order to include them into the APK.
.. code-block:: make
OPENCV_CAMERA_MODULES:=off
Skips copying the native OpenCV camera-related libs to the project ``libs`` folder.
.. code-block:: make
OPENCV_LIB_TYPE:=STATIC
Performs a static link with OpenCV. By default dynamic linking is used and the project JNI library depends on ``libopencv_java.so``.
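For example, a sketch of the relevant :file:`Android.mk` fragment with all three variables set (the SDK path is a placeholder; use your actual OpenCV4Android SDK location):
.. code-block:: make
include $(CLEAR_VARS)
# OpenCV customization variables must come before OpenCV.mk is included
OPENCV_LIB_TYPE        := STATIC
OPENCV_INSTALL_MODULES := on
OPENCV_CAMERA_MODULES  := off
include <path_to_OpenCV-2.4.2-android-sdk>/sdk/native/jni/OpenCV.mk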
#. The file :file:`Application.mk` should exist and should contain the following lines:
.. code-block:: make
APP_STL := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
Also, a line like this one:
.. code-block:: make
APP_ABI := armeabi-v7a
should specify the application's target platforms.
In some cases a linker error (like ``"In function 'cv::toUtf16(std::basic_string<...>... undefined reference to 'mbstowcs'"``) happens
when building an application JNI library that depends on OpenCV.
The following line in :file:`Application.mk` usually fixes it:
.. code-block:: make
APP_PLATFORM := android-9
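Putting these pieces together, a typical :file:`Application.mk` for an OpenCV-dependent project might look like this (the ABI and platform level are examples; adjust them to your targets):
.. code-block:: make
APP_STL      := gnustl_static
APP_CPPFLAGS := -frtti -fexceptions
APP_ABI      := armeabi-v7a
APP_PLATFORM := android-9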
#. Either use :ref:`manual <NDK_build_cli>` ``ndk-build`` invocation or :ref:`setup Eclipse CDT Builder <CDT_Builder>` to build the native JNI library before the Java part is [re]built and the APK is created.
Hello OpenCV Sample
===================
Here are the basic steps to guide you through the process of creating a simple OpenCV-centric application.
It will be capable of accessing camera output, processing it and displaying the result.
#. Open Eclipse IDE, create a new clean workspace, create a new Android project (*File -> New -> Android Project*).
#. Set name, target, package and minSDKVersion accordingly.
#. Create a new class (*File -> New -> Class*). Name it, for example, *HelloOpenCVView*.
.. image:: images/dev_OCV_new_class.png
:alt: Add a new class.
:align: center
* It should extend the *SurfaceView* class.
* It should also implement *SurfaceHolder.Callback* and *Runnable*.
#. Edit the *HelloOpenCVView* class.
* Add an *import* line for *android.content.Context*.
* Modify the autogenerated stubs: *HelloOpenCVView*, *surfaceCreated*, *surfaceDestroyed* and *surfaceChanged*.
.. code-block:: java
:linenos:
package com.hello.opencv.test;
import android.content.Context;
public class HelloOpenCVView extends SurfaceView implements Callback, Runnable {
public HelloOpenCVView(Context context) {
super(context);
getHolder().addCallback(this);
}
public void surfaceCreated(SurfaceHolder holder) {
(new Thread(this)).start();
}
public void surfaceDestroyed(SurfaceHolder holder) {
cameraRelease();
}
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
cameraSetup(width, height);
}
//...
* Add the *cameraOpen*, *cameraRelease* and *cameraSetup* methods as shown below.
* Also, don't forget to add the public *run()* method as follows:
.. code-block:: java
:linenos:
public void run() {
// TODO: loop { getFrame(), processFrame(), drawFrame() }
}
public boolean cameraOpen() {
return false; //TODO: open camera
}
private void cameraRelease() {
// TODO release camera
}
private void cameraSetup(int width, int height) {
// TODO setup camera
}
#. Create a new *Activity* (*New -> Other -> Android -> Android Activity*) and name it, for example, *HelloOpenCVActivity*. For this activity define the *onCreate*, *onResume* and *onPause* methods.
.. code-block:: java
:linenos:
public void onCreate (Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
mView = new HelloOpenCVView(this);
setContentView (mView);
}
protected void onPause() {
super.onPause();
mView.cameraRelease();
}
protected void onResume() {
super.onResume();
if( !mView.cameraOpen() ) {
// MessageBox and exit app
AlertDialog ad = new AlertDialog.Builder(this).create();
ad.setCancelable(false); // This blocks the "BACK" button
ad.setMessage("Fatal error: can't open camera!");
ad.setButton("OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
finish();
}
});
ad.show();
}
}
#. Add the following permissions to the AndroidManifest.xml file:
.. code-block:: xml
:linenos:
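<!-- the following tags go after the closing application tag, inside the manifest element -->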
</application>
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
#. Reference the OpenCV library in your project properties.
.. image:: images/dev_OCV_reference.png
:alt: Reference OpenCV library.
:align: center
#. We now need some code to handle the camera. Update the *HelloOpenCVView* class as follows:
.. code-block:: java
:linenos:
private VideoCapture mCamera;
public boolean cameraOpen() {
synchronized (this) {
cameraRelease();
mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
if (!mCamera.isOpened()) {
mCamera.release();
mCamera = null;
Log.e("HelloOpenCVView", "Failed to open native camera");
return false;
}
}
return true;
}
public void cameraRelease() {
synchronized(this) {
if (mCamera != null) {
mCamera.release();
mCamera = null;
}
}
}
private void cameraSetup(int width, int height) {
synchronized (this) {
if (mCamera != null && mCamera.isOpened()) {
List<Size> sizes = mCamera.getSupportedPreviewSizes();
int mFrameWidth = width;
int mFrameHeight = height;
{ // selecting optimal camera preview size
double minDiff = Double.MAX_VALUE;
for (Size size : sizes) {
if (Math.abs(size.height - height) < minDiff) {
mFrameWidth = (int) size.width;
mFrameHeight = (int) size.height;
minDiff = Math.abs(size.height - height);
}
}
}
mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
}
}
}
#. The last step is to update the *run()* method in the *HelloOpenCVView* class as follows:
.. code-block:: java
:linenos:
public void run() {
while (true) {
Bitmap bmp = null;
synchronized (this) {
if (mCamera == null)
break;
if (!mCamera.grab())
break;
bmp = processFrame(mCamera);
}
if (bmp != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2,
(canvas.getHeight() - bmp.getHeight()) / 2, null);
getHolder().unlockCanvasAndPost(canvas);
}
bmp.recycle();
}
}
}
protected Bitmap processFrame(VideoCapture capture) {
Mat mRgba = new Mat();
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
//process mRgba
Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
try {
Utils.matToBitmap(mRgba, bmp);
} catch(Exception e) {
Log.e("processFrame", "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmp.recycle();
bmp = null;
}
return bmp;
}
Source Code
===========
Download the source code from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/introduction/display_image/display_image.cpp>`_.
.. literalinclude:: ../../../../samples/cpp/tutorial_code/introduction/display_image/display_image.cpp
:language: cpp
.. _howToWriteTutorial: How to write a tutorial for OpenCV? *********************************** Okay, so assume you have just finished a project of yours implementing something based on OpenCV and you want to present/share it with the community. Luckily, OpenCV is an *open source project*. This means that in theory anyone has access to the full source code and may extend it. While making a robust and practical library (like OpenCV) is great, the success of a library also depends on how user friendly it is. To improve on this aspect, the OpenCV team has already been listening to user feedback from its :opencv_group:`Yahoo user group <>` and by making samples you can find in the source directories sample folder. The addition of the tutorials (in both online and PDF format) is an extension of these efforts. Goal ==== .. _reST: http://docutils.sourceforge.net/rst.html .. |reST| replace:: reStructuredText .. |Sphinx| replace:: Sphinx .. _Sphinx: http://sphinx.pocoo.org/ The tutorials are just as an important part of the library as the implementation of those crafty data structures and algorithms you can find in OpenCV. Therefore, the source codes for the tutorials are part of the library. And yes, I meant source codes. The reason for this formulation is that the tutorials are written by using the |Sphinx|_ documentation generation system. This is based on the popular python documentation system called |reST|_ (reST). ReStructuredText is a really neat language that by using a few simple conventions (indentation, directives) and emulating old school e-mail writing techniques (text only) tries to offer a simple way to create and edit documents. Sphinx extends this with some new features and creates the resulting document in both HTML (for web) and PDF (for offline usage) format. Usually, an OpenCV tutorial has the following parts: 1. A source code demonstration of an OpenCV feature: a. One or more CPP, Python, Java or other type of files depending for what OpenCV offers support and for what language you make the tutorial. #. Occasionaly, input resource files required for running your tutorials application. #. A table of content entry (so people may easily find the tutorial): a. Adding your stuff to the tutorials table of content (**reST** file). #. Add an image file near the TOC entry. #. The content of the tutorial itself: a. The **reST** text of the tutorial #. Images following the idea that "*A picture is worth a thousand words*". #. For more complex demonstrations you may create a video. As you can see you will need at least some basic knowledge of the *reST* system in order to complete the task at hand with success. However, don't worry *reST* (and *Sphinx*) was made with simplicity in mind. It is easy to grasp its basics. I found that the `OpenAlea documentations introduction on this subject <http://openalea.gforge.inria.fr/doc/openalea/doc/_build/html/source/tutorial/rest_syntax.html>`_ (or the `Thomas Cokelaer one <http://thomas-cokelaer.info/tutorials/sphinx/rest_syntax.html>`_ ) should enough for this. If for some directive or feature you need a more in-depth description look it up in the official |reST|_ help files or at the |Sphinx|_ documentation. In our world achieving some tasks is possible in multiple ways. However, some of the roads to take may have obvious or hidden advantages over others. Then again, in some other cases it may come down to just simple user preference. Here, I'll present how I decided to write the tutorials, based on my personal experience. 
If for some of them you know a better solution and you can back it up feel free to use that. I've nothing against it, as long as it gets the job done in an elegant fashion. Now the best would be if you could make the integration yourself. For this you need first to have the source code. I recommend following the guides for your operating system on acquiring OpenCV sources. For Linux users look :ref:`here <Linux-Installation>` and for :ref:`Windows here <Windows_Installation>`. You must also install python and sphinx with its dependencies in order to be able to build the documentation. Once you have downloaded the repository to your hard drive you can take a look in the OpenCV directory to make sure you have both the samples and doc folder present. Anyone may download the trunk source files from :file:`/svn/opencv/trunk/` . Nevertheless, not everyone has upload (commit/submit) rights. This is to protect the integrity of the library. If you plan doing more than one tutorial, and would like to have an account with commit user rights you should first register an account at http://code.opencv.org/ and then contact dr. Gary Bradski at -delete-bradski@-delete-willowgarage.com. Otherwise, you can just send the resulting files to us via the :opencv_group:`Yahoo user group <>` or to me at -delete-bernat@-delete-primeranks.net and I'll add it. If you have questions, suggestions or constructive critics I will gladly listen to them. If you send it to the OpenCV group please tag its subject with a **[Tutorial]** entry. Format the Source Code ====================== Before I start this let it be clear: the main goal is to have a working sample code. However, for your tutorial to be of a top notch quality you should follow a few guide lines I am going to present here. In case you have an application by using the older interface (with *IplImage*, *CVMat*, *cvLoadImage* and such) consider migrating it to the new C++ interface. The tutorials are intended to be an up to date help for our users. And as of OpenCV 2 the OpenCV emphasis on using the less error prone and clearer C++ interface. Therefore, if possible please convert your code to the C++ interface. For this it may help to read the :ref:`InteroperabilityWithOpenCV1` tutorial. However, once you have an OpenCV 2 working code, then you should make your source code snippet as easy to read as possible. Here're a couple of advices for this: .. container:: enumeratevisibleitemswithsquare + Add a standard output with the description of what your program does. Keep it short and yet, descriptive. This output is at the start of the program. In my example files this usually takes the form of a *help* function containing the output. This way both the source file viewer and application runner can see what all is about in your sample. Here's an instance of this: .. code-block:: cpp void help() { cout << "--------------------------------------------------------------------------" << endl << "This program shows how to write video files. You can extract the R or G or B color channel " << " of the input video. You can choose to use the source codec (Y) or select a custom one. (N)"<< endl << "Usage:" << endl << "./video-write inputvideoName [ R | G | B] [Y | N]" << endl << "--------------------------------------------------------------------------" << endl << endl; } // ... int main(int argc, char *argv[], char *window_name) { help(); // here comes the actual source code } Additionally, finalize the description with a short usage guide. 
This way the user will know how to call your programs, what leads us to the next point. + Prefer command line argument controlling instead of hard coded one. If your program has some variables that may be changed use command line arguments for this. The tutorials, can be a simple try-out ground for the user. If you offer command line controlling for the input image (for example), then you offer the possibility for the user to try it out with his/her own images, without the need to mess in the source code. In the upper example you can see that the input image, channel and codec selection may all be changed from the command line. Just compile the program and run it with your own input arguments. + Be as verbose as possible. There is no shame in filling the source code with comments. This way the more advanced user may figure out what's happening right from the sample code. This advice goes for the output console too. Specify to the user what's happening. Never leave the user hanging there and thinking on: "Is this program now crashing or just doing some computationally intensive task?." So, if you do a training task that may take some time, make sure you print out a message about this before starting and after finishing it. + Throw out unnecessary stuff from your source code. This is a warning to not take the previous point too seriously. Balance is the key. If it's something that can be done in a fewer lines or simpler than that's the way you should do it. Nevertheless, if for some reason you have such sections notify the user why you have chosen to do so. Keep the amount of information as low as possible, while still getting the job done in an elegant way. + Put your sample file into the :file:`opencv/samples/cpp/tutorial_code/sectionName` folder. If you write a tutorial for other languages than cpp, then change that part of the path. Before completing this you need to decide that to what section (module) does your tutorial goes. Think about on what module relies most heavily your code and that is the one to use. If the answer to this question is more than one modules then the *general* section is the one to use. For finding the *opencv* directory open up your file system and navigate where you downloaded our repository. + If the input resources are hard to acquire for the end user consider adding a few of them to the :file:`opencv/samples/cpp/tutorial_code/images`. Make sure that who reads your code can try it out! Add the TOC entry ================= For this you will need to know some |reST|_. There is no going around this. |reST|_ files have **rst** extensions. However, these are simple text files. Use any text editor you like. Finding a text editor that offers syntax highlighting for |reST|_ was quite a challenge at the time of writing this tutorial. In my experience, `Intype <http://intype.info/>`_ is a solid option on Windows, although there is still place for improvement. Adding your source code to a table of content is important for multiple reasons. First and foremost this will allow for the user base to find your tutorial from our websites tutorial table of content. Secondly, if you omit this *Sphinx* will throw a warning that your tutorial file isn't part of any TOC tree entry. And there is nothing more than the developer team hates than an ever increasing warning/error list for their builds. *Sphinx* also uses this to build up the previous-back-up buttons on the website. 
Finally, omitting this step will lead to that your tutorial will **not** be added to the PDF version of the tutorials. Navigate to the :file:`opencv/doc/tutorials/section/table_of_content_section` folder (where the section is the module to which you're adding the tutorial). Open the *table_of_content_section* file. Now this may have two forms. If no prior tutorials are present in this section that there is a template message about this and has the following form: .. code-block:: rst .. _Table-Of-Content-Section: Section title ----------------------------------------------------------- Description about the section. .. include:: ../../definitions/noContent.rst .. raw:: latex \pagebreak The first line is a reference to the section title in the reST system. The section title will be a link and you may refer to it via the ``:ref:`` directive. The *include* directive imports the template text from the definitions directories *noContent.rst* file. *Sphinx* does not creates the PDF from scratch. It does this by first creating a latex file. Then creates the PDF from the latex file. With the *raw* directive you can directly add to this output commands. Its unique argument is for what kind of output to add the content of the directive. For the PDFs it may happen that multiple sections will overlap on a single page. To avoid this at the end of the TOC we add a *pagebreak* latex command, that hints to the LATEX system that the next line should be on a new page. If you have one of this, try to transform it to the following form: .. include:: ../../definitions/tocDefinitions.rst .. code-block:: rst .. _Table-Of-Content-Section: Section title ----------------------------------------------------------- .. include:: ../../definitions/tocDefinitions.rst + .. tabularcolumns:: m{100pt} m{300pt} .. cssclass:: toctableopencv =============== ====================================================== |MatBasicIma| **Title:** :ref:`matTheBasicImageContainer` *Compatibility:* > OpenCV 2.0 *Author:* |Author_BernatG| You will learn how to store images in the memory and how to print out their content to the console. =============== ===================================================== .. |MatBasicIma| image:: images/matTheBasicImageStructure.jpg :height: 90pt :width: 90pt .. raw:: latex \pagebreak .. toctree:: :hidden: ../mat - the basic image container/mat - the basic image container If this is already present just add a new section of the content between the include and the raw directives (excluding those lines). Here you'll see a new include directive. This should be present only once in a TOC tree and the reST file contains the definitions of all the authors contributing to the OpenCV tutorials. We are a multicultural community and some of our name may contain some funky characters. However, reST **only supports** ANSI characters. Luckily we can specify Unicode characters with the *unicode* directive. Doing this for all of your tutorials is a troublesome procedure. Therefore, the tocDefinitions file contains the definition of your author name. Add it here once and afterwards just use the replace construction. For example here's the definition for my name: .. code-block:: rst .. |Author_BernatG| unicode:: Bern U+00E1 t U+0020 G U+00E1 bor The ``|Author_BernatG|`` is the text definitions alias. I can use later this to add the definition, like I've done in the TOCs *Author* part. After the ``::`` and a space you start the definition. 
If you want to add an UNICODE character (non-ASCI) leave an empty space and specify it in the format U+(UNICODE code). To find the UNICODE code of a character I recommend using the `FileFormat <http://www.fileformat.info>`_ websites service. Spaces are trimmed from the definition, therefore we add a space by its UNICODE character (U+0020). Until the *raw* directive what you can see is a TOC tree entry. Here's how a TOC entry will look like: + .. tabularcolumns:: m{100pt} m{300pt} .. cssclass:: toctableopencv =============== ====================================================== |MatBasicIma| **Title:** :ref:`matTheBasicImageContainer` *Compatibility:* > OpenCV 2.0 *Author:* |Author_BernatG| You will learn how to store images in the memory and how to print out their content to the console. =============== ====================================================== .. |MatBasicIma| image:: images/matTheBasicImageStructure.jpg :height: 90pt :width: 90pt As you can see we have an image to the left and a description box to the right. To create two boxes we use a table with two columns and a single row. In the left column is the image and in the right one the description. However, the image directive is way too long to fit in a column. Therefore, we need to use the substitution definition system. We add this definition after the TOC tree. All images for the TOC tree are to be put in the images folder near its |reST|_ file. We use the point measurement system because we are also creating PDFs. PDFs are printable documents, where there is no such thing that pixels (px), just points (pt). And while generally space is no problem for web pages (we have monitors with **huge** resolutions) the size of the paper (A4 or letter) is constant and will be for a long time in the future. Therefore, size constrains come in play more like for the PDF, than the generated HTML code. Now your images should be as small as possible, while still offering the intended information for the user. Remember that the tutorial will become part of the OpenCV source code. If you add large images (that manifest in form of large image size) it will just increase the size of the repository pointlessly. If someone wants to download it later, its download time will be that much longer. Not to mention the larger PDF size for the tutorials and the longer load time for the web pages. In terms of pixels a TOC image should not be larger than 120 X 120 pixels. Resize your images if they are larger! .. note:: If you add a larger image and specify a smaller image size, *Sphinx* will not resize that. At build time will add the full size image and the resize will be done by your browser after the image is loaded. A 120 X 120 image is somewhere below 10KB. If you add a 110KB image, you have just pointlessly added a 100KB extra data to transfer over the internet for every user! Generally speaking you shouldn't need to specify your images size (excluding the TOC entries). If no such is found *Sphinx* will use the size of the image itself (so no resize occurs). Then again if for some reason you decide to specify a size that should be the **width** of the image rather than its height. The reason for this again goes back to the PDFs. On a PDF page the height is larger than the width. In the PDF the images will not be resized. If you specify a size that does not fit in the page, then what does not fits in **will be cut off**. 
When creating your images for your tutorial you should try to keep the image widths below 500 pixels, and calculate with around 400 point page width when specifying image widths. The image format depends on the content of the image. If you have some complex scene (many random like colors) then use *jpg*. Otherwise, prefer using *png*. They are even some tools out there that optimize the size of *PNG* images, such as `PNGGauntlet <http://pnggauntlet.com/>`_. Use them to make your images as small as possible in size. Now on the right side column of the table we add the information about the tutorial: .. container:: enumeratevisibleitemswithsquare + In the first line it is the title of the tutorial. However, there is no need to specify it explicitly. We use the reference system. We'll start up our tutorial with a reference specification, just like in case of this TOC entry with its `` .. _Table-Of-Content-Section:`` . If after this you have a title (pointed out by the following line of -), then Sphinx will replace the ``:ref:`Table-Of-Content-Section``` directive with the tile of the section in reference form (creates a link in web page). Here's how the definition looks in my case: .. code-block:: rst .. _matTheBasicImageContainer: Mat - The Basic Image Container ******************************* Note, that according to the |reST|_ rules the * should be as long as your title. + Compatibility. What version of OpenCV is required to run your sample code. + Author. Use the substitution markup of |reST|_. + A short sentence describing the essence of your tutorial. Now before each TOC entry you need to add the three lines of: .. code-block:: cpp + .. tabularcolumns:: m{100pt} m{300pt} .. cssclass:: toctableopencv The plus sign (+) is to enumerate tutorials by using bullet points. So for every TOC entry we have a corresponding bullet point represented by the +. Sphinx is highly indenting sensitive. Indentation is used to express from which point until to which point does a construction last. Un-indentation means end of that construction. So to keep all the bullet points to the same group the following TOC entries (until the next +) should be indented by two spaces. Here, I should also mention that **always** prefer using spaces instead of tabs. Working with only spaces makes possible that if we both use monotype fonts we will see the same thing. Tab size is text editor dependent and as should be avoided. *Sphinx* translates all tabs into 8 spaces before interpreting it. It turns out that the automatic formatting of both the HTML and PDF(LATEX) system messes up our tables. Therefore, we need to help them out a little. For the PDF generation we add the ``.. tabularcolumns:: m{100pt} m{300pt}`` directive. This means that the first column should be 100 points wide and middle aligned. For the HTML look we simply name the following table of a *toctableopencv* class type. Then, we can modify the look of the table by modifying the CSS of our web page. The CSS definitions go into the :file:`opencv/doc/_themes/blue/static/default.css_t` file. .. code-block:: css .toctableopencv { width: 100% ; table-layout: fixed; } .toctableopencv colgroup col:first-child { width: 100pt !important; max-width: 100pt !important; min-width: 100pt !important; } .toctableopencv colgroup col:nth-child(2) { width: 100% !important; } However, you should not need to modify this. Just add these three lines (plus keep the two space indentation) for all TOC entries you add. At the end of the TOC file you'll find: .. code-block:: rst .. 
raw:: latex \pagebreak .. toctree:: :hidden: ../mat - the basic image container/mat - the basic image container The page break entry comes for separating sections and should be only one in a TOC tree |reST|_ file. Finally, at the end of the TOC tree we need to add our tutorial to the *Sphinx* TOC tree system. *Sphinx* will generate from this the previous-next-up information for the HTML file and add items to the PDF according to the order here. By default this TOC tree directive generates a simple table of contents. However, we already created a fancy looking one so we no longer need this basic one. Therefore, we add the *hidden* option to do not show it. The path is of a relative type. We step back in the file system and then go into the :file:`mat - the basic image container` directory for the :file:`mat - the basic image container.rst` file. Putting out the *rst* extension for the file is optional. Write the tutorial ================== Create a folder with the name of your tutorial. Preferably, use small letters only. Then create a text file in this folder with *rst* extension and the same name. If you have images for the tutorial create an :file:`images` folder and add your images there. When creating your images follow the guidelines described in the previous part! Now here's our recommendation for the structure of the tutorial (although, remember that this is not carved in the stone; if you have a better idea, use it!): .. container:: enumeratevisibleitemswithsquare + Create the reference point and the title. .. code-block:: rst .. _matTheBasicImageContainer: Mat - The Basic Image Container ******************************* You start the tutorial by specifying a reference point by the ``.. _matTheBasicImageContainer:`` and then its title. The name of the reference point should be a unique one over the whole documentation. Therefore, do not use general names like *tutorial1*. Use the * character to underline the title for its full width. The subtitles of the tutorial should be underlined with = charachter. + Goals. You start your tutorial by specifying what you will present. You can also enumerate the sub jobs to be done. For this you can use a bullet point construction. There is a single configuration file for both the reference manual and the tutorial documentation. In the reference manuals at the argument enumeration we do not want any kind of bullet point style enumeration. Therefore, by default all the bullet points at this level are set to do not show the dot before the entries in the HTML. You can override this by putting the bullet point in a container. I've defined a square type bullet point view under the name *enumeratevisibleitemswithsquare*. The CSS style definition for this is again in the :file:`opencv\doc\_themes\blue\static\default.css_t` file. Here's a quick example of using it: .. code-block:: rst .. container:: enumeratevisibleitemswithsquare + Create the reference point and the title. + Second entry + Third entry Note that you need the keep the indentation of the container directive. Directive indentations are always three (3) spaces. Here you may even give usage tips for your sample code. + Source code. Present your samples code to the user. It's a good idea to offer a quick download link for the HTML page by using the *download* directive and pointing out where the user may find your source code in the file system by using the *file* directive: .. 
     .. code-block:: rst

        Text :file:`samples/cpp/tutorial_code/highgui/video-write/` folder of the OpenCV source library
        or :download:`text to appear in the webpage
        <../../../../samples/cpp/tutorial_code/HighGUI/video-write/video-write.cpp>`.

     For the download link the path is a relative one, hence the multiple back stepping operations (..). Then you can add the source code either by using the *code-block* directive or the *literalinclude* one. In case of the code block you need to actually add all the source code text into your |reST|_ text and also apply the required indentation:

     .. code-block:: rst

        .. code-block:: cpp

           int i = 0;
           l = ++j;

     The only argument of the directive is the language used (here CPP). Then you add the source code into its content (meaning one empty line after the directive), keeping the indentation of the directive (3 spaces). With the *literalinclude* directive you do not need to add the source code of the sample; you just specify the sample and *Sphinx* will load it for you at build time. Here's an example usage:

     .. code-block:: rst

        .. literalinclude:: ../../../../samples/cpp/tutorial_code/HighGUI/video-write/video-write.cpp
           :language: cpp
           :linenos:
           :tab-width: 4
           :lines: 1-8, 21-22, 24-

     After the directive you specify a relative path to the file from which to import. The directive has four options: ``:language:`` sets the language of the sample, ``:linenos:`` shows line numbers, ``:tab-width:`` sets the tab size, and ``:lines:`` lets you show only the important lines instead of loading the whole file. Use the *lines* option to hide redundant information (such as the *help* function). Here you basically specify ranges; if the second line number of a range is missing, the range extends to the end of the file. The ranges do not need to be in ascending order; you may even reorganize how you want to show your sample inside the tutorial.

   + The tutorial. Here goes the explanation of why and what you have used. Try to be short, clear and concise, yet thorough. There's no magic formula. Look into a few already made tutorials and start out from there. Try to mix sample OpenCV code with your explanations. If something is hard to describe in words, do not hesitate to add a reasonably sized image to overcome the issue.

     When you present OpenCV functionality it's a good idea to give a link to the OpenCV data structure or function used. Because the OpenCV tutorials and reference manual are in separate PDF files it is not possible to make this link work for the PDF format. Therefore, we use here only web page links to the **opencv.itseez.com** website. The OpenCV functions and data structures may be used for multiple tasks. Nevertheless, we want to avoid every user creating his or her own reference to a commonly used function. For this we use the global link collection of *Sphinx*. This is defined in the :file:`opencv/doc/conf.py` configuration file. Open it and go all the way down to the last entry:

     .. code-block:: py

        # ---- External links for tutorials -----------------
        extlinks = {
            'huivideo' : ('http://opencv.itseez.com/modules/highgui/doc/reading_and_writing_images_and_video.html#%s', None)
            }

     In short, here we defined a new **huivideo** directive that refers to an external web page link. Its usage is:

     .. code-block:: rst

        A sample function of the highgui modules image write and read page is the :huivideo:`imread() function <imread>`.
     Which turns into: A sample function of the highgui modules image write and read page is the :huivideo:`imread() function <imread>`. The argument you give between the <> will be put in place of the ``%s`` in the definition above, and the link will anchor to the correct function. To find out the anchor of a given function just open up a web page, search for the function and click on it. In the address bar it should appear like ``http://opencv.itseez.com/modules/highgui/doc/reading_and_writing_images_and_video.html#imread``. Look here for the names of the directives for each page of the OpenCV reference manual. If none is present for one of them, feel free to add one.

     For formulas you can add LaTeX code that will be rendered as images in the web pages. You do this by using the *math* directive. A usage tip:

     .. code-block:: latex

        .. math::

           MSE = \frac{1}{c*i*j} \sum{(I_1-I_2)^2}

     That after the build turns into:

     .. math::

        MSE = \frac{1}{c*i*j} \sum{(I_1-I_2)^2}

     You can even use it inline as ``:math:` MSE = \frac{1}{c*i*j} \sum{(I_1-I_2)^2}``` which turns into :math:`MSE = \frac{1}{c*i*j} \sum{(I_1-I_2)^2}`.

     If you use some exotic LaTeX library extension you need to add it to the ones used at build time. Look into the :file:`opencv/doc/conf.py` configuration file for more information on this.

   + Results. Here, depending on your program, show one or more of the following:

     - Console outputs by using the code block directive.
     - Output images.
     - Runtime videos, visualization. For this use your favorite screen capture software. `Camtasia Studio <http://www.techsmith.com/camtasia/>`_ certainly is one of the better choices, however its price is out of this world. `CamStudio <http://camstudio.org/>`_ is a free alternative, but less powerful. If you make a video you can upload it to YouTube and then use the raw directive with the HTML option to embed it into the generated web page:

       .. code-block:: rst

          You may observe a runtime instance of this on the `YouTube here <https://www.youtube.com/watch?v=jpBwHxsl1_0>`_.

          .. raw:: html

             <div align="center">
             <iframe title="Creating a video with OpenCV" width="560" height="349" src="http://www.youtube.com/embed/jpBwHxsl1_0?rel=0&loop=1" frameborder="0" allowfullscreen align="middle"></iframe>
             </div>

       This results in the text and video: You may observe a runtime instance of this on the `YouTube here <https://www.youtube.com/watch?v=jpBwHxsl1_0>`_.

       .. raw:: html

          <div align="center">
          <iframe title="Creating a video with OpenCV" width="560" height="349" src="http://www.youtube.com/embed/jpBwHxsl1_0?rel=0&loop=1" frameborder="0" allowfullscreen align="middle"></iframe>
          </div>

     When these aren't self-explanatory make sure to throw in a few guiding lines about what the reader sees and why.

   + Build the documentation and check for errors or warnings. In CMake make sure you enable the option for building the documentation. Then simply build the **docs** project for the PDF file and the **docs_html** project for the web page. Read the output of the build and check for errors/warnings concerning what you have added. This is also the time to observe and correct any kind of *not so good looking* parts. Remember to keep our build logs clean.

   + Read your tutorial again and check for both programming and spelling errors. If you find any, please correct them.

Take home the pride and joy of a job well done!
===============================================

Once you are done, contact me or Dr. Gary Bradski with the tutorial.
We may submit the tutorial ourselves to the trunk branch of our repository or ask you to do so. Now, to see your work **live** you may need to wait some time. The PDFs are usually updated at the release of a new OpenCV version. The web pages are a little more dynamic: they are automatically rebuilt every evening. However, the **opencv.itseez.com** website contains only the most recent **stable branch** of OpenCV; currently this is 2.3. When we add something new (like a tutorial) it first goes to the **trunk branch** of our repository. A build of this can be found on the **opencv.itseez.com/trunk** website. Although we try to make a build every night, occasionally we might freeze one of the branches to fix pending issues, and during this it may take a little longer to see your work *live*. However, if you submitted it, be sure that eventually it will show up.

If you have any questions or advice regarding this tutorial you can contact me at -delete-bernat@-delete-primeranks.net. Of course, delete the -delete- parts of that e-mail address.
\ No newline at end of file
......
......@@ -106,46 +106,57 @@ Here you can read tutorials about how to set up your computer to work with the O
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
================ ======================================================
|AndroidBinPack| **Title:** :ref:`Android_Binary_Package`
================ =================================================
|AndroidLogo| **Title:** :ref:`Android_Dev_Intro`
*Compatibility:* > OpenCV 2.3.1
*Compatibility:* > OpenCV 2.4.2
*Author:* |Author_AndreyK|
*Author:* |Author_VsevolodG|
You will learn how to setup OpenCV for Android platform!
Not a tutorial, but a guide introducing Android development basics and environment setup
================ ======================================================
================ =================================================
.. |AndroidBinPack| image:: images/android_logo.png
:height: 90pt
:width: 90pt
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
================ =================================================
|AndroidLogo| **Title:** :ref:`O4A_SDK`
*Compatibility:* > OpenCV 2.4.2
*Author:* |Author_VsevolodG|
OpenCV4Android SDK: general info, installation, running samples
================ =================================================
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
================ ======================================================
|AndroidNDKPack| **Title:** :ref:`Android_Binary_Package_with_NDK`
================ =================================================
|AndroidLogo| **Title:** :ref:`dev_with_OCV_on_Android`
*Compatibility:* > OpenCV 2.3.1
*Compatibility:* > OpenCV 2.4.2
*Author:* |Author_LeonidBLB|
*Author:* |Author_VsevolodG|
You will learn how to work with C++ OpenCV code for Android platform
Development with OpenCV4Android SDK
================ ======================================================
================ =================================================
.. |AndroidNDKPack| image:: images/android_logo.png
.. |AndroidLogo| image:: images/android_logo.png
:height: 90pt
:width: 90pt
* **iOS**
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
=========== ======================================================
|Install_2| **Title:** :ref:`iOS-Installation`
=========== ======================================================
|Install_2| **Title:** :ref:`iOS-Installation`
*Compatibility:* > OpenCV 2.3.1
......@@ -153,13 +164,13 @@ Here you can read tutorials about how to set up your computer to work with the O
We will learn how to setup OpenCV for using it in iOS!
=========== ======================================================
=========== ======================================================
.. |Install_2| image:: images/ios4_logo.jpg
.. |Install_2| image:: images/ios4_logo.jpg
:width: 90pt
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
============= ======================================================
|Beginners_1| **Title:** :ref:`Display_Image`
......@@ -196,6 +207,9 @@ Here you can read tutorials about how to set up your computer to work with the O
* **Want to contribute, and see your own work between the OpenCV tutorials?**
.. tabularcolumns:: m{100pt} m{300pt}
.. cssclass:: toctableopencv
=============== ======================================================
|HowToWriteT| **Title:** :ref:`howToWriteTutorial`
......@@ -224,8 +238,9 @@ Here you can read tutorials about how to set up your computer to work with the O
../linux_eclipse/linux_eclipse
../windows_install/windows_install
../windows_visual_studio_Opencv/windows_visual_studio_Opencv
../android_binary_package/android_binary_package
../android_binary_package/android_binary_package_using_with_NDK
../android_binary_package/android_dev_intro
../android_binary_package/O4A_SDK
../android_binary_package/dev_with_OCV_on_Android
../ios_install/ios_install
../display_image/display_image
../load_save_image/load_save_image
......
......@@ -22,7 +22,7 @@ Theory
Code
====
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/objectDetection/objectDetection.cpp>`_ . The second version (using LBP for face detection) can be `found here <http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/tutorial_code/objectDetection/objectDetection2.cpp>`_
This tutorial code's is shown lines below. You can also download it from `here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/objectDetection/objectDetection.cpp>`_ . The second version (using LBP for face detection) can be `found here <http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/tutorial_code/objectDetection/objectDetection2.cpp>`_
.. code-block:: cpp
......
......@@ -2,7 +2,7 @@
OpenCV Tutorials
################
The following links describe a set of basic OpenCV tutorials. All the source code mentioned here is provide as part of the OpenCV regular releases, so check before you start copy & pasting the code. The list of tutorials below is automatically generated from reST files located in our SVN repository.
The following links describe a set of basic OpenCV tutorials. All the source code mentioned here is provide as part of the OpenCV regular releases, so check before you start copy & pasting the code. The list of tutorials below is automatically generated from reST files located in our GIT repository.
As always, we would be happy to hear your comments and receive your contributions on any tutorial.
......
......@@ -130,4 +130,4 @@ Flags specifing the needed generator type must be used in combination with parti
For more information please refer to the example of usage openni_capture.cpp_ in ``opencv/samples/cpp`` folder.
.. _openni_capture.cpp: http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/openni_capture.cpp
.. _openni_capture.cpp: http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/openni_capture.cpp
......@@ -6,7 +6,7 @@ The built framework is universal, it can be used to build app and run it on eith
Usage:
./build_framework.py <outputdir>
By cmake conventions (and especially if you work with OpenCV SVN repository),
By cmake conventions (and especially if you work with OpenCV repository),
the output dir should not be a subdirectory of OpenCV source tree.
Script will create <outputdir>, if it's missing, and a few its subdirectories:
......
......@@ -57,8 +57,9 @@ SET_TARGET_PROPERTIES(${the_target} PROPERTIES
RUNTIME_OUTPUT_DIRECTORY ${EXECUTABLE_OUTPUT_PATH}
)
# force strip library after the build command
ADD_CUSTOM_COMMAND( TARGET ${the_target} POST_BUILD COMMAND ${CMAKE_STRIP} "${LIBRARY_OUTPUT_PATH}/lib${the_target}.so" )
if (NOT (CMAKE_BUILD_TYPE MATCHES "debug"))
ADD_CUSTOM_COMMAND( TARGET ${the_target} POST_BUILD COMMAND ${CMAKE_STRIP} "${LIBRARY_OUTPUT_PATH}/lib${the_target}.so" )
endif()
install(TARGETS ${the_target} LIBRARY DESTINATION ${OPENCV_LIB_INSTALL_PATH} COMPONENT main)
......@@ -2446,6 +2446,6 @@ The above methods are usually enough for users. If you want to make your own alg
* Make a class and specify ``Algorithm`` as its base class.
* The algorithm parameters should be the class members. See ``Algorithm::get()`` for the list of possible types of the parameters.
* Add public virtual method ``AlgorithmInfo* info() const;`` to your class.
* Add constructor function, ``AlgorithmInfo`` instance and implement the ``info()`` method. The simplest way is to take http://code.opencv.org/svn/opencv/trunk/opencv/modules/ml/src/ml_init.cpp as the reference and modify it according to the list of your parameters.
* Add constructor function, ``AlgorithmInfo`` instance and implement the ``info()`` method. The simplest way is to take http://code.opencv.org/projects/opencv/repository/revisions/master/entry/modules/ml/src/ml_init.cpp as the reference and modify it according to the list of your parameters.
* Add some public function (e.g. ``initModule_<mymodule>()``) that calls info() of your algorithm and put it into the same source file as ``info()`` implementation. This is to force C++ linker to include this object file into the target application. See ``Algorithm::create()`` for details.
......@@ -59,5 +59,5 @@ While developing algorithms for multiple GPUs, note a data passing overhead. For
With this algorithm, a dual GPU gave a 180
%
performance increase comparing to the single Fermi GPU. For a source code example, see
http://code.opencv.org/svn/opencv/trunk/opencv/samples/gpu/.
http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/gpu/.
......@@ -294,7 +294,7 @@ The methods/functions grab the next frame from video file or camera and return t
The primary use of the function is in multi-camera environments, especially when the cameras do not have hardware synchronization. That is, you call ``VideoCapture::grab()`` for each camera and after that call the slower method ``VideoCapture::retrieve()`` to decode and get frame from each camera. This way the overhead on demosaicing or motion jpeg decompression etc. is eliminated and the retrieved frames from different cameras will be closer in time.
Also, when a connected camera is multi-head (for example, a stereo camera or a Kinect device), the correct way of retrieving data from it is to call `VideoCapture::grab` first and then call :ocv:func:`VideoCapture::retrieve` one or more times with different values of the ``channel`` parameter. See http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/kinect_maps.cpp
Also, when a connected camera is multi-head (for example, a stereo camera or a Kinect device), the correct way of retrieving data from it is to call `VideoCapture::grab` first and then call :ocv:func:`VideoCapture::retrieve` one or more times with different values of the ``channel`` parameter. See http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/kinect_maps.cpp
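A minimal sketch of this grab-then-retrieve pattern, assuming two independent cameras are connected at indices 0 and 1 (indices and window names are placeholders, not taken from an existing sample):

.. code-block:: cpp

   #include <opencv2/core/core.hpp>
   #include <opencv2/highgui/highgui.hpp>

   using namespace cv;

   int main()
   {
       VideoCapture cam0(0), cam1(1);            // two independent cameras (assumed indices)
       if (!cam0.isOpened() || !cam1.isOpened())
           return -1;

       Mat frame0, frame1;
       for (;;)
       {
           cam0.grab();                          // fast: only captures the raw frames
           cam1.grab();
           cam0.retrieve(frame0);                // slower: decodes the grabbed data
           cam1.retrieve(frame1);

           imshow("camera 0", frame0);
           imshow("camera 1", frame1);
           if (waitKey(30) >= 0)
               break;
       }
       return 0;
   }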
VideoCapture::retrieve
......
......@@ -203,7 +203,7 @@ Sets mouse handler for the specified window
:param winname: Window name
:param onMouse: Mouse callback. See OpenCV samples, such as http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/ffilldemo.cpp, on how to specify and use the callback.
:param onMouse: Mouse callback. See OpenCV samples, such as http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/ffilldemo.cpp, on how to specify and use the callback.
:param userdata: The optional parameter passed to the callback.
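As a rough usage sketch (the window name and input file are placeholders), a handler that marks clicked points and receives the image through ``userdata`` could be registered like this:

.. code-block:: cpp

   #include <opencv2/core/core.hpp>
   #include <opencv2/highgui/highgui.hpp>

   using namespace cv;

   // signature expected for the onMouse callback
   static void onMouse(int event, int x, int y, int /*flags*/, void* userdata)
   {
       if (event != CV_EVENT_LBUTTONDOWN)
           return;
       Mat* img = (Mat*)userdata;                     // image passed through userdata
       circle(*img, Point(x, y), 3, Scalar(0, 0, 255), -1);
       imshow("image", *img);
   }

   int main(int argc, char** argv)
   {
       Mat img = imread(argc > 1 ? argv[1] : "some_image.jpg");  // placeholder file name
       if (img.empty())
           return -1;

       namedWindow("image");
       setMouseCallback("image", onMouse, &img);      // register handler for the "image" window
       imshow("image", img);
       waitKey(0);
       return 0;
   }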
......
......@@ -202,7 +202,7 @@ Approximates a polygonal curve(s) with the specified precision.
The functions ``approxPolyDP`` approximate a curve or a polygon with another curve/polygon with less vertices so that the distance between them is less or equal to the specified precision. It uses the Douglas-Peucker algorithm
http://en.wikipedia.org/wiki/Ramer-Douglas-Peucker_algorithm
See http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/contours.cpp for the function usage model.
See http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/contours.cpp for the function usage model.
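As an illustration only (the input file name and the threshold value are arbitrary assumptions), the typical call sequence is to find the contours first and then approximate each of them:

.. code-block:: cpp

   #include <opencv2/core/core.hpp>
   #include <opencv2/imgproc/imgproc.hpp>
   #include <opencv2/highgui/highgui.hpp>

   #include <vector>

   using namespace cv;
   using namespace std;

   int main(int argc, char** argv)
   {
       Mat src = imread(argc > 1 ? argv[1] : "shapes.png", 0);   // grayscale, placeholder name
       if (src.empty())
           return -1;

       Mat bw;
       threshold(src, bw, 128, 255, THRESH_BINARY);

       vector<vector<Point> > contours;
       findContours(bw, contours, CV_RETR_EXTERNAL, CV_CHAIN_APPROX_SIMPLE);

       vector<vector<Point> > approxed(contours.size());
       for (size_t i = 0; i < contours.size(); i++)
       {
           // precision chosen relative to the contour perimeter
           double epsilon = 0.01 * arcLength(Mat(contours[i]), true);
           approxPolyDP(Mat(contours[i]), approxed[i], epsilon, true);
       }

       Mat drawing = Mat::zeros(bw.size(), CV_8UC3);
       drawContours(drawing, approxed, -1, Scalar(0, 255, 0));
       imshow("approximated contours", drawing);
       waitKey(0);
       return 0;
   }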
ApproxChains
......
......@@ -181,7 +181,10 @@ if(ANDROID)
# force strip library after the build command
# because samples and tests will make a copy of the library before install
get_target_property(__opencv_java_location ${the_module} LOCATION)
# Turn off stripping in debug build
if ( NOT (CMAKE_BUILD_TYPE MATCHES "debug"))
add_custom_command(TARGET ${the_module} POST_BUILD COMMAND ${CMAKE_STRIP} --strip-unneeded "${__opencv_java_location}")
endif()
set(lib_proj_files "")
......
......@@ -21,7 +21,7 @@ The word "cascade" in the classifier name means that the resultant classifier co
The feature used in a particular classifier is specified by its shape (1a, 2b etc.), position within the region of interest and the scale (this scale is not the same as the scale used at the detection stage, though these two scales are multiplied). For example, in the case of the third line feature (2c) the response is calculated as the difference between the sum of image pixels under the rectangle covering the whole feature (including the two white stripes and the black stripe in the middle) and the sum of the image pixels under the black stripe multiplied by 3 in order to compensate for the differences in the size of areas. The sums of pixel values over a rectangular regions are calculated rapidly using integral images (see below and the :ocv:func:`integral` description).
To see the object detector at work, have a look at the facedetect demo:
http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/facedetect.cpp
http://code.opencv.org/projects/opencv/repository/revisions/master/entry/samples/cpp/facedetect.cpp
The following reference is for the detection part only. There is a separate application called ``opencv_traincascade`` that can train a cascade of boosted classifiers from a set of samples.
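For a quick impression of the detection API (the facedetect demo linked above is the complete version), a minimal sketch might look like this, assuming the trained cascade XML and the input image are available in the working directory:

.. code-block:: cpp

   #include <opencv2/core/core.hpp>
   #include <opencv2/imgproc/imgproc.hpp>
   #include <opencv2/objdetect/objdetect.hpp>
   #include <opencv2/highgui/highgui.hpp>

   #include <vector>

   using namespace cv;
   using namespace std;

   int main(int argc, char** argv)
   {
       CascadeClassifier face_cascade;
       // cascade file shipped in the OpenCV data/haarcascades folder (path is an assumption)
       if (!face_cascade.load("haarcascade_frontalface_alt.xml"))
           return -1;

       Mat img = imread(argc > 1 ? argv[1] : "people.jpg");      // placeholder file name
       if (img.empty())
           return -1;

       Mat gray;
       cvtColor(img, gray, CV_BGR2GRAY);
       equalizeHist(gray, gray);

       vector<Rect> faces;
       face_cascade.detectMultiScale(gray, faces, 1.1, 3, 0, Size(30, 30));

       for (size_t i = 0; i < faces.size(); i++)
           rectangle(img, faces[i], Scalar(0, 255, 0), 2);

       imshow("detections", img);
       waitKey(0);
       return 0;
   }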
......
......@@ -115,5 +115,5 @@ public class ColorBlobDetector
// Color radius for range checking in HSV color space
private Scalar mColorRadius = new Scalar(25,50,50,0);
private Mat mSpectrum = new Mat();
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();;
private List<MatOfPoint> mContours = new ArrayList<MatOfPoint>();
}
......@@ -115,6 +115,7 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
......
......@@ -127,7 +127,7 @@ class ImageManipulationsView extends SampleCvViewBase {
case ImageManipulationsActivity.VIEW_MODE_HIST:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mSizeRgba == null)
if ((mSizeRgba == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
int thikness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
if(thikness > 5) thikness = 5;
......@@ -171,7 +171,7 @@ class ImageManipulationsView extends SampleCvViewBase {
case ImageManipulationsActivity.VIEW_MODE_CANNY:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mRgbaInnerWindow == null || mGrayInnerWindow == null)
if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
......@@ -181,7 +181,7 @@ class ImageManipulationsView extends SampleCvViewBase {
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
if (mRgbaInnerWindow == null || mGrayInnerWindow == null)
if ((mRgbaInnerWindow == null) || (mGrayInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
......@@ -196,7 +196,7 @@ class ImageManipulationsView extends SampleCvViewBase {
case ImageManipulationsActivity.VIEW_MODE_ZOOM:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mZoomCorner == null || mZoomWindow == null)
if ((mZoomCorner == null) || (mZoomWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());
......@@ -206,7 +206,7 @@ class ImageManipulationsView extends SampleCvViewBase {
case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mRgbaInnerWindow == null)
if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
Imgproc.resize(mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
......@@ -214,7 +214,7 @@ class ImageManipulationsView extends SampleCvViewBase {
case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
if (mRgbaInnerWindow == null)
if ((mRgbaInnerWindow == null) || (mRgba.cols() != mSizeRgba.width) || (mRgba.height() != mSizeRgba.height))
CreateAuxiliaryMats();
/*
Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
......
......@@ -10,13 +10,13 @@ import android.view.MenuItem;
import android.view.Window;
public class Sample0Base extends Activity {
private static final String TAG = "Sample::Activity";
private MenuItem mItemPreviewRGBA;
private MenuItem mItemPreviewGray;
private Sample0View mView;
public Sample0Base() {
Log.i(TAG, "Instantiated new " + this.getClass());
}
......
......@@ -44,7 +44,7 @@ class Sample2View extends SampleCvViewBase {
break;
case Sample2NativeCamera.VIEW_MODE_RGBA:
capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2, new Scalar(255, 0, 0, 255), 3);
Core.putText(mRgba, "OpenCV+Android", new Point(10, 50), 3, 1, new Scalar(255, 0, 0, 255), 2);
break;
case Sample2NativeCamera.VIEW_MODE_CANNY:
capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
......
......@@ -115,7 +115,8 @@ public abstract class SampleCvViewBase extends SurfaceView implements SurfaceHol
if (bmp != null) {
Canvas canvas = mHolder.lockCanvas();
if (canvas != null) {
canvas.drawBitmap(bmp, (canvas.getWidth() - bmp.getWidth()) / 2, (canvas.getHeight() - bmp.getHeight()) / 2, null);
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
canvas.drawBitmap(bmp, (canvas.getWidth()-bmp.getWidth()) / 2, (canvas.getHeight()-bmp.getHeight()) / 2, null);
mHolder.unlockCanvasAndPost(canvas);
}
bmp.recycle();
......
......@@ -15,7 +15,7 @@ if __name__ == "__main__":
im = cv.LoadImageM(fileName, False)
im3 = cv.LoadImageM(fileName, True)
except: # if local copy cannot be opened, try downloading it
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/cpp/left01.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/cpp/left01.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -9,7 +9,7 @@ def load_sample(name=None):
try:
img0 = cv.LoadImage(name, cv.CV_LOAD_IMAGE_COLOR)
except IOError:
urlbase = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/'
urlbase = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/'
file = name.split('/')[-1]
filedata = urllib2.urlopen(urlbase+file).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
......
......@@ -68,7 +68,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
src_image = cv.GetMat(cv.LoadImage(sys.argv[1], 0))
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/baboon.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/baboon.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -51,7 +51,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
im = cv.LoadImage( sys.argv[1], cv.CV_LOAD_IMAGE_GRAYSCALE)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/baboon.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/baboon.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -42,7 +42,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
gray = cv.LoadImage(sys.argv[1], cv.CV_LOAD_IMAGE_GRAYSCALE)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/stuff.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/stuff.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -32,7 +32,7 @@ if __name__ == '__main__':
if len(sys.argv) > 1:
im = cv.LoadImage( sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/fruits.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/fruits.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -73,7 +73,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
im = cv.LoadImage( sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/fruits.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/fruits.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -82,7 +82,7 @@ if __name__ == '__main__':
if len(sys.argv) > 1:
source_image = cv.LoadImage(sys.argv[1], cv.CV_LOAD_IMAGE_GRAYSCALE)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/stuff.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/stuff.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -14,7 +14,7 @@ if __name__ == "__main__":
filename = sys.argv[1]
src = cv.LoadImage(filename, cv.CV_LOAD_IMAGE_GRAYSCALE)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/doc/pics/building.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/doc/pics/building.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -27,7 +27,7 @@ if __name__=="__main__":
if len(sys.argv) > 1:
img0 = cv.LoadImage( sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/fruits.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/fruits.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -23,7 +23,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
src = cv.LoadImage( sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/fruits.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/fruits.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -31,7 +31,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
src = cv.LoadImage(sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/fruits.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/fruits.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -22,7 +22,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
img0 = cv.LoadImageM( sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/lena.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/lena.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
......@@ -27,7 +27,7 @@ if __name__ == "__main__":
if len(sys.argv) > 1:
img0 = cv.LoadImage( sys.argv[1], cv.CV_LOAD_IMAGE_COLOR)
else:
url = 'http://code.opencv.org/svn/opencv/trunk/opencv/samples/c/fruits.jpg'
url = 'http://code.opencv.org/projects/opencv/repository/revisions/master/raw/samples/c/fruits.jpg'
filedata = urllib2.urlopen(url).read()
imagefiledata = cv.CreateMatHeader(1, len(filedata), cv.CV_8UC1)
cv.SetData(imagefiledata, filedata, len(filedata))
......
'''
SVN and KNearest digit recognition.
SVM and KNearest digit recognition.
Sample loads a dataset of handwritten digits from 'digits.png'.
Then it trains a SVN and KNearest classifiers on it and evaluates
Then it trains a SVM and KNearest classifiers on it and evaluates
their accuracy.
Following preprocessing is applied to the dataset:
......
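As a rough illustration of what the docstring above describes (train SVM and KNearest classifiers, then score them on held-out data), here is a minimal sketch using the current cv2.ml API rather than the 2012-era cv2.SVM/cv2.KNearest classes; 'samples' and 'labels' are assumed to be an N x D float32 feature matrix and an N-element int32 label vector, e.g. obtained by slicing digits.png into 20x20 cells, and the C/gamma values are illustrative.

import numpy as np
import cv2

def train_and_score(samples, labels):
    # hold out the last 10% of the (pre-shuffled) data for evaluation
    n = int(0.9 * len(samples))
    train_s, test_s = samples[:n], samples[n:]
    train_l, test_l = labels[:n], labels[n:]

    # k-nearest-neighbours classifier
    knn = cv2.ml.KNearest_create()
    knn.train(train_s, cv2.ml.ROW_SAMPLE, train_l)
    _ret, knn_pred, _neigh, _dist = knn.findNearest(test_s, k=4)

    # RBF-kernel SVM classifier (illustrative C/gamma values)
    svm = cv2.ml.SVM_create()
    svm.setType(cv2.ml.SVM_C_SVC)
    svm.setKernel(cv2.ml.SVM_RBF)
    svm.setC(2.67)
    svm.setGamma(5.383)
    svm.train(train_s, cv2.ml.ROW_SAMPLE, train_l)
    _ret, svm_pred = svm.predict(test_s)

    knn_acc = np.mean(knn_pred.astype(np.int32).ravel() == test_l.ravel())
    svm_acc = np.mean(svm_pred.astype(np.int32).ravel() == test_l.ravel())
    return knn_acc, svm_acc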
'''
Digit recognition adjustment.
Grid search is used to find the best parameters for SVN and KNearest classifiers.
Grid search is used to find the best parameters for SVM and KNearest classifiers.
SVM adjustment follows the guidelines given in
http://www.csie.ntu.edu.tw/~cjlin/papers/guide/guide.pdf
......
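The grid search mentioned in this docstring can be sketched as below; the parameter grids are hypothetical and the validation split is assumed to be prepared by the caller, so this is only an outline of the idea, not the sample's actual implementation.

import itertools
import numpy as np
import cv2

def best_svm_params(train_s, train_l, val_s, val_l):
    # exhaustively try (C, gamma) pairs and keep the best validation accuracy
    best_acc, best_params = 0.0, None
    for C, gamma in itertools.product(np.logspace(-1, 3, 5), np.logspace(-4, 0, 5)):
        svm = cv2.ml.SVM_create()
        svm.setType(cv2.ml.SVM_C_SVC)
        svm.setKernel(cv2.ml.SVM_RBF)
        svm.setC(float(C))
        svm.setGamma(float(gamma))
        svm.train(train_s, cv2.ml.ROW_SAMPLE, train_l)
        _ret, pred = svm.predict(val_s)
        acc = np.mean(pred.astype(np.int32).ravel() == val_l.ravel())
        if acc > best_acc:
            best_acc, best_params = acc, (float(C), float(gamma))
    return best_acc, best_params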