Commit 2c6f1ab5 authored by Alexander Alekhin

Merge remote-tracking branch 'upstream/3.4' into merge-3.4

parents 000110e7 e75576e1
@@ -347,6 +347,7 @@ OCV_OPTION(GENERATE_ABI_DESCRIPTOR "Generate XML file for abi_compliance_chec
OCV_OPTION(CV_ENABLE_INTRINSICS "Use intrinsic-based optimized code" ON )
OCV_OPTION(CV_DISABLE_OPTIMIZATION "Disable explicit optimized code (dispatched code/intrinsics/loop unrolling/etc)" OFF )
OCV_OPTION(CV_TRACE "Enable OpenCV code trace" ON)
OCV_OPTION(OPENCV_GENERATE_SETUPVARS "Generate setup_vars* scripts" ON IF (NOT ANDROID AND NOT APPLE_FRAMEWORK) )
OCV_OPTION(ENABLE_PYLINT "Add target with Pylint checks" (BUILD_DOCS OR BUILD_EXAMPLES) IF (NOT CMAKE_CROSSCOMPILING AND NOT APPLE_FRAMEWORK) )
OCV_OPTION(ENABLE_FLAKE8 "Add target with Python flake8 checker" (BUILD_DOCS OR BUILD_EXAMPLES) IF (NOT CMAKE_CROSSCOMPILING AND NOT APPLE_FRAMEWORK) )
@@ -929,6 +930,10 @@ if(COMMAND ocv_pylint_finalize)
ocv_pylint_finalize()
endif()
if(OPENCV_GENERATE_SETUPVARS)
include(cmake/OpenCVGenSetupVars.cmake)
endif()
# ----------------------------------------------------------------------------
# Summary:
# ----------------------------------------------------------------------------
......
if(WIN32)
ocv_update(OPENCV_SETUPVARS_INSTALL_PATH ".")
ocv_update(OPENCV_SCRIPT_EXTENSION ".cmd")
ocv_update(OPENCV_SETUPVARS_TEMPLATE "setup_vars_win32.cmd.in")
else()
ocv_update(OPENCV_SETUPVARS_INSTALL_PATH "bin")
ocv_update(OPENCV_SCRIPT_EXTENSION ".sh")
if(APPLE)
ocv_update(OPENCV_SETUPVARS_TEMPLATE "setup_vars_macosx.sh.in")
else()
ocv_update(OPENCV_SETUPVARS_TEMPLATE "setup_vars_linux.sh.in")
endif()
endif()
if(INSTALL_TO_MANGLED_PATHS)
ocv_update(OPENCV_SETUPVARS_FILENAME "setup_vars_opencv-${OPENCV_VERSION}${OPENCV_SCRIPT_EXTENSION}")
else()
ocv_update(OPENCV_SETUPVARS_FILENAME setup_vars_opencv3${OPENCV_SCRIPT_EXTENSION})
endif()
##### build directory
if(WIN32)
set(__build_type "${CMAKE_BUILD_TYPE}")
if(NOT __build_type)
set(__build_type "Release") # default
endif()
file(RELATIVE_PATH OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG "${OpenCV_BINARY_DIR}/" "${EXECUTABLE_OUTPUT_PATH}/${__build_type}/")
else()
file(RELATIVE_PATH OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG "${OpenCV_BINARY_DIR}/" "${LIBRARY_OUTPUT_PATH}/")
endif()
set(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "python_loader") # https://github.com/opencv/opencv/pull/12977
configure_file("${OpenCV_SOURCE_DIR}/cmake/templates/${OPENCV_SETUPVARS_TEMPLATE}" "${CMAKE_BINARY_DIR}/tmp/setup_vars${OPENCV_SCRIPT_EXTENSION}" @ONLY)
file(COPY "${CMAKE_BINARY_DIR}/tmp/setup_vars${OPENCV_SCRIPT_EXTENSION}" DESTINATION "${CMAKE_BINARY_DIR}"
FILE_PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE)
##### install directory
if(WIN32)
file(RELATIVE_PATH OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG
"${CMAKE_INSTALL_PREFIX}/${OPENCV_SETUPVARS_INSTALL_PATH}/" "${CMAKE_INSTALL_PREFIX}/${OPENCV_BIN_INSTALL_PATH}/")
else()
file(RELATIVE_PATH OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG
"${CMAKE_INSTALL_PREFIX}/${OPENCV_SETUPVARS_INSTALL_PATH}/" "${CMAKE_INSTALL_PREFIX}/${OPENCV_LIB_INSTALL_PATH}/")
endif()
file(RELATIVE_PATH OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG
"${CMAKE_INSTALL_PREFIX}/${OPENCV_SETUPVARS_INSTALL_PATH}/" "${CMAKE_INSTALL_PREFIX}/")
ocv_path_join(OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG "${OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG}" "python_loader") # https://github.com/opencv/opencv/pull/12977
configure_file("${OpenCV_SOURCE_DIR}/cmake/templates/${OPENCV_SETUPVARS_TEMPLATE}" "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/install/${OPENCV_SETUPVARS_FILENAME}" @ONLY)
install(FILES "${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/install/${OPENCV_SETUPVARS_FILENAME}"
DESTINATION "${OPENCV_SETUPVARS_INSTALL_PATH}"
PERMISSIONS OWNER_READ OWNER_WRITE OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ WORLD_EXECUTE
COMPONENT scripts)
@@ -149,7 +149,9 @@ macro(ocv_path_join result_var P1 P2_)
else()
set(${result_var} "${P1}/${P2}")
endif()
-string(REGEX REPLACE "([/\\]?)[\\.][/\\]" "\\1" ${result_var} "${${result_var}}")
string(REPLACE "\\\\" "\\" ${result_var} "${${result_var}}")
string(REPLACE "//" "/" ${result_var} "${${result_var}}")
string(REGEX REPLACE "(^|[/\\])[\\.][/\\]" "\\1" ${result_var} "${${result_var}}")
if("${${result_var}}" STREQUAL "")
set(${result_var} ".")
endif()
......
#!/bin/bash
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
[[ ! "${OPENCV_QUIET}" ]] && ( echo "Setting vars for OpenCV @OPENCV_VERSION@" )
export LD_LIBRARY_PATH="$SCRIPT_DIR/@OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG@:$LD_LIBRARY_PATH"
if [[ ! "$OPENCV_SKIP_PYTHON" ]]; then
PYTHONPATH_OPENCV="$SCRIPT_DIR/@OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG@"
[[ ! "${OPENCV_QUIET}" ]] && ( echo "Append PYTHONPATH: ${PYTHONPATH_OPENCV}" )
export PYTHONPATH="${PYTHONPATH_OPENCV}:$PYTHONPATH"
fi
# Don't exec in "sourced" mode
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
if [[ $# -ne 0 ]]; then
[[ ! "${OPENCV_QUIET}" && "${OPENCV_VERBOSE}" ]] && ( echo "Executing: $*" )
exec "$@"
fi
fi
#!/bin/bash
SCRIPT_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
[[ ! "${OPENCV_QUIET}" ]] && ( echo "Setting vars for OpenCV @OPENCV_VERSION@" )
export DYLD_LIBRARY_PATH="$SCRIPT_DIR/@OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG@:$DYLD_LIBRARY_PATH"
if [[ ! "$OPENCV_SKIP_PYTHON" ]]; then
PYTHONPATH_OPENCV="$SCRIPT_DIR/@OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG@"
[[ ! "${OPENCV_QUIET}" ]] && ( echo "Append PYTHONPATH: ${PYTHONPATH_OPENCV}" )
export PYTHONPATH="${PYTHONPATH_OPENCV}:$PYTHONPATH"
fi
# Don't exec in "sourced" mode
if [[ "${BASH_SOURCE[0]}" == "${0}" ]]; then
if [[ $# -ne 0 ]]; then
[[ ! "${OPENCV_QUIET}" && "${OPENCV_VERBOSE}" ]] && ( echo "Executing: $*" )
exec "$@"
fi
fi
@ECHO OFF
SETLOCAL EnableDelayedExpansion
SET "SCRIPT_DIR=%~dp0"
IF NOT DEFINED OPENCV_QUIET ( ECHO Setting vars for OpenCV @OPENCV_VERSION@ )
SET "PATH=!SCRIPT_DIR!\@OPENCV_LIB_RUNTIME_DIR_RELATIVE_CMAKECONFIG@;%PATH%"
IF NOT DEFINED OPENCV_SKIP_PYTHON (
SET "PYTHONPATH_OPENCV=!SCRIPT_DIR!\@OPENCV_PYTHON_DIR_RELATIVE_CMAKECONFIG@"
IF NOT DEFINED OPENCV_QUIET ( ECHO Append PYTHONPATH: !PYTHONPATH_OPENCV! )
SET "PYTHONPATH=!PYTHONPATH_OPENCV!;%PYTHONPATH%"
)
IF NOT [%1] == [] (
%*
EXIT /B !errorlevel!
)
@@ -107,7 +107,7 @@ RECURSIVE = YES
EXCLUDE = @CMAKE_DOXYGEN_EXCLUDE_LIST@
EXCLUDE_SYMLINKS = NO
EXCLUDE_PATTERNS = *.inl.hpp *.impl.hpp *_detail.hpp */cudev/**/detail/*.hpp *.m */opencl/runtime/*
-EXCLUDE_SYMBOLS = cv::DataType<*> cv::traits::* int void CV__*
EXCLUDE_SYMBOLS = cv::DataType<*> cv::traits::* int void CV__* T __CV*
EXAMPLE_PATH = @CMAKE_DOXYGEN_EXAMPLE_PATH@
EXAMPLE_PATTERNS = *
EXAMPLE_RECURSIVE = YES
@@ -255,6 +255,8 @@ PREDEFINED = __cplusplus=1 \
CV_DEFAULT(x)=" = x" \
CV_NEON=1 \
CV_SSE2=1 \
CV__DEBUG_NS_BEGIN= \
CV__DEBUG_NS_END= \
CV_DEPRECATED=
EXPAND_AS_DEFINED =
SKIP_FUNCTION_MACROS = YES
......
@@ -174,7 +174,7 @@ void CirclesGridClusterFinder::findGrid(const std::vector<cv::Point2f> &points,
if(outsideCorners.size() != outsideCornersCount)
return;
}
-getSortedCorners(hull2f, corners, outsideCorners, sortedCorners);
getSortedCorners(hull2f, patternPoints, corners, outsideCorners, sortedCorners);
if(sortedCorners.size() != cornersCount)
return;
@@ -291,7 +291,18 @@ void CirclesGridClusterFinder::findOutsideCorners(const std::vector<cv::Point2f>
#endif
}
-void CirclesGridClusterFinder::getSortedCorners(const std::vector<cv::Point2f> &hull2f, const std::vector<cv::Point2f> &corners, const std::vector<cv::Point2f> &outsideCorners, std::vector<cv::Point2f> &sortedCorners)
namespace {
double pointLineDistance(const cv::Point2f &p, const cv::Vec4f &line)
{
Vec3f pa( line[0], line[1], 1 );
Vec3f pb( line[2], line[3], 1 );
Vec3f l = pa.cross(pb);
return std::abs((p.x * l[0] + p.y * l[1] + l[2])) * 1.0 /
std::sqrt(double(l[0] * l[0] + l[1] * l[1]));
}
}
void CirclesGridClusterFinder::getSortedCorners(const std::vector<cv::Point2f> &hull2f, const std::vector<cv::Point2f> &patternPoints, const std::vector<cv::Point2f> &corners, const std::vector<cv::Point2f> &outsideCorners, std::vector<cv::Point2f> &sortedCorners)
{
Point2f firstCorner;
if(isAsymmetricGrid)
@@ -337,10 +348,26 @@ void CirclesGridClusterFinder::getSortedCorners(const std::vector<cv::Point2f> &
if(!isAsymmetricGrid)
{
-double dist1 = norm(sortedCorners[0] - sortedCorners[1]);
-double dist2 = norm(sortedCorners[1] - sortedCorners[2]);
double dist01 = norm(sortedCorners[0] - sortedCorners[1]);
double dist12 = norm(sortedCorners[1] - sortedCorners[2]);
// Use half the average distance between circles on the shorter side as threshold for determining whether a point lies on an edge.
double thresh = min(dist01, dist12) / min(patternSize.width, patternSize.height) / 2;
size_t circleCount01 = 0;
size_t circleCount12 = 0;
Vec4f line01( sortedCorners[0].x, sortedCorners[0].y, sortedCorners[1].x, sortedCorners[1].y );
Vec4f line12( sortedCorners[1].x, sortedCorners[1].y, sortedCorners[2].x, sortedCorners[2].y );
// Count the circles along both edges.
for (size_t i = 0; i < patternPoints.size(); i++)
{
if (pointLineDistance(patternPoints[i], line01) < thresh)
circleCount01++;
if (pointLineDistance(patternPoints[i], line12) < thresh)
circleCount12++;
}
-if((dist1 > dist2 && patternSize.height > patternSize.width) || (dist1 < dist2 && patternSize.height < patternSize.width))
// Ensure that the edge from sortedCorners[0] to sortedCorners[1] is the one with more circles (i.e. it is interpreted as the pattern's width).
if ((circleCount01 > circleCount12 && patternSize.height > patternSize.width) || (circleCount01 < circleCount12 && patternSize.height < patternSize.width))
{
for(size_t i=0; i<sortedCorners.size()-1; i++)
{
......
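For reference, the pointLineDistance helper added above is the standard homogeneous-coordinates point-to-line distance: the line through the two endpoints is their cross product, and the distance follows from its coefficients (the notation below is ours, not from the patch):

\[ \ell = (x_1, y_1, 1) \times (x_2, y_2, 1), \qquad d(p, \ell) = \frac{|\ell_0 p_x + \ell_1 p_y + \ell_2|}{\sqrt{\ell_0^2 + \ell_1^2}} \]

The rewritten orientation check uses it to count how many detected pattern points lie within half the average circle spacing of each of the two edges meeting at sortedCorners[1], and compares those counts, rather than the raw edge lengths the old code compared, when deciding which edge corresponds to patternSize.width.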
@@ -67,7 +67,7 @@ public:
private:
void findCorners(const std::vector<cv::Point2f> &hull2f, std::vector<cv::Point2f> &corners);
void findOutsideCorners(const std::vector<cv::Point2f> &corners, std::vector<cv::Point2f> &outsideCorners);
-void getSortedCorners(const std::vector<cv::Point2f> &hull2f, const std::vector<cv::Point2f> &corners, const std::vector<cv::Point2f> &outsideCorners, std::vector<cv::Point2f> &sortedCorners);
void getSortedCorners(const std::vector<cv::Point2f> &hull2f, const std::vector<cv::Point2f> &patternPoints, const std::vector<cv::Point2f> &corners, const std::vector<cv::Point2f> &outsideCorners, std::vector<cv::Point2f> &sortedCorners);
void rectifyPatternPoints(const std::vector<cv::Point2f> &patternPoints, const std::vector<cv::Point2f> &sortedCorners, std::vector<cv::Point2f> &rectifiedPatternPoints);
void parsePatternPoints(const std::vector<cv::Point2f> &patternPoints, const std::vector<cv::Point2f> &rectifiedPatternPoints, std::vector<cv::Point2f> &centers);
......
@@ -624,5 +624,24 @@ TEST(Calib3d_AsymmetricCirclesPatternDetector, accuracy) { CV_ChessboardDetector
TEST(Calib3d_AsymmetricCirclesPatternDetectorWithClustering, accuracy) { CV_ChessboardDetectorTest test( ASYMMETRIC_CIRCLES_GRID, CALIB_CB_CLUSTERING ); test.safe_run(); }
#endif
TEST(Calib3d_CirclesPatternDetectorWithClustering, accuracy)
{
cv::String dataDir = string(TS::ptr()->get_data_path()) + "cv/cameracalibration/circles/";
cv::Mat expected;
FileStorage fs(dataDir + "circles_corners15.dat", FileStorage::READ);
fs["corners"] >> expected;
fs.release();
cv::Mat image = cv::imread(dataDir + "circles15.png");
std::vector<Point2f> centers;
cv::findCirclesGrid(image, Size(10, 8), centers, CALIB_CB_SYMMETRIC_GRID | CALIB_CB_CLUSTERING);
ASSERT_EQ(expected.total(), centers.size());
double error = calcError(centers, expected);
ASSERT_LE(error, precise_success_error_level);
}
}} // namespace
/* End of file. */
@@ -2959,7 +2959,7 @@ An example on K-means clustering
/** @brief Finds centers of clusters and groups input samples around the clusters.
The function kmeans implements a k-means algorithm that finds the centers of cluster_count clusters
-and groups the input samples around the clusters. As an output, \f$\texttt{labels}_i\f$ contains a
and groups the input samples around the clusters. As an output, \f$\texttt{bestLabels}_i\f$ contains a
0-based cluster index for the sample stored in the \f$i^{th}\f$ row of the samples matrix.
@note
......
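For context on the parameter named in the corrected text above, a minimal usage sketch of cv::kmeans (values are illustrative, not part of the patch):

    #include <opencv2/core.hpp>

    int main()
    {
        // Cluster 100 random 2-D points into K = 3 groups.
        cv::Mat samples(100, 2, CV_32F);
        cv::randu(samples, cv::Scalar(0), cv::Scalar(255));

        cv::Mat bestLabels, centers;
        cv::kmeans(samples, 3, bestLabels,
                   cv::TermCriteria(cv::TermCriteria::EPS + cv::TermCriteria::COUNT, 10, 1.0),
                   3, cv::KMEANS_PP_CENTERS, centers);

        // bestLabels.at<int>(i) is the 0-based cluster index of samples row i,
        // centers holds one row per cluster center.
        return 0;
    }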
@@ -325,7 +325,7 @@ This macro can be used to construct an error message on-fly to include some dyna
for example:
@code
// note the extra parentheses around the formatted text message
-CV_Error_( CV_StsOutOfRange,
CV_Error_(Error::StsOutOfRange,
("the value at (%d, %d)=%g is out of range", badPt.x, badPt.y, badValue));
@endcode
@param code one of Error::Code
......
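Errors raised through this macro surface as cv::Exception; a minimal sketch of triggering and catching one with the updated constant (illustrative only):

    #include <opencv2/core.hpp>
    #include <iostream>

    int main()
    {
        try
        {
            // The extra parentheses wrap the printf-style arguments, as the documentation notes.
            CV_Error_(cv::Error::StsOutOfRange, ("the value %d is out of range", 42));
        }
        catch (const cv::Exception& e)
        {
            std::cout << "caught: " << e.what() << std::endl;  // message includes the formatted text
        }
        return 0;
    }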
@@ -521,7 +521,7 @@ static inline size_t divUp(size_t a, unsigned int b)
/** @brief Enables or disables the optimized code.
-The function can be used to dynamically turn on and off optimized code (code that uses SSE2, AVX,
The function can be used to dynamically turn on and off optimized dispatched code (code that uses SSE4.2, AVX/AVX2,
and other instructions on the platforms that support it). It sets a global flag that is further
checked by OpenCV functions. Since the flag is not checked in the inner OpenCV loops, it is only
safe to call the function on the very top level in your application where you can be sure that no
......
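A small usage sketch of the flag described above (illustrative only):

    #include <opencv2/core/utility.hpp>

    int main()
    {
        bool wasOptimized = cv::useOptimized();  // query the current state
        cv::setUseOptimized(false);              // run subsequent calls without dispatched optimizations
        // ... call the OpenCV code whose plain path you want to exercise ...
        cv::setUseOptimized(wasOptimized);       // restore the previous state
        return 0;
    }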
@@ -765,6 +765,15 @@
"v_type": "Mat",
"j_import": "org.opencv.core.MatOfRect2d"
},
"vector_RotatedRect": {
"j_type": "MatOfRotatedRect",
"jn_type": "long",
"jni_type": "jlong",
"jni_var": "std::vector< RotatedRect > %(n)s",
"suffix": "J",
"v_type": "Mat",
"j_import": "org.opencv.core.MatOfRotatedRect"
},
"vector_String": {
"j_type": "List<String>",
"jn_type": "List<String>",
......
@@ -1989,7 +1989,7 @@ cvWaitKey( int delay )
MSG message;
int is_processed = 0;
-if( delay <= 0 )
if( (delay <= 0) && hg_windows)
GetMessage(&message, 0, 0, 0);
else if( PeekMessage(&message, 0, 0, 0, PM_REMOVE) == FALSE )
{
......
@@ -947,7 +947,12 @@ static void common_matchTemplate( Mat& img, Mat& templ, Mat& result, int method,
if( isNormed )
{
-t = std::sqrt(MAX(wndSum2 - wndMean2,0))*templNorm;
double diff2 = MAX(wndSum2 - wndMean2, 0);
if (diff2 <= std::min(0.5, 10 * FLT_EPSILON * wndSum2))
t = 0; // avoid rounding errors
else
t = std::sqrt(diff2)*templNorm;
if( fabs(num) < t )
num /= t;
else if( fabs(num) < t*1.125 )
......
@@ -107,6 +107,20 @@ void vector_Rect2d_to_Mat(std::vector<Rect2d>& v_rect, Mat& mat)
mat = Mat(v_rect, true);
}
//vector_RotatedRect
void Mat_to_vector_RotatedRect(Mat& mat, std::vector<RotatedRect>& v_rect)
{
v_rect.clear();
CHECK_MAT(mat.type()==CV_32FC(5) && mat.cols==1);
v_rect = (std::vector<RotatedRect>) mat;
}
void vector_RotatedRect_to_Mat(std::vector<RotatedRect>& v_rect, Mat& mat)
{
mat = Mat(v_rect, true);
}
//vector_Point
void Mat_to_vector_Point(Mat& mat, std::vector<Point>& v_point)
{
......
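The converters above rely on a std::vector<cv::RotatedRect> mapping to a single-column CV_32FC(5) Mat, one row per rectangle, packed as (center.x, center.y, size.width, size.height, angle); the Java helpers added further below read and write the same layout. A minimal native-side sketch (values are illustrative):

    #include <opencv2/core.hpp>
    #include <vector>

    int main()
    {
        std::vector<cv::RotatedRect> rects;
        rects.push_back(cv::RotatedRect(cv::Point2f(2, 2), cv::Size2f(5, 2), 7));

        cv::Mat m(rects, /*copyData=*/true);            // 1 x 1 Mat of type CV_32FC(5)
        CV_Assert(m.type() == CV_32FC(5) && m.cols == 1);

        cv::Vec<float, 5> row = m.at<cv::Vec<float, 5> >(0);
        // row = (center.x, center.y, size.width, size.height, angle) = (2, 2, 5, 2, 7)
        return 0;
    }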
@@ -26,6 +26,8 @@ void vector_Rect_to_Mat(std::vector<cv::Rect>& v_rect, cv::Mat& mat);
void Mat_to_vector_Rect2d(cv::Mat& mat, std::vector<cv::Rect2d>& v_rect);
void vector_Rect2d_to_Mat(std::vector<cv::Rect2d>& v_rect, cv::Mat& mat);
void Mat_to_vector_RotatedRect(cv::Mat& mat, std::vector<cv::RotatedRect>& v_rect);
void vector_RotatedRect_to_Mat(std::vector<cv::RotatedRect>& v_rect, cv::Mat& mat);
void Mat_to_vector_Point(cv::Mat& mat, std::vector<cv::Point>& v_point);
void Mat_to_vector_Point2f(cv::Mat& mat, std::vector<cv::Point2f>& v_point);
......
@@ -13,7 +13,9 @@ import org.opencv.core.MatOfPoint2f;
import org.opencv.core.MatOfPoint3f;
import org.opencv.core.Point;
import org.opencv.core.Point3;
import org.opencv.core.Size;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;
import org.opencv.core.Rect2d;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
@@ -770,4 +772,41 @@ public class Converters {
}
mats.clear();
}
public static Mat vector_RotatedRect_to_Mat(List<RotatedRect> rs) {
Mat res;
int count = (rs != null) ? rs.size() : 0;
if (count > 0) {
res = new Mat(count, 1, CvType.CV_32FC(5));
float[] buff = new float[5 * count];
for (int i = 0; i < count; i++) {
RotatedRect r = rs.get(i);
buff[5 * i] = (float)r.center.x;
buff[5 * i + 1] = (float)r.center.y;
buff[5 * i + 2] = (float)r.size.width;
buff[5 * i + 3] = (float)r.size.height;
buff[5 * i + 4] = (float)r.angle;
}
res.put(0, 0, buff);
} else {
res = new Mat();
}
return res;
}
public static void Mat_to_vector_RotatedRect(Mat m, List<RotatedRect> rs) {
if (rs == null)
throw new java.lang.IllegalArgumentException("rs == null");
int count = m.rows();
if (CvType.CV_32FC(5) != m.type() || m.cols() != 1)
throw new java.lang.IllegalArgumentException(
"CvType.CV_32FC5 != m.type() || m.rows()!=1\n" + m);
rs.clear();
float[] buff = new float[5 * count];
m.get(0, 0, buff);
for (int i = 0; i < count; i++) {
rs.add(new RotatedRect(new Point(buff[5 * i], buff[5 * i + 1]), new Size(buff[5 * i + 2], buff[5 * i + 3]), buff[5 * i + 4]));
}
}
}
@@ -17,6 +17,7 @@ import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Point3;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.DMatch;
@@ -336,6 +337,15 @@ public class OpenCVTestCase extends TestCase {
assertRectEquals(list1.get(i), list2.get(i));
}
public static void assertListRotatedRectEquals(List<RotatedRect> list1, List<RotatedRect> list2) {
if (list1.size() != list2.size()) {
throw new UnsupportedOperationException();
}
for (int i = 0; i < list1.size(); i++)
assertRotatedRectEquals(list1.get(i), list2.get(i));
}
public static void assertRectEquals(Rect expected, Rect actual) {
String msg = "expected:<" + expected + "> but was:<" + actual + ">";
assertEquals(msg, expected.x, actual.x);
@@ -344,6 +354,15 @@ public class OpenCVTestCase extends TestCase {
assertEquals(msg, expected.height, actual.height);
}
public static void assertRotatedRectEquals(RotatedRect expected, RotatedRect actual) {
String msg = "expected:<" + expected + "> but was:<" + actual + ">";
assertEquals(msg, expected.center.x, actual.center.x);
assertEquals(msg, expected.center.y, actual.center.y);
assertEquals(msg, expected.size.width, actual.size.width);
assertEquals(msg, expected.size.height, actual.size.height);
assertEquals(msg, expected.angle, actual.angle);
}
public static void assertMatEqual(Mat m1, Mat m2) {
compareMats(m1, m2, true);
}
......
@@ -4,7 +4,9 @@ import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Point3;
import org.opencv.core.Size;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.test.OpenCVTestCase;
@@ -222,6 +224,19 @@ public class ConvertersTest extends OpenCVTestCase {
assertListRectEquals(truth, rectangles);
}
public void testMat_to_vector_RotatedRect() {
Mat src = new Mat(2, 1, CvType.CV_32FC(5));
src.put(0, 0, 2, 2, 5, 2, 7,
0, 6, 4, 1, 3);
List<RotatedRect> rectangles = new ArrayList<RotatedRect>();
Converters.Mat_to_vector_RotatedRect(src, rectangles);
List<RotatedRect> truth = new ArrayList<RotatedRect>();
truth.add(new RotatedRect(new Point(2, 2), new Size(5, 2), 7));
truth.add(new RotatedRect(new Point(0, 6), new Size(4, 1), 3));
assertListRotatedRectEquals(truth, rectangles);
}
public void testMat_to_vector_uchar() {
Mat src = new Mat(3, 1, CvType.CV_8UC1);
src.put(0, 0, 2, 4, 3);
@@ -465,6 +480,19 @@ public class ConvertersTest extends OpenCVTestCase {
assertMatEqual(truth, dst);
}
public void testVector_RotatedRect_to_Mat() {
List<RotatedRect> rectangles = new ArrayList<RotatedRect>();
rectangles.add(new RotatedRect(new Point(2, 2), new Size(5, 2), 7));
rectangles.add(new RotatedRect(new Point(0, 0), new Size(6, 4), 3));
Mat dst = Converters.vector_RotatedRect_to_Mat(rectangles);
Mat truth = new Mat(2, 1, CvType.CV_32FC(5));
truth.put(0, 0, 2, 2, 5, 2, 7,
0, 0, 6, 4, 3);
assertMatEqual(truth, dst, EPS);
}
public void testVector_uchar_to_Mat() {
List<Byte> bytes = new ArrayList<Byte>();
byte value1 = 1;
@@ -498,5 +526,4 @@ public class ConvertersTest extends OpenCVTestCase {
fail("Not yet implemented");
}
}
@@ -20,6 +20,7 @@ import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Point3;
import org.opencv.core.Rect;
import org.opencv.core.RotatedRect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.core.DMatch;
@@ -362,6 +363,15 @@ public class OpenCVTestCase extends TestCase {
assertRectEquals(list1.get(i), list2.get(i));
}
public static void assertListRotatedRectEquals(List<RotatedRect> list1, List<RotatedRect> list2) {
if (list1.size() != list2.size()) {
throw new UnsupportedOperationException();
}
for (int i = 0; i < list1.size(); i++)
assertRotatedRectEquals(list1.get(i), list2.get(i));
}
public static void assertRectEquals(Rect expected, Rect actual) {
String msg = "expected:<" + expected + "> but was:<" + actual + ">";
assertEquals(msg, expected.x, actual.x);
@@ -370,6 +380,15 @@ public class OpenCVTestCase extends TestCase {
assertEquals(msg, expected.height, actual.height);
}
public static void assertRotatedRectEquals(RotatedRect expected, RotatedRect actual) {
String msg = "expected:<" + expected + "> but was:<" + actual + ">";
assertEquals(msg, expected.center.x, actual.center.x);
assertEquals(msg, expected.center.y, actual.center.y);
assertEquals(msg, expected.size.width, actual.size.width);
assertEquals(msg, expected.size.height, actual.size.height);
assertEquals(msg, expected.angle, actual.angle);
}
public static void assertMatEqual(Mat m1, Mat m2) {
compareMats(m1, m2, true);
}
......
@@ -48,7 +48,10 @@
#include <algorithm>
#include <limits>
-#define OPENCV_FOURCC(c1, c2, c3, c4) (((c1) & 255) + (((c2) & 255) << 8) + (((c3) & 255) << 16) + (((c4) & 255) << 24))
#ifndef __OPENCV_BUILD
#define CV_FOURCC(c1, c2, c3, c4) (((c1) & 255) + (((c2) & 255) << 8) + (((c3) & 255) << 16) + (((c4) & 255) << 24))
#endif
#define CALC_FFMPEG_VERSION(a,b,c) ( a<<16 | b<<8 | c )
#if defined _MSC_VER && _MSC_VER >= 1200
@@ -1197,7 +1200,7 @@ double CvCapture_FFMPEG::getProperty( int property_id ) const
return codec_tag;
}
-return (double) OPENCV_FOURCC(codec_fourcc[0], codec_fourcc[1], codec_fourcc[2], codec_fourcc[3]);
return (double) CV_FOURCC(codec_fourcc[0], codec_fourcc[1], codec_fourcc[2], codec_fourcc[3]);
case CV_FFMPEG_CAP_PROP_SAR_NUM:
return _opencv_ffmpeg_get_sample_aspect_ratio(ic->streams[video_stream]).num;
case CV_FFMPEG_CAP_PROP_SAR_DEN:
......
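For reference, the removed OPENCV_FOURCC macro and the CV_FOURCC fallback both pack four characters into an int little-endian, so the first character lands in the lowest byte. A quick worked check (illustrative, not part of the patch):

    #include <cstdio>

    // 'M' = 0x4D, 'J' = 0x4A, 'P' = 0x50, 'G' = 0x47
    // fourcc = 0x4D + (0x4A << 8) + (0x50 << 16) + (0x47 << 24) = 0x47504A4D
    int main()
    {
        int fourcc = ('M' & 255) + (('J' & 255) << 8) + (('P' & 255) << 16) + (('G' & 255) << 24);
        std::printf("0x%08X\n", (unsigned)fourcc);  // prints 0x47504A4D
        return 0;
    }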
@@ -21,7 +21,6 @@ set(OPENCV_CUDA_SAMPLES_REQUIRED_DEPS
opencv_cudaoptflow
opencv_cudabgsegm
opencv_cudastereo
-opencv_cudalegacy
opencv_cudaobjdetect)
ocv_check_dependencies(${OPENCV_CUDA_SAMPLES_REQUIRED_DEPS})
......
@@ -4,7 +4,6 @@
#include "opencv2/core.hpp"
#include "opencv2/core/utility.hpp"
#include "opencv2/cudabgsegm.hpp"
-#include "opencv2/cudalegacy.hpp"
#include "opencv2/video.hpp"
#include "opencv2/highgui.hpp"
@@ -16,8 +15,6 @@ enum Method
{
MOG,
MOG2,
-GMG,
-FGD_STAT
};
int main(int argc, const char** argv)
@@ -25,7 +22,7 @@ int main(int argc, const char** argv)
cv::CommandLineParser cmd(argc, argv,
"{ c camera | | use camera }"
"{ f file | ../data/vtest.avi | input video file }"
-"{ m method | mog | method (mog, mog2, gmg, fgd) }"
"{ m method | mog | method (mog, mog2) }"
"{ h help | | print help message }");
if (cmd.has("help") || !cmd.check())
@@ -40,9 +37,7 @@ int main(int argc, const char** argv)
string method = cmd.get<string>("method");
if (method != "mog"
-&& method != "mog2"
-&& method != "gmg"
-&& method != "fgd")
&& method != "mog2")
{
cerr << "Incorrect method" << endl;
return -1;
@@ -50,8 +45,8 @@ int main(int argc, const char** argv)
Method m = method == "mog" ? MOG :
method == "mog2" ? MOG2 :
-method == "fgd" ? FGD_STAT :
-GMG;
(Method)-1;
CV_Assert(m != (Method)-1);
VideoCapture cap;
@@ -73,8 +68,6 @@ int main(int argc, const char** argv)
Ptr<BackgroundSubtractor> mog = cuda::createBackgroundSubtractorMOG();
Ptr<BackgroundSubtractor> mog2 = cuda::createBackgroundSubtractorMOG2();
-Ptr<BackgroundSubtractor> gmg = cuda::createBackgroundSubtractorGMG(40);
-Ptr<BackgroundSubtractor> fgd = cuda::createBackgroundSubtractorFGD();
GpuMat d_fgmask;
GpuMat d_fgimg;
@@ -93,23 +86,12 @@ int main(int argc, const char** argv)
case MOG2:
mog2->apply(d_frame, d_fgmask);
break;
-case GMG:
-gmg->apply(d_frame, d_fgmask);
-break;
-case FGD_STAT:
-fgd->apply(d_frame, d_fgmask);
-break;
}
namedWindow("image", WINDOW_NORMAL);
namedWindow("foreground mask", WINDOW_NORMAL);
namedWindow("foreground image", WINDOW_NORMAL);
-if (m != GMG)
-{
namedWindow("mean background image", WINDOW_NORMAL);
-}
for(;;)
{
@@ -132,15 +114,6 @@ int main(int argc, const char** argv)
mog2->apply(d_frame, d_fgmask);
mog2->getBackgroundImage(d_bgimg);
break;
-case GMG:
-gmg->apply(d_frame, d_fgmask);
-break;
-case FGD_STAT:
-fgd->apply(d_frame, d_fgmask);
-fgd->getBackgroundImage(d_bgimg);
-break;
}
double fps = cv::getTickFrequency() / (cv::getTickCount() - start);
......