Commit 5cce0389 authored by Andrey Kamaev's avatar Andrey Kamaev

Performance testing branch is merged back into trunk

parent df9f707f
...@@ -339,6 +339,7 @@ if(NOT IOS) ...@@ -339,6 +339,7 @@ if(NOT IOS)
else() else()
set(BUILD_TESTS OFF CACHE BOOL "Build tests") set(BUILD_TESTS OFF CACHE BOOL "Build tests")
endif() endif()
set(BUILD_PERF_TESTS ON CACHE BOOL "Build performance tests")
# Build 3rdparty libraries under unix # Build 3rdparty libraries under unix
# =================================================== # ===================================================
...@@ -1555,6 +1556,17 @@ if(ENABLE_SOLUTION_FOLDERS) ...@@ -1555,6 +1556,17 @@ if(ENABLE_SOLUTION_FOLDERS)
set_property(GLOBAL PROPERTY PREDEFINED_TARGETS_FOLDER "CMakeTargets") set_property(GLOBAL PROPERTY PREDEFINED_TARGETS_FOLDER "CMakeTargets")
endif() endif()
#-----------------------------------
# performance tests
#-----------------------------------
# The "perf" driver target runs all performance tests through the python
# helper script; it is only created when a python interpreter was found.
if(BUILD_PERF_TESTS AND PYTHON_EXECUTABLE)
  add_custom_target(perf
    "${PYTHON_EXECUTABLE}" "${CMAKE_CURRENT_SOURCE_DIR}/modules/ts/misc/run.py" "${CMAKE_BINARY_DIR}"
    WORKING_DIRECTORY "${CMAKE_BINARY_DIR}"
    DEPENDS "${CMAKE_CURRENT_SOURCE_DIR}/modules/ts/misc/run.py"
    VERBATIM
  )
endif()
#----------------------------------- #-----------------------------------
# Subdirectories: # Subdirectories:
#----------------------------------- #-----------------------------------
...@@ -1763,8 +1775,9 @@ endif() ...@@ -1763,8 +1775,9 @@ endif()
# samples and tests # samples and tests
status("") status("")
status(" Tests and samples:") status(" Tests and samples:")
status(" Tests:" BUILD_TESTS THEN YES ELSE NO) status(" Tests:" BUILD_TESTS THEN YES ELSE NO)
status(" Examples:" BUILD_EXAMPLES THEN YES ELSE NO) status(" Performance tests:" BUILD_PERF_TESTS THEN YES ELSE NO)
status(" Examples:" BUILD_EXAMPLES THEN YES ELSE NO)
if(ANDROID) if(ANDROID)
status(" Android tests:" BUILD_TESTS AND CAN_BUILD_ANDROID_PROJECTS THEN YES ELSE NO) status(" Android tests:" BUILD_TESTS AND CAN_BUILD_ANDROID_PROJECTS THEN YES ELSE NO)
......
...@@ -3,6 +3,8 @@ ...@@ -3,6 +3,8 @@
macro(add_opencv_precompiled_headers the_target) macro(add_opencv_precompiled_headers the_target)
if("${the_target}" MATCHES "opencv_test_.*") if("${the_target}" MATCHES "opencv_test_.*")
SET(pch_name "test/test_precomp") SET(pch_name "test/test_precomp")
elseif("${the_target}" MATCHES "opencv_perf_.*")
SET(pch_name "perf/perf_precomp")
else() else()
SET(pch_name "src/precomp") SET(pch_name "src/precomp")
endif() endif()
...@@ -19,7 +21,110 @@ macro(add_opencv_precompiled_headers the_target) ...@@ -19,7 +21,110 @@ macro(add_opencv_precompiled_headers the_target)
endif() endif()
endmacro() endmacro()
# this is a template for OpenCV performance tests
# define_opencv_perf_test(<module_name> <dependencies>)
#
# Builds opencv_perf_<name> from the sources under the module's perf/
# directory and attaches it to the global "perf" driver target.
macro(define_opencv_perf_test name)
    set(perf_path "${CMAKE_CURRENT_SOURCE_DIR}/perf")
    if(BUILD_PERF_TESTS AND EXISTS "${perf_path}")
        include_directories("${perf_path}" "${CMAKE_CURRENT_BINARY_DIR}")

        # opencv_highgui is required for imread/imwrite
        set(perf_deps opencv_${name} ${ARGN} opencv_ts opencv_highgui ${EXTRA_OPENCV_${name}_DEPS})

        # make the public headers of every OpenCV dependency visible
        # (anchored match so only real opencv_* module names are mapped)
        foreach(d ${perf_deps})
            if(d MATCHES "^opencv_")
                string(REPLACE "opencv_" "${OpenCV_SOURCE_DIR}/modules/" d_dir ${d})
                if(EXISTS "${d_dir}/include")
                    include_directories("${d_dir}/include")
                endif()
            endif()
        endforeach()

        file(GLOB perf_srcs "${perf_path}/*.cpp")
        file(GLOB perf_hdrs "${perf_path}/*.h*")
        source_group("Src" FILES ${perf_srcs})
        source_group("Include" FILES ${perf_hdrs})

        set(the_target "opencv_perf_${name}")
        add_executable(${the_target} ${perf_srcs} ${perf_hdrs})

        # Additional target properties
        set_target_properties(${the_target} PROPERTIES
            DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
            RUNTIME_OUTPUT_DIRECTORY "${EXECUTABLE_OUTPUT_PATH}"
        )

        if(ENABLE_SOLUTION_FOLDERS)
            set_target_properties(${the_target} PROPERTIES FOLDER "performance tests")
        endif()

        add_dependencies(${the_target} ${perf_deps})
        target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${perf_deps})

        add_opencv_precompiled_headers(${the_target})

        # hook into the global "perf" driver when it exists
        # (it is only created when a python interpreter was found)
        if(TARGET perf)
            add_dependencies(perf ${the_target})
        endif()
    endif()
endmacro()
# this is a template for OpenCV regression tests
# define_opencv_test(<module_name> <dependencies>)
#
# Builds opencv_test_<name> from the sources under the module's test/
# directory and registers the binary with CTest.
macro(define_opencv_test name)
    set(test_path "${CMAKE_CURRENT_SOURCE_DIR}/test")
    if(BUILD_TESTS AND EXISTS "${test_path}")
        include_directories("${test_path}" "${CMAKE_CURRENT_BINARY_DIR}")

        # opencv_highgui is required for imread/imwrite
        set(test_deps opencv_${name} ${ARGN} opencv_ts opencv_highgui ${EXTRA_OPENCV_${name}_DEPS})

        # make the public headers of every OpenCV dependency visible
        # (anchored match so only real opencv_* module names are mapped)
        foreach(d ${test_deps})
            if(d MATCHES "^opencv_")
                string(REPLACE "opencv_" "${OpenCV_SOURCE_DIR}/modules/" d_dir ${d})
                if(EXISTS "${d_dir}/include")
                    include_directories("${d_dir}/include")
                endif()
            endif()
        endforeach()

        file(GLOB test_srcs "${test_path}/*.cpp")
        file(GLOB test_hdrs "${test_path}/*.h*")
        source_group("Src" FILES ${test_srcs})
        source_group("Include" FILES ${test_hdrs})

        set(the_target "opencv_test_${name}")
        add_executable(${the_target} ${test_srcs} ${test_hdrs})

        # Additional target properties
        set_target_properties(${the_target} PROPERTIES
            DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
            RUNTIME_OUTPUT_DIRECTORY "${EXECUTABLE_OUTPUT_PATH}"
        )

        if(ENABLE_SOLUTION_FOLDERS)
            set_target_properties(${the_target} PROPERTIES FOLDER "tests")
        endif()

        add_dependencies(${the_target} ${test_deps})
        target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${test_deps})

        enable_testing()
        # LOCATION is the pre-CMake-3.0 way to obtain the built binary path
        get_target_property(LOC ${the_target} LOCATION)
        add_test(${the_target} "${LOC}")

        add_opencv_precompiled_headers(${the_target})
    endif()
endmacro()
# this is a template for a OpenCV module
# define_opencv_module(<module_name> <dependencies>)
macro(define_opencv_module name) macro(define_opencv_module name)
project(opencv_${name}) project(opencv_${name})
...@@ -30,21 +135,22 @@ macro(define_opencv_module name) ...@@ -30,21 +135,22 @@ macro(define_opencv_module name)
foreach(d ${ARGN}) foreach(d ${ARGN})
if(d MATCHES "opencv_") if(d MATCHES "opencv_")
string(REPLACE "opencv_" "${CMAKE_CURRENT_SOURCE_DIR}/../" d_dir ${d}) string(REPLACE "opencv_" "${OpenCV_SOURCE_DIR}/modules/" d_dir ${d})
include_directories("${d_dir}/include") if (EXISTS "${d_dir}/include")
include_directories("${d_dir}/include")
endif()
endif() endif()
endforeach() endforeach()
file(GLOB lib_srcs "src/*.cpp") file(GLOB lib_srcs "src/*.cpp")
file(GLOB lib_int_hdrs "src/*.h*") file(GLOB lib_int_hdrs "src/*.h*")
file(GLOB lib_hdrs "include/opencv2/${name}/*.h*")
if(COMMAND get_module_external_sources) if(COMMAND get_module_external_sources)
get_module_external_sources(${name}) get_module_external_sources(${name})
endif() endif()
source_group("Src" FILES ${lib_srcs} ${lib_int_hdrs}) source_group("Src" FILES ${lib_srcs} ${lib_int_hdrs})
file(GLOB lib_hdrs "include/opencv2/${name}/*.h*")
source_group("Include" FILES ${lib_hdrs}) source_group("Include" FILES ${lib_hdrs})
set(the_target "opencv_${name}") set(the_target "opencv_${name}")
...@@ -86,8 +192,6 @@ macro(define_opencv_module name) ...@@ -86,8 +192,6 @@ macro(define_opencv_module name)
INSTALL_NAME_DIR lib INSTALL_NAME_DIR lib
) )
add_opencv_precompiled_headers(${the_target})
# Add the required libraries for linking: # Add the required libraries for linking:
target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${IPP_LIBS} ${ARGN}) target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${IPP_LIBS} ${ARGN})
...@@ -114,53 +218,8 @@ macro(define_opencv_module name) ...@@ -114,53 +218,8 @@ macro(define_opencv_module name)
DESTINATION ${OPENCV_INCLUDE_PREFIX}/opencv2/${name} DESTINATION ${OPENCV_INCLUDE_PREFIX}/opencv2/${name}
COMPONENT main) COMPONENT main)
if(BUILD_TESTS AND EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/test) add_opencv_precompiled_headers(${the_target})
include_directories("${CMAKE_CURRENT_SOURCE_DIR}/include"
"${CMAKE_CURRENT_SOURCE_DIR}/test"
"${CMAKE_CURRENT_BINARY_DIR}")
set(test_deps opencv_${name} ${ARGN} opencv_ts opencv_highgui ${EXTRA_${the_target}_DEPS})
foreach(d ${test_deps})
if(d MATCHES "opencv_")
string(REPLACE "opencv_" "${CMAKE_CURRENT_SOURCE_DIR}/../" d_dir ${d})
include_directories("${d_dir}/include")
endif()
endforeach()
file(GLOB test_srcs "test/*.cpp")
file(GLOB test_hdrs "test/*.h*")
source_group("Src" FILES ${test_srcs})
source_group("Include" FILES ${test_hdrs})
set(the_target "opencv_test_${name}")
add_executable(${the_target} ${test_srcs} ${test_hdrs})
add_opencv_precompiled_headers(${the_target})
# Additional target properties
set_target_properties(${the_target} PROPERTIES
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
RUNTIME_OUTPUT_DIRECTORY "${EXECUTABLE_OUTPUT_PATH}"
)
if(ENABLE_SOLUTION_FOLDERS)
set_target_properties(${the_target} PROPERTIES FOLDER "tests")
endif()
add_dependencies(${the_target} ${test_deps})
# Add the required libraries for linking:
target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${test_deps})
enable_testing()
get_target_property(LOC ${the_target} LOCATION)
add_test(${the_target} "${LOC}")
#if(WIN32)
# install(TARGETS ${the_target} RUNTIME DESTINATION bin COMPONENT main)
#endif()
endif()
define_opencv_test(${name})
define_opencv_perf_test(${name})
endmacro() endmacro()
if(ANDROID) if(ANDROID AND WITH_ANDROID_CAMERA)
ADD_DEFINITIONS(-DGTEST_HAS_STD_WSTRING=0) add_subdirectory(androidcamera)
if(ANDROID_API_LEVEL LESS 8)
ADD_DEFINITIONS(-DGTEST_HAS_CLONE=0)
endif()
IF(WITH_ANDROID_CAMERA)
add_subdirectory(androidcamera)
endif()
endif() endif()
add_subdirectory(calib3d) add_subdirectory(calib3d)
...@@ -14,9 +7,10 @@ add_subdirectory(core) ...@@ -14,9 +7,10 @@ add_subdirectory(core)
add_subdirectory(features2d) add_subdirectory(features2d)
add_subdirectory(flann) add_subdirectory(flann)
if(BUILD_TESTS) if(BUILD_TESTS OR BUILD_PERF_TESTS)
add_subdirectory(ts) add_subdirectory(ts)
endif() endif()
add_subdirectory(highgui) add_subdirectory(highgui)
add_subdirectory(imgproc) add_subdirectory(imgproc)
add_subdirectory(legacy) add_subdirectory(legacy)
...@@ -35,8 +29,8 @@ endif() ...@@ -35,8 +29,8 @@ endif()
add_subdirectory(video) add_subdirectory(video)
if(NOT IOS) if(NOT IOS)
add_subdirectory(traincascade) add_subdirectory(traincascade)
add_subdirectory(haartraining) add_subdirectory(haartraining)
endif() endif()
if(NOT (ANDROID OR IOS)) if(NOT (ANDROID OR IOS))
......
if(ZLIB_FOUND) if(ZLIB_FOUND)
include_directories(${ZLIB_INCUDE_DIR}) include_directories(${ZLIB_INCUDE_DIR})
set(deps ${ZLIB_LIBRARIES})
else() else()
include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../../3rdparty/zlib") include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../../3rdparty/zlib")
set(deps zlib)
endif() endif()
define_opencv_module(core ${deps}) define_opencv_module(core ${ZLIB_LIBRARY})
...@@ -4276,7 +4276,7 @@ class CV_EXPORTS CommandLineParser ...@@ -4276,7 +4276,7 @@ class CV_EXPORTS CommandLineParser
public: public:
//! the default constructor //! the default constructor
CommandLineParser(int argc, const char* argv[], const char* key_map); CommandLineParser(int argc, const char* const argv[], const char* key_map);
//! get parameter, you can choose: delete spaces in end and begin or not //! get parameter, you can choose: delete spaces in end and begin or not
template<typename _Tp> template<typename _Tp>
...@@ -4287,7 +4287,7 @@ class CV_EXPORTS CommandLineParser ...@@ -4287,7 +4287,7 @@ class CV_EXPORTS CommandLineParser
return _Tp(); return _Tp();
} }
std::string str = getString(name); std::string str = getString(name);
return analizeValue<_Tp>(str, space_delete); return analyzeValue<_Tp>(str, space_delete);
} }
//! print short name, full name, current value and help for all params //! print short name, full name, current value and help for all params
...@@ -4300,7 +4300,7 @@ class CV_EXPORTS CommandLineParser ...@@ -4300,7 +4300,7 @@ class CV_EXPORTS CommandLineParser
bool has(const std::string& keys); bool has(const std::string& keys);
template<typename _Tp> template<typename _Tp>
_Tp analizeValue(const std::string& str, bool space_delete=false); _Tp analyzeValue(const std::string& str, bool space_delete=false);
template<typename _Tp> template<typename _Tp>
static _Tp getData(const std::string& str) static _Tp getData(const std::string& str)
...@@ -4320,19 +4320,22 @@ template<> CV_EXPORTS ...@@ -4320,19 +4320,22 @@ template<> CV_EXPORTS
bool CommandLineParser::get<bool>(const std::string& name, bool space_delete); bool CommandLineParser::get<bool>(const std::string& name, bool space_delete);
template<> CV_EXPORTS template<> CV_EXPORTS
std::string CommandLineParser::analizeValue<std::string>(const std::string& str, bool space_delete); std::string CommandLineParser::analyzeValue<std::string>(const std::string& str, bool space_delete);
template<> CV_EXPORTS template<> CV_EXPORTS
int CommandLineParser::analizeValue<int>(const std::string& str, bool space_delete); int CommandLineParser::analyzeValue<int>(const std::string& str, bool space_delete);
template<> CV_EXPORTS template<> CV_EXPORTS
unsigned CommandLineParser::analizeValue<unsigned int>(const std::string& str, bool space_delete); unsigned int CommandLineParser::analyzeValue<unsigned int>(const std::string& str, bool space_delete);
template<> CV_EXPORTS template<> CV_EXPORTS
float CommandLineParser::analizeValue<float>(const std::string& str, bool space_delete); uint64 CommandLineParser::analyzeValue<uint64>(const std::string& str, bool space_delete);
template<> CV_EXPORTS template<> CV_EXPORTS
double CommandLineParser::analizeValue<double>(const std::string& str, bool space_delete); float CommandLineParser::analyzeValue<float>(const std::string& str, bool space_delete);
template<> CV_EXPORTS
double CommandLineParser::analyzeValue<double>(const std::string& str, bool space_delete);
} }
......
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

// Parameter grid for the cv::abs() benchmark: every typical matrix size
// crossed with signed-integer and float element types.
#define TYPICAL_MAT_SIZES_ABS TYPICAL_MAT_SIZES
#define TYPICAL_MAT_TYPES_ABS CV_8SC1, CV_8SC4, CV_32SC1, CV_32FC1
#define TYPICAL_MATS_ABS testing::Combine( testing::Values( TYPICAL_MAT_SIZES_ABS), testing::Values( TYPICAL_MAT_TYPES_ABS) )

// Benchmarks c = cv::abs(a) over the size/type grid above.
PERF_TEST_P(Size_MatType, abs, TYPICAL_MATS_ABS)
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());
    cv::Mat a = Mat(sz, type);
    cv::Mat c = Mat(sz, type);

    // NOTE(review): WARMUP_RNG presumably fills 'a' with random data and
    // time(0.5) looks like a measurement-time limit — confirm in the ts module.
    declare.in(a, ::perf::TestBase::WARMUP_RNG).out(c).time(0.5);

    TEST_CYCLE(100) c = cv::abs(a);

    SANITY_CHECK(c);
}
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

// Parameter grid for the cv::bitwise_not() benchmark: typical matrix sizes
// crossed with integer element types (bitwise ops are integer-only).
#define TYPICAL_MAT_SIZES_BITWNOT TYPICAL_MAT_SIZES
#define TYPICAL_MAT_TYPES_BITWNOT CV_8SC1, CV_8SC4, CV_32SC1, CV_32SC4
#define TYPICAL_MATS_BITWNOT testing::Combine( testing::Values( TYPICAL_MAT_SIZES_BITWNOT), testing::Values( TYPICAL_MAT_TYPES_BITWNOT) )

// Benchmarks cv::bitwise_not(a, c) over the size/type grid above.
PERF_TEST_P(Size_MatType, bitwise_not, TYPICAL_MATS_BITWNOT)
{
    Size sz = std::tr1::get<0>(GetParam());
    int type = std::tr1::get<1>(GetParam());
    cv::Mat a = Mat(sz, type);
    cv::Mat c = Mat(sz, type);

    declare.in(a, WARMUP_RNG).out(c);

    TEST_CYCLE(100) cv::bitwise_not(a, c);

    SANITY_CHECK(c);
}
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

// Shared (size x type) grids for the elementwise binary-op benchmarks below.
// The BITW grid keeps integer types only; the general grid includes floats.
#define TYPICAL_MAT_SIZES_CORE_ARITHM TYPICAL_MAT_SIZES
#define TYPICAL_MAT_TYPES_CORE_ARITHM CV_8UC1, CV_8SC1, CV_8UC4, CV_32SC1, CV_32FC1
#define TYPICAL_MATS_CORE_ARITHM testing::Combine( testing::Values( TYPICAL_MAT_SIZES_CORE_ARITHM ), testing::Values( TYPICAL_MAT_TYPES_CORE_ARITHM ) )
#define TYPICAL_MAT_TYPES_BITW_ARITHM CV_8UC1, CV_8SC1, CV_8UC4, CV_32SC1, CV_32SC4
#define TYPICAL_MATS_BITW_ARITHM testing::Combine( testing::Values( TYPICAL_MAT_SIZES_CORE_ARITHM ), testing::Values( TYPICAL_MAT_TYPES_BITW_ARITHM ) )

// Generates one PERF_TEST_P benchmarking __f(Mat a, Mat b, Mat c)
// over the given parameter set.
#define PERF_TEST_P__CORE_ARITHM(__f, __testset) \
PERF_TEST_P(Size_MatType, core_arithm__ ## __f, __testset) \
{ \
    Size sz = std::tr1::get<0>(GetParam()); \
    int type = std::tr1::get<1>(GetParam()); \
    cv::Mat a = Mat(sz, type); \
    cv::Mat b = Mat(sz, type); \
    cv::Mat c = Mat(sz, type); \
    \
    declare.in(a, b, WARMUP_RNG) \
        .out(c); \
    \
    TEST_CYCLE(100) __f(a,b, c); \
    \
    SANITY_CHECK(c); \
}

// Same as above, but the second operand is a cv::Scalar instead of a Mat.
#define PERF_TEST_P__CORE_ARITHM_SCALAR(__f, __testset) \
PERF_TEST_P(Size_MatType, core_arithm__ ## __f ##__Scalar, __testset) \
{ \
    Size sz = std::tr1::get<0>(GetParam()); \
    int type = std::tr1::get<1>(GetParam()); \
    cv::Mat a = Mat(sz, type); \
    cv::Scalar b; \
    cv::Mat c = Mat(sz, type); \
    \
    declare.in(a, b, WARMUP_RNG) \
        .out(c); \
    \
    TEST_CYCLE(100) __f(a,b, c); \
    \
    SANITY_CHECK(c); \
}

// Mat-Mat variants.
PERF_TEST_P__CORE_ARITHM(bitwise_and, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM(bitwise_or, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM(bitwise_xor, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM(add, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(subtract, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(min, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(max, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM(absdiff, TYPICAL_MATS_CORE_ARITHM)

// Mat-Scalar variants.
PERF_TEST_P__CORE_ARITHM_SCALAR(bitwise_and, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(bitwise_or, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(bitwise_xor, TYPICAL_MATS_BITW_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(add, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(subtract, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(min, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(max, TYPICAL_MATS_CORE_ARITHM)
PERF_TEST_P__CORE_ARITHM_SCALAR(absdiff, TYPICAL_MATS_CORE_ARITHM)
#include "perf_precomp.hpp"
CV_PERF_TEST_MAIN(core)
#include "perf_precomp.hpp"
// Precompiled header for the core module's performance tests.
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__

#include "opencv2/ts/ts.hpp"

// Only the ts module may export gtest symbols; any other module defining
// this macro would produce duplicate/exported gtest symbols.
#if GTEST_CREATE_SHARED_LIBRARY
#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined
#endif

#endif
This diff is collapsed.
...@@ -72,7 +72,7 @@ vector<string> split_string(const string& str, const string& delimiters) ...@@ -72,7 +72,7 @@ vector<string> split_string(const string& str, const string& delimiters)
return res; return res;
} }
CommandLineParser::CommandLineParser(int argc, const char* argv[], const char* keys) CommandLineParser::CommandLineParser(int argc, const char* const argv[], const char* keys)
{ {
std::string keys_buffer; std::string keys_buffer;
...@@ -272,7 +272,7 @@ bool CommandLineParser::get<bool>(const std::string& name, bool space_delete) ...@@ -272,7 +272,7 @@ bool CommandLineParser::get<bool>(const std::string& name, bool space_delete)
return true; return true;
} }
template<> template<>
std::string CommandLineParser::analizeValue<std::string>(const std::string& str, bool space_delete) std::string CommandLineParser::analyzeValue<std::string>(const std::string& str, bool space_delete)
{ {
if (space_delete) if (space_delete)
{ {
...@@ -287,25 +287,31 @@ std::string CommandLineParser::analizeValue<std::string>(const std::string& str, ...@@ -287,25 +287,31 @@ std::string CommandLineParser::analizeValue<std::string>(const std::string& str,
} }
template<> template<>
int CommandLineParser::analizeValue<int>(const std::string& str, bool /*space_delete*/) int CommandLineParser::analyzeValue<int>(const std::string& str, bool /*space_delete*/)
{ {
return fromStringNumber<int>(str); return fromStringNumber<int>(str);
} }
template<> template<>
unsigned int CommandLineParser::analizeValue<unsigned int>(const std::string& str, bool /*space_delete*/) unsigned int CommandLineParser::analyzeValue<unsigned int>(const std::string& str, bool /*space_delete*/)
{ {
return fromStringNumber<unsigned int>(str); return fromStringNumber<unsigned int>(str);
} }
template<> template<>
float CommandLineParser::analizeValue<float>(const std::string& str, bool /*space_delete*/) uint64 CommandLineParser::analyzeValue<uint64>(const std::string& str, bool /*space_delete*/)
{
return fromStringNumber<uint64>(str);
}
template<>
float CommandLineParser::analyzeValue<float>(const std::string& str, bool /*space_delete*/)
{ {
return fromStringNumber<float>(str); return fromStringNumber<float>(str);
} }
template<> template<>
double CommandLineParser::analizeValue<double>(const std::string& str, bool /*space_delete*/) double CommandLineParser::analyzeValue<double>(const std::string& str, bool /*space_delete*/)
{ {
return fromStringNumber<double>(str); return fromStringNumber<double>(str);
} }
...@@ -134,7 +134,7 @@ cv::string cv::FileStorage::getDefaultObjectName(const string& _filename) ...@@ -134,7 +134,7 @@ cv::string cv::FileStorage::getDefaultObjectName(const string& _filename)
namespace cv namespace cv
{ {
#ifndef ANDROID //unsuported wcstombs on android #if !defined(ANDROID) || defined(_GLIBCXX_USE_WCHAR_T)
string fromUtf16(const WString& str) string fromUtf16(const WString& str)
{ {
cv::AutoBuffer<char> _buf(str.size()*4 + 1); cv::AutoBuffer<char> _buf(str.size()*4 + 1);
...@@ -2160,6 +2160,7 @@ icvXMLParse( CvFileStorage* fs ) ...@@ -2160,6 +2160,7 @@ icvXMLParse( CvFileStorage* fs )
ptr = icvXMLSkipSpaces( fs, ptr, CV_XML_INSIDE_TAG ); ptr = icvXMLSkipSpaces( fs, ptr, CV_XML_INSIDE_TAG );
if( memcmp( ptr, "<?xml", 5 ) != 0 ) if( memcmp( ptr, "<?xml", 5 ) != 0 )
CV_PARSE_ERROR( "Valid XML should start with \'<?xml ...?>\'" ); CV_PARSE_ERROR( "Valid XML should start with \'<?xml ...?>\'" );
ptr = icvXMLParseTag( fs, ptr, &key, &list, &tag_type ); ptr = icvXMLParseTag( fs, ptr, &key, &list, &tag_type );
......
...@@ -125,7 +125,7 @@ if(WIN32) ...@@ -125,7 +125,7 @@ if(WIN32)
endif() endif()
endif() endif()
if(UNIX) if(UNIX)
if(NOT HAVE_QT) if(NOT HAVE_QT)
if(HAVE_GTK) if(HAVE_GTK)
set(highgui_srcs ${highgui_srcs} src/window_gtk.cpp) set(highgui_srcs ${highgui_srcs} src/window_gtk.cpp)
...@@ -190,7 +190,7 @@ endif() ...@@ -190,7 +190,7 @@ endif()
#YV #YV
if(APPLE) if(APPLE)
if (NOT IOS) if (NOT IOS)
add_definitions(-DHAVE_QUICKTIME=1) add_definitions(-DHAVE_QUICKTIME=1)
endif() endif()
if(NOT OPENCV_BUILD_3RDPARTY_LIBS) if(NOT OPENCV_BUILD_3RDPARTY_LIBS)
...@@ -208,22 +208,17 @@ if(APPLE) ...@@ -208,22 +208,17 @@ if(APPLE)
endif() endif()
if(WITH_QUICKTIME) if(WITH_QUICKTIME)
set(highgui_srcs ${highgui_srcs} src/cap_qt.cpp) set(highgui_srcs ${highgui_srcs} src/cap_qt.cpp)
else() else()
if (WITH_AVFOUNDATION) if (WITH_AVFOUNDATION)
add_definitions(-DHAVE_AVFOUNDATION=1) add_definitions(-DHAVE_AVFOUNDATION=1)
set(highgui_srcs ${highgui_srcs} src/cap_avfoundation.mm) set(highgui_srcs ${highgui_srcs} src/cap_avfoundation.mm)
else() else()
set(highgui_srcs ${highgui_srcs} src/cap_qtkit.mm) set(highgui_srcs ${highgui_srcs} src/cap_qtkit.mm)
endif() endif()
endif() endif()
endif(APPLE) endif(APPLE)
if(WITH_ANDROID_CAMERA) if(WITH_ANDROID_CAMERA)
include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../androidcamera/include") include_directories("${CMAKE_CURRENT_SOURCE_DIR}/../androidcamera/include")
set(highgui_srcs ${highgui_srcs} src/cap_android.cpp) set(highgui_srcs ${highgui_srcs} src/cap_android.cpp)
...@@ -387,58 +382,5 @@ install(FILES ${highgui_ext_hdrs} ...@@ -387,58 +382,5 @@ install(FILES ${highgui_ext_hdrs}
############################# highgui tests ################################ ############################# highgui tests ################################
if(BUILD_TESTS) define_opencv_test(highgui)
define_opencv_perf_test(highgui)
include_directories("${CMAKE_CURRENT_SOURCE_DIR}/test"
"${CMAKE_CURRENT_BINARY_DIR}")
set(test_deps opencv_ts opencv_highgui opencv_imgproc)
if(WITH_ANDROID_CAMERA)
set(test_deps ${test_deps} opencv_androidcamera)
endif()
foreach(d ${test_deps})
if(${d} MATCHES "opencv_")
string(REPLACE "opencv_" "${CMAKE_CURRENT_SOURCE_DIR}/../" d_dir ${d})
include_directories("${d_dir}/include")
endif()
endforeach()
file(GLOB test_srcs "test/*.cpp")
file(GLOB test_hdrs "test/*.h*")
source_group("Src" FILES ${test_srcs})
source_group("Include" FILES ${test_hdrs})
set(the_target "opencv_test_highgui")
add_executable(${the_target} ${test_srcs} ${test_hdrs})
add_opencv_precompiled_headers(${the_target})
# Additional target properties
set_target_properties(${the_target} PROPERTIES
DEBUG_POSTFIX "${OPENCV_DEBUG_POSTFIX}"
RUNTIME_OUTPUT_DIRECTORY "${EXECUTABLE_OUTPUT_PATH}"
)
if(ENABLE_SOLUTION_FOLDERS)
set_target_properties(${the_target} PROPERTIES FOLDER "tests")
endif()
add_dependencies(${the_target} ${test_deps})
# Add the required libraries for linking:
target_link_libraries(${the_target} ${OPENCV_LINKER_LIBS} ${test_deps})
enable_testing()
get_target_property(LOC ${the_target} LOCATION)
add_test(${the_target} "${LOC}")
if(WIN32)
if (MSVC AND NOT BUILD_SHARED_LIBS)
set_target_properties(${the_target} PROPERTIES LINK_FLAGS "/NODEFAULTLIB:atlthunk.lib /NODEFAULTLIB:atlsd.lib /DEBUG")
endif()
#install(TARGETS ${the_target} RUNTIME DESTINATION bin COMPONENT main)
endif()
endif(BUILD_TESTS)
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

// Conversion codes exercised by the cvtColor benchmarks, grouped by family.
CV_ENUM(CvtMode, CV_YUV2BGR, CV_YUV2RGB, //YUV
                CV_YUV420i2BGR, CV_YUV420i2RGB, CV_YUV420sp2BGR, CV_YUV420sp2RGB, //YUV420
                CV_RGB2GRAY, CV_RGBA2GRAY, CV_BGR2GRAY, CV_BGRA2GRAY, //Gray
                CV_GRAY2RGB, CV_GRAY2RGBA/*, CV_GRAY2BGR, CV_GRAY2BGRA*/ //Gray2
                )

// (size, conversion mode) parameter tuple.
typedef std::tr1::tuple<Size, CvtMode> Size_CvtMode_t;
typedef perf::TestBaseWithParam<Size_CvtMode_t> Size_CvtMode;

// (size, conversion mode, output channel count) parameter tuple.
typedef std::tr1::tuple<Size, CvtMode, int> Size_CvtMode_OutChNum_t;
typedef perf::TestBaseWithParam<Size_CvtMode_OutChNum_t> Size_CvtMode_OutChNum;

/*
// void cvtColor(InputArray src, OutputArray dst, int code, int dstCn=0 )
*/

// Full-resolution YUV -> BGR/RGB conversion, 3- and 4-channel outputs.
PERF_TEST_P( Size_CvtMode_OutChNum, cvtColorYUV,
             testing::Combine(
                testing::Values( TYPICAL_MAT_SIZES ),
                testing::Values( (int)CV_YUV2BGR, (int)CV_YUV2RGB ),
                testing::Values( 3, 4 )
                )
             )
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());
    int ch = std::tr1::get<2>(GetParam());

    Mat src(sz, CV_8UC3);
    Mat dst(sz, CV_8UC(ch));

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode, ch); }

    SANITY_CHECK(dst);
}

// Subsampled YUV420 -> BGR/RGB conversion; the source is a single-channel
// plane of height h + h/2 (luma plane plus interleaved chroma).
PERF_TEST_P( Size_CvtMode_OutChNum, cvtColorYUV420,
             testing::Combine(
                testing::Values( szVGA, sz720p, sz1080p, Size(130, 60) ),
                testing::Values( (int)CV_YUV420i2BGR, (int)CV_YUV420i2RGB, (int)CV_YUV420sp2BGR, (int)CV_YUV420sp2RGB ),
                testing::Values( 3, 4 )
                )
             )
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());
    int ch = std::tr1::get<2>(GetParam());

    Mat src(sz.height+sz.height/2, sz.width, CV_8UC1);
    Mat dst(sz, CV_8UC(ch));

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode, ch); }

    SANITY_CHECK(dst);
}

// Color -> grayscale conversion; the source channel count is derived
// from the mode (4 channels for the *A2GRAY variants, otherwise 3).
PERF_TEST_P( Size_CvtMode, cvtColorGray,
             testing::Combine(
                testing::Values( TYPICAL_MAT_SIZES ),
                testing::Values( (int)CV_RGB2GRAY, (int)CV_RGBA2GRAY, (int)CV_BGR2GRAY, (int)CV_BGRA2GRAY )
                )
             )
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());

    Mat src(sz, CV_8UC((mode==CV_RGBA2GRAY || mode==CV_BGRA2GRAY)?4:3));
    Mat dst(sz, CV_8UC1);

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode); }

    SANITY_CHECK(dst);
}

// Grayscale -> color conversion; the destination channel count is
// derived from the mode (4 channels for the GRAY2*A variants).
PERF_TEST_P( Size_CvtMode, cvtColorGray2,
             testing::Combine(
                testing::Values( TYPICAL_MAT_SIZES ),
                testing::Values( (int)CV_GRAY2RGB, (int)CV_GRAY2RGBA/*, CV_GRAY2BGR, CV_GRAY2BGRA*/ )
                )
             )
{
    Size sz = std::tr1::get<0>(GetParam());
    int mode = std::tr1::get<1>(GetParam());

    Mat src(sz, CV_8UC1);
    Mat dst(sz, CV_8UC((mode==CV_GRAY2RGBA || mode==CV_GRAY2BGRA)?4:3));

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) { cvtColor(src, dst, mode); }

    SANITY_CHECK(dst);
}
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

// (element type, source size, destination size) parameter tuple.
typedef tr1::tuple<MatType, Size, Size> MatInfo_Size_Size_t;
typedef TestBaseWithParam<MatInfo_Size_Size_t> MatInfo_Size_Size;

// cv::resize() upscaling with the default (bilinear) interpolation.
PERF_TEST_P(MatInfo_Size_Size, resizeUpLinear,
            testing::Values(
                MatInfo_Size_Size_t(CV_8UC1, szVGA, szqHD),
                MatInfo_Size_Size_t(CV_8UC1, szVGA, sz720p),
                MatInfo_Size_Size_t(CV_8UC4, szVGA, sz720p)
                )
            )
{
    int matType = tr1::get<0>(GetParam());
    Size from = tr1::get<1>(GetParam());
    Size to = tr1::get<2>(GetParam());

    cv::Mat src(from, matType);
    cv::Mat dst(to, matType);

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) cv::resize(src, dst, to);

    SANITY_CHECK(dst);
}

// cv::resize() downscaling with the default (bilinear) interpolation.
PERF_TEST_P(MatInfo_Size_Size, resizeDownLinear,
            testing::Values(
                MatInfo_Size_Size_t(CV_8UC1, szVGA, szQVGA),
                MatInfo_Size_Size_t(CV_8UC4, szqHD, szVGA),
                MatInfo_Size_Size_t(CV_8UC1, sz720p, Size(120 * sz720p.width / sz720p.height, 120)),//face detection min_face_size = 20%
                MatInfo_Size_Size_t(CV_8UC4, sz720p, szVGA),
                MatInfo_Size_Size_t(CV_8UC4, sz720p, szQVGA)
                )
            )
{
    int matType = tr1::get<0>(GetParam());
    Size from = tr1::get<1>(GetParam());
    Size to = tr1::get<2>(GetParam());

    cv::Mat src(from, matType);
    cv::Mat dst(to, matType);

    declare.in(src, WARMUP_RNG).out(dst);

    TEST_CYCLE(100) cv::resize(src, dst, to);

    SANITY_CHECK(dst);
}
#include "perf_precomp.hpp"

using namespace std;
using namespace cv;
using namespace perf;

// (size, input type, output sum depth) parameter tuple.
typedef std::tr1::tuple<Size, MatType, MatDepth> Size_MatType_OutMatDepth_t;
typedef perf::TestBaseWithParam<Size_MatType_OutMatDepth_t> Size_MatType_OutMatDepth;

/*
// void integral(InputArray image, OutputArray sum, int sdepth=-1 )
*/

// Benchmarks the single-output integral() overload over 8-bit inputs
// with every supported sum depth.
PERF_TEST_P( Size_MatType_OutMatDepth, integral1,
             testing::Combine(
                testing::Values( TYPICAL_MAT_SIZES ),
                testing::Values( CV_8UC1, CV_8UC4 ),
                testing::Values( CV_32S, CV_32F, CV_64F )
                )
             )
{
    Size sz = std::tr1::get<0>(GetParam());
    int matType = std::tr1::get<1>(GetParam());
    int sdepth = std::tr1::get<2>(GetParam());

    Mat src(sz, matType);
    Mat sum(sz, sdepth);

    declare.in(src, WARMUP_RNG);

    TEST_CYCLE(100) { integral(src, sum, sdepth); }

    SANITY_CHECK(sum);
}

/*
// void integral(InputArray image, OutputArray sum, OutputArray sqsum, int sdepth=-1 )
*/

/*
// void integral(InputArray image, OutputArray sum, OutputArray sqsum, OutputArray tilted, int sdepth=-1 )
*/
\ No newline at end of file
#include "perf_precomp.hpp"
CV_PERF_TEST_MAIN(imgproc)
#include "perf_precomp.hpp"
// Precompiled header for the imgproc module's performance tests.
#ifndef __OPENCV_PERF_PRECOMP_HPP__
#define __OPENCV_PERF_PRECOMP_HPP__

#include "opencv2/ts/ts.hpp"
#include "opencv2/imgproc/imgproc.hpp"

// Only the ts module may export gtest symbols; any other module defining
// this macro would produce duplicate/exported gtest symbols.
#if GTEST_CREATE_SHARED_LIBRARY
#error no modules except ts should have GTEST_CREATE_SHARED_LIBRARY defined
#endif

#endif
if(BUILD_SHARED_LIBS) if(BUILD_SHARED_LIBS)
add_definitions(-DGTEST_CREATE_SHARED_LIBRARY=1) add_definitions(-DGTEST_CREATE_SHARED_LIBRARY=1)
if (MSVC)
add_definitions( "/wd4251 /wd4275")
endif()
else()
add_definitions(-DGTEST_CREATE_SHARED_LIBRARY=0)
endif() endif()
define_opencv_module(ts opencv_core) define_opencv_module(ts opencv_core)
#ifndef __OPENCV_GTESTCV_HPP__ #ifndef __OPENCV_GTESTCV_HPP__
#define __OPENCV_GTESTCV_HPP__ #define __OPENCV_GTESTCV_HPP__
#ifndef GTEST_CREATE_AS_SHARED_LIBRARY #ifndef GTEST_CREATE_SHARED_LIBRARY
#define GTEST_LINKED_AS_SHARED_LIBRARY 1 #define GTEST_LINKED_AS_SHARED_LIBRARY 1
#endif #endif
#ifdef ANDROID
# include <android/api-level.h>
# define GTEST_HAS_CLONE (__ANDROID_API__ > 7)
# define GTEST_HAS_POSIX_RE (__ANDROID_API__ > 7)
# define GTEST_HAS_STD_WSTRING _GLIBCXX_USE_WCHAR_T
#endif
#include <stdarg.h> // for va_list #include <stdarg.h> // for va_list
#if _MSC_VER >= 1200
#pragma warning( disable: 4251 4275 4355 4127 )
#endif
#include "opencv2/ts/ts_gtest.h" #include "opencv2/ts/ts_gtest.h"
#include "opencv2/core/core.hpp" #include "opencv2/core/core.hpp"
...@@ -542,3 +553,4 @@ int main(int argc, char **argv) \ ...@@ -542,3 +553,4 @@ int main(int argc, char **argv) \
#endif #endif
#include "ts_perf.hpp"
This diff is collapsed.
This diff is collapsed.
import testlog_parser, sys, os, xml, re
from table_formatter import *
from optparse import OptionParser
cvsize_re = re.compile("^\d+x\d+$")
cvtype_re = re.compile("^(8U|8S|16U|16S|32S|32F|64F)C\d{1,3}$")

def keyselector(a):
    """Sort key for a test-parameter string.

    "WxH" size strings sort by pixel count, "<depth>C<channels>" type
    strings sort by a composite (depth, channels) code, and anything
    else sorts as the literal string.
    """
    if cvsize_re.match(a):
        width, height = [int(d) for d in a.split('x')]
        return width * height
    if cvtype_re.match(a):
        # OpenCV depth codes: 8U=0, 8S=1, 16U=2, 16S=3, 32S=4, 32F=5, 64F=6
        depth = 7
        for code, prefix in enumerate(("8U", "8S", "16U", "16S", "32S", "32F", "64F")):
            if a.startswith(prefix + "C"):
                depth = code
                break
        channels = int(a[a.index('C') + 1:])
        # depth is the major sort component, channel count the minor one
        return ((channels - 1) & 511) + (depth << 8)
    return a
def getValueParams(test):
    """Return the test's value_param attribute as a list of trimmed strings.

    A surrounding pair of parentheses, if present, is removed before
    splitting on commas.  Returns [] when the attribute is empty/missing.
    """
    raw = test.get("value_param")
    if not raw:
        return []
    if raw[:1] == "(":
        raw = raw[1:]
    if raw[-1:] == ")":
        raw = raw[:-1]
    values = []
    for piece in raw.split(","):
        values.append(piece.strip())
    return values
def nextPermutation(indexes, lists, x, y):
    """Advance `indexes` (odometer-style, in place) to the next combination.

    Positions x and y are the chart axes and are skipped.  Returns True if a
    new combination was produced, False once all combinations are exhausted.
    """
    pos = len(indexes) - 1
    while pos >= 0:
        # Skip over the axis positions.
        while pos in (x, y):
            pos -= 1
        if pos < 0:
            return False
        bumped = indexes[pos] + 1
        if bumped < len(lists[pos]):
            indexes[pos] = bumped
            return True
        # This digit overflowed: reset it and carry into the next one.
        indexes[pos] = 0
        pos -= 1
    return False
def getTestWideName(sname, indexes, lists, x, y):
    """Build a display name like "suite::(X, fixedval, Y)".

    The axis positions x and y are rendered as the placeholders "X" and "Y";
    every other position shows its currently selected parameter value.
    """
    parts = []
    for pos, choice in enumerate(indexes):
        if pos == x:
            parts.append("X")
        elif pos == y:
            parts.append("Y")
        else:
            parts.append(lists[pos][choice])
    return str(sname + "::(" + ", ".join(parts) + ")")
def getTest(stests, x, y, row, col):
    """Find the test whose x-axis param equals `row` and y-axis param `col`.

    `stests` is a list of (test, params) pairs; returns the first matching
    test object, or None when no test matches.
    """
    for case, params in stests:
        if params[x] == row and params[y] == col:
            return case
    return None
if __name__ == "__main__":
    # CLI entry point: build 2D pivot tables ("charts") from a single
    # performance-test XML log — one table per combination of the
    # non-axis parameter values.  (Python 2 script: uses `print >>`.)
    parser = OptionParser()
    parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto")
    parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms")
    parser.add_option("-m", "--metric", dest="metric", help="output metric", metavar="NAME", default="gmean")
    parser.add_option("-x", "", dest="x", help="argument number for rows", metavar="ROW", default=1)
    parser.add_option("-y", "", dest="y", help="argument number for columns", metavar="COL", default=0)
    parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None)
    (options, args) = parser.parse_args()

    # Exactly one log file is expected.
    if len(args) != 1:
        print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<log_name1>.xml"
        exit(1)

    options.generateHtml = detectHtmlOutputType(options.format)

    # NOTE(review): only the basename is kept, so the log is always read
    # from the current working directory — a path like logs/foo.xml loses
    # its directory part.  Confirm this is intentional.
    args[0] = os.path.basename(args[0])

    # Fall back to "gmean" for unknown metrics; a trailing '%' (relative
    # metric) is meaningless for a single log, so strip it.
    if options.metric not in metrix_table:
        options.metric = "gmean"
    if options.metric.endswith("%"):
        options.metric = options.metric[:-1]
    getter = metrix_table[options.metric][1]

    tests = testlog_parser.parseLogFile(args[0])
    if options.filter:
        expr = re.compile(options.filter)
        tests = [(t,getValueParams(t)) for t in tests if expr.search(str(t))]

    # NOTE(review): the (test, params) pairing above happens only when a
    # --filter is given; without it, the code below indexes tests[0][1] on
    # bare TestInfo objects.  Looks broken for the no-filter case — verify.
    if not tests:
        print >> sys.stderr, "Error - no tests matched"
        exit(1)

    argsnum = len(tests[0][1])
    sname = tests[0][0].shortName()

    # One dict per parameter position, used as a set of observed values.
    arglists = []
    for i in range(argsnum):
        arglists.append({})

    for pair in tests:
        # All selected tests must share one suite and one parameter count.
        if len(pair[1]) != argsnum:
            print >> sys.stderr, "Error - unable to create chart tables for functions having different argument numbers"
            exit(1)
        if pair[0].shortName() != sname:
            print >> sys.stderr, "Error - unable to create chart tables for functions from different test suits:"
            print >> sys.stderr, "First: ", sname
            print >> sys.stderr, "Second:", pair[0].shortName()
            exit(1)
        for i in range(argsnum):
            arglists[i][pair[1][i]] = 1

    # Sort each parameter's values with the smart key (sizes by area,
    # cv types by depth/channels).  iterkeys() pins this file to Python 2.
    for i in range(argsnum):
        arglists[i] = sorted([str(key) for key in arglists[i].iterkeys()], key=keyselector)

    if options.generateHtml:
        htmlPrintHeader(sys.stdout, "Report %s for %s" % (args[0], sname))

    # Row (x) / column (y) parameter indexes; fall back to (1, 0) when the
    # requested axes are out of range or identical.
    indexes = [0] * argsnum
    x = int(options.x)
    y = int(options.y)
    if x == y or x < 0 or y < 0 or x >= argsnum or y >= argsnum:
        x = 1
        y = 0

    # Emit one table per combination of the fixed (non-axis) parameters.
    while True:
        # Select tests matching the current fixed-parameter combination.
        stests = []
        for pair in tests:
            t = pair[0]
            v = pair[1]
            for i in range(argsnum):
                if i != x and i != y:
                    if v[i] != arglists[i][indexes[i]]:
                        t = None
                        break
            if t:
                stests.append(pair)

        tbl = table(metrix_table[options.metric][0] + " for\n" + getTestWideName(sname, indexes, arglists, x, y))
        tbl.newColumn("x", "X\Y")
        for col in arglists[y]:
            tbl.newColumn(col, col, align="center")
        for row in arglists[x]:
            tbl.newRow()
            tbl.newCell("x", row)
            for col in arglists[y]:
                case = getTest(stests, x, y, row, col)
                if case:
                    status = case.get("status")
                    if status != "run":
                        # Non-"run" cells show the status in red.
                        tbl.newCell(col, status, color = "red")
                    else:
                        val = getter(case, None, options.units)
                        if isinstance(val, float):
                            tbl.newCell(col, "%.2f %s" % (val, options.units), val)
                        else:
                            tbl.newCell(col, val, val)
                else:
                    # No test exists for this row/column combination.
                    tbl.newCell(col, "-")

        if options.generateHtml:
            tbl.htmlPrintTable(sys.stdout)
        else:
            tbl.consolePrintTable(sys.stdout)

        # Advance the odometer over the fixed parameters; stop after the
        # last combination has been printed.
        if not nextPermutation(indexes, arglists, x, y):
            break

    if options.generateHtml:
        htmlPrintFooter(sys.stdout)
\ No newline at end of file
This diff is collapsed.
import testlog_parser, sys, os, xml, re
from table_formatter import *
from optparse import OptionParser
if __name__ == "__main__":
    # CLI entry point: render one flat table of per-test metrics collected
    # from one or more performance-test XML logs.  (Python 2 script.)
    parser = OptionParser()
    parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto")
    parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms")
    parser.add_option("-c", "--columns", dest="columns", help="comma-separated list of columns to show", metavar="COLS", default="")
    parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None)
    parser.add_option("", "--show-all", action="store_true", dest="showall", default=False, help="also include empty and \"notrun\" lines")
    (options, args) = parser.parse_args()

    if len(args) < 1:
        print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<log_name1>.xml"
        exit(0)

    options.generateHtml = detectHtmlOutputType(options.format)
    # NOTE(review): only args[0] is basename-stripped, and iterating
    # set(args) below makes the file order nondeterministic — verify intent.
    args[0] = os.path.basename(args[0])

    # Read every distinct log file and merge all test cases into one list.
    tests = []
    files = []
    for arg in set(args):
        files.append(os.path.basename(arg))
        tests.extend(testlog_parser.parseLogFile(arg))

    if options.filter:
        expr = re.compile(options.filter)
        tests = [t for t in tests if expr.search(str(t))]

    tbl = table(", ".join(files))

    # Resolve the column list: user-provided (minus relative '%' metrics and
    # unknown names) or the default set; "name" is always the first column.
    if options.columns:
        metrics = [s.strip() for s in options.columns.split(",")]
        metrics = [m for m in metrics if m and not m.endswith("%") and m in metrix_table]
    else:
        metrics = None
    if not metrics:
        metrics = ["name", "samples", "outliers", "min", "median", "gmean", "mean", "stddev"]
    if "name" not in metrics:
        metrics.insert(0, "name")

    for m in metrics:
        if m == "name":
            tbl.newColumn(m, metrix_table[m][0])
        else:
            tbl.newColumn(m, metrix_table[m][0], align = "center")

    # Fill the rows.  Without --show-all, consecutive "notrun" cases reuse
    # (overwrite) a single row so they do not flood the output.
    needNewRow = True
    for case in sorted(tests):
        if needNewRow:
            tbl.newRow()
            if not options.showall:
                needNewRow = False
        status = case.get("status")
        if status != "run":
            if status != "notrun":
                needNewRow = True
            for m in metrics:
                if m == "name":
                    tbl.newCell(m, str(case))
                else:
                    tbl.newCell(m, status, color = "red")
        else:
            needNewRow = True
            for m in metrics:
                val = metrix_table[m][1](case, None, options.units)
                if isinstance(val, float):
                    tbl.newCell(m, "%.2f %s" % (val, options.units), val)
                else:
                    tbl.newCell(m, val, val)
    # Drop the trailing placeholder row left by the overwrite trick above.
    if not needNewRow:
        tbl.trimLastRow()

    # output table
    if options.generateHtml:
        htmlPrintHeader(sys.stdout, "Report %s tests from %s" % (len(tests), ", ".join(files)))
        tbl.htmlPrintTable(sys.stdout)
        htmlPrintFooter(sys.stdout)
    else:
        tbl.consolePrintTable(sys.stdout)
\ No newline at end of file
This diff is collapsed.
import testlog_parser, sys, os, xml, glob
from table_formatter import *
from optparse import OptionParser
if __name__ == "__main__":
    # CLI entry point: compare one metric across several performance-test
    # XML logs, with optional relative (%) columns versus the first log.
    # (Python 2 script: `print >>`, iterkeys().)
    if len(sys.argv) < 2:
        print >> sys.stderr, "Usage:\n", os.path.basename(sys.argv[0]), "<log_name1>.xml [<log_name2>.xml ...]"
        exit(0)

    parser = OptionParser()
    parser.add_option("-o", "--output", dest="format", help="output results in text format (can be 'txt', 'html' or 'auto' - default)", metavar="FMT", default="auto")
    parser.add_option("-m", "--metric", dest="metric", help="output metric", metavar="NAME", default="gmean")
    parser.add_option("-u", "--units", dest="units", help="units for output values (s, ms (default), mks, ns or ticks)", metavar="UNITS", default="ms")
    parser.add_option("-f", "--filter", dest="filter", help="regex to filter tests", metavar="REGEX", default=None)
    parser.add_option("", "--no-relatives", action="store_false", dest="calc_relatives", default=True, help="do not output relative values")
    parser.add_option("", "--show-all", action="store_true", dest="showall", default=False, help="also include empty and \"notrun\" lines")
    (options, args) = parser.parse_args()

    options.generateHtml = detectHtmlOutputType(options.format)
    if options.metric not in metrix_table:
        options.metric = "gmean"
    # A '%' metric is already relative, so extra relative columns are off.
    if options.metric.endswith("%"):
        options.calc_relatives = False

    # expand wildcards and filter duplicates
    files = []
    files1 = []
    for arg in args:
        if ("*" in arg) or ("?" in arg):
            files1.extend([os.path.abspath(f) for f in glob.glob(arg)])
        else:
            files.append(os.path.abspath(arg))
    # Order-preserving de-duplication trick: seen.add() returns None.
    seen = set()
    files = [ x for x in files if x not in seen and not seen.add(x)]
    files.extend((set(files1) - set(files)))

    # read all passed files
    test_sets = []
    for arg in files:
        try:
            tests = testlog_parser.parseLogFile(arg)
            # NOTE(review): `re` is not in this file's own import line; it
            # presumably leaks in via `from table_formatter import *` —
            # confirm, otherwise --filter raises NameError here.
            if options.filter:
                expr = re.compile(options.filter)
                tests = [t for t in tests if expr.search(str(t))]
            if tests:
                test_sets.append((os.path.basename(arg), tests))
        except IOError as err:
            sys.stderr.write("IOError reading \"" + arg + "\" - " + str(err) + os.linesep)
        except xml.parsers.expat.ExpatError as err:
            sys.stderr.write("ExpatError reading \"" + arg + "\" - " + str(err) + os.linesep)

    if not test_sets:
        sys.stderr.write("Error: no test data found" + os.linesep)
        quit()

    # find matches
    # test_cases maps full test name -> list of per-log cases (None = absent).
    setsCount = len(test_sets)
    test_cases = {}
    for i in range(setsCount):
        for case in test_sets[i][1]:
            name = str(case)
            if name not in test_cases:
                test_cases[name] = [None] * setsCount
            test_cases[name][i] = case

    # build table
    getter = metrix_table[options.metric][1]
    if options.calc_relatives:
        getter_p = metrix_table[options.metric + "%"][1]
    tbl = table(metrix_table[options.metric][0])

    # header
    tbl.newColumn("name", "Name of Test", align = "left")
    i = 0
    # NOTE(review): the loop variable shadows the builtin `set` here.
    for set in test_sets:
        tbl.newColumn(str(i), set[0].replace(".xml","").replace("_", "\n"), align = "center")
        i += 1
    if options.calc_relatives:
        # One "vs first log" relative column per non-first log.
        i = 1
        for set in test_sets[1:]:
            tbl.newColumn(str(i) + "%", set[0].replace(".xml","").replace("_", "\n") + "\nvs\n" + test_sets[0][0].replace(".xml","").replace("_", "\n"), align = "center")
            i += 1

    # rows
    # Without --show-all, rows with no usable data are overwritten in place.
    needNewRow = True
    for name in sorted(test_cases.iterkeys()):
        cases = test_cases[name]
        if needNewRow:
            tbl.newRow()
            if not options.showall:
                needNewRow = False
        tbl.newCell("name", name)
        for i in range(setsCount):
            case = cases[i]
            if case is None:
                # Test absent from this log.
                tbl.newCell(str(i), "-")
                if options.calc_relatives and i > 0:
                    tbl.newCell(str(i) + "%", "-")
            else:
                status = case.get("status")
                if status != "run":
                    tbl.newCell(str(i), status, color = "red")
                    if status != "notrun":
                        needNewRow = True
                    if options.calc_relatives and i > 0:
                        tbl.newCell(str(i) + "%", "-", color = "red")
                else:
                    val = getter(case, cases[0], options.units)
                    if options.calc_relatives and i > 0 and val:
                        valp = getter_p(case, cases[0], options.units)
                    else:
                        valp = None
                    # Color-code the relative change: >5% slower red,
                    # >5% faster green, otherwise neutral.
                    if not valp or i == 0:
                        color = None
                    elif valp > 1.05:
                        color = "red"
                    elif valp < 0.95:
                        color = "green"
                    else:
                        color = None
                    if val:
                        needNewRow = True
                        if options.metric.endswith("%"):
                            tbl.newCell(str(i), "%.2f" % val, val, color = color)
                        else:
                            tbl.newCell(str(i), "%.3f %s" % (val, options.units), val, color = color)
                    else:
                        tbl.newCell(str(i), "-")
                    if options.calc_relatives and i > 0:
                        if valp:
                            tbl.newCell(str(i) + "%", "%.2f" % valp, valp, color = color, bold = color)
                        else:
                            tbl.newCell(str(i) + "%", "-")
    # Drop the trailing placeholder row left by the overwrite trick.
    if not needNewRow:
        tbl.trimLastRow()

    # output table
    if options.generateHtml:
        htmlPrintHeader(sys.stdout, "Summary report for %s tests from %s test logs" % (len(test_cases), setsCount))
        tbl.htmlPrintTable(sys.stdout)
        htmlPrintFooter(sys.stdout)
    else:
        tbl.consolePrintTable(sys.stdout)
This diff is collapsed.
import sys, re, os.path
from xml.dom.minidom import parse
class TestInfo(object):
    # Wraps a single <testcase> element from a Google Test XML log and
    # exposes its performance metrics, converting time metrics from clock
    # ticks to the requested units.  (Python 2 class: long(), cmp(), print.)

    def __init__(self, xmlnode):
        """Extract identification attributes and metrics from `xmlnode`."""
        self.fixture = xmlnode.getAttribute("classname")
        self.name = xmlnode.getAttribute("name")
        self.value_param = xmlnode.getAttribute("value_param")
        self.type_param = xmlnode.getAttribute("type_param")
        # NOTE(review): duplicate of the "name" read three lines above.
        self.name = xmlnode.getAttribute("name")

        # A nested <failure> element overrides the recorded status.
        if xmlnode.getElementsByTagName("failure"):
            self.status = "failed"
        else:
            self.status = xmlnode.getAttribute("status")

        # gtest marks disabled cases with a DISABLED_ prefix; strip it from
        # the display names and record the status.
        if self.name.startswith("DISABLED_"):
            self.status = "disabled"
            self.fixture = self.fixture.replace("DISABLED_", "")
            self.name = self.name.replace("DISABLED_", "")

        # Raw metric values keyed by XML attribute name ("metrix" sic).
        self.metrix = {}
        self.parseLongMetric(xmlnode, "bytesIn");
        self.parseLongMetric(xmlnode, "bytesOut");
        self.parseIntMetric(xmlnode, "samples");
        self.parseIntMetric(xmlnode, "outliers");
        self.parseFloatMetric(xmlnode, "frequency", 1);
        self.parseLongMetric(xmlnode, "min");
        self.parseLongMetric(xmlnode, "median");
        self.parseLongMetric(xmlnode, "gmean");
        self.parseLongMetric(xmlnode, "mean");
        self.parseLongMetric(xmlnode, "stddev");
        self.parseFloatMetric(xmlnode, "gstddev");

    def parseLongMetric(self, xmlnode, name, default = 0):
        """Store attribute `name` as a long (Python 2), or `default` if absent."""
        if xmlnode.hasAttribute(name):
            tmp = xmlnode.getAttribute(name)
            val = long(tmp)
            self.metrix[name] = val
        else:
            self.metrix[name] = default

    def parseIntMetric(self, xmlnode, name, default = 0):
        """Store attribute `name` as an int, or `default` if absent."""
        if xmlnode.hasAttribute(name):
            tmp = xmlnode.getAttribute(name)
            val = int(tmp)
            self.metrix[name] = val
        else:
            self.metrix[name] = default

    def parseFloatMetric(self, xmlnode, name, default = 0):
        """Store attribute `name` as a float, or `default` if absent."""
        if xmlnode.hasAttribute(name):
            tmp = xmlnode.getAttribute(name)
            val = float(tmp)
            self.metrix[name] = val
        else:
            self.metrix[name] = default

    def parseStringMetric(self, xmlnode, name, default = None):
        """Store attribute `name` as a stripped string, or `default` if absent."""
        if xmlnode.hasAttribute(name):
            tmp = xmlnode.getAttribute(name)
            self.metrix[name] = tmp.strip()
        else:
            self.metrix[name] = default

    def get(self, name, units="ms"):
        """Return an identification field or a metric value.

        Time metrics (gmean/min/mean/median/stddev) are converted from
        clock ticks to `units` using the recorded "frequency" metric.
        """
        if name == "classname":
            return self.fixture
        if name == "name":
            return self.name
        if name == "fullname":
            return self.__str__()
        if name == "value_param":
            return self.value_param
        if name == "type_param":
            return self.type_param
        if name == "status":
            return self.status
        val = self.metrix.get(name, None)
        # Falsy (0/None) metrics are returned unscaled.
        if not val:
            return val
        if name in ["gmean", "min", "mean", "median", "stddev"]:
            scale = 1.0
            # `or 1.0` guards against a recorded frequency of 0.
            frequency = self.metrix.get("frequency", 1.0) or 1.0
            if units == "ms":
                scale = 1000.0
            if units == "mks":
                scale = 1000000.0
            if units == "ns":
                scale = 1000000000.0
            if units == "ticks":
                frequency = long(1)
                scale = long(1)
            return val * scale / frequency
        return val

    def dump(self, units="ms"):
        """Print a one-line ANSI-colored summary (Python 2 print statement)."""
        print "%s ->\t\033[1;31m%s\033[0m = \t%.2f%s" % (str(self), self.status, self.get("gmean", units), units)

    def shortName(self):
        """Return "testname::fixture" with the shared suffix/prefix trimmed."""
        # Keep only the part of the test name before any parameter index.
        pos = self.name.find("/")
        if pos > 0:
            name = self.name[:pos]
        else:
            name = self.name
        # Drop the test name (and a trailing '_') from the fixture when the
        # fixture is simply "<something>_<name>".
        if self.fixture.endswith(name):
            fixture = self.fixture[:-len(name)]
        else:
            fixture = self.fixture
        if fixture.endswith("_"):
            fixture = fixture[:-1]
        return '::'.join(filter(None, [name, fixture]))

    def __str__(self):
        """Full display name: name::fixture::type_param::value_param."""
        pos = self.name.find("/")
        if pos > 0:
            name = self.name[:pos]
        else:
            name = self.name
        if self.fixture.endswith(name):
            fixture = self.fixture[:-len(name)]
        else:
            fixture = self.fixture
        if fixture.endswith("_"):
            fixture = fixture[:-1]
        return '::'.join(filter(None, [name, fixture, self.type_param, self.value_param]))

    def __cmp__(self, other):
        """Python 2 ordering: by fixture, then type_param, then value_param.

        A case without a param sorts before one that has it.
        """
        r = cmp(self.fixture, other.fixture);
        if r != 0:
            return r
        if self.type_param:
            if other.type_param:
                r = cmp(self.type_param, other.type_param);
                if r != 0:
                    return r
            else:
                return -1
        else:
            if other.type_param:
                return 1
        if self.value_param:
            if other.value_param:
                r = cmp(self.value_param, other.value_param);
                if r != 0:
                    return r
            else:
                return -1
        else:
            if other.value_param:
                return 1
        return 0
def parseLogFile(filename):
    """Parse a Google Test XML log and return a TestInfo per <testcase>."""
    document = parse(filename)
    return [TestInfo(node) for node in document.getElementsByTagName("testcase")]
if __name__ == "__main__":
    # Ad-hoc CLI: dump every test case found in the given log files.
    # (Python 2 script: print statements.)
    if len(sys.argv) < 2:
        print "Usage:\n", os.path.basename(sys.argv[0]), "<log_name>.xml"
        exit(0)
    for arg in sys.argv[1:]:
        print "Tests found in", arg
        tests = parseLogFile(arg)
        for t in sorted(tests):
            t.dump()
        # Blank line between files.
        print
#include "precomp.hpp" #include "precomp.hpp"
#if ANDROID
/* Old Android API levels lack wcscasecmp; provide a minimal replacement.
 * Per the POSIX contract, the return value is 0 when the strings compare
 * equal ignoring case, and negative/positive when lhs orders before/after
 * rhs (callers such as gtest test `wcscasecmp(...) == 0`). */
int wcscasecmp(const wchar_t* lhs, const wchar_t* rhs)
{
    wint_t left, right;
    do {
        left = towlower(*lhs++);
        right = towlower(*rhs++);
    } while (left && left == right);
    /* BUG FIX: the original returned (left == right), i.e. non-zero for
     * EQUAL strings — the inverse of the standard wcscasecmp semantics. */
    return (int)left - (int)right;
}
#endif
#define GTEST_CREATE_AS_SHARED_LIBRARY 1
#if _MSC_VER >= 1200 #if _MSC_VER >= 1200
#pragma warning( disable: 4127 4251) #pragma warning( disable: 4127 4251)
#endif #endif
#include "opencv2/ts/ts.hpp"
#include "opencv2/core/core_c.h" #include "opencv2/core/core_c.h"
#include "opencv2/ts/ts.hpp"
#if ANDROID #if GTEST_LINKED_AS_SHARED_LIBRARY
int wcscasecmp(const wchar_t* lhs, const wchar_t* rhs); #error ts module should not have GTEST_LINKED_AS_SHARED_LIBRARY defined
#endif #endif
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment