Commit ccfb3e6a authored by Andrey Kamaev

Perf tests: fixed some issues

parent 6ceb322a
@@ -4,24 +4,33 @@ using namespace std;
using namespace cv;
using namespace perf;
typedef std::tr1::tuple<const char*, int> ImageName_MinSize_t;
typedef std::tr1::tuple<std::string, int> ImageName_MinSize_t;
typedef perf::TestBaseWithParam<ImageName_MinSize_t> ImageName_MinSize;
PERF_TEST_P( ImageName_MinSize, CascadeClassifierLBPFrontalFace, testing::Values( ImageName_MinSize_t("cv/shared/lena.jpg", 10) ) )
{
const char* filename = std::tr1::get<0>(GetParam());
const string filename = std::tr1::get<0>(GetParam());
int min_size = std::tr1::get<1>(GetParam());
Size minSize(min_size, min_size);
CascadeClassifier cc(getDataPath("cv/cascadeandhog/cascades/lbpcascade_frontalface.xml"));
if (cc.empty())
FAIL() << "Can't load cascade file";
Mat img=imread(getDataPath(filename));
if (img.empty())
FAIL() << "Can't load source image";
vector<Rect> res;
declare.in(img).time(10000);
TEST_CYCLE(100)
declare.in(img);//.out(res)
while(next())
{
res.clear();
startTimer();
cc.detectMultiScale(img, res, 1.1, 3, 0, minSize);
stopTimer();
}
}
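The new body drops the TEST_CYCLE(100) wrapper in favour of the framework's manual sampling loop: declare the inputs, then time only the measured call with startTimer()/stopTimer() inside while(next()). A minimal sketch of the same idiom for an unrelated test (the fixture name, parameter and the blur call are placeholders, assuming the usual perf-test precompiled header and `using namespace perf;` as in the file above):

```cpp
typedef perf::TestBaseWithParam<cv::Size> Size_Blur;   // hypothetical fixture

PERF_TEST_P(Size_Blur, blur3x3, testing::Values(cv::Size(640, 480)))
{
    cv::Mat src(GetParam(), CV_8UC3), dst(GetParam(), CV_8UC3);
    declare.in(src, WARMUP_RNG).out(dst);

    while(next())                              // the framework decides when enough samples were taken
    {
        startTimer();
        cv::blur(src, dst, cv::Size(3, 3));    // only this call is measured
        stopTimer();
    }
}
```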
@@ -93,8 +93,6 @@ private: int _val;\
};\
inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }
CV_ENUM(MatDepth, CV_8U, CV_8S, CV_16U, CV_16S, CV_32S, CV_32F, CV_64F, CV_USRTYPE1)
#define CV_FLAGS(class_name, ...) \
class CV_EXPORTS class_name {\
public:\
@@ -122,6 +120,8 @@ private: int _val;\
};\
inline void PrintTo(const class_name& t, std::ostream* os) { t.PrintTo(os); }
CV_ENUM(MatDepth, CV_8U, CV_8S, CV_16U, CV_16S, CV_32S, CV_32F, CV_64F, CV_USRTYPE1)
/*****************************************************************************************\
* Regression control utility for performance testing *
\*****************************************************************************************/
@@ -186,7 +186,8 @@ typedef struct CV_EXPORTS performance_metrics
{
TERM_ITERATIONS = 0,
TERM_TIME = 1,
TERM_UNKNOWN = 2
TERM_INTERRUPT = 2,
TERM_UNKNOWN = -1
};
performance_metrics();
@@ -224,10 +225,12 @@ protected:
WARMUP_RNG,
WARMUP_NONE
};
void reportMetrics(bool toJUnitXML = false);
static void warmup(cv::InputOutputArray a, int wtype = WARMUP_READ);
performance_metrics& calcMetrics();
void reportMetrics(bool toJUnitXML = false);
void RunPerfTestBody();
private:
typedef std::vector<std::pair<int, cv::Size> > SizeVector;
typedef std::vector<int64> TimeVector;
@@ -332,12 +335,7 @@ CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);
protected:\
virtual void PerfTestBody();\
};\
TEST_F(test_case_name, test_name){\
try {\
PerfTestBody();\
}catch(cv::Exception e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what(); }\
catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws."; }\
}\
TEST_F(test_case_name, test_name){ RunPerfTestBody(); }\
}\
void PERF_PROXY_NAMESPACE_NAME_(test_case_name, test_name)::test_case_name::PerfTestBody()
@@ -375,12 +373,7 @@ CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);
protected:\
virtual void PerfTestBody();\
};\
TEST_F(fixture, testname){\
try {\
PerfTestBody();\
}catch(cv::Exception e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what(); }\
catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws."; }\
}\
TEST_F(fixture, testname){ RunPerfTestBody(); }\
}\
void PERF_PROXY_NAMESPACE_NAME_(fixture, testname)::fixture::PerfTestBody()
@@ -413,12 +406,7 @@ CV_EXPORTS void PrintTo(const Size& sz, ::std::ostream* os);
protected:\
virtual void PerfTestBody();\
};\
TEST_P(fixture##_##name, name /*perf*/){\
try {\
PerfTestBody();\
}catch(cv::Exception e) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what(); }\
catch(...) { FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws."; }\
}\
TEST_P(fixture##_##name, name /*perf*/){ RunPerfTestBody(); }\
INSTANTIATE_TEST_CASE_P(/*none*/, fixture##_##name, params);\
void fixture##_##name::PerfTestBody()
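With the per-test try/catch moved into TestBase::RunPerfTestBody(), every generated googletest body shrinks to a single call. Roughly, and leaving out anything not visible in this hunk, the PERF_TEST_P invocation from the first file now expands along these lines:

```cpp
// Simplified expansion sketch of
// PERF_TEST_P(ImageName_MinSize, CascadeClassifierLBPFrontalFace, testing::Values(...))
class ImageName_MinSize_CascadeClassifierLBPFrontalFace : public ImageName_MinSize
{
protected:
    virtual void PerfTestBody();
};
// the generated body is now a one-liner; all exception handling lives in RunPerfTestBody()
TEST_P(ImageName_MinSize_CascadeClassifierLBPFrontalFace, CascadeClassifierLBPFrontalFace /*perf*/)
{ RunPerfTestBody(); }
INSTANTIATE_TEST_CASE_P(/*none*/, ImageName_MinSize_CascadeClassifierLBPFrontalFace,
                        testing::Values(ImageName_MinSize_t("cv/shared/lena.jpg", 10)));
void ImageName_MinSize_CascadeClassifierLBPFrontalFace::PerfTestBody()
{
    // user code from the PERF_TEST_P body above goes here
}
```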
@@ -27,6 +27,11 @@ void randu(cv::Mat& m)
}
}
/*****************************************************************************************\
* inner exception class for early termination
\*****************************************************************************************/
class PerfEarlyExitException: public cv::Exception {};
/*****************************************************************************************\
* ::perf::Regression
@@ -381,9 +386,9 @@ const char *command_line_keys =
"{!!bugbugbugbug!! |perf_min_samples |10 |minimal required numer of samples}"
"{!!bugbugbugbug!! |perf_seed |809564 |seed for random numbers generator}"
#if ANDROID
"{!!bugbugbugbug!! |perf_time_limit |2.0 |default time limit for a single test (in seconds)}"
"{!!bugbugbugbug!! |perf_time_limit |6.0 |default time limit for a single test (in seconds)}"
#else
"{!!bugbugbugbug!! |perf_time_limit |1.0 |default time limit for a single test (in seconds)}"
"{!!bugbugbugbug!! |perf_time_limit |3.0 |default time limit for a single test (in seconds)}"
#endif
"{!!bugbugbugbug!! |perf_max_deviation |1.0 |}"
"{h |help |false |}"
@@ -411,8 +416,6 @@ void TestBase::Init(int argc, const char* const argv[])
return;
}
//LOGD("!!!!!!!!!!!! %f !!!!!!", param_time_limit);
timeLimitDefault = param_time_limit == 0.0 ? 1 : (int64)(param_time_limit * cv::getTickFrequency());
_timeadjustment = _calibrate();
}
@@ -567,12 +570,15 @@ performance_metrics& TestBase::calcMetrics()
metrics.samples = (unsigned int)times.size();
metrics.outliers = 0;
if (currentIter == nIters)
metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
else if (totalTime >= timeLimit)
metrics.terminationReason = performance_metrics::TERM_TIME;
else
metrics.terminationReason = performance_metrics::TERM_UNKNOWN;
if (metrics.terminationReason != performance_metrics::TERM_INTERRUPT)
{
if (currentIter == nIters)
metrics.terminationReason = performance_metrics::TERM_ITERATIONS;
else if (totalTime >= timeLimit)
metrics.terminationReason = performance_metrics::TERM_TIME;
else
metrics.terminationReason = performance_metrics::TERM_UNKNOWN;
}
std::sort(times.begin(), times.end());
@@ -697,7 +703,7 @@ void TestBase::reportMetrics(bool toJUnitXML)
#endif
if (type_param) LOGD("type = %11s", type_param);
if (value_param) LOGD("param = %11s", value_param);
if (value_param) LOGD("params = %11s", value_param);
switch (m.terminationReason)
{
@@ -707,6 +713,9 @@ void TestBase::reportMetrics(bool toJUnitXML)
case performance_metrics::TERM_TIME:
LOGD("termination reason: reached time limit");
break;
case performance_metrics::TERM_INTERRUPT:
LOGD("termination reason: aborted by the performance testing framework");
break;
case performance_metrics::TERM_UNKNOWN:
default:
LOGD("termination reason: unknown");
@@ -721,12 +730,15 @@ void TestBase::reportMetrics(bool toJUnitXML)
LOGD("samples =%11u of %u", m.samples, nIters);
LOGD("outliers =%11u", m.outliers);
LOGD("frequency =%11.0f", m.frequency);
LOGD("min =%11.0f = %.2fms", m.min, m.min * 1e3 / m.frequency);
LOGD("median =%11.0f = %.2fms", m.median, m.median * 1e3 / m.frequency);
LOGD("gmean =%11.0f = %.2fms", m.gmean, m.gmean * 1e3 / m.frequency);
LOGD("gstddev =%11.8f = %.2fms for 97%% dispersion interval", m.gstddev, m.gmean * 2 * sinh(m.gstddev * 3) * 1e3 / m.frequency);
LOGD("mean =%11.0f = %.2fms", m.mean, m.mean * 1e3 / m.frequency);
LOGD("stddev =%11.0f = %.2fms", m.stddev, m.stddev * 1e3 / m.frequency);
if (m.samples > 0)
{
LOGD("min =%11.0f = %.2fms", m.min, m.min * 1e3 / m.frequency);
LOGD("median =%11.0f = %.2fms", m.median, m.median * 1e3 / m.frequency);
LOGD("gmean =%11.0f = %.2fms", m.gmean, m.gmean * 1e3 / m.frequency);
LOGD("gstddev =%11.8f = %.2fms for 97%% dispersion interval", m.gstddev, m.gmean * 2 * sinh(m.gstddev * 3) * 1e3 / m.frequency);
LOGD("mean =%11.0f = %.2fms", m.mean, m.mean * 1e3 / m.frequency);
LOGD("stddev =%11.0f = %.2fms", m.stddev, m.stddev * 1e3 / m.frequency);
}
}
}
@@ -762,7 +774,7 @@ std::string TestBase::getDataPath(const std::string& relativePath)
if (relativePath.empty())
{
ADD_FAILURE() << " Bad path to test resource";
return std::string();
throw PerfEarlyExitException();
}
const char *data_path_dir = getenv("OPENCV_TEST_DATA_PATH");
@@ -791,10 +803,34 @@ std::string TestBase::getDataPath(const std::string& relativePath)
if (fp)
fclose(fp);
else
{
ADD_FAILURE() << " Requested file \"" << path << "\" does not exist.";
throw PerfEarlyExitException();
}
return path;
}
void TestBase::RunPerfTestBody()
{
try
{
this->PerfTestBody();
}
catch(PerfEarlyExitException)
{
metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
return;//no additional failure logging
}
catch(cv::Exception e)
{
FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws:\n " << e.what();
}
catch(...)
{
FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n Actual: it throws.";
}
}
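Together, PerfEarlyExitException, getDataPath() and RunPerfTestBody() let a test stop cleanly when its input data is missing: getDataPath() records the failure with ADD_FAILURE(), throws, and the runner marks the run as TERM_INTERRUPT without logging a second failure. A self-contained toy illustration of that control flow (all names are invented for the example; this is not OpenCV code):

```cpp
#include <iostream>
#include <stdexcept>

struct EarlyExit {};                          // plays the role of PerfEarlyExitException

struct Runner
{
    int terminationReason;                    // plays the role of metrics.terminationReason
    Runner() : terminationReason(-1) {}       // -1 ~ TERM_UNKNOWN

    void body()                               // plays the role of PerfTestBody()
    {
        throw EarlyExit();                    // e.g. a required test data file is missing
    }

    void run()                                // plays the role of RunPerfTestBody()
    {
        try { body(); }
        catch(EarlyExit&)        { terminationReason = 2; return; }  // 2 ~ TERM_INTERRUPT
        catch(std::exception& e) { std::cerr << "unexpected: " << e.what() << "\n"; }
        catch(...)               { std::cerr << "unexpected exception\n"; }
    }
};

int main()
{
    Runner r;
    r.run();
    std::cout << "terminationReason = " << r.terminationReason << std::endl;  // prints 2
    return 0;
}
```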
/*****************************************************************************************\
* ::perf::TestBase::_declareHelper
\*****************************************************************************************/