Commit 97da1419, authored Oct 14, 2013 by Andrey Pavlenko; committed by OpenCV Buildbot, Oct 14, 2013
Merge pull request #1573 from alalek:perf_simple_strategy
Parents: 7f15ec9d 376cd8f8
Showing 3 changed files with 197 additions and 48 deletions.
modules/ocl/perf/main.cpp                   (+2, -0)
modules/ts/include/opencv2/ts/ts_perf.hpp   (+14, -0)
modules/ts/src/ts_perf.cpp                  (+181, -48)
modules/ocl/perf/main.cpp
@@ -67,5 +67,7 @@ static const char * impls[] =

 int main(int argc, char ** argv)
 {
+    ::perf::TestBase::setPerformanceStrategy(::perf::PERF_STRATEGY_SIMPLE);
+
     CV_PERF_TEST_MAIN_INTERNALS(ocl, impls, dumpOpenCLDevice())
 }
modules/ts/include/opencv2/ts/ts_perf.hpp
@@ -247,9 +247,20 @@ typedef struct CV_EXPORTS performance_metrics
     };

     performance_metrics();
     void clear();
 } performance_metrics;

+/*****************************************************************************************\
+*                           Strategy for performance measuring                            *
+\*****************************************************************************************/
+
+enum PERF_STRATEGY
+{
+    PERF_STRATEGY_BASE = 0,
+    PERF_STRATEGY_SIMPLE = 1,
+};
+
+
 /*****************************************************************************************\
 *                           Base fixture for performance tests                            *
 \*****************************************************************************************/
@@ -265,6 +276,9 @@ public:
     static std::string getDataPath(const std::string& relativePath);
     static std::string getSelectedImpl();

+    static enum PERF_STRATEGY getPerformanceStrategy();
+    static enum PERF_STRATEGY setPerformanceStrategy(enum PERF_STRATEGY strategy);
+
 protected:
     virtual void PerfTestBody() = 0;
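The two static members added above toggle a process-wide setting: getPerformanceStrategy() reads it, and setPerformanceStrategy() swaps it and returns the previous value. A minimal sketch against these declarations (the wrapper function is hypothetical, not part of the commit):

#include "opencv2/ts/ts_perf.hpp"

// Hypothetical helper: switch the perf framework to the simple strategy,
// verify it took effect, then restore whatever was set before.
void useSimpleStrategy()
{
    ::perf::PERF_STRATEGY prev =
        ::perf::TestBase::setPerformanceStrategy(::perf::PERF_STRATEGY_SIMPLE);

    CV_Assert(::perf::TestBase::getPerformanceStrategy() == ::perf::PERF_STRATEGY_SIMPLE);

    ::perf::TestBase::setPerformanceStrategy(prev); // restore the old strategy
}

A module-level call made before TestBase::Init(), as in the modules/ocl/perf/main.cpp change above, acts as a default: the --perf_strategy option parsed in ts_perf.cpp below leaves the current strategy untouched when it is "default" and overrides it only for "base" or "simple".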
modules/ts/src/ts_perf.cpp
@@ -18,6 +18,9 @@ int64 TestBase::_timeadjustment = 0;
 static std::vector<std::string> available_impls;

 static std::string param_impl;
+
+static enum PERF_STRATEGY param_strategy = PERF_STRATEGY_BASE;
+
 static double param_max_outliers;
 static double param_max_deviation;
 static unsigned int param_min_samples;
@@ -152,7 +155,7 @@ void Regression::init(const std::string& testSuitName, const std::string& ext)
 {
     if (!storageInPath.empty())
     {
-        LOGE("Subsequent initialisation of Regression utility is not allowed.");
+        LOGE("Subsequent initialization of Regression utility is not allowed.");
         return;
     }
@@ -598,6 +601,11 @@ Regression& Regression::operator() (const std::string& name, cv::InputArray arra
 *                              ::perf::performance_metrics
 \*****************************************************************************************/

+performance_metrics::performance_metrics()
+{
+    clear();
+}
+
 void performance_metrics::clear()
 {
     bytesIn = 0;
     bytesOut = 0;
@@ -643,6 +651,7 @@ void TestBase::Init(const std::vector<std::string> & availableImpls,
         "|the implementation variant of functions under test}"
         "{   |perf_list_impls     |false    |list available implementation variants and exit}"
         "{   |perf_run_cpu        |false    |deprecated, equivalent to --perf_impl=plain}"
+        "{   |perf_strategy       |default  |specifies performance measuring strategy: default, base or simple (weak restrictions)}"
 #ifdef ANDROID
         "{   |perf_time_limit     |6.0      |default time limit for a single test (in seconds)}"
         "{   |perf_affinity_mask  |0        |set affinity mask for the main thread}"
@@ -669,6 +678,24 @@ void TestBase::Init(const std::vector<std::string> & availableImpls,
     ::testing::AddGlobalTestEnvironment(new PerfEnvironment);

     param_impl          = args.get<bool>("perf_run_cpu") ? "plain" : args.get<std::string>("perf_impl");
+    std::string perf_strategy = args.get<std::string>("perf_strategy");
+    if (perf_strategy == "default")
+    {
+        // nothing
+    }
+    else if (perf_strategy == "base")
+    {
+        param_strategy = PERF_STRATEGY_BASE;
+    }
+    else if (perf_strategy == "simple")
+    {
+        param_strategy = PERF_STRATEGY_SIMPLE;
+    }
+    else
+    {
+        printf("No such strategy: %s\n", perf_strategy.c_str());
+        exit(1);
+    }
     param_max_outliers  = std::min(100., std::max(0., args.get<double>("perf_max_outliers")));
     param_min_samples   = std::max(1u, args.get<unsigned int>("perf_min_samples"));
     param_max_deviation = std::max(0., args.get<double>("perf_max_deviation"));
@@ -762,6 +789,18 @@ std::string TestBase::getSelectedImpl()
     return param_impl;
 }

+enum PERF_STRATEGY TestBase::getPerformanceStrategy()
+{
+    return param_strategy;
+}
+
+enum PERF_STRATEGY TestBase::setPerformanceStrategy(enum PERF_STRATEGY strategy)
+{
+    enum PERF_STRATEGY ret = param_strategy;
+    param_strategy = strategy;
+    return ret;
+}
+
 int64 TestBase::_calibrate()
 {
@@ -792,6 +831,11 @@ int64 TestBase::_calibrate()
     _helper h;
     h.PerfTestBody();
     double compensation = h.getMetrics().min;
+    if (param_strategy == PERF_STRATEGY_SIMPLE)
+    {
+        CV_Assert(compensation < 0.01 * cv::getTickFrequency());
+        compensation = 0.0f; // simple strategy doesn't require any compensation
+    }
     LOGD("Time compensation is %.0f", compensation);
     return (int64)compensation;
 }
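The base strategy subtracts this calibrated compensation (the minimum time of an empty PerfTestBody, later applied as _timeadjustment in stopTimer()) from every sample; the hunk above makes the simple strategy assert the overhead is below 0.01 * cv::getTickFrequency(), i.e. under 10 ms worth of ticks, and then ignore it. A standalone sketch, not the ts internals, of how such a timer-overhead estimate can be taken:

#include "opencv2/core/core.hpp"
#include <cstdio>

// Standalone sketch: estimate the overhead of an empty timed region,
// analogous to what _calibrate() derives from an empty PerfTestBody.
// The minimum over many tries is the least noisy estimate.
static int64 measureTimerOverhead(int iterations)
{
    int64 best = -1;
    for (int i = 0; i < iterations; ++i)
    {
        int64 t0 = cv::getTickCount();
        int64 t1 = cv::getTickCount(); // empty "measured" region
        if (best < 0 || t1 - t0 < best)
            best = t1 - t0;
    }
    return best;
}

int main()
{
    std::printf("timer overhead: %d ticks (frequency %.0f ticks/s)\n",
                (int)measureTimerOverhead(1000), cv::getTickFrequency());
    return 0;
}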
@@ -855,8 +899,64 @@ cv::Size TestBase::getSize(cv::InputArray a)

 bool TestBase::next()
 {
-    bool has_next = ++currentIter < nIters && totalTime < timeLimit;
+    static int64 lastActivityPrintTime = 0;
+
+    if (currentIter != (unsigned int)-1)
+    {
+        if (currentIter + 1 != times.size())
+            ADD_FAILURE() << "  next() is called before stopTimer()";
+    }
+    else
+    {
+        lastActivityPrintTime = 0;
+        metrics.clear();
+    }

     cv::theRNG().state = param_seed; //this rng should generate same numbers for each run
+    ++currentIter;
+
+    bool has_next = false;
+    do
+    {
+        assert(currentIter == times.size());
+        if (currentIter == 0)
+        {
+            has_next = true;
+            break;
+        }
+
+        if (param_strategy == PERF_STRATEGY_BASE)
+        {
+            has_next = currentIter < nIters && totalTime < timeLimit;
+        }
+        else
+        {
+            assert(param_strategy == PERF_STRATEGY_SIMPLE);
+            if (totalTime - lastActivityPrintTime >= cv::getTickFrequency() * 10)
+            {
+                std::cout << '.' << std::endl;
+                lastActivityPrintTime = totalTime;
+            }
+            if (currentIter >= nIters)
+            {
+                has_next = false;
+                break;
+            }
+            if (currentIter < param_min_samples)
+            {
+                has_next = true;
+                break;
+            }
+
+            calcMetrics();
+
+            double criteria = 0.03; // 3%
+            if (fabs(metrics.mean) > 1e-6)
+                has_next = metrics.stddev > criteria * fabs(metrics.mean);
+            else
+                has_next = true;
+        }
+    }
+    while (false);

 #ifdef ANDROID
     if (log_power_checkpoints)
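In the simple-strategy branch above, next() keeps iterating until the sample standard deviation falls to 3% of the mean, once at least param_min_samples samples exist and as long as nIters is not exceeded; it also prints a dot whenever 10 seconds' worth of ticks accumulate, as a liveness signal. A standalone sketch of just the stopping rule (function and parameter names are placeholders):

#include <cmath>
#include <cstddef>
#include <vector>

// Standalone sketch of the simple strategy's stopping rule: keep sampling
// until the standard deviation falls within 3% of the mean, or a cap is hit.
bool shouldContinue(const std::vector<double>& samples,
                    unsigned minSamples, unsigned maxSamples)
{
    if (samples.size() >= maxSamples) return false; // iteration cap reached
    if (samples.size() < minSamples) return true;   // not enough data yet

    // Welford's online update for mean and sum of squared deviations
    double mean = 0, m2 = 0;
    for (std::size_t i = 0; i < samples.size(); ++i)
    {
        double delta = samples[i] - mean;
        mean += delta / (i + 1);
        m2 += delta * (samples[i] - mean);
    }
    double stddev = samples.size() > 1 ? std::sqrt(m2 / (samples.size() - 1)) : 0;

    const double criteria = 0.03; // 3%, as in TestBase::next()
    if (std::fabs(mean) > 1e-6)
        return stddev > criteria * std::fabs(mean);
    return true; // mean is ~0: keep sampling
}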
@@ -869,6 +969,9 @@ bool TestBase::next()
         if (!has_next)
             RecordProperty("test_complete", cv::format("%llu", t1).c_str());
     }
 #endif
+
+    if (has_next)
+        startTimer(); // really we should measure activity from this moment, so reset start time
     return has_next;
 }
@@ -915,7 +1018,7 @@ void TestBase::stopTimer()
 {
     int64 time = cv::getTickCount();
     if (lastTime == 0)
-        ADD_FAILURE() << "  stopTimer() is called before startTimer()";
+        ADD_FAILURE() << "  stopTimer() is called before startTimer()/next()";
     lastTime = time - lastTime;
     totalTime += lastTime;
     lastTime -= _timeadjustment;
@@ -926,6 +1029,7 @@ void TestBase::stopTimer()

 performance_metrics& TestBase::calcMetrics()
 {
+    CV_Assert(metrics.samples <= (unsigned int)currentIter);
     if ((metrics.samples == (unsigned int)currentIter) || times.size() == 0)
         return metrics;
@@ -947,47 +1051,61 @@ performance_metrics& TestBase::calcMetrics()
     std::sort(times.begin(), times.end());

-    //estimate mean and stddev for log(time)
-    double gmean = 0;
-    double gstddev = 0;
-    int n = 0;
-    for(TimeVector::const_iterator i = times.begin(); i != times.end(); ++i)
-    {
-        double x = static_cast<double>(*i)/runsPerIteration;
-        if (x < DBL_EPSILON) continue;
-        double lx = log(x);
-
-        ++n;
-        double delta = lx - gmean;
-        gmean += delta / n;
-        gstddev += delta * (lx - gmean);
-    }
-
-    gstddev = n > 1 ? sqrt(gstddev / (n - 1)) : 0;
-
     TimeVector::const_iterator start = times.begin();
     TimeVector::const_iterator end = times.end();

-    //filter outliers assuming log-normal distribution
-    //http://stackoverflow.com/questions/1867426/modeling-distribution-of-performance-measurements
-    int offset = 0;
-    if (gstddev > DBL_EPSILON)
-    {
-        double minout = exp(gmean - 3 * gstddev) * runsPerIteration;
-        double maxout = exp(gmean + 3 * gstddev) * runsPerIteration;
-        while(*start < minout) ++start, ++metrics.outliers, ++offset;
-        do --end, ++metrics.outliers; while(*end > maxout);
-        ++end, --metrics.outliers;
-    }
+    if (param_strategy == PERF_STRATEGY_BASE)
+    {
+        //estimate mean and stddev for log(time)
+        double gmean = 0;
+        double gstddev = 0;
+        int n = 0;
+        for(TimeVector::const_iterator i = times.begin(); i != times.end(); ++i)
+        {
+            double x = static_cast<double>(*i)/runsPerIteration;
+            if (x < DBL_EPSILON) continue;
+            double lx = log(x);
+
+            ++n;
+            double delta = lx - gmean;
+            gmean += delta / n;
+            gstddev += delta * (lx - gmean);
+        }
+
+        gstddev = n > 1 ? sqrt(gstddev / (n - 1)) : 0;
+
+        //filter outliers assuming log-normal distribution
+        //http://stackoverflow.com/questions/1867426/modeling-distribution-of-performance-measurements
+        if (gstddev > DBL_EPSILON)
+        {
+            double minout = exp(gmean - 3 * gstddev) * runsPerIteration;
+            double maxout = exp(gmean + 3 * gstddev) * runsPerIteration;
+            while(*start < minout) ++start, ++metrics.outliers;
+            do --end, ++metrics.outliers; while(*end > maxout);
+            ++end, --metrics.outliers;
+        }
+    }
+    else if (param_strategy == PERF_STRATEGY_SIMPLE)
+    {
+        metrics.outliers = static_cast<int>(times.size() * param_max_outliers / 100);
+        for (unsigned int i = 0; i < metrics.outliers; i++)
+            --end;
+    }
+    else
+    {
+        assert(false);
+    }
+
+    int offset = static_cast<int>(start - times.begin());

     metrics.min = static_cast<double>(*start)/runsPerIteration;
     //calc final metrics
-    n = 0;
-    gmean = 0;
-    gstddev = 0;
+    unsigned int n = 0;
+    double gmean = 0;
+    double gstddev = 0;
     double mean = 0;
     double stddev = 0;
-    int m = 0;
+    unsigned int m = 0;
     for(; start != end; ++start)
     {
         double x = static_cast<double>(*start)/runsPerIteration;
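The PERF_STRATEGY_BASE branch above estimates the mean and variance of log(time) with Welford's online update (the delta / gmean / gstddev recurrence), then, assuming timings are log-normally distributed, trims samples outside exp(gmean +/- 3*gstddev); the simple strategy instead just drops a fixed param_max_outliers percentage from the top of the sorted samples. A standalone sketch of the base computation:

#include <algorithm>
#include <cfloat>
#include <cmath>
#include <cstddef>
#include <vector>

// Standalone sketch of the base strategy's statistics: Welford's online
// mean/variance over log(time), then a +/-3 sigma log-normal outlier cut.
// Returns how many samples survive the cut.
static std::size_t countKeptSamples(std::vector<double> times)
{
    std::sort(times.begin(), times.end());

    double gmean = 0, m2 = 0;
    int n = 0;
    for (std::size_t i = 0; i < times.size(); ++i)
    {
        if (times[i] < DBL_EPSILON) continue;   // log() needs a positive input
        double lx = std::log(times[i]);
        ++n;
        double delta = lx - gmean;
        gmean += delta / n;                     // running mean of log(time)
        m2 += delta * (lx - gmean);             // running sum of squared deviations
    }
    double gstddev = n > 1 ? std::sqrt(m2 / (n - 1)) : 0;

    // keep only samples inside the geometric band exp(gmean +/- 3*gstddev)
    double lo = std::exp(gmean - 3 * gstddev);
    double hi = std::exp(gmean + 3 * gstddev);
    std::size_t b = 0, e = times.size();
    while (b < e && times[b] < lo) ++b;         // cheap scans: times is sorted
    while (e > b && times[e - 1] > hi) --e;
    return e - b;                               // non-outlier count, as in calcMetrics()
}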
@@ -1009,11 +1127,10 @@ performance_metrics& TestBase::calcMetrics()
     metrics.gmean = exp(gmean);
     metrics.gstddev = m > 1 ? sqrt(gstddev/(m - 1)) : 0;
     metrics.stddev = n > 1 ? sqrt(stddev/(n - 1)) : 0;
-    metrics.median = n % 2
-            ? (double)times[offset + n / 2]
-            : 0.5 * (times[offset + n / 2] + times[offset + n / 2 - 1]);
-    metrics.median /= runsPerIteration;
+    metrics.median = (n % 2
+            ? (double)times[offset + n / 2]
+            : 0.5 * (times[offset + n / 2] + times[offset + n / 2 - 1])
+        ) / runsPerIteration;

     return metrics;
 }
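The rewritten median above indexes the middle of the sorted, outlier-trimmed window [offset, offset + n) and divides by runsPerIteration exactly once, where the old form spread the same value over two statements. As a standalone sketch of the rule (the helper name is hypothetical):

#include <cstddef>
#include <vector>

// Hypothetical helper mirroring the median rule above: `times` is sorted and
// [offset, offset + n) is the window of samples kept after outlier trimming.
static double medianOfWindow(const std::vector<long long>& times,
                             std::size_t offset, std::size_t n, int runsPerIteration)
{
    double med = (n % 2)
        ? (double)times[offset + n / 2]                                // odd: middle element
        : 0.5 * (times[offset + n / 2] + times[offset + n / 2 - 1]);  // even: mean of the two middle
    return med / runsPerIteration;
}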
@@ -1027,17 +1144,31 @@ void TestBase::validateMetrics()
     ASSERT_GE(m.samples, 1u)
       << "  No time measurements was performed.\nstartTimer() and stopTimer() commands are required for performance tests.";

-    EXPECT_GE(m.samples, param_min_samples)
-      << "  Only a few samples are collected.\nPlease increase number of iterations or/and time limit to get reliable performance measurements.";
-
-    if (m.gstddev > DBL_EPSILON)
-    {
-        EXPECT_GT(/*m.gmean * */1., /*m.gmean * */ 2 * sinh(m.gstddev * param_max_deviation))
-          << "  Test results are not reliable ((mean-sigma,mean+sigma) deviation interval is greater than measured time interval).";
-    }
-
-    EXPECT_LE(m.outliers, std::max((unsigned int)cvCeil(m.samples * param_max_outliers / 100.), 1u))
-      << "  Test results are not reliable (too many outliers).";
+    if (param_strategy == PERF_STRATEGY_BASE)
+    {
+        EXPECT_GE(m.samples, param_min_samples)
+          << "  Only a few samples are collected.\nPlease increase number of iterations or/and time limit to get reliable performance measurements.";
+
+        if (m.gstddev > DBL_EPSILON)
+        {
+            EXPECT_GT(/*m.gmean * */1., /*m.gmean * */ 2 * sinh(m.gstddev * param_max_deviation))
+              << "  Test results are not reliable ((mean-sigma,mean+sigma) deviation interval is greater than measured time interval).";
+        }
+
+        EXPECT_LE(m.outliers, std::max((unsigned int)cvCeil(m.samples * param_max_outliers / 100.), 1u))
+          << "  Test results are not reliable (too many outliers).";
+    }
+    else if (param_strategy == PERF_STRATEGY_SIMPLE)
+    {
+        double mean = metrics.mean * 1000.0f / metrics.frequency;
+        double stddev = metrics.stddev * 1000.0f / metrics.frequency;
+        double percents = stddev / mean * 100.f;
+        printf("  samples = %d, mean = %.2f, stddev = %.2f (%.1f%%)\n", (int)metrics.samples, mean, stddev, percents);
+    }
+    else
+    {
+        assert(false);
+    }
 }

 void TestBase::reportMetrics(bool toJUnitXML)
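The EXPECT_GT in the base branch above encodes a log-normal reliability bound: if sigma is the standard deviation of log(time), the multiplicative interval gmean*exp(+/-k*sigma) has width gmean * (exp(k*sigma) - exp(-k*sigma)) = gmean * 2*sinh(k*sigma), so requiring 2*sinh(m.gstddev * param_max_deviation) < 1 keeps the deviation interval narrower than the geometric mean itself. A small numeric sketch of that identity (the values are arbitrary):

#include <cmath>
#include <cstdio>

int main()
{
    double gmean = 5.0;   // geometric mean of times (arbitrary units)
    double sigma = 0.1;   // stddev of log(time)
    double k = 2.0;       // stand-in for param_max_deviation

    double width   = gmean * (std::exp(k * sigma) - std::exp(-k * sigma));
    double viaSinh = gmean * 2 * std::sinh(k * sigma);
    std::printf("interval width %.6f == %.6f; relative width %.3f\n",
                width, viaSinh, 2 * std::sinh(k * sigma));
    // validateMetrics() requires the relative width (the 2*sinh term) to stay below 1.
    return 0;
}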
@@ -1200,12 +1331,12 @@ void TestBase::RunPerfTestBody()
     {
         this->PerfTestBody();
     }
-    catch(PerfEarlyExitException)
+    catch(PerfEarlyExitException&)
     {
         metrics.terminationReason = performance_metrics::TERM_INTERRUPT;
         return; //no additional failure logging
     }
-    catch(cv::Exception e)
+    catch(cv::Exception& e)
     {
         metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
 #ifdef HAVE_CUDA
@@ -1214,7 +1345,7 @@ void TestBase::RunPerfTestBody()
 #endif
         FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws cv::Exception:\n" << e.what();
     }
-    catch(std::exception e)
+    catch(std::exception& e)
     {
         metrics.terminationReason = performance_metrics::TERM_EXCEPTION;
         FAIL() << "Expected: PerfTestBody() doesn't throw an exception.\n  Actual: it throws std::exception:\n" << e.what();
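The three catch-clause changes in this function all make the same fix: catching by value copies the exception, and when a derived type (such as cv::Exception, which derives from std::exception) reaches a catch(std::exception) clause, the copy is sliced to the base class, so e.what() loses the derived message. A minimal self-contained illustration (the types here are hypothetical):

#include <cstdio>
#include <exception>

// Minimal illustration of why the hunks above catch by reference: catching
// a base class by value slices off the derived part, losing what() overrides.
struct MyError : std::exception
{
    const char* what() const throw() { return "derived message"; }
};

int main()
{
    try { throw MyError(); }
    catch (std::exception e)        // by value: sliced to std::exception
    { std::printf("by value:     %s\n", e.what()); }   // generic message

    try { throw MyError(); }
    catch (const std::exception& e) // by reference: dynamic type preserved
    { std::printf("by reference: %s\n", e.what()); }   // "derived message"
    return 0;
}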
@@ -1235,6 +1366,7 @@ TestBase::_declareHelper& TestBase::_declareHelper::iterations(unsigned int n)
     test->times.reserve(n);
     test->nIters = std::min(n, TestBase::iterationsLimitDefault);
     test->currentIter = (unsigned int)-1;
+    test->metrics.clear();
     return *this;
 }
@@ -1243,6 +1375,7 @@ TestBase::_declareHelper& TestBase::_declareHelper::time(double timeLimitSecs)
     test->times.clear();
     test->currentIter = (unsigned int)-1;
     test->timeLimit = (int64)(timeLimitSecs * cv::getTickFrequency());
+    test->metrics.clear();
     return *this;
 }