Commit 74cf48b5 authored Jul 25, 2018 by Alexander Alekhin

dnn(test): use Backend/Target enums instead of 'int'

parent 5336b9ad

Showing 1 changed file with 31 additions and 31 deletions:

modules/dnn/test/test_halide_layers.cpp (+31, -31)
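The motivation for the change is type safety: cv::dnn::Backend and cv::dnn::Target are distinct enums, so enum-typed helpers catch swapped or unrelated arguments at compile time, whereas two plain int parameters accept anything. A minimal sketch of the idea (not code from this patch; the configure() helper below is hypothetical):

#include <opencv2/dnn.hpp>

using namespace cv::dnn;

// Hypothetical helper, typed the same way as the test helpers in this patch.
static void configure(Net& net, Backend backendId, Target targetId)
{
    // Net's setters take plain ints, so the enum values convert implicitly.
    net.setPreferableBackend(backendId);
    net.setPreferableTarget(targetId);
}

int main()
{
    Net net;
    configure(net, DNN_BACKEND_DEFAULT, DNN_TARGET_CPU);    // compiles
    // configure(net, DNN_TARGET_CPU, DNN_BACKEND_DEFAULT); // rejected: mismatched enum types
    return 0;
}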
@@ -16,7 +16,7 @@ using namespace cv;
 using namespace cv::dnn;
 using namespace testing;
-static void test(Mat& input, Net& net, int backendId, int targetId)
+static void test(Mat& input, Net& net, Backend backendId, Target targetId)
 {
     DNNTestLayer::checkBackend(backendId, targetId);
     randu(input, -1.0f, 1.0f);
@@ -34,7 +34,7 @@ static void test(Mat& input, Net& net, int backendId, int targetId)
     normAssert(outputDefault, outputHalide, "", l1, lInf);
 }
-static void test(LayerParams& params, Mat& input, int backendId, int targetId)
+static void test(LayerParams& params, Mat& input, Backend backendId, Target targetId)
 {
     Net net;
     net.addLayerToPrev(params.name, params.type, params);
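For context, a hypothetical call site for the LayerParams overload of test() changed above (not part of the patch), showing how the enum-typed arguments now read; the layer parameters and input shape are made up for illustration and assume this test file's usual headers and usings:

// Assumes 'using namespace cv;' and 'using namespace cv::dnn;' from this file.
LayerParams lp;
lp.type = "ReLU";
lp.name = "testLayer";

int sz[] = {1, 3, 10, 10};   // arbitrary NCHW input shape for the sketch
Mat input(4, sz, CV_32F);

// Backend/target are enum-typed at the call site as well.
test(lp, input, DNN_BACKEND_DEFAULT, DNN_TARGET_CPU);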
@@ -101,8 +101,8 @@ TEST_P(Convolution, Accuracy)
     Size pad = get<4>(GetParam());
     Size dilation = get<5>(GetParam());
     bool hasBias = get<6>(GetParam());
-    int backendId = get<0>(get<7>(GetParam()));
-    int targetId = get<1>(get<7>(GetParam()));
+    Backend backendId = get<0>(get<7>(GetParam()));
+    Target targetId = get<1>(get<7>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
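The nested get<>() calls above unpack a tuple produced by testing::Combine(), whose last element is itself a tuple<Backend, Target>. A simplified, hypothetical fixture (far fewer parameters than the real Convolution test) showing where the indices come from, again assuming this file's usual headers and usings:

// Hypothetical fixture, not the file's actual instantiation.
typedef TestWithParam<tuple<Size, bool, tuple<Backend, Target> > > ExampleLayer;

TEST_P(ExampleLayer, Accuracy)
{
    Size kernel = get<0>(GetParam());
    bool hasBias = get<1>(GetParam());
    Backend backendId = get<0>(get<2>(GetParam()));  // element 0 of the inner pair
    Target targetId = get<1>(get<2>(GetParam()));    // element 1 of the inner pair
    // ... build a LayerParams and call test(params, input, backendId, targetId);
}

INSTANTIATE_TEST_CASE_P(Example, ExampleLayer, Combine(
    Values(Size(3, 3)),
    Bool(),
    Values(tuple<Backend, Target>(DNN_BACKEND_DEFAULT, DNN_TARGET_CPU))
));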
@@ -171,8 +171,8 @@ TEST_P(Deconvolution, Accuracy)
     Size stride = Size(get<5>(GetParam())[0], get<5>(GetParam())[1]);
     Size adjPad = Size(get<5>(GetParam())[2], get<5>(GetParam())[3]);
     bool hasBias = get<6>(GetParam());
-    int backendId = get<0>(get<7>(GetParam()));
-    int targetId = get<1>(get<7>(GetParam()));
+    Backend backendId = get<0>(get<7>(GetParam()));
+    Target targetId = get<1>(get<7>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU &&
         dilation.width == 2 && dilation.height == 2)
         throw SkipTestException("");
@@ -235,8 +235,8 @@ TEST_P(LRN, Accuracy)
     float bias = get<2>(GetParam())[2];
     bool normBySize = get<3>(GetParam());
     std::string nrmType = get<4>(GetParam());
-    int backendId = get<0>(get<5>(GetParam()));
-    int targetId = get<1>(get<5>(GetParam()));
+    Backend backendId = get<0>(get<5>(GetParam()));
+    Target targetId = get<1>(get<5>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("");
@@ -276,8 +276,8 @@ TEST_P(AvePooling, Accuracy)
     Size outSize = get<1>(GetParam());;  // Input size will be computed from parameters.
     Size kernel = get<2>(GetParam());
     Size stride = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
@@ -317,8 +317,8 @@ TEST_P(MaxPooling, Accuracy)
     Size kernel = get<2>(GetParam());
     Size stride = get<3>(GetParam());
     Size pad = get<4>(GetParam());
-    int backendId = get<0>(get<5>(GetParam()));
-    int targetId = get<1>(get<5>(GetParam()));
+    Backend backendId = get<0>(get<5>(GetParam()));
+    Target targetId = get<1>(get<5>(GetParam()));
     LayerParams lp;
     lp.set("pool", "max");
@@ -355,8 +355,8 @@ TEST_P(FullyConnected, Accuracy)
     Size inSize = get<1>(GetParam());
     int outChannels = get<2>(GetParam());
     bool hasBias = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("");
@@ -394,8 +394,8 @@ typedef TestWithParam<tuple<int, tuple<Backend, Target> > > SoftMax;
 TEST_P(SoftMax, Accuracy)
 {
     int inChannels = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.type = "SoftMax";
     lp.name = "testLayer";
@@ -457,7 +457,7 @@ TEST_P(Test_Halide_layers, MaxPoolUnpool)
 ////////////////////////////////////////////////////////////////////////////////
 static const int kNumChannels = 3;
-void testInPlaceActivation(LayerParams& lp, int backendId, int targetId)
+void testInPlaceActivation(LayerParams& lp, Backend backendId, Target targetId)
 {
     EXPECT_FALSE(lp.name.empty());
@@ -485,8 +485,8 @@ TEST_P(BatchNorm, Accuracy)
     bool hasWeights = get<0>(GetParam());
     bool hasBias = get<1>(GetParam());
     float epsilon = get<2>(GetParam());
-    int backendId = get<0>(get<3>(GetParam()));
-    int targetId = get<1>(get<3>(GetParam()));
+    Backend backendId = get<0>(get<3>(GetParam()));
+    Target targetId = get<1>(get<3>(GetParam()));
     LayerParams lp;
     lp.set("has_weight", hasWeights);
@@ -518,8 +518,8 @@ typedef TestWithParam<tuple<float, tuple<Backend, Target> > > ReLU;
 TEST_P(ReLU, Accuracy)
 {
     float negativeSlope = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.set("negative_slope", negativeSlope);
@@ -536,8 +536,8 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, ReLU, Combine(
 typedef TestWithParam<tuple<std::string, tuple<Backend, Target> > > NoParamActivation;
 TEST_P(NoParamActivation, Accuracy)
 {
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.type = get<0>(GetParam());
@@ -555,8 +555,8 @@ TEST_P(Power, Accuracy)
     float power = get<0>(GetParam())[0];
     float scale = get<0>(GetParam())[1];
     float shift = get<0>(GetParam())[2];
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.set("power", power);
@@ -589,8 +589,8 @@ typedef TestWithParam<tuple<bool, tuple<Backend, Target> > > Scale;
 TEST_P(Scale, Accuracy)
 {
     bool hasBias = get<0>(GetParam());
-    int backendId = get<0>(get<1>(GetParam()));
-    int targetId = get<1>(get<1>(GetParam()));
+    Backend backendId = get<0>(get<1>(GetParam()));
+    Target targetId = get<1>(get<1>(GetParam()));
     LayerParams lp;
     lp.set("bias_term", hasBias);
@@ -624,8 +624,8 @@ TEST_P(Concat, Accuracy)
 {
     Vec3i inSize = get<0>(GetParam());
     Vec3i numChannels = get<1>(GetParam());
-    int backendId = get<0>(get<2>(GetParam()));
-    int targetId = get<1>(get<2>(GetParam()));
+    Backend backendId = get<0>(get<2>(GetParam()));
+    Target targetId = get<1>(get<2>(GetParam()));
     Net net;
@@ -692,8 +692,8 @@ TEST_P(Eltwise, Accuracy)
     std::string op = get<1>(GetParam());
     int numConv = get<2>(GetParam());
     bool weighted = get<3>(GetParam());
-    int backendId = get<0>(get<4>(GetParam()));
-    int targetId = get<1>(get<4>(GetParam()));
+    Backend backendId = get<0>(get<4>(GetParam()));
+    Target targetId = get<1>(get<4>(GetParam()));
     Net net;