opencv / Commits / 523b6f32

Commit 523b6f32 authored Jul 06, 2018 by Vadim Pisarevsky

Merge pull request #11867 from dkurt:dnn_ie_layers

Parents: 3b01777c 019c2f21

Showing 13 changed files with 229 additions and 116 deletions (+229 / -116)
Changed files:

modules/dnn/src/dnn.cpp                         +3   -1
modules/dnn/src/layers/convolution_layer.cpp    +15  -1
modules/dnn/src/layers/eltwise_layer.cpp        +2   -2
modules/dnn/src/layers/reorg_layer.cpp          +20  -1
modules/dnn/src/layers/resize_layer.cpp         +21  -0
modules/dnn/src/layers/slice_layer.cpp          +15  -1
modules/dnn/test/test_backends.cpp              +5   -36
modules/dnn/test/test_darknet_importer.cpp      +61  -73
modules/dnn/test/test_halide_layers.cpp         +0   -0
modules/dnn/test/test_layers.cpp                +0   -0
modules/dnn/test/test_precomp.hpp               +87  -0
modules/dnn/test/test_tf_importer.cpp           +0   -0
modules/dnn/test/test_torch_importer.cpp        +0   -1
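For orientation (not part of the diff): the layers and tests touched below are all reached through the usual backend/target selection API of the dnn module. A minimal sketch, with the model file names as placeholders:

    #include <opencv2/dnn.hpp>
    using namespace cv::dnn;

    // Placeholder paths; any model readable by readNet() is selected the same way.
    Net net = readNet("model.weights", "model.cfg");
    net.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);  // or DNN_BACKEND_OPENCV / DNN_BACKEND_HALIDE
    net.setPreferableTarget(DNN_TARGET_MYRIAD);              // or DNN_TARGET_CPU / DNN_TARGET_OPENCL / DNN_TARGET_OPENCL_FP16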
modules/dnn/src/dnn.cpp

@@ -2730,9 +2730,9 @@ void Layer::applyHalideScheduler(Ptr<BackendNode>& node, const std::vector<Mat*>
     }
     else if (targetId == DNN_TARGET_OPENCL)
     {
-        int c_split = outC > 8 ? (outC > 16 ? 8 : 4) : outC;
         if (outW == 1 && outH == 1)
         {
+            int c_split = outC > 8 ? (outC > 16 ? 8 : 4) : outC;
             top.split(c, co, ci, c_split)
                .fuse(x, y, tile).fuse(co, tile, tile).fuse(n, tile, tile)
                .gpu_blocks(tile)
@@ -2742,6 +2742,8 @@ void Layer::applyHalideScheduler(Ptr<BackendNode>& node, const std::vector<Mat*>
         {
             int x_split = outW > 8 ? (outW >= 32 ? 16 : 8) : outW;
             int y_split = outH > 8 ? (outH >= 32 ? 16 : 8) : outH;
+            // Supported vectorization widths: 2, 3, 4, 8, 16
+            int c_split = outC > 8 ? (outC > 16 ? 8 : 4) : std::min(4, outC);
             top.split(x, xo, xi, x_split).split(y, yo, yi, y_split)
                .split(c, co, ci, c_split)
                .gpu_blocks(xo, yo, co)
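A worked reading of the split-size selection above (illustrative values only, not part of the diff):

    // x_split = outW > 8 ? (outW >= 32 ? 16 : 8) : outW
    //   outW = 40 -> 16,  outW = 10 -> 8,  outW = 5 -> 5
    // c_split = outC > 8 ? (outC > 16 ? 8 : 4) : std::min(4, outC)
    //   outC = 24 -> 8,   outC = 12 -> 4,  outC = 3 -> 3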
modules/dnn/src/layers/convolution_layer.cpp

@@ -82,7 +82,21 @@ public:
     virtual bool supportBackend(int backendId) CV_OVERRIDE
     {
         if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
-            return preferableTarget != DNN_TARGET_MYRIAD || type != "Deconvolution" || adjustPad == Size();
+        {
+            if (type == "Convolution")
+                return preferableTarget != DNN_TARGET_MYRIAD || dilation.width == dilation.height;
+            else
+            {
+                CV_Assert(type == "Deconvolution");
+                const int outGroupCn = blobs[0].size[1];  // Weights are in IOHW layout
+                const int group = numOutput / outGroupCn;
+                if (group != 1)
+                    return false;
+                if (preferableTarget == DNN_TARGET_OPENCL || preferableTarget == DNN_TARGET_OPENCL_FP16)
+                    return dilation.width == 1 && dilation.height == 1;
+                return true;
+            }
+        }
         else
             return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_HALIDE;
     }
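A worked reading of the new Deconvolution gate (the 64x32x3x3 weight shape is chosen purely for illustration):

    // IOHW weights of shape 64x32x3x3 with numOutput == 64:
    const int outGroupCn = 32;          // blobs[0].size[1]
    const int group = 64 / outGroupCn;  // == 2
    // group != 1, so the Inference Engine branch returns false and the layer
    // falls back to DNN_BACKEND_OPENCV or DNN_BACKEND_HALIDE.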
modules/dnn/src/layers/eltwise_layer.cpp

@@ -97,8 +97,8 @@ public:
     virtual bool supportBackend(int backendId) CV_OVERRIDE
     {
         return backendId == DNN_BACKEND_OPENCV ||
-               backendId == DNN_BACKEND_HALIDE && haveHalide() ||
-               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
+               backendId == DNN_BACKEND_HALIDE ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE && (op != SUM || coeffs.empty());
     }

     bool getMemoryShapes(const std::vector<MatShape> &inputs,
modules/dnn/src/layers/reorg_layer.cpp

@@ -41,9 +41,9 @@
 //M*/

 #include "../precomp.hpp"
+#include "../op_inf_engine.hpp"
 #include <opencv2/dnn/shape_utils.hpp>
 #include <opencv2/dnn/all_layers.hpp>
-#include <iostream>

 #ifdef HAVE_OPENCL
 #include "opencl_kernels_dnn.hpp"
@@ -85,6 +85,11 @@ public:
         return false;
     }

+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_INFERENCE_ENGINE;
+    }
+
 #ifdef HAVE_OPENCL
     bool forward_ocl(InputArrayOfArrays inps, OutputArrayOfArrays outs, OutputArrayOfArrays internals)
     {
@@ -169,6 +174,20 @@ public:
         }
     }

+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "ReorgYolo";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
+        ieLayer->params["stride"] = format("%d", reorgStride);
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
+
     virtual int64 getFLOPS(const std::vector<MatShape> &inputs,
                            const std::vector<MatShape> &outputs) const CV_OVERRIDE
     {
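The registration added here follows a pattern that the resize and slice layers below repeat. A condensed sketch of that pattern, with the layer type and parameter names as placeholders rather than real attributes:

    #ifdef HAVE_INF_ENGINE
        InferenceEngine::LayerParams lp;
        lp.name = name;                                   // OpenCV layer name
        lp.type = "SomeIEType";                           // e.g. "ReorgYolo", "Interp"
        lp.precision = InferenceEngine::Precision::FP32;
        std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
        ieLayer->params["some_param"] = "value";          // layer-specific attributes, passed as strings
        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
    #endif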
modules/dnn/src/layers/resize_layer.cpp

@@ -192,6 +192,11 @@ public:
         return (outputs[0][2] == inputs[0][2]) && (outputs[0][3] == inputs[0][3]);
     }

+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_OPENCV || backendId == DNN_BACKEND_INFERENCE_ENGINE;
+    }
+
     virtual void finalize(const std::vector<Mat*>& inputs, std::vector<Mat> &outputs) CV_OVERRIDE
     {
         if (!outWidth && !outHeight)
@@ -204,6 +209,22 @@ public:
         scaleHeight = (outHeight > 1) ? (static_cast<float>(inpHeight - 1) / (outHeight - 1)) : 0.f;
         scaleWidth = (outWidth > 1) ? (static_cast<float>(inpWidth - 1) / (outWidth - 1)) : 0.f;
     }

+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "Interp";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
+        ieLayer->params["pad_beg"] = "0";
+        ieLayer->params["pad_end"] = "0";
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
 };

 Ptr<Layer> InterpLayer::create(const LayerParams& params)
modules/dnn/src/layers/slice_layer.cpp

@@ -266,7 +266,21 @@ public:
     std::shared_ptr<InferenceEngine::CropLayer> ieLayer(new InferenceEngine::CropLayer(lp));

     CV_Assert(sliceRanges.size() == 1);
-    for (int i = sliceRanges[0].size() - 1; i >= 0; --i)
+
+    int from, to, step;
+    if (preferableTarget == DNN_TARGET_MYRIAD)
+    {
+        from = 1;
+        to = sliceRanges[0].size() + 1;
+        step = 1;
+    }
+    else
+    {
+        from = sliceRanges[0].size() - 1;
+        to = -1;
+        step = -1;
+    }
+    for (int i = from; i != to; i += step)
     {
         ieLayer->axis.push_back(i);
         ieLayer->offset.push_back(sliceRanges[0][i].start);
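A worked example of the axis iteration order introduced above (assuming a 4-D slice, so sliceRanges[0].size() == 4):

    // DNN_TARGET_MYRIAD:  from = 1, to = 5,  step = 1   -> axes pushed as 1, 2, 3, 4
    // other targets:      from = 3, to = -1, step = -1  -> axes pushed as 3, 2, 1, 0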
modules/dnn/test/test_backends.cpp

@@ -10,18 +10,9 @@
 namespace opencv_test { namespace {

-class DNNTestNetwork : public TestWithParam<tuple<DNNBackend, DNNTarget> >
+class DNNTestNetwork : public DNNTestLayer
 {
 public:
-    dnn::Backend backend;
-    dnn::Target target;
-
-    DNNTestNetwork()
-    {
-        backend = (dnn::Backend)(int)get<0>(GetParam());
-        target = (dnn::Target)(int)get<1>(GetParam());
-    }
-
     void processNet(const std::string& weights, const std::string& proto,
                     Size inpSize, const std::string& outputLayer = "",
                     const std::string& halideScheduler = "",
@@ -40,32 +31,10 @@ public:
                     std::string halideScheduler = "",
                     double l1 = 0.0, double lInf = 0.0, double detectionConfThresh = 0.2)
     {
-        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
-        {
-#ifdef HAVE_OPENCL
-            if (!cv::ocl::useOpenCL())
-#endif
-            {
-                throw SkipTestException("OpenCL is not available/disabled in OpenCV");
-            }
-        }
-        if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        {
-            if (!checkMyriadTarget())
-            {
-                throw SkipTestException("Myriad is not available/disabled in OpenCV");
-            }
-        }
-        if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
-        {
-            l1 = l1 == 0.0 ? 4e-3 : l1;
-            lInf = lInf == 0.0 ? 2e-2 : lInf;
-        }
-        else
-        {
-            l1 = l1 == 0.0 ? 1e-5 : l1;
-            lInf = lInf == 0.0 ? 1e-4 : lInf;
-        }
+        checkBackend();
+        l1 = l1 ? l1 : default_l1;
+        lInf = lInf ? lInf : default_lInf;
+
         weights = findDataFile(weights, false);
         if (!proto.empty())
             proto = findDataFile(proto, false);
modules/dnn/test/test_darknet_importer.cpp

@@ -65,38 +65,49 @@ TEST(Test_Darknet, read_yolo_voc)
     ASSERT_FALSE(net.empty());
 }

-// Test object detection network from Darknet framework.
-static void testDarknetModel(const std::string& cfg, const std::string& weights,
+class Test_Darknet_layers : public DNNTestLayer
+{
+public:
+    void testDarknetLayer(const std::string& name, bool hasWeights = false)
+    {
+        std::string cfg = findDataFile("dnn/darknet/" + name + ".cfg", false);
+        std::string model = "";
+        if (hasWeights)
+            model = findDataFile("dnn/darknet/" + name + ".weights", false);
+        Mat inp = blobFromNPY(findDataFile("dnn/darknet/" + name + "_in.npy", false));
+        Mat ref = blobFromNPY(findDataFile("dnn/darknet/" + name + "_out.npy", false));
+
+        checkBackend(&inp, &ref);
+
+        Net net = readNet(cfg, model);
+        net.setPreferableBackend(backend);
+        net.setPreferableTarget(target);
+        net.setInput(inp);
+        Mat out = net.forward();
+        normAssert(out, ref, "", default_l1, default_lInf);
+    }
+};
+
+class Test_Darknet_nets : public DNNTestLayer
+{
+public:
+    // Test object detection network from Darknet framework.
+    void testDarknetModel(const std::string& cfg, const std::string& weights,
                          const std::vector<cv::String>& outNames,
                          const std::vector<int>& refClassIds,
                          const std::vector<float>& refConfidences,
                          const std::vector<Rect2d>& refBoxes,
-                         int backendId, int targetId, float scoreDiff = 0.0,
-                         float iouDiff = 0.0, float confThreshold = 0.24)
+                         double scoreDiff, double iouDiff, float confThreshold = 0.24)
 {
-    if (backendId == DNN_BACKEND_OPENCV && targetId == DNN_TARGET_OPENCL)
-    {
-#ifdef HAVE_OPENCL
-        if (!cv::ocl::useOpenCL())
-#endif
-        {
-            throw SkipTestException("OpenCL is not available/disabled in OpenCV");
-        }
-    }
-    if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
-    {
-        if (!checkMyriadTarget())
-        {
-            throw SkipTestException("Myriad is not available/disabled in OpenCV");
-        }
-    }
+    checkBackend();

     Mat sample = imread(_tf("dog416.png"));
     Mat inp = blobFromImage(sample, 1.0/255, Size(416, 416), Scalar(), true, false);

     Net net = readNet(findDataFile("dnn/" + cfg, false),
                       findDataFile("dnn/" + weights, false));
-    net.setPreferableBackend(backendId);
-    net.setPreferableTarget(targetId);
+    net.setPreferableBackend(backend);
+    net.setPreferableTarget(target);
     net.setInput(inp);
     std::vector<Mat> outs;
     net.forward(outs, outNames);

@@ -127,14 +138,11 @@ static void testDarknetModel(const std::string& cfg, const std::string& weights,
     }
     normAssertDetections(refClassIds, refConfidences, refBoxes, classIds,
                          confidences, boxes, "", confThreshold, scoreDiff, iouDiff);
-}
-
-typedef testing::TestWithParam<tuple<DNNBackend, DNNTarget> > Test_Darknet_nets;
+    }
+};

 TEST_P(Test_Darknet_nets, YoloVoc)
 {
-    int backendId = get<0>(GetParam());
-    int targetId = get<1>(GetParam());
     std::vector<cv::String> outNames(1, "detection_out");
     std::vector<int> classIds(3);

@@ -143,34 +151,28 @@ TEST_P(Test_Darknet_nets, YoloVoc)
     classIds[0] = 6;  confidences[0] = 0.750469f; boxes[0] = Rect2d(0.577374, 0.127391, 0.325575, 0.173418);  // a car
     classIds[1] = 1;  confidences[1] = 0.780879f; boxes[1] = Rect2d(0.270762, 0.264102, 0.461713, 0.48131);   // a bicycle
     classIds[2] = 11; confidences[2] = 0.901615f; boxes[2] = Rect2d(0.1386, 0.338509, 0.282737, 0.60028);     // a dog
-    double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 1e-2 : 8e-5;
-    double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 0.013 : 3e-5;
+    double scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 1e-2 : 8e-5;
+    double iouDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.013 : 3e-5;
     testDarknetModel("yolo-voc.cfg", "yolo-voc.weights", outNames,
-                     classIds, confidences, boxes, backendId, targetId, scoreDiff, iouDiff);
+                     classIds, confidences, boxes, scoreDiff, iouDiff);
 }

 TEST_P(Test_Darknet_nets, TinyYoloVoc)
 {
-    int backendId = get<0>(GetParam());
-    int targetId = get<1>(GetParam());
     std::vector<cv::String> outNames(1, "detection_out");
     std::vector<int> classIds(2);
     std::vector<float> confidences(2);
     std::vector<Rect2d> boxes(2);
     classIds[0] = 6;  confidences[0] = 0.761967f; boxes[0] = Rect2d(0.579042, 0.159161, 0.31544, 0.160779);   // a car
     classIds[1] = 11; confidences[1] = 0.780595f; boxes[1] = Rect2d(0.129696, 0.386467, 0.315579, 0.534527);  // a dog
-    double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 8e-3 : 8e-5;
-    double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 8e-3 : 3e-5;
+    double scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 8e-3 : 8e-5;
+    double iouDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 8e-3 : 3e-5;
     testDarknetModel("tiny-yolo-voc.cfg", "tiny-yolo-voc.weights", outNames,
-                     classIds, confidences, boxes, backendId, targetId, scoreDiff, iouDiff);
+                     classIds, confidences, boxes, scoreDiff, iouDiff);
 }

 TEST_P(Test_Darknet_nets, YOLOv3)
 {
-    int backendId = get<0>(GetParam());
-    int targetId = get<1>(GetParam());
-    if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD)
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("");
     std::vector<cv::String> outNames(3);
     outNames[0] = "yolo_82";
     outNames[1] = "yolo_94";

@@ -182,55 +184,41 @@ TEST_P(Test_Darknet_nets, YOLOv3)
     classIds[0] = 7;  confidences[0] = 0.952983f; boxes[0] = Rect2d(0.614622, 0.150257, 0.286747, 0.138994);  // a truck
     classIds[1] = 1;  confidences[1] = 0.987908f; boxes[1] = Rect2d(0.150913, 0.221933, 0.591342, 0.524327);  // a bicycle
     classIds[2] = 16; confidences[2] = 0.998836f; boxes[2] = Rect2d(0.160024, 0.389964, 0.257861, 0.553752);  // a dog (COCO)
-    double scoreDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 4e-3 : 8e-5;
-    double iouDiff = (targetId == DNN_TARGET_OPENCL_FP16 || targetId == DNN_TARGET_MYRIAD) ? 0.011 : 3e-5;
+    double scoreDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 4e-3 : 8e-5;
+    double iouDiff = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? 0.011 : 3e-5;
     testDarknetModel("yolov3.cfg", "yolov3.weights", outNames,
-                     classIds, confidences, boxes, backendId, targetId, scoreDiff, iouDiff);
+                     classIds, confidences, boxes, scoreDiff, iouDiff);
 }

-const tuple<DNNBackend, DNNTarget> testCases[] = {
-#ifdef HAVE_INF_ENGINE
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL),
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16),
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD),
-#endif
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_CPU),
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
-    tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
-};
-
-INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, testing::ValuesIn(testCases));
+INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_nets, dnnBackendsAndTargets());

-static void testDarknetLayer(const std::string& name, bool hasWeights = false)
+TEST_P(Test_Darknet_layers, shortcut)
 {
-    std::string cfg = findDataFile("dnn/darknet/" + name + ".cfg", false);
-    std::string model = "";
-    if (hasWeights)
-        model = findDataFile("dnn/darknet/" + name + ".weights", false);
-    Mat inp = blobFromNPY(findDataFile("dnn/darknet/" + name + "_in.npy", false));
-    Mat ref = blobFromNPY(findDataFile("dnn/darknet/" + name + "_out.npy", false));
-
-    Net net = readNet(cfg, model);
-    net.setPreferableBackend(DNN_BACKEND_OPENCV);
-    net.setInput(inp);
-    Mat out = net.forward();
-    normAssert(out, ref);
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU)
+        throw SkipTestException("");
+    testDarknetLayer("shortcut");
 }

-TEST(Test_Darknet, shortcut)
+TEST_P(Test_Darknet_layers, upsample)
 {
-    testDarknetLayer("shortcut");
+    testDarknetLayer("upsample");
 }

-TEST(Test_Darknet, upsample)
+TEST_P(Test_Darknet_layers, avgpool_softmax)
 {
-    testDarknetLayer("upsample");
+    testDarknetLayer("avgpool_softmax");
 }

-TEST(Test_Darknet, avgpool_softmax)
+TEST_P(Test_Darknet_layers, region)
 {
-    testDarknetLayer("avgpool_softmax");
+    testDarknetLayer("region");
 }

+TEST_P(Test_Darknet_layers, reorg)
+{
+    testDarknetLayer("reorg");
+}
+
+INSTANTIATE_TEST_CASE_P(/**/, Test_Darknet_layers, dnnBackendsAndTargets());
+
 }} // namespace
modules/dnn/test/test_halide_layers.cpp (diff collapsed)
modules/dnn/test/test_layers.cpp (diff collapsed)
modules/dnn/test/test_precomp.hpp

@@ -69,6 +69,93 @@ static testing::internal::ParamGenerator<DNNTarget> availableDnnTargets()
     return testing::ValuesIn(targets);
 }

+static testing::internal::ParamGenerator<tuple<DNNBackend, DNNTarget> > dnnBackendsAndTargets()
+{
+    static const tuple<DNNBackend, DNNTarget> testCases[] = {
+#ifdef HAVE_INF_ENGINE
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL),
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16),
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD),
+#endif
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_CPU),
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
+        tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
+    };
+    return testing::ValuesIn(testCases);
+}
+
+class DNNTestLayer : public TestWithParam<tuple<DNNBackend, DNNTarget> >
+{
+public:
+    dnn::Backend backend;
+    dnn::Target target;
+    double default_l1, default_lInf;
+
+    DNNTestLayer()
+    {
+        backend = (dnn::Backend)(int)get<0>(GetParam());
+        target = (dnn::Target)(int)get<1>(GetParam());
+        getDefaultThresholds(backend, target, &default_l1, &default_lInf);
+    }
+
+    static void getDefaultThresholds(int backend, int target, double* l1, double* lInf)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD)
+        {
+            *l1 = 4e-3;
+            *lInf = 2e-2;
+        }
+        else
+        {
+            *l1 = 1e-5;
+            *lInf = 1e-4;
+        }
+    }
+
+    static void checkBackend(int backend, int target, Mat* inp = 0, Mat* ref = 0)
+    {
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        {
+#ifdef HAVE_OPENCL
+            if (!cv::ocl::useOpenCL())
+#endif
+            {
+                throw SkipTestException("OpenCL is not available/disabled in OpenCV");
+            }
+        }
+        if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        {
+            if (!checkMyriadTarget())
+            {
+                throw SkipTestException("Myriad is not available/disabled in OpenCV");
+            }
+            if (inp && ref && inp->size[0] != 1)
+            {
+                // Myriad plugin supports only batch size 1. Slice a single sample.
+                if (inp->size[0] == ref->size[0])
+                {
+                    std::vector<cv::Range> range(inp->dims, Range::all());
+                    range[0] = Range(0, 1);
+                    *inp = inp->operator()(range);
+
+                    range = std::vector<cv::Range>(ref->dims, Range::all());
+                    range[0] = Range(0, 1);
+                    *ref = ref->operator()(range);
+                }
+                else
+                    throw SkipTestException("Myriad plugin supports only batch size 1");
+            }
+        }
+    }
+
+protected:
+    void checkBackend(Mat* inp = 0, Mat* ref = 0)
+    {
+        checkBackend(backend, target, inp, ref);
+    }
+};
+
 }}

 #endif
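A minimal sketch of how a test can use the new DNNTestLayer fixture and the dnnBackendsAndTargets() generator; Test_MyLayer and the data file names are hypothetical, and the pattern follows the Darknet tests above:

    typedef DNNTestLayer Test_MyLayer;

    TEST_P(Test_MyLayer, Accuracy)
    {
        Mat inp = blobFromNPY(findDataFile("dnn/my_layer_in.npy", false));   // hypothetical test data
        Mat ref = blobFromNPY(findDataFile("dnn/my_layer_out.npy", false));
        checkBackend(&inp, &ref);                      // skips unsupported backend/target pairs

        Net net = readNet(findDataFile("dnn/my_layer.cfg", false));          // hypothetical model
        net.setPreferableBackend(backend);
        net.setPreferableTarget(target);
        net.setInput(inp);
        normAssert(net.forward(), ref, "", default_l1, default_lInf);
    }

    INSTANTIATE_TEST_CASE_P(/**/, Test_MyLayer, dnnBackendsAndTargets());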
modules/dnn/test/test_tf_importer.cpp (diff collapsed)
modules/dnn/test/test_torch_importer.cpp

@@ -296,7 +296,6 @@ TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
     Mat inputBlob = blobFromImage(img, 1.0, Size(), Scalar(103.939, 116.779, 123.68), false);

     net.setInput(inputBlob);
-    net.setPreferableBackend(DNN_BACKEND_OPENCV);
     Mat out = net.forward();

     // Deprocessing.