Commit b92c3182 authored Apr 12, 2018 by Dmitry Kurtaev
Blank and L2-normalization layers from Intel's Inference Engine
parent 0b9d0759
Showing 3 changed files with 73 additions and 5 deletions:

    modules/dnn/src/layers/blank_layer.cpp            +20 -0
    modules/dnn/src/layers/normalize_bbox_layer.cpp   +30 -0
    modules/dnn/src/op_inf_engine.cpp                 +23 -5
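A minimal usage sketch (not part of this commit) of what these additions enable, assuming an OpenCV build with HAVE_INF_ENGINE; the model file names are placeholders:

    #include <opencv2/dnn.hpp>

    int main()
    {
        // Placeholder model files; any network whose layers are supported works.
        cv::dnn::Net net = cv::dnn::readNetFromCaffe("deploy.prototxt", "model.caffemodel");
        // Route supported layers, now including Blank (identity) and L2
        // normalization, through Intel's Inference Engine.
        net.setPreferableBackend(cv::dnn::DNN_BACKEND_INFERENCE_ENGINE);
        net.setInput(cv::dnn::blobFromImage(cv::Mat::zeros(300, 300, CV_8UC3)));
        cv::Mat out = net.forward();
        return 0;
    }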
modules/dnn/src/layers/blank_layer.cpp
@@ -40,6 +40,7 @@
 //
 //M*/
 #include "../precomp.hpp"
+#include "../op_inf_engine.hpp"
 
 namespace cv
 {
@@ -53,6 +54,12 @@ public:
         setParamsFrom(params);
     }
 
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
+    }
+
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
                          std::vector<MatShape> &outputs,
@@ -104,6 +111,19 @@ public:
             if (outputs[i].data != inputs[i]->data)
                 inputs[i]->copyTo(outputs[i]);
     }
 
+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "Split";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::SplitLayer> ieLayer(new InferenceEngine::SplitLayer(lp));
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
 };
 
 Ptr<Layer> BlankLayer::create(const LayerParams& params)
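The Blank layer is exported as an Inference Engine "Split" layer, which with a single output is effectively a pass-through. Note the operator precedence in supportBackend(): && binds tighter than ||, so the expression parses as DEFAULT || (INFERENCE_ENGINE && haveInfEngine()). A standalone sketch with hypothetical stand-in values (not OpenCV's actual constants):

    #include <cassert>

    enum { DNN_BACKEND_DEFAULT = 0, DNN_BACKEND_INFERENCE_ENGINE = 2 };  // stand-ins
    static bool haveInfEngine() { return false; }  // pretend IE is not built in

    int main()
    {
        int backendId = DNN_BACKEND_DEFAULT;
        // Parses as: DEFAULT || (INFERENCE_ENGINE && haveInfEngine())
        bool supported = backendId == DNN_BACKEND_DEFAULT ||
                         backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
        assert(supported);   // the default backend is always accepted

        backendId = DNN_BACKEND_INFERENCE_ENGINE;
        supported = backendId == DNN_BACKEND_DEFAULT ||
                    backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine();
        assert(!supported);  // IE is rejected when haveInfEngine() is false
        return 0;
    }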
modules/dnn/src/layers/normalize_bbox_layer.cpp
@@ -42,6 +42,7 @@
 #include "../precomp.hpp"
 #include "layers_common.hpp"
+#include "../op_inf_engine.hpp"
 
 namespace cv { namespace dnn {
@@ -60,6 +61,13 @@ public:
         CV_Assert(pnorm > 0);
     }
 
+    virtual bool supportBackend(int backendId) CV_OVERRIDE
+    {
+        return backendId == DNN_BACKEND_DEFAULT ||
+               backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine() &&
+               pnorm == 2 && !blobs.empty();
+    }
+
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
                          const int requiredOutputs,
                          std::vector<MatShape> &outputs,
@@ -228,6 +236,28 @@ public:
         }
     }
 
+    virtual Ptr<BackendNode> initInfEngine(const std::vector<Ptr<BackendWrapper> >&) CV_OVERRIDE
+    {
+#ifdef HAVE_INF_ENGINE
+        InferenceEngine::LayerParams lp;
+        lp.name = name;
+        lp.type = "Normalize";
+        lp.precision = InferenceEngine::Precision::FP32;
+        std::shared_ptr<InferenceEngine::CNNLayer> ieLayer(new InferenceEngine::CNNLayer(lp));
+
+        CV_Assert(!blobs.empty());
+        ieLayer->params["eps"] = format("%f", epsilon);
+        ieLayer->params["across_spatial"] = acrossSpatial ? "1" : "0";
+        ieLayer->params["channel_shared"] = blobs[0].total() == 1 ? "1" : "0";
+
+        const int numChannels = blobs[0].total();
+        ieLayer->blobs["weights"] = wrapToInfEngineBlob(blobs[0], {numChannels}, InferenceEngine::Layout::C);
+        return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
+#endif  // HAVE_INF_ENGINE
+        return Ptr<BackendNode>();
+    }
+
 private:
     int startAxis, endAxis;
 };
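For reference, a minimal sketch of the channel-wise L2 normalization this layer computes when pnorm == 2 (not OpenCV's implementation; a simplified per-position view of one channel vector):

    #include <cmath>
    #include <vector>

    // y[c] = x[c] * w[c or 0] / sqrt(sum_k x[k]^2 + eps).
    // A single-element w is shared across channels, matching the
    // "channel_shared" parameter exported to the IE layer above.
    std::vector<float> l2Normalize(const std::vector<float>& x,
                                   const std::vector<float>& w,
                                   float eps)
    {
        float sumSq = 0.f;
        for (float v : x)
            sumSq += v * v;
        const float norm = std::sqrt(sumSq + eps);

        std::vector<float> y(x.size());
        for (size_t c = 0; c < x.size(); ++c)
            y[c] = x[c] * (w.size() == 1 ? w[0] : w[c]) / norm;
        return y;
    }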
modules/dnn/src/op_inf_engine.cpp
@@ -18,6 +18,11 @@ namespace cv { namespace dnn {
 
 #ifdef HAVE_INF_ENGINE
 
+static int infEngineVersion()
+{
+    return std::atoi(InferenceEngine::GetInferenceEngineVersion()->buildNumber);
+}
+
 InfEngineBackendNode::InfEngineBackendNode(const InferenceEngine::CNNLayerPtr& _layer)
     : BackendNode(DNN_BACKEND_INFERENCE_ENGINE), layer(_layer) {}
@@ -58,9 +63,23 @@ static InferenceEngine::DataPtr wrapToInfEngineDataNode(const Mat& m, const std:
 {
     std::vector<size_t> reversedShape(&m.size[0], &m.size[0] + m.dims);
     std::reverse(reversedShape.begin(), reversedShape.end());
-    return InferenceEngine::DataPtr(
-        new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32));
+    if (infEngineVersion() > 5855)
+    {
+        InferenceEngine::Layout l = InferenceEngine::Layout::ANY;
+        if (m.dims == 4)
+            l = InferenceEngine::Layout::NCHW;
+        else if (m.dims == 2)
+            l = InferenceEngine::Layout::NC;
+        return InferenceEngine::DataPtr(
+            new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32, l));
+    }
+    else
+    {
+        return InferenceEngine::DataPtr(
+            new InferenceEngine::Data(name, reversedShape, InferenceEngine::Precision::FP32));
+    }
 }
 
 InferenceEngine::TBlob<float>::Ptr wrapToInfEngineBlob(const Mat& m, const std::vector<size_t>& shape,
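wrapToInfEngineDataNode stores the cv::Mat shape in reversed order before constructing the IE Data object. A small standalone illustration of that reversal (the shape values are hypothetical):

    #include <algorithm>
    #include <cstddef>
    #include <iostream>
    #include <vector>

    int main()
    {
        // A 4-D NCHW cv::Mat shape, as stored in m.size[0..dims-1].
        int size[] = {1, 3, 224, 224};
        int dims = 4;
        std::vector<size_t> reversedShape(&size[0], &size[0] + dims);
        std::reverse(reversedShape.begin(), reversedShape.end());
        for (size_t d : reversedShape)
            std::cout << d << ' ';  // prints: 224 224 3 1
        return 0;
    }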
@@ -336,10 +355,9 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
     InferenceEngine::StatusCode status;
     InferenceEngine::ResponseDesc resp;
-    const InferenceEngine::Version* v = InferenceEngine::GetInferenceEngineVersion();
     plugin = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
-    if (std::atoi(v->buildNumber) > 5855)
+    if (infEngineVersion() > 5855 && targetDevice == InferenceEngine::TargetDevice::eCPU)
     {
 #ifdef _WIN32
         InferenceEngine::IExtensionPtr extension =
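infEngineVersion() centralizes the buildNumber comparison that initPlugin() previously did inline with std::atoi. A hypothetical standalone illustration of the check (the literal string stands in for GetInferenceEngineVersion()->buildNumber):

    #include <cstdlib>
    #include <iostream>

    int main()
    {
        // Stand-in for InferenceEngine::GetInferenceEngineVersion()->buildNumber.
        const char* buildNumber = "6000";
        if (std::atoi(buildNumber) > 5855)
            std::cout << "newer IE: pass explicit NCHW/NC layouts, load CPU extensions\n";
        else
            std::cout << "older IE: construct Data without a Layout argument\n";
        return 0;
    }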