submodule / opencv / Commits

Commit ab389142, authored Jun 01, 2018 by Dmitry Kurtaev
Fix multiple networks with Intel's Inference Engine backend
parent 1822e85f
Showing 3 changed files with 49 additions and 15 deletions (+49 / -15):

    modules/dnn/src/op_inf_engine.cpp    +20  -14
    modules/dnn/src/op_inf_engine.hpp     +4   -1
    modules/dnn/test/test_layers.cpp     +25   -0
modules/dnn/src/op_inf_engine.cpp
@@ -361,10 +361,20 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
 {
     CV_Assert(!isInitialized());
 
-    InferenceEngine::StatusCode status;
-    InferenceEngine::ResponseDesc resp;
+    static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
+    std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
+    auto pluginIt = sharedPlugins.find(deviceName);
+    if (pluginIt != sharedPlugins.end())
+    {
+        enginePtr = pluginIt->second;
+    }
+    else
+    {
+        enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
+        sharedPlugins[deviceName] = enginePtr;
+    }
+    plugin = InferenceEngine::InferencePlugin(enginePtr);
 
-    plugin = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
     if (targetDevice == InferenceEngine::TargetDevice::eCPU)
     {
 #ifdef _WIN32
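The hunk above is the core of the fix: rather than dispatching a fresh plugin for every InfEngineBackendNet, the backend now keeps one InferenceEnginePluginPtr per target device in a function-local static map and reuses it, then wraps the shared pointer in an InferencePlugin per network. A distilled, standalone sketch of that caching pattern follows; it is an illustration only, written against the 2018-era Inference Engine API used here, and the helper name getSharedPlugin and the umbrella include are assumptions of the sketch rather than part of the patch.

    #include <map>
    #include <string>
    #include <inference_engine.hpp>  // assumed 2018-era Inference Engine umbrella header

    // Hypothetical helper illustrating the per-device plugin cache this commit introduces.
    static InferenceEngine::InferenceEnginePluginPtr getSharedPlugin(InferenceEngine::TargetDevice device)
    {
        // One plugin per device name, shared by every network in the process.
        static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
        const std::string deviceName = InferenceEngine::getDeviceName(device);
        auto it = sharedPlugins.find(deviceName);
        if (it != sharedPlugins.end())
            return it->second;  // reuse the plugin created by an earlier network
        // First network on this device: dispatch a suitable plugin and cache it.
        InferenceEngine::InferenceEnginePluginPtr enginePtr =
            InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(device);
        sharedPlugins[deviceName] = enginePtr;
        return enginePtr;
    }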
@@ -374,18 +384,17 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
         InferenceEngine::IExtensionPtr extension =
             InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("libcpu_extension.so");
 #endif  // _WIN32
-        status = plugin->AddExtension(extension, &resp);
-        if (status != InferenceEngine::StatusCode::OK)
-            CV_Error(Error::StsAssert, resp.msg);
+        plugin.AddExtension(extension);
     }
-    status = plugin->LoadNetwork(net, &resp);
-    if (status != InferenceEngine::StatusCode::OK)
-        CV_Error(Error::StsAssert, resp.msg);
+    netExec = plugin.LoadNetwork(net, {});
+    infRequest = netExec.CreateInferRequest();
+    infRequest.SetInput(inpBlobs);
+    infRequest.SetOutput(outBlobs);
 }
 
 bool InfEngineBackendNet::isInitialized()
 {
-    return (bool)plugin;
+    return (bool)enginePtr;
 }
 
 void InfEngineBackendNet::addBlobs(const std::vector<Ptr<BackendWrapper> >& ptrs)
@@ -399,10 +408,7 @@ void InfEngineBackendNet::addBlobs(const std::vector<Ptr<BackendWrapper> >& ptrs)
 
 void InfEngineBackendNet::forward()
 {
-    InferenceEngine::ResponseDesc resp;
-    InferenceEngine::StatusCode status = plugin->Infer(inpBlobs, outBlobs, &resp);
-    if (status != InferenceEngine::StatusCode::OK)
-        CV_Error(Error::StsAssert, resp.msg);
+    infRequest.Infer();
 }
 
 Mat infEngineBlobToMat(const InferenceEngine::Blob::Ptr& blob)
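The last two hunks replace the raw plugin calls (plugin->AddExtension, plugin->LoadNetwork and plugin->Infer with manual StatusCode/ResponseDesc checks) with the InferencePlugin, ExecutableNetwork and InferRequest wrappers, so each network owns its own infer request on top of the shared plugin. In isolation, the request-based flow looks roughly like this sketch, which follows the names used in the patch and assumes net, inpBlobs and outBlobs are the corresponding members of InfEngineBackendNet:

    // Sketch of the request-based flow adopted by the patch (2018-era IE API).
    InferenceEngine::InferencePlugin plugin(enginePtr);                        // wrap the shared per-device plugin
    InferenceEngine::ExecutableNetwork netExec = plugin.LoadNetwork(net, {});  // compile this network once
    InferenceEngine::InferRequest infRequest = netExec.CreateInferRequest();   // per-network request
    infRequest.SetInput(inpBlobs);     // bind this network's own input blobs
    infRequest.SetOutput(outBlobs);    // bind this network's own output blobs
    infRequest.Infer();                // later, in InfEngineBackendNet::forward()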
modules/dnn/src/op_inf_engine.hpp
@@ -89,7 +89,10 @@ private:
     InferenceEngine::BlobMap allBlobs;
     InferenceEngine::TargetDevice targetDevice;
     InferenceEngine::Precision precision;
-    InferenceEngine::InferenceEnginePluginPtr plugin;
+    InferenceEngine::InferenceEnginePluginPtr enginePtr;
+    InferenceEngine::InferencePlugin plugin;
+    InferenceEngine::ExecutableNetwork netExec;
+    InferenceEngine::InferRequest infRequest;
 
     void initPlugin(InferenceEngine::ICNNNetwork& net);
 };
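Taken together, the reworked members divide up as follows (an annotated restatement of the declarations above for orientation; the comments are mine, not part of the patch):

    InferenceEngine::InferenceEnginePluginPtr enginePtr;  // raw plugin handle, shared across networks via the static per-device map
    InferenceEngine::InferencePlugin plugin;              // this network's wrapper around the shared plugin
    InferenceEngine::ExecutableNetwork netExec;           // this network compiled for the target device
    InferenceEngine::InferRequest infRequest;             // per-network request that owns the input/output blobs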
modules/dnn/test/test_layers.cpp
@@ -887,6 +887,31 @@ TEST(Test_DLDT, fused_output)
     ASSERT_NO_THROW(net.forward());
     LayerFactory::unregisterLayer("Unsupported");
 }
+
+TEST(Test_DLDT, multiple_networks)
+{
+    Net nets[2];
+    for (int i = 0; i < 2; ++i)
+    {
+        nets[i].setInputsNames(std::vector<String>(1, format("input_%d", i)));
+
+        LayerParams lp;
+        lp.set("kernel_size", 1);
+        lp.set("num_output", 1);
+        lp.set("bias_term", false);
+        lp.type = "Convolution";
+        lp.name = format("testConv_%d", i);
+        lp.blobs.push_back(Mat({1, 1, 1, 1}, CV_32F, Scalar(1 + i)));
+        nets[i].addLayerToPrev(lp.name, lp.type, lp);
+        nets[i].setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
+        nets[i].setInput(Mat({1, 1, 1, 1}, CV_32FC1, Scalar(1)));
+    }
+    Mat out_1 = nets[0].forward();
+    Mat out_2 = nets[1].forward();
+    // After the second model is initialized we try to receive an output from the first network again.
+    out_1 = nets[0].forward();
+    normAssert(2 * out_1, out_2);
+}
 #endif  // HAVE_INF_ENGINE
 
 // Test a custom layer.
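The new multiple_networks test reproduces the user-visible bug: with the Inference Engine backend, initializing a second cv::dnn::Net used to break inference on the first one. The final assertion normAssert(2 * out_1, out_2) holds because the two 1x1 convolutions use weights 1 and 2 on an all-ones input. A hypothetical user-level scenario that this commit makes work, with placeholder model files that are not part of the patch:

    #include <opencv2/dnn.hpp>

    void runTwoNetworks(const cv::Mat& blob)
    {
        using namespace cv::dnn;
        Net a = readNetFromCaffe("a.prototxt", "a.caffemodel");  // placeholder models
        Net b = readNetFromCaffe("b.prototxt", "b.caffemodel");
        a.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);
        b.setPreferableBackend(DNN_BACKEND_INFERENCE_ENGINE);

        a.setInput(blob);
        cv::Mat outA = a.forward();
        b.setInput(blob);
        cv::Mat outB = b.forward();

        // Before this fix, returning to the first network after the second one had
        // been initialized could fail; now each network keeps its own infer request
        // on a shared per-device plugin.
        a.setInput(blob);
        outA = a.forward();
    }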