submodule / opencv / Commits / 00925ad7

Commit 00925ad7 authored Mar 14, 2020 by Alexander Alekhin

Merge pull request #16809 from alalek:dnn_ie_separate_core_instances

Parents: 683910f5 9b64eadc

Showing 4 changed files with 35 additions and 21 deletions:

modules/dnn/src/dnn.cpp            +3   -3
modules/dnn/src/ie_ngraph.cpp      +1   -1
modules/dnn/src/op_inf_engine.cpp  +30  -16
modules/dnn/src/op_inf_engine.hpp  +1   -1
modules/dnn/src/dnn.cpp

@@ -108,7 +108,7 @@ public:
     {
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R3)
         // Lightweight detection
-        const std::vector<std::string> devices = getCore().GetAvailableDevices();
+        const std::vector<std::string> devices = getCore("").GetAvailableDevices();
         for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
         {
             if (std::string::npos != i->find("MYRIAD") && target == DNN_TARGET_MYRIAD)

@@ -3253,7 +3253,7 @@ Net Net::readFromModelOptimizer(const String& xml, const String& bin)
     InferenceEngine::CNNNetwork ieNet = reader.getNetwork();
 #else
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("");
     InferenceEngine::CNNNetwork ieNet = ie.ReadNetwork(xml, bin);
 #endif

@@ -3302,7 +3302,7 @@ Net Net::readFromModelOptimizer(
     InferenceEngine::CNNNetwork ieNet = reader.getNetwork();
 #else
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("");
     std::string model; model.assign((char*)bufferModelConfigPtr, bufferModelConfigSize);
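For orientation, the dnn.cpp hunks above switch the call sites to the new keyed accessor: the lightweight target check asks the default core ("") for its device list and looks for a name containing "MYRIAD". The following is a minimal, self-contained C++ sketch of that scan only; the hard-coded device list stands in for Core::GetAvailableDevices() and is an assumption for illustration, not OpenCV or Inference Engine code.

#include <algorithm>
#include <iostream>
#include <string>
#include <vector>

// Stand-in for InferenceEngine::Core::GetAvailableDevices() (assumed sample output).
static std::vector<std::string> getAvailableDevices()
{
    return {"CPU", "GPU", "MYRIAD.1.2-ma2480"};
}

// Returns true if any reported device name contains the given substring.
static bool hasDevice(const std::vector<std::string>& devices, const std::string& needle)
{
    return std::any_of(devices.begin(), devices.end(),
                       [&](const std::string& d) { return d.find(needle) != std::string::npos; });
}

int main()
{
    const std::vector<std::string> devices = getAvailableDevices();
    std::cout << "MYRIAD available: " << (hasDevice(devices, "MYRIAD") ? "yes" : "no") << "\n";
    return 0;
}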
modules/dnn/src/ie_ngraph.cpp

@@ -524,7 +524,7 @@ void InfEngineNgraphNet::initPlugin(InferenceEngine::CNNNetwork& net)
     try
     {
         AutoLock lock(getInitializationMutex());
-        InferenceEngine::Core& ie = getCore();
+        InferenceEngine::Core& ie = getCore(device_name);
         {
             isInit = true;
             std::vector<std::string> candidates;
modules/dnn/src/op_inf_engine.cpp

@@ -604,18 +604,31 @@ static bool init_IE_plugins()
     (void)init_core->GetAvailableDevices();
     return true;
 }
-static InferenceEngine::Core& create_IE_Core_instance()
+static InferenceEngine::Core& retrieveIECore(const std::string& id, std::map<std::string, std::shared_ptr<InferenceEngine::Core> >& cores)
 {
-    static InferenceEngine::Core core;
-    return core;
+    AutoLock lock(getInitializationMutex());
+    std::map<std::string, std::shared_ptr<InferenceEngine::Core> >::iterator i = cores.find(id);
+    if (i == cores.end())
+    {
+        std::shared_ptr<InferenceEngine::Core> core = std::make_shared<InferenceEngine::Core>();
+        cores[id] = core;
+        return *core.get();
+    }
+    return *(i->second).get();
 }
-static InferenceEngine::Core& create_IE_Core_pointer()
+static InferenceEngine::Core& create_IE_Core_instance(const std::string& id)
+{
+    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> > cores;
+    return retrieveIECore(id, cores);
+}
+static InferenceEngine::Core& create_IE_Core_pointer(const std::string& id)
 {
     // load and hold IE plugins
-    static InferenceEngine::Core* core = new InferenceEngine::Core();  // 'delete' is never called
-    return *core;
+    static std::map<std::string, std::shared_ptr<InferenceEngine::Core> >* cores =
+            new std::map<std::string, std::shared_ptr<InferenceEngine::Core> >();
+    return retrieveIECore(id, *cores);
 }
-InferenceEngine::Core& getCore()
+InferenceEngine::Core& getCore(const std::string& id)
 {
     // to make happy memory leak tools use:
     // - OPENCV_DNN_INFERENCE_ENGINE_HOLD_PLUGINS=0

@@ -631,9 +644,10 @@ InferenceEngine::Core& getCore()
                 false
 #endif
     );
-    static InferenceEngine::Core& core = param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
-            ? create_IE_Core_pointer()
-            : create_IE_Core_instance();
+    InferenceEngine::Core& core = param_DNN_INFERENCE_ENGINE_CORE_LIFETIME_WORKAROUND
+            ? create_IE_Core_pointer(id)
+            : create_IE_Core_instance(id);
     return core;
 }
 #endif

@@ -641,9 +655,10 @@ InferenceEngine::Core& getCore()
 #if !defined(OPENCV_DNN_IE_VPU_TYPE_DEFAULT)
 static bool detectMyriadX_()
 {
+    AutoLock lock(getInitializationMutex());
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R3)
     // Lightweight detection
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("MYRIAD");
     const std::vector<std::string> devices = ie.GetAvailableDevices();
     for (std::vector<std::string>::const_iterator i = devices.begin(); i != devices.end(); ++i)
     {

@@ -687,7 +702,6 @@ static bool detectMyriadX_()
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
     InferenceEngine::InferenceEnginePluginPtr enginePtr;
     {
-        AutoLock lock(getInitializationMutex());
         auto& sharedPlugins = getSharedPlugins();
         auto pluginIt = sharedPlugins.find("MYRIAD");
         if (pluginIt != sharedPlugins.end()) {

@@ -706,9 +720,9 @@ static bool detectMyriadX_()
     try
     {
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R3)
-        auto netExec = getCore().LoadNetwork(cnn, "MYRIAD", {{"VPU_PLATFORM", "VPU_2480"}});
+        auto netExec = getCore("MYRIAD").LoadNetwork(cnn, "MYRIAD", {{"VPU_PLATFORM", "VPU_2480"}});
 #else
-        auto netExec = getCore().LoadNetwork(cnn, "MYRIAD", {{"VPU_MYRIAD_PLATFORM", "VPU_MYRIAD_2480"}});
+        auto netExec = getCore("MYRIAD").LoadNetwork(cnn, "MYRIAD", {{"VPU_MYRIAD_PLATFORM", "VPU_MYRIAD_2480"}});
 #endif
 #endif
         auto infRequest = netExec.CreateInferRequest();

@@ -739,7 +753,7 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::CNNNetwork& net)
     }
     else
 #else
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore(device_name);
 #endif
     {
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)

@@ -1124,7 +1138,7 @@ void resetMyriadDevice()
     getSharedPlugins().erase("MYRIAD");
 #else
     // Unregister both "MYRIAD" and "HETERO:MYRIAD,CPU" plugins
-    InferenceEngine::Core& ie = getCore();
+    InferenceEngine::Core& ie = getCore("MYRIAD");
     try
     {
         ie.UnregisterPlugin("MYRIAD");
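The op_inf_engine.cpp hunks above are the heart of the change: instead of a single function-local static Core, retrieveIECore() keeps one lazily created Core per id string in a mutex-guarded map, and getCore(id) chooses between a function-local static map and an intentionally never-deleted heap map depending on the lifetime-workaround flag. The self-contained sketch below mirrors that shape with a placeholder Core type and a plain std::mutex; all names and types here are illustrative assumptions, not the actual OpenCV or Inference Engine API.

#include <iostream>
#include <map>
#include <memory>
#include <mutex>
#include <string>

// Placeholder standing in for InferenceEngine::Core (illustrative only).
struct Core
{
    explicit Core(const std::string& id) { std::cout << "created Core('" << id << "')\n"; }
};

static std::mutex& initializationMutex()
{
    static std::mutex m;
    return m;
}

// One lazily created Core per id, shared by both storage strategies below.
static Core& retrieveCore(const std::string& id, std::map<std::string, std::shared_ptr<Core> >& cores)
{
    std::lock_guard<std::mutex> lock(initializationMutex());
    auto it = cores.find(id);
    if (it == cores.end())
        it = cores.emplace(id, std::make_shared<Core>(id)).first;
    return *it->second;
}

// Storage with normal static lifetime: destroyed at program exit.
static Core& coreFromStaticMap(const std::string& id)
{
    static std::map<std::string, std::shared_ptr<Core> > cores;
    return retrieveCore(id, cores);
}

// Storage that is deliberately never deleted, sidestepping destruction-order
// problems at shutdown (analogous to the "lifetime workaround" branch).
static Core& coreFromLeakedMap(const std::string& id)
{
    static auto* cores = new std::map<std::string, std::shared_ptr<Core> >();  // never deleted
    return retrieveCore(id, *cores);
}

// Dispatch analogous to getCore(id): pick a storage strategy via a flag.
static Core& getCoreSketch(const std::string& id, bool lifetimeWorkaround)
{
    return lifetimeWorkaround ? coreFromLeakedMap(id) : coreFromStaticMap(id);
}

int main()
{
    Core& a = getCoreSketch("MYRIAD", /*lifetimeWorkaround=*/false);
    Core& b = getCoreSketch("MYRIAD", false);   // cached: no second construction
    Core& c = getCoreSketch("", false);         // distinct id -> distinct instance
    std::cout << "same MYRIAD instance: " << (&a == &b) << "\n";
    std::cout << "distinct ids distinct: " << (&a != &c) << "\n";
    return 0;
}

Keying the cache by device string is what lets MYRIAD callers (detectMyriadX_, resetMyriadDevice) and generic callers ("" or device_name) hold independent Core objects instead of contending for one shared instance.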
modules/dnn/src/op_inf_engine.hpp

@@ -245,7 +245,7 @@ bool isMyriadX();
 CV__DNN_EXPERIMENTAL_NS_END

-InferenceEngine::Core& getCore();
+InferenceEngine::Core& getCore(const std::string& id);

 template<typename T = size_t>
 static inline std::vector<T> getShape(const Mat& mat)