Commit f3a6ae5f authored Jun 05, 2018 by Dmitry Kurtaev
Wrap Inference Engine init to try-catch
parent 3cbd2e27
Showing 4 changed files with 59 additions and 28 deletions
modules/dnn/perf/perf_net.cpp       +1  -1
modules/dnn/src/dnn.cpp             +7  -1
modules/dnn/src/op_inf_engine.cpp   +50 -25
modules/dnn/test/test_backends.cpp  +1  -1
modules/dnn/perf/perf_net.cpp
@@ -34,7 +34,7 @@ public:
     void processNet(std::string weights, std::string proto, std::string halide_scheduler,
                     const Mat& input, const std::string& outputLayer = "")
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #if defined(HAVE_OPENCL)
             if (!cv::ocl::useOpenCL())
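For context, a minimal usage sketch (not part of this commit; the model file names are placeholders) of how the OpenCL FP16 target that this guard now also covers is requested through the public DNN API:

#include <opencv2/dnn.hpp>

// Hypothetical usage: request the FP16 OpenCL target on the OpenCV backend.
// "deploy.prototxt" / "weights.caffemodel" are placeholder file names.
void runOnOpenCLFp16()
{
    cv::dnn::Net net = cv::dnn::readNetFromCaffe("deploy.prototxt", "weights.caffemodel");
    net.setPreferableBackend(cv::dnn::DNN_BACKEND_OPENCV);
    net.setPreferableTarget(cv::dnn::DNN_TARGET_OPENCL_FP16);

    // A dummy 224x224 BGR input, converted to the 4D blob layout the DNN module expects.
    cv::Mat blob = cv::dnn::blobFromImage(cv::Mat::zeros(224, 224, CV_8UC3));
    net.setInput(blob);
    cv::Mat out = net.forward();
    (void)out;
}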
modules/dnn/src/dnn.cpp
@@ -2252,7 +2252,13 @@ void Net::setPreferableTarget(int targetId)
         if (IS_DNN_OPENCL_TARGET(targetId))
         {
 #ifndef HAVE_OPENCL
-            impl->preferableTarget = DNN_TARGET_CPU;
+#ifdef HAVE_INF_ENGINE
+            if (impl->preferableBackend == DNN_BACKEND_OPENCV)
+#else
+            if (impl->preferableBackend == DNN_BACKEND_DEFAULT ||
+                impl->preferableBackend == DNN_BACKEND_OPENCV)
+#endif  // HAVE_INF_ENGINE
+                impl->preferableTarget = DNN_TARGET_CPU;
 #else
             bool fp16 = ocl::Device::getDefault().isExtensionSupported("cl_khr_fp16");
             if (!fp16 && targetId == DNN_TARGET_OPENCL_FP16)
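The nested preprocessor branches above are compact but easy to misread. As a plain-C++ paraphrase (a sketch only, not library code, with the build flags turned into a runtime boolean), the behaviour when OpenCV is built without OpenCL is:

#include <opencv2/dnn.hpp>

// Sketch of the fallback encoded by the #ifndef HAVE_OPENCL block above:
// without OpenCV's own OpenCL, an OpenCL target is downgraded to CPU, but only
// for backends that rely on OpenCV's OpenCL. When Inference Engine is built in,
// DNN_BACKEND_DEFAULT may resolve to it, and it brings its own OpenCL support,
// so only DNN_BACKEND_OPENCV is downgraded in that configuration.
static int resolveTargetWithoutOpenCL(int preferableBackend, int requestedTarget,
                                      bool builtWithInfEngine)
{
    using namespace cv::dnn;
    bool downgradeToCpu = builtWithInfEngine
        ? (preferableBackend == DNN_BACKEND_OPENCV)
        : (preferableBackend == DNN_BACKEND_DEFAULT ||
           preferableBackend == DNN_BACKEND_OPENCV);
    return downgradeToCpu ? DNN_TARGET_CPU : requestedTarget;
}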
modules/dnn/src/op_inf_engine.cpp
@@ -361,35 +361,60 @@ void InfEngineBackendNet::initPlugin(InferenceEngine::ICNNNetwork& net)
 {
     CV_Assert(!isInitialized());
 
-    static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
-    std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
-    auto pluginIt = sharedPlugins.find(deviceName);
-    if (pluginIt != sharedPlugins.end())
+    try
     {
-        enginePtr = pluginIt->second;
-    }
-    else
-    {
-        enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
-        sharedPlugins[deviceName] = enginePtr;
-    }
-    plugin = InferenceEngine::InferencePlugin(enginePtr);
-
-    if (targetDevice == InferenceEngine::TargetDevice::eCPU)
-    {
-#ifdef _WIN32
-        InferenceEngine::IExtensionPtr extension =
-                InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("cpu_extension.dll");
-#else
-        InferenceEngine::IExtensionPtr extension =
-                InferenceEngine::make_so_pointer<InferenceEngine::IExtension>("libcpu_extension.so");
-#endif  // _WIN32
-        plugin.AddExtension(extension);
-    }
-    netExec = plugin.LoadNetwork(net, {});
-    infRequest = netExec.CreateInferRequest();
-    infRequest.SetInput(inpBlobs);
-    infRequest.SetOutput(outBlobs);
+        static std::map<std::string, InferenceEngine::InferenceEnginePluginPtr> sharedPlugins;
+        std::string deviceName = InferenceEngine::getDeviceName(targetDevice);
+        auto pluginIt = sharedPlugins.find(deviceName);
+        if (pluginIt != sharedPlugins.end())
+        {
+            enginePtr = pluginIt->second;
+        }
+        else
+        {
+            enginePtr = InferenceEngine::PluginDispatcher({""}).getSuitablePlugin(targetDevice);
+            sharedPlugins[deviceName] = enginePtr;
+
+            if (targetDevice == InferenceEngine::TargetDevice::eCPU)
+            {
+                std::string suffixes[] = {"_avx2", "_sse4", ""};
+                bool haveFeature[] = {
+                    checkHardwareSupport(CPU_AVX2),
+                    checkHardwareSupport(CPU_SSE4_2),
+                    true
+                };
+                for (int i = 0; i < 3; ++i)
+                {
+                    if (!haveFeature[i])
+                        continue;
+#ifdef _WIN32
+                    std::string libName = "cpu_extension" + suffixes[i] + ".dll";
+#else
+                    std::string libName = "libcpu_extension" + suffixes[i] + ".so";
+#endif  // _WIN32
+                    try
+                    {
+                        InferenceEngine::IExtensionPtr extension =
+                            InferenceEngine::make_so_pointer<InferenceEngine::IExtension>(libName);
+                        enginePtr->AddExtension(extension, 0);
+                        break;
+                    }
+                    catch(...) {}
+                }
+                // Some of networks can work without a library of extra layers.
+            }
+        }
+        plugin = InferenceEngine::InferencePlugin(enginePtr);
+
+        netExec = plugin.LoadNetwork(net, {});
+        infRequest = netExec.CreateInferRequest();
+        infRequest.SetInput(inpBlobs);
+        infRequest.SetOutput(outBlobs);
+    }
+    catch (const std::exception& ex)
+    {
+        CV_Error(Error::StsAssert, format("Failed to initialize Inference Engine backend: %s", ex.what()));
+    }
 }
 
 bool InfEngineBackendNet::isInitialized()
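The core of the commit is the pattern visible at the edges of this hunk: the whole plugin lookup, extension loading and network initialization now run inside a try block, and any std::exception is converted into an OpenCV error with a descriptive message. A stripped-down sketch of that pattern (the throw is only a stand-in for whatever the Inference Engine runtime might raise):

#include <opencv2/core.hpp>
#include <stdexcept>

// Sketch of the error-reporting pattern introduced here: third-party exceptions
// are caught and re-reported through CV_Error, so callers get a cv::Exception
// with a clear "Failed to initialize Inference Engine backend" message instead
// of an unhandled exception escaping from the plugin loader.
static void initBackendOrReport()
{
    try
    {
        // Placeholder for the real work: plugin lookup, optional cpu_extension
        // loading, LoadNetwork, CreateInferRequest, ...
        throw std::runtime_error("suitable plugin not found");
    }
    catch (const std::exception& ex)
    {
        CV_Error(cv::Error::StsAssert,
                 cv::format("Failed to initialize Inference Engine backend: %s", ex.what()));
    }
}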
modules/dnn/test/test_backends.cpp
@@ -40,7 +40,7 @@ public:
                     std::string halideScheduler = "",
                     double l1 = 0.0, double lInf = 0.0)
     {
-        if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL)
+        if (backend == DNN_BACKEND_OPENCV && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
         {
 #ifdef HAVE_OPENCL
             if (!cv::ocl::useOpenCL())