Commit 38180c2c authored Dec 05, 2019 by Alexander Alekhin
Merge pull request #16014 from dkurt:dnn_ie_pooling_with_indices
Parents: 95e36fd4, d8e10f3a
Showing 5 changed files with 39 additions and 17 deletions.
modules/dnn/perf/perf_net.cpp          +1  -1
modules/dnn/src/dnn.cpp                +1  -1
modules/dnn/src/op_inf_engine.cpp      +34 -11
modules/dnn/src/op_inf_engine.hpp      +1  -1
modules/dnn/test/test_tf_importer.cpp  +2  -3

modules/dnn/perf/perf_net.cpp
@@ -108,7 +108,7 @@ PERF_TEST_P_(DNNTestNetwork, Inception_5h)
 PERF_TEST_P_(DNNTestNetwork, ENet)
 {
-    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019) ||
+    if ((backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target != DNN_TARGET_CPU) ||
         (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
         throw SkipTestException("");
     processNet("dnn/Enet-model-best.net", "", "enet.yml",

modules/dnn/src/dnn.cpp
@@ -1624,7 +1624,7 @@ struct Net::Impl
             Ptr<Layer> layer = ld.layerInstance;
             if (!fused && !layer->supportBackend(preferableBackend))
             {
-                bool customizable = ld.id != 0 && ld.outputBlobs.size() == 1 &&
+                bool customizable = ld.id != 0 &&
                                     INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R2) &&
                                     supportsCPUFallback;
                 // TODO: there is a bug in Myriad plugin with custom layers shape infer.

modules/dnn/src/op_inf_engine.cpp
@@ -278,11 +278,28 @@ void InfEngineBackendNet::connect(const std::vector<Ptr<BackendWrapper> >& input
     {
         const auto& inp = inpWrappers[i];
         const std::string& inpName = inp->dataPtr->getName();
+
+        std::string inpLayerName = inpName;
+        size_t inpPortId = inpName.rfind('.');
+        if (inpPortId != std::string::npos)
+        {
+            std::string portIdStr = inpName.substr(inpPortId + 1);
+            if (std::all_of(portIdStr.begin(), portIdStr.end(), ::isdigit))
+            {
+                inpLayerName = inpName.substr(0, inpPortId);
+                inpPortId = atoi(portIdStr.c_str());
+            }
+            else
+                inpPortId = 0;
+        }
+        else
+            inpPortId = 0;
+
         int inpId;
-        it = layers.find(inpName);
+        it = layers.find(inpLayerName);
         if (it == layers.end())
         {
-            InferenceEngine::Builder::InputLayer inpLayer(!inpName.empty() ? inpName : kDefaultInpLayerName);
+            InferenceEngine::Builder::InputLayer inpLayer(!inpLayerName.empty() ? inpLayerName : kDefaultInpLayerName);
             std::vector<size_t> shape(inp->blob->getTensorDesc().getDims());
             inpLayer.setPort(InferenceEngine::Port(shape));
             inpId = netBuilder.addLayer(inpLayer);

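Note: the block added above derives a layer name and an output port id from blob names of the form "<layer>.<port>", so that "pool1.1" selects the second output of "pool1", while a name with a non-numeric suffix (e.g. "my.layer") or no dot at all keeps port 0. A minimal standalone sketch of the same parsing, wrapped in a hypothetical parsePortName() helper that is not part of this patch:

#include <algorithm>
#include <cctype>
#include <cstdlib>
#include <iostream>
#include <string>
#include <utility>

// Hypothetical helper mirroring the logic added to connect():
// "pool1.1" -> {"pool1", 1}, "pool1" -> {"pool1", 0}, "my.layer" -> {"my.layer", 0}.
static std::pair<std::string, size_t> parsePortName(const std::string& inpName)
{
    std::string inpLayerName = inpName;
    size_t inpPortId = inpName.rfind('.');
    if (inpPortId != std::string::npos)
    {
        std::string portIdStr = inpName.substr(inpPortId + 1);
        if (std::all_of(portIdStr.begin(), portIdStr.end(), ::isdigit))
        {
            inpLayerName = inpName.substr(0, inpPortId);  // strip the ".<digits>" suffix
            inpPortId = atoi(portIdStr.c_str());          // numeric suffix is the port id
        }
        else
            inpPortId = 0;  // suffix is not numeric: whole name is the layer name
    }
    else
        inpPortId = 0;      // no '.' at all: single-output layer, port 0
    return {inpLayerName, inpPortId};
}

int main()
{
    for (const std::string& name : {"pool1", "pool1.1", "my.layer"})
    {
        std::pair<std::string, size_t> r = parsePortName(name);
        std::cout << name << " -> layer '" << r.first << "', port " << r.second << "\n";
    }
    return 0;
}
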
@@ -292,24 +309,28 @@ void InfEngineBackendNet::connect(const std::vector<Ptr<BackendWrapper> >& input
         else
             inpId = it->second;
-        netBuilder.connect((size_t)inpId, {(size_t)layerId, i});
-        unconnectedLayersIds.erase(inpId);
+        netBuilder.connect({(size_t)inpId, inpPortId}, {(size_t)layerId, i});
+        unconnectedPorts.erase({inpId, inpPortId});
     }
     CV_Assert(!outputs.empty());
 
-    InferenceEngine::DataPtr dataPtr = infEngineDataNode(outputs[0]);
+    for (int i = 0; i < outputs.size(); ++i)
+    {
+        InferenceEngine::DataPtr dataPtr = infEngineDataNode(outputs[i]);
+        std::string outputName = outputs.size() > 1 ? (layerName + "." + std::to_string(i)) : layerName;
 #if INF_ENGINE_VER_MAJOR_LE(INF_ENGINE_RELEASE_2019R1)
-    dataPtr->name = layerName;
+        dataPtr->name = outputName;
 #else
-    dataPtr->setName(layerName);
+        dataPtr->setName(outputName);
 #endif
+    }
 }
 
 void InfEngineBackendNet::init(Target targetId)
 {
     if (!hasNetOwner)
     {
-        CV_Assert(!unconnectedLayersIds.empty());
-        for (int id : unconnectedLayersIds)
+        CV_Assert(!unconnectedPorts.empty());
+        for (const auto& port : unconnectedPorts)
         {
             InferenceEngine::Builder::OutputLayer outLayer("myconv1");
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)

@@ -320,7 +341,7 @@ void InfEngineBackendNet::init(Target targetId)
                                            InferenceEngine::Precision::FP32;
             outLayer.setPort(InferenceEngine::Port({}, p));
 #endif
-            netBuilder.addLayer({InferenceEngine::PortInfo(id)}, outLayer);
+            netBuilder.addLayer({InferenceEngine::PortInfo(port.first, port.second)}, outLayer);
         }
         netBuilder.getContext().addShapeInferImpl(kOpenCVLayersType,
                     std::make_shared<InfEngineCustomLayerShapeInfer>());

@@ -409,8 +430,10 @@ void InfEngineBackendNet::addLayer(InferenceEngine::Builder::Layer& layer)
     int id = netBuilder.addLayer(layer);
     const std::string& layerName = layer.getName();
     CV_Assert(layers.insert({layerName, id}).second);
-    unconnectedLayersIds.insert(id);
+    for (int i = 0; i < layer.getOutputPorts().size(); ++i)
+        unconnectedPorts.insert({id, i});
 #if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     // By default, all the weights are connected to last ports ids.

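Note: taken together, these op_inf_engine.cpp changes move the builder bookkeeping from whole layers to individual output ports: addLayer() now records one (layerId, portId) pair per output port, connect() erases exactly the pair it consumes and names the i-th output of a multi-output layer "<layerName>.<i>", and init() attaches an OutputLayer to every pair still left in unconnectedPorts. A rough self-contained sketch of that bookkeeping, with a hypothetical FakeLayer type standing in for the InferenceEngine builder layers:

#include <iostream>
#include <set>
#include <string>
#include <utility>
#include <vector>

// Hypothetical stand-in for a builder layer; only the output count matters here.
struct FakeLayer
{
    std::string name;
    int numOutputs;
};

int main()
{
    std::set<std::pair<int, int> > unconnectedPorts;  // (layerId, portId), as in op_inf_engine.hpp
    std::vector<FakeLayer> layers = { {"conv1", 1}, {"pool1", 2} };  // pool1: e.g. pooling with indices

    // addLayer(): every output port of a newly added layer starts out unconnected.
    for (int id = 0; id < (int)layers.size(); ++id)
        for (int i = 0; i < layers[id].numOutputs; ++i)
            unconnectedPorts.insert({id, i});

    // connect(): a consumer of pool1's first output ("pool1.0") removes exactly that pair.
    unconnectedPorts.erase({1, 0});

    // init(): every pair still in the set would get an OutputLayer attached.
    // The "<name>.<i>" labels below reuse the naming convention connect() applies
    // to multi-output layers; they are printed here only for illustration.
    for (const auto& port : unconnectedPorts)
    {
        const FakeLayer& l = layers[port.first];
        std::string outName = l.numOutputs > 1 ? (l.name + "." + std::to_string(port.second)) : l.name;
        std::cout << "unconnected output: " << outName
                  << "  (layer " << port.first << ", port " << port.second << ")\n";
    }
    return 0;
}

Presumably (given the branch name dnn_ie_pooling_with_indices) this per-port tracking is what lets a pooling layer expose its indices output as a second, independently connectable port.
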
modules/dnn/src/op_inf_engine.hpp
@@ -132,7 +132,7 @@ private:
     std::map<std::string, int> layers;
     std::vector<std::string> requestedOutputs;
 
-    std::set<int> unconnectedLayersIds;
+    std::set<std::pair<int, int> > unconnectedPorts;
 };
 
 class InfEngineBackendNode : public BackendNode

modules/dnn/test/test_tf_importer.cpp
@@ -717,9 +717,8 @@ TEST_P(Test_TensorFlow_layers, lstm)
 TEST_P(Test_TensorFlow_layers, split)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD &&
-        getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2)
-        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE_NN_BUILDER_2019 && target == DNN_TARGET_MYRIAD)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_NN_BUILDER);
     runTensorFlowNet("split");
 }