opencv_contrib / Commits / b0d008ce

Commit b0d008ce authored Jun 22, 2017 by Aleksandr Rybnikov
Enabled tests for intermediate blobs in googlenet
parent e551d15c
Showing 2 changed files with 82 additions and 73 deletions (+82 / -73):

    modules/dnn/src/dnn.cpp              +81 / -71
    modules/dnn/test/test_googlenet.cpp   +1 / -2
modules/dnn/src/dnn.cpp
@@ -324,7 +324,6 @@ struct LayerData
         //add logging info
         params.name = name;
         params.type = type;
-        skip = false;
     }
 
     int id;
@@ -347,7 +346,6 @@ struct LayerData
     std::map<int, bool> skipFlags;
 
     int flag;
-    bool skip;
 
     Ptr<Layer> getLayerInstance()
     {
@@ -666,18 +664,39 @@ struct Net::Impl
         }
     }
 
-    void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
+    void clear()
     {
-        if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
+        MapIdToLayerData::iterator it;
+        for (it = layers.begin(); it != layers.end(); it++)
         {
-            MapIdToLayerData::iterator it;
-            for (it = layers.begin(); it != layers.end(); it++)
-                if (it->second.id != 0) {
-                    it->second.outputBlobs.clear();
-                    it->second.internals.clear();
-                }
+            if (it->second.id != 0) {
+                it->second.outputBlobs.clear();
+                it->second.internals.clear();
+            }
+            it->second.skipFlags.clear();
+            it->second.consumers.clear();
+            Ptr<ConvolutionLayer> convLayer = it->second.layerInstance.dynamicCast<ConvolutionLayer>();
+            if (!convLayer.empty())
+            {
+                convLayer->setActivation(Ptr<ActivationLayer>());
+                convLayer->setBatchNorm(Ptr<BatchNormLayer>());
+            }
+            Ptr<PoolingLayer> poolingLayer = it->second.layerInstance.dynamicCast<PoolingLayer>();
+            if (!poolingLayer.empty())
+            {
+                poolingLayer->computeMaxIdx = true;
+            }
+        }
+    }
+
+    void setUpNet(const std::vector<LayerPin>& blobsToKeep_ = std::vector<LayerPin>())
+    {
+        if (!netWasAllocated || this->blobsToKeep != blobsToKeep_)
+        {
+            clear();
             allocateLayers(blobsToKeep_);
             computeNetOutputLayers();
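The reason clear() was factored out of setUpNet() is the condition above: when a caller asks for a different set of blobs to keep, the net has to undo earlier fusion decisions (reset setActivation()/setBatchNorm() to empty Ptrs, restore computeMaxIdx) before allocating again. A usage sketch of how that re-setup gets triggered through the public API; the model files and the layer name are placeholders, and the call sequence is inferred from this diff rather than quoted from it:

    #include <opencv2/dnn.hpp>
    #include <opencv2/imgcodecs.hpp>

    using namespace cv;
    using namespace cv::dnn;

    int main()
    {
        // Placeholder model files and layer name (not from this commit).
        Net net = readNetFromCaffe("deploy.prototxt", "weights.caffemodel");
        net.setInput(blobFromImage(imread("input.jpg")));

        Mat prob = net.forward();             // first setUpNet(): fuse whatever is fusable
        Mat mid  = net.forward("conv2/relu"); // new pin in blobsToKeep: blobsToKeep != blobsToKeep_,
                                              // so clear() unwinds the fusion state and the net
                                              // is set up again with this blob kept intact
        return 0;
    }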
@@ -1005,69 +1024,41 @@ struct Net::Impl
         ld.flag = 1;
     }
 
-    void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
+    void fuseLayers(const std::vector<LayerPin>& blobsToKeep_)
     {
-        MapIdToLayerData::iterator it;
-        for (it = layers.begin(); it != layers.end(); it++)
-            it->second.flag = 0;
-
-        CV_Assert(!layers[0].outputBlobs.empty());
-        ShapesVec inputShapes;
-        for(int i = 0; i < layers[0].outputBlobs.size(); i++)
-        {
-            CV_Assert(layers[0].outputBlobs[i].total());
-            inputShapes.push_back(shape(layers[0].outputBlobs[i]));
-        }
-
-        LayersShapesMap layersShapes;
-        getLayersShapes(inputShapes, layersShapes);
-
-        blobManager.reset();
-        for (it = layers.begin(); it != layers.end(); ++it)
-        {
-            const LayerData& ld = it->second;
-            blobManager.addReferences(ld.inputBlobsId);
-        }
-
-        for (int i = 0; i < blobsToKeep_.size(); i++)
-        {
-            blobManager.addReference(blobsToKeep_[i]);
-        }
-
-        for (it = layers.begin(); it != layers.end(); it++)
-        {
-            int lid = it->first;
-            allocateLayer(lid, layersShapes);
-        }
-
         // scan through all the layers. If there is convolution layer followed by the activation layer,
         // we try to embed this activation into the convolution and disable separate execution of the activation
         std::vector<String> outnames;
+        std::set<LayerPin> pinsToKeep(blobsToKeep_.begin(), blobsToKeep_.end());
+        MapIdToLayerData::iterator it;
         for (it = layers.begin(); it != layers.end(); it++)
         {
             int lid = it->first;
             LayerData& ld = layers[lid];
-            if (ld.skip)
+            if (ld.skipFlags[DNN_BACKEND_DEFAULT])
             {
                 //printf("skipping %s\n", ld.layerInstance->name.c_str());
                 continue;
             }
             //printf("analyzing %s\n", ld.layerInstance->name.c_str());
             if (ld.consumers.size() == 0)
                 outnames.push_back(ld.layerInstance->name);
 
             Ptr<ConvolutionLayer> convLayer = ld.layerInstance.dynamicCast<ConvolutionLayer>();
-            if (!convLayer.empty() && ld.consumers.size() == 1)
+            LayerPin lp(lid, 0);
+            if (!convLayer.empty() && ld.consumers.size() == 1 && pinsToKeep.count(lp) == 0)
             {
                 LayerData* nextData = &layers[ld.consumers[0].lid];
                 Ptr<BatchNormLayer> nextBNormLayer = nextData->layerInstance.dynamicCast<BatchNormLayer>();
-                if (!nextBNormLayer.empty())
+                LayerPin lpNext(ld.consumers[0].lid, 0);
+                if (!nextBNormLayer.empty() && pinsToKeep.count(lpNext) == 0)
                 {
                     LayerData* bnormData = nextData;
                     nextData = 0;
                     if (convLayer->setBatchNorm(nextBNormLayer))
                     {
                         //printf("fused convolution (%s) and batch norm (%s)\n", convLayer->name.c_str(), nextBNormLayer->name.c_str());
-                        bnormData->skip = true;
+                        bnormData->skipFlags[DNN_BACKEND_DEFAULT] = true;
+                        ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                         if (bnormData->consumers.size() == 1)
                             nextData = &layers[bnormData->consumers[0].lid];
                     }
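What setBatchNorm() buys is not shown in this diff, so here is only a generic sketch of the algebra that makes conv+batchnorm fusion valid: batch normalization applied to a convolution's output is itself a convolution with per-channel rescaled weights and a shifted bias, so the extra layer can be folded away entirely. The function below is illustrative, not OpenCV's implementation:

    #include <cmath>
    #include <vector>

    // Generic BN folding (illustrative): for output channel c,
    //   y = gamma*(conv(x) + b - mean)/sqrt(var + eps) + beta
    // equals a convolution with weights scaled by s = gamma/sqrt(var + eps)
    // and bias b' = s*(b - mean) + beta.
    static void foldBatchNorm(std::vector<float>& weights, std::vector<float>& bias,
                              const std::vector<float>& gamma, const std::vector<float>& beta,
                              const std::vector<float>& mean, const std::vector<float>& var,
                              float eps, size_t weightsPerChannel)
    {
        for (size_t c = 0; c < bias.size(); c++)
        {
            float s = gamma[c] / std::sqrt(var[c] + eps);
            for (size_t i = 0; i < weightsPerChannel; i++)
                weights[c * weightsPerChannel + i] *= s;
            bias[c] = s * (bias[c] - mean[c]) + beta[c];
        }
    }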
@@ -1079,8 +1070,8 @@ struct Net::Impl
                 if (!nextActivLayer.empty() && convLayer->setActivation(nextActivLayer))
                 {
                     //printf("fused convolution (%s) and activation (%s)\n", convLayer->name.c_str(), nextActivLayer->name.c_str());
-                    nextData->skip = true;
+                    nextData->skipFlags[DNN_BACKEND_DEFAULT] = true;
+                    ld.outputBlobs = layers[lpNext.lid].outputBlobs;
                 }
             }
 
             Ptr<PoolingLayer> poolingLayer = ld.layerInstance.dynamicCast<PoolingLayer>();
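For the activation branch the payoff is simpler: once setActivation() succeeds, the convolution applies the nonlinearity while it still holds its output, and the standalone activation layer is skipped. A minimal sketch of the idea (illustrative only, not the ConvolutionLayer internals):

    #include <algorithm>
    #include <cstddef>

    // Illustrative: write activated values in the producing loop itself, so no
    // separate forward() pass (and no second traversal of the blob) is needed.
    static void writeFusedReluOutput(float* out, const float* convAcc, std::size_t n)
    {
        for (std::size_t i = 0; i < n; i++)
            out[i] = std::max(convAcc[i], 0.0f); // ReLU folded into the conv output write
    }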
@@ -1096,10 +1087,43 @@ struct Net::Impl
                 poolingLayer->computeMaxIdx = false;
             }
         }
 
         /*printf("outputs: ");
         for( size_t j = 0; j < outnames.size(); j++ )
             printf("%s ", outnames[j].c_str());
         printf("\n");*/
     }
 
+    void allocateLayers(const std::vector<LayerPin>& blobsToKeep_)
+    {
+        MapIdToLayerData::iterator it;
+        for (it = layers.begin(); it != layers.end(); it++)
+            it->second.flag = 0;
+
+        CV_Assert(!layers[0].outputBlobs.empty());
+        ShapesVec inputShapes;
+        for(int i = 0; i < layers[0].outputBlobs.size(); i++)
+        {
+            CV_Assert(layers[0].outputBlobs[i].total());
+            inputShapes.push_back(shape(layers[0].outputBlobs[i]));
+        }
+
+        LayersShapesMap layersShapes;
+        getLayersShapes(inputShapes, layersShapes);
+
+        blobManager.reset();
+        for (it = layers.begin(); it != layers.end(); ++it)
+        {
+            const LayerData& ld = it->second;
+            blobManager.addReferences(ld.inputBlobsId);
+        }
+
+        for (int i = 0; i < blobsToKeep_.size(); i++)
+        {
+            blobManager.addReference(blobsToKeep_[i]);
+        }
+
+        for (it = layers.begin(); it != layers.end(); it++)
+        {
+            int lid = it->first;
+            allocateLayer(lid, layersShapes);
+        }
+
+        fuseLayers(blobsToKeep_);
+    }
+
     void forwardLayer(LayerData &ld)
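allocateLayers() drives a reference-counting scheme: each layer registers references on its input blobs, and each pin in blobsToKeep_ gets an extra reference so its buffer is never recycled as scratch space for later layers; that is exactly what lets the googlenet test read intermediate blobs back. The BlobManager internals are not part of this diff; a toy sketch of the counting idea, with all names hypothetical:

    #include <map>
    #include <utility>

    typedef std::pair<int, int> ToyPin; // (layer id, output id)

    // Toy model (not OpenCV's BlobManager): a blob may be reused only after its
    // count reaches zero; pins from blobsToKeep_ hold a reference that is never
    // released, so those blobs survive the whole forward pass.
    struct ToyBlobManager
    {
        std::map<ToyPin, int> refs;
        void addReference(const ToyPin& p) { refs[p]++; }
        bool releaseAndCanReuse(const ToyPin& p) { return --refs[p] == 0; }
    };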
@@ -1109,7 +1133,7 @@ struct Net::Impl
         if (preferableBackend == DNN_BACKEND_DEFAULT ||
             !layer->supportBackend(preferableBackend))
         {
-            if (!ld.skip)
+            if (!ld.skipFlags[DNN_BACKEND_DEFAULT])
                 layer->forward(ld.inputBlobs, ld.outputBlobs, ld.internals);
         }
         else if (!ld.skipFlags[preferableBackend])
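With the bool member gone, the default-backend path reads the same skipFlags map the other backends use, keyed by backend id, so a layer fused away for DNN_BACKEND_DEFAULT can still run standalone under another backend. A compact sketch of that pattern (struct and enum values illustrative, not OpenCV's types):

    #include <map>

    enum ToyBackend { TOY_BACKEND_DEFAULT = 0, TOY_BACKEND_OTHER = 1 };

    struct ToyLayerData
    {
        std::map<int, bool> skipFlags; // per-backend fusion decisions
        // operator[] default-constructs to false, so unseen backends run the layer.
        bool shouldRun(int backend) { return !skipFlags[backend]; }
    };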
@@ -1300,20 +1324,6 @@ void Net::connect(String _outPin, String _inPin)
     impl->connect(outPin.lid, outPin.oid, inpPin.lid, inpPin.oid);
 }
 
-//void Net::forward(LayerId toLayer)
-//{
-//    if (!impl->netWasAllocated)
-//    {
-//        impl->setUpNet();
-//    }
-//
-//    if (toLayer.isString() && toLayer.get<String>().empty())
-//        impl->forwardAll();
-//    else
-//        impl->forwardLayer(impl->getLayerData(toLayer));
-//}
-
 Mat Net::forward(const String &outputName)
 {
     String layerName = outputName;
modules/dnn/test/test_googlenet.cpp
@@ -95,8 +95,7 @@ static void launchGoogleNetTest()
         std::replace(filename.begin(), filename.end(), '/', '#');
         Mat ref = blobFromNPY(_tf("googlenet_" + filename + ".npy"));
 
-        // TODO: disabled the check for now, because it conflicts with the layer fusion
-        // normAssert(outs[i], ref, "", 1E-4, 1E-2);
+        normAssert(outs[i], ref, "", 1E-4, 1E-2);
     }
 }
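The re-enabled assertion compares every intermediate blob against a NumPy reference with a mean-absolute (1E-4) and a max-absolute (1E-2) tolerance. normAssert itself is defined in the test utilities, not in this diff; it is commonly written along these lines (a sketch under that assumption):

    #include <opencv2/core.hpp>
    #include <gtest/gtest.h>

    // Sketch of a normAssert-style helper (assumed shape, not copied from this commit):
    // bound the mean absolute difference by l1 and the max absolute difference by lInf.
    static void normAssertSketch(const cv::Mat& ref, const cv::Mat& test,
                                 const char* comment = "",
                                 double l1 = 1e-4, double lInf = 1e-2)
    {
        double normL1 = cv::norm(ref, test, cv::NORM_L1) / ref.total();
        EXPECT_LE(normL1, l1) << comment;
        double normInf = cv::norm(ref, test, cv::NORM_INF);
        EXPECT_LE(normInf, lInf) << comment;
    }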