Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in / Register
Toggle navigation
O
opencv
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Packages
Packages
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
submodule
opencv
Commits
c0d0cf5e
Commit
c0d0cf5e
authored
6 years ago
by
Alexander Alekhin
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #11893 from dkurt:fix_11884
parents
6309e28d
f25a01bb
Show whitespace changes
Inline
Side-by-side
Showing
2 changed files
with
36 additions
and
3 deletions
+36
-3
dnn.cpp
modules/dnn/src/dnn.cpp
+4
-3
test_layers.cpp
modules/dnn/test/test_layers.cpp
+32
-0
No files found.
modules/dnn/src/dnn.cpp
View file @
c0d0cf5e
...
...
@@ -2075,7 +2075,8 @@ Mat Net::forward(const String& outputName)
if
(
layerName
.
empty
())
layerName
=
getLayerNames
().
back
();
impl
->
setUpNet
();
std
::
vector
<
LayerPin
>
pins
(
1
,
impl
->
getPinByAlias
(
layerName
));
impl
->
setUpNet
(
pins
);
impl
->
forwardToLayer
(
impl
->
getLayerData
(
layerName
));
return
impl
->
getBlob
(
layerName
);
...
...
@@ -2085,13 +2086,13 @@ void Net::forward(OutputArrayOfArrays outputBlobs, const String& outputName)
{
CV_TRACE_FUNCTION
();
impl
->
setUpNet
();
String
layerName
=
outputName
;
if
(
layerName
.
empty
())
layerName
=
getLayerNames
().
back
();
std
::
vector
<
LayerPin
>
pins
(
1
,
impl
->
getPinByAlias
(
layerName
));
impl
->
setUpNet
(
pins
);
impl
->
forwardToLayer
(
impl
->
getLayerData
(
layerName
));
LayerPin
pin
=
impl
->
getPinByAlias
(
layerName
);
...
...
This diff is collapsed.
Click to expand it.
modules/dnn/test/test_layers.cpp
View file @
c0d0cf5e
...
...
@@ -1240,4 +1240,36 @@ INSTANTIATE_TEST_CASE_P(/**/, Layer_Test_ShuffleChannel, Combine(
/*group*/
Values
(
1
,
2
,
3
,
6
)
));
// Verify that a ReLU layer is NOT fused into the preceding convolution
// when the convolution's own output is explicitly requested via forward().
// Setup: an identity 1x1 convolution (single weight == 1, no bias) followed
// by ReLU. The input is strictly negative, so if fusion incorrectly happened,
// the requested "testConv" blob would come back clamped to zero instead of
// matching the input.
TEST(Layer_Test_Convolution, relu_fusion)
{
    Net net;
    // Build the identity convolution layer.
    {
        LayerParams convParams;
        convParams.set("kernel_size", 1);
        convParams.set("num_output", 1);
        convParams.set("bias_term", false);
        convParams.type = "Convolution";
        convParams.name = "testConv";

        // Single 1x1x1x1 weight filled with ones -> output == input.
        int wshape[] = {1, 1, 1, 1};
        Mat weights(4, wshape, CV_32F, Scalar(1));
        convParams.blobs.push_back(weights);
        net.addLayerToPrev(convParams.name, convParams.type, convParams);
    }
    // Append the ReLU that would normally be fused into the convolution.
    {
        LayerParams reluParams;
        reluParams.type = "ReLU";
        reluParams.name = "testReLU";
        net.addLayerToPrev(reluParams.name, reluParams.type, reluParams);
    }

    // Strictly negative input: ReLU would zero every element if applied.
    int inpShape[] = {1, 1, 2, 3};
    Mat inp(4, inpShape, CV_32F);
    randu(inp, -1.0, -0.1);

    net.setInput(inp);
    net.setPreferableBackend(DNN_BACKEND_OPENCV);

    // Request the convolution's output directly; fusion must be suppressed.
    Mat convOut = net.forward("testConv");
    normAssert(inp, convOut);
}
}}
// namespace
This diff is collapsed.
Click to expand it.
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment