opencv_contrib commit ecd88160, authored Mar 02, 2017 by Alexander Alekhin
Browse files
Options
Browse Files
Download
Plain Diff
Merge pull request #1017 from arrybn:unpool_layer_fix
Parents: 1aeeee00 fd89b574
Showing 5 changed files with 35 additions and 26 deletions (+35, -26)
Changed files:

  modules/dnn/include/opencv2/dnn/all_layers.hpp     (+1, -1)
  modules/dnn/src/caffe/layer_loaders.cpp            (+4, -3)
  modules/dnn/src/layers/max_unpooling_layer.cpp     (+9, -8)
  modules/dnn/src/layers/max_unpooling_layer.hpp     (+4, -2)
  modules/dnn/src/torch/torch_importer.cpp           (+17, -12)
modules/dnn/include/opencv2/dnn/all_layers.hpp

@@ -412,7 +412,7 @@ namespace dnn
     class CV_EXPORTS_W MaxUnpoolLayer : public Layer
     {
     public:
-        static CV_WRAP Ptr<MaxUnpoolLayer> create(Size unpoolSize);
+        static CV_WRAP Ptr<MaxUnpoolLayer> create(Size poolKernel, Size poolPad, Size poolStride);
     };

     class CV_EXPORTS_W ScaleLayer : public Layer
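The create() factory now takes the geometry of the original pooling operation instead of an explicit output size. A minimal usage sketch of the new signature (the 2x2 kernel, zero padding, stride-2 configuration is a hypothetical example, not taken from this commit):

    #include <opencv2/dnn/all_layers.hpp>

    using namespace cv;
    using namespace cv::dnn;

    // Hypothetical pooling geometry: 2x2 kernel, zero padding, stride 2.
    // The unpooled output size is derived from these values in allocate(),
    // rather than being supplied as an explicit Size.
    Ptr<MaxUnpoolLayer> unpool = MaxUnpoolLayer::create(Size(2, 2),   // poolKernel
                                                        Size(0, 0),   // poolPad
                                                        Size(2, 2));  // poolStride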
modules/dnn/src/caffe/layer_loaders.cpp
View file @
ecd88160
...
...
@@ -336,9 +336,10 @@ Ptr<Layer> createLayerFromCaffe<ChannelsPReLULayer>(LayerParams& params)
template
<>
//MaxUnpoolLayer specialization
Ptr
<
Layer
>
createLayerFromCaffe
<
MaxUnpoolLayer
>
(
LayerParams
&
params
)
{
Size
outSize
(
params
.
get
<
int
>
(
"out_w"
),
params
.
get
<
int
>
(
"out_h"
));
Ptr
<
MaxUnpoolLayer
>
l
=
MaxUnpoolLayer
::
create
(
outSize
);
Size
poolKernel
(
params
.
get
<
int
>
(
"pool_k_w"
),
params
.
get
<
int
>
(
"pool_k_h"
)),
poolPad
(
params
.
get
<
int
>
(
"pool_pad_w"
),
params
.
get
<
int
>
(
"pool_pad_h"
)),
poolStride
(
params
.
get
<
int
>
(
"pool_stride_w"
),
params
.
get
<
int
>
(
"pool_stride_h"
));
Ptr
<
MaxUnpoolLayer
>
l
=
MaxUnpoolLayer
::
create
(
poolKernel
,
poolPad
,
poolStride
);
l
->
setParamsFrom
(
params
);
return
Ptr
<
Layer
>
(
l
);
...
...
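The Caffe loader now reads the pooling kernel, padding, and stride from LayerParams rather than the old "out_w"/"out_h" keys. A sketch of the keys it expects after this change (the values are hypothetical; in normal use they come from the imported model, not hand-set code):

    // Keys consumed by createLayerFromCaffe<MaxUnpoolLayer> after this change.
    LayerParams params;
    params.set("pool_k_w", 2);      params.set("pool_k_h", 2);      // pooling kernel
    params.set("pool_pad_w", 0);    params.set("pool_pad_h", 0);    // pooling padding
    params.set("pool_stride_w", 2); params.set("pool_stride_h", 2); // pooling stride
    Ptr<Layer> unpool = createLayerFromCaffe<MaxUnpoolLayer>(params);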
modules/dnn/src/layers/max_unpooling_layer.cpp

@@ -16,19 +16,20 @@ namespace cv
 namespace dnn
 {

-MaxUnpoolLayerImpl::MaxUnpoolLayerImpl(Size outSize_): outSize(outSize_)
+MaxUnpoolLayerImpl::MaxUnpoolLayerImpl(Size poolKernel_, Size poolPad_, Size poolStride_):
+    poolKernel(poolKernel_),
+    poolPad(poolPad_),
+    poolStride(poolStride_)
 {}

 void MaxUnpoolLayerImpl::allocate(const std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
 {
     CV_Assert(inputs.size() == 2);
     CV_Assert(inputs[0]->total() == inputs[1]->total());

     BlobShape outShape = inputs[0]->shape();
-    outShape[2] = outSize.height;
-    outShape[3] = outSize.width;
+    outShape[2] = (outShape[2] - 1) * poolStride.height + poolKernel.height - 2 * poolPad.height;
+    outShape[3] = (outShape[3] - 1) * poolStride.width + poolKernel.width - 2 * poolPad.width;

     outputs.resize(1);
     outputs[0].create(outShape);

@@ -63,9 +64,9 @@ void MaxUnpoolLayerImpl::forward(std::vector<Blob*> &inputs, std::vector<Blob> &outputs)
     }
 }

-Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(Size unpoolSize)
+Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(Size poolKernel, Size poolPad, Size poolStride)
 {
-    return Ptr<MaxUnpoolLayer>(new MaxUnpoolLayerImpl(unpoolSize));
+    return Ptr<MaxUnpoolLayer>(new MaxUnpoolLayerImpl(poolKernel, poolPad, poolStride));
 }

 }
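allocate() now recovers the unpooled spatial size from the pooling parameters with out = (in - 1) * stride + kernel - 2 * pad per dimension, which is the inverse of the usual pooling output-size formula. A worked example under an assumed configuration (not from this commit): a 16x16 input produced by 2x2 max pooling with stride 2 and no padding unpools to (16 - 1) * 2 + 2 - 2 * 0 = 32 in each dimension, i.e. back to the size of the pooling layer's original input.

    // Sketch of the same computation outside the layer,
    // assuming a 2x2 kernel, stride 2, zero padding:
    cv::Size poolKernel(2, 2), poolPad(0, 0), poolStride(2, 2);
    int inH = 16, inW = 16;
    int outH = (inH - 1) * poolStride.height + poolKernel.height - 2 * poolPad.height;  // 32
    int outW = (inW - 1) * poolStride.width  + poolKernel.width  - 2 * poolPad.width;   // 32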
modules/dnn/src/layers/max_unpooling_layer.hpp
View file @
ecd88160
...
...
@@ -22,14 +22,16 @@ namespace dnn
class
MaxUnpoolLayerImpl
:
public
MaxUnpoolLayer
{
public
:
MaxUnpoolLayerImpl
(
Size
outSiz
e_
);
MaxUnpoolLayerImpl
(
Size
poolKernel_
,
Size
poolPad_
,
Size
poolStrid
e_
);
void
allocate
(
const
std
::
vector
<
Blob
*>
&
inputs
,
std
::
vector
<
Blob
>
&
outputs
);
void
forward
(
std
::
vector
<
Blob
*>
&
inputs
,
std
::
vector
<
Blob
>
&
outputs
);
private
:
Size
outSize
;
Size
poolKernel
;
Size
poolPad
;
Size
poolStride
;
};
}
...
...
modules/dnn/src/torch/torch_importer.cpp

@@ -737,14 +737,8 @@ struct TorchImporter : public ::cv::dnn::Importer
         else if (nnName == "SpatialMaxUnpooling")
         {
             readTorchTable(scalarParams, tensorParams);
-            CV_Assert(scalarParams.has("oheight") && scalarParams.has("owidth"));
             CV_Assert(tensorParams.count("indices"));

-            layerParams.set("out_h", static_cast<int>(scalarParams.get<double>("oheight")));
-            layerParams.set("out_w", static_cast<int>(scalarParams.get<double>("owidth")) / 2);
-
             layerParams.set("indices_blob_id", tensorParams["indices"].first);
             curModule->modules.push_back(newModule);
         }

@@ -908,13 +902,10 @@ struct TorchImporter : public ::cv::dnn::Importer
             return id;
         }
         else if (module->thName == "SpatialMaxUnpooling")
         {
-            String name = generateLayerName("torchMaxUnpooling");
-            int id = net.addLayer(name, "MaxUnpool", module->params);
-            net.connect(prevLayerId, 0, id, 0);
-
             CV_Assert(module->params.has("indices_blob_id"));
             int indicesBlobId = module->params.get<int>("indices_blob_id");
+
+            std::pair<int, Module*> poolingLayer;
+            poolingLayer.first = -1;

             for (int i = 0; i < addedModules.size(); i++)
             {

@@ -922,11 +913,25 @@ struct TorchImporter : public ::cv::dnn::Importer
                 if (addedModules[i].second->params.has("indices_blob_id") &&
                     addedModules[i].second->params.get<int>("indices_blob_id") == indicesBlobId)
                 {
-                    net.connect(addedModules[i].first, 1, id, 1);
+                    poolingLayer = addedModules[i];
                     break;
                 }
             }

+            module->params.set("pool_k_h", poolingLayer.second->params.get<int>("kernel_h"));
+            module->params.set("pool_k_w", poolingLayer.second->params.get<int>("kernel_w"));
+            module->params.set("pool_stride_h", poolingLayer.second->params.get<int>("stride_h"));
+            module->params.set("pool_stride_w", poolingLayer.second->params.get<int>("stride_w"));
+            module->params.set("pool_pad_h", poolingLayer.second->params.get<int>("pad_h"));
+            module->params.set("pool_pad_w", poolingLayer.second->params.get<int>("pad_w"));
+
+            String name = generateLayerName("torchMaxUnpooling");
+            int id = net.addLayer(name, "MaxUnpool", module->params);
+            net.connect(prevLayerId, 0, id, 0);
+
+            CV_Assert(poolingLayer.first != -1);
+            net.connect(poolingLayer.first, 1, id, 1);
+
             return id;
         }
     }
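With this change the Torch importer no longer relies on the oheight/owidth scalars stored in the SpatialMaxUnpooling module. It first locates the pooling module that produced the shared indices blob, copies that module's kernel, stride, and padding into the unpooling layer's params, and only then creates the MaxUnpool layer and wires it into the graph. A condensed sketch of the resulting connections (the identifiers here are placeholders, not the importer's actual variables):

    // prevLayerId    : layer providing the feature map to be unpooled
    // poolingLayerId : the max-pooling layer matched via "indices_blob_id"
    int id = net.addLayer(name, "MaxUnpool", module->params); // params now carry pool_k_*, pool_stride_*, pool_pad_*
    net.connect(prevLayerId, 0, id, 0);     // input 0: data
    net.connect(poolingLayerId, 1, id, 1);  // input 1: max indices from the matched pooling layer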