opencv — Commit 905a9dad, authored Aug 28, 2017 by Dmitry Kurtaev

    Removed LPNormalize layer.

Parent: af8ed9d0

Showing 5 changed files with 65 additions and 211 deletions (+65 -211):
  modules/dnn/include/opencv2/dnn/all_layers.hpp    +28  -8
  modules/dnn/src/init.cpp                           +0   -1
  modules/dnn/src/layers/lp_normalize_layer.cpp      +0   -78
  modules/dnn/src/layers/normalize_bbox_layer.cpp    +36  -123
  modules/dnn/src/torch/torch_importer.cpp           +1   -1
modules/dnn/include/opencv2/dnn/all_layers.hpp

@@ -263,14 +263,6 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<SoftmaxLayer> create(const LayerParams& params);
     };
 
-    class CV_EXPORTS LPNormalizeLayer : public Layer
-    {
-    public:
-        float pnorm, epsilon;
-
-        static Ptr<LPNormalizeLayer> create(const LayerParams& params);
-    };
-
     class CV_EXPORTS InnerProductLayer : public Layer
     {
     public:
@@ -545,9 +537,37 @@ CV__DNN_EXPERIMENTAL_NS_BEGIN
         static Ptr<DetectionOutputLayer> create(const LayerParams& params);
     };
 
+    /**
+     * @brief \f$ L_p \f$ - normalization layer.
+     * @param p Normalization factor. The most common `p = 1` for \f$ L_1 \f$ -
+     *          normalization or `p = 2` for \f$ L_2 \f$ - normalization or a custom one.
+     * @param eps Parameter \f$ \epsilon \f$ to prevent a division by zero.
+     * @param across_spatial If true, normalize an input across all non-batch dimensions.
+     *                       Otherwise normalize every channel separately.
+     *
+     * Across spatial:
+     * @f[
+     * norm = \sqrt[p]{\epsilon + \sum_{x, y, c} |src(x, y, c)|^p } \\
+     * dst(x, y, c) = \frac{ src(x, y, c) }{norm}
+     * @f]
+     *
+     * Channel wise normalization:
+     * @f[
+     * norm(c) = \sqrt[p]{\epsilon + \sum_{x, y} |src(x, y, c)|^p } \\
+     * dst(x, y, c) = \frac{ src(x, y, c) }{norm(c)}
+     * @f]
+     *
+     * Where `x, y` - spatial coordinates, `c` - channel.
+     *
+     * Every sample in the batch is normalized separately. Optionally,
+     * output is scaled by the trained parameters.
+     */
     class NormalizeBBoxLayer : public Layer
     {
     public:
+        float pnorm, epsilon;
+        bool acrossSpatial;
+
         static Ptr<NormalizeBBoxLayer> create(const LayerParams& params);
     };
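As a cross-check of the "across spatial" case documented above, here is a small illustration built from core OpenCV calls. It is not part of the commit; the function name is made up and only the formula from the documentation block is assumed.

    // Illustrative sketch only (not from the commit): the "across spatial" case,
    // norm = (eps + sum |src|^p)^(1/p), dst = src / norm, for one CV_32F sample.
    #include <opencv2/core.hpp>
    #include <cmath>
    using namespace cv;

    Mat lpNormalizeAcrossSpatial(const Mat& src, float p, float eps)
    {
        CV_Assert(src.type() == CV_32F);
        Mat buf, dst;
        cv::pow(abs(src), p, buf);                       // |src(x, y, c)|^p
        float norm = std::pow((float)sum(buf)[0] + eps,  // eps + sum over x, y, c
                              1.0f / p);                 // p-th root
        multiply(src, 1.0f / norm, dst);                 // dst = src / norm
        return dst;
    }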
modules/dnn/src/init.cpp

@@ -92,7 +92,6 @@ void initializeLayerFactory()
     CV_DNN_REGISTER_LAYER_CLASS(InnerProduct,   InnerProductLayer);
     CV_DNN_REGISTER_LAYER_CLASS(Softmax,        SoftmaxLayer);
     CV_DNN_REGISTER_LAYER_CLASS(MVN,            MVNLayer);
-    CV_DNN_REGISTER_LAYER_CLASS(LPNormalize,    LPNormalizeLayer);
     CV_DNN_REGISTER_LAYER_CLASS(ReLU,           ReLULayer);
     CV_DNN_REGISTER_LAYER_CLASS(ReLU6,          ReLU6Layer);
modules/dnn/src/layers/lp_normalize_layer.cpp  (deleted, 100644 → 0)

// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
//
// Copyright (C) 2017, Intel Corporation, all rights reserved.
// Third party copyrights are property of their respective owners.

#include "../precomp.hpp"
#include "layers_common.hpp"
#include <iostream>

namespace cv
{
namespace dnn
{

class LPNormalizeLayerImpl : public LPNormalizeLayer
{
public:
    LPNormalizeLayerImpl(const LayerParams& params)
    {
        setParamsFrom(params);
        pnorm = params.get<float>("p", 2);
        epsilon = params.get<float>("eps", 1e-10f);
        CV_Assert(pnorm > 0);
    }

    bool getMemoryShapes(const std::vector<MatShape> &inputs,
                         const int requiredOutputs,
                         std::vector<MatShape> &outputs,
                         std::vector<MatShape> &internals) const
    {
        Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
        if (pnorm != 1 && pnorm != 2)
        {
            internals.resize(1, inputs[0]);
        }
        return true;
    }

    virtual bool supportBackend(int backendId)
    {
        return backendId == DNN_BACKEND_DEFAULT;
    }

    void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
    {
        CV_TRACE_FUNCTION();
        CV_TRACE_ARG_VALUE(name, "name", name.c_str());

        CV_Assert(inputs[0]->total() == outputs[0].total());
        float norm;
        if (pnorm == 1)
            norm = cv::norm(*inputs[0], NORM_L1);
        else if (pnorm == 2)
            norm = cv::norm(*inputs[0], NORM_L2);
        else
        {
            cv::pow(abs(*inputs[0]), pnorm, internals[0]);
            norm = pow((float)sum(internals[0])[0], 1.0f / pnorm);
        }
        multiply(*inputs[0], 1.0f / (norm + epsilon), outputs[0]);
    }

    int64 getFLOPS(const std::vector<MatShape> &inputs,
                   const std::vector<MatShape> &) const
    {
        int64 flops = 0;
        for (int i = 0; i < inputs.size(); i++)
            flops += 3 * total(inputs[i]);
        return flops;
    }
};

Ptr<LPNormalizeLayer> LPNormalizeLayer::create(const LayerParams& params)
{
    return Ptr<LPNormalizeLayer>(new LPNormalizeLayerImpl(params));
}

}  // namespace dnn
}  // namespace cv
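With this file removed, the Torch importer now maps the same functionality onto NormalizeBBoxLayer (see torch_importer.cpp below), which reads the same "p" and "eps" keys plus "across_spatial". Below is a minimal, hedged sketch of constructing the replacement layer; the helper name and default values are illustrative, not from the commit.

    // Hedged sketch, not part of the commit: build a NormalizeBBoxLayer with the
    // parameters LPNormalizeLayer used to read from LayerParams.
    #include <opencv2/dnn.hpp>
    #include <opencv2/dnn/all_layers.hpp>
    using namespace cv::dnn;

    Ptr<NormalizeBBoxLayer> makeLpNormalize(float p = 2.0f, float eps = 1e-10f)
    {
        LayerParams params;
        params.set("p", p);
        params.set("eps", eps);
        params.set("across_spatial", true);  // normalize over all non-batch dimensions
        return NormalizeBBoxLayer::create(params);
    }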
modules/dnn/src/layers/normalize_bbox_layer.cpp

@@ -43,83 +43,18 @@
 #include "../precomp.hpp"
 #include "layers_common.hpp"
 #include <float.h>
 #include <algorithm>
 
-namespace cv
-{
-namespace dnn
-{
-
-namespace
-{
-    const std::string layerName = "NormalizeBBox";
-}
+namespace cv
+{
+namespace dnn
+{
 
 class NormalizeBBoxLayerImpl : public NormalizeBBoxLayer
 {
-    float _eps;
-    bool _across_spatial;
-    bool _channel_shared;
 public:
-    bool getParameterDict(const LayerParams &params,
-                          const std::string &parameterName,
-                          DictValue& result)
-    {
-        if (!params.has(parameterName))
-        {
-            return false;
-        }
-        result = params.get(parameterName);
-        return true;
-    }
-
-    template <typename T>
-    T getParameter(const LayerParams &params,
-                   const std::string &parameterName,
-                   const size_t &idx = 0,
-                   const bool required = true,
-                   const T& defaultValue = T())
-    {
-        DictValue dictValue;
-        bool success = getParameterDict(params, parameterName, dictValue);
-        if (!success)
-        {
-            if (required)
-            {
-                std::string message = layerName;
-                message += " layer parameter does not contain ";
-                message += parameterName;
-                message += " parameter.";
-                CV_Error(Error::StsBadArg, message);
-            }
-            else
-            {
-                return defaultValue;
-            }
-        }
-        return dictValue.get<T>(idx);
-    }
-
     NormalizeBBoxLayerImpl(const LayerParams& params)
     {
-        _eps = getParameter<float>(params, "eps", 0, false, 1e-10f);
-        _across_spatial = getParameter<bool>(params, "across_spatial");
-        _channel_shared = getParameter<bool>(params, "channel_shared");
         setParamsFrom(params);
-    }
-
-    void checkInputs(const std::vector<Mat*>& inputs)
-    {
-        CV_Assert(inputs.size() > 0);
-        CV_Assert(inputs[0]->dims == 4 && inputs[0]->type() == CV_32F);
-        for (size_t i = 1; i < inputs.size(); i++)
-        {
-            CV_Assert(inputs[i]->dims == 4 && inputs[i]->type() == CV_32F);
-            CV_Assert(inputs[i]->size == inputs[0]->size);
-        }
-        CV_Assert(inputs[0]->dims > 2);
+        pnorm = params.get<float>("p", 2);
+        epsilon = params.get<float>("eps", 1e-10f);
+        acrossSpatial = params.get<bool>("across_spatial", true);
+        CV_Assert(pnorm > 0);
     }
 
     bool getMemoryShapes(const std::vector<MatShape> &inputs,
@@ -127,17 +62,11 @@ public:
                          std::vector<MatShape> &outputs,
                          std::vector<MatShape> &internals) const
     {
-        bool inplace = Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
-        size_t channels = inputs[0][1];
-        size_t rows = inputs[0][2];
-        size_t cols = inputs[0][3];
-        size_t channelSize = rows * cols;
-
-        internals.assign(1, shape(channels, channelSize));
-        internals.push_back(shape(channels, 1));
-        internals.push_back(shape(1, channelSize));
-
-        return inplace;
+        CV_Assert(inputs.size() == 1);
+        Layer::getMemoryShapes(inputs, requiredOutputs, outputs, internals);
+        internals.resize(1, inputs[0]);
+        internals[0][0] = 1;  // Batch size.
+        return true;
     }
 
     void forward(std::vector<Mat*> &inputs, std::vector<Mat> &outputs, std::vector<Mat> &internals)
@@ -145,60 +74,46 @@ public:
         CV_TRACE_FUNCTION();
         CV_TRACE_ARG_VALUE(name, "name", name.c_str());
 
-        checkInputs(inputs);
-
-        Mat& buffer = internals[0], sumChannelMultiplier = internals[1],
-                sumSpatialMultiplier = internals[2];
-
-        sumChannelMultiplier.setTo(1.0);
-        sumSpatialMultiplier.setTo(1.0);
+        CV_Assert(inputs.size() == 1 && outputs.size() == 1);
+        CV_Assert(inputs[0]->total() == outputs[0].total());
 
         const Mat& inp0 = *inputs[0];
+        Mat& buffer = internals[0];
         size_t num = inp0.size[0];
         size_t channels = inp0.size[1];
-        size_t channelSize = inp0.size[2] * inp0.size[3];
-
-        Mat zeroBuffer(channels, channelSize, CV_32F, Scalar(0));
-        Mat absDiff;
-        Mat scale = blobs[0];
-        for (size_t j = 0; j < inputs.size(); j++)
-        {
+        size_t channelSize = inp0.total() / (num * channels);
         for (size_t n = 0; n < num; ++n)
         {
-            Mat src = Mat(channels, channelSize, CV_32F, inputs[j]->ptr<float>(n));
-            Mat dst = Mat(channels, channelSize, CV_32F, outputs[j].ptr<float>(n));
-
-            buffer = src.mul(src);
+            Mat src = Mat(channels, channelSize, CV_32F, (void*)inp0.ptr<float>(n));
+            Mat dst = Mat(channels, channelSize, CV_32F, (void*)outputs[0].ptr<float>(n));
+            cv::pow(abs(src), pnorm, buffer);
 
-            if (_across_spatial)
+            if (acrossSpatial)
             {
-                absdiff(buffer, zeroBuffer, absDiff);
-
                 // add eps to avoid overflow
-                double absSum = sum(absDiff)[0] + _eps;
-
-                float norm = sqrt(absSum);
-                dst = src / norm;
+                float absSum = sum(buffer)[0] + epsilon;
+                float norm = pow(absSum, 1.0f / pnorm);
+                multiply(src, 1.0f / norm, dst);
             }
             else
             {
-                Mat norm(channelSize, 1, buffer.type()); // 1 x channelSize
-
-                // (_channels x channelSize)T * _channels x 1 -> channelSize x 1
-                gemm(buffer, sumChannelMultiplier, 1, norm, 0, norm, GEMM_1_T);
-
-                // compute norm
-                pow(norm, 0.5f, norm);
-
-                // scale the layer
-                // _channels x 1 * (channelSize x 1)T -> _channels x channelSize
-                gemm(sumChannelMultiplier, norm, 1, buffer, 0, buffer, GEMM_2_T);
-
-                dst = src / buffer;
+                Mat norm;
+                reduce(buffer, norm, 0, REDUCE_SUM);
+                norm += epsilon;
+
+                // compute inverted norm to call multiply instead divide
+                cv::pow(norm, -1.0f / pnorm, norm);
+
+                repeat(norm, channels, 1, buffer);
+                multiply(src, buffer, dst);
             }
 
-            // scale the output
-            if (_channel_shared)
+            if (!blobs.empty())
             {
+                // scale the output
+                Mat scale = blobs[0];
+                if (scale.total() == 1)
+                {
                     // _scale: 1 x 1
                     dst *= scale.at<float>(0, 0);
@@ -206,15 +121,13 @@ public:
                 else
                 {
                     // _scale: _channels x 1
-                    // _channels x 1 * 1 x channelSize -> _channels x channelSize
-                    gemm(scale, sumSpatialMultiplier, 1, buffer, 0, buffer);
-
-                    dst = dst.mul(buffer);
-                }
+                    CV_Assert(scale.total() == channels);
+                    repeat(scale, 1, dst.cols, buffer);
+                    multiply(dst, buffer, dst);
                 }
             }
         }
     }
 };
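For reference, the non-across_spatial branch of the new forward() sums |src|^p over the channel dimension for every spatial position and then multiplies by the inverted norm rather than dividing. A self-contained, hedged illustration of that step follows; the function name is made up, and only the OpenCV calls used by the new code are assumed.

    // Hedged illustration, not part of the commit: src is laid out as
    // channels x channelSize (one batch sample), exactly as in forward().
    #include <opencv2/core.hpp>
    using namespace cv;

    void normalizePerPosition(const Mat& src, Mat& dst, float pnorm, float epsilon)
    {
        Mat buffer, norm;
        cv::pow(abs(src), pnorm, buffer);     // |src|^p, channels x channelSize
        reduce(buffer, norm, 0, REDUCE_SUM);  // 1 x channelSize: sum over channels
        norm += epsilon;
        cv::pow(norm, -1.0f / pnorm, norm);   // inverted p-th root of the sum
        repeat(norm, src.rows, 1, buffer);    // broadcast back to channels x channelSize
        multiply(src, buffer, dst);           // dst = src * norm^(-1)
    }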
modules/dnn/src/torch/torch_importer.cpp

@@ -706,7 +706,7 @@ struct TorchImporter : public ::cv::dnn::Importer
             if (scalarParams.has("eps"))
                 layerParams.set("eps", scalarParams.get<float>("eps"));
 
-            newModule->apiType = "LPNormalize";
+            newModule->apiType = "Normalize";
             curModule->modules.push_back(newModule);
         }
         else if (nnName == "Padding")