Commit 2abf45d1 authored by Dmitry Kurtaev's avatar Dmitry Kurtaev Committed by Vadim Pisarevsky

Made some deep learning layers' params public (#1134)

parent 78fabfe6
...@@ -329,6 +329,8 @@ namespace dnn ...@@ -329,6 +329,8 @@ namespace dnn
// Rectified Linear Unit activation layer.
class CV_EXPORTS ReLULayer : public Layer
{
public:
    // Slope used for negative inputs (leaky ReLU); populated in create()
    // from the "negative_slope" LayerParams entry, defaulting to 0.
    float negativeSlope;

    static Ptr<ReLULayer> create(const LayerParams &params);
};
...@@ -365,6 +367,8 @@ namespace dnn ...@@ -365,6 +367,8 @@ namespace dnn
// Element-wise power activation layer.
class CV_EXPORTS PowerLayer : public Layer
{
public:
    // Exposed copies of the layer's parameters; populated in create().
    // Presumably computes f(x) = (shift + scale*x)^power — confirm against
    // the ElementWiseLayer<PowerFunctor> implementation.
    float power, scale, shift;

    static Ptr<PowerLayer> create(const LayerParams &params);
};
...@@ -395,18 +399,27 @@ namespace dnn ...@@ -395,18 +399,27 @@ namespace dnn
// Batch normalization layer.
class CV_EXPORTS BatchNormLayer : public Layer
{
public:
    // Whether learned weight/bias blobs are present; populated in create().
    bool hasWeights, hasBias;
    // Stabilizing constant; presumably added to the variance to avoid
    // division by zero — confirm against the implementation.
    float epsilon;

    static Ptr<BatchNormLayer> create(const LayerParams &params);
};
// Max-unpooling layer (inverse of max pooling, using stored indices).
class CV_EXPORTS MaxUnpoolLayer : public Layer
{
public:
    // Geometry of the pooling operation being inverted; populated in create().
    Size poolKernel;
    Size poolPad;
    Size poolStride;

    static Ptr<MaxUnpoolLayer> create(const LayerParams &params);
};
// Per-channel scaling layer (optionally with an additive bias).
class CV_EXPORTS ScaleLayer : public Layer
{
public:
    // Whether a bias blob is applied after scaling; populated in create().
    bool hasBias;

    static Ptr<ScaleLayer> create(const LayerParams& params);
};
......
...@@ -91,9 +91,6 @@ public: ...@@ -91,9 +91,6 @@ public:
} }
return flops; return flops;
} }
bool hasWeights, hasBias;
float epsilon;
}; };
Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params) Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params)
......
...@@ -47,7 +47,10 @@ namespace dnn ...@@ -47,7 +47,10 @@ namespace dnn
class BlankLayerImpl : public BlankLayer class BlankLayerImpl : public BlankLayer
{ {
public: public:
// Previously ignored its params; now forwards them so the layer keeps
// its name/blobs from LayerParams (via Layer::setParamsFrom).
BlankLayerImpl(const LayerParams& params)
{
    setParamsFrom(params);
}
bool getMemoryShapes(const std::vector<MatShape> &inputs, bool getMemoryShapes(const std::vector<MatShape> &inputs,
const int requiredOutputs, const int requiredOutputs,
......
...@@ -261,6 +261,7 @@ Ptr<ReLULayer> ReLULayer::create(const LayerParams& params) ...@@ -261,6 +261,7 @@ Ptr<ReLULayer> ReLULayer::create(const LayerParams& params)
float negativeSlope = params.get<float>("negative_slope", 0.f); float negativeSlope = params.get<float>("negative_slope", 0.f);
Ptr<ReLULayer> l(new ElementWiseLayer<ReLUFunctor>(true, ReLUFunctor(negativeSlope))); Ptr<ReLULayer> l(new ElementWiseLayer<ReLUFunctor>(true, ReLUFunctor(negativeSlope)));
l->setParamsFrom(params); l->setParamsFrom(params);
l->negativeSlope = negativeSlope;
return l; return l;
} }
...@@ -306,6 +307,9 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params) ...@@ -306,6 +307,9 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
(PowerLayer*)(new ElementWiseLayer<PowerFunctor1>(false, PowerFunctor1(scale, shift))) : (PowerLayer*)(new ElementWiseLayer<PowerFunctor1>(false, PowerFunctor1(scale, shift))) :
(PowerLayer*)(new ElementWiseLayer<PowerFunctor>(true, PowerFunctor(power, scale, shift)))); (PowerLayer*)(new ElementWiseLayer<PowerFunctor>(true, PowerFunctor(power, scale, shift))));
l->setParamsFrom(params); l->setParamsFrom(params);
l->power = power;
l->scale = scale;
l->shift = shift;
return l; return l;
} }
......
...@@ -81,10 +81,6 @@ public: ...@@ -81,10 +81,6 @@ public:
} }
} }
} }
Size poolKernel;
Size poolPad;
Size poolStride;
}; };
Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(const LayerParams& params) Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(const LayerParams& params)
......
...@@ -67,8 +67,6 @@ public: ...@@ -67,8 +67,6 @@ public:
} }
return flops; return flops;
} }
bool hasBias;
}; };
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment