Commit 2abf45d1 authored by Dmitry Kurtaev's avatar Dmitry Kurtaev Committed by Vadim Pisarevsky

Made some deep learning layers' params public (#1134)

parent 78fabfe6
......@@ -329,6 +329,8 @@ namespace dnn
//! Rectified Linear Unit (ReLU) activation layer.
class CV_EXPORTS ReLULayer : public Layer
{
public:
//! Slope applied to negative inputs (leaky ReLU when non-zero).
//! Filled in create() from the "negative_slope" LayerParams entry,
//! which defaults to 0.f there.
float negativeSlope;
//! Factory: builds a ReLULayer configured from @p params.
static Ptr<ReLULayer> create(const LayerParams &params);
};
......@@ -365,6 +367,8 @@ namespace dnn
//! Element-wise power layer. NOTE(review): presumably computes
//! (shift + scale * x) ^ power per element, as in Caffe's Power layer —
//! confirm against the PowerFunctor implementation.
class CV_EXPORTS PowerLayer : public Layer
{
public:
//! Exponent, multiplier and offset; all three are filled in create()
//! from the corresponding LayerParams entries.
float power, scale, shift;
//! Factory: builds a PowerLayer configured from @p params.
static Ptr<PowerLayer> create(const LayerParams &params);
};
......@@ -395,18 +399,27 @@ namespace dnn
//! Batch normalization layer.
class CV_EXPORTS BatchNormLayer : public Layer
{
public:
//! NOTE(review): presumably flag whether trained scale ("weights") and
//! offset ("bias") blobs are present — confirm against the
//! implementation's use of these in getFLOPS()/forward().
bool hasWeights, hasBias;
//! NOTE(review): presumably the small constant added to the variance
//! for numerical stability — confirm against the implementation.
float epsilon;
//! Factory: builds a BatchNormLayer configured from @p params.
static Ptr<BatchNormLayer> create(const LayerParams &params);
};
//! Max-unpooling layer (inverse of max pooling).
class CV_EXPORTS MaxUnpoolLayer : public Layer
{
public:
//! Kernel size, padding and stride. NOTE(review): presumably these must
//! mirror the paired max-pooling layer's geometry so indices map back
//! correctly — confirm against the implementation.
Size poolKernel;
Size poolPad;
Size poolStride;
//! Factory: builds a MaxUnpoolLayer configured from @p params.
static Ptr<MaxUnpoolLayer> create(const LayerParams &params);
};
//! Per-channel scaling layer.
class CV_EXPORTS ScaleLayer : public Layer
{
public:
//! NOTE(review): presumably whether an additive bias blob accompanies
//! the scale blob (as in Caffe's Scale layer) — confirm against the
//! implementation.
bool hasBias;
//! Factory: builds a ScaleLayer configured from @p params.
static Ptr<ScaleLayer> create(const LayerParams& params);
};
......
......@@ -91,9 +91,6 @@ public:
}
return flops;
}
bool hasWeights, hasBias;
float epsilon;
};
Ptr<BatchNormLayer> BatchNormLayer::create(const LayerParams& params)
......
......@@ -47,7 +47,10 @@ namespace dnn
class BlankLayerImpl : public BlankLayer
{
public:
BlankLayerImpl(const LayerParams&) {}
BlankLayerImpl(const LayerParams& params)
{
setParamsFrom(params);
}
bool getMemoryShapes(const std::vector<MatShape> &inputs,
const int requiredOutputs,
......
......@@ -261,6 +261,7 @@ Ptr<ReLULayer> ReLULayer::create(const LayerParams& params)
float negativeSlope = params.get<float>("negative_slope", 0.f);
Ptr<ReLULayer> l(new ElementWiseLayer<ReLUFunctor>(true, ReLUFunctor(negativeSlope)));
l->setParamsFrom(params);
l->negativeSlope = negativeSlope;
return l;
}
......@@ -306,6 +307,9 @@ Ptr<PowerLayer> PowerLayer::create(const LayerParams& params)
(PowerLayer*)(new ElementWiseLayer<PowerFunctor1>(false, PowerFunctor1(scale, shift))) :
(PowerLayer*)(new ElementWiseLayer<PowerFunctor>(true, PowerFunctor(power, scale, shift))));
l->setParamsFrom(params);
l->power = power;
l->scale = scale;
l->shift = shift;
return l;
}
......
......@@ -81,10 +81,6 @@ public:
}
}
}
Size poolKernel;
Size poolPad;
Size poolStride;
};
Ptr<MaxUnpoolLayer> MaxUnpoolLayer::create(const LayerParams& params)
......
......@@ -67,8 +67,6 @@ public:
}
return flops;
}
bool hasBias;
};
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment