Commit 5a6b23e8 authored by Lubov Batanina, committed by Maksim Shabunin

Support for several min and max sizes in PriorBox layer (Merge pull request #15076)

* Support for several min and max sizes in PriorBox layer

* Fix minSize

* Check size

* Modify initInfEngine

* Fix tests

* Fix IE support

* Add priorbox test

* Remove inputs
parent e90438fe
This diff is collapsed.
This diff is collapsed.
......@@ -116,9 +116,9 @@ message PriorBoxParameter {
CENTER_SIZE = 2;
}
// Minimum box size (in pixels). Required!
optional float min_size = 1;
repeated float min_size = 1;
// Maximum box size (in pixels). Required!
optional float max_size = 2;
repeated float max_size = 2;
// Various of aspect ratios. Duplicate ratios will be ignored.
// If none is provided, we use default ratio 1.
repeated float aspect_ratio = 3;
......
......@@ -180,21 +180,20 @@ public:
PriorBoxLayerImpl(const LayerParams &params)
{
setParamsFrom(params);
_minSize = getParameter<float>(params, "min_size", 0, false, 0);
_flip = getParameter<bool>(params, "flip", 0, false, true);
_clip = getParameter<bool>(params, "clip", 0, false, true);
_bboxesNormalized = getParameter<bool>(params, "normalized_bbox", 0, false, true);
_aspectRatios.clear();
getParams("min_size", params, &_minSize);
getAspectRatios(params);
getVariance(params);
_maxSize = -1;
if (params.has("max_size"))
{
_maxSize = params.get("max_size").get<float>(0);
CV_Assert(_maxSize > _minSize);
getParams("max_size", params, &_maxSize);
CV_Assert(_minSize.size() == _maxSize.size());
for (int i = 0; i < _maxSize.size(); i++)
CV_Assert(_minSize[i] < _maxSize[i]);
}
std::vector<float> widths, heights;
......@@ -213,25 +212,28 @@ public:
}
else
{
CV_Assert(_minSize > 0);
_boxWidths.resize(1 + (_maxSize > 0 ? 1 : 0) + _aspectRatios.size());
_boxHeights.resize(_boxWidths.size());
_boxWidths[0] = _boxHeights[0] = _minSize;
int i = 1;
if (_maxSize > 0)
CV_Assert(!_minSize.empty());
for (int i = 0; i < _minSize.size(); ++i)
{
// second prior: aspect_ratio = 1, size = sqrt(min_size * max_size)
_boxWidths[i] = _boxHeights[i] = sqrt(_minSize * _maxSize);
i += 1;
}
float minSize = _minSize[i];
CV_Assert(minSize > 0);
_boxWidths.push_back(minSize);
_boxHeights.push_back(minSize);
// rest of priors
for (size_t r = 0; r < _aspectRatios.size(); ++r)
{
float arSqrt = sqrt(_aspectRatios[r]);
_boxWidths[i + r] = _minSize * arSqrt;
_boxHeights[i + r] = _minSize / arSqrt;
if (_maxSize.size() > 0)
{
float size = sqrt(minSize * _maxSize[i]);
_boxWidths.push_back(size);
_boxHeights.push_back(size);
}
// rest of priors
for (size_t r = 0; r < _aspectRatios.size(); ++r)
{
float arSqrt = sqrt(_aspectRatios[r]);
_boxWidths.push_back(minSize * arSqrt);
_boxHeights.push_back(minSize / arSqrt);
}
}
}
CV_Assert(_boxWidths.size() == _boxHeights.size());
......@@ -271,7 +273,8 @@ public:
virtual bool supportBackend(int backendId) CV_OVERRIDE
{
return backendId == DNN_BACKEND_OPENCV ||
(backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine());
(backendId == DNN_BACKEND_INFERENCE_ENGINE && haveInfEngine() &&
( _explicitSizes || (_minSize.size() == 1 && _maxSize.size() <= 1)));
}
bool getMemoryShapes(const std::vector<MatShape> &inputs,
......@@ -508,10 +511,9 @@ public:
InferenceEngine::Builder::PriorBoxLayer ieLayer(name);
CV_Assert(!_explicitSizes);
ieLayer.setMinSize(_minSize);
if (_maxSize > 0)
ieLayer.setMaxSize(_maxSize);
ieLayer.setMinSize(_minSize[0]);
if (!_maxSize.empty())
ieLayer.setMaxSize(_maxSize[0]);
CV_CheckEQ(_offsetsX.size(), (size_t)1, ""); CV_CheckEQ(_offsetsY.size(), (size_t)1, ""); CV_CheckEQ(_offsetsX[0], _offsetsY[0], "");
ieLayer.setOffset(_offsetsX[0]);
......@@ -558,8 +560,8 @@ public:
}
private:
float _minSize;
float _maxSize;
std::vector<float> _minSize;
std::vector<float> _maxSize;
float _stepX, _stepY;
......
......@@ -742,6 +742,22 @@ TEST_P(Test_Caffe_layers, Average_pooling_kernel_area)
normAssert(out, blobFromImage(ref));
}
TEST_P(Test_Caffe_layers, PriorBox_repeated)
{
    // Regression test for PriorBox layers that declare several min_size /
    // max_size values: run the network and compare against a pre-computed
    // NumPy reference blob.
    Net net = readNet(_tf("prior_box.prototxt"));

    // Two 4-D float inputs: an NCHW "data" image and an auxiliary "shape" blob.
    const int dataDims[] = {1, 3, 10, 10};
    const int shapeDims[] = {1, 2, 3, 4};
    Mat dataBlob(4, dataDims, CV_32F);
    Mat shapeBlob(4, shapeDims, CV_32F);

    // Fill both inputs with uniform random values in [-1, 1); PriorBox output
    // depends only on the input geometry, not on the values themselves.
    randu(dataBlob, -1.0f, 1.0f);
    randu(shapeBlob, -1.0f, 1.0f);

    net.setInput(dataBlob, "data");
    net.setInput(shapeBlob, "shape");

    Mat out = net.forward();
    Mat ref = blobFromNPY(_tf("priorbox_output.npy"));
    normAssert(out, ref, "");
}
// Test PriorBoxLayer in case of no aspect ratios (just squared proposals).
TEST_P(Test_Caffe_layers, PriorBox_squares)
{
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment