Commit bbb14d37 authored by Vadim Pisarevsky

Merge pull request #9003 from dkurt:halide_bug_fixes

parents 2ae84909 121789f7
@@ -234,7 +234,7 @@ struct ReLUFunctor
         Halide::Var x("x"), y("y"), c("c"), n("n");
         if (slope)
         {
-            top(x, y, c, n) = select(input >= 0.0f, input, slope);
+            top(x, y, c, n) = select(input >= 0.0f, input, slope * input);
         }
         else
         {
...
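Note (illustration, not part of the commit): the negative branch of this functor implements leaky ReLU, so it must scale the input by the slope rather than return the slope constant itself. A minimal scalar sketch of the corrected expression, using a hypothetical leakyReLU helper:

    // Scalar reference for the corrected Halide expression
    // select(input >= 0.0f, input, slope * input).
    static inline float leakyReLU(float x, float slope)
    {
        return x >= 0.0f ? x : slope * x;  // the old code returned the constant "slope" here
    }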
@@ -78,7 +78,6 @@ public:
         wpadding.setTo(Scalar::all(0.));
         weightsMat = weightsBuf.colRange(0, vecsize);
         blobs[0].copyTo(weightsMat);
-        blobs[0] = weightsMat;
     }
     if (bias)
...
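Note (context, not part of the commit): weightsBuf.colRange(0, vecsize) returns a view that shares memory with the padded buffer, so the removed assignment blobs[0] = weightsMat; replaced the layer's stored blob with an alias of that buffer, while the remaining code only copies the blob's data into the padded view. A minimal standalone sketch of the view semantics (not the layer code):

    #include <opencv2/core.hpp>

    int main()
    {
        // colRange() returns a non-owning view that shares data with the parent buffer.
        cv::Mat buf(2, 8, CV_32F, cv::Scalar(0));
        cv::Mat view = buf.colRange(0, 5);                 // aliases buf, no copy

        cv::Mat weights = (cv::Mat_<float>(2, 5) << 1, 2, 3, 4, 5,
                                                     6, 7, 8, 9, 10);
        weights.copyTo(view);                              // fills the first 5 columns of buf

        // An extra "weights = view;" here would turn the original matrix into an
        // alias of the padded buffer, analogous to what "blobs[0] = weightsMat;" did.
        return 0;
    }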
@@ -430,7 +430,7 @@ TEST_P(ReLU, Accuracy)
 }
 INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, ReLU, Values(
-    /*negative slope*/ 2.0f, 0.3f, -0.1f
+    /*negative slope*/ 2.0f, 0.3f, -0.1f, 0.0f
 ));
 typedef TestWithParam<tuple<std::string> > NoParamActivation;
@@ -515,12 +515,7 @@ TEST_P(Concat, Accuracy)
     Net net;
-    LayerParams concatParam;
-    concatParam.type = "Concat";
-    concatParam.name = "testLayer";
-    int concatId = net.addLayer(concatParam.name, concatParam.type, concatParam);
-    net.connect(0, 0, concatId, 0);
+    std::vector<int> convLayerIds(numChannels.channels);
     for (int i = 0, n = numChannels.channels; i < n; ++i)
     {
         if (!numChannels[i])
@@ -540,9 +535,18 @@ TEST_P(Concat, Accuracy)
         convParam.name = ss.str();
         convParam.blobs.push_back(weights);
-        int convId = net.addLayer(convParam.name, convParam.type, convParam);
-        net.connect(0, 0, convId, 0);
-        net.connect(convId, 0, concatId, i + 1);
+        convLayerIds[i] = net.addLayer(convParam.name, convParam.type, convParam);
+        net.connect(0, 0, convLayerIds[i], 0);
+    }
+    LayerParams concatParam;
+    concatParam.type = "Concat";
+    concatParam.name = "testLayer";
+    int concatId = net.addLayer(concatParam.name, concatParam.type, concatParam);
+    net.connect(0, 0, concatId, 0);
+    for (int i = 0; i < convLayerIds.size(); ++i)
+    {
+        net.connect(convLayerIds[i], 0, concatId, i + 1);
     }
     Mat input({1, inSize[0], inSize[1], inSize[2]}, CV_32F);
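Note on the Concat reordering above (the Eltwise change below follows the same pattern): the convolution layers are added first and the ids returned by Net::addLayer are collected, and only then is the Concat layer created and each saved id wired to one of its inputs via Net::connect. A condensed, illustrative sketch of that wiring only (the helper name buildConcatNet is hypothetical, and convolution parameters are omitted):

    #include <opencv2/dnn.hpp>
    #include <vector>

    using namespace cv::dnn;

    void buildConcatNet(Net& net, int numInputs)
    {
        std::vector<int> convLayerIds(numInputs);
        for (int i = 0; i < numInputs; ++i)
        {
            LayerParams convParam;
            convParam.type = "Convolution";
            convParam.name = cv::format("conv_%d", i);
            // ... kernel size, num_output and a weights blob would be set here ...
            convLayerIds[i] = net.addLayer(convParam.name, convParam.type, convParam);
            net.connect(0, 0, convLayerIds[i], 0);      // layer id 0 is the network input
        }

        LayerParams concatParam;
        concatParam.type = "Concat";
        concatParam.name = "testLayer";
        int concatId = net.addLayer(concatParam.name, concatParam.type, concatParam);
        net.connect(0, 0, concatId, 0);                 // raw input as the first Concat input
        for (int i = 0; i < (int)convLayerIds.size(); ++i)
            net.connect(convLayerIds[i], 0, concatId, i + 1);
    }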
@@ -578,12 +582,7 @@ TEST_P(Eltwise, Accuracy)
     Net net;
-    LayerParams eltwiseParam;
-    eltwiseParam.type = "Eltwise";
-    eltwiseParam.name = "testLayer";
-    int eltwiseId = net.addLayer(eltwiseParam.name, eltwiseParam.type, eltwiseParam);
-    net.connect(0, 0, eltwiseId, 0);
+    std::vector<int> convLayerIds(numConv);
     for (int i = 0; i < numConv; ++i)
     {
         Mat weights({inSize[0], inSize[0], 1, 1}, CV_32F);
@@ -600,9 +599,18 @@ TEST_P(Eltwise, Accuracy)
         convParam.name = ss.str();
         convParam.blobs.push_back(weights);
-        int convId = net.addLayer(convParam.name, convParam.type, convParam);
-        net.connect(0, 0, convId, 0);
-        net.connect(convId, 0, eltwiseId, i + 1);
+        convLayerIds[i] = net.addLayer(convParam.name, convParam.type, convParam);
+        net.connect(0, 0, convLayerIds[i], 0);
+    }
+    LayerParams eltwiseParam;
+    eltwiseParam.type = "Eltwise";
+    eltwiseParam.name = "testLayer";
+    int eltwiseId = net.addLayer(eltwiseParam.name, eltwiseParam.type, eltwiseParam);
+    net.connect(0, 0, eltwiseId, 0);
+    for (int i = 0; i < numConv; ++i)
+    {
+        net.connect(convLayerIds[i], 0, eltwiseId, i + 1);
     }
     Mat input({1, inSize[0], inSize[1], inSize[2]}, CV_32F);
...