Commit 895e10c3 authored by Maksim Shabunin

dnn: fixed IE support on Windows

parent 2f9b4439
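
The hunks below all fix the same build break: `wrapToInfEngineBlob` takes the blob shape as an `InferenceEngine::SizeVector` (a `std::vector<size_t>`), and brace-initializing a `size_t` element from a non-constant `int` is a narrowing conversion, which MSVC rejects as a hard error. The fix is to declare the count as `size_t` (note that `cv::Mat::total()` returns `size_t` anyway) or to cast at the call site. A minimal, self-contained sketch of the issue; `wrapBlob` here is a hypothetical stand-in for OpenCV's real `wrapToInfEngineBlob` helper:

#include <cstddef>
#include <vector>

// Hypothetical stand-in for wrapToInfEngineBlob: like the real helper,
// it takes the blob shape as a std::vector<size_t> (InferenceEngine::SizeVector).
static void wrapBlob(const std::vector<size_t>& shape) { (void)shape; }

int main(int argc, char**)
{
    // A non-constant int, like Mat::total() stored into an int.
    const int numChannelsInt = argc;

    // Ill-formed: list-initializing a size_t from a non-constant int is
    // a narrowing conversion; MSVC rejects it and the build fails.
    // wrapBlob({numChannelsInt});

    // Fix used in most hunks: declare the count as size_t up front.
    const size_t numChannels = static_cast<size_t>(numChannelsInt);
    wrapBlob({numChannels});

    // Fix used at one-off call sites: cast inside the braced list.
    wrapBlob({(size_t)numChannelsInt});
    return 0;
}
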
@@ -283,7 +283,7 @@ public:
         lp.precision = InferenceEngine::Precision::FP32;
         std::shared_ptr<InferenceEngine::ScaleShiftLayer> ieLayer(new InferenceEngine::ScaleShiftLayer(lp));
-        const int numChannels = weights_.total();
+        const size_t numChannels = weights_.total();
         ieLayer->_weights = wrapToInfEngineBlob(weights_, {numChannels}, InferenceEngine::Layout::C);
         ieLayer->_biases = wrapToInfEngineBlob(bias_, {numChannels}, InferenceEngine::Layout::C);
...
@@ -456,7 +456,7 @@ public:
         if (hasBias() || fusedBias)
         {
             Mat biasesMat({outCn}, CV_32F, &biasvec[0]);
-            ieLayer->_biases = wrapToInfEngineBlob(biasesMat, {outCn}, InferenceEngine::Layout::C);
+            ieLayer->_biases = wrapToInfEngineBlob(biasesMat, {(size_t)outCn}, InferenceEngine::Layout::C);
         }
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
 #endif  // HAVE_INF_ENGINE
...
@@ -427,9 +427,9 @@ public:
         std::shared_ptr<InferenceEngine::FullyConnectedLayer> ieLayer(new InferenceEngine::FullyConnectedLayer(lp));
         ieLayer->_out_num = blobs[0].size[0];
-        ieLayer->_weights = wrapToInfEngineBlob(blobs[0], {blobs[0].size[0], blobs[0].size[1], 1, 1}, InferenceEngine::Layout::OIHW);
+        ieLayer->_weights = wrapToInfEngineBlob(blobs[0], {(size_t)blobs[0].size[0], (size_t)blobs[0].size[1], 1, 1}, InferenceEngine::Layout::OIHW);
         if (blobs.size() > 1)
-            ieLayer->_biases = wrapToInfEngineBlob(blobs[1], {ieLayer->_out_num}, InferenceEngine::Layout::C);
+            ieLayer->_biases = wrapToInfEngineBlob(blobs[1], {(size_t)ieLayer->_out_num}, InferenceEngine::Layout::C);
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
 #endif  // HAVE_INF_ENGINE
         return Ptr<BackendNode>();
...
@@ -254,7 +254,7 @@ public:
         ieLayer->params["across_spatial"] = acrossSpatial ? "1" : "0";
         ieLayer->params["channel_shared"] = blobs[0].total() == 1 ? "1" : "0";
-        const int numChannels = blobs[0].total();
+        const size_t numChannels = blobs[0].total();
         ieLayer->blobs["weights"] = wrapToInfEngineBlob(blobs[0], {numChannels}, InferenceEngine::Layout::C);
         return Ptr<BackendNode>(new InfEngineBackendNode(ieLayer));
 #endif  // HAVE_INF_ENGINE
...
@@ -178,7 +178,7 @@ public:
         lp.precision = InferenceEngine::Precision::FP32;
         std::shared_ptr<InferenceEngine::ScaleShiftLayer> ieLayer(new InferenceEngine::ScaleShiftLayer(lp));
-        const int numChannels = blobs[0].total();
+        const size_t numChannels = blobs[0].total();
         ieLayer->_weights = wrapToInfEngineBlob(blobs[0], {numChannels}, InferenceEngine::Layout::C);
         if (hasBias)
             ieLayer->_biases = wrapToInfEngineBlob(blobs[1], {numChannels}, InferenceEngine::Layout::C);
...