Commit 05f0cb16 authored by Alexander Alekhin

Merge pull request #12022 from mshabunin:fix-ie-build

parents 7198b9e4 a4060e15
op_inf_engine.cpp

@@ -180,10 +180,15 @@ InferenceEngine::Precision InfEngineBackendNet::getPrecision() noexcept
     return precision;
 }
 
+InferenceEngine::Precision InfEngineBackendNet::getPrecision() const noexcept
+{
+    return precision;
+}
+
 // Assume that outputs of network is unconnected blobs.
 void InfEngineBackendNet::getOutputsInfo(InferenceEngine::OutputsDataMap &outputs_) noexcept
 {
-    outputs_ = outputs;
+    const_cast<const InfEngineBackendNet*>(this)->getOutputsInfo(outputs_);
 }
 
 void InfEngineBackendNet::getOutputsInfo(InferenceEngine::OutputsDataMap &outputs_) const noexcept
 {
@@ -193,7 +198,7 @@ void InfEngineBackendNet::getOutputsInfo(InferenceEngine::OutputsDataMap &output
 // Returns input references that aren't connected to internal outputs.
 void InfEngineBackendNet::getInputsInfo(InferenceEngine::InputsDataMap &inputs_) noexcept
 {
-    inputs_ = inputs;
+    const_cast<const InfEngineBackendNet*>(this)->getInputsInfo(inputs_);
 }
 
 // Returns input references that aren't connected to internal outputs.
@@ -204,7 +209,11 @@ void InfEngineBackendNet::getInputsInfo(InferenceEngine::InputsDataMap &inputs_)
 InferenceEngine::InputInfo::Ptr InfEngineBackendNet::getInput(const std::string &inputName) noexcept
 {
-    getInputsInfo(inputs);
+    return const_cast<const InfEngineBackendNet*>(this)->getInput(inputName);
+}
+
+InferenceEngine::InputInfo::Ptr InfEngineBackendNet::getInput(const std::string &inputName) const noexcept
+{
     const auto& it = inputs.find(inputName);
     CV_Assert(it != inputs.end());
     return it->second;
@@ -218,7 +227,17 @@ void InfEngineBackendNet::getName(char*, size_t) const noexcept
 {
 }
 
+const std::string& InfEngineBackendNet::getName() const noexcept
+{
+    return name;
+}
+
 size_t InfEngineBackendNet::layerCount() noexcept
+{
+    return const_cast<const InfEngineBackendNet*>(this)->layerCount();
+}
+
+size_t InfEngineBackendNet::layerCount() const noexcept
 {
     return layers.size();
 }
@@ -258,6 +277,13 @@ InfEngineBackendNet::addOutput(const std::string &layerName, size_t outputIndex,
 InferenceEngine::StatusCode
 InfEngineBackendNet::getLayerByName(const char *layerName, InferenceEngine::CNNLayerPtr &out,
                                     InferenceEngine::ResponseDesc *resp) noexcept
+{
+    return const_cast<const InfEngineBackendNet*>(this)->getLayerByName(layerName, out, resp);
+}
+
+InferenceEngine::StatusCode InfEngineBackendNet::getLayerByName(const char *layerName,
+                                                                InferenceEngine::CNNLayerPtr &out,
+                                                                InferenceEngine::ResponseDesc *resp) const noexcept
 {
     for (auto& l : layers)
     {
@@ -285,7 +311,12 @@ InferenceEngine::TargetDevice InfEngineBackendNet::getTargetDevice() noexcept
     return targetDevice;
 }
 
-InferenceEngine::StatusCode InfEngineBackendNet::setBatchSize(const size_t size) noexcept
+InferenceEngine::TargetDevice InfEngineBackendNet::getTargetDevice() const noexcept
+{
+    return targetDevice;
+}
+
+InferenceEngine::StatusCode InfEngineBackendNet::setBatchSize(const size_t) noexcept
 {
     CV_Error(Error::StsNotImplemented, "");
     return InferenceEngine::StatusCode::OK;
@@ -374,7 +405,9 @@ void InfEngineBackendNet::init(int targetId)
     switch (targetId)
     {
     case DNN_TARGET_CPU: setTargetDevice(InferenceEngine::TargetDevice::eCPU); break;
-    case DNN_TARGET_OPENCL_FP16: setPrecision(InferenceEngine::Precision::FP16);  // Fallback to the next.
+    case DNN_TARGET_OPENCL_FP16:
+        setPrecision(InferenceEngine::Precision::FP16);
+        /* Falls through. */
     case DNN_TARGET_OPENCL: setTargetDevice(InferenceEngine::TargetDevice::eGPU); break;
     case DNN_TARGET_MYRIAD:
     {
...
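The .cpp changes above all follow one pattern: each non-const method required by the older Inference Engine interface becomes a thin wrapper that adds const to `this` via `const_cast` and forwards to a new const overload, so the actual body lives in one place. A minimal standalone sketch of that pattern follows; `DemoNet`, `layers`, and `layerCount` are illustrative names for this sketch, not the OpenCV definitions.

#include <cstddef>
#include <map>
#include <string>

// Illustrative stand-in for a backend network class with const and
// non-const variants of the same accessor.
class DemoNet
{
public:
    // Non-const overload kept for the older interface: it only adds const
    // to 'this' and forwards to the const implementation.
    std::size_t layerCount() noexcept
    {
        return const_cast<const DemoNet*>(this)->layerCount();
    }

    // Const overload required by the newer interface; holds the real body.
    std::size_t layerCount() const noexcept
    {
        return layers.size();
    }

private:
    std::map<std::string, int> layers;  // placeholder for the real layer list
};

int main()
{
    DemoNet net;
    return static_cast<int>(net.layerCount());  // goes through the non-const wrapper
}

Keeping the logic only in the const overload avoids duplicating the body while both signatures stay available to callers.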
op_inf_engine.hpp

@@ -8,6 +8,8 @@
 #ifndef __OPENCV_DNN_OP_INF_ENGINE_HPP__
 #define __OPENCV_DNN_OP_INF_ENGINE_HPP__
 
+#include "opencv2/core/cvdef.h"
+
 #ifdef HAVE_INF_ENGINE
 #if defined(__GNUC__) && __GNUC__ >= 5
 //#pragma GCC diagnostic push
@@ -34,7 +36,9 @@ public:
     void setPrecision(InferenceEngine::Precision p) noexcept;
 
-    virtual InferenceEngine::Precision getPrecision() noexcept CV_OVERRIDE;
+    virtual InferenceEngine::Precision getPrecision() noexcept;
+    virtual InferenceEngine::Precision getPrecision() const noexcept;
 
     virtual void getOutputsInfo(InferenceEngine::OutputsDataMap &out) noexcept /*CV_OVERRIDE*/;
@@ -44,13 +48,19 @@ public:
     virtual void getInputsInfo(InferenceEngine::InputsDataMap &inputs) const noexcept /*CV_OVERRIDE*/;
 
-    virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) noexcept CV_OVERRIDE;
+    virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) noexcept;
+    virtual InferenceEngine::InputInfo::Ptr getInput(const std::string &inputName) const noexcept;
 
     virtual void getName(char *pName, size_t len) noexcept;
     virtual void getName(char *pName, size_t len) const noexcept;
 
-    virtual size_t layerCount() noexcept CV_OVERRIDE;
+    virtual const std::string& getName() const noexcept;
+
+    virtual size_t layerCount() noexcept;
+    virtual size_t layerCount() const noexcept;
 
     virtual InferenceEngine::DataPtr& getData(const char *dname) noexcept CV_OVERRIDE;
@@ -58,15 +68,21 @@ public:
     virtual InferenceEngine::StatusCode addOutput(const std::string &layerName,
                                                   size_t outputIndex = 0,
-                                                  InferenceEngine::ResponseDesc *resp = nullptr) noexcept CV_OVERRIDE;
+                                                  InferenceEngine::ResponseDesc *resp = nullptr) noexcept;
 
     virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
                                                        InferenceEngine::CNNLayerPtr &out,
-                                                       InferenceEngine::ResponseDesc *resp) noexcept CV_OVERRIDE;
+                                                       InferenceEngine::ResponseDesc *resp) noexcept;
+    virtual InferenceEngine::StatusCode getLayerByName(const char *layerName,
+                                                       InferenceEngine::CNNLayerPtr &out,
+                                                       InferenceEngine::ResponseDesc *resp) const noexcept;
 
     virtual void setTargetDevice(InferenceEngine::TargetDevice device) noexcept CV_OVERRIDE;
 
-    virtual InferenceEngine::TargetDevice getTargetDevice() noexcept CV_OVERRIDE;
+    virtual InferenceEngine::TargetDevice getTargetDevice() noexcept;
+    virtual InferenceEngine::TargetDevice getTargetDevice() const noexcept;
 
     virtual InferenceEngine::StatusCode setBatchSize(const size_t size) noexcept CV_OVERRIDE;
@@ -94,6 +110,8 @@ private:
     InferenceEngine::ExecutableNetwork netExec;
     InferenceEngine::InferRequest infRequest;
 
+    std::string name;
+
     void initPlugin(InferenceEngine::ICNNNetwork& net);
 };
...
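The header changes mirror the .cpp side: const counterparts are declared next to the existing non-const methods, a `name` member backs the new getName() overload, and CV_OVERRIDE is dropped where the base interface's constness differs between Inference Engine releases. A rough sketch of why the explicit override check has to go, under the assumption that the base interface changed the method to const between releases; the interface names below are made up for illustration.

#include <cstddef>

// Hypothetical stand-ins for two releases of the same network interface
// in which the virtual method changed constness.
struct ICNNNetworkOld
{
    virtual std::size_t layerCount() noexcept = 0;
    virtual ~ICNNNetworkOld() = default;
};

struct ICNNNetworkNew
{
    virtual std::size_t layerCount() const noexcept = 0;
    virtual ~ICNNNetworkNew() = default;
};

// An implementation meant to build against either base declares both
// overloads and omits 'override' (CV_OVERRIDE): marking one overload
// 'override' would fail to compile against the base whose signature does
// not match, which is the kind of breakage this commit works around.
struct DemoBackendNet : ICNNNetworkNew   // swapping the base to ICNNNetworkOld still builds
{
    virtual std::size_t layerCount() noexcept
    {
        return const_cast<const DemoBackendNet*>(this)->layerCount();
    }
    virtual std::size_t layerCount() const noexcept
    {
        return 0;  // placeholder body
    }
};

int main()
{
    DemoBackendNet net;
    return static_cast<int>(net.layerCount());
}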