Commit c300070b authored by Alexander Alekhin

Merge pull request #14241 from alalek:openvino_2019R1

parents 4bb6edf1 cafa0103
@@ -87,9 +87,9 @@ endif()
 if(INF_ENGINE_TARGET)
   if(NOT INF_ENGINE_RELEASE)
-    message(WARNING "InferenceEngine version have not been set, 2018R5 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
+    message(WARNING "InferenceEngine version have not been set, 2019R1 will be used by default. Set INF_ENGINE_RELEASE variable if you experience build errors.")
   endif()
-  set(INF_ENGINE_RELEASE "2018050000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2018R2.0.2 -> 2018020002)")
+  set(INF_ENGINE_RELEASE "2019010000" CACHE STRING "Force IE version, should be in form YYYYAABBCC (e.g. 2018R2.0.2 -> 2018020002)")
   set_target_properties(${INF_ENGINE_TARGET} PROPERTIES
     INTERFACE_COMPILE_DEFINITIONS "HAVE_INF_ENGINE=1;INF_ENGINE_RELEASE=${INF_ENGINE_RELEASE}"
   )
...
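For reference, the INF_ENGINE_RELEASE value set in the hunk above packs an OpenVINO release into a single integer of the form YYYYAABBCC (year, major release, minor, patch); the same value is forwarded to the compiler through INTERFACE_COMPILE_DEFINITIONS and drives the preprocessor guards changed throughout this patch. A minimal standalone C++ sketch (illustrative only, not part of the patch) that decodes such a value:

// Illustrative decoder for the YYYYAABBCC encoding used by INF_ENGINE_RELEASE
// (e.g. 2018R2.0.2 -> 2018020002, 2019R1 -> 2019010000).
#include <cstdio>

int main()
{
    const long release = 2019010000;              // new default set by this patch
    const long year    = release / 1000000;       // YYYY -> 2019
    const long major   = (release / 10000) % 100; // AA   -> 1 ("R1")
    const long minor   = (release / 100) % 100;   // BB   -> 0
    const long patch   = release % 100;           // CC   -> 0
    std::printf("%ldR%ld.%ld.%ld\n", year, major, minor, patch); // prints "2019R1.0.0"
    return 0;
}

Because INF_ENGINE_RELEASE is a CMake cache variable, configuring with -DINF_ENGINE_RELEASE=2018050000 keeps the 2018R5 code paths selected by the guards below.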
@@ -222,6 +222,10 @@ PERF_TEST_P_(DNNTestNetwork, FastNeuralStyle_eccv16)
 PERF_TEST_P_(DNNTestNetwork, Inception_v2_Faster_RCNN)
 {
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2019010000)
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+        throw SkipTestException("Test is disabled in OpenVINO 2019R1");
+#endif
     if (backend == DNN_BACKEND_HALIDE ||
         (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU) ||
         (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
...
@@ -1636,7 +1636,7 @@ struct Net::Impl
                  preferableTarget == DNN_TARGET_MYRIAD ||
                  preferableTarget == DNN_TARGET_FPGA) && !fused)
             {
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
                 for (const std::string& name : {"weights", "biases"})
                 {
                     auto it = ieNode->layer.getParameters().find(name);
...
@@ -290,7 +290,7 @@ public:
         weights = wrapToInfEngineBlob(blobs[0], {(size_t)numChannels}, InferenceEngine::Layout::C);
         l.getParameters()["channel_shared"] = blobs[0].total() == 1;
     }
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     l.getParameters()["weights"] = weights;
 #else
     l.addConstantData("weights", weights);
...
@@ -130,7 +130,7 @@ void InfEngineBackendNet::init(int targetId)
     for (int id : unconnectedLayersIds)
     {
         InferenceEngine::Builder::OutputLayer outLayer("myconv1");
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
         // Inference Engine determines network precision by ports.
         InferenceEngine::Precision p = (targetId == DNN_TARGET_MYRIAD ||
                                         targetId == DNN_TARGET_OPENCL_FP16) ?
@@ -188,7 +188,7 @@ void InfEngineBackendNet::init(int targetId)
 void InfEngineBackendNet::addLayer(InferenceEngine::Builder::Layer& layer)
 {
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     // Add weights to network and connect them after input blobs.
     std::map<std::string, InferenceEngine::Parameter>& params = layer.getParameters();
     std::vector<int> blobsIds;
@@ -229,7 +229,7 @@ void InfEngineBackendNet::addLayer(InferenceEngine::Builder::Layer& layer)
     CV_Assert(layers.insert({layerName, id}).second);
     unconnectedLayersIds.insert(id);
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     // By default, all the weights are connected to last ports ids.
     for (int i = 0; i < blobsIds.size(); ++i)
     {
@@ -903,7 +903,7 @@ InferenceEngine::Blob::Ptr convertFp16(const InferenceEngine::Blob::Ptr& blob)
 void addConstantData(const std::string& name, InferenceEngine::Blob::Ptr data,
                      InferenceEngine::Builder::Layer& l)
 {
-#if INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)
+#if INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)
     l.getParameters()[name] = data;
 #else
     l.addConstantData(name, data);
...
@@ -27,10 +27,11 @@
 #define INF_ENGINE_RELEASE_2018R3 2018030000
 #define INF_ENGINE_RELEASE_2018R4 2018040000
 #define INF_ENGINE_RELEASE_2018R5 2018050000
+#define INF_ENGINE_RELEASE_2019R1 2019010000
 #ifndef INF_ENGINE_RELEASE
-#warning("IE version have not been provided via command-line. Using 2018R5 by default")
-#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2018R5
+#warning("IE version have not been provided via command-line. Using 2019R1 by default")
+#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2019R1
 #endif
 #define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
...
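The INF_ENGINE_VER_MAJOR_GT macro above compares only the major part of the release value (value / 10000). This patch rewrites the version guards from INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5) to INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1): since no release sits between 2018R5 and 2019R1, both conditions select the same code path, but the _GE form names the release that actually introduces the new behaviour. A minimal sketch of that equivalence, assuming the _GE macro (not shown in this hunk) follows the same "value / 10000" pattern as _GT:

// Assumption: INF_ENGINE_VER_MAJOR_GE mirrors the _GT macro shown above.
#include <cstdio>

#define INF_ENGINE_RELEASE_2018R5 2018050000
#define INF_ENGINE_RELEASE_2019R1 2019010000
#define INF_ENGINE_RELEASE INF_ENGINE_RELEASE_2019R1  // default after this patch

#define INF_ENGINE_VER_MAJOR_GT(ver) (((INF_ENGINE_RELEASE) / 10000) > ((ver) / 10000))
#define INF_ENGINE_VER_MAJOR_GE(ver) (((INF_ENGINE_RELEASE) / 10000) >= ((ver) / 10000))

int main()
{
    // 2019010000 / 10000 = 201901 and 2018050000 / 10000 = 201805,
    // so "newer than 2018R5" and "at least 2019R1" agree for this release.
    std::printf("GT(2018R5) = %d\n", INF_ENGINE_VER_MAJOR_GT(INF_ENGINE_RELEASE_2018R5)); // 1
    std::printf("GE(2019R1) = %d\n", INF_ENGINE_VER_MAJOR_GE(INF_ENGINE_RELEASE_2019R1)); // 1
    return 0;
}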
@@ -289,7 +289,7 @@ TEST_P(DNNTestNetwork, OpenFace)
 #if INF_ENGINE_VER_MAJOR_EQ(2018050000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is disabled for Myriad targets");
-#elif INF_ENGINE_VER_MAJOR_GT(2018050000)
+#elif INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
             && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
...
@@ -267,7 +267,7 @@ public:
 TEST_P(Test_Darknet_nets, YoloVoc)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
         throw SkipTestException("Test is disabled");
 #endif
...
@@ -169,7 +169,7 @@ TEST_P(Deconvolution, Accuracy)
         throw SkipTestException("Test is disabled for OpenVINO 2018R4");
 #endif
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
             && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
             && inChannels == 6 && outChannels == 4 && group == 1
@@ -351,7 +351,7 @@ TEST_P(MaxPooling, Accuracy)
         throw SkipTestException("Problems with output dimension in OpenVINO 2018R5");
 #endif
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
             && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
             && (stride == Size(1, 1) || stride == Size(2, 2))
@@ -561,7 +561,7 @@ TEST_P(ReLU, Accuracy)
     float negativeSlope = get<0>(GetParam());
     Backend backendId = get<0>(get<1>(GetParam()));
     Target targetId = get<1>(get<1>(GetParam()));
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE
             && negativeSlope < 0
     )
@@ -589,7 +589,7 @@ TEST_P(NoParamActivation, Accuracy)
     LayerParams lp;
     lp.type = get<0>(GetParam());
     lp.name = "testLayer";
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE
             && lp.type == "AbsVal"
     )
@@ -688,7 +688,7 @@ TEST_P(Concat, Accuracy)
         throw SkipTestException("Test is disabled for Myriad target");  // crash
 #endif
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU
             && inSize == Vec3i(1, 4, 5) && numChannels == Vec3i(1, 6, 2)
     )
@@ -769,7 +769,7 @@ TEST_P(Eltwise, Accuracy)
         throw SkipTestException("Test is disabled for Myriad target");
 #endif
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && numConv > 1)
         throw SkipTestException("Test is disabled for DLIE backend");
 #endif
...
@@ -21,9 +21,18 @@ static void initDLDTDataPath()
     static bool initialized = false;
     if (!initialized)
     {
+#if INF_ENGINE_RELEASE <= 2018050000
         const char* dldtTestDataPath = getenv("INTEL_CVSDK_DIR");
         if (dldtTestDataPath)
-            cvtest::addDataSearchPath(cv::utils::fs::join(dldtTestDataPath, "deployment_tools"));
+            cvtest::addDataSearchPath(dldtTestDataPath);
+#else
+        const char* omzDataPath = getenv("OPENCV_OPEN_MODEL_ZOO_DATA_PATH");
+        if (omzDataPath)
+            cvtest::addDataSearchPath(omzDataPath);
+        const char* dnnDataPath = getenv("OPENCV_DNN_TEST_DATA_PATH");
+        if (dnnDataPath)
+            cvtest::addDataSearchPath(std::string(dnnDataPath) + "/omz_intel_models");
+#endif
         initialized = true;
     }
 #endif
@@ -33,6 +42,76 @@ using namespace cv;
 using namespace cv::dnn;
 using namespace InferenceEngine;
+struct OpenVINOModelTestCaseInfo
+{
+    const char* modelPathFP32;
+    const char* modelPathFP16;
+};
+static const std::map<std::string, OpenVINOModelTestCaseInfo>& getOpenVINOTestModels()
+{
+    static std::map<std::string, OpenVINOModelTestCaseInfo> g_models {
+#if INF_ENGINE_RELEASE <= 2018050000
+        { "age-gender-recognition-retail-0013", {
+            "deployment_tools/intel_models/age-gender-recognition-retail-0013/FP32/age-gender-recognition-retail-0013",
+            "deployment_tools/intel_models/age-gender-recognition-retail-0013/FP16/age-gender-recognition-retail-0013"
+        }},
+        { "face-person-detection-retail-0002", {
+            "deployment_tools/intel_models/face-person-detection-retail-0002/FP32/face-person-detection-retail-0002",
+            "deployment_tools/intel_models/face-person-detection-retail-0002/FP16/face-person-detection-retail-0002"
+        }},
+        { "head-pose-estimation-adas-0001", {
+            "deployment_tools/intel_models/head-pose-estimation-adas-0001/FP32/head-pose-estimation-adas-0001",
+            "deployment_tools/intel_models/head-pose-estimation-adas-0001/FP16/head-pose-estimation-adas-0001"
+        }},
+        { "person-detection-retail-0002", {
+            "deployment_tools/intel_models/person-detection-retail-0002/FP32/person-detection-retail-0002",
+            "deployment_tools/intel_models/person-detection-retail-0002/FP16/person-detection-retail-0002"
+        }},
+        { "vehicle-detection-adas-0002", {
+            "deployment_tools/intel_models/vehicle-detection-adas-0002/FP32/vehicle-detection-adas-0002",
+            "deployment_tools/intel_models/vehicle-detection-adas-0002/FP16/vehicle-detection-adas-0002"
+        }}
+#else
+        // layout is defined by open_model_zoo/model_downloader
+        // Downloaded using these parameters for Open Model Zoo downloader (2019R1):
+        // ./downloader.py -o ${OPENCV_DNN_TEST_DATA_PATH}/omz_intel_models --cache_dir ${OPENCV_DNN_TEST_DATA_PATH}/.omz_cache/ \
+        //     --name face-person-detection-retail-0002,face-person-detection-retail-0002-fp16,age-gender-recognition-retail-0013,age-gender-recognition-retail-0013-fp16,head-pose-estimation-adas-0001,head-pose-estimation-adas-0001-fp16,person-detection-retail-0002,person-detection-retail-0002-fp16,vehicle-detection-adas-0002,vehicle-detection-adas-0002-fp16
+        { "age-gender-recognition-retail-0013", {
+            "Retail/object_attributes/age_gender/dldt/age-gender-recognition-retail-0013",
+            "Retail/object_attributes/age_gender/dldt/age-gender-recognition-retail-0013-fp16"
+        }},
+        { "face-person-detection-retail-0002", {
+            "Retail/object_detection/face_pedestrian/rmnet-ssssd-2heads/0002/dldt/face-person-detection-retail-0002",
+            "Retail/object_detection/face_pedestrian/rmnet-ssssd-2heads/0002/dldt/face-person-detection-retail-0002-fp16"
+        }},
+        { "head-pose-estimation-adas-0001", {
+            "Transportation/object_attributes/headpose/vanilla_cnn/dldt/head-pose-estimation-adas-0001",
+            "Transportation/object_attributes/headpose/vanilla_cnn/dldt/head-pose-estimation-adas-0001-fp16"
+        }},
+        { "person-detection-retail-0002", {
+            "Retail/object_detection/pedestrian/hypernet-rfcn/0026/dldt/person-detection-retail-0002",
+            "Retail/object_detection/pedestrian/hypernet-rfcn/0026/dldt/person-detection-retail-0002-fp16"
+        }},
+        { "vehicle-detection-adas-0002", {
+            "Transportation/object_detection/vehicle/mobilenet-reduced-ssd/dldt/vehicle-detection-adas-0002",
+            "Transportation/object_detection/vehicle/mobilenet-reduced-ssd/dldt/vehicle-detection-adas-0002-fp16"
+        }}
+#endif
+    };
+    return g_models;
+}
+static const std::vector<std::string> getOpenVINOTestModelsList()
+{
+    std::vector<std::string> result;
+    const std::map<std::string, OpenVINOModelTestCaseInfo>& models = getOpenVINOTestModels();
+    for (const auto& it : models)
+        result.push_back(it.first);
+    return result;
+}
 static inline void genData(const std::vector<size_t>& dims, Mat& m, Blob::Ptr& dataPtr)
 {
     std::vector<int> reversedDims(dims.begin(), dims.end());
@@ -172,25 +251,23 @@ void runCV(Target target, const std::string& xmlPath, const std::string& binPath
     }
 }
-typedef TestWithParam<tuple<Target, String> > DNNTestOpenVINO;
+typedef TestWithParam<tuple<Target, std::string> > DNNTestOpenVINO;
 TEST_P(DNNTestOpenVINO, models)
 {
+    initDLDTDataPath();
     Target target = (dnn::Target)(int)get<0>(GetParam());
     std::string modelName = get<1>(GetParam());
-    std::string precision = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD) ? "FP16" : "FP32";
-    std::string prefix;
-#ifdef INF_ENGINE_RELEASE
-#if INF_ENGINE_RELEASE <= 2018050000
-    prefix = utils::fs::join("intel_models",
-             utils::fs::join(modelName,
-             utils::fs::join(precision, modelName)));
-#endif
-#endif
-    initDLDTDataPath();
-    std::string xmlPath = findDataFile(prefix + ".xml");
-    std::string binPath = findDataFile(prefix + ".bin");
+    bool isFP16 = (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_MYRIAD);
+    const std::map<std::string, OpenVINOModelTestCaseInfo>& models = getOpenVINOTestModels();
+    const auto it = models.find(modelName);
+    ASSERT_TRUE(it != models.end()) << modelName;
+    OpenVINOModelTestCaseInfo modelInfo = it->second;
+    std::string modelPath = isFP16 ? modelInfo.modelPathFP16 : modelInfo.modelPathFP32;
+    std::string xmlPath = findDataFile(modelPath + ".xml");
+    std::string binPath = findDataFile(modelPath + ".bin");
     std::map<std::string, cv::Mat> inputsMap;
     std::map<std::string, cv::Mat> ieOutputsMap, cvOutputsMap;
@@ -210,16 +287,12 @@ TEST_P(DNNTestOpenVINO, models)
     }
 }
 INSTANTIATE_TEST_CASE_P(/**/,
     DNNTestOpenVINO,
     Combine(testing::ValuesIn(getAvailableTargets(DNN_BACKEND_INFERENCE_ENGINE)),
-            testing::Values(
-                "age-gender-recognition-retail-0013",
-                "face-person-detection-retail-0002",
-                "head-pose-estimation-adas-0001",
-                "person-detection-retail-0002",
-                "vehicle-detection-adas-0002"
-            ))
+            testing::ValuesIn(getOpenVINOTestModelsList())
+    )
 );
 }}
...
@@ -236,7 +236,7 @@ TEST_P(Test_Caffe_layers, Dropout)
 TEST_P(Test_Caffe_layers, Concat)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
         throw SkipTestException("Test is disabled for Myriad targets");
 #endif
@@ -247,7 +247,7 @@ TEST_P(Test_Caffe_layers, Concat)
 TEST_P(Test_Caffe_layers, Fused_Concat)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE due negative_slope parameter");
 #endif
...
@@ -319,7 +319,7 @@ TEST_P(Test_ONNX_nets, ResNet50v1)
 TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE targets");
 #endif
...
@@ -140,7 +140,7 @@ TEST_P(Test_TensorFlow_layers, padding)
 TEST_P(Test_TensorFlow_layers, padding_same)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
@@ -197,7 +197,7 @@ TEST_P(Test_TensorFlow_layers, pooling)
 TEST_P(Test_TensorFlow_layers, ave_pool_same)
 {
     // Reference output values are in range [-0.519531, 0.112976]
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
             && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
@@ -241,7 +241,7 @@ TEST_P(Test_TensorFlow_layers, reshape)
 TEST_P(Test_TensorFlow_layers, flatten)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
@@ -257,7 +257,7 @@ TEST_P(Test_TensorFlow_layers, flatten)
 TEST_P(Test_TensorFlow_layers, unfused_flatten)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
@@ -279,7 +279,7 @@ TEST_P(Test_TensorFlow_layers, leaky_relu)
 TEST_P(Test_TensorFlow_layers, l2_normalize)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
             && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
@@ -587,7 +587,7 @@ TEST_P(Test_TensorFlow_layers, fp16_weights)
 TEST_P(Test_TensorFlow_layers, fp16_padding_same)
 {
-#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GT(2018050000)
+#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
         throw SkipTestException("Test is disabled for DLIE");
 #endif
...