Commit 894f208d authored by Alexander Alekhin

dnn(test): replace SkipTestException with tags

parent f8c96cb1
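The change applies one pattern throughout the dnn test suite: conditional throw SkipTestException("...") statements are replaced by applyTestTag() calls with named skip tags, so skipped configurations are counted and reported per tag instead of disappearing behind free-form messages. A condensed before/after illustration of the pattern (taken from the hunks below, not an additional change in this commit):

// Before: the skip reason is an ad-hoc string, invisible to any reporting
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
    throw SkipTestException("Test is disabled for Myriad targets");

// After: the condition applies a tag; checkTestTags() decides whether that tag
// is in the skip list and, if so, throws the SkipTestException itself while
// keeping per-tag statistics
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
    applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);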
@@ -113,7 +113,7 @@ TEST(Test_Caffe, read_googlenet)
 TEST_P(Test_Caffe_nets, Axpy)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     String proto = _tf("axpy.prototxt");
     Net net = readNetFromCaffe(proto);
@@ -158,8 +158,7 @@ TEST_P(Reproducibility_AlexNet, Accuracy)
 {
     Target targetId = get<1>(GetParam());
     applyTestTag(targetId == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
-    if (!ocl::useOpenCL() && targetId != DNN_TARGET_CPU)
-        throw SkipTestException("OpenCL is disabled");
+    ASSERT_TRUE(ocl::useOpenCL() || targetId == DNN_TARGET_CPU);
     bool readFromMemory = get<0>(GetParam());
     Net net;
@@ -197,7 +196,7 @@ TEST_P(Reproducibility_AlexNet, Accuracy)
 }
 INSTANTIATE_TEST_CASE_P(/**/, Reproducibility_AlexNet, Combine(testing::Bool(),
-    Values(DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16)));
+    testing::ValuesIn(getAvailableTargets(DNN_BACKEND_OPENCV))));
 TEST(Reproducibility_FCN, Accuracy)
 {
@@ -329,8 +328,7 @@ TEST_P(Reproducibility_ResNet50, Accuracy)
 {
     Target targetId = GetParam();
     applyTestTag(targetId == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
-    if (!ocl::useOpenCL() && targetId != DNN_TARGET_CPU)
-        throw SkipTestException("OpenCL is disabled");
+    ASSERT_TRUE(ocl::useOpenCL() || targetId == DNN_TARGET_CPU);
     Net net = readNetFromCaffe(findDataFile("dnn/ResNet-50-deploy.prototxt"),
                                findDataFile("dnn/ResNet-50-model.caffemodel", false));
@@ -362,14 +360,14 @@ TEST_P(Reproducibility_ResNet50, Accuracy)
     }
 }
 INSTANTIATE_TEST_CASE_P(/**/, Reproducibility_ResNet50,
-    Values(DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16));
+    testing::ValuesIn(getAvailableTargets(DNN_BACKEND_OPENCV)));
 typedef testing::TestWithParam<Target> Reproducibility_SqueezeNet_v1_1;
 TEST_P(Reproducibility_SqueezeNet_v1_1, Accuracy)
 {
     int targetId = GetParam();
     if(targetId == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("This test does not support FP16");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     Net net = readNetFromCaffe(findDataFile("dnn/squeezenet_v1.1.prototxt"),
                                findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
     net.setPreferableBackend(DNN_BACKEND_OPENCV);
@@ -600,10 +598,10 @@ TEST_P(Test_Caffe_nets, FasterRCNN_vgg16)
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
-        throw SkipTestException("Test is disabled for DLIE OpenCL targets");  // very slow
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is disabled for Myriad targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
 #endif
     static Mat ref = (Mat_<float>(3, 7) << 0, 2, 0.949398, 99.2454, 210.141, 601.205, 462.849,
@@ -618,9 +616,10 @@ TEST_P(Test_Caffe_nets, FasterRCNN_zf)
         (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
         CV_TEST_TAG_DEBUG_LONG
     );
-    if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     static Mat ref = (Mat_<float>(3, 7) << 0, 2, 0.90121, 120.407, 115.83, 570.586, 528.395,
                                            0, 7, 0.988779, 469.849, 75.1756, 718.64, 186.762,
                                            0, 12, 0.967198, 138.588, 206.843, 329.766, 553.176);
@@ -634,9 +633,10 @@ TEST_P(Test_Caffe_nets, RFCN)
         CV_TEST_TAG_LONG,
         CV_TEST_TAG_DEBUG_VERYLONG
     );
-    if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     double scoreDiff = (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ? 4e-3 : default_l1;
     double iouDiff = (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ? 8e-2 : default_lInf;
     static Mat ref = (Mat_<float>(2, 7) << 0, 7, 0.991359, 491.822, 81.1668, 702.573, 178.234,
......
@@ -11,6 +11,20 @@
 #include "opencv2/core/ocl.hpp"
 #endif
+#define CV_TEST_TAG_DNN_SKIP_HALIDE "dnn_skip_halide"
+#define CV_TEST_TAG_DNN_SKIP_OPENCL "dnn_skip_ocl"
+#define CV_TEST_TAG_DNN_SKIP_OPENCL_FP16 "dnn_skip_ocl_fp16"
+#define CV_TEST_TAG_DNN_SKIP_IE "dnn_skip_ie"
+#define CV_TEST_TAG_DNN_SKIP_IE_2018R5 "dnn_skip_ie_2018r5"
+#define CV_TEST_TAG_DNN_SKIP_IE_2019R1 "dnn_skip_ie_2019r1"
+#define CV_TEST_TAG_DNN_SKIP_IE_2019R1_1 "dnn_skip_ie_2019r1_1"
+#define CV_TEST_TAG_DNN_SKIP_IE_OPENCL "dnn_skip_ie_ocl"
+#define CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 "dnn_skip_ie_ocl_fp16"
+#define CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2 "dnn_skip_ie_myriad2"
+#define CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X "dnn_skip_ie_myriadx"
+#define CV_TEST_TAG_DNN_SKIP_IE_MYRIAD CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2, CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X
 namespace cv { namespace dnn {
 CV__DNN_EXPERIMENTAL_NS_BEGIN
@@ -28,6 +42,8 @@ CV__DNN_EXPERIMENTAL_NS_END
 namespace opencv_test {
+void initDNNTests();
 using namespace cv::dnn;
 static inline const std::string &getOpenCVExtraDir()
@@ -106,7 +122,10 @@ public:
     {
         if (inp && ref && inp->dims == 4 && ref->dims == 4 &&
             inp->size[0] != 1 && inp->size[0] != ref->size[0])
+        {
+            applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
             throw SkipTestException("Inconsistent batch size of input and output blobs for Myriad plugin");
+        }
     }
 }
......
@@ -270,6 +270,14 @@ static bool validateVPUType_()
             exit(1);
         }
     }
+    if (have_vpu_target)
+    {
+        std::string dnn_vpu_type = getInferenceEngineVPUType();
+        if (dnn_vpu_type == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2)
+            registerGlobalSkipTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2);
+        if (dnn_vpu_type == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
+            registerGlobalSkipTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
+    }
     return true;
 }
@@ -280,4 +288,38 @@ bool validateVPUType()
 }
 #endif // HAVE_INF_ENGINE
+void initDNNTests()
+{
+    const char* extraTestDataPath =
+#ifdef WINRT
+        NULL;
+#else
+        getenv("OPENCV_DNN_TEST_DATA_PATH");
+#endif
+    if (extraTestDataPath)
+        cvtest::addDataSearchPath(extraTestDataPath);
+    registerGlobalSkipTag(
+        CV_TEST_TAG_DNN_SKIP_HALIDE,
+        CV_TEST_TAG_DNN_SKIP_OPENCL, CV_TEST_TAG_DNN_SKIP_OPENCL_FP16
+    );
+#if defined(INF_ENGINE_RELEASE)
+    registerGlobalSkipTag(
+#if INF_ENGINE_VER_MAJOR_EQ(2018050000)
+        CV_TEST_TAG_DNN_SKIP_IE_2018R5,
+#elif INF_ENGINE_VER_MAJOR_EQ(2019010000)
+        CV_TEST_TAG_DNN_SKIP_IE_2019R1,
+#elif INF_ENGINE_VER_MAJOR_EQ(2019010100)
+        CV_TEST_TAG_DNN_SKIP_IE_2019R1_1,
+#endif
+        CV_TEST_TAG_DNN_SKIP_IE
+    );
+#endif
+    registerGlobalSkipTag(
+        // see validateVPUType(): CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2, CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X
+        CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16
+    );
+}
 } // namespace
@@ -273,12 +273,12 @@ TEST_P(Test_Darknet_nets, YoloVoc)
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("Test is disabled");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
 #endif
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
-        throw SkipTestException("Test is disabled for MyriadX (need to update check function)");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);  // need to update check function
 #endif
     // batchId, classId, confidence, left, top, right, bottom
@@ -314,7 +314,7 @@ TEST_P(Test_Darknet_nets, TinyYoloVoc)
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
-        throw SkipTestException("Test is disabled for MyriadX (need to update check function)");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);  // need to update check function
 #endif
     // batchId, classId, confidence, left, top, right, bottom
     Mat ref = (Mat_<float>(4, 7) << 0, 6, 0.761967f, 0.579042f, 0.159161f, 0.894482f, 0.31994f,  // a car
@@ -346,7 +346,7 @@ TEST_P(Test_Darknet_nets, YOLOv3)
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
-        throw SkipTestException("Test is disabled for MyriadX");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     // batchId, classId, confidence, left, top, right, bottom
@@ -373,7 +373,7 @@ TEST_P(Test_Darknet_nets, YOLOv3)
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
-        throw SkipTestException("Test with 'batch size 2' is disabled for DLIE/OpenCL target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);  // Test with 'batch size 2' is disabled for DLIE/OpenCL target
 #endif
     {
......
@@ -56,8 +56,8 @@ typedef testing::TestWithParam<Target> Reproducibility_GoogLeNet;
 TEST_P(Reproducibility_GoogLeNet, Batching)
 {
     const int targetId = GetParam();
-    if(targetId == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("This test does not support FP16");
+    if (targetId == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt"),
                                findDataFile("dnn/bvlc_googlenet.caffemodel", false));
     net.setPreferableBackend(DNN_BACKEND_OPENCV);
@@ -87,8 +87,8 @@ TEST_P(Reproducibility_GoogLeNet, Batching)
 TEST_P(Reproducibility_GoogLeNet, IntermediateBlobs)
 {
     const int targetId = GetParam();
-    if(targetId == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("This test does not support FP16");
+    if (targetId == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt"),
                                findDataFile("dnn/bvlc_googlenet.caffemodel", false));
     net.setPreferableBackend(DNN_BACKEND_OPENCV);
@@ -118,8 +118,8 @@ TEST_P(Reproducibility_GoogLeNet, IntermediateBlobs)
 TEST_P(Reproducibility_GoogLeNet, SeveralCalls)
 {
     const int targetId = GetParam();
-    if(targetId == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("This test does not support FP16");
+    if (targetId == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt"),
                                findDataFile("dnn/bvlc_googlenet.caffemodel", false));
     net.setPreferableBackend(DNN_BACKEND_OPENCV);
......
@@ -165,7 +165,7 @@ TEST_P(Deconvolution, Accuracy)
         && inChannels == 6 && outChannels == 4 && group == 1
         && kernel == Size(1, 3) && pad == Size(1, 0)
         && stride == Size(1, 1) && dilation == Size(1, 1))
-        throw SkipTestException("Test is disabled");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     int sz[] = {inChannels, outChannels / group, kernel.height, kernel.width};
@@ -231,7 +231,7 @@ TEST_P(LRN, Accuracy)
     if ((inSize.width == 5 || inSize.height == 5) && targetId == DNN_TARGET_MYRIAD &&
         nrmType == "ACROSS_CHANNELS")
-        throw SkipTestException("This test case is disabled");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     LayerParams lp;
     lp.set("norm_region", nrmType);
@@ -276,7 +276,7 @@ TEST_P(AvePooling, Accuracy)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
         && kernel == Size(1, 1) && (stride == Size(1, 1) || stride == Size(2, 2)))
-        throw SkipTestException("Test is disabled for MyriadX target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     const int inWidth = (outSize.width - 1) * stride.width + kernel.width;
@@ -324,7 +324,7 @@ TEST_P(MaxPooling, Accuracy)
         && (stride == Size(1, 1) || stride == Size(2, 2))
         && (pad == Size(0, 1) || pad == Size(1, 1))
     )
-        throw SkipTestException("Test is disabled in OpenVINO <= 2018R5");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
 #endif
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
@@ -332,7 +332,7 @@ TEST_P(MaxPooling, Accuracy)
         && (kernel == Size(2, 2) || kernel == Size(3, 2))
         && stride == Size(1, 1) && (pad == Size(0, 0) || pad == Size(0, 1))
     )
-        throw SkipTestException("Problems with output dimension in OpenVINO 2018R5");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
 #endif
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
@@ -341,7 +341,7 @@ TEST_P(MaxPooling, Accuracy)
         && (stride == Size(1, 1) || stride == Size(2, 2))
         && (pad == Size(0, 1) || pad == Size(1, 1))
     )
-        throw SkipTestException("Test is disabled for MyriadX target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
 #endif
     LayerParams lp;
@@ -382,7 +382,7 @@ TEST_P(FullyConnected, Accuracy)
     Backend backendId = get<0>(get<4>(GetParam()));
     Target targetId = get<1>(get<4>(GetParam()));
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     Mat weights(outChannels, inChannels * inSize.height * inSize.width, CV_32F);
     randu(weights, -1.0f, 1.0f);
@@ -440,7 +440,7 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, SoftMax, Combine(
 TEST_P(Test_Halide_layers, MaxPoolUnpool)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     LayerParams pool;
     pool.set("pool", "max");
@@ -656,14 +656,14 @@ TEST_P(Concat, Accuracy)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
         && inSize == Vec3i(1, 4, 5) && numChannels == Vec3i(1, 6, 2)
     )
-        throw SkipTestException("Test is disabled for Myriad target");  // crash
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);  // crash
 #endif
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU
         && inSize == Vec3i(1, 4, 5) && numChannels == Vec3i(1, 6, 2)
     )
-        throw SkipTestException("Test is disabled for DLIE/CPU target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);  // TODO: IE_CPU
 #endif
     Net net;
@@ -737,12 +737,12 @@ TEST_P(Eltwise, Accuracy)
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD &&
         inSize == Vec3i(1, 4, 5))
-        throw SkipTestException("Test is disabled for Myriad target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
 #endif
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backendId == DNN_BACKEND_INFERENCE_ENGINE && numConv > 1)
-        throw SkipTestException("Test is disabled for DLIE backend");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
 #endif
     Net net;
......
@@ -142,15 +142,16 @@ TEST_P(Test_Caffe_layers, Convolution)
 TEST_P(Test_Caffe_layers, DeConvolution)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU)
-        throw SkipTestException("Test is disabled for DLIE/CPU");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);  // TODO IE_CPU
     testLayerUsingCaffeModels("layer_deconvolution", true, false);
 }
 TEST_P(Test_Caffe_layers, InnerProduct)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE ||
-        (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
+    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     testLayerUsingCaffeModels("layer_inner_product", true);
 }
@@ -236,7 +237,7 @@ TEST_P(Test_Caffe_layers, Concat)
 {
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is disabled for Myriad targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
 #endif
     testLayerUsingCaffeModels("layer_concat");
     testLayerUsingCaffeModels("layer_concat_optim", true, false);
@@ -246,15 +247,13 @@ TEST_P(Test_Caffe_layers, Concat)
 TEST_P(Test_Caffe_layers, Fused_Concat)
 {
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("Test is disabled for DLIE due negative_slope parameter");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)  // Test is disabled for DLIE due negative_slope parameter
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
 #endif
 #if defined(INF_ENGINE_RELEASE)
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE
-        && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
-    )
-        throw SkipTestException("Test is disabled for DLIE");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
 #endif
     checkBackend();
@@ -300,7 +299,7 @@ TEST_P(Test_Caffe_layers, Fused_Concat)
 TEST_P(Test_Caffe_layers, Eltwise)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     testLayerUsingCaffeModels("layer_eltwise");
 }
@@ -313,7 +312,7 @@ TEST_P(Test_Caffe_layers, PReLU)
 TEST_P(Test_Caffe_layers, layer_prelu_fc)
 {
     if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     // Reference output values are in range [-0.0001, 10.3906]
     double l1 = (target == DNN_TARGET_MYRIAD) ? 0.005 : 0.0;
     double lInf = (target == DNN_TARGET_MYRIAD) ? 0.021 : 0.0;
@@ -343,7 +342,7 @@ TEST_P(Test_Caffe_layers, layer_prelu_fc)
 TEST_P(Test_Caffe_layers, Reshape_Split_Slice)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     Net net = readNetFromCaffe(_tf("reshape_and_slice_routines.prototxt"));
     ASSERT_FALSE(net.empty());
@@ -365,7 +364,7 @@ TEST_P(Test_Caffe_layers, Conv_Elu)
 {
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE <= 2018050000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
 #endif
     Net net = readNetFromTensorflow(_tf("layer_elu_model.pb"));
@@ -548,9 +547,11 @@ TEST(Layer_Test_ROIPooling, Accuracy)
 TEST_P(Test_Caffe_layers, FasterRCNN_Proposal)
 {
-    if ((backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
-        backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     Net net = readNetFromCaffe(_tf("net_faster_rcnn_proposal.prototxt"));
     Mat scores = blobFromNPY(_tf("net_faster_rcnn_proposal.scores.npy"));
@@ -774,7 +775,8 @@ TEST_P(Test_Caffe_layers, Average_pooling_kernel_area)
 TEST_P(Test_Caffe_layers, PriorBox_squares)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     LayerParams lp;
     lp.name = "testPriorBox";
     lp.type = "PriorBox";
@@ -1307,7 +1309,8 @@ TEST_P(Test_Caffe_layers, DISABLED_Interp)  // requires patched protobuf (availa
 #endif
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     // Test a custom layer.
     CV_DNN_REGISTER_LAYER_CLASS(Interp, CustomInterpLayer);
     try
......
#include "test_precomp.hpp" #include "test_precomp.hpp"
static const char* extraTestDataPath = CV_TEST_MAIN("", initDNNTests());
#ifdef WINRT
NULL;
#else
getenv("OPENCV_DNN_TEST_DATA_PATH");
#endif
CV_TEST_MAIN("",
extraTestDataPath ? (void)cvtest::addDataSearchPath(extraTestDataPath) : (void)0
)
namespace opencv_test
{
using namespace cv;
using namespace cv::dnn;
}
@@ -158,7 +158,7 @@ TEST_P(setInput, normalization)
     const bool kSwapRB = true;
     if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16 && dtype != CV_32F)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     Mat inp(5, 5, CV_8UC3);
     randu(inp, 0, 255);
......
@@ -104,7 +104,7 @@ TEST_P(Test_ONNX_layers, Two_convolution)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
-        throw SkipTestException("Test is disabled for MyriadX");  // 2018R5+ is failed
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     // Reference output values are in range [-0.855, 0.611]
     testONNXModels("two_convolution");
@@ -127,7 +127,7 @@ TEST_P(Test_ONNX_layers, Dropout)
 TEST_P(Test_ONNX_layers, Linear)
 {
     if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     testONNXModels("linear");
 }
@@ -143,9 +143,12 @@ TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
 TEST_P(Test_ONNX_layers, Concatenation)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
-        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+        if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
+        if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
+    }
     testONNXModels("concatenation");
 }
@@ -191,24 +194,32 @@ TEST_P(Test_ONNX_layers, BatchNormalization)
 TEST_P(Test_ONNX_layers, BatchNormalization3D)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+        if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
+        if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
+    }
     testONNXModels("batch_norm_3d");
 }
 TEST_P(Test_ONNX_layers, Transpose)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
-        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+        if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
+        if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
+    }
     testONNXModels("transpose");
 }
 TEST_P(Test_ONNX_layers, Multiplication)
 {
-    if ((backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
-        (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     testONNXModels("mul");
 }
@@ -217,7 +228,7 @@ TEST_P(Test_ONNX_layers, Constant)
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
-        throw SkipTestException("Test is disabled for OpenVINO <= 2018R5 + MyriadX target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
 #endif
     testONNXModels("constant");
 }
@@ -261,8 +272,11 @@ TEST_P(Test_ONNX_layers, MultyInputs)
 TEST_P(Test_ONNX_layers, DynamicReshape)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+        if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
+    }
     testONNXModels("dynamic_reshape");
 }
@@ -325,7 +339,7 @@ TEST_P(Test_ONNX_nets, Squeezenet)
 TEST_P(Test_ONNX_nets, Googlenet)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
     const String model = _tf("models/googlenet.onnx", false);
@@ -409,14 +423,18 @@ TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE)
-        throw SkipTestException("Test is disabled for DLIE targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
 #endif
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is disabled for Myriad targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
 #endif
     if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
+    {
+        if (backend == DNN_BACKEND_OPENCV)
+            applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
         throw SkipTestException("Test is disabled for OpenCL targets");
+    }
     testONNXModels("resnet101_duc_hdc", pb);
 }
@@ -430,12 +448,12 @@ TEST_P(Test_ONNX_nets, TinyYolov2)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE
         && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
     )
-        throw SkipTestException("Test is disabled for DLIE OpenCL targets");
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
-        throw SkipTestException("Test is disabled for MyriadX");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     // output range: [-11; 8]
@@ -462,9 +480,12 @@ TEST_P(Test_ONNX_nets, LResNet100E_IR)
         (target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
         CV_TEST_TAG_DEBUG_LONG
     );
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
-        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+        if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
+        if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
+    }
     double l1 = default_l1;
     double lInf = default_lInf;
@@ -486,7 +507,7 @@ TEST_P(Test_ONNX_nets, Emotion_ferplus)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
     )
-        throw SkipTestException("Test is disabled for MyriadX");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     double l1 = default_l1;
@@ -524,16 +545,19 @@ TEST_P(Test_ONNX_nets, Inception_v1)
 {
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is disabled for Myriad targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
 #endif
     testONNXModels("inception_v1", pb);
 }
 TEST_P(Test_ONNX_nets, Shufflenet)
 {
-    if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
-        (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
-        throw SkipTestException("");
+    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
+    {
+        if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
+        if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
+        if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
+    }
     testONNXModels("shufflenet", pb);
 }
......
@@ -120,7 +120,7 @@ TEST_P(Test_Torch_layers, run_convolution)
 TEST_P(Test_Torch_layers, run_pool_max)
 {
     if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     runTorchNet("net_pool_max", "", true);
 }
@@ -137,7 +137,7 @@ TEST_P(Test_Torch_layers, run_reshape_change_batch_size)
 TEST_P(Test_Torch_layers, run_reshape)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is disabled for Myriad targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     runTorchNet("net_reshape_batch");
     runTorchNet("net_reshape_channels", "", false, true);
 }
@@ -153,7 +153,7 @@ TEST_P(Test_Torch_layers, run_reshape_single_sample)
 TEST_P(Test_Torch_layers, run_linear)
 {
     if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
     runTorchNet("net_linear_2d");
 }
@@ -210,7 +210,7 @@ TEST_P(Test_Torch_layers, net_lp_pooling)
 TEST_P(Test_Torch_layers, net_conv_gemm_lrn)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
     runTorchNet("net_conv_gemm_lrn", "", false, true, true,
                 target == DNN_TARGET_OPENCL_FP16 ? 0.046 : 0.0,
                 target == DNN_TARGET_OPENCL_FP16 ? 0.023 : 0.0);
@@ -237,14 +237,14 @@ TEST_P(Test_Torch_layers, net_non_spatial)
 {
     if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
         (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
-        throw SkipTestException("");
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
     runTorchNet("net_non_spatial", "", false, true);
 }
 TEST_P(Test_Torch_layers, run_paralel)
 {
     if (backend != DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
-        throw SkipTestException("");
+        throw SkipTestException("");  // TODO: Check this
     runTorchNet("net_parallel", "l5_torchMerge");
 }
@@ -253,7 +253,7 @@ TEST_P(Test_Torch_layers, net_residual)
 #if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL ||
                                                     target == DNN_TARGET_OPENCL_FP16))
-        throw SkipTestException("Test is disabled for OpenVINO 2018R5");
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
 #endif
     runTorchNet("net_residual", "", false, true);
 }
@@ -264,7 +264,7 @@ TEST_P(Test_Torch_nets, OpenFace_accuracy)
 {
 #if defined(INF_ENGINE_RELEASE)
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
-        throw SkipTestException("Test is disabled for Myriad targets");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
 #endif
     checkBackend();
@@ -339,7 +339,7 @@ TEST_P(Test_Torch_nets, ENet_accuracy)
     checkBackend();
     if (backend == DNN_BACKEND_INFERENCE_ENGINE ||
         (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
-        throw SkipTestException("");
+        applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
     Net net;
     {
@@ -391,7 +391,7 @@ TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
 #if defined INF_ENGINE_RELEASE
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
         && getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
-        throw SkipTestException("Test is disabled for MyriadX target");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
 #endif
     checkBackend();
@@ -399,7 +399,7 @@ TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
 #if defined(INF_ENGINE_RELEASE)
 #if INF_ENGINE_RELEASE <= 2018050000
     if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
-        throw SkipTestException("");
+        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
 #endif
 #endif
......
@@ -212,6 +212,36 @@ static inline void applyTestTag(const std::string& tag1, const std::string& tag2
 { applyTestTag_(tag1); applyTestTag_(tag2); applyTestTag_(tag3); applyTestTag_(tag4); checkTestTags(); }
+/** Append global skip test tags
+*/
+void registerGlobalSkipTag(const std::string& skipTag);
+static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2)
+{ registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); }
+static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3)
+{ registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); }
+static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4)
+{ registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4); }
+static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4,
+                                         const std::string& tag5)
+{
+    registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4);
+    registerGlobalSkipTag(tag5);
+}
+static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4,
+                                         const std::string& tag5, const std::string& tag6)
+{
+    registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4);
+    registerGlobalSkipTag(tag5); registerGlobalSkipTag(tag6);
+}
+static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4,
+                                         const std::string& tag5, const std::string& tag6, const std::string& tag7)
+{
+    registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4);
+    registerGlobalSkipTag(tag5); registerGlobalSkipTag(tag6); registerGlobalSkipTag(tag7);
+}
 class TS;
 int64 readSeed(const char* str);
@@ -758,7 +788,7 @@ int main(int argc, char **argv) \
 { \
     CV_TRACE_FUNCTION(); \
     { CV_TRACE_REGION("INIT"); \
-    using namespace cvtest; \
+    using namespace cvtest; using namespace opencv_test; \
     TS* ts = TS::ptr(); \
     ts->init(resourcesubdir); \
     __CV_TEST_EXEC_ARGS(CV_TEST_INIT0_ ## INIT0) \
......
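With the overloads above, any test module can register its own tags as globally skipped at startup, the way initDNNTests() does earlier in this diff. A minimal sketch (the module name, function name and tag string below are illustrative and not part of this commit; the cvtest namespace is assumed, matching the rest of ts.hpp):

// test_main.cpp of a hypothetical test module
#include "test_precomp.hpp"

static void initMyModuleTests()  // illustrative name
{
    // Make the tag known and skipped by default; tests apply it with
    // applyTestTag("mymodule_skip_gpu") and it shows up in the per-tag
    // skip statistics printed at the end of the run.
    cvtest::registerGlobalSkipTag("mymodule_skip_gpu");  // illustrative tag string
}
CV_TEST_MAIN("", initMyModuleTests());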
@@ -13,6 +13,30 @@ static bool printTestTag = false;
 static std::vector<std::string> currentDirectTestTags, currentImpliedTestTags;
 static std::vector<const ::testing::TestInfo*> skipped_tests;
+static std::map<std::string, int>& getTestTagsSkipCounts()
+{
+    static std::map<std::string, int> testTagsSkipCounts;
+    return testTagsSkipCounts;
+}
+static std::map<std::string, int>& getTestTagsSkipExtraCounts()
+{
+    static std::map<std::string, int> testTagsSkipExtraCounts;
+    return testTagsSkipExtraCounts;
+}
+static void increaseTagsSkipCount(const std::string& tag, bool isMain)
+{
+    std::map<std::string, int>& counts = isMain ? getTestTagsSkipCounts() : getTestTagsSkipExtraCounts();
+    std::map<std::string, int>::iterator i = counts.find(tag);
+    if (i == counts.end())
+    {
+        counts[tag] = 1;
+    }
+    else
+    {
+        i->second++;
+    }
+}
 static std::vector<std::string>& getTestTagsSkipList()
 {
     static std::vector<std::string> testSkipWithTags;
@@ -33,6 +57,17 @@ static std::vector<std::string>& getTestTagsSkipList()
     return testSkipWithTags;
 }
+void registerGlobalSkipTag(const std::string& skipTag)
+{
+    std::vector<std::string>& skipTags = getTestTagsSkipList();
+    for (size_t i = 0; i < skipTags.size(); ++i)
+    {
+        if (skipTag == skipTags[i])
+            return;  // duplicate
+    }
+    skipTags.push_back(skipTag);
+}
 static std::vector<std::string>& getTestTagsForceList()
 {
     static std::vector<std::string> getTestTagsForceList;
@@ -156,7 +191,27 @@ public:
     {
         if (!skipped_tests.empty())
         {
-            std::cout << "[ SKIP ] " << skipped_tests.size() << " tests via tags" << std::endl;
+            std::cout << "[ SKIPSTAT ] " << skipped_tests.size() << " tests via tags" << std::endl;
+            const std::vector<std::string>& skipTags = getTestTagsSkipList();
+            const std::map<std::string, int>& counts = getTestTagsSkipCounts();
+            const std::map<std::string, int>& countsExtra = getTestTagsSkipExtraCounts();
+            for (std::vector<std::string>::const_iterator i = skipTags.begin(); i != skipTags.end(); ++i)
+            {
+                int c1 = 0;
+                std::map<std::string, int>::const_iterator i1 = counts.find(*i);
+                if (i1 != counts.end()) c1 = i1->second;
+                int c2 = 0;
+                std::map<std::string, int>::const_iterator i2 = countsExtra.find(*i);
+                if (i2 != countsExtra.end()) c2 = i2->second;
+                if (c2 > 0)
+                {
+                    std::cout << "[ SKIPSTAT ] TAG='" << *i << "' skip " << c1 << " tests (" << c2 << " times in extra skip list)" << std::endl;
+                }
+                else if (c1 > 0)
+                {
+                    std::cout << "[ SKIPSTAT ] TAG='" << *i << "' skip " << c1 << " tests" << std::endl;
+                }
+            }
         }
         skipped_tests.clear();
     }
@@ -255,13 +310,14 @@ void checkTestTags()
         if (isTestTagForced(testTag))
             return;
     }
+    std::string skip_message;
     for (size_t i = 0; i < testTags.size(); ++i)
     {
         const std::string& testTag = testTags[i];
         if (isTestTagSkipped(testTag, skipTag))
         {
-            skipped_tests.push_back(::testing::UnitTest::GetInstance()->current_test_info());
-            throw SkipTestException("Test with tag '" + testTag + "' is skipped ('" + skipTag + "' is in skip list)");
+            increaseTagsSkipCount(skipTag, skip_message.empty());
+            if (skip_message.empty()) skip_message = "Test with tag '" + testTag + "' is skipped ('" + skipTag + "' is in skip list)";
         }
     }
     const std::vector<std::string>& testTagsImplied = currentImpliedTestTags;
@@ -270,10 +326,16 @@ void checkTestTags()
         const std::string& testTag = testTagsImplied[i];
         if (isTestTagSkipped(testTag, skipTag))
         {
-            skipped_tests.push_back(::testing::UnitTest::GetInstance()->current_test_info());
-            throw SkipTestException("Test with tag '" + testTag + "' is skipped ('" + skipTag + "' is in skip list)");
+            increaseTagsSkipCount(skipTag, skip_message.empty());
+            if (skip_message.empty()) skip_message = "Test with tag '" + testTag + "' is skipped (implied '" + skipTag + "' is in skip list)";
        }
     }
+    if (!skip_message.empty())
+    {
+        skipped_tests.push_back(::testing::UnitTest::GetInstance()->current_test_info());
+        throw SkipTestException(skip_message);
+    }
 }
 static bool applyTestTagImpl(const std::string& tag, bool direct = false)
......
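Based on the format strings added in this hunk, the end-of-run summary changes from a single "[ SKIP ] N tests via tags" line to a per-tag breakdown. An illustrative example of the new output (the tag names come from this commit; the counts are invented for the example):

[ SKIPSTAT ] 14 tests via tags
[ SKIPSTAT ] TAG='dnn_skip_ie_myriadx' skip 9 tests
[ SKIPSTAT ] TAG='dnn_skip_ie_ocl_fp16' skip 4 tests (2 times in extra skip list)
[ SKIPSTAT ] TAG='dnn_skip_halide' skip 1 tests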