Commit 894f208d authored by Alexander Alekhin

dnn(test): replace SkipTestException with tags

parent f8c96cb1
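The pattern applied throughout the hunks below, as a minimal sketch (the fixture Test_Example and runExampleModel() are hypothetical; the backend check and tag are taken from the real tests in this diff):

TEST_P(Test_Example, Accuracy)
{
    // old style: unconditional bail-out, invisible to the test report
    //   if (backend == DNN_BACKEND_INFERENCE_ENGINE)
    //       throw SkipTestException("");
    // new style: declare the reason as a tag; applyTestTag() runs checkTestTags(),
    // which throws SkipTestException only when the tag is in the registered skip
    // list and counts the skip for the [ SKIPSTAT ] summary
    if (backend == DNN_BACKEND_INFERENCE_ENGINE)
        applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);

    runExampleModel();  // hypothetical test body
}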
......@@ -113,7 +113,7 @@ TEST(Test_Caffe, read_googlenet)
TEST_P(Test_Caffe_nets, Axpy)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
String proto = _tf("axpy.prototxt");
Net net = readNetFromCaffe(proto);
......@@ -158,8 +158,7 @@ TEST_P(Reproducibility_AlexNet, Accuracy)
{
Target targetId = get<1>(GetParam());
applyTestTag(targetId == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
if (!ocl::useOpenCL() && targetId != DNN_TARGET_CPU)
throw SkipTestException("OpenCL is disabled");
ASSERT_TRUE(ocl::useOpenCL() || targetId == DNN_TARGET_CPU);
bool readFromMemory = get<0>(GetParam());
Net net;
......@@ -197,7 +196,7 @@ TEST_P(Reproducibility_AlexNet, Accuracy)
}
INSTANTIATE_TEST_CASE_P(/**/, Reproducibility_AlexNet, Combine(testing::Bool(),
Values(DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16)));
testing::ValuesIn(getAvailableTargets(DNN_BACKEND_OPENCV))));
TEST(Reproducibility_FCN, Accuracy)
{
......@@ -329,8 +328,7 @@ TEST_P(Reproducibility_ResNet50, Accuracy)
{
Target targetId = GetParam();
applyTestTag(targetId == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB);
if (!ocl::useOpenCL() && targetId != DNN_TARGET_CPU)
throw SkipTestException("OpenCL is disabled");
ASSERT_TRUE(ocl::useOpenCL() || targetId == DNN_TARGET_CPU);
Net net = readNetFromCaffe(findDataFile("dnn/ResNet-50-deploy.prototxt"),
findDataFile("dnn/ResNet-50-model.caffemodel", false));
......@@ -362,14 +360,14 @@ TEST_P(Reproducibility_ResNet50, Accuracy)
}
}
INSTANTIATE_TEST_CASE_P(/**/, Reproducibility_ResNet50,
Values(DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16));
testing::ValuesIn(getAvailableTargets(DNN_BACKEND_OPENCV)));
typedef testing::TestWithParam<Target> Reproducibility_SqueezeNet_v1_1;
TEST_P(Reproducibility_SqueezeNet_v1_1, Accuracy)
{
int targetId = GetParam();
if(targetId == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("This test does not support FP16");
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
Net net = readNetFromCaffe(findDataFile("dnn/squeezenet_v1.1.prototxt"),
findDataFile("dnn/squeezenet_v1.1.caffemodel", false));
net.setPreferableBackend(DNN_BACKEND_OPENCV);
......@@ -600,10 +598,10 @@ TEST_P(Test_Caffe_nets, FasterRCNN_vgg16)
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
throw SkipTestException("Test is disabled for DLIE OpenCL targets"); // very slow
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("Test is disabled for Myriad targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif
static Mat ref = (Mat_<float>(3, 7) << 0, 2, 0.949398, 99.2454, 210.141, 601.205, 462.849,
......@@ -618,9 +616,10 @@ TEST_P(Test_Caffe_nets, FasterRCNN_zf)
(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
CV_TEST_TAG_DEBUG_LONG
);
if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) ||
(backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
static Mat ref = (Mat_<float>(3, 7) << 0, 2, 0.90121, 120.407, 115.83, 570.586, 528.395,
0, 7, 0.988779, 469.849, 75.1756, 718.64, 186.762,
0, 12, 0.967198, 138.588, 206.843, 329.766, 553.176);
......@@ -634,9 +633,10 @@ TEST_P(Test_Caffe_nets, RFCN)
CV_TEST_TAG_LONG,
CV_TEST_TAG_DEBUG_VERYLONG
);
if ((backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16) ||
(backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
double scoreDiff = (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ? 4e-3 : default_l1;
double iouDiff = (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ? 8e-2 : default_lInf;
static Mat ref = (Mat_<float>(2, 7) << 0, 7, 0.991359, 491.822, 81.1668, 702.573, 178.234,
......
......@@ -11,6 +11,20 @@
#include "opencv2/core/ocl.hpp"
#endif
#define CV_TEST_TAG_DNN_SKIP_HALIDE "dnn_skip_halide"
#define CV_TEST_TAG_DNN_SKIP_OPENCL "dnn_skip_ocl"
#define CV_TEST_TAG_DNN_SKIP_OPENCL_FP16 "dnn_skip_ocl_fp16"
#define CV_TEST_TAG_DNN_SKIP_IE "dnn_skip_ie"
#define CV_TEST_TAG_DNN_SKIP_IE_2018R5 "dnn_skip_ie_2018r5"
#define CV_TEST_TAG_DNN_SKIP_IE_2019R1 "dnn_skip_ie_2019r1"
#define CV_TEST_TAG_DNN_SKIP_IE_2019R1_1 "dnn_skip_ie_2019r1_1"
#define CV_TEST_TAG_DNN_SKIP_IE_OPENCL "dnn_skip_ie_ocl"
#define CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16 "dnn_skip_ie_ocl_fp16"
#define CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2 "dnn_skip_ie_myriad2"
#define CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X "dnn_skip_ie_myriadx"
#define CV_TEST_TAG_DNN_SKIP_IE_MYRIAD CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2, CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X
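Note that CV_TEST_TAG_DNN_SKIP_IE_MYRIAD deliberately expands to two comma-separated tags, so a single argument covers both VPU generations; a sketch of the expansion:

// written in a test:
//     applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
// after preprocessing this becomes the two-tag overload:
//     applyTestTag("dnn_skip_ie_myriad2", "dnn_skip_ie_myriadx");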
namespace cv { namespace dnn {
CV__DNN_EXPERIMENTAL_NS_BEGIN
......@@ -28,6 +42,8 @@ CV__DNN_EXPERIMENTAL_NS_END
namespace opencv_test {
void initDNNTests();
using namespace cv::dnn;
static inline const std::string &getOpenCVExtraDir()
......@@ -106,7 +122,10 @@ public:
{
if (inp && ref && inp->dims == 4 && ref->dims == 4 &&
inp->size[0] != 1 && inp->size[0] != ref->size[0])
{
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
throw SkipTestException("Inconsistent batch size of input and output blobs for Myriad plugin");
}
}
}
......
......@@ -270,6 +270,14 @@ static bool validateVPUType_()
exit(1);
}
}
if (have_vpu_target)
{
std::string dnn_vpu_type = getInferenceEngineVPUType();
if (dnn_vpu_type == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_2)
registerGlobalSkipTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2);
if (dnn_vpu_type == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
registerGlobalSkipTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
}
return true;
}
......@@ -280,4 +288,38 @@ bool validateVPUType()
}
#endif // HAVE_INF_ENGINE
void initDNNTests()
{
const char* extraTestDataPath =
#ifdef WINRT
NULL;
#else
getenv("OPENCV_DNN_TEST_DATA_PATH");
#endif
if (extraTestDataPath)
cvtest::addDataSearchPath(extraTestDataPath);
registerGlobalSkipTag(
CV_TEST_TAG_DNN_SKIP_HALIDE,
CV_TEST_TAG_DNN_SKIP_OPENCL, CV_TEST_TAG_DNN_SKIP_OPENCL_FP16
);
#if defined(INF_ENGINE_RELEASE)
registerGlobalSkipTag(
#if INF_ENGINE_VER_MAJOR_EQ(2018050000)
CV_TEST_TAG_DNN_SKIP_IE_2018R5,
#elif INF_ENGINE_VER_MAJOR_EQ(2019010000)
CV_TEST_TAG_DNN_SKIP_IE_2019R1,
#elif INF_ENGINE_VER_MAJOR_EQ(2019010100)
CV_TEST_TAG_DNN_SKIP_IE_2019R1_1,
#endif
CV_TEST_TAG_DNN_SKIP_IE
);
#endif
registerGlobalSkipTag(
// see validateVPUType(): CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_2, CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X
CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16
);
}
} // namespace
......@@ -273,12 +273,12 @@ TEST_P(Test_Darknet_nets, YoloVoc)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("Test is disabled");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
#endif
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
throw SkipTestException("Test is disabled for MyriadX (need to update check function)");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X); // need to update check function
#endif
// batchId, classId, confidence, left, top, right, bottom
......@@ -314,7 +314,7 @@ TEST_P(Test_Darknet_nets, TinyYoloVoc)
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
throw SkipTestException("Test is disabled for MyriadX (need to update check function)");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X); // need to update check function
#endif
// batchId, classId, confidence, left, top, right, bottom
Mat ref = (Mat_<float>(4, 7) << 0, 6, 0.761967f, 0.579042f, 0.159161f, 0.894482f, 0.31994f, // a car
......@@ -346,7 +346,7 @@ TEST_P(Test_Darknet_nets, YOLOv3)
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
throw SkipTestException("Test is disabled for MyriadX");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
// batchId, classId, confidence, left, top, right, bottom
......@@ -373,7 +373,7 @@ TEST_P(Test_Darknet_nets, YOLOv3)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
throw SkipTestException("Test with 'batch size 2' is disabled for DLIE/OpenCL target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL); // Test with 'batch size 2' is disabled for DLIE/OpenCL target
#endif
{
......
......@@ -56,8 +56,8 @@ typedef testing::TestWithParam<Target> Reproducibility_GoogLeNet;
TEST_P(Reproducibility_GoogLeNet, Batching)
{
const int targetId = GetParam();
if(targetId == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("This test does not support FP16");
if (targetId == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt"),
findDataFile("dnn/bvlc_googlenet.caffemodel", false));
net.setPreferableBackend(DNN_BACKEND_OPENCV);
......@@ -87,8 +87,8 @@ TEST_P(Reproducibility_GoogLeNet, Batching)
TEST_P(Reproducibility_GoogLeNet, IntermediateBlobs)
{
const int targetId = GetParam();
if(targetId == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("This test does not support FP16");
if (targetId == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt"),
findDataFile("dnn/bvlc_googlenet.caffemodel", false));
net.setPreferableBackend(DNN_BACKEND_OPENCV);
......@@ -118,8 +118,8 @@ TEST_P(Reproducibility_GoogLeNet, IntermediateBlobs)
TEST_P(Reproducibility_GoogLeNet, SeveralCalls)
{
const int targetId = GetParam();
if(targetId == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("This test does not support FP16");
if (targetId == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
Net net = readNetFromCaffe(findDataFile("dnn/bvlc_googlenet.prototxt"),
findDataFile("dnn/bvlc_googlenet.caffemodel", false));
net.setPreferableBackend(DNN_BACKEND_OPENCV);
......
......@@ -165,7 +165,7 @@ TEST_P(Deconvolution, Accuracy)
&& inChannels == 6 && outChannels == 4 && group == 1
&& kernel == Size(1, 3) && pad == Size(1, 0)
&& stride == Size(1, 1) && dilation == Size(1, 1))
throw SkipTestException("Test is disabled");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
int sz[] = {inChannels, outChannels / group, kernel.height, kernel.width};
......@@ -231,7 +231,7 @@ TEST_P(LRN, Accuracy)
if ((inSize.width == 5 || inSize.height == 5) && targetId == DNN_TARGET_MYRIAD &&
nrmType == "ACROSS_CHANNELS")
throw SkipTestException("This test case is disabled");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
LayerParams lp;
lp.set("norm_region", nrmType);
......@@ -276,7 +276,7 @@ TEST_P(AvePooling, Accuracy)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
&& kernel == Size(1, 1) && (stride == Size(1, 1) || stride == Size(2, 2)))
throw SkipTestException("Test is disabled for MyriadX target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
const int inWidth = (outSize.width - 1) * stride.width + kernel.width;
......@@ -324,7 +324,7 @@ TEST_P(MaxPooling, Accuracy)
&& (stride == Size(1, 1) || stride == Size(2, 2))
&& (pad == Size(0, 1) || pad == Size(1, 1))
)
throw SkipTestException("Test is disabled in OpenVINO <= 2018R5");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_EQ(2018050000)
......@@ -332,7 +332,7 @@ TEST_P(MaxPooling, Accuracy)
&& (kernel == Size(2, 2) || kernel == Size(3, 2))
&& stride == Size(1, 1) && (pad == Size(0, 0) || pad == Size(0, 1))
)
throw SkipTestException("Problems with output dimension in OpenVINO 2018R5");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
......@@ -341,7 +341,7 @@ TEST_P(MaxPooling, Accuracy)
&& (stride == Size(1, 1) || stride == Size(2, 2))
&& (pad == Size(0, 1) || pad == Size(1, 1))
)
throw SkipTestException("Test is disabled for MyriadX target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
#endif
LayerParams lp;
......@@ -382,7 +382,7 @@ TEST_P(FullyConnected, Accuracy)
Backend backendId = get<0>(get<4>(GetParam()));
Target targetId = get<1>(get<4>(GetParam()));
if (backendId == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
Mat weights(outChannels, inChannels * inSize.height * inSize.width, CV_32F);
randu(weights, -1.0f, 1.0f);
......@@ -440,7 +440,7 @@ INSTANTIATE_TEST_CASE_P(Layer_Test_Halide, SoftMax, Combine(
TEST_P(Test_Halide_layers, MaxPoolUnpool)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
LayerParams pool;
pool.set("pool", "max");
......@@ -656,14 +656,14 @@ TEST_P(Concat, Accuracy)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD
&& inSize == Vec3i(1, 4, 5) && numChannels == Vec3i(1, 6, 2)
)
throw SkipTestException("Test is disabled for Myriad target"); // crash
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5); // crash
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_CPU
&& inSize == Vec3i(1, 4, 5) && numChannels == Vec3i(1, 6, 2)
)
throw SkipTestException("Test is disabled for DLIE/CPU target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1); // TODO: IE_CPU
#endif
Net net;
......@@ -737,12 +737,12 @@ TEST_P(Eltwise, Accuracy)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_MYRIAD &&
inSize == Vec3i(1, 4, 5))
throw SkipTestException("Test is disabled for Myriad target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && numConv > 1)
throw SkipTestException("Test is disabled for DLIE backend");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
#endif
Net net;
......
......@@ -142,15 +142,16 @@ TEST_P(Test_Caffe_layers, Convolution)
TEST_P(Test_Caffe_layers, DeConvolution)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_CPU)
throw SkipTestException("Test is disabled for DLIE/CPU");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE); // TODO IE_CPU
testLayerUsingCaffeModels("layer_deconvolution", true, false);
}
TEST_P(Test_Caffe_layers, InnerProduct)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE ||
(backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
testLayerUsingCaffeModels("layer_inner_product", true);
}
......@@ -236,7 +237,7 @@ TEST_P(Test_Caffe_layers, Concat)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("Test is disabled for Myriad targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
#endif
testLayerUsingCaffeModels("layer_concat");
testLayerUsingCaffeModels("layer_concat_optim", true, false);
......@@ -246,15 +247,13 @@ TEST_P(Test_Caffe_layers, Concat)
TEST_P(Test_Caffe_layers, Fused_Concat)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("Test is disabled for DLIE due negative_slope parameter");
if (backend == DNN_BACKEND_INFERENCE_ENGINE) // Test is disabled for DLIE due to the negative_slope parameter
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
#endif
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE
&& (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
)
throw SkipTestException("Test is disabled for DLIE");
if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
#endif
checkBackend();
......@@ -300,7 +299,7 @@ TEST_P(Test_Caffe_layers, Fused_Concat)
TEST_P(Test_Caffe_layers, Eltwise)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
testLayerUsingCaffeModels("layer_eltwise");
}
......@@ -313,7 +312,7 @@ TEST_P(Test_Caffe_layers, PReLU)
TEST_P(Test_Caffe_layers, layer_prelu_fc)
{
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
// Reference output values are in range [-0.0001, 10.3906]
double l1 = (target == DNN_TARGET_MYRIAD) ? 0.005 : 0.0;
double lInf = (target == DNN_TARGET_MYRIAD) ? 0.021 : 0.0;
......@@ -343,7 +342,7 @@ TEST_P(Test_Caffe_layers, layer_prelu_fc)
TEST_P(Test_Caffe_layers, Reshape_Split_Slice)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
Net net = readNetFromCaffe(_tf("reshape_and_slice_routines.prototxt"));
ASSERT_FALSE(net.empty());
......@@ -365,7 +364,7 @@ TEST_P(Test_Caffe_layers, Conv_Elu)
{
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE <= 2018050000
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
Net net = readNetFromTensorflow(_tf("layer_elu_model.pb"));
......@@ -548,9 +547,11 @@ TEST(Layer_Test_ROIPooling, Accuracy)
TEST_P(Test_Caffe_layers, FasterRCNN_Proposal)
{
if ((backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
Net net = readNetFromCaffe(_tf("net_faster_rcnn_proposal.prototxt"));
Mat scores = blobFromNPY(_tf("net_faster_rcnn_proposal.scores.npy"));
......@@ -774,7 +775,8 @@ TEST_P(Test_Caffe_layers, Average_pooling_kernel_area)
TEST_P(Test_Caffe_layers, PriorBox_squares)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
LayerParams lp;
lp.name = "testPriorBox";
lp.type = "PriorBox";
......@@ -1307,7 +1309,8 @@ TEST_P(Test_Caffe_layers, DISABLED_Interp) // requires patched protobuf (availa
#endif
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
// Test a custom layer.
CV_DNN_REGISTER_LAYER_CLASS(Interp, CustomInterpLayer);
try
......
#include "test_precomp.hpp"
static const char* extraTestDataPath =
#ifdef WINRT
NULL;
#else
getenv("OPENCV_DNN_TEST_DATA_PATH");
#endif
CV_TEST_MAIN("",
extraTestDataPath ? (void)cvtest::addDataSearchPath(extraTestDataPath) : (void)0
)
namespace opencv_test
{
using namespace cv;
using namespace cv::dnn;
}
CV_TEST_MAIN("", initDNNTests());
......@@ -158,7 +158,7 @@ TEST_P(setInput, normalization)
const bool kSwapRB = true;
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16 && dtype != CV_32F)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
Mat inp(5, 5, CV_8UC3);
randu(inp, 0, 255);
......
......@@ -104,7 +104,7 @@ TEST_P(Test_ONNX_layers, Two_convolution)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
)
throw SkipTestException("Test is disabled for MyriadX"); // 2018R5+ is failed
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
// Reference output values are in range [-0.855, 0.611]
testONNXModels("two_convolution");
......@@ -127,7 +127,7 @@ TEST_P(Test_ONNX_layers, Dropout)
TEST_P(Test_ONNX_layers, Linear)
{
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
testONNXModels("linear");
}
......@@ -143,9 +143,12 @@ TEST_P(Test_ONNX_layers, MaxPooling_Sigmoid)
TEST_P(Test_ONNX_layers, Concatenation)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
(target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
{
if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
}
testONNXModels("concatenation");
}
......@@ -191,24 +194,32 @@ TEST_P(Test_ONNX_layers, BatchNormalization)
TEST_P(Test_ONNX_layers, BatchNormalization3D)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target != DNN_TARGET_CPU)
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
{
if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
}
testONNXModels("batch_norm_3d");
}
TEST_P(Test_ONNX_layers, Transpose)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
(target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
{
if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
}
testONNXModels("transpose");
}
TEST_P(Test_ONNX_layers, Multiplication)
{
if ((backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16) ||
(backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
testONNXModels("mul");
}
......@@ -217,7 +228,7 @@ TEST_P(Test_ONNX_layers, Constant)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_LE(2018050000)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
throw SkipTestException("Test is disabled for OpenVINO <= 2018R5 + MyriadX target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
testONNXModels("constant");
}
......@@ -261,8 +272,11 @@ TEST_P(Test_ONNX_layers, MultyInputs)
TEST_P(Test_ONNX_layers, DynamicReshape)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
{
if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
}
testONNXModels("dynamic_reshape");
}
......@@ -325,7 +339,7 @@ TEST_P(Test_ONNX_nets, Squeezenet)
TEST_P(Test_ONNX_nets, Googlenet)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE);
const String model = _tf("models/googlenet.onnx", false);
......@@ -409,14 +423,18 @@ TEST_P(Test_ONNX_nets, ResNet101_DUC_HDC)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_VER_MAJOR_GE(2019010000)
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
throw SkipTestException("Test is disabled for DLIE targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
#endif
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("Test is disabled for Myriad targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif
if (target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL)
{
if (backend == DNN_BACKEND_OPENCV)
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_OPENCL : CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
throw SkipTestException("Test is disabled for OpenCL targets");
}
testONNXModels("resnet101_duc_hdc", pb);
}
......@@ -430,12 +448,12 @@ TEST_P(Test_ONNX_nets, TinyYolov2)
if (backend == DNN_BACKEND_INFERENCE_ENGINE
&& (target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16)
)
throw SkipTestException("Test is disabled for DLIE OpenCL targets");
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
)
throw SkipTestException("Test is disabled for MyriadX");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
// output range: [-11; 8]
......@@ -462,9 +480,12 @@ TEST_P(Test_ONNX_nets, LResNet100E_IR)
(target == DNN_TARGET_CPU ? CV_TEST_TAG_MEMORY_512MB : CV_TEST_TAG_MEMORY_1GB),
CV_TEST_TAG_DEBUG_LONG
);
if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
(target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
{
if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
}
double l1 = default_l1;
double lInf = default_lInf;
......@@ -486,7 +507,7 @@ TEST_P(Test_ONNX_nets, Emotion_ferplus)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X
)
throw SkipTestException("Test is disabled for MyriadX");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
double l1 = default_l1;
......@@ -524,16 +545,19 @@ TEST_P(Test_ONNX_nets, Inception_v1)
{
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("Test is disabled for Myriad targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif
testONNXModels("inception_v1", pb);
}
TEST_P(Test_ONNX_nets, Shufflenet)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
(target == DNN_TARGET_OPENCL_FP16 || target == DNN_TARGET_OPENCL || target == DNN_TARGET_MYRIAD))
throw SkipTestException("");
if (backend == DNN_BACKEND_INFERENCE_ENGINE)
{
if (target == DNN_TARGET_OPENCL_FP16) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
if (target == DNN_TARGET_OPENCL) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL);
if (target == DNN_TARGET_MYRIAD) applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
}
testONNXModels("shufflenet", pb);
}
......
......@@ -120,7 +120,7 @@ TEST_P(Test_Torch_layers, run_convolution)
TEST_P(Test_Torch_layers, run_pool_max)
{
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
runTorchNet("net_pool_max", "", true);
}
......@@ -137,7 +137,7 @@ TEST_P(Test_Torch_layers, run_reshape_change_batch_size)
TEST_P(Test_Torch_layers, run_reshape)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("Test is disabled for Myriad targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
runTorchNet("net_reshape_batch");
runTorchNet("net_reshape_channels", "", false, true);
}
......@@ -153,7 +153,7 @@ TEST_P(Test_Torch_layers, run_reshape_single_sample)
TEST_P(Test_Torch_layers, run_linear)
{
if (backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
runTorchNet("net_linear_2d");
}
......@@ -210,7 +210,7 @@ TEST_P(Test_Torch_layers, net_lp_pooling)
TEST_P(Test_Torch_layers, net_conv_gemm_lrn)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
runTorchNet("net_conv_gemm_lrn", "", false, true, true,
target == DNN_TARGET_OPENCL_FP16 ? 0.046 : 0.0,
target == DNN_TARGET_OPENCL_FP16 ? 0.023 : 0.0);
......@@ -237,14 +237,14 @@ TEST_P(Test_Torch_layers, net_non_spatial)
{
if (backend == DNN_BACKEND_INFERENCE_ENGINE &&
(target == DNN_TARGET_OPENCL || target == DNN_TARGET_OPENCL_FP16))
throw SkipTestException("");
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
runTorchNet("net_non_spatial", "", false, true);
}
TEST_P(Test_Torch_layers, run_paralel)
{
if (backend != DNN_BACKEND_OPENCV || target != DNN_TARGET_CPU)
throw SkipTestException("");
throw SkipTestException(""); // TODO: Check this
runTorchNet("net_parallel", "l5_torchMerge");
}
......@@ -253,7 +253,7 @@ TEST_P(Test_Torch_layers, net_residual)
#if defined(INF_ENGINE_RELEASE) && INF_ENGINE_RELEASE == 2018050000
if (backend == DNN_BACKEND_INFERENCE_ENGINE && (target == DNN_TARGET_OPENCL ||
target == DNN_TARGET_OPENCL_FP16))
throw SkipTestException("Test is disabled for OpenVINO 2018R5");
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
#endif
runTorchNet("net_residual", "", false, true);
}
......@@ -264,7 +264,7 @@ TEST_P(Test_Torch_nets, OpenFace_accuracy)
{
#if defined(INF_ENGINE_RELEASE)
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD)
throw SkipTestException("Test is disabled for Myriad targets");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD);
#endif
checkBackend();
......@@ -339,7 +339,7 @@ TEST_P(Test_Torch_nets, ENet_accuracy)
checkBackend();
if (backend == DNN_BACKEND_INFERENCE_ENGINE ||
(backend == DNN_BACKEND_OPENCV && target == DNN_TARGET_OPENCL_FP16))
throw SkipTestException("");
applyTestTag(target == DNN_TARGET_OPENCL ? CV_TEST_TAG_DNN_SKIP_IE_OPENCL : CV_TEST_TAG_DNN_SKIP_IE_OPENCL_FP16);
Net net;
{
......@@ -391,7 +391,7 @@ TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
#if defined INF_ENGINE_RELEASE
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_MYRIAD
&& getInferenceEngineVPUType() == CV_DNN_INFERENCE_ENGINE_VPU_TYPE_MYRIAD_X)
throw SkipTestException("Test is disabled for MyriadX target");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_MYRIAD_X);
#endif
checkBackend();
......@@ -399,7 +399,7 @@ TEST_P(Test_Torch_nets, FastNeuralStyle_accuracy)
#if defined(INF_ENGINE_RELEASE)
#if INF_ENGINE_RELEASE <= 2018050000
if (backend == DNN_BACKEND_INFERENCE_ENGINE && target == DNN_TARGET_OPENCL)
throw SkipTestException("");
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE_2018R5);
#endif
#endif
......
......@@ -212,6 +212,36 @@ static inline void applyTestTag(const std::string& tag1, const std::string& tag2
{ applyTestTag_(tag1); applyTestTag_(tag2); applyTestTag_(tag3); applyTestTag_(tag4); checkTestTags(); }
/** Append global skip test tags
*/
void registerGlobalSkipTag(const std::string& skipTag);
static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2)
{ registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); }
static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3)
{ registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); }
static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4)
{ registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4); }
static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4,
const std::string& tag5)
{
registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4);
registerGlobalSkipTag(tag5);
}
static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4,
const std::string& tag5, const std::string& tag6)
{
registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4);
registerGlobalSkipTag(tag5); registerGlobalSkipTag(tag6);
}
static inline void registerGlobalSkipTag(const std::string& tag1, const std::string& tag2, const std::string& tag3, const std::string& tag4,
const std::string& tag5, const std::string& tag6, const std::string& tag7)
{
registerGlobalSkipTag(tag1); registerGlobalSkipTag(tag2); registerGlobalSkipTag(tag3); registerGlobalSkipTag(tag4);
registerGlobalSkipTag(tag5); registerGlobalSkipTag(tag6); registerGlobalSkipTag(tag7);
}
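A usage sketch for these helpers (the tags are the dnn ones defined elsewhere in this patch; duplicate registrations are ignored, see the registerGlobalSkipTag() definition further below):

// e.g. a module-wide registration, as done by initDNNTests():
//     registerGlobalSkipTag(CV_TEST_TAG_DNN_SKIP_HALIDE,
//                           CV_TEST_TAG_DNN_SKIP_OPENCL, CV_TEST_TAG_DNN_SKIP_OPENCL_FP16);
// re-registering the same tag later is a no-op:
//     registerGlobalSkipTag(CV_TEST_TAG_DNN_SKIP_OPENCL);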
class TS;
int64 readSeed(const char* str);
......@@ -758,7 +788,7 @@ int main(int argc, char **argv) \
{ \
CV_TRACE_FUNCTION(); \
{ CV_TRACE_REGION("INIT"); \
using namespace cvtest; \
using namespace cvtest; using namespace opencv_test; \
TS* ts = TS::ptr(); \
ts->init(resourcesubdir); \
__CV_TEST_EXEC_ARGS(CV_TEST_INIT0_ ## INIT0) \
......
......@@ -13,6 +13,30 @@ static bool printTestTag = false;
static std::vector<std::string> currentDirectTestTags, currentImpliedTestTags;
static std::vector<const ::testing::TestInfo*> skipped_tests;
static std::map<std::string, int>& getTestTagsSkipCounts()
{
static std::map<std::string, int> testTagsSkipCounts;
return testTagsSkipCounts;
}
static std::map<std::string, int>& getTestTagsSkipExtraCounts()
{
static std::map<std::string, int> testTagsSkipExtraCounts;
return testTagsSkipExtraCounts;
}
static void increaseTagsSkipCount(const std::string& tag, bool isMain)
{
std::map<std::string, int>& counts = isMain ? getTestTagsSkipCounts() : getTestTagsSkipExtraCounts();
std::map<std::string, int>::iterator i = counts.find(tag);
if (i == counts.end())
{
counts[tag] = 1;
}
else
{
i->second++;
}
}
static std::vector<std::string>& getTestTagsSkipList()
{
static std::vector<std::string> testSkipWithTags;
......@@ -33,6 +57,17 @@ static std::vector<std::string>& getTestTagsSkipList()
return testSkipWithTags;
}
void registerGlobalSkipTag(const std::string& skipTag)
{
std::vector<std::string>& skipTags = getTestTagsSkipList();
for (size_t i = 0; i < skipTags.size(); ++i)
{
if (skipTag == skipTags[i])
return; // duplicate
}
skipTags.push_back(skipTag);
}
static std::vector<std::string>& getTestTagsForceList()
{
static std::vector<std::string> getTestTagsForceList;
......@@ -156,7 +191,27 @@ public:
{
if (!skipped_tests.empty())
{
std::cout << "[ SKIP ] " << skipped_tests.size() << " tests via tags" << std::endl;
std::cout << "[ SKIPSTAT ] " << skipped_tests.size() << " tests via tags" << std::endl;
const std::vector<std::string>& skipTags = getTestTagsSkipList();
const std::map<std::string, int>& counts = getTestTagsSkipCounts();
const std::map<std::string, int>& countsExtra = getTestTagsSkipExtraCounts();
for (std::vector<std::string>::const_iterator i = skipTags.begin(); i != skipTags.end(); ++i)
{
int c1 = 0;
std::map<std::string, int>::const_iterator i1 = counts.find(*i);
if (i1 != counts.end()) c1 = i1->second;
int c2 = 0;
std::map<std::string, int>::const_iterator i2 = countsExtra.find(*i);
if (i2 != countsExtra.end()) c2 = i2->second;
if (c2 > 0)
{
std::cout << "[ SKIPSTAT ] TAG='" << *i << "' skip " << c1 << " tests (" << c2 << " times in extra skip list)" << std::endl;
}
else if (c1 > 0)
{
std::cout << "[ SKIPSTAT ] TAG='" << *i << "' skip " << c1 << " tests" << std::endl;
}
}
}
skipped_tests.clear();
}
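With these counters the end-of-run summary takes roughly the following form (the counts and tag selection here are hypothetical; the format strings are the ones used above):

[ SKIPSTAT ] 9 tests via tags
[ SKIPSTAT ] TAG='dnn_skip_ie_myriad2' skip 4 tests
[ SKIPSTAT ] TAG='dnn_skip_ie_ocl_fp16' skip 5 tests (3 times in extra skip list)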
......@@ -255,13 +310,14 @@ void checkTestTags()
if (isTestTagForced(testTag))
return;
}
std::string skip_message;
for (size_t i = 0; i < testTags.size(); ++i)
{
const std::string& testTag = testTags[i];
if (isTestTagSkipped(testTag, skipTag))
{
skipped_tests.push_back(::testing::UnitTest::GetInstance()->current_test_info());
throw SkipTestException("Test with tag '" + testTag + "' is skipped ('" + skipTag + "' is in skip list)");
increaseTagsSkipCount(skipTag, skip_message.empty());
if (skip_message.empty()) skip_message = "Test with tag '" + testTag + "' is skipped ('" + skipTag + "' is in skip list)";
}
}
const std::vector<std::string>& testTagsImplied = currentImpliedTestTags;
......@@ -270,10 +326,16 @@ void checkTestTags()
const std::string& testTag = testTagsImplied[i];
if (isTestTagSkipped(testTag, skipTag))
{
skipped_tests.push_back(::testing::UnitTest::GetInstance()->current_test_info());
throw SkipTestException("Test with tag '" + testTag + "' is skipped ('" + skipTag + "' is in skip list)");
increaseTagsSkipCount(skipTag, skip_message.empty());
if (skip_message.empty()) skip_message = "Test with tag '" + testTag + "' is skipped (implied '" + skipTag + "' is in skip list)";
}
}
if (!skip_message.empty())
{
skipped_tests.push_back(::testing::UnitTest::GetInstance()->current_test_info());
throw SkipTestException(skip_message);
}
}
static bool applyTestTagImpl(const std::string& tag, bool direct = false)
......