Commit c557193b authored by Alexander Alekhin

dnn(test): use dnnBackendsAndTargets() param generator

parent 3e6b3a68
......@@ -666,15 +666,9 @@ PERF_TEST_P_(Conv, conv)
SANITY_CHECK_NOTHING();
}
static const tuple<Backend, Target> testBackendsAndTargets[] = {
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
};
INSTANTIATE_TEST_CASE_P(/**/, Conv, Combine(
ConvParamID::all(),
testing::ValuesIn(testBackendsAndTargets)
dnnBackendsAndTargets(false, false) // defined in ../test/test_common.hpp
));
} // namespace
......@@ -14,10 +14,7 @@
namespace opencv_test {
CV_ENUM(DNNBackend, DNN_BACKEND_DEFAULT, DNN_BACKEND_HALIDE, DNN_BACKEND_INFERENCE_ENGINE, DNN_BACKEND_OPENCV)
CV_ENUM(DNNTarget, DNN_TARGET_CPU, DNN_TARGET_OPENCL, DNN_TARGET_OPENCL_FP16, DNN_TARGET_MYRIAD)
class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<DNNBackend, DNNTarget> >
class DNNTestNetwork : public ::perf::TestBaseWithParam< tuple<Backend, Target> >
{
public:
dnn::Backend backend;
......@@ -269,22 +266,6 @@ PERF_TEST_P_(DNNTestNetwork, Inception_v2_Faster_RCNN)
Mat(cv::Size(800, 600), CV_32FC3));
}
const tuple<DNNBackend, DNNTarget> testCases[] = {
#ifdef HAVE_HALIDE
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_HALIDE, DNN_TARGET_CPU),
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_HALIDE, DNN_TARGET_OPENCL),
#endif
#ifdef HAVE_INF_ENGINE
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL),
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16),
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD),
#endif
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_CPU),
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
tuple<DNNBackend, DNNTarget>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
};
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, testing::ValuesIn(testCases));
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, dnnBackendsAndTargets());
} // namespace
......@@ -285,21 +285,6 @@ TEST_P(DNNTestNetwork, FastNeuralStyle_eccv16)
processNet("dnn/fast_neural_style_eccv16_starry_night.t7", "", inp, "", "", l1, lInf);
}
const tuple<Backend, Target> testCases[] = {
#ifdef HAVE_HALIDE
tuple<Backend, Target>(DNN_BACKEND_HALIDE, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_HALIDE, DNN_TARGET_OPENCL),
#endif
#ifdef HAVE_INF_ENGINE
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD),
#endif
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
};
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, testing::ValuesIn(testCases));
INSTANTIATE_TEST_CASE_P(/*nothing*/, DNNTestNetwork, dnnBackendsAndTargets(true, true, false));
}} // namespace
......@@ -42,6 +42,10 @@
#ifndef __OPENCV_TEST_COMMON_HPP__
#define __OPENCV_TEST_COMMON_HPP__
#ifdef HAVE_OPENCL
#include "opencv2/core/ocl.hpp"
#endif
namespace cv { namespace dnn {
CV__DNN_EXPERIMENTAL_NS_BEGIN
static inline void PrintTo(const cv::dnn::Backend& v, std::ostream* os)
......@@ -227,4 +231,50 @@ static inline bool readFileInMemory(const std::string& filename, std::string& co
return true;
}
namespace opencv_test {
using namespace cv::dnn;
// Parameter generator listing the (backend, target) pairs that are usable in
// the current build *and* runtime environment. Pairs are appended only when
// the corresponding backend was compiled in (HAVE_* guards) and the required
// device is actually available at runtime (OpenCL context, Myriad stick).
//
// @param withInferenceEngine  include DNN_BACKEND_INFERENCE_ENGINE pairs
//                             (effective only when built with HAVE_INF_ENGINE)
// @param withHalide           include DNN_BACKEND_HALIDE pairs
//                             (effective only when built with HAVE_HALIDE)
// @param withCpuOCV           include the DNN_BACKEND_OPENCV / DNN_TARGET_CPU pair
//                             (callers pass false when OpenCV/CPU is the reference)
// @return gtest parameter generator over the enabled tuple<Backend, Target> values
//
// NOTE: declared 'static inline' (not plain 'static') — this function lives in a
// header, and 'static inline' matches the convention used for the sibling helper
// dnnBackendsAndTargetsWithHalide() while avoiding -Wunused-function warnings in
// translation units that include this header without calling it.
static inline testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargets(
    bool withInferenceEngine = true,
    bool withHalide = false,
    bool withCpuOCV = true
)
{
    std::vector<tuple<Backend, Target> > targets;
#ifdef HAVE_HALIDE
    if (withHalide)
    {
        targets.push_back(make_tuple(DNN_BACKEND_HALIDE, DNN_TARGET_CPU));
        // OpenCL targets are registered only when an OpenCL runtime is usable.
        if (cv::ocl::useOpenCL())
            targets.push_back(make_tuple(DNN_BACKEND_HALIDE, DNN_TARGET_OPENCL));
    }
#endif
#ifdef HAVE_INF_ENGINE
    if (withInferenceEngine)
    {
        targets.push_back(make_tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU));
        if (cv::ocl::useOpenCL())
        {
            targets.push_back(make_tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL));
            targets.push_back(make_tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16));
        }
        // Myriad is added only when the device is detected at runtime.
        if (checkMyriadTarget())
            targets.push_back(make_tuple(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD));
    }
#endif
    if (withCpuOCV)
        targets.push_back(make_tuple(DNN_BACKEND_OPENCV, DNN_TARGET_CPU));
#ifdef HAVE_OPENCL
    if (cv::ocl::useOpenCL())
    {
        targets.push_back(make_tuple(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL));
        targets.push_back(make_tuple(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16));
    }
#endif
    // NOTE(review): if every flag is false (or no runtime devices are present),
    // 'targets' is empty and gtest will instantiate zero test cases — presumably
    // intended, but worth confirming against the callers.
    return testing::ValuesIn(targets);
}
} // namespace
#endif
......@@ -44,23 +44,9 @@ static void test(LayerParams& params, Mat& input, Backend backendId, Target targ
test(input, net, backendId, targetId, skipCheck);
}
static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsWithHalide()
static inline testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargetsWithHalide()
{
static const tuple<Backend, Target> testCases[] = {
#ifdef HAVE_HALIDE
tuple<Backend, Target>(DNN_BACKEND_HALIDE, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_HALIDE, DNN_TARGET_OPENCL),
#endif
#ifdef HAVE_INF_ENGINE
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD),
#endif
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
};
return testing::ValuesIn(testCases);
return dnnBackendsAndTargets(true, true, false); // OpenCV/CPU is used as reference
}
class Test_Halide_layers : public DNNTestLayer {};
......
......@@ -66,22 +66,6 @@ static testing::internal::ParamGenerator<Target> availableDnnTargets()
return testing::ValuesIn(targets);
}
static testing::internal::ParamGenerator<tuple<Backend, Target> > dnnBackendsAndTargets()
{
static const tuple<Backend, Target> testCases[] = {
#ifdef HAVE_INF_ENGINE
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_OPENCL_FP16),
tuple<Backend, Target>(DNN_BACKEND_INFERENCE_ENGINE, DNN_TARGET_MYRIAD),
#endif
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_CPU),
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL),
tuple<Backend, Target>(DNN_BACKEND_OPENCV, DNN_TARGET_OPENCL_FP16)
};
return testing::ValuesIn(testCases);
}
class DNNTestLayer : public TestWithParam<tuple<Backend, Target> >
{
public:
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.