Commit c05a7e01 authored by Maksim Shabunin's avatar Maksim Shabunin

Merge pull request #258 from sbokov/AddingConfidence

Added confidence support to disparity filtering
parents 8a05bdbc 7c421ba2
......@@ -55,3 +55,25 @@
year={2013},
organization={IEEE}
}
@article{Min2014,
title={Fast global image smoothing based on weighted least squares},
author={Min, Dongbo and Choi, Sunghwan and Lu, Jiangbo and Ham, Bumsub and Sohn, Kwanghoon and Do, Minh N},
journal={Image Processing, IEEE Transactions on},
volume={23},
number={12},
pages={5638--5653},
year={2014},
publisher={IEEE}
}
@inproceedings{Farbman2008,
title={Edge-preserving decompositions for multi-scale tone and detail manipulation},
author={Farbman, Zeev and Fattal, Raanan and Lischinski, Dani and Szeliski, Richard},
booktitle={ACM Transactions on Graphics (TOG)},
volume={27},
number={3},
pages={67},
year={2008},
organization={ACM}
}
......@@ -38,6 +38,7 @@
#define __OPENCV_XIMGPROC_HPP__
#include "ximgproc/edge_filter.hpp"
#include "ximgproc/disparity_filter.hpp"
#include "ximgproc/structured_edge_detection.hpp"
#include "ximgproc/seeds.hpp"
......
......@@ -43,49 +43,147 @@
namespace cv {
namespace ximgproc {
//! @addtogroup ximgproc_filters
//! @{
/** @brief Main interface for all disparity map filters.
*/
class CV_EXPORTS_W DisparityFilter : public Algorithm
{
public:
CV_WRAP virtual void filter(InputArray disparity_map, InputArray left_view, OutputArray filtered_disparity_map,Rect ROI) = 0;
};
/////////////////////////////////////////////////////////////////////////////////////////////////
/** @brief Apply filtering to the disparity map.
class CV_EXPORTS_W DisparityDTFilter : public DisparityFilter
{
public:
CV_WRAP virtual double getSigmaSpatial() = 0;
CV_WRAP virtual void setSigmaSpatial(double _sigmaSpatial) = 0;
CV_WRAP virtual double getSigmaColor() = 0;
CV_WRAP virtual void setSigmaColor(double _sigmaColor) = 0;
};
@param disparity_map_left disparity map of the left view, 1 channel, CV_16S type. Implicitly assumes that disparity
values are scaled by 16 (one-pixel disparity corresponds to the value of 16 in the disparity map). Disparity map
can have any resolution, it will be automatically resized to fit left_view resolution.
CV_EXPORTS_W
Ptr<DisparityDTFilter> createDisparityDTFilter();
@param left_view left view of the original stereo-pair to guide the filtering process, 8-bit single-channel
or three-channel image.
class CV_EXPORTS_W DisparityGuidedFilter : public DisparityFilter
{
public:
CV_WRAP virtual double getEps() = 0;
CV_WRAP virtual void setEps(double _eps) = 0;
CV_WRAP virtual int getRadius() = 0;
CV_WRAP virtual void setRadius(int _radius) = 0;
};
@param filtered_disparity_map output disparity map.
CV_EXPORTS_W
Ptr<DisparityGuidedFilter> createDisparityGuidedFilter();
@param ROI region of the disparity map to filter.
@param disparity_map_right optional argument, some implementations might also use the disparity map
of the right view to compute confidence maps, for instance.
@param right_view optional argument, some implementations might also use the right view of the original
stereo-pair.
*/
CV_WRAP virtual void filter(InputArray disparity_map_left, InputArray left_view, OutputArray filtered_disparity_map, Rect ROI, InputArray disparity_map_right = Mat(), InputArray right_view = Mat()) = 0;
};
/** @brief Disparity map filter based on Weighted Least Squares filter (in form of Fast Global Smoother that
is a lot faster than traditional Weighted Least Squares filter implementations) and optional use of
left-right-consistency-based confidence to refine the results in half-occlusions and uniform areas.
*/
class CV_EXPORTS_W DisparityWLSFilter : public DisparityFilter
{
public:
/** filter parameters */
/** @brief Lambda is a parameter defining the amount of regularization during filtering. Larger values force
filtered disparity map edges to adhere more to source image edges. Typical value is 8000.
*/
CV_WRAP virtual double getLambda() = 0;
/** @see getLambda */
CV_WRAP virtual void setLambda(double _lambda) = 0;
/** @brief SigmaColor is a parameter defining how sensitive the filtering process is to source image edges.
Large values can lead to disparity leakage through low-contrast edges. Small values can make the filter too
sensitive to noise and textures in the source image. Typical values range from 0.8 to 2.0.
*/
CV_WRAP virtual double getSigmaColor() = 0;
/** @see getSigmaColor */
CV_WRAP virtual void setSigmaColor(double _sigma_color) = 0;
/** confidence-related parameters */
/** @brief LRCthresh is a threshold of disparity difference used in left-right-consistency check during
confidence map computation. The default value of 24 (1.5 pixels) is virtually always good enough.
*/
CV_WRAP virtual int getLRCthresh() = 0;
/** @see getLRCthresh */
CV_WRAP virtual void setLRCthresh(int _LRC_thresh) = 0;
/** @brief DepthDiscontinuityRadius is a parameter used in confidence computation. It defines the size of
low-confidence regions around depth discontinuities. For typical window sizes used in stereo matching the
optimal value is around 5.
*/
CV_WRAP virtual int getDepthDiscontinuityRadius() = 0;
/** @see getDepthDiscontinuityRadius */
CV_WRAP virtual void setDepthDiscontinuityRadius(int _disc_radius) = 0;
/** @brief Get the confidence map that was used in the last filter call. It is a CV_32F one-channel image
with values ranging from 0.0 (totally untrusted regions of the raw disparity map) to 255.0 (regions containing
correct disparity values with a high degree of confidence).
*/
CV_WRAP virtual Mat getConfidenceMap() = 0;
};
/** @brief Factory method, create instance of DisparityWLSFilter and execute the initialization routines.
@param use_confidence filtering with confidence requires two disparity maps (for the left and right views) and is
approximately two times slower. However, quality is typically significantly better.
*/
CV_EXPORTS_W
Ptr<DisparityWLSFilter> createDisparityWLSFilter();
Ptr<DisparityWLSFilter> createDisparityWLSFilter(bool use_confidence);
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
/** @brief Function for reading ground truth disparity maps. Supports basic Middlebury
and MPI-Sintel formats. Note that the resulting disparity map is scaled by 16.
@param src_path path to the image, containing ground-truth disparity map
@param dst output disparity map, CV_16S depth
@result returns zero if successfully read the ground truth
*/
CV_EXPORTS
int readGT(String src_path,OutputArray dst);
/** @brief Function for computing mean square error for disparity maps
@param GT ground truth disparity map
@param src disparity map to evaluate
@param ROI region of interest
@result returns mean square error between GT and src
*/
CV_EXPORTS
double computeMSE(InputArray GT, InputArray src, Rect ROI);
/** @brief Function for computing the percent of "bad" pixels in the disparity map
(pixels where error is higher than a specified threshold)
@param GT ground truth disparity map
@param src disparity map to evaluate
@param ROI region of interest
@param thresh threshold used to determine "bad" pixels
@result returns the percent of pixels in the ROI whose error relative to GT exceeds thresh
*/
CV_EXPORTS
double computeBadPixelPercent(InputArray GT, InputArray src, Rect ROI, int thresh=24/*1.5 pixels*/);
/** @brief Function for creating a disparity map visualization (clamped CV_8U image)
@param src input disparity map (CV_16S depth)
@param dst output visualization
@param scale disparity map will be multiplied by this value for visualization
*/
CV_EXPORTS
void getDisparityVis(InputArray src,OutputArray dst,double scale=1.0);
//! @}
}
}
#endif
......
......@@ -316,19 +316,69 @@ proportional to sigmaSpace .
CV_EXPORTS_W
void jointBilateralFilter(InputArray joint, InputArray src, OutputArray dst, int d, double sigmaColor, double sigmaSpace, int borderType = BORDER_DEFAULT);
//! @}
//////////////////////////////////////////////////////////////////////////
//////////////////////////////////////////////////////////////////////////
class CV_EXPORTS_W WeightedLeastSquaresFilter : public Algorithm
/** @brief Interface for implementations of Fast Global Smoother filter.
For more details about this filter see @cite Min2014 and @cite Farbman2008 .
*/
class CV_EXPORTS_W FastGlobalSmootherFilter : public Algorithm
{
public:
/** @brief Apply smoothing operation to the source image.
@param src source image for filtering with unsigned 8-bit or signed 16-bit or floating-point 32-bit depth and up to 4 channels.
@param dst destination image.
*/
CV_WRAP virtual void filter(InputArray src, OutputArray dst) = 0;
virtual ~WeightedLeastSquaresFilter();
};
CV_EXPORTS_W Ptr<WeightedLeastSquaresFilter> createWeightedLeastSquaresFilter(InputArray guide, double lambda, double sigma_color, int num_iter=3);
/** @brief Factory method, create instance of FastGlobalSmootherFilter and execute the initialization routines.
@param guide image serving as guide for filtering. It should have 8-bit depth and either 1 or 3 channels.
@param lambda parameter defining the amount of regularization
@param sigma_color parameter, that is similar to color space sigma in bilateralFilter.
@param lambda_attenuation internal parameter, defining how much lambda decreases after each iteration. Normally,
it should be 0.25. Setting it to 1.0 may lead to streaking artifacts.
CV_EXPORTS_W void weightedLeastSquaresFilter(InputArray guide, InputArray src, OutputArray dst, double lambda, double sigma_color, int num_iter=3);
@param num_iter number of iterations used for filtering, 3 is usually enough.
For more details about Fast Global Smoother parameters, see the original paper @cite Min2014. However, please note that
there are several differences. Lambda attenuation described in the paper is implemented a bit differently so do not
expect the results to be identical to those from the paper; sigma_color values from the paper should be multiplied by 255.0 to
achieve the same effect. Also, in case of image filtering where source and guide image are the same, authors
propose to dynamically update the guide image after each iteration. To maximize the performance this feature
was not implemented here.
*/
CV_EXPORTS_W Ptr<FastGlobalSmootherFilter> createFastGlobalSmootherFilter(InputArray guide, double lambda, double sigma_color, double lambda_attenuation=0.25, int num_iter=3);
/** @brief Simple one-line Fast Global Smoother filter call. If you have multiple images to filter with the same
guide then use FastGlobalSmootherFilter interface to avoid extra computations.
@param guide image serving as guide for filtering. It should have 8-bit depth and either 1 or 3 channels.
@param src source image for filtering with unsigned 8-bit or signed 16-bit or floating-point 32-bit depth and up to 4 channels.
@param dst destination image.
@param lambda parameter defining the amount of regularization
@param sigma_color parameter, that is similar to color space sigma in bilateralFilter.
@param lambda_attenuation internal parameter, defining how much lambda decreases after each iteration. Normally,
it should be 0.25. Setting it to 1.0 may lead to streaking artifacts.
@param num_iter number of iterations used for filtering, 3 is usually enough.
*/
CV_EXPORTS_W void fastGlobalSmootherFilter(InputArray guide, InputArray src, OutputArray dst, double lambda, double sigma_color, double lambda_attenuation=0.25, int num_iter=3);
//! @}
}
}
#endif
......
/*
* By downloading, copying, installing or using the software you agree to this license.
* If you do not agree to this license, do not download, install,
* copy or use the software.
*
*
* License Agreement
* For Open Source Computer Vision Library
* (3 - clause BSD License)
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met :
*
* *Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and / or other materials provided with the distribution.
*
* * Neither the names of the copyright holders nor the names of the contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* This software is provided by the copyright holders and contributors "as is" and
* any express or implied warranties, including, but not limited to, the implied
* warranties of merchantability and fitness for a particular purpose are disclaimed.
* In no event shall copyright holders or contributors be liable for any direct,
* indirect, incidental, special, exemplary, or consequential damages
* (including, but not limited to, procurement of substitute goods or services;
* loss of use, data, or profits; or business interruption) however caused
* and on any theory of liability, whether in contract, strict liability,
* or tort(including negligence or otherwise) arising in any way out of
* the use of this software, even if advised of the possibility of such damage.
*/
#include "perf_precomp.hpp"
#include "opencv2/ximgproc/disparity_filter.hpp"
namespace cvtest
{
using std::tr1::tuple;
using std::tr1::get;
using namespace perf;
using namespace testing;
using namespace cv;
using namespace cv::ximgproc;
void MakeArtificialExample(RNG rng, Mat& dst_left_view, Mat& dst_left_disparity_map, Mat& dst_right_disparity_map, Rect& dst_ROI);
CV_ENUM(GuideTypes, CV_8UC3);
CV_ENUM(SrcTypes, CV_16S);
typedef tuple<GuideTypes, SrcTypes, Size, bool, bool> DisparityWLSParams;
typedef TestBaseWithParam<DisparityWLSParams> DisparityWLSFilterPerfTest;
// Performance test for DisparityWLSFilter on a synthetic 720p stereo pair.
// Sweeps guide type x src type x confidence mode x half-resolution filtering
// (the last two are the bool parameters).
PERF_TEST_P( DisparityWLSFilterPerfTest, perf, Combine(GuideTypes::all(), SrcTypes::all(), Values(sz720p), Values(true,false), Values(true,false)) )
{
RNG rng(0);
DisparityWLSParams params = GetParam();
int guideType = get<0>(params);
int srcType = get<1>(params);
Size sz = get<2>(params);
bool use_conf = get<3>(params);
bool use_downscale = get<4>(params);
Mat guide(sz, guideType);
Mat disp_left(sz, srcType);
Mat disp_right(sz, srcType);
Mat dst(sz, srcType);
Rect ROI;
// Fill guide/disparity maps with a synthetic foreground-rectangle scene:
MakeArtificialExample(rng,guide,disp_left,disp_right,ROI);
if(use_downscale)
{
// Simulate matching performed at half resolution: disparities are halved
// along with the image size, and the ROI is scaled accordingly.
resize(disp_left,disp_left,Size(),0.5,0.5);
disp_left/=2;
resize(disp_right,disp_right,Size(),0.5,0.5);
disp_right/=2;
ROI = Rect(ROI.x/2,ROI.y/2,ROI.width/2,ROI.height/2);
}
cv::setNumThreads(cv::getNumberOfCPUs());
TEST_CYCLE_N(10)
{
// Filter construction is inside the cycle on purpose: the instance caches
// per-call state, so creation cost is part of the measured scenario.
Ptr<DisparityWLSFilter> wls_filter = createDisparityWLSFilter(use_conf);
wls_filter->filter(disp_left,guide,dst,ROI,disp_right);
}
SANITY_CHECK(dst);
}
// Builds a synthetic stereo example: a uniform background with a single
// foreground rectangle shifted horizontally by a fixed disparity, plus
// Gaussian noise on both the view and the disparity maps.
// dst_left_view            - output left view (must be pre-allocated; 1 or 3 channels)
// dst_left_disparity_map   - output left-view disparity map (16*disparity convention)
// dst_right_disparity_map  - output right-view disparity map (negative values)
// dst_ROI                  - output region excluding the left band that has no
//                            right-view correspondence
// NOTE(review): rng is taken by value, so the caller's RNG state is not
// advanced by this function — confirm this is intentional.
void MakeArtificialExample(RNG rng, Mat& dst_left_view, Mat& dst_left_disparity_map, Mat& dst_right_disparity_map, Rect& dst_ROI)
{
int w = dst_left_view.cols;
int h = dst_left_view.rows;
//params:
unsigned char bg_level = (unsigned char)rng.uniform(0.0,255.0);
unsigned char fg_level = (unsigned char)rng.uniform(0.0,255.0);
int rect_width = (int)rng.uniform(w/16,w/2);
int rect_height = (int)rng.uniform(h/16,h/2);
int rect_disparity = (int)(0.15*w); //typical maximum disparity value
double sigma = 6.0;
// Center the foreground rectangle:
int rect_x_offset = (w-rect_width) /2;
int rect_y_offset = (h-rect_height)/2;
// Paint the uniform background:
if(dst_left_view.channels()==3)
dst_left_view = Scalar(Vec3b(bg_level,bg_level,bg_level));
else
dst_left_view = Scalar(bg_level);
dst_left_disparity_map = Scalar(0);
dst_right_disparity_map = Scalar(0);
// Paint the foreground rectangle (views into the destination Mats):
Mat dst_left_view_rect = Mat(dst_left_view, Rect(rect_x_offset,rect_y_offset,rect_width,rect_height));
Mat dst_left_disparity_map_rect = Mat(dst_left_disparity_map,Rect(rect_x_offset,rect_y_offset,rect_width,rect_height));
if(dst_left_view.channels()==3)
dst_left_view_rect = Scalar(Vec3b(fg_level,fg_level,fg_level));
else
dst_left_view_rect = Scalar(fg_level);
// Disparities are stored scaled by 16 (one pixel == 16):
dst_left_disparity_map_rect = Scalar(16*rect_disparity);
// In the right view the rectangle appears shifted left by the disparity,
// and right-view disparities are negative by convention:
rect_x_offset-=rect_disparity;
Mat dst_right_disparity_map_rect = Mat(dst_right_disparity_map,Rect(rect_x_offset,rect_y_offset,rect_width,rect_height));
dst_right_disparity_map_rect = Scalar(-16*rect_disparity);
//add some gaussian noise:
unsigned char *l;
short *ldisp, *rdisp;
for(int i=0;i<h;i++)
{
l = dst_left_view.ptr(i);
ldisp = (short*)dst_left_disparity_map.ptr(i);
rdisp = (short*)dst_right_disparity_map.ptr(i);
if(dst_left_view.channels()==3)
{
// 3-channel view: perturb each channel, advance pointer by 3 per pixel.
for(int j=0;j<w;j++)
{
l[0] = saturate_cast<unsigned char>(l[0] + rng.gaussian(sigma));
l[1] = saturate_cast<unsigned char>(l[1] + rng.gaussian(sigma));
l[2] = saturate_cast<unsigned char>(l[2] + rng.gaussian(sigma));
l+=3;
ldisp[0] = saturate_cast<short>(ldisp[0] + rng.gaussian(sigma));
ldisp++;
rdisp[0] = saturate_cast<short>(rdisp[0] + rng.gaussian(sigma));
rdisp++;
}
}
else
{
// Single-channel view: same noise model, one sample per pixel.
for(int j=0;j<w;j++)
{
l[0] = saturate_cast<unsigned char>(l[0] + rng.gaussian(sigma));
l++;
ldisp[0] = saturate_cast<short>(ldisp[0] + rng.gaussian(sigma));
ldisp++;
rdisp[0] = saturate_cast<short>(rdisp[0] + rng.gaussian(sigma));
rdisp++;
}
}
}
// Exclude the left band of width rect_disparity that has no right-view match:
dst_ROI = Rect(rect_disparity,0,w-rect_disparity,h);
}
}
/*
* By downloading, copying, installing or using the software you agree to this license.
* If you do not agree to this license, do not download, install,
* copy or use the software.
*
*
* License Agreement
* For Open Source Computer Vision Library
* (3 - clause BSD License)
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met :
*
* *Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and / or other materials provided with the distribution.
*
* * Neither the names of the copyright holders nor the names of the contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* This software is provided by the copyright holders and contributors "as is" and
* any express or implied warranties, including, but not limited to, the implied
* warranties of merchantability and fitness for a particular purpose are disclaimed.
* In no event shall copyright holders or contributors be liable for any direct,
* indirect, incidental, special, exemplary, or consequential damages
* (including, but not limited to, procurement of substitute goods or services;
* loss of use, data, or profits; or business interruption) however caused
* and on any theory of liability, whether in contract, strict liability,
* or tort(including negligence or otherwise) arising in any way out of
* the use of this software, even if advised of the possibility of such damage.
*/
#include "perf_precomp.hpp"
namespace cvtest
{
using std::tr1::tuple;
using std::tr1::get;
using namespace perf;
using namespace testing;
using namespace cv;
using namespace cv::ximgproc;
CV_ENUM(GuideTypes, CV_8UC1, CV_8UC3);
CV_ENUM(SrcTypes, CV_8UC1, CV_8UC3, CV_16SC1, CV_16SC3, CV_32FC1, CV_32FC3);
typedef tuple<GuideTypes, SrcTypes, Size> FGSParams;
typedef TestBaseWithParam<FGSParams> FGSFilterPerfTest;
// Performance test for fastGlobalSmootherFilter on 720p inputs, sweeping
// guide type x src type. Lambda/sigma are randomized per cycle so the
// timing covers the parameter range rather than one fixed setting.
PERF_TEST_P( FGSFilterPerfTest, perf, Combine(GuideTypes::all(), SrcTypes::all(), Values(sz720p)) )
{
RNG rng(0);
FGSParams params = GetParam();
int guideType = get<0>(params);
int srcType = get<1>(params);
Size sz = get<2>(params);
Mat guide(sz, guideType);
Mat src(sz, srcType);
Mat dst(sz, srcType);
// WARMUP_RNG fills guide/src with random data before timing starts:
declare.in(guide, src, WARMUP_RNG).out(dst).tbb_threads(cv::getNumberOfCPUs());
cv::setNumThreads(cv::getNumberOfCPUs());
TEST_CYCLE_N(10)
{
double lambda = rng.uniform(500.0, 10000.0);
double sigma = rng.uniform(1.0, 100.0);
fastGlobalSmootherFilter(guide,src,dst,lambda,sigma);
}
SANITY_CHECK(dst);
}
}
\ No newline at end of file
This diff is collapsed.
This diff is collapsed.
This diff is collapsed.
/*
* By downloading, copying, installing or using the software you agree to this license.
* If you do not agree to this license, do not download, install,
* copy or use the software.
*
*
* License Agreement
* For Open Source Computer Vision Library
* (3 - clause BSD License)
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met :
*
* *Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and / or other materials provided with the distribution.
*
* * Neither the names of the copyright holders nor the names of the contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* This software is provided by the copyright holders and contributors "as is" and
* any express or implied warranties, including, but not limited to, the implied
* warranties of merchantability and fitness for a particular purpose are disclaimed.
* In no event shall copyright holders or contributors be liable for any direct,
* indirect, incidental, special, exemplary, or consequential damages
* (including, but not limited to, procurement of substitute goods or services;
* loss of use, data, or profits; or business interruption) however caused
* and on any theory of liability, whether in contract, strict liability,
* or tort(including negligence or otherwise) arising in any way out of
* the use of this software, even if advised of the possibility of such damage.
*/
#include "test_precomp.hpp"
#include "opencv2/ximgproc/disparity_filter.hpp"
namespace cvtest
{
using namespace std;
using namespace std::tr1;
using namespace testing;
using namespace perf;
using namespace cv;
using namespace cv::ximgproc;
static string getDataDir()
{
return cvtest::TS::ptr()->get_data_path();
}
CV_ENUM(SrcTypes, CV_16S);
CV_ENUM(GuideTypes, CV_8UC1, CV_8UC3)
typedef tuple<Size, SrcTypes, GuideTypes, bool, bool> DisparityWLSParams;
typedef TestWithParam<DisparityWLSParams> DisparityWLSFilterTest;
// Regression test: filters a stored raw stereo disparity pair with confidence
// enabled and checks that MSE and bad-pixel percentage against ground truth do
// not regress by more than 1% relative to stored reference values.
TEST(DisparityWLSFilterTest, ReferenceAccuracy)
{
string dir = getDataDir() + "cv/disparityfilter";
Mat left = imread(dir + "/left_view.png",IMREAD_COLOR);
ASSERT_FALSE(left.empty());
Mat left_disp = imread(dir + "/disparity_left_raw.png",IMREAD_GRAYSCALE);
ASSERT_FALSE(left_disp.empty());
// Convert to the 16*disparity fixed-point convention:
left_disp.convertTo(left_disp,CV_16S,16);
Mat right_disp = imread(dir + "/disparity_right_raw.png",IMREAD_GRAYSCALE);
ASSERT_FALSE(right_disp.empty());
// Right-view disparities are negative, hence the -16 scale:
right_disp.convertTo(right_disp,CV_16S,-16);
Mat GT;
// readGT returns zero on success:
ASSERT_FALSE(readGT(dir + "/GT.png",GT));
FileStorage ROI_storage( dir + "/ROI.xml", FileStorage::READ );
Rect ROI((int)ROI_storage["x"],(int)ROI_storage["y"],(int)ROI_storage["width"],(int)ROI_storage["height"]);
// Reference metrics were produced by a known-good build:
FileStorage reference_res( dir + "/reference_accuracy.xml", FileStorage::READ );
double ref_MSE = (double)reference_res["MSE_after"];
double ref_BadPercent = (double)reference_res["BadPercent_after"];
cv::setNumThreads(cv::getNumberOfCPUs());
Mat res;
// true => use the confidence (left-right consistency) mode:
Ptr<DisparityWLSFilter> wls_filter = createDisparityWLSFilter(true);
wls_filter->setLambda(8000.0);
wls_filter->setSigmaColor(0.5);
wls_filter->filter(left_disp,left,res,ROI,right_disp);
double MSE = computeMSE(GT,res,ROI);
double BadPercent = computeBadPixelPercent(GT,res,ROI);
// Allow 1% relative tolerance over the stored reference:
double eps = 0.01;
EXPECT_LE(MSE,ref_MSE+eps*ref_MSE);
EXPECT_LE(BadPercent,ref_BadPercent+eps*ref_BadPercent);
}
// Reproducibility test: the filter must produce (nearly) identical results
// when run with all available cores and with a single core. Random inputs,
// lambda and sigma are re-drawn on every loop iteration.
TEST_P(DisparityWLSFilterTest, MultiThreadReproducibility)
{
    if (cv::getNumberOfCPUs() == 1)
        return;

    double MAX_DIF = 1.0;              // max allowed per-pixel difference
    double MAX_MEAN_DIF = 1.0 / 256.0; // max allowed mean per-pixel difference
    int loopsCount = 2;
    RNG rng(0);

    DisparityWLSParams params = GetParam();
    Size size = get<0>(params);
    int srcType = get<1>(params);
    int guideType = get<2>(params);
    bool use_conf = get<3>(params);
    bool use_downscale = get<4>(params);

    Mat left(size, guideType);
    randu(left, 0, 255);
    Mat left_disp(size,srcType);
    int max_disp = (int)(size.width*0.1);
    randu(left_disp, 0, max_disp-1);
    Mat right_disp(size,srcType);
    // Bug fix: this call previously filled left_disp a second time, leaving
    // right_disp as uninitialized memory. Right-view disparities are negative
    // by convention, hence the [-max_disp+1, 0) range.
    randu(right_disp, -max_disp+1, 0);
    Rect ROI(max_disp,0,size.width-max_disp,size.height);
    if(use_downscale)
    {
        // Simulate half-resolution matching; note disparity values themselves
        // are intentionally left unscaled here (reproducibility is all that
        // matters for this test, not physical consistency).
        resize(left_disp,left_disp,Size(),0.5,0.5);
        resize(right_disp,right_disp,Size(),0.5,0.5);
        ROI = Rect(ROI.x/2,ROI.y/2,ROI.width/2,ROI.height/2);
    }

    for (int iter = 0; iter <= loopsCount; iter++)
    {
        double lambda = rng.uniform(100.0, 10000.0);
        double sigma  = rng.uniform(1.0, 100.0);
        Ptr<DisparityWLSFilter> wls_filter = createDisparityWLSFilter(use_conf);
        wls_filter->setLambda(lambda);
        wls_filter->setSigmaColor(sigma);

        cv::setNumThreads(cv::getNumberOfCPUs());
        Mat resMultiThread;
        wls_filter->filter(left_disp,left,resMultiThread,ROI,right_disp);

        cv::setNumThreads(1);
        Mat resSingleThread;
        wls_filter->filter(left_disp,left,resSingleThread,ROI,right_disp);

        EXPECT_LE(cv::norm(resSingleThread, resMultiThread, NORM_INF), MAX_DIF);
        EXPECT_LE(cv::norm(resSingleThread, resMultiThread, NORM_L1), MAX_MEAN_DIF*left.total());
    }
}
INSTANTIATE_TEST_CASE_P(FullSet,DisparityWLSFilterTest,Combine(Values(szODD, szQVGA), SrcTypes::all(), GuideTypes::all(),Values(true,false),Values(true,false)));
}
\ No newline at end of file
/*
* By downloading, copying, installing or using the software you agree to this license.
* If you do not agree to this license, do not download, install,
* copy or use the software.
*
*
* License Agreement
* For Open Source Computer Vision Library
* (3 - clause BSD License)
*
* Redistribution and use in source and binary forms, with or without modification,
* are permitted provided that the following conditions are met :
*
* *Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and / or other materials provided with the distribution.
*
* * Neither the names of the copyright holders nor the names of the contributors
* may be used to endorse or promote products derived from this software
* without specific prior written permission.
*
* This software is provided by the copyright holders and contributors "as is" and
* any express or implied warranties, including, but not limited to, the implied
* warranties of merchantability and fitness for a particular purpose are disclaimed.
* In no event shall copyright holders or contributors be liable for any direct,
* indirect, incidental, special, exemplary, or consequential damages
* (including, but not limited to, procurement of substitute goods or services;
* loss of use, data, or profits; or business interruption) however caused
* and on any theory of liability, whether in contract, strict liability,
* or tort(including negligence or otherwise) arising in any way out of
* the use of this software, even if advised of the possibility of such damage.
*/
#include "test_precomp.hpp"
namespace cvtest
{
using namespace std;
using namespace std::tr1;
using namespace testing;
using namespace perf;
using namespace cv;
using namespace cv::ximgproc;
static string getDataDir()
{
return cvtest::TS::ptr()->get_data_path();
}
CV_ENUM(SrcTypes, CV_8UC1, CV_8UC2, CV_8UC3, CV_8UC4, CV_16SC1, CV_16SC2, CV_16SC3, CV_16SC4, CV_32FC1, CV_32FC2, CV_32FC3, CV_32FC4);
CV_ENUM(GuideTypes, CV_8UC1, CV_8UC3)
typedef tuple<Size, SrcTypes, GuideTypes> FGSParams;
typedef TestWithParam<FGSParams> FastGlobalSmootherTest;
// Filtering an image of constant color must (approximately) reproduce the
// same constant image, regardless of the guide content. Runs several random
// sizes/channel counts/parameters.
TEST(FastGlobalSmootherTest, SplatSurfaceAccuracy)
{
    RNG rnd(0);
    for (int i = 0; i < 10; i++)
    {
        Size sz(rnd.uniform(512, 1024), rnd.uniform(512, 1024));

        // Bug fix: cv::RNG::uniform(a,b) samples from the half-open range
        // [a,b), so the previous uniform(1,2) always returned 1 and the
        // 3-channel guide branch below was unreachable. uniform(1,3) yields
        // 1 or 2; bumping 2 to 3 gives a 1- or 3-channel guide as intended.
        int guideCn = rnd.uniform(1, 3);
        if(guideCn==2) guideCn++; //1 or 3 channels
        Mat guide(sz, CV_MAKE_TYPE(CV_8U, guideCn));
        randu(guide, 0, 255);

        // Constant CV_16S source with 1..3 channels:
        Scalar surfaceValue;
        int srcCn = rnd.uniform(1, 4);
        rnd.fill(surfaceValue, RNG::UNIFORM, 0, 255);
        Mat src(sz, CV_MAKE_TYPE(CV_16S, srcCn), surfaceValue);

        double lambda = rnd.uniform(100, 10000);
        double sigma  = rnd.uniform(1.0, 100.0);
        Mat res;
        fastGlobalSmootherFilter(guide, src, res, lambda, sigma);

        // When filtering a constant image we should get the same image:
        double normL1 = cvtest::norm(src, res, NORM_L1)/src.total()/src.channels();
        EXPECT_LE(normL1, 1.0/64);
    }
}
// Regression test: smooths a stored image (serving as its own guide) and
// compares the result against a precomputed reference produced with
// lambda=1000, sigma=10, under both an aggregate L2 budget and a strict
// per-pixel L-inf bound.
TEST(FastGlobalSmootherTest, ReferenceAccuracy)
{
    const string data_dir = getDataDir() + "cv/edgefilter";
    Mat input_img = imread(data_dir + "/kodim23.png");
    Mat reference = imread(data_dir + "/fgs/kodim23_lambda=1000_sigma=10.png");
    ASSERT_FALSE(input_img.empty());
    ASSERT_FALSE(reference.empty());

    cv::setNumThreads(cv::getNumberOfCPUs());
    Mat output;
    fastGlobalSmootherFilter(input_img, input_img, output, 1000.0, 10.0);

    // Aggregate error budget: 1/64 per pixel per channel on average.
    const double l2_budget = 1.0/64.0 * input_img.total() * input_img.channels();
    EXPECT_LE(cvtest::norm(output, reference, NORM_L2), l2_budget);
    EXPECT_LE(cvtest::norm(output, reference, NORM_INF), 1);
}
// Reproducibility test: fastGlobalSmootherFilter must produce (nearly)
// identical results with all available cores and with a single core, for
// random inputs and parameters drawn on each loop iteration.
TEST_P(FastGlobalSmootherTest, MultiThreadReproducibility)
{
// Nothing to compare on a single-core machine:
if (cv::getNumberOfCPUs() == 1)
return;
double MAX_DIF = 1.0;
double MAX_MEAN_DIF = 1.0 / 64.0;
int loopsCount = 2;
RNG rng(0);
FGSParams params = GetParam();
Size size = get<0>(params);
int srcType = get<1>(params);
int guideType = get<2>(params);
Mat guide(size, guideType);
randu(guide, 0, 255);
Mat src(size,srcType);
// Fill the source over the full representable range of its depth:
if(src.depth()==CV_8U)
randu(src, 0, 255);
else if(src.depth()==CV_16S)
randu(src, -32767, 32767);
else
randu(src, -100000.0f, 100000.0f);
for (int iter = 0; iter <= loopsCount; iter++)
{
double lambda = rng.uniform(100.0, 10000.0);
double sigma = rng.uniform(1.0, 100.0);
cv::setNumThreads(cv::getNumberOfCPUs());
Mat resMultiThread;
fastGlobalSmootherFilter(guide, src, resMultiThread, lambda, sigma);
cv::setNumThreads(1);
Mat resSingleThread;
fastGlobalSmootherFilter(guide, src, resSingleThread, lambda, sigma);
// Strict per-pixel bound plus an average (L1) bound:
EXPECT_LE(cv::norm(resSingleThread, resMultiThread, NORM_INF), MAX_DIF);
EXPECT_LE(cv::norm(resSingleThread, resMultiThread, NORM_L1), MAX_MEAN_DIF*src.total()*src.channels());
}
}
INSTANTIATE_TEST_CASE_P(FullSet, FastGlobalSmootherTest,Combine(Values(szODD, szQVGA), SrcTypes::all(), GuideTypes::all()));
}
\ No newline at end of file
<?xml version="1.0"?>
<opencv_storage>
<x>166</x>
<y>7</y>
<width>851</width>
<height>422</height>
</opencv_storage>
<?xml version="1.0"?>
<opencv_storage>
<MSE_before>5043.6495</MSE_before>
<MSE_after>128.3773</MSE_after>
<BadPercent_before>48.5417</BadPercent_before>
<BadPercent_after>45.8749</BadPercent_after>
</opencv_storage>
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment