Commit 1d77931b authored by Alexander Alekhin

Merge remote-tracking branch 'upstream/3.4' into merge-3.4

parents 698f805c f3982616
@@ -67,6 +67,39 @@ namespace cv
 namespace xfeatures2d
 {

+/** @brief Class for extracting keypoints and computing descriptors using the Scale Invariant Feature Transform
+(SIFT) algorithm by D. Lowe @cite Lowe04 .
+*/
+class CV_EXPORTS_W SIFT : public Feature2D
+{
+public:
+    /**
+    @param nfeatures The number of best features to retain. The features are ranked by their scores
+    (measured in the SIFT algorithm as the local contrast).
+    @param nOctaveLayers The number of layers in each octave. 3 is the value used in D. Lowe's paper. The
+    number of octaves is computed automatically from the image resolution.
+    @param contrastThreshold The contrast threshold used to filter out weak features in semi-uniform
+    (low-contrast) regions. The larger the threshold, the fewer features are produced by the detector.
+    @param edgeThreshold The threshold used to filter out edge-like features. Note that its meaning
+    is different from the contrastThreshold, i.e. the larger the edgeThreshold, the fewer features are
+    filtered out (more features are retained).
+    @param sigma The sigma of the Gaussian applied to the input image at octave \#0. If your image
+    is captured with a weak camera with soft lenses, you might want to reduce this value.
+    */
+    CV_WRAP static Ptr<SIFT> create(int nfeatures = 0, int nOctaveLayers = 3,
+        double contrastThreshold = 0.04, double edgeThreshold = 10,
+        double sigma = 1.6);
+};
+
+typedef SIFT SiftFeatureDetector;
+typedef SIFT SiftDescriptorExtractor;
+
 //! @addtogroup xfeatures2d_experiment
 //! @{
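The hunk above moves the SIFT declaration into the public xfeatures2d header. For orientation, here is a minimal usage sketch of the documented create() parameters; it is illustrative only, not part of this commit, and the image path is a placeholder:

#include <opencv2/core.hpp>
#include <opencv2/imgcodecs.hpp>
#include <opencv2/xfeatures2d.hpp>
#include <vector>

int main()
{
    // Placeholder input path; any grayscale image works.
    cv::Mat img = cv::imread("box.png", cv::IMREAD_GRAYSCALE);
    if (img.empty()) return 1;

    // Defaults match the documentation above: keep all features,
    // 3 layers per octave, contrastThreshold 0.04, edgeThreshold 10, sigma 1.6.
    cv::Ptr<cv::xfeatures2d::SIFT> sift = cv::xfeatures2d::SIFT::create();

    std::vector<cv::KeyPoint> keypoints;
    cv::Mat descriptors;  // one 128-float row per keypoint
    sift->detectAndCompute(img, cv::noArray(), keypoints, descriptors);
    return 0;
}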
@@ -50,40 +50,6 @@ namespace cv
 namespace xfeatures2d
 {

 //! @addtogroup xfeatures2d_nonfree
 //! @{

-/** @brief Class for extracting keypoints and computing descriptors using the Scale Invariant Feature Transform
-(SIFT) algorithm by D. Lowe @cite Lowe04 .
-*/
-class CV_EXPORTS_W SIFT : public Feature2D
-{
-public:
-    /**
-    @param nfeatures The number of best features to retain. The features are ranked by their scores
-    (measured in the SIFT algorithm as the local contrast).
-    @param nOctaveLayers The number of layers in each octave. 3 is the value used in D. Lowe's paper. The
-    number of octaves is computed automatically from the image resolution.
-    @param contrastThreshold The contrast threshold used to filter out weak features in semi-uniform
-    (low-contrast) regions. The larger the threshold, the fewer features are produced by the detector.
-    @param edgeThreshold The threshold used to filter out edge-like features. Note that its meaning
-    is different from the contrastThreshold, i.e. the larger the edgeThreshold, the fewer features are
-    filtered out (more features are retained).
-    @param sigma The sigma of the Gaussian applied to the input image at octave \#0. If your image
-    is captured with a weak camera with soft lenses, you might want to reduce this value.
-    */
-    CV_WRAP static Ptr<SIFT> create( int nfeatures = 0, int nOctaveLayers = 3,
-        double contrastThreshold = 0.04, double edgeThreshold = 10,
-        double sigma = 1.6);
-};
-
-typedef SIFT SiftFeatureDetector;
-typedef SIFT SiftDescriptorExtractor;
-
 /** @brief Class for extracting Speeded Up Robust Features from an image @cite Bay06 .

 The algorithm parameters:
@@ -283,8 +283,8 @@ void FREAK_Impl::buildPattern()
         const float dx = patternLookup[orientationPairs[m].i].x-patternLookup[orientationPairs[m].j].x;
         const float dy = patternLookup[orientationPairs[m].i].y-patternLookup[orientationPairs[m].j].y;
         const float norm_sq = (dx*dx+dy*dy);
-        orientationPairs[m].weight_dx = int((dx/(norm_sq))*4096.0+0.5);
-        orientationPairs[m].weight_dy = int((dy/(norm_sq))*4096.0+0.5);
+        orientationPairs[m].weight_dx = cvRound((dx/(norm_sq))*4096.0);
+        orientationPairs[m].weight_dy = cvRound((dy/(norm_sq))*4096.0);
     }

     // build the list of description pairs
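Every FREAK change in this commit replaces hand-rolled int(x + 0.5) rounding with cvRound. The two agree for non-negative values but diverge for negative ones, because the cast truncates toward zero; that is also why the orientation code further down needed a sign branch before this change. A small self-contained demonstration (illustrative, not part of the commit):

#include <opencv2/core.hpp>  // cvRound
#include <cstdio>

int main()
{
    const double samples[] = { 2.3, 2.7, -2.3, -2.7 };
    for (double x : samples)
    {
        // int(x + 0.5) truncates toward zero, so it mis-rounds negatives:
        // int(-2.3 + 0.5) == int(-1.8) == -1, but the nearest integer is -2.
        std::printf("x = %5.1f   int(x+0.5) = %3d   cvRound(x) = %3d\n",
                    x, int(x + 0.5), cvRound(x));
    }
    return 0;
}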
@@ -482,7 +482,7 @@ void FREAK_Impl::computeDescriptors( InputArray _image, std::vector<KeyPoint>& k
     }
     else
     {
-        const int scIdx = std::max( (int)(1.0986122886681*sizeCst+0.5) ,0);
+        const int scIdx = std::max( cvRound(1.0986122886681*sizeCst) ,0);
         for( size_t k = keypoints.size(); k--; )
         {
             kpScaleIdx[k] = scIdx; // equivalent to the formula when the scale is normalized with a constant size of keypoints[k].size=3*SMALLEST_KP_SIZE
@@ -539,10 +539,7 @@ void FREAK_Impl::computeDescriptors( InputArray _image, std::vector<KeyPoint>& k
             keypoints[k].angle = static_cast<float>(atan2((float)direction1,(float)direction0)*(180.0/CV_PI));//estimate orientation
-            if(keypoints[k].angle < 0.f)
-                thetaIdx = int(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0)-0.5);
-            else
-                thetaIdx = int(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0)+0.5);
+            thetaIdx = cvRound(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0));

             if( thetaIdx < 0 )
                 thetaIdx += FREAK_NB_ORIENTATION;
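After the rounding fix, only the wrap-around remains to keep a bin index computed from a possibly negative angle in range. A hedged sketch of the same quantization, where angleToBin is a hypothetical helper name and nbOrientation stands in for FREAK_NB_ORIENTATION (256 in the FREAK sources):

#include <opencv2/core.hpp>

// Illustrative helper, not part of the commit: map an angle in degrees
// (possibly negative, as atan2 returns) to one of nbOrientation bins.
static int angleToBin(float angleDeg, int nbOrientation)
{
    int idx = cvRound(nbOrientation * angleDeg * (1.0 / 360.0));
    if (idx < 0)              // e.g. -90 deg with 256 bins: -64 -> 192
        idx += nbOrientation;
    return idx;
}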
@@ -596,10 +593,7 @@ void FREAK_Impl::computeDescriptors( InputArray _image, std::vector<KeyPoint>& k
             keypoints[k].angle = static_cast<float>(atan2((float)direction1,(float)direction0)*(180.0/CV_PI)); //estimate orientation
-            if(keypoints[k].angle < 0.f)
-                thetaIdx = int(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0)-0.5);
-            else
-                thetaIdx = int(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0)+0.5);
+            thetaIdx = cvRound(FREAK_NB_ORIENTATION*keypoints[k].angle*(1/360.0));

             if( thetaIdx < 0 )
                 thetaIdx += FREAK_NB_ORIENTATION;
@@ -671,10 +665,10 @@ imgType FREAK_Impl::meanIntensity( InputArray _image, InputArray _integral,
     // expected case:

     // calculate borders
-    const int x_left = int(xf-radius+0.5);
-    const int y_top = int(yf-radius+0.5);
-    const int x_right = int(xf+radius+1.5);//integral image is 1px wider
-    const int y_bottom = int(yf+radius+1.5);//integral image is 1px higher
+    const int x_left = cvRound(xf-radius);
+    const int y_top = cvRound(yf-radius);
+    const int x_right = cvRound(xf+radius+1);//integral image is 1px wider
+    const int y_bottom = cvRound(yf+radius+1);//integral image is 1px higher

     iiType ret_val;
     ret_val = integral.at<iiType>(y_bottom,x_right);//bottom right corner
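The borders above index an integral image, so the patch mean comes from four corner lookups; the diff shows only the bottom-right access. The standard box-sum identity, as a standalone sketch (illustrative; the real meanIntensity also has a subpixel-interpolation path for small radii):

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>  // cv::integral

// Illustrative, not from the commit: mean of the box
// [x_left, x_right) x [y_top, y_bottom) using an integral image.
// 'ii' is the (rows+1) x (cols+1) CV_32S output of cv::integral().
static double boxMean(const cv::Mat& ii, int x_left, int y_top,
                      int x_right, int y_bottom)
{
    // Four-corner identity: sum = BR - TR - BL + TL.
    const int sum = ii.at<int>(y_bottom, x_right)
                  - ii.at<int>(y_top,    x_right)
                  - ii.at<int>(y_bottom, x_left)
                  + ii.at<int>(y_top,    x_left);
    const int area = (x_right - x_left) * (y_bottom - y_top);
    return area > 0 ? double(sum) / area : 0.0;
}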
@@ -114,8 +114,6 @@ namespace cv
 namespace xfeatures2d
 {

-#ifdef OPENCV_ENABLE_NONFREE
-
 /*!
  SIFT implementation.
@@ -1202,14 +1200,5 @@ void SIFT_Impl::detectAndCompute(InputArray _image, InputArray _mask,
     }
 }

-#else // ! #ifdef OPENCV_ENABLE_NONFREE
-
-Ptr<SIFT> SIFT::create( int, int, double, double, double )
-{
-    CV_Error(Error::StsNotImplemented,
-        "This algorithm is patented and is excluded in this configuration; "
-        "Set OPENCV_ENABLE_NONFREE CMake option and rebuild the library");
-}
-
-#endif
-
 }
 }
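With the #else branch gone, SIFT::create() no longer throws in default builds; SURF keeps the OPENCV_ENABLE_NONFREE guard, and its create() still raises the StsNotImplemented error shown above when the option is off. A hedged sketch of handling that at runtime (illustrative, not part of the commit):

#include <opencv2/core.hpp>
#include <opencv2/xfeatures2d/nonfree.hpp>
#include <cstdio>

int main()
{
    try
    {
        // Throws cv::Exception (StsNotImplemented) when OpenCV was built
        // without the OPENCV_ENABLE_NONFREE CMake option.
        cv::Ptr<cv::Feature2D> surf = cv::xfeatures2d::SURF::create();
        std::puts("SURF available");
    }
    catch (const cv::Exception& e)
    {
        std::printf("SURF unavailable: %s\n", e.what());
    }
    return 0;
}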
@@ -53,13 +53,13 @@ const string IMAGE_FILENAME = "tsukuba.png";

 namespace opencv_test { namespace {

-#ifdef OPENCV_ENABLE_NONFREE
-TEST( Features2d_Detector_SIFT, regression)
+TEST( Features2d_Detector_SIFT, regression )
 {
     CV_FeatureDetectorTest test( "detector-sift", SIFT::create() );
     test.safe_run();
 }

+#ifdef OPENCV_ENABLE_NONFREE
 TEST( Features2d_Detector_SURF, regression )
 {
     CV_FeatureDetectorTest test( "detector-surf", SURF::create() );
@@ -94,7 +94,6 @@ TEST( Features2d_Detector_Harris_Laplace_Affine, regression )
 /*
  * Descriptors
  */
-#ifdef OPENCV_ENABLE_NONFREE
 TEST( Features2d_DescriptorExtractor_SIFT, regression )
 {
     CV_DescriptorExtractorTest<L1<float> > test( "descriptor-sift", 1.0f,
@@ -102,6 +101,7 @@ TEST( Features2d_DescriptorExtractor_SIFT, regression )
     test.safe_run();
 }

+#ifdef OPENCV_ENABLE_NONFREE
 TEST( Features2d_DescriptorExtractor_SURF, regression )
 {
 #ifdef HAVE_OPENCL
@@ -375,8 +375,9 @@ protected:
     Ptr<Feature2D> f2d;
 };

-#ifdef OPENCV_ENABLE_NONFREE
 TEST(Features2d_SIFTHomographyTest, regression) { CV_DetectPlanarTest test("SIFT", 80, SIFT::create()); test.safe_run(); }

+#ifdef OPENCV_ENABLE_NONFREE
 TEST(Features2d_SURFHomographyTest, regression) { CV_DetectPlanarTest test("SURF", 80, SURF::create()); test.safe_run(); }
 #endif
@@ -441,13 +442,13 @@ protected:
     Ptr<FeatureDetector> featureDetector_;
 };

-#ifdef OPENCV_ENABLE_NONFREE
 TEST(Features2d_SIFT_using_mask, regression)
 {
     FeatureDetectorUsingMaskTest test(SIFT::create());
     test.safe_run();
 }

+#ifdef OPENCV_ENABLE_NONFREE
 TEST(DISABLED_Features2d_SURF_using_mask, regression)
 {
     FeatureDetectorUsingMaskTest test(SURF::create());