Commit c6750a0f authored by Victor Erukhimov

Moved detector and descriptor class factory functions into features2d module

parent d7691f6c
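For context, the sketch below shows how the factory functions moved into the features2d module by this commit (createDetector, createDescriptorExtractor, createDescriptorMatcher) are typically called. It is a hypothetical usage example, not part of the commit; the detect()/compute() calls reflect the FeatureDetector/DescriptorExtractor interfaces of this revision as best understood, and the program name and image argument are placeholders.

// Hypothetical usage sketch (not part of this commit): create a detector,
// descriptor extractor and matcher by name via the moved factory functions.
#include "opencv2/features2d/features2d.hpp"
#include "opencv2/highgui/highgui.hpp"
#include <iostream>
#include <vector>

using namespace cv;
using namespace std;

int main( int argc, char** argv )
{
    if( argc < 2 ) { cout << "Usage: factories_demo <image>" << endl; return -1; }
    Mat img = imread( argv[1], 0 );  // load as grayscale

    // The factories return 0 for unsupported type strings, so check the results.
    FeatureDetector* detector = createDetector( "SURF" );                  // FAST, STAR, SIFT, SURF, MSER, GFTT
    DescriptorExtractor* extractor = createDescriptorExtractor( "SURF" );  // SIFT, SURF
    DescriptorMatcher* matcher = createDescriptorMatcher( "BruteForce" );  // BruteForce
    if( !detector || !extractor || !matcher )
    {
        cout << "unsupported detector, descriptor extractor or matcher type" << endl;
        return -1;
    }

    vector<KeyPoint> keypoints;
    detector->detect( img, keypoints );                 // find keypoints

    Mat descriptors;
    extractor->compute( img, keypoints, descriptors );  // compute descriptors at the keypoints

    cout << "detected " << keypoints.size() << " keypoints" << endl;

    // Matching against a second image would go through 'matcher'; omitted here.
    delete detector;
    delete extractor;
    delete matcher;
    return 0;
}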
@@ -1397,6 +1397,7 @@ protected:
SURF surf;
};

CV_EXPORTS FeatureDetector* createDetector( const string& detectorType );

/****************************************************************************************\
* DescriptorExtractor *
@@ -1468,6 +1469,8 @@ protected:
SURF surf;
};

DescriptorExtractor* createDescriptorExtractor( const string& descriptorExtractorType );

/****************************************************************************************\
* Distance *
\****************************************************************************************/
@@ -1758,6 +1761,7 @@ void BruteForceMatcher<Distance>::matchImpl( const Mat& descriptors_1, const Mat
}
}

DescriptorMatcher* createDescriptorMatcher( const string& descriptorMatcherType );

/****************************************************************************************\
* GenericDescriptorMatch *
@@ -42,9 +42,10 @@
#include "precomp.hpp"

using namespace std;
using namespace cv;
namespace cv
{

CV_EXPORTS void cv::drawMatches( const Mat& img1, const Mat& img2,
CV_EXPORTS void drawMatches( const Mat& img1, const Mat& img2,
const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
const vector<int>& matches, const vector<char>& mask, Mat& outImg,
const Scalar& matchColor, const Scalar& singlePointColor,
@@ -221,6 +222,45 @@ void SurfDescriptorExtractor::write( FileStorage &fs ) const
fs << "extended" << surf.extended;
}

DescriptorExtractor* createDescriptorExtractor( const string& descriptorExtractorType )
{
DescriptorExtractor* de = 0;
if( !descriptorExtractorType.compare( "SIFT" ) )
{
de = new SiftDescriptorExtractor/*( double magnification=SIFT::DescriptorParams::GET_DEFAULT_MAGNIFICATION(),
bool isNormalize=true, bool recalculateAngles=true,
int nOctaves=SIFT::CommonParams::DEFAULT_NOCTAVES,
int nOctaveLayers=SIFT::CommonParams::DEFAULT_NOCTAVE_LAYERS,
int firstOctave=SIFT::CommonParams::DEFAULT_FIRST_OCTAVE,
int angleMode=SIFT::CommonParams::FIRST_ANGLE )*/;
}
else if( !descriptorExtractorType.compare( "SURF" ) )
{
de = new SurfDescriptorExtractor/*( int nOctaves=4, int nOctaveLayers=2, bool extended=false )*/;
}
else
{
//CV_Error( CV_StsBadArg, "unsupported descriptor extractor type");
}
return de;
}
DescriptorMatcher* createDescriptorMatcher( const string& descriptorMatcherType )
{
DescriptorMatcher* dm = 0;
if( !descriptorMatcherType.compare( "BruteForce" ) )
{
dm = new BruteForceMatcher<L2<float> >();
}
else
{
//CV_Error( CV_StsBadArg, "unsupported descriptor matcher type");
}
return dm;
}
/****************************************************************************************\
* GenericDescriptorMatch *
\****************************************************************************************/
@@ -764,3 +804,5 @@ void FernDescriptorMatch::clear ()
GenericDescriptorMatch::clear();
classifier.release();
}
}
@@ -42,8 +42,9 @@
#include "precomp.hpp"

using namespace std;
using namespace cv;
namespace cv
{

/*
 FeatureDetector
*/
@@ -314,3 +315,44 @@ void SurfFeatureDetector::detectImpl( const Mat& image, const Mat& mask,
{
surf(image, mask, keypoints);
}

FeatureDetector* createDetector( const string& detectorType )
{
FeatureDetector* fd = 0;
if( !detectorType.compare( "FAST" ) )
{
fd = new FastFeatureDetector( 10/*threshold*/, true/*nonmax_suppression*/ );
}
else if( !detectorType.compare( "STAR" ) )
{
fd = new StarFeatureDetector( 16/*max_size*/, 5/*response_threshold*/, 10/*line_threshold_projected*/,
8/*line_threshold_binarized*/, 5/*suppress_nonmax_size*/ );
}
else if( !detectorType.compare( "SIFT" ) )
{
fd = new SiftFeatureDetector(SIFT::DetectorParams::GET_DEFAULT_THRESHOLD(),
SIFT::DetectorParams::GET_DEFAULT_EDGE_THRESHOLD());
}
else if( !detectorType.compare( "SURF" ) )
{
fd = new SurfFeatureDetector( 100./*hessian_threshold*/, 3 /*octaves*/, 4/*octave_layers*/ );
}
else if( !detectorType.compare( "MSER" ) )
{
fd = new MserFeatureDetector( 5/*delta*/, 60/*min_area*/, 14400/*_max_area*/, 0.25f/*max_variation*/,
0.2/*min_diversity*/, 200/*max_evolution*/, 1.01/*area_threshold*/, 0.003/*min_margin*/,
5/*edge_blur_size*/ );
}
else if( !detectorType.compare( "GFTT" ) )
{
fd = new GoodFeaturesToTrackDetector( 1000/*maxCorners*/, 0.01/*qualityLevel*/, 1./*minDistance*/,
3/*int _blockSize*/, true/*useHarrisDetector*/, 0.04/*k*/ );
}
else
{
//CV_Error( CV_StsBadArg, "unsupported feature detector type");
}
return fd;
}
}
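A brief hypothetical caller sketch (not part of this commit): createDetector() above returns 0 for an unrecognized name, since the CV_Error call is commented out, so callers are expected to check the result before use. The command-line handling below is illustrative only.

// Hypothetical caller sketch: pick the detector type at run time and guard
// against the null pointer createDetector() returns for unknown names.
#include "opencv2/features2d/features2d.hpp"
#include <iostream>
#include <string>

int main( int argc, char** argv )
{
    std::string type = argc > 1 ? argv[1] : "FAST";  // FAST, STAR, SIFT, SURF, MSER, GFTT
    cv::FeatureDetector* fd = cv::createDetector( type );
    if( fd == 0 )
    {
        std::cout << "unsupported feature detector type: " << type << std::endl;
        return -1;
    }
    // ... run fd->detect(...) on images here ...
    delete fd;
    return 0;
}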
#include <cvaux.h>
//#include <cvaux.h>
#include <highgui.h>
#include "opencv2/core/core.hpp"
#include "opencv2/calib3d/calib3d.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include "opencv2/features2d/features2d.hpp"
#include <iostream>
@@ -34,83 +37,6 @@ void warpPerspectiveRand( const Mat& src, Mat& dst, Mat& H, RNG* rng )
warpPerspective( src, dst, H, src.size() );
}

FeatureDetector* createDetector( const string& detectorType )
{
FeatureDetector* fd = 0;
if( !detectorType.compare( "FAST" ) )
{
fd = new FastFeatureDetector( 10/*threshold*/, true/*nonmax_suppression*/ );
}
else if( !detectorType.compare( "STAR" ) )
{
fd = new StarFeatureDetector( 16/*max_size*/, 5/*response_threshold*/, 10/*line_threshold_projected*/,
8/*line_threshold_binarized*/, 5/*suppress_nonmax_size*/ );
}
else if( !detectorType.compare( "SIFT" ) )
{
fd = new SiftFeatureDetector(SIFT::DetectorParams::GET_DEFAULT_THRESHOLD(),
SIFT::DetectorParams::GET_DEFAULT_EDGE_THRESHOLD());
}
else if( !detectorType.compare( "SURF" ) )
{
fd = new SurfFeatureDetector( 100./*hessian_threshold*/, 3 /*octaves*/, 4/*octave_layers*/ );
}
else if( !detectorType.compare( "MSER" ) )
{
fd = new MserFeatureDetector( 5/*delta*/, 60/*min_area*/, 14400/*_max_area*/, 0.25f/*max_variation*/,
0.2/*min_diversity*/, 200/*max_evolution*/, 1.01/*area_threshold*/, 0.003/*min_margin*/,
5/*edge_blur_size*/ );
}
else if( !detectorType.compare( "GFTT" ) )
{
fd = new GoodFeaturesToTrackDetector( 1000/*maxCorners*/, 0.01/*qualityLevel*/, 1./*minDistance*/,
3/*int _blockSize*/, true/*useHarrisDetector*/, 0.04/*k*/ );
}
else
{
//CV_Error( CV_StsBadArg, "unsupported feature detector type");
}
return fd;
}
DescriptorExtractor* createDescriptorExtractor( const string& descriptorExtractorType )
{
DescriptorExtractor* de = 0;
if( !descriptorExtractorType.compare( "SIFT" ) )
{
de = new SiftDescriptorExtractor/*( double magnification=SIFT::DescriptorParams::GET_DEFAULT_MAGNIFICATION(),
bool isNormalize=true, bool recalculateAngles=true,
int nOctaves=SIFT::CommonParams::DEFAULT_NOCTAVES,
int nOctaveLayers=SIFT::CommonParams::DEFAULT_NOCTAVE_LAYERS,
int firstOctave=SIFT::CommonParams::DEFAULT_FIRST_OCTAVE,
int angleMode=SIFT::CommonParams::FIRST_ANGLE )*/;
}
else if( !descriptorExtractorType.compare( "SURF" ) )
{
de = new SurfDescriptorExtractor/*( int nOctaves=4, int nOctaveLayers=2, bool extended=false )*/;
}
else
{
//CV_Error( CV_StsBadArg, "unsupported descriptor extractor type");
}
return de;
}
DescriptorMatcher* createDescriptorMatcher( const string& descriptorMatcherType )
{
DescriptorMatcher* dm = 0;
if( !descriptorMatcherType.compare( "BruteForce" ) )
{
dm = new BruteForceMatcher<L2<float> >();
}
else
{
//CV_Error( CV_StsBadArg, "unsupported descriptor matcher type");
}
return dm;
}
const string winName = "correspondences";

void doIteration( const Mat& img1, Mat& img2, bool isWarpPerspective,