Commit 69e329c9 authored by Maria Dimashova's avatar Maria Dimashova

modified features2d interface; added algorithmic test for DescriptorMatcher;…

modified features2d interface; added algorithmic test for DescriptorMatcher; added sample on matching to many images
parent 0d3809d0
......@@ -1234,27 +1234,28 @@ public:
virtual ~FeatureDetector() {}
/*
* Detect keypoints in an image.
* Detect keypoints in an image. Must be implemented by the subclass.
*
* image The image.
* keypoints The detected keypoints.
* mask Mask specifying where to look for keypoints (optional). Must be a char
* matrix with non-zero values in the region of interest.
*/
void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const
{
detectImpl( image, mask, keypoints );
}
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const = 0;
/*
* Detect keypoints in an image set.
*
* images Image collection.
* pointCollection Collection of keypoints detected in the input images.
* masks Masks for each input image.
*/
void detect( const vector<Mat>& imageCollection, vector<vector<KeyPoint> >& pointCollection, const vector<Mat>& masks=vector<Mat>() ) const;
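For illustration, a minimal caller-side sketch of this collection overload (not part of the class; the file names, the FAST threshold and the include paths are placeholder assumptions):

// Detect keypoints on a small image collection with one detector instance.
#include "opencv2/features2d/features2d.hpp"   // header paths are assumptions; adjust to your build
#include "opencv2/highgui/highgui.hpp"
using namespace cv;
using namespace std;

int main()
{
    vector<Mat> images;
    images.push_back( imread("img1.png", 0) );   // placeholder file names, loaded as grayscale
    images.push_back( imread("img2.png", 0) );

    FastFeatureDetector detector( 30 /*threshold*/ );
    vector<vector<KeyPoint> > pointCollection;
    detector.detect( images, pointCollection );  // pointCollection[i] holds keypoints of images[i]
    return 0;
}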
virtual void read(const FileNode&) {}
virtual void write(FileStorage&) const {}
protected:
/*
* Detect keypoints; detect() calls this. Must be implemented by the subclass.
*/
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const = 0;
/*
* Remove keypoints that are not in the mask.
*
......@@ -1268,13 +1269,12 @@ class CV_EXPORTS FastFeatureDetector : public FeatureDetector
{
public:
FastFeatureDetector( int _threshold = 1, bool _nonmaxSuppression = true );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void read (const FileNode& fn);
virtual void write (FileStorage& fs) const;
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
int threshold;
bool nonmaxSuppression;
};
......@@ -1285,13 +1285,12 @@ class CV_EXPORTS GoodFeaturesToTrackDetector : public FeatureDetector
public:
GoodFeaturesToTrackDetector( int _maxCorners, double _qualityLevel, double _minDistance,
int _blockSize=3, bool _useHarrisDetector=false, double _k=0.04 );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void read (const FileNode& fn);
virtual void write (FileStorage& fs) const;
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
int maxCorners;
double qualityLevel;
double minDistance;
......@@ -1306,13 +1305,12 @@ public:
MserFeatureDetector( CvMSERParams params = cvMSERParams () );
MserFeatureDetector( int delta, int minArea, int maxArea, double maxVariation, double minDiversity,
int maxEvolution, double areaThreshold, double minMargin, int edgeBlurSize );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void read (const FileNode& fn);
virtual void write (FileStorage& fs) const;
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
MSER mser;
};
......@@ -1321,13 +1319,12 @@ class CV_EXPORTS StarFeatureDetector : public FeatureDetector
public:
StarFeatureDetector( int maxSize=16, int responseThreshold=30, int lineThresholdProjected = 10,
int lineThresholdBinarized=8, int suppressNonmaxSize=5 );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void read (const FileNode& fn);
virtual void write (FileStorage& fs) const;
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
StarDetector star;
};
......@@ -1340,13 +1337,12 @@ public:
int nOctaveLayers=SIFT::CommonParams::DEFAULT_NOCTAVE_LAYERS,
int firstOctave=SIFT::CommonParams::DEFAULT_FIRST_OCTAVE,
int angleMode=SIFT::CommonParams::FIRST_ANGLE );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void read (const FileNode& fn);
virtual void write (FileStorage& fs) const;
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
SIFT sift;
};
......@@ -1354,18 +1350,14 @@ class CV_EXPORTS SurfFeatureDetector : public FeatureDetector
{
public:
SurfFeatureDetector( double hessianThreshold = 400., int octaves = 3, int octaveLayers = 4 );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
virtual void read (const FileNode& fn);
virtual void write (FileStorage& fs) const;
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
SURF surf;
};
CV_EXPORTS Ptr<FeatureDetector> createFeatureDetector( const string& detectorType );
class CV_EXPORTS DenseFeatureDetector : public FeatureDetector
{
public:
......@@ -1375,11 +1367,9 @@ public:
int _initXyStep=6, int _initImgBound=0, bool _varyXyStepWithScale=true, bool _varyImgBoundWithScale=false ) :
initFeatureScale(_initFeatureScale), featureScaleLevels(_featureScaleLevels), featureScaleMul(_featureScaleMul),
initXyStep(_initXyStep), initImgBound(_initImgBound), varyXyStepWithScale(_varyXyStepWithScale), varyImgBoundWithScale(_varyImgBoundWithScale) {}
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
// todo read/write
protected:
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
float initFeatureScale;
int featureScaleLevels;
float featureScaleMul;
......@@ -1400,6 +1390,7 @@ class CV_EXPORTS GridAdaptedFeatureDetector : public FeatureDetector
public:
GridAdaptedFeatureDetector( const Ptr<FeatureDetector>& _detector, int _maxTotalKeypoints,
int _gridRows=4, int _gridCols=4 );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
// todo read/write
protected:
......@@ -1407,8 +1398,6 @@ protected:
int maxTotalKeypoints;
int gridRows;
int gridCols;
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
};
/*
......@@ -1419,19 +1408,18 @@ class PyramidAdaptedFeatureDetector : public FeatureDetector
{
public:
PyramidAdaptedFeatureDetector( const Ptr<FeatureDetector>& _detector, int _levels=2 );
virtual void detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask=Mat() ) const;
// todo read/write
protected:
Ptr<FeatureDetector> detector;
int levels;
virtual void detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const;
};
CV_EXPORTS Mat windowedMatchingMask( const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
float maxDeltaX, float maxDeltaY );
CV_EXPORTS Ptr<FeatureDetector> createFeatureDetector( const string& detectorType );
/****************************************************************************************\
* DescriptorExtractor *
\****************************************************************************************/
......@@ -1451,7 +1439,6 @@ public:
virtual ~DescriptorExtractor() {}
/*
* Compute the descriptors for a set of keypoints in an image.
*
* Must be implemented by the subclass.
*
* image The image.
......@@ -1460,8 +1447,17 @@ public:
*/
virtual void compute( const Mat& image, vector<KeyPoint>& keypoints, Mat& descriptors ) const = 0;
virtual void read( const FileNode& ) {};
virtual void write( FileStorage& ) const {};
/*
* Compute the descriptors for a keypoints collection detected in image collection.
*
* imageCollection Image collection.
* pointCollection Keypoints collection. pointCollection[i] is keypoints detected in imageCollection[i].
* descCollection Descriptor collection. descCollection[i] is descriptors computed for pointCollection[i].
*/
void compute( const vector<Mat>& imageCollection, vector<vector<KeyPoint> >& pointCollection, vector<Mat>& descCollection ) const;
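For illustration, a hedged sketch of the matching collection overload on the extractor side (the SURF classes are used only as familiar examples; the include path is an assumption):

// Compute descriptors for keypoints detected over an image collection.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void computeForCollection( const vector<Mat>& images, vector<Mat>& descCollection )
{
    SurfFeatureDetector detector;
    SurfDescriptorExtractor extractor;

    vector<vector<KeyPoint> > pointCollection;
    detector.detect( images, pointCollection );                    // keypoints per image
    extractor.compute( images, pointCollection, descCollection );  // descCollection[i] for pointCollection[i]
}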
virtual void read( const FileNode& ) {}
virtual void write( FileStorage& ) const {}
virtual int descriptorSize() const = 0;
virtual int descriptorType() const = 0;
......@@ -1651,12 +1647,20 @@ struct CV_EXPORTS L1
* DMatch *
\****************************************************************************************/
/*
* Struct for matching: match index and distance between descriptors
* Struct for matching: query descriptor index, train descriptor index, train image index and distance between descriptors.
*/
struct CV_EXPORTS DMatch
{
int indexTrain;
int indexQuery;
DMatch() : queryIdx(-1), trainIdx(-1), imgIdx(-1), distance(std::numeric_limits<float>::max()) {}
DMatch( int _queryIdx, int _trainIdx, float _distance ) :
queryIdx(_queryIdx), trainIdx(_trainIdx), imgIdx(-1), distance(_distance) {}
DMatch( int _queryIdx, int _trainIdx, int _imgIdx, float _distance ) :
queryIdx(_queryIdx), trainIdx(_trainIdx), imgIdx(_imgIdx), distance(_distance) {}
int queryIdx; // query descriptor index
int trainIdx; // train descriptor index
int imgIdx; // train image index
float distance;
//less is better
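For illustration, a small sketch of how DMatch is meant to be ordered; the numeric values are made up, and operator< ("less is better") is assumed to compare by distance as the comment above suggests:

// Construct a few matches and sort them so the closest match comes first.
#include <algorithm>
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void sortMatchesExample()
{
    vector<DMatch> matches;
    matches.push_back( DMatch( 0 /*queryIdx*/, 5 /*trainIdx*/, 0 /*imgIdx*/, 12.5f /*distance*/ ) );
    matches.push_back( DMatch( 1, 2, 0, 3.0f ) );
    std::sort( matches.begin(), matches.end() );  // relies on the "less is better" operator<
    // matches[0] now refers to query descriptor 1 matched to train descriptor 2
}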
......@@ -1677,328 +1681,473 @@ class CV_EXPORTS DescriptorMatcher
public:
virtual ~DescriptorMatcher() {}
/*
* Add descriptors to the training set
* descriptors Descriptors to add to the training set
* Add descriptors to the train descriptor collection.
* descCollection Descriptors to add. Each descCollection[i] holds the descriptors of one image.
*/
void add( const Mat& descriptors );
virtual void add( const vector<Mat>& descCollection );
/*
* Index the descriptors training set
* Get the train descriptor collection.
*/
virtual void index() = 0;
const vector<Mat>& getTrainDescCollection() const { return trainDescCollection; }
/*
* Find the best match for each descriptor from a query set
*
* query The query set of descriptors
* matches Indices of the closest matches from the training set
* Clear inner data (the train descriptor collection).
*/
void match( const Mat& query, vector<int>& matches ) const;
virtual void clear();
virtual bool supportMask() = 0;
/*
* Find the best matches between two descriptor sets, with constraints
* on which pairs of descriptors can be matched.
*
* The mask describes which descriptors can be matched. descriptors_1[i]
* can be matched with descriptors_2[j] only if mask.at<char>(i,j) is non-zero.
*
* query The query set of descriptors
* mask Mask specifying permissible matches.
* matches Indices of the closest matches from the training set
* Train the matcher (e.g. build the flann index).
*/
void match( const Mat& query, const Mat& mask, vector<int>& matches ) const;
virtual void train() = 0;
/*
* Find the best match for each descriptor from a query set
*
* query The query set of descriptors
* matches DMatches of the closest matches from the training set
* Group of methods to match descriptors from an image pair.
*/
void match( const Mat& query, vector<DMatch>& matches ) const;
// Find the single best match for each query descriptor (for every query descriptor if the mask is empty).
void match( const Mat& queryDescs, const Mat& trainDescs, vector<DMatch>& matches,
const Mat& mask=Mat() ) const;
// Find the knn best matches for each query descriptor (in increasing order of distance).
// compactResult is used when the mask is not empty. If compactResult is false, the matches vector has as many entries as queryDescs has rows;
// if compactResult is true, it contains no entries for fully masked-out query descriptors.
void knnMatch( const Mat& queryDescs, const Mat& trainDescs, vector<vector<DMatch> >& matches, int knn,
const Mat& mask=Mat(), bool compactResult=false ) const;
// Find all matches for each query descriptor with distance less than maxDistance (in increasing order of distance).
void radiusMatch( const Mat& queryDescs, const Mat& trainDescs, vector<vector<DMatch> >& matches, float maxDistance,
const Mat& mask=Mat(), bool compactResult=false ) const;
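For illustration, a caller-side sketch of the pair-matching methods declared above, combining knnMatch with a simple 2-NN ratio test (the 0.6 threshold and CV_32F descriptors are assumptions):

// Match two descriptor sets and keep only clearly-best matches.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void ratioTestMatch( const Mat& queryDescs, const Mat& trainDescs, vector<DMatch>& good )
{
    BruteForceMatcher<L2<float> > matcher;                      // defined further below in this header

    vector<vector<DMatch> > knnMatches;
    matcher.knnMatch( queryDescs, trainDescs, knnMatches, 2 );  // two best matches per query descriptor

    good.clear();
    for( size_t i = 0; i < knnMatches.size(); i++ )
    {
        if( knnMatches[i].size() == 2 &&
            knnMatches[i][0].distance < 0.6f * knnMatches[i][1].distance )
            good.push_back( knnMatches[i][0] );
    }
}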
/*
* Find the best matches between two descriptor sets, with constraints
* on which pairs of descriptors can be matched.
*
* The mask describes which descriptors can be matched. descriptors_1[i]
* can be matched with descriptors_2[j] only if mask.at<char>(i,j) is non-zero.
*
* query The query set of descriptors
* mask Mask specifying permissible matches.
* matches DMatches of the closest matches from the training set
* Group of methods to match descriptors from one image to an image set.
* See the descriptions of the similar image-pair methods above.
*/
void match( const Mat& query, const Mat& mask, vector<DMatch>& matches ) const;
void match( const Mat& queryDescs, vector<DMatch>& matches,
const vector<Mat>& masks=vector<Mat>() );
void knnMatch( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
void radiusMatch( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
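And a caller-side sketch of the one-image-to-collection variants, which rely on the train descriptor collection added beforehand (again an illustrative sketch, not part of the class):

// Match a query descriptor set against descriptors of several train images.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void matchToCollection( const Mat& queryDescs, const vector<Mat>& descCollection, vector<DMatch>& matches )
{
    BruteForceMatcher<L2<float> > matcher;
    matcher.add( descCollection );  // descCollection[i] holds the descriptors of train image i
    matcher.train();                // no-op for brute force; builds the index for e.g. FlannBasedMatcher
    matcher.match( queryDescs, matches );
    // matches[i].imgIdx tells which train image the best match of query descriptor i belongs to
}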
void match( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask ) const;
// Reads matcher object from a file node
virtual void read( const FileNode& ) {}
// Writes matcher object to a file storage
virtual void write( FileStorage& ) const {}
protected:
/*
* Find many matches for each descriptor from a query set
*
* query The query set of descriptors
* matches DMatches of the closest matches from the training set
* threshold Distance threshold for descriptors matching
* Class to work with descriptors from several images as one merged matrix.
* It is used e.g. in FlannBasedMatcher.
*/
void match( const Mat& query, vector<vector<DMatch> >& matches, float threshold ) const;
class DescriptorCollection
{
public:
DescriptorCollection() {}
virtual ~DescriptorCollection() {}
/*
* Find many matches for each descriptor from a query set, with constraints
* on which pairs of descriptors can be matched.
*
* The mask describes which descriptors can be matched. descriptors_1[i]
* can be matched with descriptors_2[j] only if mask.at<char>(i,j) is non-zero.
*
* query The query set of descriptors
* mask Mask specifying permissible matches.
* matches DMatches of the closest matches from the training set
* threshold Distance threshold for descriptors matching
*/
void match( const Mat& query, const Mat& mask,
vector<vector<DMatch> >& matches, float threshold ) const;
// descCollection is merged into dmatrix here
void set( const vector<Mat>& descCollection );
virtual void clear();
virtual void clear();
const Mat& getDescriptors() const { return dmatrix; }
const Mat getDescriptor( int imgIdx, int localDescIdx ) const;
const Mat getDescriptor( int globalDescIdx ) const;
void getLocalIdx( int globalDescIdx, int& imgIdx, int& localDescIdx ) const;
protected:
Mat m_train;
int size() const { return dmatrix.rows; }
/*
* Find matches; match() calls this. Must be implemented by the subclass.
* The mask may be empty.
*/
virtual void matchImpl( const Mat& query, const Mat& train, vector<int>& matches, const Mat& mask ) const = 0;
protected:
Mat dmatrix;
vector<int> startIdxs;
};
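For illustration, a standalone sketch of the bookkeeping DescriptorCollection performs; it mirrors the getLocalIdx logic later in this patch and is illustration-only code, not the protected nested class itself:

// Map a global descriptor row index back to (image index, local row index),
// given startIdxs[i] = first global row of image i in the merged matrix.
#include <vector>
using namespace std;

void globalToLocalIdx( const vector<int>& startIdxs, int globalDescIdx, int& imgIdx, int& localDescIdx )
{
    imgIdx = (int)startIdxs.size() - 1;        // default: last image
    for( size_t i = 1; i < startIdxs.size(); i++ )
    {
        if( globalDescIdx < startIdxs[i] )
        {
            imgIdx = (int)i - 1;
            break;
        }
    }
    localDescIdx = globalDescIdx - startIdxs[imgIdx];
}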
/*
* Find matches; match() calls this. Must be implemented by the subclass.
* The mask may be empty.
*/
virtual void matchImpl( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask ) const = 0;
// Create a clone of the matcher with the current parameters but with empty train data
virtual Ptr<DescriptorMatcher> cloneWithoutData() const = 0;
virtual void matchImpl( const Mat& query, const Mat& train, vector<vector<DMatch> >& matches, float threshold, const Mat& mask ) const = 0;
virtual void knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult ) = 0;
virtual void radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult ) = 0;
static bool possibleMatch( const Mat& mask, int index_1, int index_2 )
{
return mask.empty() || mask.at<char>(index_1, index_2);
return mask.empty() || mask.at<uchar>(index_1, index_2);
}
static bool maskedOut( const vector<Mat>& masks, int queryDescIdx )
{
size_t outCount = 0;
for( size_t i = 0; i < masks.size(); i++ )
{
if( !masks[i].empty() && (countNonZero(masks[i].row(queryDescIdx)) == 0) )
outCount++;
}
return !masks.empty() && outCount == masks.size();
}
vector<Mat> trainDescCollection;
};
/*
* Brute-force descriptor matcher.
*
* For each descriptor in the first set, this matcher finds the closest
* descriptor in the second set by trying each one.
*
* For efficiency, BruteForceMatcher is templated on the distance metric.
* For float descriptors, a common choice would be cv::L2<float>.
* The next two functions are used to implement the BruteForceMatcher class specialization
*/
template<class Distance>
class CV_EXPORTS BruteForceMatcher : public DescriptorMatcher
{
public:
BruteForceMatcher( Distance d = Distance() ) : distance(d) {}
virtual void index() {}
protected:
virtual void matchImpl( const Mat& query, const Mat& train, vector<int>& matches, const Mat& mask ) const;
virtual void matchImpl( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask ) const;
virtual void matchImpl( const Mat& query, const Mat& train, vector<vector<DMatch> >& matches, float threshold, const Mat& mask ) const;
class BruteForceMatcher;
Distance distance;
};
template<class Distance> inline
void BruteForceMatcher<Distance>::matchImpl( const Mat& query, const Mat& train, vector<int>& matches, const Mat& mask ) const
{
vector<DMatch> fullMatches;
matchImpl( query, train, fullMatches, mask );
matches.clear();
matches.resize( fullMatches.size() );
for( size_t i=0;i<fullMatches.size();i++)
{
matches[i] = fullMatches[i].indexTrain;
}
template<class Distance>
inline void bfKnnMatchImpl( BruteForceMatcher<Distance>& matcher,
const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
{
typedef typename Distance::ValueType ValueType;
typedef typename Distance::ResultType DistanceType;
CV_Assert( DataType<ValueType>::type == queryDescs.type() || queryDescs.empty() );
CV_Assert( masks.empty() || masks.size() == matcher.trainDescCollection.size() );
int dimension = queryDescs.cols;
matches.reserve(queryDescs.rows);
size_t imgCount = matcher.trainDescCollection.size();
vector<Mat> allDists( imgCount ); // distances between one query descriptor and all train descriptors
for( size_t i = 0; i < imgCount; i++ )
allDists[i] = Mat( 1, matcher.trainDescCollection[i].rows, queryDescs.type() );
for( int qIdx = 0; qIdx < queryDescs.rows; qIdx++ )
{
if( matcher.maskedOut( masks, qIdx ) )
{
if( !compactResult ) // push empty vector
matches.push_back( vector<DMatch>() );
}
else
{
// 1. compute distances between i-th query descriptor and all train descriptors
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
CV_Assert( masks.empty() || masks[iIdx].empty() ||
( masks[iIdx].rows == queryDescs.rows && masks[iIdx].cols == matcher.trainDescCollection[iIdx].rows &&
masks[iIdx].type() == CV_8UC1 ) );
CV_Assert( DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() || matcher.trainDescCollection[iIdx].empty() );
CV_Assert( queryDescs.cols == matcher.trainDescCollection[iIdx].cols );
const ValueType* d1 = (const ValueType*)(queryDescs.data + queryDescs.step*qIdx);
allDists[iIdx].setTo( Scalar::all(std::numeric_limits<DistanceType>::max()) );
for( int tIdx = 0; tIdx < matcher.trainDescCollection[iIdx].rows; tIdx++ )
{
if( masks.empty() || matcher.possibleMatch(masks[iIdx], qIdx, tIdx) )
{
const ValueType* d2 = (const ValueType*)(matcher.trainDescCollection[iIdx].data +
matcher.trainDescCollection[iIdx].step*tIdx);
allDists[iIdx].at<ValueType>(0, tIdx) = matcher.distance(d1, d2, dimension);
}
}
}
// 2. choose knn nearest matches for query[i]
matches.push_back( vector<DMatch>() );
vector<vector<DMatch> >::reverse_iterator curMatches = matches.rbegin();
for( int k = 0; k < knn; k++ )
{
DMatch bestMatch;
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
double minVal;
Point minLoc;
minMaxLoc( allDists[iIdx], &minVal, 0, &minLoc, 0 );
if( minVal < bestMatch.distance )
bestMatch = DMatch( qIdx, minLoc.x, iIdx, minVal );
}
if( bestMatch.trainIdx == -1 )
break;
allDists[bestMatch.imgIdx].at<ValueType>(0, bestMatch.trainIdx) = std::numeric_limits<DistanceType>::max();
curMatches->push_back( bestMatch );
}
std::sort( curMatches->begin(), curMatches->end() );
}
}
}
template<class Distance> inline
void BruteForceMatcher<Distance>::matchImpl( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask ) const
template<class Distance>
inline void bfRadiusMatchImpl( BruteForceMatcher<Distance>& matcher,
const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
{
typedef typename Distance::ValueType ValueType;
typedef typename Distance::ResultType DistanceType;
CV_Assert( DataType<ValueType>::type == queryDescs.type() || queryDescs.empty() );
CV_Assert( masks.empty() || masks.size() == matcher.trainDescCollection.size() );
assert( mask.empty() || (mask.rows == query.rows && mask.cols == train.rows) );
int dimension = queryDescs.cols;
matches.reserve(queryDescs.rows);
assert( query.cols == train.cols || query.empty() || train.empty() );
assert( DataType<ValueType>::type == query.type() || query.empty() );
assert( DataType<ValueType>::type == train.type() || train.empty() );
int dimension = query.cols;
matches.clear();
matches.reserve(query.rows);
for( int i = 0; i < query.rows; i++ )
size_t imgCount = matcher.trainDescCollection.size();
for( int qIdx = 0; qIdx < queryDescs.rows; qIdx++ )
{
const ValueType* d1 = (const ValueType*)(query.data + query.step*i);
int matchIndex = -1;
DistanceType matchDistance = std::numeric_limits<DistanceType>::max();
for( int j = 0; j < train.rows; j++ )
if( matcher.maskedOut( masks, qIdx ) )
{
if( possibleMatch(mask, i, j) )
if( !compactResult ) // push empty vector
matches.push_back( vector<DMatch>() );
}
else
{
matches.push_back( vector<DMatch>() );
vector<vector<DMatch> >::reverse_iterator curMatches = matches.rbegin();
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
const ValueType* d2 = (const ValueType*)(train.data + train.step*j);
DistanceType curDistance = distance(d1, d2, dimension);
if( curDistance < matchDistance )
CV_Assert( masks.empty() || masks[iIdx].empty() ||
( masks[iIdx].rows == queryDescs.rows && masks[iIdx].cols == matcher.trainDescCollection[iIdx].rows &&
masks[iIdx].type() == CV_8UC1 ) );
CV_Assert( DataType<ValueType>::type == matcher.trainDescCollection[iIdx].type() ||
matcher.trainDescCollection[iIdx].empty() );
CV_Assert( queryDescs.cols == matcher.trainDescCollection[iIdx].cols );
const ValueType* d1 = (const ValueType*)(queryDescs.data + queryDescs.step*qIdx);
for( int tIdx = 0; tIdx < matcher.trainDescCollection[iIdx].rows; tIdx++ )
{
matchDistance = curDistance;
matchIndex = j;
if( masks.empty() || matcher.possibleMatch(masks[iIdx], qIdx, tIdx) )
{
const ValueType* d2 = (const ValueType*)(matcher.trainDescCollection[iIdx].data +
matcher.trainDescCollection[iIdx].step*tIdx);
DistanceType d = matcher.distance(d1, d2, dimension);
if( d < maxDistance )
curMatches->push_back( DMatch( qIdx, tIdx, iIdx, d ) );
}
}
}
}
if( matchIndex != -1 )
{
DMatch match;
match.indexTrain = matchIndex;
match.indexQuery = i;
match.distance = matchDistance;
matches.push_back( match );
std::sort( curMatches->begin(), curMatches->end() );
}
}
}
template<class Distance> inline
void BruteForceMatcher<Distance>::matchImpl( const Mat& query, const Mat& train, vector<vector<DMatch> >& matches,
float threshold, const Mat& mask ) const
/*
* Brute-force descriptor matcher.
*
* For each descriptor in the first set, this matcher finds the closest
* descriptor in the second set by trying each one.
*
* For efficiency, BruteForceMatcher is templated on the distance metric.
* For float descriptors, a common choice would be cv::L2<float>.
*/
template<class Distance>
class CV_EXPORTS BruteForceMatcher : public DescriptorMatcher
{
typedef typename Distance::ValueType ValueType;
typedef typename Distance::ResultType DistanceType;
public:
template<class bfDistance>
friend void bfKnnMatchImpl( BruteForceMatcher<bfDistance>& matcher,
const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
template<class bfDistance>
friend void bfRadiusMatchImpl( BruteForceMatcher<bfDistance>& matcher,
const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
assert( mask.empty() || (mask.rows == query.rows && mask.cols == train.rows) );
BruteForceMatcher( Distance d = Distance() ) : distance(d) {}
virtual ~BruteForceMatcher() {}
assert( query.cols == train.cols || query.empty() || train.empty() );
assert( DataType<ValueType>::type == query.type() || query.empty() );
assert( DataType<ValueType>::type == train.type() || train.empty() );
virtual void train() {}
virtual bool supportMask() { return true; }
int dimension = query.cols;
matches.clear();
matches.resize( query.rows );
protected:
virtual Ptr<DescriptorMatcher> cloneWithoutData() const { return new BruteForceMatcher(distance); }
for( int i = 0; i < query.rows; i++ )
{
const ValueType* d1 = (const ValueType*)(query.data + query.step*i);
virtual void knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
Distance distance;
};
for( int j = 0; j < train.rows; j++ )
{
if( possibleMatch(mask, i, j) )
{
const ValueType* d2 = (const ValueType*)(train.data + train.step*j);
DistanceType curDistance = distance(d1, d2, dimension);
if( curDistance < threshold )
{
DMatch match;
match.distance = curDistance;
match.indexQuery = i;
match.indexTrain = j;
matches[i].push_back( match );
}
}
}
}
template<class Distance>
void BruteForceMatcher<Distance>::knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
{
bfKnnMatchImpl<Distance>( *this, queryDescs, matches, knn, masks, compactResult );
}
template<>
void BruteForceMatcher<L2<float> >::matchImpl( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask) const;
template<class Distance>
void BruteForceMatcher<Distance>::radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
{
bfRadiusMatchImpl<Distance>( *this, queryDescs, matches, maxDistance, masks, compactResult );
}
CV_EXPORTS Ptr<DescriptorMatcher> createDescriptorMatcher( const string& descriptorMatcherType );
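For illustration, a sketch of run-time matcher selection through this factory; the exact string keys (e.g. "BruteForce", "FlannBased") depend on the factory implementation and are assumptions here:

// Pick a matcher by name and match two descriptor sets.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void matchByName( const string& matcherName, const Mat& queryDescs, const Mat& trainDescs, vector<DMatch>& matches )
{
    Ptr<DescriptorMatcher> matcher = createDescriptorMatcher( matcherName );  // e.g. "BruteForce" (assumed key)
    CV_Assert( !matcher.empty() );
    matcher->match( queryDescs, trainDescs, matches );
}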
/*
* BruteForceMatcher L2 specialization
*/
template<>
void BruteForceMatcher<L2<float> >::knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
template<>
void BruteForceMatcher<L2<float> >::radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
/****************************************************************************************\
* GenericDescriptorMatch *
\****************************************************************************************/
/*
* A storage for sets of keypoints together with corresponding images and class IDs
* Flann based matcher
*/
class CV_EXPORTS KeyPointCollection
class CV_EXPORTS FlannBasedMatcher : public DescriptorMatcher
{
public:
// Adds keypoints from a single image to the storage
// image Source image
// points A vector of keypoints
void add( const Mat& _image, const vector<KeyPoint>& _points );
FlannBasedMatcher( const Ptr<flann::IndexParams>& _indexParams=new flann::KDTreeIndexParams(),
const Ptr<flann::SearchParams>& _searchParams=new flann::SearchParams() );
// Returns the total number of keypoints in the collection
size_t calcKeypointCount() const;
virtual void add( const vector<Mat>& descCollection );
virtual void clear();
// Returns the keypoint by its global index
KeyPoint getKeyPoint( int index ) const;
virtual void train();
virtual bool supportMask() { return false; }
protected:
virtual Ptr<DescriptorMatcher> cloneWithoutData() const { return new FlannBasedMatcher(indexParams, searchParams); }
// Clears images, keypoints and startIndices
void clear();
// masks are ignored (not supported)
virtual void knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
vector<Mat> images;
vector<vector<KeyPoint> > points;
static void convertToDMatches( const DescriptorCollection& collection, const Mat& indices, const Mat& dists,
vector<vector<DMatch> >& matches );
// global indices of the first points in each image,
// startIndices.size() = points.size()
vector<int> startIndices;
Ptr<flann::IndexParams> indexParams;
Ptr<flann::SearchParams> searchParams;
Ptr<flann::Index> flannIndex;
DescriptorCollection mergedDescriptors;
int addedDescCount;
};
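For illustration, a hedged usage sketch of FlannBasedMatcher over a train collection; CV_32F descriptors are assumed, and note that masks are not supported by this matcher:

// Build a flann index over the merged train descriptors and run a 2-NN query.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void flannMatch( const Mat& queryDescs, const vector<Mat>& trainDescCollection,
                 vector<vector<DMatch> >& knnMatches )
{
    FlannBasedMatcher matcher;            // default KD-tree index and search parameters
    matcher.add( trainDescCollection );
    matcher.train();                      // rebuilds the flann index over the added descriptors
    matcher.knnMatch( queryDescs, knnMatches, 2 );
}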
CV_EXPORTS Ptr<DescriptorMatcher> createDescriptorMatcher( const string& descriptorMatcherType );
/****************************************************************************************\
* GenericDescriptorMatcher *
\****************************************************************************************/
/*
* Abstract interface for a keypoint descriptor
*/
class CV_EXPORTS GenericDescriptorMatch
class GenericDescriptorMatcher;
typedef GenericDescriptorMatcher GenericDescriptorMatch;
class CV_EXPORTS GenericDescriptorMatcher
{
public:
enum IndexType
{
NoIndex,
KDTreeIndex
};
GenericDescriptorMatcher() {}
virtual ~GenericDescriptorMatcher() {}
GenericDescriptorMatch() {}
virtual ~GenericDescriptorMatch() {}
/*
* Set train collection: images and keypoints from them.
* imgCollection Image collection.
* pointCollection Keypoint collection detected in imgCollection.
*/
virtual void add( const vector<Mat>& imgCollection,
vector<vector<KeyPoint> >& pointCollection );
// Adds keypoints to the training set (descriptors are supposed to be calculated here)
virtual void add( KeyPointCollection& keypoints );
const vector<Mat>& getTrainImgCollection() const { return trainPointCollection.getImages(); }
const vector<vector<KeyPoint> >& getTrainPointCollection() const { return trainPointCollection.getKeypoints(); }
// Adds keypoints from a single image to the training set (descriptors are supposed to be calculated here)
virtual void add( const Mat& image, vector<KeyPoint>& points ) = 0;
// Clears the keypoints stored in the collection
virtual void clear();
// Classifies test keypoints
// image The source image
// points Test keypoints from the source image
virtual void classify( const Mat& image, vector<KeyPoint>& points );
virtual void train() = 0;
// Matches test keypoints to the training set
// image The source image
// points Test keypoints from the source image
// indices A vector to be filled with keypoint class indices
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<int>& indices ) = 0;
/*
* Classifies query keypoints.
* queryImage The query image
* queryPoints Keypoints from the query image
* trainImage The train image
* trainPoints Keypoints from the train image
*/
// Classify keypoints from the query image against one train image.
virtual void classify( const Mat& queryImage, vector<KeyPoint>& queryPoints,
const Mat& trainImage, vector<KeyPoint>& trainPoints ) const;
// Classify keypoints from the query image against the train image collection.
virtual void classify( const Mat& queryImage, vector<KeyPoint>& queryPoints );
// Matches test keypoints to the training set
// image The source image
// points Test keypoints from the source image
// matches A vector to be filled with keypoint matches
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<DMatch>& matches );
/*
* Group of methods to match keypoints from an image pair.
*/
// Find the single best match for each query keypoint (for every query keypoint if the mask is empty).
void match( const Mat& queryImg, vector<KeyPoint>& queryPoints,
const Mat& trainImg, vector<KeyPoint>& trainPoints,
vector<DMatch>& matches, const Mat& mask=Mat() ) const;
// Find the knn best matches for each query keypoint (in increasing order of distance).
// compactResult is used when the mask is not empty. If compactResult is false, the matches vector has as many entries as there are query keypoints;
// if compactResult is true, it contains no entries for fully masked-out query keypoints.
void knnMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
const Mat& trainImg, vector<KeyPoint>& trainPoints,
vector<vector<DMatch> >& matches, int knn, const Mat& mask=Mat(), bool compactResult=false ) const;
// Find all matches for each query keypoint with distance less than maxDistance (in increasing order of distance).
void radiusMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
const Mat& trainImg, vector<KeyPoint>& trainPoints,
vector<vector<DMatch> >& matches, float maxDistance, const Mat& mask=Mat(), bool compactResult=false ) const;
/*
* Group of methods to match keypoints from one image to an image set.
* See the descriptions of the similar image-pair methods above.
*/
void match( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<DMatch>& matches, const vector<Mat>& masks=vector<Mat>() );
void knnMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn, const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
void radiusMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance, const vector<Mat>& masks=vector<Mat>(), bool compactResult=false );
// Reads matcher object from a file node
virtual void read( const FileNode& ) {}
// Writes matcher object to a file storage
virtual void write( FileStorage& ) const {}
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<vector<DMatch> >& matches, float threshold );
protected:
virtual Ptr<GenericDescriptorMatcher> createEmptyMatcherCopy() const = 0;
virtual void knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult ) = 0;
virtual void radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult ) = 0;
/*
* A storage for sets of keypoints together with corresponding images and class IDs
*/
class CV_EXPORTS KeyPointCollection
{
public:
KeyPointCollection() : size(0) {}
void add( const vector<Mat>& _images, const vector<vector<KeyPoint> >& _points );
void clear();
// Clears the keypoints stored in the collection
virtual void clear();
// Returns the total number of keypoints in the collection
size_t pointCount() const { return size; }
size_t imageCount() const { return images.size(); }
// Reads match object from a file node
virtual void read( const FileNode& ) {};
// Writes match object to a file storage
virtual void write( FileStorage& ) const {};
const vector<vector<KeyPoint> >& getKeypoints() const { return points; }
const vector<KeyPoint>& getKeypoints( int imgIdx ) const { CV_Assert( imgIdx < (int)imageCount() ); return points[imgIdx]; }
const KeyPoint& getKeyPoint( int imgIdx, int localPointIdx ) const;
const KeyPoint& getKeyPoint( int globalPointIdx ) const;
void getLocalIdx( int globalPointIdx, int& imgIdx, int& localPointIdx ) const;
protected:
KeyPointCollection collection;
const vector<Mat>& getImages() const { return images; }
const Mat& getImage( int imgIdx ) const { CV_Assert( imgIdx < (int)imageCount() ); return images[imgIdx]; }
protected:
int size;
vector<Mat> images;
vector<vector<KeyPoint> > points;
// global indices of the first points in each image,
// startIndices.size() = points.size()
vector<int> startIndices;
};
KeyPointCollection trainPointCollection;
};
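For illustration, a sketch of matching keypoints (rather than precomputed descriptors) through any GenericDescriptorMatcher implementation; the pair overload is declared const, so the matcher's own train collection is left untouched:

// Match query keypoints against one train image with a generic matcher.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void genericPairMatch( GenericDescriptorMatcher& matcher,
                       const Mat& queryImg, vector<KeyPoint>& queryPoints,
                       const Mat& trainImg, vector<KeyPoint>& trainPoints,
                       vector<DMatch>& matches )
{
    matcher.match( queryImg, queryPoints, trainImg, trainPoints, matches );
    // matches[i].queryIdx / trainIdx index into queryPoints / trainPoints
}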
/*
* OneWayDescriptorMatch
* OneWayDescriptorMatcher
*/
class CV_EXPORTS OneWayDescriptorMatch : public GenericDescriptorMatch
class OneWayDescriptorMatcher;
typedef OneWayDescriptorMatcher OneWayDescriptorMatch;
class CV_EXPORTS OneWayDescriptorMatcher : public GenericDescriptorMatcher
{
public:
class Params
......@@ -2013,7 +2162,7 @@ public:
Params( int _poseCount = POSE_COUNT,
Size _patchSize = Size(PATCH_WIDTH, PATCH_HEIGHT),
string _pcaFilename = string (),
string _pcaFilename = string(),
string _trainPath = string(),
string _trainImagesList = string(),
float _minScale = GET_MIN_SCALE(), float _maxScale = GET_MAX_SCALE(),
......@@ -2031,39 +2180,17 @@ public:
float minScale, maxScale, stepScale;
};
OneWayDescriptorMatch();
// Equivalent to calling PointMatchOneWay() followed by Initialize(_params)
OneWayDescriptorMatch( const Params& _params );
virtual ~OneWayDescriptorMatch();
// Sets one way descriptor parameters
void initialize( const Params& _params, OneWayDescriptorBase *_base = 0 );
OneWayDescriptorMatcher( const Params& _params=Params() );
virtual ~OneWayDescriptorMatcher();
// Calculates one way descriptors for a set of keypoints
virtual void add( const Mat& image, vector<KeyPoint>& keypoints );
// Calculates one way descriptors for a set of keypoints
virtual void add( KeyPointCollection& keypoints );
// Matches a set of keypoints from a single image of the training set. A rectangle with a center in a keypoint
// and size (patch_width/2*scale, patch_height/2*scale) is cropped from the source image for each
// keypoint. scale is iterated from DescriptorOneWayParams::min_scale to DescriptorOneWayParams::max_scale.
// The minimum distance to each training patch with all its affine poses is found over all scales.
// The class ID of a match is returned for each keypoint. The distance is calculated over PCA components
// loaded with DescriptorOneWay::Initialize, kd tree is used for finding minimum distances.
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<int>& indices );
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<DMatch>& matches );
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<vector<DMatch> >& matches, float threshold);
// Classify a set of keypoints. The same as match, but returns point classes rather than indices
virtual void classify( const Mat& image, vector<KeyPoint>& points );
void initialize( const Params& _params, const Ptr<OneWayDescriptorBase>& _base=Ptr<OneWayDescriptorBase>() );
// Clears the keypoints stored in the collection and the OneWayDescriptorBase
virtual void clear ();
virtual void train();
// Reads match object from a file node
virtual void read( const FileNode &fn );
......@@ -2071,14 +2198,33 @@ public:
virtual void write( FileStorage& fs ) const;
protected:
virtual Ptr<GenericDescriptorMatcher> createEmptyMatcherCopy() const { return new OneWayDescriptorMatcher( params ); }
// Matches a set of keypoints against a single image of the training set. A rectangle centered at a keypoint
// and of size (patch_width/2*scale, patch_height/2*scale) is cropped from the source image for each
// keypoint; scale is iterated from DescriptorOneWayParams::min_scale to DescriptorOneWayParams::max_scale.
// The minimum distance to each training patch over all its affine poses is found over all scales.
// The class ID of a match is returned for each keypoint. The distance is calculated over the PCA components
// loaded with DescriptorOneWay::Initialize; a kd-tree is used to find the minimum distances.
virtual void knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
Ptr<OneWayDescriptorBase> base;
Params params;
int prevTrainCount;
};
/*
* FernDescriptorMatch
* FernDescriptorMatcher
*/
class CV_EXPORTS FernDescriptorMatch : public GenericDescriptorMatch
class FernDescriptorMatcher;
typedef FernDescriptorMatcher FernDescriptorMatch;
class CV_EXPORTS FernDescriptorMatcher : public GenericDescriptorMatcher
{
public:
class Params
......@@ -2107,79 +2253,76 @@ public:
string filename;
};
FernDescriptorMatch();
FernDescriptorMatch( const Params& _params );
virtual ~FernDescriptorMatch();
void initialize( const Params& _params );
virtual void add( const Mat& image, vector<KeyPoint>& keypoints );
virtual void match( const Mat& image, vector<KeyPoint>& keypoints, vector<int>& indices );
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<DMatch>& matches);
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<vector<DMatch> >& matches, float threshold);
virtual void classify( const Mat& image, vector<KeyPoint>& keypoints );
FernDescriptorMatcher( const Params& _params=Params() );
virtual ~FernDescriptorMatcher();
virtual void clear();
virtual void read( const FileNode &fn );
virtual void train();
virtual void read( const FileNode &fn );
virtual void write( FileStorage& fs ) const;
protected:
virtual Ptr<GenericDescriptorMatcher> createEmptyMatcherCopy() const { return new FernDescriptorMatcher( params ); }
virtual void knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
void trainFernClassifier();
void calcBestProbAndMatchIdx( const Mat& image, const Point2f& pt,
float& bestProb, int& bestMatchIdx, vector<float>& signature );
Ptr<FernClassifier> classifier;
Params params;
int prevTrainCount;
};
CV_EXPORTS Ptr<GenericDescriptorMatch> createGenericDescriptorMatcher( const string& genericDescritptorMatcherType,
const string &paramsFilename = string () );
CV_EXPORTS Ptr<GenericDescriptorMatcher> createGenericDescriptorMatcher( const string& genericDescritptorMatcherType,
const string &paramsFilename = string () );
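For illustration, a sketch of run-time selection through this factory; the accepted type strings (e.g. "FERN", "ONEWAY") and whether a params file is needed are implementation details and assumptions here:

// Create a generic matcher by name, without a parameters file.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

Ptr<GenericDescriptorMatcher> makeGenericMatcher( const string& type )
{
    Ptr<GenericDescriptorMatcher> matcher =
        createGenericDescriptorMatcher( type /* e.g. "FERN" (assumed key) */, string() );
    CV_Assert( !matcher.empty() );
    return matcher;
}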
/****************************************************************************************\
* VectorDescriptorMatch *
* VectorDescriptorMatcher *
\****************************************************************************************/
/*
* A class used for matching descriptors that can be described as vectors in a finite-dimensional space
*/
class CV_EXPORTS VectorDescriptorMatch : public GenericDescriptorMatch
class VectorDescriptorMatcher;
typedef VectorDescriptorMatcher VectorDescriptorMatch;
class CV_EXPORTS VectorDescriptorMatcher : public GenericDescriptorMatcher
{
public:
using GenericDescriptorMatch::add;
VectorDescriptorMatch( const Ptr<DescriptorExtractor>& _extractor, const Ptr<DescriptorMatcher>& _matcher )
: extractor( _extractor ), matcher( _matcher ) {}
virtual ~VectorDescriptorMatch() {}
// Builds flann index
void index();
VectorDescriptorMatcher( const Ptr<DescriptorExtractor>& _extractor, const Ptr<DescriptorMatcher>& _matcher )
: extractor( _extractor ), matcher( _matcher ) { CV_Assert( !extractor.empty() && !matcher.empty() ); }
virtual ~VectorDescriptorMatcher() {}
// Calculates descriptors for a set of keypoints from a single image
virtual void add( const Mat& image, vector<KeyPoint>& keypoints );
virtual void add( const vector<Mat>& imgCollection,
vector<vector<KeyPoint> >& pointCollection );
// Matches a set of keypoints with the training set
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<int>& keypointIndices );
virtual void match( const Mat& image, vector<KeyPoint>& points, vector<DMatch>& matches );
virtual void clear();
virtual void match( const Mat& image, vector<KeyPoint>& points,
vector<vector<DMatch> >& matches, float threshold );
virtual void train();
virtual void clear();
virtual void read( const FileNode& fn );
virtual void write( FileStorage& fs ) const;
protected:
virtual Ptr<GenericDescriptorMatcher> createEmptyMatcherCopy() const { return new VectorDescriptorMatcher(extractor, matcher); }
virtual void knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult );
virtual void radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult );
Ptr<DescriptorExtractor> extractor;
Ptr<DescriptorMatcher> matcher;
//vector<int> classIds;
};
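For illustration, a sketch composing a VectorDescriptorMatcher from a descriptor extractor and a descriptor matcher; SURF plus brute-force L2 are only familiar example components:

// Keypoint-level matching backed by vector descriptors.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void vectorDescriptorMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
                            const Mat& trainImg, vector<KeyPoint>& trainPoints,
                            vector<DMatch>& matches )
{
    VectorDescriptorMatcher matcher( new SurfDescriptorExtractor,
                                     new BruteForceMatcher<L2<float> > );
    matcher.match( queryImg, queryPoints, trainImg, trainPoints, matches );
}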
/****************************************************************************************\
......@@ -2205,12 +2348,6 @@ CV_EXPORTS void drawKeypoints( const Mat& image, const vector<KeyPoint>& keypoin
const Scalar& color=Scalar::all(-1), int flags=DrawMatchesFlags::DEFAULT );
// Draws matches of keypoints from two images on the output image.
CV_EXPORTS void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<int>& matches1to2, Mat& outImg,
const Scalar& matchColor=Scalar::all(-1), const Scalar& singlePointColor=Scalar::all(-1),
const vector<char>& matchesMask=vector<char>(), int flags=DrawMatchesFlags::DEFAULT );
CV_EXPORTS void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<DMatch>& matches1to2, Mat& outImg,
......@@ -2241,7 +2378,7 @@ CV_EXPORTS void evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& im
vector<KeyPoint>& keypoints1, vector<KeyPoint>& keypoints2,
vector<vector<DMatch> >* matches1to2, vector<vector<uchar> >* correctMatches1to2Mask,
vector<Point2f>& recallPrecisionCurve,
const Ptr<GenericDescriptorMatch>& dmatch=Ptr<GenericDescriptorMatch>() );
const Ptr<GenericDescriptorMatcher>& dmatch=Ptr<GenericDescriptorMatcher>() );
/****************************************************************************************\
......@@ -2312,7 +2449,7 @@ public:
void setVocabulary( const Mat& vocabulary );
const Mat& getVocabulary() const { return vocabulary; }
void compute( const Mat& image, vector<KeyPoint>& keypoints, Mat& imgDescriptor,
vector<vector<int> >* pointIdxsInClusters=0 ) const;
vector<vector<int> >* pointIdxsOfClusters=0 ); //not constant because DescriptorMatcher::match is not constant
int descriptorSize() const { return vocabulary.empty() ? 0 : vocabulary.rows; }
int descriptorType() const { return CV_32FC1; }
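For illustration, a hedged sketch of computing a bag-of-words image descriptor once a vocabulary (one visual word per row) is available; the SURF components and the constructor signature used here are assumptions based on the surrounding API:

// Quantize an image's local descriptors against a vocabulary into a histogram.
#include "opencv2/features2d/features2d.hpp"
using namespace cv;
using namespace std;

void computeBowDescriptor( const Mat& image, const Mat& vocabulary, Mat& imgDescriptor )
{
    Ptr<DescriptorExtractor> extractor = new SurfDescriptorExtractor;
    Ptr<DescriptorMatcher> matcher = new BruteForceMatcher<L2<float> >;
    BOWImgDescriptorExtractor bowExtractor( extractor, matcher );
    bowExtractor.setVocabulary( vocabulary );   // descriptor and vocabulary types must match (CV_32F here)

    SurfFeatureDetector detector;
    vector<KeyPoint> keypoints;
    detector.detect( image, keypoints );
    bowExtractor.compute( image, keypoints, imgDescriptor );  // one bin per vocabulary row
}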
......
......@@ -108,11 +108,11 @@ void BOWImgDescriptorExtractor::setVocabulary( const Mat& _vocabulary )
{
dmatcher->clear();
vocabulary = _vocabulary;
dmatcher->add( vocabulary );
dmatcher->add( vector<Mat>(1, vocabulary) );
}
void BOWImgDescriptorExtractor::compute( const Mat& image, vector<KeyPoint>& keypoints, Mat& imgDescriptor,
vector<vector<int> >* pointIdxsOfClusters ) const
vector<vector<int> >* pointIdxsOfClusters )
{
imgDescriptor.release();
......@@ -140,8 +140,8 @@ void BOWImgDescriptorExtractor::compute( const Mat& image, vector<KeyPoint>& key
float *dptr = (float*)imgDescriptor.data;
for( size_t i = 0; i < matches.size(); i++ )
{
int queryIdx = matches[i].indexQuery;
int trainIdx = matches[i].indexTrain; // cluster index
int queryIdx = matches[i].queryIdx;
int trainIdx = matches[i].trainIdx; // cluster index
CV_Assert( queryIdx == (int)i );
dptr[trainIdx] = dptr[trainIdx] + 1.f;
......
......@@ -67,6 +67,13 @@ struct RoiPredicate
float minX, minY, maxX, maxY;
};
void DescriptorExtractor::compute( const vector<Mat>& imageCollection, vector<vector<KeyPoint> >& pointCollection, vector<Mat>& descCollection ) const
{
descCollection.resize( imageCollection.size() );
for( size_t i = 0; i < imageCollection.size(); i++ )
compute( imageCollection[i], pointCollection[i], descCollection[i] );
}
void DescriptorExtractor::removeBorderKeypoints( vector<KeyPoint>& keypoints,
Size imageSize, int borderPixels )
{
......
......@@ -61,6 +61,13 @@ struct MaskPredicate
const Mat& mask;
};
void FeatureDetector::detect(const vector<Mat>& imageCollection, vector<vector<KeyPoint> >& pointCollection, const vector<Mat>& masks ) const
{
pointCollection.resize( imageCollection.size() );
for( size_t i = 0; i < imageCollection.size(); i++ )
detect( imageCollection[i], pointCollection[i], masks.empty() ? Mat() : masks[i] );
}
void FeatureDetector::removeInvalidPoints( const Mat& mask, vector<KeyPoint>& keypoints )
{
if( mask.empty() )
......@@ -88,7 +95,7 @@ void FastFeatureDetector::write (FileStorage& fs) const
fs << "nonmaxSuppression" << nonmaxSuppression;
}
void FastFeatureDetector::detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints) const
void FastFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
......@@ -126,8 +133,7 @@ void GoodFeaturesToTrackDetector::write (FileStorage& fs) const
fs << "k" << k;
}
void GoodFeaturesToTrackDetector::detectImpl( const Mat& image, const Mat& mask,
vector<KeyPoint>& keypoints ) const
void GoodFeaturesToTrackDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
......@@ -192,7 +198,7 @@ void MserFeatureDetector::write (FileStorage& fs) const
}
void MserFeatureDetector::detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const
void MserFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
vector<vector<Point> > msers;
......@@ -246,7 +252,7 @@ void StarFeatureDetector::write (FileStorage& fs) const
fs << "suppressNonmaxSize" << star.suppressNonmaxSize;
}
void StarFeatureDetector::detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints) const
void StarFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
......@@ -291,8 +297,7 @@ void SiftFeatureDetector::write (FileStorage& fs) const
}
void SiftFeatureDetector::detectImpl( const Mat& image, const Mat& mask,
vector<KeyPoint>& keypoints) const
void SiftFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
......@@ -325,8 +330,7 @@ void SurfFeatureDetector::write (FileStorage& fs) const
fs << "octaveLayers" << surf.nOctaveLayers;
}
void SurfFeatureDetector::detectImpl( const Mat& image, const Mat& mask,
vector<KeyPoint>& keypoints) const
void SurfFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat grayImage = image;
if( image.type() != CV_8U ) cvtColor( image, grayImage, CV_BGR2GRAY );
......@@ -337,7 +341,7 @@ void SurfFeatureDetector::detectImpl( const Mat& image, const Mat& mask,
/*
* DenseFeatureDetector
*/
void DenseFeatureDetector::detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const
void DenseFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
keypoints.clear();
......@@ -388,8 +392,7 @@ void keepStrongest( int N, vector<KeyPoint>& keypoints )
}
}
void GridAdaptedFeatureDetector::detectImpl( const Mat &image, const Mat &mask,
vector<KeyPoint> &keypoints ) const
void GridAdaptedFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
keypoints.clear();
keypoints.reserve(maxTotalKeypoints);
......@@ -428,7 +431,7 @@ PyramidAdaptedFeatureDetector::PyramidAdaptedFeatureDetector( const Ptr<FeatureD
: detector(_detector), levels(_levels)
{}
void PyramidAdaptedFeatureDetector::detectImpl( const Mat& image, const Mat& mask, vector<KeyPoint>& keypoints ) const
void PyramidAdaptedFeatureDetector::detect( const Mat& image, vector<KeyPoint>& keypoints, const Mat& mask ) const
{
Mat src = image;
for( int l = 0, multiplier = 1; l <= levels; ++l, multiplier *= 2 )
......
......@@ -165,33 +165,6 @@ static inline void _drawMatch( Mat& outImg, Mat& outImg1, Mat& outImg2 ,
color, 1, CV_AA, draw_shift_bits );
}
void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2,const vector<KeyPoint>& keypoints2,
const vector<int>& matches1to2, Mat& outImg,
const Scalar& matchColor, const Scalar& singlePointColor,
const vector<char>& matchesMask, int flags )
{
if( matches1to2.size() != keypoints1.size() )
CV_Error( CV_StsBadSize, "matches1to2 must have the same size as keypoints1" );
if( !matchesMask.empty() && matchesMask.size() != matches1to2.size() )
CV_Error( CV_StsBadSize, "matchesMask must have the same size as matches1to2" );
Mat outImg1, outImg2;
_prepareImgAndDrawKeypoints( img1, keypoints1, img2, keypoints2,
outImg, outImg1, outImg2, singlePointColor, flags );
// draw matches
for( size_t i1 = 0; i1 < keypoints1.size(); i1++ )
{
int i2 = matches1to2[i1];
if( (matchesMask.empty() || matchesMask[i1] ) && i2 >= 0 )
{
const KeyPoint &kp1 = keypoints1[i1], &kp2 = keypoints2[i2];
_drawMatch( outImg, outImg1, outImg2, kp1, kp2, matchColor, flags );
}
}
}
void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
const Mat& img2, const vector<KeyPoint>& keypoints2,
const vector<DMatch>& matches1to2, Mat& outImg,
......@@ -208,8 +181,8 @@ void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
// draw matches
for( size_t m = 0; m < matches1to2.size(); m++ )
{
int i1 = matches1to2[m].indexQuery;
int i2 = matches1to2[m].indexTrain;
int i1 = matches1to2[m].queryIdx;
int i2 = matches1to2[m].trainIdx;
if( matchesMask.empty() || matchesMask[m] )
{
const KeyPoint &kp1 = keypoints1[i1], &kp2 = keypoints2[i2];
......@@ -236,8 +209,8 @@ void drawMatches( const Mat& img1, const vector<KeyPoint>& keypoints1,
{
for( size_t j = 0; j < matches1to2[i].size(); j++ )
{
int i1 = matches1to2[i][j].indexQuery;
int i2 = matches1to2[i][j].indexTrain;
int i1 = matches1to2[i][j].queryIdx;
int i2 = matches1to2[i][j].trainIdx;
if( matchesMask.empty() || matchesMask[i][j] )
{
const KeyPoint &kp1 = keypoints1[i1], &kp2 = keypoints2[i2];
......
......@@ -517,10 +517,10 @@ void cv::evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& img2, con
vector<KeyPoint>& keypoints1, vector<KeyPoint>& keypoints2,
vector<vector<DMatch> >* _matches1to2, vector<vector<uchar> >* _correctMatches1to2Mask,
vector<Point2f>& recallPrecisionCurve,
const Ptr<GenericDescriptorMatch>& _dmatch )
const Ptr<GenericDescriptorMatcher>& _dmatcher )
{
Ptr<GenericDescriptorMatch> dmatch = _dmatch;
dmatch->clear();
Ptr<GenericDescriptorMatcher> dmatcher = _dmatcher;
dmatcher->clear();
vector<vector<DMatch> > *matches1to2, buf1;
matches1to2 = _matches1to2 != 0 ? _matches1to2 : &buf1;
......@@ -531,7 +531,7 @@ void cv::evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& img2, con
if( keypoints1.empty() )
CV_Error( CV_StsBadArg, "keypoints1 must be no empty" );
if( matches1to2->empty() && dmatch.empty() )
if( matches1to2->empty() && dmatcher.empty() )
CV_Error( CV_StsBadArg, "dmatch must be no empty when matches1to2 is empty" );
bool computeKeypoints2ByPrj = keypoints2.empty();
......@@ -543,10 +543,8 @@ void cv::evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& img2, con
if( matches1to2->empty() || computeKeypoints2ByPrj )
{
dmatch->clear();
dmatch->add( img2, keypoints2 );
// TODO: use more sophisticated strategy to choose threshold
dmatch->match( img1, keypoints1, *matches1to2, std::numeric_limits<float>::max() );
dmatcher->clear();
dmatcher->radiusMatch( img1, keypoints1, img2, keypoints2, *matches1to2, std::numeric_limits<float>::max() );
}
float repeatability;
int correspCount;
......@@ -559,8 +557,8 @@ void cv::evaluateGenericDescriptorMatcher( const Mat& img1, const Mat& img2, con
(*correctMatches1to2Mask)[i].resize((*matches1to2)[i].size());
for( size_t j = 0;j < (*matches1to2)[i].size(); j++ )
{
int indexQuery = (*matches1to2)[i][j].indexQuery;
int indexTrain = (*matches1to2)[i][j].indexTrain;
int indexQuery = (*matches1to2)[i][j].queryIdx;
int indexTrain = (*matches1to2)[i][j].trainIdx;
(*correctMatches1to2Mask)[i][j] = thresholdedOverlapMask.at<uchar>( indexQuery, indexTrain );
}
}
......
......@@ -71,163 +71,405 @@ Mat windowedMatchingMask( const vector<KeyPoint>& keypoints1, const vector<KeyPo
/****************************************************************************************\
* DescriptorMatcher *
\****************************************************************************************/
void DescriptorMatcher::add( const Mat& descriptors )
void DescriptorMatcher::DescriptorCollection::set( const vector<Mat>& descCollection )
{
if( m_train.empty() )
clear();
size_t imageCount = descCollection.size();
CV_Assert( imageCount > 0 );
startIdxs.resize( imageCount );
int dim = -1;
int type = -1;
startIdxs[0] = 0;
for( size_t i = 1; i < imageCount; i++ )
{
m_train = descriptors;
int s = 0;
if( !descCollection[i-1].empty() )
{
dim = descCollection[i-1].cols;
type = descCollection[i-1].type();
s = descCollection[i-1].rows;
}
startIdxs[i] = startIdxs[i-1] + s;
}
else
if( imageCount == 1 )
{
// merge train and descriptors
Mat m( m_train.rows + descriptors.rows, m_train.cols, CV_32F );
Mat m1 = m.rowRange( 0, m_train.rows );
m_train.copyTo( m1 );
Mat m2 = m.rowRange( m_train.rows + 1, m.rows );
descriptors.copyTo( m2 );
m_train = m;
if( descCollection[0].empty() ) return;
dim = descCollection[0].cols;
type = descCollection[0].type();
}
assert( dim > 0 );
int count = startIdxs[imageCount-1] + descCollection[imageCount-1].rows;
if( count > 0 )
{
dmatrix.create( count, dim, type );
for( size_t i = 0; i < imageCount; i++ )
{
if( !descCollection[i].empty() )
{
CV_Assert( descCollection[i].cols == dim && descCollection[i].type() == type );
Mat m = dmatrix.rowRange( startIdxs[i], startIdxs[i] + descCollection[i].rows );
descCollection[i].copyTo(m);
}
}
}
}
void DescriptorMatcher::match( const Mat& query, vector<int>& matches ) const
void DescriptorMatcher::DescriptorCollection::clear()
{
matchImpl( query, m_train, matches, Mat() );
startIdxs.clear();
dmatrix.release();
}
void DescriptorMatcher::match( const Mat& query, const Mat& mask,
vector<int>& matches ) const
const Mat DescriptorMatcher::DescriptorCollection::getDescriptor( int imgIdx, int localDescIdx ) const
{
matchImpl( query, m_train, matches, mask );
CV_Assert( imgIdx < (int)startIdxs.size() );
int globalIdx = startIdxs[imgIdx] + localDescIdx;
CV_Assert( globalIdx < (int)size() );
return getDescriptor( globalIdx );
}
void DescriptorMatcher::match( const Mat& query, vector<DMatch>& matches ) const
const Mat DescriptorMatcher::DescriptorCollection::getDescriptor( int globalDescIdx ) const
{
matchImpl( query, m_train, matches, Mat() );
CV_Assert( globalDescIdx < size() );
return dmatrix.row( globalDescIdx );
}
void DescriptorMatcher::match( const Mat& query, const Mat& mask,
vector<DMatch>& matches ) const
void DescriptorMatcher::DescriptorCollection::getLocalIdx( int globalDescIdx, int& imgIdx, int& localDescIdx ) const
{
matchImpl( query, m_train, matches, mask );
imgIdx = -1;
CV_Assert( globalDescIdx < size() );
for( size_t i = 1; i < startIdxs.size(); i++ )
{
if( globalDescIdx < startIdxs[i] )
{
imgIdx = i - 1;
break;
}
}
imgIdx = imgIdx == -1 ? startIdxs.size() -1 : imgIdx;
localDescIdx = globalDescIdx - startIdxs[imgIdx];
}
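// Illustrative walkthrough of the index bookkeeping above (assumed example, three train
// matrices with 5, 0 and 3 descriptor rows): set() produces startIdxs = {0, 5, 5}, so
// getLocalIdx( 6, imgIdx, localDescIdx ) yields imgIdx == 2 and localDescIdx == 1,
// i.e. global row 6 of dmatrix is row 1 of the third train matrix.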
/*
* DescriptorMatcher
*/
void convertMatches( const vector<vector<DMatch> >& knnMatches, vector<DMatch>& matches )
{
matches.clear();
matches.reserve( knnMatches.size() );
for( size_t i = 0; i < knnMatches.size(); i++ )
{
CV_Assert( knnMatches[i].size() <= 1 );
if( !knnMatches[i].empty() )
matches.push_back( knnMatches[i][0] );
}
}
void DescriptorMatcher::match( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask ) const
void DescriptorMatcher::add( const vector<Mat>& descCollection )
{
matchImpl( query, train, matches, mask );
trainDescCollection.insert( trainDescCollection.end(), descCollection.begin(), descCollection.end() );
}
void DescriptorMatcher::match( const Mat& query, vector<vector<DMatch> >& matches, float threshold ) const
void DescriptorMatcher::clear()
{
matchImpl( query, m_train, matches, threshold, Mat() );
trainDescCollection.clear();
}
void DescriptorMatcher::match( const Mat& query, const Mat& mask,
vector<vector<DMatch> >& matches, float threshold ) const
void DescriptorMatcher::match( const Mat& queryDescs, const Mat& trainDescs, vector<DMatch>& matches, const Mat& mask ) const
{
matchImpl( query, m_train, matches, threshold, mask );
Ptr<DescriptorMatcher> tempMatcher = cloneWithoutData();
tempMatcher->add( vector<Mat>(1, trainDescs) );
tempMatcher->match( queryDescs, matches, vector<Mat>(1, mask) );
}
void DescriptorMatcher::clear()
void DescriptorMatcher::knnMatch( const Mat& queryDescs, const Mat& trainDescs, vector<vector<DMatch> >& matches, int knn,
const Mat& mask, bool compactResult ) const
{
m_train.release();
Ptr<DescriptorMatcher> tempMatcher = cloneWithoutData();
tempMatcher->add( vector<Mat>(1, trainDescs) );
tempMatcher->knnMatch( queryDescs, matches, knn, vector<Mat>(1, mask), compactResult );
}
void DescriptorMatcher::radiusMatch( const Mat& queryDescs, const Mat& trainDescs, vector<vector<DMatch> >& matches, float maxDistance,
const Mat& mask, bool compactResult ) const
{
Ptr<DescriptorMatcher> tempMatcher = cloneWithoutData();
tempMatcher->add( vector<Mat>(1, trainDescs) );
tempMatcher->radiusMatch( queryDescs, matches, maxDistance, vector<Mat>(1, mask), compactResult );
}
void DescriptorMatcher::match( const Mat& queryDescs, vector<DMatch>& matches, const vector<Mat>& masks )
{
vector<vector<DMatch> > knnMatches;
knnMatch( queryDescs, knnMatches, 1, masks, true /*compactResult*/ );
convertMatches( knnMatches, matches );
}
void DescriptorMatcher::knnMatch( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
{
train();
knnMatchImpl( queryDescs, matches, knn, masks, compactResult );
}
void DescriptorMatcher::radiusMatch( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
{
train();
radiusMatchImpl( queryDescs, matches, maxDistance, masks, compactResult );
}
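// Minimal usage sketch of the reworked matching interface (illustrative; queryDescs and
// trainDescCollection are placeholders computed elsewhere, and the calls rely on the
// default mask/compactResult arguments declared in the header):
//
//   BruteForceMatcher<L2<float> > matcher;
//   matcher.add( trainDescCollection );            // vector<Mat>, one Mat per train image
//   matcher.train();                               // optional, match()/knnMatch() call it too
//   vector<DMatch> matches;
//   matcher.match( queryDescs, matches );          // best train descriptor per query descriptor
//   vector<vector<DMatch> > knnMatches;
//   matcher.knnMatch( queryDescs, knnMatches, 2 ); // two best matches per query descriptor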
/*
* BruteForceMatcher L2 specialization
*/
template<>
void BruteForceMatcher<L2<float> >::matchImpl( const Mat& query, const Mat& train, vector<DMatch>& matches, const Mat& mask ) const
void BruteForceMatcher<L2<float> >::knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
{
assert( mask.empty() || (mask.rows == query.rows && mask.cols == train.rows) );
assert( query.cols == train.cols || query.empty() || train.empty() );
#ifndef HAVE_EIGEN2
bfKnnMatchImpl<L2<float> >( *this, queryDescs, matches, knn, masks, compactResult );
#else
CV_Assert( queryDescs.type() == CV_32FC1 || queryDescs.empty() );
CV_Assert( masks.empty() || masks.size() == trainDescCollection.size() );
matches.clear();
matches.reserve( query.rows );
#if (!defined HAVE_EIGEN2)
Mat norms;
cv::reduce( train.mul( train ), norms, 1, 0);
norms = norms.t();
Mat desc_2t = train.t();
for( int i=0;i<query.rows;i++ )
{
Mat distances = (-2)*query.row(i)*desc_2t;
distances += norms;
DMatch match;
match.indexTrain = -1;
double minVal;
Point minLoc;
if( mask.empty() )
matches.reserve(queryDescs.rows);
size_t imgCount = trainDescCollection.size();
Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> e_query_t;
vector<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> > e_trainCollection(trainDescCollection.size());
vector<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> > e_trainNorms2(trainDescCollection.size());
cv2eigen( queryDescs.t(), e_query_t);
for( size_t i = 0; i < trainDescCollection.size(); i++ )
{
cv2eigen( trainDescCollection[i], e_trainCollection[i] );
e_trainNorms2[i] = e_trainCollection[i].rowwise().squaredNorm() / 2;
}
vector<Eigen::Matrix<float, Eigen::Dynamic, 1> > e_allDists( imgCount ); // distances between one query descriptor and all train descriptors
for( int qIdx = 0; qIdx < queryDescs.rows; qIdx++ )
{
if( maskedOut( masks, qIdx ) )
{
minMaxLoc ( distances, &minVal, 0, &minLoc );
if( !compactResult ) // push empty vector
matches.push_back( vector<DMatch>() );
}
else
{
minMaxLoc ( distances, &minVal, 0, &minLoc, 0, mask.row( i ) );
}
match.indexTrain = minLoc.x;
float queryNorm2 = e_query_t.col(qIdx).squaredNorm();
// 1. compute distances between i-th query descriptor and all train descriptors
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
CV_Assert( masks.empty() || masks[iIdx].empty() ||
( masks[iIdx].rows == queryDescs.rows && masks[iIdx].cols == trainDescCollection[iIdx].rows &&
masks[iIdx].type() == CV_8UC1 ) );
CV_Assert( trainDescCollection[iIdx].type() == CV_32FC1 || trainDescCollection[iIdx].empty() );
CV_Assert( queryDescs.cols == trainDescCollection[iIdx].cols );
if( match.indexTrain != -1 )
{
match.indexQuery = i;
double queryNorm = norm( query.row(i) );
match.distance = (float)sqrt( minVal + queryNorm*queryNorm );
matches.push_back( match );
e_allDists[iIdx] = e_trainCollection[iIdx] *e_query_t.col(qIdx);
e_allDists[iIdx] -= e_trainNorms2[iIdx];
if( !masks.empty() && !masks[iIdx].empty() )
{
const uchar* maskPtr = (uchar*)masks[iIdx].ptr(qIdx);
for( int c = 0; c < masks[iIdx].cols; c++ )
{
if( maskPtr[c] == 0 )
e_allDists[iIdx](c) = -std::numeric_limits<float>::max();
}
}
}
// 2. choose knn nearest matches for query[i]
matches.push_back( vector<DMatch>() );
vector<vector<DMatch> >::reverse_iterator curMatches = matches.rbegin();
for( int k = 0; k < knn; k++ )
{
float totalMaxCoeff = -std::numeric_limits<float>::max();
int bestTrainIdx = -1, bestImgIdx = -1;
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
int loc;
float curMaxCoeff = e_allDists[iIdx].maxCoeff( &loc );
if( curMaxCoeff > totalMaxCoeff )
{
totalMaxCoeff = curMaxCoeff;
bestTrainIdx = loc;
bestImgIdx = iIdx;
}
}
if( bestTrainIdx == -1 )
break;
e_allDists[bestImgIdx](bestTrainIdx) = -std::numeric_limits<float>::max();
curMatches->push_back( DMatch(qIdx, bestTrainIdx, bestImgIdx, sqrt((-2)*totalMaxCoeff + queryNorm2)) );
}
std::sort( curMatches->begin(), curMatches->end() );
}
}
#endif
}
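// The Eigen branch above uses the identity ||q - t||^2 = ||q||^2 - 2*q.t + ||t||^2:
// maximizing (q.t - ||t||^2/2) over train descriptors t minimizes the L2 distance, and the
// distance itself is recovered as sqrt(-2*maxCoeff + ||q||^2). As a sketch, per pair:
//
//   float coeff = q.dot(t) - t.squaredNorm() / 2.f;          // what e_allDists stores
//   float dist  = std::sqrt( -2.f*coeff + q.squaredNorm() ); // equals ||q - t||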
template<>
void BruteForceMatcher<L2<float> >::radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
{
#ifndef HAVE_EIGEN2
bfRadiusMatchImpl<L2<float> >( *this, queryDescs, matches, maxDistance, masks, compactResult );
#else
Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> desc1t;
Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> desc2;
cv2eigen( query.t(), desc1t);
cv2eigen( train, desc2 );
CV_Assert( queryDescs.type() == CV_32FC1 || queryDescs.empty() );
CV_Assert( masks.empty() || masks.size() == trainDescCollection.size() );
Eigen::Matrix<float, Eigen::Dynamic, 1> norms = desc2.rowwise().squaredNorm() / 2;
matches.reserve(queryDescs.rows);
size_t imgCount = trainDescCollection.size();
if( mask.empty() )
Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> e_query_t;
vector<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> > e_trainCollection(trainDescCollection.size());
vector<Eigen::Matrix<float, Eigen::Dynamic, Eigen::Dynamic> > e_trainNorms2(trainDescCollection.size());
cv2eigen( queryDescs.t(), e_query_t);
for( size_t i = 0; i < trainDescCollection.size(); i++ )
{
for( int i=0;i<query.rows;i++ )
{
Eigen::Matrix<float, Eigen::Dynamic, 1> distances = desc2*desc1t.col(i);
distances -= norms;
DMatch match;
match.indexQuery = i;
match.distance = sqrt( (-2)*distances.maxCoeff( &match.indexTrain ) + desc1t.col(i).squaredNorm() );
matches.push_back( match );
}
cv2eigen( trainDescCollection[i], e_trainCollection[i] );
e_trainNorms2[i] = e_trainCollection[i].rowwise().squaredNorm() / 2;
}
else
vector<Eigen::Matrix<float, Eigen::Dynamic, 1> > e_allDists( imgCount ); // distances between one query descriptor and all train descriptors
for( int qIdx = 0; qIdx < queryDescs.rows; qIdx++ )
{
for( int i=0;i<query.rows;i++ )
if( maskedOut( masks, qIdx ) )
{
Eigen::Matrix<float, Eigen::Dynamic, 1> distances = desc2*desc1t.col(i);
distances -= norms;
if( !compactResult ) // push empty vector
matches.push_back( vector<DMatch>() );
}
else
{
float queryNorm2 = e_query_t.col(qIdx).squaredNorm();
// 1. compute distances between i-th query descriptor and all train descriptors
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
CV_Assert( masks.empty() || masks[iIdx].empty() ||
( masks[iIdx].rows == queryDescs.rows && masks[iIdx].cols == trainDescCollection[iIdx].rows &&
masks[iIdx].type() == CV_8UC1 ) );
CV_Assert( trainDescCollection[iIdx].type() == CV_32FC1 || trainDescCollection[iIdx].empty() );
CV_Assert( queryDescs.cols == trainDescCollection[iIdx].cols );
e_allDists[iIdx] = e_trainCollection[iIdx] *e_query_t.col(qIdx);
e_allDists[iIdx] -= e_trainNorms2[iIdx];
}
float maxCoeff = -std::numeric_limits<float>::max();
DMatch match;
match.indexTrain = -1;
for( int j=0;j<train.rows;j++ )
matches.push_back( vector<DMatch>() );
vector<vector<DMatch> >::reverse_iterator curMatches = matches.rbegin();
for( size_t iIdx = 0; iIdx < imgCount; iIdx++ )
{
if( possibleMatch( mask, i, j ) && distances( j, 0 ) > maxCoeff )
assert( e_allDists[iIdx].rows() == trainDescCollection[iIdx].rows );
for( int tIdx = 0; tIdx < e_allDists[iIdx].rows(); tIdx++ )
{
maxCoeff = distances( j, 0 );
match.indexTrain = j;
if( masks.empty() || possibleMatch(masks[iIdx], qIdx, tIdx) )
{
float d = sqrt((-2)*e_allDists[iIdx](tIdx) + queryNorm2);
if( d < maxDistance )
curMatches->push_back( DMatch( qIdx, tIdx, iIdx, d ) );
}
}
}
std::sort( curMatches->begin(), curMatches->end() );
}
}
#endif
}
/*
* Flann based matcher
*/
FlannBasedMatcher::FlannBasedMatcher( const Ptr<flann::IndexParams>& _indexParams, const Ptr<flann::SearchParams>& _searchParams )
: indexParams(_indexParams), searchParams(_searchParams), addedDescCount(0)
{
CV_Assert( !_indexParams.empty() );
CV_Assert( !_searchParams.empty() );
}
void FlannBasedMatcher::add( const vector<Mat>& descCollection )
{
DescriptorMatcher::add( descCollection );
for( size_t i = 0; i < descCollection.size(); i++ )
{
addedDescCount += descCollection[i].rows;
}
}
void FlannBasedMatcher::clear()
{
DescriptorMatcher::clear();
mergedDescriptors.clear();
flannIndex.release();
addedDescCount = 0;
}
void FlannBasedMatcher::train()
{
if( flannIndex.empty() || mergedDescriptors.size() < addedDescCount )
{
mergedDescriptors.set( trainDescCollection );
flannIndex = new flann::Index( mergedDescriptors.getDescriptors(), *indexParams );
}
}
if( match.indexTrain != -1 )
void FlannBasedMatcher::convertToDMatches( const DescriptorCollection& collection, const Mat& indices, const Mat& dists,
vector<vector<DMatch> >& matches )
{
matches.resize( indices.rows );
for( int i = 0; i < indices.rows; i++ )
{
for( int j = 0; j < indices.cols; j++ )
{
int idx = indices.at<int>(i, j);
if( idx >= 0 )
{
match.indexQuery = i;
match.distance = sqrt( (-2)*maxCoeff + desc1t.col(i).squaredNorm() );
matches.push_back( match );
int imgIdx, trainIdx;
collection.getLocalIdx( idx, imgIdx, trainIdx );
matches[i].push_back( DMatch( i, trainIdx, imgIdx, std::sqrt(dists.at<float>(i,j))) );
}
}
}
#endif
}
/****************************************************************************************\
* Factory function for descriptor matcher creating *
\****************************************************************************************/
void FlannBasedMatcher::knnMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
Mat indices( queryDescs.rows, knn, CV_32SC1 );
Mat dists( queryDescs.rows, knn, CV_32FC1);
flannIndex->knnSearch( queryDescs, indices, dists, knn, *searchParams );
convertToDMatches( mergedDescriptors, indices, dists, matches );
}
void FlannBasedMatcher::radiusMatchImpl( const Mat& queryDescs, vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
const int count = mergedDescriptors.size(); // TODO do count as param?
Mat indices( queryDescs.rows, count, CV_32SC1, Scalar::all(-1) );
Mat dists( queryDescs.rows, count, CV_32FC1, Scalar::all(-1) );
for( int qIdx = 0; qIdx < queryDescs.rows; qIdx++ )
{
Mat queryDescsRow = queryDescs.row(qIdx);
Mat indicesRow = indices.row(qIdx);
Mat distsRow = dists.row(qIdx);
flannIndex->radiusSearch( queryDescsRow, indicesRow, distsRow, maxDistance*maxDistance, *searchParams );
}
convertToDMatches( mergedDescriptors, indices, dists, matches );
}
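// radiusMatchImpl() passes maxDistance*maxDistance to radiusSearch() because FLANN reports
// squared L2 distances, which convertToDMatches() maps back with sqrt(). A usage sketch
// (illustrative; default index/search parameters assumed from the header, 0.5f is an
// arbitrary threshold and trainDescCollection/queryDescs are placeholders):
//
//   FlannBasedMatcher matcher;
//   matcher.add( trainDescCollection );
//   matcher.train();                                   // builds the index over merged descriptors
//   vector<vector<DMatch> > matches;
//   matcher.radiusMatch( queryDescs, matches, 0.5f );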
/*
* Factory function for DescriptorMatcher creating
*/
Ptr<DescriptorMatcher> createDescriptorMatcher( const string& descriptorMatcherType )
{
DescriptorMatcher* dm = 0;
......@@ -239,288 +481,284 @@ Ptr<DescriptorMatcher> createDescriptorMatcher( const string& descriptorMatcherT
{
dm = new BruteForceMatcher<L1<float> >();
}
else if ( !descriptorMatcherType.compare( "FlannBased" ) )
{
dm = new FlannBasedMatcher();
}
else
{
//CV_Error( CV_StsBadArg, "unsupported descriptor matcher type");
}
return dm;
}
/****************************************************************************************\
* GenericDescriptorMatch *
* GenericDescriptorMatcher *
\****************************************************************************************/
/*
* KeyPointCollection
*/
void KeyPointCollection::add( const Mat& _image, const vector<KeyPoint>& _points )
void GenericDescriptorMatcher::KeyPointCollection::add( const vector<Mat>& _images,
const vector<vector<KeyPoint> >& _points )
{
// update m_start_indices
if( startIndices.empty() )
startIndices.push_back(0);
CV_Assert( !_images.empty() );
CV_Assert( _images.size() == _points.size() );
images.insert( images.end(), _images.begin(), _images.end() );
points.insert( points.end(), _points.begin(), _points.end() );
for( size_t i = 0; i < _points.size(); i++ )
size += _points[i].size();
size_t prevSize = startIndices.size(), addSize = _images.size();
startIndices.resize( prevSize + addSize );
if( prevSize == 0 )
startIndices[prevSize] = 0; //first
else
startIndices.push_back((int)(*startIndices.rbegin() + points.rbegin()->size()));
startIndices[prevSize] = startIndices[prevSize-1] + points[prevSize-1].size();
// add image and keypoints
images.push_back(_image);
points.push_back(_points);
for( size_t i = prevSize + 1; i < prevSize + addSize; i++ )
{
startIndices[i] = startIndices[i - 1] + points[i - 1].size();
}
}
KeyPoint KeyPointCollection::getKeyPoint( int index ) const
void GenericDescriptorMatcher::KeyPointCollection::clear()
{
size_t i = 0;
for(; i < startIndices.size() && startIndices[i] <= index; i++);
i--;
assert(i < startIndices.size() && (size_t)index - startIndices[i] < points[i].size());
points.clear();
}
return points[i][index - startIndices[i]];
const KeyPoint& GenericDescriptorMatcher::KeyPointCollection::getKeyPoint( int imgIdx, int localPointIdx ) const
{
CV_Assert( imgIdx < (int)images.size() );
CV_Assert( localPointIdx < (int)points[imgIdx].size() );
return points[imgIdx][localPointIdx];
}
size_t KeyPointCollection::calcKeypointCount() const
const KeyPoint& GenericDescriptorMatcher::KeyPointCollection::getKeyPoint( int globalPointIdx ) const
{
if( startIndices.empty() )
return 0;
return *startIndices.rbegin() + points.rbegin()->size();
int imgIdx, localPointIdx;
getLocalIdx( globalPointIdx, imgIdx, localPointIdx );
return points[imgIdx][localPointIdx];
}
void KeyPointCollection::clear()
void GenericDescriptorMatcher::KeyPointCollection::getLocalIdx( int globalPointIdx, int& imgIdx, int& localPointIdx ) const
{
images.clear();
points.clear();
startIndices.clear();
imgIdx = -1;
CV_Assert( globalPointIdx < (int)pointCount() );
for( size_t i = 1; i < startIndices.size(); i++ )
{
if( globalPointIdx < startIndices[i] )
{
imgIdx = i - 1;
break;
}
}
imgIdx = imgIdx == -1 ? startIndices.size() -1 : imgIdx;
localPointIdx = globalPointIdx - startIndices[imgIdx];
}
/*
* GenericDescriptorMatch
* GenericDescriptorMatcher
*/
void GenericDescriptorMatch::match( const Mat&, vector<KeyPoint>&, vector<DMatch>& )
void GenericDescriptorMatcher::add( const vector<Mat>& imgCollection,
vector<vector<KeyPoint> >& pointCollection )
{
trainPointCollection.add( imgCollection, pointCollection );
}
void GenericDescriptorMatch::match( const Mat&, vector<KeyPoint>&, vector<vector<DMatch> >&, float )
void GenericDescriptorMatcher::clear()
{
trainPointCollection.clear();
}
void GenericDescriptorMatch::add( KeyPointCollection& collection )
void GenericDescriptorMatcher::classify( const Mat& queryImage, vector<KeyPoint>& queryPoints,
const Mat& trainImage, vector<KeyPoint>& trainPoints ) const
{
for( size_t i = 0; i < collection.images.size(); i++ )
add( collection.images[i], collection.points[i] );
vector<DMatch> matches;
match( queryImage, queryPoints, trainImage, trainPoints, matches );
// remap keypoint indices to descriptors
for( size_t i = 0; i < matches.size(); i++ )
queryPoints[matches[i].queryIdx].class_id = trainPoints[matches[i].trainIdx].class_id;
}
void GenericDescriptorMatch::classify( const Mat& image, vector<cv::KeyPoint>& points )
void GenericDescriptorMatcher::classify( const Mat& queryImage, vector<KeyPoint>& queryPoints )
{
vector<int> keypointIndices;
match( image, points, keypointIndices );
vector<DMatch> matches;
match( queryImage, queryPoints, matches );
// remap keypoint indices to descriptors
for( size_t i = 0; i < keypointIndices.size(); i++ )
points[i].class_id = collection.getKeyPoint(keypointIndices[i]).class_id;
};
for( size_t i = 0; i < matches.size(); i++ )
queryPoints[matches[i].queryIdx].class_id = trainPointCollection.getKeyPoint( matches[i].trainIdx, matches[i].trainIdx ).class_id;
}
void GenericDescriptorMatcher::match( const Mat& queryImg, vector<KeyPoint>& queryPoints,
const Mat& trainImg, vector<KeyPoint>& trainPoints,
vector<DMatch>& matches, const Mat& mask ) const
{
Ptr<GenericDescriptorMatcher> tempMatcher = createEmptyMatcherCopy();
vector<vector<KeyPoint> > vecTrainPoints(1, trainPoints);
tempMatcher->add( vector<Mat>(1, trainImg), vecTrainPoints );
tempMatcher->match( queryImg, queryPoints, matches, vector<Mat>(1, mask) );
vecTrainPoints[0].swap( trainPoints );
}
void GenericDescriptorMatcher::knnMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
const Mat& trainImg, vector<KeyPoint>& trainPoints,
vector<vector<DMatch> >& matches, int knn, const Mat& mask, bool compactResult ) const
{
Ptr<GenericDescriptorMatcher> tempMatcher = createEmptyMatcherCopy();
vector<vector<KeyPoint> > vecTrainPoints(1, trainPoints);
tempMatcher->add( vector<Mat>(1, trainImg), vecTrainPoints );
tempMatcher->knnMatch( queryImg, queryPoints, matches, knn, vector<Mat>(1, mask), compactResult );
vecTrainPoints[0].swap( trainPoints );
}
void GenericDescriptorMatch::clear()
void GenericDescriptorMatcher::radiusMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
const Mat& trainImg, vector<KeyPoint>& trainPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const Mat& mask, bool compactResult ) const
{
collection.clear();
Ptr<GenericDescriptorMatcher> tempMatcher = createEmptyMatcherCopy();
vector<vector<KeyPoint> > vecTrainPoints(1, trainPoints);
tempMatcher->add( vector<Mat>(1, trainImg), vecTrainPoints );
tempMatcher->radiusMatch( queryImg, queryPoints, matches, maxDistance, vector<Mat>(1, mask), compactResult );
vecTrainPoints[0].swap( trainPoints );
}
void GenericDescriptorMatcher::match( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<DMatch>& matches, const vector<Mat>& masks )
{
vector<vector<DMatch> > knnMatches;
knnMatch( queryImg, queryPoints, knnMatches, 1, masks, false );
convertMatches( knnMatches, matches );
}
void GenericDescriptorMatcher::knnMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
{
train();
knnMatchImpl( queryImg, queryPoints, matches, knn, masks, compactResult );
}
void GenericDescriptorMatcher::radiusMatch( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
{
train();
radiusMatchImpl( queryImg, queryPoints, matches, maxDistance, masks, compactResult );
}
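// Usage sketch for the image-based interface (illustrative; "FERN" could equally be
// "ONEWAY", params.xml stands for any parameters file, and the default mask arguments
// declared in the header are assumed):
//
//   Ptr<GenericDescriptorMatcher> gdm = createGenericDescriptorMatcher( "FERN", "params.xml" );
//   vector<DMatch> matches;
//   gdm->match( queryImg, queryKeypoints, trainImg, trainKeypoints, matches );
//   // or accumulate a training set first and match against the whole collection:
//   // gdm->add( trainImgCollection, trainPointCollection );
//   // gdm->match( queryImg, queryKeypoints, matches );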
/****************************************************************************************\
* OneWayDescriptorMatch *
* OneWayDescriptorMatcher *
\****************************************************************************************/
OneWayDescriptorMatch::OneWayDescriptorMatch()
{}
OneWayDescriptorMatch::OneWayDescriptorMatch( const Params& _params)
OneWayDescriptorMatcher::OneWayDescriptorMatcher( const Params& _params)
{
initialize(_params);
}
OneWayDescriptorMatch::~OneWayDescriptorMatch()
OneWayDescriptorMatcher::~OneWayDescriptorMatcher()
{}
void OneWayDescriptorMatch::initialize( const Params& _params, OneWayDescriptorBase *_base)
void OneWayDescriptorMatcher::initialize( const Params& _params, const Ptr<OneWayDescriptorBase>& _base )
{
base.release();
if (_base != 0)
{
clear();
if( _base.empty() )
base = _base;
}
params = _params;
}
void OneWayDescriptorMatch::add( const Mat& image, vector<KeyPoint>& keypoints )
void OneWayDescriptorMatcher::clear()
{
if( base.empty() )
base = new OneWayDescriptorObject( params.patchSize, params.poseCount, params.pcaFilename,
params.trainPath, params.trainImagesList, params.minScale, params.maxScale, params.stepScale);
GenericDescriptorMatcher::clear();
size_t trainFeatureCount = keypoints.size();
base->Allocate( (int)trainFeatureCount );
IplImage _image = image;
for( size_t i = 0; i < keypoints.size(); i++ )
base->InitializeDescriptor( (int)i, &_image, keypoints[i], "" );
collection.add( Mat(), keypoints );
#if defined(_KDTREE)
base->ConvertDescriptorsArrayToTree();
#endif
prevTrainCount = 0;
base->clear();
}
void OneWayDescriptorMatch::add( KeyPointCollection& keypoints )
void OneWayDescriptorMatcher::train()
{
if( base.empty() )
if( base.empty() || prevTrainCount < (int)trainPointCollection.pointCount() )
{
base = new OneWayDescriptorObject( params.patchSize, params.poseCount, params.pcaFilename,
params.trainPath, params.trainImagesList, params.minScale, params.maxScale, params.stepScale);
params.trainPath, params.trainImagesList, params.minScale, params.maxScale, params.stepScale );
size_t trainFeatureCount = keypoints.calcKeypointCount();
base->Allocate( trainPointCollection.pointCount() );
prevTrainCount = trainPointCollection.pointCount();
base->Allocate( (int)trainFeatureCount );
int count = 0;
for( size_t i = 0; i < keypoints.points.size(); i++ )
{
for( size_t j = 0; j < keypoints.points[i].size(); j++ )
const vector<vector<KeyPoint> >& points = trainPointCollection.getKeypoints();
int count = 0;
for( size_t i = 0; i < points.size(); i++ )
{
IplImage img = keypoints.images[i];
base->InitializeDescriptor( count++, &img, keypoints.points[i][j], "" );
IplImage _image = trainPointCollection.getImage(i);
for( size_t j = 0; j < points[i].size(); j++ )
base->InitializeDescriptor( count++, &_image, points[i][j], "" );
}
collection.add( Mat(), keypoints.points[i] );
}
#if defined(_KDTREE)
base->ConvertDescriptorsArrayToTree();
base->ConvertDescriptorsArrayToTree();
#endif
}
}
void OneWayDescriptorMatch::match( const Mat& image, vector<KeyPoint>& points, vector<int>& indices)
void OneWayDescriptorMatcher::knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
vector<DMatch> matchings( points.size() );
indices.resize(points.size());
match( image, points, matchings );
train();
for( size_t i = 0; i < points.size(); i++ )
indices[i] = matchings[i].indexTrain;
}
CV_Assert( knn == 1 ); // knn > 1 unsupported because of bug in OneWayDescriptorBase for this case
void OneWayDescriptorMatch::match( const Mat& image, vector<KeyPoint>& points, vector<DMatch>& matches )
{
matches.resize( points.size() );
IplImage _image = image;
for( size_t i = 0; i < points.size(); i++ )
matches.resize( queryPoints.size() );
IplImage _qimage = queryImg;
for( size_t i = 0; i < queryPoints.size(); i++ )
{
int poseIdx = -1;
DMatch match;
match.indexQuery = (int)i;
match.indexTrain = -1;
base->FindDescriptor( &_image, points[i].pt, match.indexTrain, poseIdx, match.distance );
matches[i] = match;
int descIdx = -1, poseIdx = -1;
float distance;
base->FindDescriptor( &_qimage, queryPoints[i].pt, descIdx, poseIdx, distance );
matches[i].push_back( DMatch(i, descIdx, distance) );
}
}
void OneWayDescriptorMatch::match( const Mat& image, vector<KeyPoint>& points, vector<vector<DMatch> >& matches, float /*threshold*/ )
void OneWayDescriptorMatcher::radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
matches.clear();
matches.resize( points.size() );
vector<DMatch> dmatches;
match( image, points, dmatches );
for( size_t i=0;i<matches.size();i++ )
{
matches[i].push_back( dmatches[i] );
}
train();
/*
printf("Start matching %d points\n", points.size());
//std::cout << "Start matching " << points.size() << "points\n";
assert(collection.images.size() == 1);
int n = collection.points[0].size();
printf("n = %d\n", n);
for( size_t i = 0; i < points.size(); i++ )
matches.resize( queryPoints.size() );
IplImage _qimage = queryImg;
for( size_t i = 0; i < queryPoints.size(); i++ )
{
//printf("Matching %d\n", i);
//int poseIdx = -1;
DMatch match;
match.indexQuery = i;
match.indexTrain = -1;
CvPoint pt = points[i].pt;
CvRect roi = cvRect(cvRound(pt.x - 24/4),
cvRound(pt.y - 24/4),
24/2, 24/2);
cvSetImageROI(&_image, roi);
std::vector<int> desc_idxs;
std::vector<int> pose_idxs;
std::vector<float> distances;
std::vector<float> _scales;
base->FindDescriptor(&_image, n, desc_idxs, pose_idxs, distances, _scales);
cvResetImageROI(&_image);
for( int j=0;j<n;j++ )
{
match.indexTrain = desc_idxs[j];
match.distance = distances[j];
matches[i].push_back( match );
}
//sort( matches[i].begin(), matches[i].end(), compareIndexTrain );
//for( int j=0;j<n;j++ )
//{
//printf( "%d %f; ",matches[i][j].indexTrain, matches[i][j].distance);
//}
//printf("\n\n\n");
//base->FindDescriptor( &_image, 100, points[i].pt, match.indexTrain, poseIdx, match.distance );
//matches[i].push_back( match );
int descIdx = -1, poseIdx = -1;
float distance;
base->FindDescriptor( &_qimage, queryPoints[i].pt, descIdx, poseIdx, distance );
if( distance < maxDistance )
matches[i].push_back( DMatch(i, descIdx, distance) );
}
*/
}
void OneWayDescriptorMatch::read( const FileNode &fn )
void OneWayDescriptorMatcher::read( const FileNode &fn )
{
base = new OneWayDescriptorObject( params.patchSize, params.poseCount, string (), string (), string (),
params.minScale, params.maxScale, params.stepScale );
base->Read (fn);
}
void OneWayDescriptorMatch::write( FileStorage& fs ) const
void OneWayDescriptorMatcher::write( FileStorage& fs ) const
{
base->Write (fs);
}
void OneWayDescriptorMatch::classify( const Mat& image, vector<KeyPoint>& points )
{
IplImage _image = image;
for( size_t i = 0; i < points.size(); i++ )
{
int descIdx = -1;
int poseIdx = -1;
float distance;
base->FindDescriptor(&_image, points[i].pt, descIdx, poseIdx, distance);
points[i].class_id = collection.getKeyPoint(descIdx).class_id;
}
}
void OneWayDescriptorMatch::clear ()
{
GenericDescriptorMatch::clear();
base->clear ();
}
/****************************************************************************************\
* FernDescriptorMatch *
* FernDescriptorMatcher *
\****************************************************************************************/
FernDescriptorMatch::Params::Params( int _nclasses, int _patchSize, int _signatureSize,
FernDescriptorMatcher::Params::Params( int _nclasses, int _patchSize, int _signatureSize,
int _nstructs, int _structSize, int _nviews, int _compressionMethod,
const PatchGenerator& _patchGenerator ) :
nclasses(_nclasses), patchSize(_patchSize), signatureSize(_signatureSize),
......@@ -528,25 +766,14 @@ FernDescriptorMatch::Params::Params( int _nclasses, int _patchSize, int _signatu
compressionMethod(_compressionMethod), patchGenerator(_patchGenerator)
{}
FernDescriptorMatch::Params::Params( const string& _filename )
FernDescriptorMatcher::Params::Params( const string& _filename )
{
filename = _filename;
}
FernDescriptorMatch::FernDescriptorMatch()
{}
FernDescriptorMatch::FernDescriptorMatch( const Params& _params )
FernDescriptorMatcher::FernDescriptorMatcher( const Params& _params )
{
params = _params;
}
FernDescriptorMatch::~FernDescriptorMatch()
{}
void FernDescriptorMatch::initialize( const Params& _params )
{
classifier.release();
prevTrainCount = 0;
params = _params;
if( !params.filename.empty() )
{
......@@ -557,30 +784,35 @@ void FernDescriptorMatch::initialize( const Params& _params )
}
}
void FernDescriptorMatch::add( const Mat& image, vector<KeyPoint>& keypoints )
FernDescriptorMatcher::~FernDescriptorMatcher()
{}
void FernDescriptorMatcher::clear()
{
if( params.filename.empty() )
collection.add( image, keypoints );
GenericDescriptorMatcher::clear();
classifier.release();
prevTrainCount = 0;
}
void FernDescriptorMatch::trainFernClassifier()
void FernDescriptorMatcher::train()
{
if( classifier.empty() )
if( classifier.empty() || prevTrainCount < (int)trainPointCollection.pointCount() )
{
assert( params.filename.empty() );
vector<vector<Point2f> > points(collection.images.size());
for( size_t imgIdx = 0; imgIdx < collection.images.size(); imgIdx++ )
KeyPoint::convert( collection.points[imgIdx], points[imgIdx] );
vector<vector<Point2f> > points( trainPointCollection.imageCount() );
for( size_t imgIdx = 0; imgIdx < trainPointCollection.imageCount(); imgIdx++ )
KeyPoint::convert( trainPointCollection.getKeypoints(imgIdx), points[imgIdx] );
classifier = new FernClassifier( points, collection.images, vector<vector<int> >(), 0, // each point is a class
classifier = new FernClassifier( points, trainPointCollection.getImages(), vector<vector<int> >(), 0, // each point is a class
params.patchSize, params.signatureSize, params.nstructs, params.structSize,
params.nviews, params.compressionMethod, params.patchGenerator );
}
}
void FernDescriptorMatch::calcBestProbAndMatchIdx( const Mat& image, const Point2f& pt,
float& bestProb, int& bestMatchIdx, vector<float>& signature )
void FernDescriptorMatcher::calcBestProbAndMatchIdx( const Mat& image, const Point2f& pt,
float& bestProb, int& bestMatchIdx, vector<float>& signature )
{
(*classifier)( image, pt, signature);
......@@ -596,78 +828,66 @@ void FernDescriptorMatch::calcBestProbAndMatchIdx( const Mat& image, const Point
}
}
void FernDescriptorMatch::match( const Mat& image, vector<KeyPoint>& keypoints, vector<int>& indices )
void FernDescriptorMatcher::knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
trainFernClassifier();
train();
indices.resize( keypoints.size() );
matches.resize( queryPoints.size() );
vector<float> signature( (size_t)classifier->getClassCount() );
for( size_t pi = 0; pi < keypoints.size(); pi++ )
for( size_t queryIdx = 0; queryIdx < queryPoints.size(); queryIdx++ )
{
//calcBestProbAndMatchIdx( image, keypoints[pi].pt, bestProb, indices[pi], signature );
//TODO: use octave and image pyramid
indices[pi] = (*classifier)(image, keypoints[pi].pt, signature);
}
}
(*classifier)( queryImg, queryPoints[queryIdx].pt, signature);
void FernDescriptorMatch::match( const Mat& image, vector<KeyPoint>& keypoints, vector<DMatch>& matches )
{
trainFernClassifier();
matches.resize( keypoints.size() );
vector<float> signature( (size_t)classifier->getClassCount() );
for( int k = 0; k < knn; k++ )
{
DMatch bestMatch;
size_t ci = 0, bestCi = 0;
for( ; ci < signature.size(); ci++ )
{
if( -signature[ci] < bestMatch.distance )
{
int imgIdx = -1, trainIdx = -1;
trainPointCollection.getLocalIdx( ci , imgIdx, trainIdx );
bestMatch = DMatch( queryIdx, trainIdx, imgIdx, -signature[ci] );
bestCi = ci;
}
}
for( int pi = 0; pi < (int)keypoints.size(); pi++ )
{
matches[pi].indexQuery = pi;
calcBestProbAndMatchIdx( image, keypoints[pi].pt, matches[pi].distance, matches[pi].indexTrain, signature );
//matching[pi].distance is log of probability so we need to transform it
matches[pi].distance = -matches[pi].distance;
if( bestMatch.trainIdx == -1 )
break;
signature[bestCi] = -std::numeric_limits<float>::max();
matches[queryIdx].push_back( bestMatch );
}
}
}
void FernDescriptorMatch::match( const Mat& image, vector<KeyPoint>& keypoints, vector<vector<DMatch> >& matches, float threshold )
void FernDescriptorMatcher::radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& /*masks*/, bool /*compactResult*/ )
{
trainFernClassifier();
matches.resize( keypoints.size() );
train();
matches.resize( queryPoints.size() );
vector<float> signature( (size_t)classifier->getClassCount() );
for( int pi = 0; pi < (int)keypoints.size(); pi++ )
for( size_t i = 0; i < queryPoints.size(); i++ )
{
(*classifier)( image, keypoints[pi].pt, signature);
DMatch match;
match.indexQuery = pi;
(*classifier)( queryImg, queryPoints[i].pt, signature);
for( int ci = 0; ci < classifier->getClassCount(); ci++ )
{
if( -signature[ci] < threshold )
if( -signature[ci] < maxDistance )
{
match.distance = -signature[ci];
match.indexTrain = ci;
matches[pi].push_back( match );
int imgIdx = -1, trainIdx = -1;
trainPointCollection.getLocalIdx( ci , imgIdx, trainIdx );
matches[i].push_back( DMatch( i, trainIdx, imgIdx, -signature[ci] ) );
}
}
}
}
void FernDescriptorMatch::classify( const Mat& image, vector<KeyPoint>& keypoints )
{
trainFernClassifier();
vector<float> signature( (size_t)classifier->getClassCount() );
for( size_t pi = 0; pi < keypoints.size(); pi++ )
{
float bestProb = 0;
int bestMatchIdx = -1;
calcBestProbAndMatchIdx( image, keypoints[pi].pt, bestProb, bestMatchIdx, signature );
keypoints[pi].class_id = collection.getKeyPoint(bestMatchIdx).class_id;
}
}
void FernDescriptorMatch::read( const FileNode &fn )
void FernDescriptorMatcher::read( const FileNode &fn )
{
params.nclasses = fn["nclasses"];
params.patchSize = fn["patchSize"];
......@@ -680,7 +900,7 @@ void FernDescriptorMatch::read( const FileNode &fn )
//classifier->read(fn);
}
void FernDescriptorMatch::write( FileStorage& fs ) const
void FernDescriptorMatcher::write( FileStorage& fs ) const
{
fs << "nclasses" << params.nclasses;
fs << "patchSize" << params.patchSize;
......@@ -693,81 +913,75 @@ void FernDescriptorMatch::write( FileStorage& fs ) const
// classifier->write(fs);
}
void FernDescriptorMatch::clear ()
{
GenericDescriptorMatch::clear();
classifier.release();
}
/****************************************************************************************\
* VectorDescriptorMatch *
* VectorDescriptorMatcher *
\****************************************************************************************/
void VectorDescriptorMatch::add( const Mat& image, vector<KeyPoint>& keypoints )
void VectorDescriptorMatcher::add( const vector<Mat>& imgCollection,
vector<vector<KeyPoint> >& pointCollection )
{
Mat descriptors;
extractor->compute( image, keypoints, descriptors );
matcher->add( descriptors );
collection.add( Mat(), keypoints );
};
vector<Mat> descCollection;
extractor->compute( imgCollection, pointCollection, descCollection );
void VectorDescriptorMatch::match( const Mat& image, vector<KeyPoint>& points, vector<int>& keypointIndices )
{
Mat descriptors;
extractor->compute( image, points, descriptors );
matcher->add( descCollection );
matcher->match( descriptors, keypointIndices );
};
trainPointCollection.add( imgCollection, pointCollection );
}
void VectorDescriptorMatch::match( const Mat& image, vector<KeyPoint>& points, vector<DMatch>& matches )
void VectorDescriptorMatcher::clear()
{
Mat descriptors;
extractor->compute( image, points, descriptors );
matcher->match( descriptors, matches );
//extractor->clear();
matcher->clear();
GenericDescriptorMatcher::clear();
}
void VectorDescriptorMatch::match( const Mat& image, vector<KeyPoint>& points,
vector<vector<DMatch> >& matches, float threshold )
void VectorDescriptorMatcher::train()
{
Mat descriptors;
extractor->compute( image, points, descriptors );
matcher->train();
}
matcher->match( descriptors, matches, threshold );
void VectorDescriptorMatcher::knnMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, int knn,
const vector<Mat>& masks, bool compactResult )
{
Mat queryDescs;
extractor->compute( queryImg, queryPoints, queryDescs );
matcher->knnMatch( queryDescs, matches, knn, masks, compactResult );
}
void VectorDescriptorMatch::clear()
void VectorDescriptorMatcher::radiusMatchImpl( const Mat& queryImg, vector<KeyPoint>& queryPoints,
vector<vector<DMatch> >& matches, float maxDistance,
const vector<Mat>& masks, bool compactResult )
{
GenericDescriptorMatch::clear();
matcher->clear();
Mat queryDescs;
extractor->compute( queryImg, queryPoints, queryDescs );
matcher->radiusMatch( queryDescs, matches, maxDistance, masks, compactResult );
}
void VectorDescriptorMatch::read( const FileNode& fn )
void VectorDescriptorMatcher::read( const FileNode& fn )
{
GenericDescriptorMatch::read(fn);
GenericDescriptorMatcher::read(fn);
extractor->read (fn);
}
void VectorDescriptorMatch::write (FileStorage& fs) const
void VectorDescriptorMatcher::write (FileStorage& fs) const
{
GenericDescriptorMatch::write(fs);
GenericDescriptorMatcher::write(fs);
extractor->write (fs);
}
/****************************************************************************************\
* Factory function for GenericDescriptorMatch creating *
\****************************************************************************************/
Ptr<GenericDescriptorMatch> createGenericDescriptorMatcher( const string& genericDescritptorMatcherType,
const string &paramsFilename )
/*
* Factory function for GenericDescriptorMatcher creating
*/
Ptr<GenericDescriptorMatcher> createGenericDescriptorMatcher( const string& genericDescritptorMatcherType, const string &paramsFilename )
{
GenericDescriptorMatch *descriptorMatcher = 0;
Ptr<GenericDescriptorMatcher> descriptorMatcher;
if( ! genericDescritptorMatcherType.compare("ONEWAY") )
{
descriptorMatcher = new OneWayDescriptorMatch();
descriptorMatcher = new OneWayDescriptorMatcher();
}
else if( ! genericDescritptorMatcherType.compare("FERN") )
{
descriptorMatcher = new FernDescriptorMatch();
descriptorMatcher = new FernDescriptorMatcher();
}
else if( ! genericDescritptorMatcherType.compare ("CALONDER") )
{
......@@ -783,7 +997,6 @@ Ptr<GenericDescriptorMatch> createGenericDescriptorMatcher( const string& generi
fs.release();
}
}
return descriptorMatcher;
}
......
......@@ -109,19 +109,17 @@ void testCalonderClassifier( const string& classifierFilename, const string& img
// Match descriptors
BruteForceMatcher<L1<float> > matcher;
matcher.add( descriptors2 );
vector<int> matches;
matcher.match( descriptors1, matches );
vector<DMatch> matches;
matcher.match( descriptors1, descriptors2, matches );
// Prepare inlier mask
vector<char> matchesMask( matches.size(), 0 );
vector<Point2f> points1; KeyPoint::convert( keypoints1, points1 );
vector<Point2f> points2; KeyPoint::convert( keypoints2, points2 );
Mat points1t; perspectiveTransform(Mat(points1), points1t, H12);
vector<int>::const_iterator mit = matches.begin();
for( size_t mi = 0; mi < matches.size(); mi++ )
{
if( norm(points2[matches[mi]] - points1t.at<Point2f>(mi,0)) < 4 ) // inlier
if( norm(points2[matches[mi].trainIdx] - points1t.at<Point2f>(mi,0)) < 4 ) // inlier
matchesMask[mi] = 1;
}
......
......@@ -948,7 +948,7 @@ void VocData::calcClassifierConfMatRow(const string& obj_class, const vector<Obd
/* prepare variables related to calculating recall if using the recall threshold */
int retrieved_hits = 0;
int total_relevant;
int total_relevant = 0;
if (cond == CV_VOC_CCOND_RECALL)
{
vector<char> ground_truth;
......@@ -2200,7 +2200,7 @@ bool writeBowImageDescriptor( const string& file, const Mat& bowImageDescriptor
// Load in the bag of words vectors for a set of images, from file if possible
void calculateImageDescriptors( const vector<ObdImage>& images, vector<Mat>& imageDescriptors,
const Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
const string& resPath )
{
CV_Assert( !bowExtractor->getVocabulary().empty() );
......@@ -2343,7 +2343,7 @@ void setSVMTrainAutoParams( CvParamGrid& c_grid, CvParamGrid& gamma_grid,
}
void trainSVMClassifier( CvSVM& svm, const SVMTrainParamsExt& svmParamsExt, const string& objClassName, VocData& vocData,
const Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
const string& resPath )
{
/* first check if a previously trained svm for the current class has been saved to file */
......@@ -2418,7 +2418,7 @@ void trainSVMClassifier( CvSVM& svm, const SVMTrainParamsExt& svmParamsExt, cons
}
void computeConfidences( CvSVM& svm, const string& objClassName, VocData& vocData,
const Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
Ptr<BOWImgDescriptorExtractor>& bowExtractor, const Ptr<FeatureDetector>& fdetector,
const string& resPath )
{
cout << "*** CALCULATING CONFIDENCES FOR CLASS " << objClassName << " ***" << endl;
......@@ -2437,7 +2437,7 @@ void computeConfidences( CvSVM& svm, const string& objClassName, VocData& vocDat
// Use the bag of words vectors to calculate classifier output for each image in test set
cout << "CALCULATING CONFIDENCE SCORES FOR CLASS " << objClassName << "..." << endl;
vector<float> confidences( images.size() );
float signMul;
float signMul = 1.f;
for( size_t imageIdx = 0; imageIdx < images.size(); imageIdx++ )
{
if( imageIdx == 0 )
......
......@@ -72,7 +72,7 @@ void doIteration( const Mat& img1, Mat& img2, bool isWarpPerspective,
{
cout << "< Evaluate descriptor match..." << endl;
vector<Point2f> curve;
Ptr<GenericDescriptorMatch> gdm = new VectorDescriptorMatch( descriptorExtractor, descriptorMatcher );
Ptr<GenericDescriptorMatcher> gdm = new VectorDescriptorMatcher( descriptorExtractor, descriptorMatcher );
evaluateGenericDescriptorMatcher( img1, img2, H12, keypoints1, keypoints2, 0, 0, curve, gdm );
for( float l_p = 0; l_p < 1 - FLT_EPSILON; l_p+=0.1 )
cout << "1-precision = " << l_p << "; recall = " << getRecall( curve, l_p ) << endl;
......@@ -81,7 +81,7 @@ void doIteration( const Mat& img1, Mat& img2, bool isWarpPerspective,
vector<int> trainIdxs( matches.size() );
for( size_t i = 0; i < matches.size(); i++ )
trainIdxs[i] = matches[i].indexTrain;
trainIdxs[i] = matches[i].trainIdx;
if( !isWarpPerspective && ransacReprojThreshold >= 0 )
{
......
......@@ -8,7 +8,7 @@
using namespace cv;
IplImage* DrawCorrespondences(IplImage* img1, const vector<KeyPoint>& features1, IplImage* img2,
const vector<KeyPoint>& features2, const vector<int>& desc_idx);
const vector<KeyPoint>& features2, const vector<DMatch>& desc_idx);
int main(int argc, char** argv)
{
......@@ -24,7 +24,7 @@ int main(int argc, char** argv)
std::string alg_name = std::string(argv[3]);
std::string params_filename = std::string(argv[4]);
GenericDescriptorMatch *descriptorMatcher = createGenericDescriptorMatcher(alg_name, params_filename);
Ptr<GenericDescriptorMatcher> descriptorMatcher = createGenericDescriptorMatcher(alg_name, params_filename);
if( descriptorMatcher == 0 )
{
printf ("Cannot create descriptor\n");
......@@ -50,10 +50,8 @@ int main(int argc, char** argv)
printf("Finding nearest neighbors... \n");
// find NN for each of keypoints2 in keypoints1
descriptorMatcher->add( img1, keypoints1 );
vector<int> matches2to1;
matches2to1.resize(keypoints2.size());
descriptorMatcher->match( img2, keypoints2, matches2to1 );
vector<DMatch> matches2to1;
descriptorMatcher->match( img2, keypoints2, img1, keypoints1, matches2to1 );
printf("Done\n");
IplImage* img_corr = DrawCorrespondences(img1, keypoints1, img2, keypoints2, matches2to1);
......@@ -65,11 +63,10 @@ int main(int argc, char** argv)
cvReleaseImage(&img1);
cvReleaseImage(&img2);
cvReleaseImage(&img_corr);
delete descriptorMatcher;
}
IplImage* DrawCorrespondences(IplImage* img1, const vector<KeyPoint>& features1, IplImage* img2,
const vector<KeyPoint>& features2, const vector<int>& desc_idx)
const vector<KeyPoint>& features2, const vector<DMatch>& desc_idx)
{
IplImage* img_corr = cvCreateImage(cvSize(img1->width + img2->width, MAX(img1->height, img2->height)),
IPL_DEPTH_8U, 3);
......@@ -88,7 +85,7 @@ IplImage* DrawCorrespondences(IplImage* img1, const vector<KeyPoint>& features1,
{
CvPoint pt = cvPoint(cvRound(features2[i].pt.x + img1->width), cvRound(features2[i].pt.y));
cvCircle(img_corr, pt, 3, CV_RGB(255, 0, 0));
cvLine(img_corr, features1[desc_idx[i]].pt, pt, CV_RGB(0, 255, 0));
cvLine(img_corr, features1[desc_idx[i].trainIdx].pt, pt, CV_RGB(0, 255, 0));
}
return img_corr;
......
......@@ -35,9 +35,8 @@ int main(int argc, char** argv)
// matching descriptors
BruteForceMatcher<L2<float> > matcher;
vector<int> matches;
matcher.add(descriptors2);
matcher.match(descriptors1, matches);
vector<DMatch> matches;
matcher.match(descriptors1, descriptors2, matches);
// drawing the results
namedWindow("matches", 1);
......
#include <highgui.h>
#include "opencv2/features2d/features2d.hpp"
#include <iostream>
#include <fstream>
using namespace cv;
using namespace std;
const char dlmtr = '/';
void maskMatchesByTrainImgIdx( const vector<DMatch>& matches, int trainImgIdx, vector<char>& mask );
void readTrainFilenames( const string& filename, string& dirName, vector<string>& trainFilenames );
int main(int argc, char** argv)
{
Mat queryImg;
vector<KeyPoint> queryPoints;
Mat queryDescs;
vector<Mat> trainImgCollection;
vector<vector<KeyPoint> > trainPointCollection;
vector<Mat> trainDescCollection;
vector<DMatch> matches;
if( argc != 7 )
{
cout << "Format:" << endl;
cout << argv[0] << "[detectorType] [descriptorType] [matcherType] [queryImage] [fileWithTrainImages] [dirToSaveResImages]" << endl;
return -1;
}
cout << "< 1.) Creating feature detector, descriptor extractor and descriptor matcher ..." << endl;
Ptr<FeatureDetector> detector = createFeatureDetector( argv[1] );
Ptr<DescriptorExtractor> descriptorExtractor = createDescriptorExtractor( argv[2] );
Ptr<DescriptorMatcher> descriptorMatcher = createDescriptorMatcher( argv[3] );
cout << ">" << endl;
if( detector.empty() || descriptorExtractor.empty() || descriptorMatcher.empty() )
{
cout << "Can not create feature detector or descriptor exstractor or descriptor matcher of given types." << endl << ">" << endl;
return -1;
}
cout << "< 2.) Reading the images..." << endl;
queryImg = imread( argv[4], CV_LOAD_IMAGE_GRAYSCALE);
if( queryImg.empty() )
{
cout << "Query image can not be read." << endl << ">" << endl;
return -1;
}
string trainDirName;
vector<string> trainFilenames;
vector<int> usedTrainImgIdxs;
readTrainFilenames( argv[5], trainDirName, trainFilenames );
if( trainFilenames.empty() )
{
cout << "Train image filenames can not be read." << endl << ">" << endl;
return -1;
}
for( size_t i = 0; i < trainFilenames.size(); i++ )
{
Mat img = imread( trainDirName + trainFilenames[i], CV_LOAD_IMAGE_GRAYSCALE );
if( img.empty() ) cout << "Train image " << trainDirName + trainFilenames[i] << " can not be read." << endl;
trainImgCollection.push_back( img );
usedTrainImgIdxs.push_back( i );
}
if( trainImgCollection.empty() )
{
cout << "All train images can not be read." << endl << ">" << endl;
return -1;
}
else
cout << trainImgCollection.size() << " train images were read." << endl;
cout << ">" << endl;
cout << endl << "< 3.) Extracting keypoints from images..." << endl;
detector->detect( queryImg, queryPoints );
detector->detect( trainImgCollection, trainPointCollection );
cout << ">" << endl;
cout << "< 4.) Computing descriptors for keypoints..." << endl;
descriptorExtractor->compute( queryImg, queryPoints, queryDescs );
descriptorExtractor->compute( trainImgCollection, trainPointCollection, trainDescCollection );
cout << ">" << endl;
cout << "< 5.) Set train descriptors collection in the matcher and match query descriptors to them..." << endl;
descriptorMatcher->add( trainDescCollection );
descriptorMatcher->match( queryDescs, matches );
CV_Assert( queryPoints.size() == matches.size() );
cout << ">" << endl;
Mat drawImg;
vector<char> mask;
for( size_t i = 0; i < trainImgCollection.size(); i++ )
{
maskMatchesByTrainImgIdx( matches, i, mask );
drawMatches( queryImg, queryPoints, trainImgCollection[i], trainPointCollection[i],
matches, drawImg, Scalar::all(-1), Scalar::all(-1), mask );
imwrite( string(argv[6]) + "/res_" + trainFilenames[usedTrainImgIdxs[i]] + ".png", drawImg );
}
return 0;
}
void maskMatchesByTrainImgIdx( const vector<DMatch>& matches, int trainImgIdx, vector<char>& mask )
{
mask.resize( matches.size() );
fill( mask.begin(), mask.end(), 0 );
for( size_t i = 0; i < matches.size(); i++ )
{
if( matches[i].imgIdx == trainImgIdx )
mask[i] = 1;
}
}
void readTrainFilenames( const string& filename, string& dirName, vector<string>& trainFilenames )
{
trainFilenames.clear();
ifstream file( filename.c_str() );
if ( !file.is_open() )
return;
size_t pos = filename.rfind(dlmtr);
dirName = pos == string::npos ? "" : filename.substr(0, pos) + dlmtr;
while( !file.eof() )
{
string str; getline( file, str );
if( str.empty() ) break;
trainFilenames.push_back(str);
}
file.close();
}
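// Example invocation of this sample (the binary name, image names and paths below are
// placeholders):
//   ./matching_to_many_images SURF SURF FlannBased query.png train/trainImages.txt resDir
// where train/trainImages.txt lists one train image filename per line, resolved relative
// to the directory containing that file, and result images are written to resDir.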
......@@ -1028,7 +1028,7 @@ void DescriptorQualityTest::runDatasetTest (const vector<Mat> &imgs, const vecto
return;
}
Ptr<GenericDescriptorMatch> descMatch = commRunParams[di].isActiveParams ? specificDescMatcher : defaultDescMatcher;
Ptr<GenericDescriptorMatcher> descMatch = commRunParams[di].isActiveParams ? specificDescMatcher : defaultDescMatcher;
calcQuality[di].resize(TEST_CASE_COUNT);
vector<KeyPoint> keypoints1;
......@@ -1165,7 +1165,7 @@ void OneWayDescriptorQualityTest::writeDatasetRunParams( FileStorage& fs, int da
//DetectorQualityTest siftDetectorQuality = DetectorQualityTest( "SIFT", "quality-detector-sift" );
//DetectorQualityTest surfDetectorQuality = DetectorQualityTest( "SURF", "quality-detector-surf" );
// Detectors
// Descriptors
//DescriptorQualityTest siftDescriptorQuality = DescriptorQualityTest( "SIFT", "quality-descriptor-sift", "BruteForce" );
//DescriptorQualityTest surfDescriptorQuality = DescriptorQualityTest( "SURF", "quality-descriptor-surf", "BruteForce" );
//DescriptorQualityTest fernDescriptorQualityTest( "FERN", "quality-descriptor-fern");
......@@ -1173,7 +1173,7 @@ void OneWayDescriptorQualityTest::writeDatasetRunParams( FileStorage& fs, int da
// Don't run them because of bug in OneWayDescriptorBase many to many matching. TODO: fix this bug.
// Don't run it because of bug in OneWayDescriptorBase many to many matching. TODO: fix this bug.
//OneWayDescriptorQualityTest oneWayDescriptorQuality;
// Don't run them (will validate and save results as "quality-descriptor-sift" and "quality-descriptor-surf" test data).
......
......@@ -166,14 +166,6 @@ protected:
Ptr<FeatureDetector> fdetector;
};
CV_FeatureDetectorTest fastTest( "detector-fast", createFeatureDetector("FAST") );
CV_FeatureDetectorTest gfttTest( "detector-gftt", createFeatureDetector("GFTT") );
CV_FeatureDetectorTest harrisTest( "detector-harris", createFeatureDetector("HARRIS") );
CV_FeatureDetectorTest mserTest( "detector-mser", createFeatureDetector("MSER") );
CV_FeatureDetectorTest siftTest( "detector-sift", createFeatureDetector("SIFT") );
CV_FeatureDetectorTest starTest( "detector-star", createFeatureDetector("STAR") );
CV_FeatureDetectorTest surfTest( "detector-surf", createFeatureDetector("SURF") );
/****************************************************************************************\
* Regression tests for descriptor extractors. *
\****************************************************************************************/
......@@ -320,6 +312,413 @@ public:
}
};
/****************************************************************************************\
* Algorithmic tests for descriptor matchers *
\****************************************************************************************/
class CV_DescriptorMatcherTest : public CvTest
{
public:
CV_DescriptorMatcherTest( const char* testName, const Ptr<DescriptorMatcher>& _dmatcher, float _badPart ) :
CvTest( testName, "cv::DescritorMatcher::[,knn,radius]match()"), badPart(_badPart), dmatcher(_dmatcher)
{ CV_Assert( queryDescCount % 2 == 0 ); // because the train data is split into two halves in some cases
CV_Assert( countFactor == 4 ); }
protected:
static const int dim = 500;
static const int queryDescCount = 300;
static const int countFactor = 4;
const float badPart;
virtual void run( int );
void generateData( Mat& query, Mat& train );
int testMatch( const Mat& query, const Mat& train );
int testKnnMatch( const Mat& query, const Mat& train );
int testRadiusMatch( const Mat& query, const Mat& train );
Ptr<DescriptorMatcher> dmatcher;
};
void CV_DescriptorMatcherTest::generateData( Mat& query, Mat& train )
{
RNG& rng = theRNG();
// Generate query descriptors randomly.
// Descriptor vector elements are integer values.
Mat buf( queryDescCount, dim, CV_32SC1 );
rng.fill( buf, RNG::UNIFORM, Scalar::all(0), Scalar(3) );
buf.convertTo( query, CV_32FC1 );
// Generate train descriptors as follows:
// copy each query descriptor into the train set countFactor times
// and perturb one randomly chosen element of each copy, with the
// perturbation magnitude increasing with the copy index. All
// perturbations lie in (0.f, 1.f).
train.create( query.rows*countFactor, query.cols, CV_32FC1 );
float step = 1.f / countFactor;
for( int qIdx = 0; qIdx < query.rows; qIdx++ )
{
Mat queryDescriptor = query.row(qIdx);
for( int c = 0; c < countFactor; c++ )
{
int tIdx = qIdx * countFactor + c;
Mat trainDescriptor = train.row(tIdx);
queryDescriptor.copyTo( trainDescriptor );
int elem = rng(dim);
float diff = rng.uniform( step*c, step*(c+1) );
trainDescriptor.at<float>(0, elem) += diff;
}
}
}
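/*
 * Worked example (illustrative only, not used by the test): with countFactor == 4 and
 * step == 0.25f, the copies of query row 7 occupy train rows 28..31 and are perturbed
 * by amounts drawn from [0, 0.25), [0.25, 0.5), [0.5, 0.75) and [0.75, 1.0)
 * respectively. Under L2 distance the expected k-th nearest train descriptor of query
 * qIdx is therefore row qIdx*countFactor + k, which is the invariant the checks below
 * rely on.
 */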
int CV_DescriptorMatcherTest::testMatch( const Mat& query, const Mat& train )
{
dmatcher->clear();
// test const version of match()
int res = CvTS::OK;
{
vector<DMatch> matches;
dmatcher->match( query, train, matches );
int curRes = CvTS::OK;
if( (int)matches.size() != queryDescCount )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf(CvTS::LOG, "Incorrect number of matches while testing match() (1)\n");
}
else
{
int badCount = 0;
for( size_t i = 0; i < matches.size(); i++ )
{
DMatch match = matches[i];
if( (match.queryIdx != (int)i) || (match.trainIdx != (int)i*countFactor) || (match.imgIdx != 0) )
badCount++;
}
if( (float)badCount > (float)queryDescCount*badPart )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf( CvTS::LOG, "%f - too large fraction of bad matches while testing match() (1)\n",
(float)badCount/(float)queryDescCount );
}
}
res = curRes != CvTS::OK ? curRes : res;
}
// test version of match() with add()
{
vector<DMatch> matches;
// call add() twice so the matcher holds two train images
dmatcher->add( vector<Mat>(1,train.rowRange(0, train.rows/2)) );
dmatcher->add( vector<Mat>(1,train.rowRange(train.rows/2, train.rows)) );
// prepare masks that make the first nearest match (the unperturbed copy) illegal
vector<Mat> masks(2);
for(int mi = 0; mi < 2; mi++ )
{
masks[mi] = Mat(query.rows, train.rows/2, CV_8UC1, Scalar::all(1));
for( int di = 0; di < queryDescCount/2; di++ )
masks[mi].col(di*countFactor).setTo(Scalar::all(0));
}
dmatcher->match( query, matches, masks );
int curRes = CvTS::OK;
if( (int)matches.size() != queryDescCount )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf(CvTS::LOG, "Incorrect number of matches while testing match() (2)\n");
}
else
{
int badCount = 0;
for( size_t i = 0; i < matches.size(); i++ )
{
DMatch match = matches[i];
int shift = dmatcher->supportMask() ? 1 : 0; // the mask disabled the nearest copy, so mask-aware matchers return the next one
{
if( i < queryDescCount/2 )
{
if( (match.queryIdx != (int)i) || (match.trainIdx != (int)i*countFactor + shift) || (match.imgIdx != 0) )
badCount++;
}
else
{
if( (match.queryIdx != (int)i) || (match.trainIdx != ((int)i-queryDescCount/2)*countFactor + shift) || (match.imgIdx != 1) )
badCount++;
}
}
}
if( (float)badCount > (float)queryDescCount*badPart )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf( CvTS::LOG, "%f - too large fraction of bad matches while testing match() (2)\n",
(float)badCount/(float)queryDescCount );
}
}
res = curRes != CvTS::OK ? curRes : res;
}
return res;
}
int CV_DescriptorMatcherTest::testKnnMatch( const Mat& query, const Mat& train )
{
dmatcher->clear();
// test const version of knnMatch()
int res = CvTS::OK;
{
const int knn = 3;
vector<vector<DMatch> > matches;
dmatcher->knnMatch( query, train, matches, knn );
int curRes = CvTS::OK;
if( (int)matches.size() != queryDescCount )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf(CvTS::LOG, "Incorrect number of matches while testing knnMatch() (1)\n");
}
else
{
int badCount = 0;
for( size_t i = 0; i < matches.size(); i++ )
{
if( (int)matches[i].size() != knn )
badCount++;
else
{
int localBadCount = 0;
for( int k = 0; k < knn; k++ )
{
DMatch match = matches[i][k];
if( (match.queryIdx != (int)i) || (match.trainIdx != (int)i*countFactor+k) || (match.imgIdx != 0) )
localBadCount++;
}
badCount += localBadCount > 0 ? 1 : 0;
}
}
if( (float)badCount > (float)queryDescCount*badPart )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf( CvTS::LOG, "%f - too large fraction of bad matches while testing knnMatch() (1)\n",
(float)badCount/(float)queryDescCount );
}
}
res = curRes != CvTS::OK ? curRes : res;
}
// test version of knnMatch() with add()
{
const int knn = 2;
vector<vector<DMatch> > matches;
// call add() twice so the matcher holds two train images
dmatcher->add( vector<Mat>(1,train.rowRange(0, train.rows/2)) );
dmatcher->add( vector<Mat>(1,train.rowRange(train.rows/2, train.rows)) );
// prepare masks that make the first nearest match (the unperturbed copy) illegal
vector<Mat> masks(2);
for(int mi = 0; mi < 2; mi++ )
{
masks[mi] = Mat(query.rows, train.rows/2, CV_8UC1, Scalar::all(1));
for( int di = 0; di < queryDescCount/2; di++ )
masks[mi].col(di*countFactor).setTo(Scalar::all(0));
}
dmatcher->knnMatch( query, matches, knn, masks );
int curRes = CvTS::OK;
if( (int)matches.size() != queryDescCount )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf(CvTS::LOG, "Incorrect number of matches while testing knnMatch() (2)\n");
}
else
{
int badCount = 0;
int shift = dmatcher->supportMask() ? 1 : 0; // the mask disabled the nearest copy, so mask-aware matchers return the next one
for( size_t i = 0; i < matches.size(); i++ )
{
if( (int)matches[i].size() != knn )
badCount++;
else
{
int localBadCount = 0;
for( int k = 0; k < knn; k++ )
{
DMatch match = matches[i][k];
{
if( i < queryDescCount/2 )
{
if( (match.queryIdx != (int)i) || (match.trainIdx != (int)i*countFactor + k + shift) ||
(match.imgIdx != 0) )
localBadCount++;
}
else
{
if( (match.queryIdx != (int)i) || (match.trainIdx != ((int)i-queryDescCount/2)*countFactor + k + shift) ||
(match.imgIdx != 1) )
localBadCount++;
}
}
}
badCount += localBadCount > 0 ? 1 : 0;
}
}
if( (float)badCount > (float)queryDescCount*badPart )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf( CvTS::LOG, "%f - too large fraction of bad matches while testing knnMatch() (2)\n",
(float)badCount/(float)queryDescCount );
}
}
res = curRes != CvTS::OK ? curRes : res;
}
return res;
}
int CV_DescriptorMatcherTest::testRadiusMatch( const Mat& query, const Mat& train )
{
dmatcher->clear();
// test const version of radiusMatch()
int res = CvTS::OK;
{
const float radius = 1.f/countFactor;
vector<vector<DMatch> > matches;
dmatcher->radiusMatch( query, train, matches, radius );
int curRes = CvTS::OK;
if( (int)matches.size() != queryDescCount )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf(CvTS::LOG, "Incorrect number of matches while testing radiusMatch() (1)\n");
}
else
{
int badCount = 0;
for( size_t i = 0; i < matches.size(); i++ )
{
if( (int)matches[i].size() != 1 )
badCount++;
else
{
DMatch match = matches[i][0];
if( (match.queryIdx != (int)i) || (match.trainIdx != (int)i*countFactor) || (match.imgIdx != 0) )
badCount++;
}
}
if( (float)badCount > (float)queryDescCount*badPart )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf( CvTS::LOG, "%f - too large fraction of bad matches while testing radiusMatch() (1)\n",
(float)badCount/(float)queryDescCount );
}
}
res = curRes != CvTS::OK ? curRes : res;
}
// test version of radiusMatch() with add()
{
int n = 3;
const float radius = 1.f/countFactor * n;
vector<vector<DMatch> > matches;
// call add() twice so the matcher holds two train images
dmatcher->add( vector<Mat>(1,train.rowRange(0, train.rows/2)) );
dmatcher->add( vector<Mat>(1,train.rowRange(train.rows/2, train.rows)) );
// prepare masks that make the first nearest match (the unperturbed copy) illegal
vector<Mat> masks(2);
for(int mi = 0; mi < 2; mi++ )
{
masks[mi] = Mat(query.rows, train.rows/2, CV_8UC1, Scalar::all(1));
for( int di = 0; di < queryDescCount/2; di++ )
masks[mi].col(di*countFactor).setTo(Scalar::all(0));
}
dmatcher->radiusMatch( query, matches, radius, masks );
int curRes = CvTS::OK;
if( (int)matches.size() != queryDescCount )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf(CvTS::LOG, "Incorrect number of matches while testing radiusMatch() (2)\n");
}
res = curRes != CvTS::OK ? curRes : res;
int badCount = 0;
int shift = dmatcher->supportMask() ? 1 : 0; // the mask disabled the nearest copy, so mask-aware matchers return the next one
int needMatchCount = dmatcher->supportMask() ? n-1 : n; // ...and find one match fewer within the radius
for( size_t i = 0; i < matches.size(); i++ )
{
if( (int)matches[i].size() != needMatchCount )
badCount++;
else
{
int localBadCount = 0;
for( int k = 0; k < needMatchCount; k++ )
{
DMatch match = matches[i][k];
{
if( i < queryDescCount/2 )
{
if( (match.queryIdx != (int)i) || (match.trainIdx != (int)i*countFactor + k + shift) ||
(match.imgIdx != 0) )
localBadCount++;
}
else
{
if( (match.queryIdx != (int)i) || (match.trainIdx != ((int)i-queryDescCount/2)*countFactor + k + shift) ||
(match.imgIdx != 1) )
localBadCount++;
}
}
}
badCount += localBadCount > 0 ? 1 : 0;
}
}
if( (float)badCount > (float)queryDescCount*badPart )
{
curRes = CvTS::FAIL_INVALID_OUTPUT;
ts->printf( CvTS::LOG, "%f - too large fraction of bad matches while testing radiusMatch() (2)\n",
(float)badCount/(float)queryDescCount );
}
res = curRes != CvTS::OK ? curRes : res;
}
return res;
}
void CV_DescriptorMatcherTest::run( int )
{
Mat query, train;
generateData( query, train );
int res = CvTS::OK, curRes;
curRes = testMatch( query, train );
res = curRes != CvTS::OK ? curRes : res;
curRes = testKnnMatch( query, train );
res = curRes != CvTS::OK ? curRes : res;
curRes = testRadiusMatch( query, train );
res = curRes != CvTS::OK ? curRes : res;
ts->set_failed_test_info( res );
}
/****************************************************************************************\
* Tests registrations *
\****************************************************************************************/
/*
* Detectors
*/
CV_FeatureDetectorTest fastTest( "detector-fast", createFeatureDetector("FAST") );
CV_FeatureDetectorTest gfttTest( "detector-gftt", createFeatureDetector("GFTT") );
CV_FeatureDetectorTest harrisTest( "detector-harris", createFeatureDetector("HARRIS") );
CV_FeatureDetectorTest mserTest( "detector-mser", createFeatureDetector("MSER") );
CV_FeatureDetectorTest siftTest( "detector-sift", createFeatureDetector("SIFT") );
CV_FeatureDetectorTest starTest( "detector-star", createFeatureDetector("STAR") );
CV_FeatureDetectorTest surfTest( "detector-surf", createFeatureDetector("SURF") );
/*
* Descriptors
*/
CV_DescriptorExtractorTest siftDescriptorTest( "descriptor-sift", 0.03f,
createDescriptorExtractor("SIFT"), 8.06652f );
CV_DescriptorExtractorTest surfDescriptorTest( "descriptor-surf", 0.035f,
......@@ -337,3 +736,11 @@ CV_CalonderDescriptorExtractorTest<float> floatCalonderTest( "descriptor-calonde
std::numeric_limits<float>::epsilon(),
0.0221308f );
#endif // CV_SSE2
/*
* Matchers
*/
CV_DescriptorMatcherTest bruteForceMatcherTest( "descriptor-matcher-brute-force",
new BruteForceMatcher<L2<float> >, 0.01 );
CV_DescriptorMatcherTest flannBasedMatcherTest( "descriptor-matcher-flann-based",
new FlannBasedMatcher, 0.02 );
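/*
 * Minimal usage sketch of the matcher interface exercised by CV_DescriptorMatcherTest
 * (illustrative only; the descriptor Mats are hypothetical and the no-mask overload
 * defaults are assumed):
 *
 *   BruteForceMatcher<L2<float> > matcher;
 *   vector<DMatch> matches;
 *   matcher.match( queryDescriptors, trainDescriptors, matches );       // best match per query
 *
 *   vector<vector<DMatch> > knnMatches, radiusMatches;
 *   matcher.add( vector<Mat>(1, trainDescriptors) );                    // store a train image
 *   matcher.knnMatch( queryDescriptors, knnMatches, 2 );                // 2 best matches per query
 *   matcher.radiusMatch( queryDescriptors, radiusMatches, 0.25f );      // all matches within 0.25
 */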
......@@ -49,14 +49,11 @@ void BruteForceMatcherTest::run( int )
vector<DMatch> specMatches, genericMatches;
BruteForceMatcher<L2<float> > specMatcher;
BruteForceMatcher<L2Fake > genericMatcher;
specMatcher.add( train );
genericMatcher.add( train );
int64 time0 = cvGetTickCount();
specMatcher.match( query, specMatches );
specMatcher.match( query, train, specMatches );
int64 time1 = cvGetTickCount();
genericMatcher.match( query, genericMatches );
genericMatcher.match( query, train, genericMatches );
int64 time2 = cvGetTickCount();
float specMatcherTime = float(time1 - time0)/(float)cvGetTickFrequency();
......@@ -72,8 +69,10 @@ void BruteForceMatcherTest::run( int )
for( int i=0;i<descriptorsNumber;i++ )
{
float epsilon = 1e-2;
bool isEquiv = fabs( specMatches[i].distance - genericMatches[i].distance ) < epsilon && specMatches[i].indexQuery == genericMatches[i].indexQuery && specMatches[i].indexTrain == genericMatches[i].indexTrain;
if( !isEquiv || specMatches[i].indexTrain != permutation.at<int>( 0, i ) )
bool isEquiv = fabs( specMatches[i].distance - genericMatches[i].distance ) < epsilon &&
specMatches[i].queryIdx == genericMatches[i].queryIdx &&
specMatches[i].trainIdx == genericMatches[i].trainIdx;
if( !isEquiv || specMatches[i].trainIdx != permutation.at<int>( 0, i ) )
{
ts->set_failed_test_info( CvTS::FAIL_MISMATCH );
break;
......@@ -87,9 +86,9 @@ void BruteForceMatcherTest::run( int )
time0 = cvGetTickCount();
specMatcher.match( query, mask, specMatches );
specMatcher.match( query, train, specMatches, mask );
time1 = cvGetTickCount();
genericMatcher.match( query, mask, genericMatches );
genericMatcher.match( query, train, genericMatches, mask );
time2 = cvGetTickCount();
specMatcherTime = float(time1 - time0)/(float)cvGetTickFrequency();
......@@ -103,12 +102,13 @@ void BruteForceMatcherTest::run( int )
if( specMatches.size() != genericMatches.size() )
ts->set_failed_test_info( CvTS::FAIL_INVALID_OUTPUT );
for( int i=0;i<specMatches.size();i++ )
for( size_t i=0;i<specMatches.size();i++ )
{
//float epsilon = 1e-2;
float epsilon = 10000000;
bool isEquiv = fabs( specMatches[i].distance - genericMatches[i].distance ) < epsilon && specMatches[i].indexQuery == genericMatches[i].indexQuery && specMatches[i].indexTrain == genericMatches[i].indexTrain;
bool isEquiv = fabs( specMatches[i].distance - genericMatches[i].distance ) < epsilon &&
specMatches[i].queryIdx == genericMatches[i].queryIdx &&
specMatches[i].trainIdx == genericMatches[i].trainIdx;
if( !isEquiv )
{
ts->set_failed_test_info( CvTS::FAIL_MISMATCH );
......@@ -117,4 +117,4 @@ void BruteForceMatcherTest::run( int )
}
}
BruteForceMatcherTest bruteForceMatcherTest;
BruteForceMatcherTest taBruteForceMatcherTest;