Commit d79c9769 authored by Maria Dimashova's avatar Maria Dimashova

updated detectors quality test, added descriptors quality test

parent 0043fe6f
......@@ -46,10 +46,17 @@
using namespace std;
using namespace cv;
#define AFFINE_COVARIANT_VERSION
// Map point `pt` through homography H (3x3, CV_64FC1).
// Returns a max-value sentinel point when the homogeneous coordinate is zero
// (point maps to infinity), so callers can detect the degenerate case.
inline Point2f applyHomography( const Mat_<double>& H, const Point2f& pt )
{
    double z = H(2,0)*pt.x + H(2,1)*pt.y + H(2,2);
    if( z )
    {
        double w = 1./z; // perspective division factor
        return Point2f( (H(0,0)*pt.x + H(0,1)*pt.y + H(0,2))*w,
                        (H(1,0)*pt.x + H(1,1)*pt.y + H(1,2))*w );
    }
    return Point2f( numeric_limits<double>::max(), numeric_limits<double>::max() );
}
inline void linearizeHomographyAt( const Mat_<double>& H, const Point2f& pt, Mat_<double>& A )
......@@ -59,26 +66,34 @@ inline void linearizeHomographyAt( const Mat_<double>& H, const Point2f& pt, Mat
p2 = H(1,0)*pt.x + H(1,1)*pt.y + H(1,2),
p3 = H(2,0)*pt.x + H(2,1)*pt.y + H(2,2),
p3_2 = p3*p3;
if( p3 )
{
A(0,0) = H(0,0)/p3 - p1*H(2,0)/p3_2; // fxdx
A(0,1) = H(0,1)/p3 - p1*H(2,1)/p3_2; // fxdy
A(1,0) = H(1,0)/p3 - p2*H(2,0)/p3_2; // fydx
A(1,1) = H(1,1)/p3 - p2*H(2,1)/p3_2; // fydx
}
else
A.setTo(Scalar::all(numeric_limits<double>::max()));
}
//----------------------------------- Repeatability ---------------------------------------------------
#ifndef AFFINE_COVARIANT_VERSION
/****************************************************************************************\
* 1. Initial version of evaluating detectors. This version calculate repeatability *
* for scale invariant detectors (circular regions) *
\****************************************************************************************/
// Find the key points located in the part of the scene present in both images
// and project keypoints2 on img1
void getCommonKeyPointsOnImg1( const Mat& img1, const Mat img2, const Mat& H12,
void getCircularKeyPointsInCommonPart( const Mat& img1, const Mat img2, const Mat& H12,
const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
vector<KeyPoint>& ckeypoints1, vector<KeyPoint>& hckeypoints2,
bool isAffineInvariant )
vector<KeyPoint>& ckeypoints1, vector<KeyPoint>& ckeypoints2t )
{
assert( !img1.empty() && !img2.empty() );
assert( !H12.empty() && H12.cols==3 && H12.rows==3 && H12.type()==CV_64FC1 );
ckeypoints1.clear();
hckeypoints2.clear();
ckeypoints2t.clear();
Rect r1(0, 0, img1.cols, img1.rows), r2(0, 0, img2.cols, img2.rows);
Mat H21; invert( H12, H21 );
......@@ -97,17 +112,12 @@ void getCommonKeyPointsOnImg1( const Mat& img1, const Mat img2, const Mat& H12,
{
KeyPoint kp = *it;
kp.pt = pt;
if( isAffineInvariant )
assert(0);
else // scale invariant
{
Mat_<double> A, eval;
linearizeHomographyAt(H21, it->pt, A);
eigen(A, eval);
assert( eval.type()==CV_64FC1 && eval.cols==1 && eval.rows==2 );
kp.size *= sqrt(eval(0,0) * eval(1,0)) /*scale from linearized homography matrix*/;
}
hckeypoints2.push_back(kp);
ckeypoints2t.push_back(kp);
}
}
}
......@@ -115,11 +125,10 @@ void getCommonKeyPointsOnImg1( const Mat& img1, const Mat img2, const Mat& H12,
// Locations p1 and p2 are repeated if ||p1 - H21*p2|| < 1.5 pixels.
// Regions are repeated if Es < 0.4 (Es differs for scale invariant and affine invarian detectors).
// For more details see "Scale&Affine Invariant Interest Point Detectors", Mikolajczyk, Schmid.
void repeatability( const Mat& img1, const Mat img2, const Mat& H12,
void evaluateScaleInvDetectors( const Mat& img1, const Mat img2, const Mat& H12,
const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
int& repeatingLocationCount, float& repeatingLocationRltv,
int& repeatingRegionCount, float& repeatingRegionRltv,
bool isAffineInvariant )
int& repeatingRegionCount, float& repeatingRegionRltv )
{
const double locThreshold = 1.5,
regThreshold = 0.4;
......@@ -128,18 +137,13 @@ void repeatability( const Mat& img1, const Mat img2, const Mat& H12,
Mat H21; invert( H12, H21 );
vector<KeyPoint> ckeypoints1, hckeypoints2;
getCommonKeyPointsOnImg1( img1, img2, H12, keypoints1, keypoints2, ckeypoints1, hckeypoints2, false );
vector<KeyPoint> ckeypoints1, ckeypoints2t;
getCircularKeyPointsInCommonPart( img1, img2, H12, keypoints1, keypoints2, ckeypoints1, ckeypoints2t );
vector<KeyPoint> *smallKPSet, *bigKPSet;
if( ckeypoints1.size() < hckeypoints2.size() )
vector<KeyPoint> *smallKPSet = &ckeypoints1, *bigKPSet = &ckeypoints2t;
if( ckeypoints1.size() > ckeypoints2t.size() )
{
smallKPSet = &ckeypoints1;
bigKPSet = &hckeypoints2;
}
else
{
smallKPSet = &hckeypoints2;
smallKPSet = &ckeypoints2t;
bigKPSet = &ckeypoints1;
}
......@@ -176,10 +180,6 @@ void repeatability( const Mat& img1, const Mat img2, const Mat& H12,
{
matchedMask[nearestIdx] = true;
repeatingLocationCount++;
if( isAffineInvariant )
assert(0);
else // scale invariant
{
double minRadius = min( skpIt->size, nearestBkp->size ),
maxRadius = max( skpIt->size, nearestBkp->size );
double Es = abs(1 - (minRadius*minRadius)/(maxRadius*maxRadius));
......@@ -187,83 +187,459 @@ void repeatability( const Mat& img1, const Mat img2, const Mat& H12,
repeatingRegionCount++;
}
}
repeatingLocationRltv = smallKPSet->size() ? (float)repeatingLocationCount / smallKPSet->size() : 0;
repeatingRegionRltv = smallKPSet->size() ? (float)repeatingRegionCount / smallKPSet->size() : 0;
}
}
#else
/****************************************************************************************\
* 2. Functions to evaluate affine covariant detectors and descriptors. *
\****************************************************************************************/
// Elliptic region used by the affine covariant evaluation: an ellipse
// ax^2 + 2bxy + cy^2 = 1 centered at `center`, with cached half axes
// lengths and half sizes of the axis-aligned bounding box.
class EllipticKeyPoint
{
public:
EllipticKeyPoint();
EllipticKeyPoint( const Point2f& _center, const Scalar& _ellipse );
// symmetric 2x2 matrix [[a, b], [b, c]] built from the ellipse coefficients
static Mat_<double> getSecondMomentsMatrix( const Scalar& _ellipse );
Mat_<double> getSecondMomentsMatrix() const;
// project this ellipse through homography H (affine-approximated at center)
void calcProjection( const Mat_<double>& H, EllipticKeyPoint& projection ) const;
Point2f center;
Scalar ellipse; // 3 elements a, b, c: ax^2+2bxy+cy^2=1
Size_<float> axes; // half lengths of ellipse axes
Size_<float> boundingBox; // half sizes of bounding box
};
// Default: unit circle at the origin (delegates to the parameterized ctor).
EllipticKeyPoint::EllipticKeyPoint()
{
*this = EllipticKeyPoint(Point2f(0,0), Scalar(1, 0, 1) );
}
// Construct from center and ellipse coefficients (a, b, c); the half axes
// lengths come from the eigenvalues of the second moments matrix, the half
// bounding box sizes from its determinant.
EllipticKeyPoint::EllipticKeyPoint( const Point2f& _center, const Scalar& _ellipse )
{
center = _center;
ellipse = _ellipse;
Mat_<double> M = getSecondMomentsMatrix(_ellipse), eval;
eigen( M, eval );
assert( eval.rows == 2 && eval.cols == 1 );
// half axis length is 1/sqrt(eigenvalue)
axes.width = 1.f / sqrt(eval(0,0));
axes.height = 1.f / sqrt(eval(1,0));
float ac_b2 = ellipse[0]*ellipse[2] - ellipse[1]*ellipse[1]; // det of the moments matrix
boundingBox.width = sqrt(ellipse[2]/ac_b2);
boundingBox.height = sqrt(ellipse[0]/ac_b2);
}
// Build the symmetric 2x2 second moments matrix [[a, b], [b, c]]
// from the ellipse coefficients (a, b, c) stored in `_ellipse`.
Mat_<double> EllipticKeyPoint::getSecondMomentsMatrix( const Scalar& _ellipse )
{
    Mat_<double> moments(2, 2);
    moments(0,0) = _ellipse[0];
    moments(0,1) = _ellipse[1];
    moments(1,0) = _ellipse[1];
    moments(1,1) = _ellipse[2];
    return moments;
}
// Convenience overload: second moments matrix of this key point's ellipse.
Mat_<double> EllipticKeyPoint::getSecondMomentsMatrix() const
{
return getSecondMomentsMatrix(ellipse);
}
// Project the ellipse through homography H: the center is mapped exactly,
// the shape is transformed by the local affine approximation Aff of H,
// i.e. dstM = (Aff * M^-1 * Aff^T)^-1.
void EllipticKeyPoint::calcProjection( const Mat_<double>& H, EllipticKeyPoint& projection ) const
{
Point2f dstCenter = applyHomography(H, center);
Mat_<double> invM; invert(getSecondMomentsMatrix(), invM);
Mat_<double> Aff; linearizeHomographyAt(H, center, Aff);
Mat_<double> dstM; invert(Aff*invM*Aff.t(), dstM);
projection = EllipticKeyPoint( dstCenter, Scalar(dstM(0,0), dstM(0,1), dstM(1,1)) );
}
void calcEllipticKeyPointProjections( const vector<EllipticKeyPoint>& src, const Mat_<double>& H, vector<EllipticKeyPoint>& dst )
{
assert( !src.empty() && !H.empty() && H.cols == 3 && H.rows == 3);
dst.resize(src.size());
vector<EllipticKeyPoint>::const_iterator srcIt = src.begin();
vector<EllipticKeyPoint>::iterator dstIt = dst.begin();
for( ; srcIt != src.end(); ++srcIt, ++dstIt )
srcIt->calcProjection(H, *dstIt);
}
void transformToEllipticKeyPoints( const vector<KeyPoint>& src, vector<EllipticKeyPoint>& dst )
{
assert( !src.empty() );
dst.resize(src.size());
for( size_t i = 0; i < src.size(); i++ )
{
float rad = src[i].size;
assert( rad );
float fac = 1.f/(rad*rad);
dst[i] = EllipticKeyPoint( src[i].pt, Scalar(fac, 0, fac) );
}
}
void transformToKeyPoints( const vector<EllipticKeyPoint>& src, vector<KeyPoint>& dst )
{
assert( !src.empty() );
dst.resize(src.size());
for( size_t i = 0; i < src.size(); i++ )
{
Size_<float> axes = src[i].axes;
float rad = sqrt(axes.height*axes.width);
dst[i] = KeyPoint(src[i].center, rad );
}
}
// Project circular key points through homography H: position via the exact
// homography, size via the geometric mean of the eigenvalues of the local
// affine approximation, and angle by mapping the orientation unit vector.
void calcKeyPointProjections( const vector<KeyPoint>& src, const Mat_<double>& H, vector<KeyPoint>& dst )
{
assert( !src.empty() && !H.empty() && H.cols == 3 && H.rows == 3);
dst.resize(src.size());
vector<KeyPoint>::const_iterator srcIt = src.begin();
vector<KeyPoint>::iterator dstIt = dst.begin();
for( ; srcIt != src.end(); ++srcIt, ++dstIt )
{
Point2f dstPt = applyHomography(H, srcIt->pt);
Mat_<double> Aff; linearizeHomographyAt(H, srcIt->pt, Aff);
Mat_<double> eval; eigen(Aff, eval);
assert( eval.type()==CV_64FC1 && eval.cols==1 && eval.rows==2 );
float dstSize = srcIt->size * sqrt(eval(0,0) * eval(1,0)) /*scale from linearized homography matrix*/;
assert( dstSize );
// NOTE(review): this upper bound looks like leftover debug code - projected
// sizes of real detector key points are normally far above 0.5 px, so this
// assert would fire constantly; confirm the intended condition.
assert( dstSize < 0.5 ); // TODO check for surf
// calculate new angle: map the orientation unit vector and re-normalize
float srcAngleRad = srcIt->angle*CV_PI/180;
Point2f vec1(cos(srcAngleRad), sin(srcAngleRad)), vec2;
vec2 = applyHomography(H, vec1);
float w = 1.f/norm(vec2);
vec2 = vec2*w;
float dstAngleGrad = acos(vec2.x)*180.f/CV_PI; // 0..180
if( asin(vec2.y) < 0 ) // -pi/2 .. pi/2
dstAngleGrad += 180;
*dstIt = KeyPoint( dstPt, dstSize, dstAngleGrad, srcIt->response, srcIt->octave, srcIt->class_id );
}
}
// Keep only key points whose position lies inside the image rectangle and
// record the original index of every surviving key point in `origIdxs`.
void filterKeyPointsByImageSize( vector<KeyPoint>& keypoints, const Size& imgSize, vector<int>& origIdxs )
{
    vector<KeyPoint> kept;
    kept.reserve(keypoints.size());
    origIdxs.clear();
    Rect imgRect(0, 0, imgSize.width, imgSize.height);
    for( size_t i = 0; i < keypoints.size(); i++ )
    {
        if( imgRect.contains(keypoints[i].pt) )
        {
            kept.push_back(keypoints[i]);
            origIdxs.push_back((int)i);
        }
    }
    keypoints.assign(kept.begin(), kept.end());
}
/*
 * Calculate overlap errors between two sets of elliptic key points.
 */
// Fill the sparse matrix `overlaps` with the percentage overlap (0..100) of
// every sufficiently close pair (keypoints1[i1], keypoints2t[i2]).  Ellipses
// are rescaled first (normalized towards radius 30 when commonPart is set,
// scaled by 3 otherwise) and the intersection/union areas are estimated by
// sampling a grid (50 steps along the smaller bounding box side) over the
// joint bounding box of each pair.
void overlap( const vector<EllipticKeyPoint>& keypoints1, const vector<EllipticKeyPoint>& keypoints2t, bool commonPart,
              SparseMat_<float>& overlaps )
{
    assert( !keypoints1.empty() && !keypoints2t.empty() );
    int size[] = { (int)keypoints1.size(), (int)keypoints2t.size() };
    overlaps.create( 2, size );
    for( size_t i1 = 0; i1 < keypoints1.size(); i1++ )
    {
        EllipticKeyPoint kp1 = keypoints1[i1];
        float maxDist = sqrt(kp1.axes.width*kp1.axes.height),
              fac = 30.f/maxDist;
        if( !commonPart )
            fac=3;
        maxDist = maxDist*4;
        fac = 1.0/(fac*fac);
        EllipticKeyPoint keypoint1a = EllipticKeyPoint( kp1.center, Scalar(fac*kp1.ellipse[0], fac*kp1.ellipse[1], fac*kp1.ellipse[2]) );
        for( size_t i2 = 0; i2 < keypoints2t.size(); i2++ )
        {
            EllipticKeyPoint kp2 = keypoints2t[i2];
            Point2f diff = kp2.center - kp1.center;
            if( norm(diff) < maxDist ) // only nearby pairs can overlap
            {
                EllipticKeyPoint keypoint2a = EllipticKeyPoint( kp2.center, Scalar(fac*kp2.ellipse[0], fac*kp2.ellipse[1], fac*kp2.ellipse[2]) );
                // joint bounding box of the two (rescaled) ellipses
                float maxx = ceil(( keypoint1a.boundingBox.width > (diff.x+keypoint2a.boundingBox.width)) ?
                                    keypoint1a.boundingBox.width : (diff.x+keypoint2a.boundingBox.width));
                float minx = floor((-keypoint1a.boundingBox.width < (diff.x-keypoint2a.boundingBox.width)) ?
                                   -keypoint1a.boundingBox.width : (diff.x-keypoint2a.boundingBox.width));
                float maxy = ceil(( keypoint1a.boundingBox.height > (diff.y+keypoint2a.boundingBox.height)) ?
                                    keypoint1a.boundingBox.height : (diff.y+keypoint2a.boundingBox.height));
                float miny = floor((-keypoint1a.boundingBox.height < (diff.y-keypoint2a.boundingBox.height)) ?
                                   -keypoint1a.boundingBox.height : (diff.y-keypoint2a.boundingBox.height));
                float mina = (maxx-minx) < (maxy-miny) ? (maxx-minx) : (maxy-miny) ;
                float dr = mina/50.0; // sampling step: 50 samples along the smaller side
                float bua = 0, bna = 0; // union / intersection sample counts
                // count grid samples falling inside each ellipse
                for( float rx1 = minx; rx1 <= maxx; rx1+=dr )
                {
                    float rx2 = rx1-diff.x;
                    for( float ry1=miny; ry1<=maxy; ry1+=dr )
                    {
                        float ry2=ry1-diff.y;
                        // quadratic forms: inside the ellipse iff value < 1
                        float e1 = keypoint1a.ellipse[0]*rx1*rx1+2*keypoint1a.ellipse[1]*rx1*ry1+keypoint1a.ellipse[2]*ry1*ry1;
                        float e2 = keypoint2a.ellipse[0]*rx2*rx2+2*keypoint2a.ellipse[1]*rx2*ry2+keypoint2a.ellipse[2]*ry2*ry2;
                        if( e1<1 && e2<1 ) bna++;
                        if( e1<1 || e2<1 ) bua++;
                    }
                }
                if( bna > 0 )
                    overlaps.ref(i1,i2) = 100.0*bna/bua;
            }
        }
    }
}
//----------------------------------- base class of detector test ------------------------------------
// Keep only elliptic key points whose whole bounding box lies strictly
// inside the image of size `imgSize`.
void filterEllipticKeyPointsByImageSize( vector<EllipticKeyPoint>& keypoints, const Size& imgSize )
{
    vector<EllipticKeyPoint> filtered;
    filtered.reserve(keypoints.size());
    vector<EllipticKeyPoint>::const_iterator it = keypoints.begin();
    for( ; it != keypoints.end(); ++it )
    {
        // the ellipse (via its bounding box) must fit completely into the image
        if( it->center.x + it->boundingBox.width < imgSize.width &&
            it->center.x - it->boundingBox.width > 0 &&
            it->center.y + it->boundingBox.height < imgSize.height &&
            it->center.y - it->boundingBox.height > 0 )
            filtered.push_back(*it);
    }
    keypoints.assign(filtered.begin(), filtered.end());
}
// Restrict all four key point sets to the part of the scene visible in both
// images: each original set is filtered by its own image size, each projected
// set by the other image's size.
// NOTE(review): the vectors are filtered independently, so any index
// correspondence between a set and its projection is not preserved here -
// confirm callers do not rely on it.  Also, imgSize1 being non-const looks
// accidental; verify before changing the signature.
void getEllipticKeyPointsInCommonPart( vector<EllipticKeyPoint>& keypoints1, vector<EllipticKeyPoint>& keypoints2,
vector<EllipticKeyPoint>& keypoints1t, vector<EllipticKeyPoint>& keypoints2t,
Size& imgSize1, const Size& imgSize2 )
{
assert( !keypoints1.empty() && !keypoints2.empty() );
assert( keypoints1t.size() == keypoints1.size() && keypoints2t.size() == keypoints2.size() );
filterEllipticKeyPointsByImageSize( keypoints1, imgSize1 );
filterEllipticKeyPointsByImageSize( keypoints1t, imgSize2 );
filterEllipticKeyPointsByImageSize( keypoints2, imgSize2 );
filterEllipticKeyPointsByImageSize( keypoints2t, imgSize1 );
}
// Core evaluation routine shared by the detector and descriptor tests.
// Projects each key point set into the other image, optionally restricts both
// to the common scene part, computes pairwise overlaps, then either
// (detector mode, thresholdedOverlapMask == 0) greedily one-to-one matches
// regions to fill repeatability/correspondencesCount, or (descriptor mode)
// fills *thresholdedOverlapMask with all pairs above the overlap threshold.
void calculateRepeatability( const vector<EllipticKeyPoint>& _keypoints1, const vector<EllipticKeyPoint>& _keypoints2,
const Mat& img1, const Mat& img2, const Mat& H1to2,
float& repeatability, int& correspondencesCount,
SparseMat_<uchar>* thresholdedOverlapMask=0 )
{
// work on copies: the sets may be filtered below
vector<EllipticKeyPoint> keypoints1( _keypoints1.begin(), _keypoints1.end() ),
keypoints2( _keypoints2.begin(), _keypoints2.end() ),
keypoints1t( keypoints1.size() ),
keypoints2t( keypoints2.size() );
// calculate projections of key points
calcEllipticKeyPointProjections( keypoints1, H1to2, keypoints1t );
Mat H2to1; invert(H1to2, H2to1);
calcEllipticKeyPointProjections( keypoints2, H2to1, keypoints2t );
bool ifEvaluateDetectors = !thresholdedOverlapMask; // == commonPart
float overlapThreshold;
if( ifEvaluateDetectors )
{
// detector evaluation: at most 40% overlap error allowed
overlapThreshold = 100.f - 40.f;
// remove key points from outside of the common image part
Size sz1 = img1.size(), sz2 = img2.size();
getEllipticKeyPointsInCommonPart( keypoints1, keypoints2, keypoints1t, keypoints2t, sz1, sz2 );
}
else
{
// descriptor evaluation: at most 50% overlap error allowed
overlapThreshold = 100.f - 50.f;
}
int minCount = min( keypoints1.size(), keypoints2t.size() );
// calculate overlap errors
SparseMat_<float> overlaps;
overlap( keypoints1, keypoints2t, ifEvaluateDetectors, overlaps );
// -1 marks "could not be evaluated" until proven otherwise
correspondencesCount = -1;
repeatability = -1.f;
const int* size = overlaps.size();
if( !size || overlaps.nzcount() == 0 )
return;
if( ifEvaluateDetectors )
{
// regions one-to-one matching
correspondencesCount = 0;
// keep only pairs whose overlap reaches the threshold
SparseMat_<float> currOverlaps( 2, size );
for( int y = 0; y < size[0]; y++ )
{
for( int x = 0; x < size[1]; x++ )
{
float val = overlaps(y,x);
if ( val >= overlapThreshold )
currOverlaps.ref(y,x) = val;
}
}
// greedily take the best remaining pair, then erase its row and column
while( currOverlaps.nzcount() > 0 )
{
double maxOverlap = 0;
int maxIdx[2];
minMaxLoc( currOverlaps, 0, &maxOverlap, 0, maxIdx );
for( size_t i1 = 0; i1 < keypoints1.size(); i1++ )
currOverlaps.erase(i1, maxIdx[1]);
for( size_t i2 = 0; i2 < keypoints2t.size(); i2++ )
currOverlaps.erase(maxIdx[0], i2);
correspondencesCount++;
}
repeatability = minCount ? (float)(correspondencesCount*100)/minCount : 0;
}
else
{
// descriptor mode: expose the thresholded overlap mask to the caller
thresholdedOverlapMask->create( 2, size );
for( int y = 0; y < size[0]; y++ )
{
for( int x = 0; x < size[1]; x++ )
{
float val = overlaps(y,x);
if ( val >= overlapThreshold )
thresholdedOverlapMask->ref(y,x) = val;
}
}
}
}
// Detector-quality entry point: computes the repeatability (%) and the number
// of one-to-one region correspondences between the two key point sets.
void evaluateDetectors( const vector<EllipticKeyPoint>& keypoints1, const vector<EllipticKeyPoint>& keypoints2,
const Mat& img1, const Mat& img2, const Mat& H1to2,
float& repeatability, int& correspCount )
{
calculateRepeatability( keypoints1, keypoints2,
img1, img2, H1to2,
repeatability, correspCount );
}
// Recall = correct matches / ground-truth correspondences
// (0 when there are no correspondences at all).
inline float recall( int correctMatchCount, int correspondenceCount )
{
    if( correspondenceCount == 0 )
        return 0;
    return (float)correctMatchCount / (float)correspondenceCount;
}
// Precision = correct matches / all reported matches
// (0 when no matches were reported at all).
inline float precision( int correctMatchCount, int falseMatchCount )
{
    const int totalMatchCount = correctMatchCount + falseMatchCount;
    if( totalMatchCount == 0 )
        return 0;
    return (float)correctMatchCount / (float)totalMatchCount;
}
// Descriptor-quality entry point.  matches1to2[i1] holds the index of the
// key point in keypoints2 matched to keypoints1[i1] (negative = no match).
// A reported match counts as correct when its pair overlaps enough (per the
// thresholded overlap mask), as false otherwise; correspondenceCount is the
// total number of sufficiently overlapping pairs (ground truth).
void evaluateDescriptors( const vector<EllipticKeyPoint>& keypoints1, const vector<EllipticKeyPoint>& keypoints2,
                          const vector<int>& matches1to2,
                          const Mat& img1, const Mat& img2, const Mat& H1to2,
                          int& correctMatchCount, int& falseMatchCount, int& correspondenceCount )
{
    assert( !keypoints1.empty() && !keypoints2.empty() && !matches1to2.empty() );
    assert( keypoints1.size() == matches1to2.size() );

    float repeatability;
    int correspCount;
    SparseMat_<uchar> thresholdedOverlapMask; // thresholded allOverlapErrors
    calculateRepeatability( keypoints1, keypoints2,
                            img1, img2, H1to2,
                            repeatability, correspCount,
                            &thresholdedOverlapMask );

    correspondenceCount = (int)thresholdedOverlapMask.nzcount();
    correctMatchCount = falseMatchCount = 0;
    for( size_t i1 = 0; i1 < matches1to2.size(); i1++ )
    {
        int i2 = matches1to2[i1];
        if( i2 >= 0 ) // was "> 0": index 0 is a valid match and must be counted
        {
            if( thresholdedOverlapMask(i1, i2) )
                correctMatchCount++;
            else
                falseMatchCount++;
        }
    }
}
#endif
/****************************************************************************************\
* Detectors evaluation *
\****************************************************************************************/
const int DATASETS_COUNT = 8; // number of image datasets (see DATASET_NAMES)
const int TEST_CASE_COUNT = 5; // image pairs (1st vs 2nd..6th image) per dataset
// NOTE(review): the two paths below belong to the pre-refactoring data layout
// and are only referenced by the legacy code path - confirm they can be dropped.
const string DATASET_DIR = "detectors/datasets/";
const string ALGORITHMS_DIR = "detectors/algorithms/";
const string IMAGE_DATASETS_DIR = "detectors_descriptors_evaluation/images_datasets/";
const string DETECTORS_DIR = "detectors_descriptors_evaluation/detectors/";
const string DESCRIPTORS_DIR = "detectors_descriptors_evaluation/descriptors/";
const string KEYPOINTS_DIR = "detectors_descriptors_evaluation/keypoints_datasets/";
const string PARAMS_POSTFIX = "_params.xml"; // per-algorithm run parameters file
const string RES_POSTFIX = "_res.xml"; // per-algorithm validated results file
// file-storage keys of the stored quality metrics
#ifndef AFFINE_COVARIANT_VERSION
const string RLC = "repeating_locations_count";
const string RLR = "repeating_locations_rltv";
const string RRC = "repeating_regions_count";
const string RRR = "repeating_regions_rltv";
#else
const string REPEAT = "repeatability";
const string CORRESP_COUNT = "correspondence_count";
#endif
string DATASET_NAMES[DATASETS_COUNT] = { "bark", "bikes", "boat", "graf", "leuven", "trees", "ubc", "wall"};
// Common scaffolding for the detector/descriptor quality regression tests:
// reads (or creates with defaults) per-dataset run parameters and validated
// results, and compares freshly calculated quality metrics against the
// stored ones.  Metric storage and comparison are supplied by subclasses.
// (This resolves the botched merge that left the old
// CV_DetectorRepeatabilityTest declaration interleaved with this class.)
class BaseQualityTest : public CvTest
{
public:
    BaseQualityTest( const char* _algName, const char* _testName, const char* _testFuncs ) :
        CvTest( _testName, _testFuncs ), algName(_algName) {}

protected:
    // locations of the parameter/result files of the tested algorithm
    virtual string getRunParamsFilename() const = 0;
    virtual string getResultsFilename() const = 0;

    // management of the per-dataset storage of validated quality values
    virtual void validQualityClear( int datasetIdx ) = 0;
    virtual void validQualityCreate( int datasetIdx ) = 0;
    virtual bool isValidQualityEmpty( int datasetIdx ) const = 0;
    virtual bool isCalcQualityEmpty( int datasetIdx ) const = 0;

    void readAllDatasetsRunParams();
    virtual void readDatasetRunParams( FileNode& fn, int datasetIdx ) = 0;
    void writeAllDatasetsRunParams() const;
    virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const = 0;
    void setDefaultAllDatasetsRunParams();
    virtual void setDefaultDatasetRunParams( int datasetIdx ) = 0;

    virtual void readResults();
    virtual void readResults( FileNode& fn, int datasetIdx, int caseIdx ) = 0;
    void writeResults() const;
    virtual void writeResults( FileStorage& fs, int datasetIdx, int caseIdx ) const = 0;

    // load the homographies and images of one dataset
    bool readDataset( const string& datasetName, vector<Mat>& Hs, vector<Mat>& imgs );

    virtual void processResults();
    virtual int processResults( int datasetIdx, int caseIdx ) = 0;

    string algName; // name of the evaluated detector/descriptor
    bool isWriteParams, isWriteResults; // set when params/results files were absent
};
void CV_DetectorRepeatabilityTest::readAllRunParams()
void BaseQualityTest::readAllDatasetsRunParams()
{
string filename = string(ts->get_data_path()) + ALGORITHMS_DIR + detectorName + PARAMS_POSTFIX;
string filename = getRunParamsFilename();
FileStorage fs( filename, FileStorage::READ );
if( !fs.isOpened() )
{
isWriteParams = true;
setDefaultAllRunParams();
setDefaultAllDatasetsRunParams();
ts->printf(CvTS::LOG, "all runParams are default\n");
}
else
......@@ -276,17 +652,17 @@ void CV_DetectorRepeatabilityTest::readAllRunParams()
if( fn.empty() )
{
ts->printf( CvTS::LOG, "%d-runParams is default\n", i);
setDefaultRunParams(i);
setDefaultDatasetRunParams(i);
}
else
readRunParams(fn, i);
readDatasetRunParams(fn, i);
}
}
}
void CV_DetectorRepeatabilityTest::writeAllRunParams()
void BaseQualityTest::writeAllDatasetsRunParams() const
{
string filename = string(ts->get_data_path()) + ALGORITHMS_DIR + detectorName + PARAMS_POSTFIX;
string filename = getRunParamsFilename();
FileStorage fs( filename, FileStorage::WRITE );
if( fs.isOpened() )
{
......@@ -294,7 +670,7 @@ void CV_DetectorRepeatabilityTest::writeAllRunParams()
for( int i = 0; i < DATASETS_COUNT; i++ )
{
fs << DATASET_NAMES[i] << "{";
writeRunParams(fs, i);
writeDatasetRunParams(fs, i);
fs << "}";
}
fs << "}";
......@@ -303,17 +679,17 @@ void CV_DetectorRepeatabilityTest::writeAllRunParams()
ts->printf(CvTS::LOG, "file %s for writing run params can not be opened\n", filename.c_str() );
}
void CV_DetectorRepeatabilityTest::setDefaultAllRunParams()
void BaseQualityTest::setDefaultAllDatasetsRunParams()
{
for( int i = 0; i < DATASETS_COUNT; i++ )
setDefaultRunParams(i);
setDefaultDatasetRunParams(i);
}
bool CV_DetectorRepeatabilityTest::readDataset( const string& datasetName, vector<Mat>& Hs, vector<Mat>& imgs )
bool BaseQualityTest::readDataset( const string& datasetName, vector<Mat>& Hs, vector<Mat>& imgs )
{
Hs.resize( TEST_CASE_COUNT );
imgs.resize( TEST_CASE_COUNT+1 );
string dirname = string(ts->get_data_path()) + DATASET_DIR + datasetName + "/";
string dirname = string(ts->get_data_path()) + IMAGE_DATASETS_DIR + datasetName + "/";
for( int i = 0; i < (int)Hs.size(); i++ )
{
......@@ -334,9 +710,9 @@ bool CV_DetectorRepeatabilityTest::readDataset( const string& datasetName, vecto
return true;
}
void CV_DetectorRepeatabilityTest::readResults()
void BaseQualityTest::readResults()
{
string filename = string(ts->get_data_path()) + ALGORITHMS_DIR + detectorName + RES_POSTFIX;
string filename = getResultsFilename();
FileStorage fs( filename, FileStorage::READ );
if( fs.isOpened() )
{
......@@ -347,22 +723,19 @@ void CV_DetectorRepeatabilityTest::readResults()
FileNode datafn = topfn[DATASET_NAMES[di]];
if( datafn.empty() )
{
validRepeatability[di].clear();
validQualityClear(di);
ts->printf( CvTS::LOG, "results for %s dataset were not read\n",
DATASET_NAMES[di].c_str());
DATASET_NAMES[di].c_str() );
}
else
{
validRepeatability[di].resize(TEST_CASE_COUNT);
validQualityCreate(di);
for( int ci = 0; ci < TEST_CASE_COUNT; ci++ )
{
stringstream ss; ss << "case" << ci;
FileNode casefn = datafn[ss.str()];
CV_Assert( !casefn.empty() );
validRepeatability[di][ci].repeatingLocationCount = casefn[RLC];
validRepeatability[di][ci].repeatingLocationRltv = casefn[RLR];
validRepeatability[di][ci].repeatingRegionCount = casefn[RRC];
validRepeatability[di][ci].repeatingRegionRltv = casefn[RRR];
readResults( casefn , di, ci );
}
}
}
......@@ -371,16 +744,16 @@ void CV_DetectorRepeatabilityTest::readResults()
isWriteResults = true;
}
void CV_DetectorRepeatabilityTest::writeResults()
void BaseQualityTest::writeResults() const
{
string filename = string(ts->get_data_path()) + ALGORITHMS_DIR + detectorName + RES_POSTFIX;
string filename = getResultsFilename();;
FileStorage fs( filename, FileStorage::WRITE );
if( fs.isOpened() )
{
fs << "results" << "{";
for( int di = 0; di < DATASETS_COUNT; di++ )
{
if( calcRepeatability[di].empty() )
if( isCalcQualityEmpty(di) )
{
ts->printf(CvTS::LOG, "results on %s dataset were not write because of empty\n",
DATASET_NAMES[di].c_str());
......@@ -392,10 +765,7 @@ void CV_DetectorRepeatabilityTest::writeResults()
{
stringstream ss; ss << "case" << ci;
fs << ss.str() << "{";
fs << RLC << calcRepeatability[di][ci].repeatingLocationCount;
fs << RLR << calcRepeatability[di][ci].repeatingLocationRltv;
fs << RRC << calcRepeatability[di][ci].repeatingRegionCount;
fs << RRR << calcRepeatability[di][ci].repeatingRegionRltv;
writeResults( fs, di, ci );
fs << "}"; //ss.str()
}
fs << "}"; //DATASET_NAMES[di]
......@@ -407,39 +777,224 @@ void CV_DetectorRepeatabilityTest::writeResults()
ts->printf(CvTS::LOG, "results were not written because file %s can not be opened\n", filename.c_str() );
}
void CV_DetectorRepeatabilityTest::run( int )
void BaseQualityTest::processResults()
{
if( isWriteParams )
writeAllDatasetsRunParams();
int res = CvTS::OK;
if( isWriteResults )
writeResults();
else
{
for( int di = 0; di < DATASETS_COUNT; di++ )
{
if( isValidQualityEmpty(di) || isCalcQualityEmpty(di) )
continue;
ts->printf(CvTS::LOG, "\nDataset: %s\n", DATASET_NAMES[di].c_str() );
for( int ci = 0; ci < TEST_CASE_COUNT; ci++ )
{
ts->printf(CvTS::LOG, "case%d\n", ci);
int currRes = processResults( di, ci );
res = currRes == CvTS::OK ? res : currRes;
}
}
}
if( res != CvTS::OK )
ts->printf(CvTS::LOG, "BAD ACCURACY\n");
ts->set_failed_test_info( res );
}
// Quality test of a single feature detector: stores, per dataset and test
// case, either the legacy scale-invariant repeatability metrics or (affine
// covariant version) the repeatability percentage and correspondence count.
class DetectorQualityTest : public BaseQualityTest
{
public:
DetectorQualityTest( const char* _detectorName, const char* _testName ) :
BaseQualityTest( _detectorName, _testName, "quality-of-detector" )
{
validQuality.resize(DATASETS_COUNT);
calcQuality.resize(DATASETS_COUNT);
}
protected:
// keep the base-class overloads visible next to the overrides below
using BaseQualityTest::readResults;
using BaseQualityTest::writeResults;
using BaseQualityTest::processResults;
virtual string getRunParamsFilename() const;
virtual string getResultsFilename() const;
virtual void validQualityClear( int datasetIdx );
virtual void validQualityCreate( int datasetIdx );
virtual bool isValidQualityEmpty( int datasetIdx ) const;
virtual bool isCalcQualityEmpty( int datasetIdx ) const;
virtual void readResults( FileNode& fn, int datasetIdx, int caseIdx );
virtual void writeResults( FileStorage& fs, int datasetIdx, int caseIdx ) const;
// run parameters and detector creation stay abstract: one subclass per detector
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx ) = 0;
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const = 0;
virtual void setDefaultDatasetRunParams( int datasetIdx ) = 0;
virtual FeatureDetector* createDetector( int datasetIdx ) = 0;
void openToWriteKeypointsFile( FileStorage& fs, int datasetIdx );
void run( int );
virtual int processResults( int datasetIdx, int caseIdx );
// quality metrics of one (dataset, test case) pair
struct Quality
{
#ifndef AFFINE_COVARIANT_VERSION
int repeatingLocationCount;
float repeatingLocationRltv;
int repeatingRegionCount;
float repeatingRegionRltv;
#else
float repeatability;
int correspondenceCount;
#endif
};
vector<vector<Quality> > validQuality; // reference values read from file
vector<vector<Quality> > calcQuality; // values calculated by this run
};
// Path of the XML file holding this detector's per-dataset run parameters.
string DetectorQualityTest::getRunParamsFilename() const
{
    string dir = string(ts->get_data_path()) + DETECTORS_DIR;
    return dir + algName + PARAMS_POSTFIX;
}
// Path of the XML file holding this detector's validated results.
string DetectorQualityTest::getResultsFilename() const
{
    string dir = string(ts->get_data_path()) + DETECTORS_DIR;
    return dir + algName + RES_POSTFIX;
}
// Drop the stored reference quality values of one dataset (results missing).
void DetectorQualityTest::validQualityClear( int datasetIdx )
{
validQuality[datasetIdx].clear();
}
// Allocate one reference quality slot per test case for one dataset.
void DetectorQualityTest::validQualityCreate( int datasetIdx )
{
validQuality[datasetIdx].resize(TEST_CASE_COUNT);
}
// True when no reference quality values are stored for the dataset.
bool DetectorQualityTest::isValidQualityEmpty( int datasetIdx ) const
{
return validQuality[datasetIdx].empty();
}
// True when no quality values were calculated for the dataset in this run.
bool DetectorQualityTest::isCalcQualityEmpty( int datasetIdx ) const
{
return calcQuality[datasetIdx].empty();
}
// Read the reference quality values of one (dataset, test case) file node.
void DetectorQualityTest::readResults( FileNode& fn, int datasetIdx, int caseIdx )
{
#ifndef AFFINE_COVARIANT_VERSION
validQuality[datasetIdx][caseIdx].repeatingLocationCount = fn[RLC];
validQuality[datasetIdx][caseIdx].repeatingLocationRltv = fn[RLR];
validQuality[datasetIdx][caseIdx].repeatingRegionCount = fn[RRC];
validQuality[datasetIdx][caseIdx].repeatingRegionRltv = fn[RRR];
#else
validQuality[datasetIdx][caseIdx].repeatability = fn[REPEAT];
validQuality[datasetIdx][caseIdx].correspondenceCount = fn[CORRESP_COUNT];
#endif
}
// Write the calculated quality metrics of one (dataset, test case) pair.
// (Removes a stray `readAllRunParams();` call the merge left at the top -
// that method no longer exists and has no business in a const writer.)
void DetectorQualityTest::writeResults( FileStorage& fs, int datasetIdx, int caseIdx ) const
{
#ifndef AFFINE_COVARIANT_VERSION
    fs << RLC << calcQuality[datasetIdx][caseIdx].repeatingLocationCount;
    fs << RLR << calcQuality[datasetIdx][caseIdx].repeatingLocationRltv;
    fs << RRC << calcQuality[datasetIdx][caseIdx].repeatingRegionCount;
    fs << RRR << calcQuality[datasetIdx][caseIdx].repeatingRegionRltv;
#else
    fs << REPEAT << calcQuality[datasetIdx][caseIdx].repeatability;
    fs << CORRESP_COUNT << calcQuality[datasetIdx][caseIdx].correspondenceCount;
#endif
}
// Open `fs` for writing this detector's key points for the given dataset,
// unless such a file already exists (then `fs` is left closed so existing
// key points are not overwritten).
void DetectorQualityTest::openToWriteKeypointsFile( FileStorage& fs, int datasetIdx )
{
    string filename = string(ts->get_data_path()) + KEYPOINTS_DIR + algName + "_"+
                      DATASET_NAMES[datasetIdx] + ".xml" ;

    // probe for an already existing keypoints file first
    fs.open(filename, FileStorage::READ);
    if( !fs.isOpened() )
    {
        fs.open(filename, FileStorage::WRITE);
        if( !fs.isOpened() )
            ts->printf( CvTS::LOG, "keypoints can not be written in file %s because this file can not be opened\n",
                        filename.c_str());
    }
    else
        fs.release();
}
// Store `keypoints` under node "img<imgIdx>" when the storage is open;
// no-op otherwise.
inline void writeKeypoints( FileStorage& fs, const vector<KeyPoint>& keypoints, int imgIdx )
{
    if( !fs.isOpened() )
        return;
    stringstream nodeName; nodeName << "img" << imgIdx;
    write( fs, nodeName.str(), keypoints );
}
// Load key points from node "img<imgIdx>"; the storage must already be open.
inline void readKeypoints( FileStorage& fs, vector<KeyPoint>& keypoints, int imgIdx )
{
    assert( fs.isOpened() );
    stringstream nodeName; nodeName << "img" << imgIdx;
    read( fs[nodeName.str()], keypoints);
}
void DetectorQualityTest::run( int )
{
readAllDatasetsRunParams();
readResults();
int notReadDatasets = 0;
int progress = 0, progressCount = DATASETS_COUNT*TEST_CASE_COUNT;
for(int di = 0; di < DATASETS_COUNT; di++ )
{
FileStorage keypontsFS;
openToWriteKeypointsFile( keypontsFS, di );
vector<Mat> imgs, Hs;
if( !readDataset( DATASET_NAMES[di], Hs, imgs ) )
{
calcRepeatability[di].clear();
calcQuality[di].clear();
ts->printf( CvTS::LOG, "images or homography matrices of dataset named %s can not be read\n",
DATASET_NAMES[di].c_str());
notReadDatasets++;
}
else
{
calcRepeatability[di].resize(TEST_CASE_COUNT);
calcQuality[di].resize(TEST_CASE_COUNT);
Ptr<FeatureDetector> detector = createDetector(di);
vector<KeyPoint> keypoints1;
vector<KeyPoint> keypoints1; vector<EllipticKeyPoint> ekeypoints1;
detector->detect( imgs[0], keypoints1 );
writeKeypoints( keypontsFS, keypoints1, 0);
transformToEllipticKeyPoints( keypoints1, ekeypoints1 );
for( int ci = 0; ci < TEST_CASE_COUNT; ci++ )
{
progress = update_progress( progress, di*TEST_CASE_COUNT + ci, progressCount, 0 );
vector<KeyPoint> keypoints2;
detector->detect( imgs[ci+1], keypoints2 );
repeatability( imgs[0], imgs[ci+1], Hs[ci], keypoints1, keypoints2,
calcRepeatability[di][ci].repeatingLocationCount, calcRepeatability[di][ci].repeatingLocationRltv,
calcRepeatability[di][ci].repeatingRegionCount, calcRepeatability[di][ci].repeatingRegionRltv,
isAffineInvariant );
writeKeypoints( keypontsFS, keypoints2, ci+1);
#ifndef AFFINE_COVARIANT_VERSION
evaluateScaleInvDetectors( imgs[0], imgs[ci+1], Hs[ci], keypoints1, keypoints2,
calcQuality[di][ci].repeatingLocationCount, calcQuality[di][ci].repeatingLocationRltv,
calcQuality[di][ci].repeatingRegionCount, calcQuality[di][ci].repeatingRegionRltv );
#else
vector<EllipticKeyPoint> ekeypoints2;
transformToEllipticKeyPoints( keypoints2, ekeypoints2 );
evaluateDetectors( ekeypoints1, ekeypoints2, imgs[0], imgs[ci], Hs[ci],
calcQuality[di][ci].repeatability, calcQuality[di][ci].correspondenceCount );
#endif
}
}
}
......@@ -460,31 +1015,16 @@ void testLog( CvTS* ts, bool isBadAccuracy )
ts->printf(CvTS::LOG, "\n");
}
void CV_DetectorRepeatabilityTest::processResults()
int DetectorQualityTest::processResults( int datasetIdx, int caseIdx )
{
if( isWriteParams )
writeAllRunParams();
bool isBadAccuracy;
int res = CvTS::OK;
if( isWriteResults )
writeResults();
else
{
for( int di = 0; di < DATASETS_COUNT; di++ )
{
if( validRepeatability[di].empty() || calcRepeatability[di].empty() )
continue;
ts->printf(CvTS::LOG, "\nDataset: %s\n", DATASET_NAMES[di].c_str() );
Quality valid = validQuality[datasetIdx][caseIdx], calc = calcQuality[datasetIdx][caseIdx];
bool isBadAccuracy;
int countEps = 1;
float rltvEps = 0.001f;
for( int ci = 0; ci < TEST_CASE_COUNT; ci++ )
{
ts->printf(CvTS::LOG, "case%d\n", ci);
Repeatability valid = validRepeatability[di][ci], calc = calcRepeatability[di][ci];
const float rltvEps = 0.001;
#ifndef AFFINE_COVARIANT_VERSION
ts->printf(CvTS::LOG, "%s: calc=%d, valid=%d", RLC.c_str(), calc.repeatingLocationCount, valid.repeatingLocationCount );
isBadAccuracy = valid.repeatingLocationCount - calc.repeatingLocationCount > countEps;
testLog( ts, isBadAccuracy );
......@@ -504,27 +1044,32 @@ void CV_DetectorRepeatabilityTest::processResults()
isBadAccuracy = valid.repeatingRegionRltv - calc.repeatingRegionRltv > rltvEps;
testLog( ts, isBadAccuracy );
res = isBadAccuracy ? CvTS::FAIL_BAD_ACCURACY : res;
}
}
}
#else
ts->printf(CvTS::LOG, "%s: calc=%f, valid=%f", REPEAT.c_str(), calc.repeatability, valid.repeatability );
isBadAccuracy = valid.repeatability - calc.repeatability > rltvEps;
testLog( ts, isBadAccuracy );
res = isBadAccuracy ? CvTS::FAIL_BAD_ACCURACY : res;
if( res != CvTS::OK )
ts->printf(CvTS::LOG, "BAD ACCURACY\n");
ts->set_failed_test_info( res );
ts->printf(CvTS::LOG, "%s: calc=%d, valid=%d", CORRESP_COUNT.c_str(), calc.correspondenceCount, valid.correspondenceCount );
isBadAccuracy = valid.correspondenceCount - calc.correspondenceCount > countEps;
testLog( ts, isBadAccuracy );
res = isBadAccuracy ? CvTS::FAIL_BAD_ACCURACY : res;
#endif
return res;
}
//--------------------------------- FAST detector test --------------------------------------------
class CV_FastDetectorTest : public CV_DetectorRepeatabilityTest
class FastDetectorQualityTest : public DetectorQualityTest
{
public:
CV_FastDetectorTest() : CV_DetectorRepeatabilityTest( "fast", "repeatability-fast-detector" )
FastDetectorQualityTest() : DetectorQualityTest( "fast", "quality-fast-detector" )
{ runParams.resize(DATASETS_COUNT); }
protected:
virtual FeatureDetector* createDetector( int datasetIdx );
virtual void readRunParams( FileNode& fn, int datasetIdx );
virtual void writeRunParams( FileStorage& fs, int datasetIdx );
virtual void setDefaultRunParams( int datasetIdx );
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
virtual void setDefaultDatasetRunParams( int datasetIdx );
struct RunParams
{
......@@ -534,37 +1079,36 @@ protected:
vector<RunParams> runParams;
};
FeatureDetector* CV_FastDetectorTest::createDetector( int datasetIdx )
// Build a FAST detector configured with this dataset's tuned parameters.
FeatureDetector* FastDetectorQualityTest::createDetector( int datasetIdx )
{
    const RunParams& rp = runParams[datasetIdx];
    return new FastFeatureDetector( rp.threshold, rp.nonmaxSuppression );
}
void CV_FastDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
// Read the FAST run parameters of one dataset from file storage.
// The diff-merge left a duplicated assignment and the closing brace glued onto
// the last statement; this is the single clean version.
void FastDetectorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
    runParams[datasetIdx].threshold = fn["threshold"];
    // Stored as int (FileStorage has no bool node type); nonzero means enabled.
    runParams[datasetIdx].nonmaxSuppression = (int)fn["nonmaxSuppression"] != 0;
}
void CV_FastDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
// Persist the FAST run parameters of one dataset.
void FastDetectorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
    const RunParams& rp = runParams[datasetIdx];
    fs << "threshold" << rp.threshold;
    fs << "nonmaxSuppression" << rp.nonmaxSuppression;
}
void CV_FastDetectorTest::setDefaultRunParams( int datasetIdx )
void FastDetectorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
runParams[datasetIdx].threshold = 1;
runParams[datasetIdx].nonmaxSuppression = true;
}
CV_FastDetectorTest fastDetector;
FastDetectorQualityTest fastDetectorQuality;
//--------------------------------- GFTT & HARRIS detectors tests --------------------------------------------
class CV_BaseGfttDetectorTest : public CV_DetectorRepeatabilityTest
class BaseGfttDetectorQualityTest : public DetectorQualityTest
{
public:
CV_BaseGfttDetectorTest( const char* detectorName, const char* testName )
: CV_DetectorRepeatabilityTest( detectorName, testName )
BaseGfttDetectorQualityTest( const char* detectorName, const char* testName )
: DetectorQualityTest( detectorName, testName )
{
runParams.resize(DATASETS_COUNT);
useHarrisDetector = false;
......@@ -572,9 +1116,9 @@ public:
protected:
virtual FeatureDetector* createDetector( int datasetIdx );
virtual void readRunParams( FileNode& fn, int datasetIdx );
virtual void writeRunParams( FileStorage& fs, int datasetIdx );
virtual void setDefaultRunParams( int datasetIdx );
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
virtual void setDefaultDatasetRunParams( int datasetIdx );
struct RunParams
{
......@@ -588,7 +1132,7 @@ protected:
bool useHarrisDetector;
};
FeatureDetector* CV_BaseGfttDetectorTest::createDetector( int datasetIdx )
FeatureDetector* BaseGfttDetectorQualityTest::createDetector( int datasetIdx )
{
return new GoodFeaturesToTrackDetector( runParams[datasetIdx].maxCorners,
runParams[datasetIdx].qualityLevel,
......@@ -598,7 +1142,7 @@ FeatureDetector* CV_BaseGfttDetectorTest::createDetector( int datasetIdx )
runParams[datasetIdx].k );
}
void CV_BaseGfttDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
void BaseGfttDetectorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
runParams[datasetIdx].maxCorners = fn["maxCorners"];
runParams[datasetIdx].qualityLevel = fn["qualityLevel"];
......@@ -607,7 +1151,7 @@ void CV_BaseGfttDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
runParams[datasetIdx].k = fn["k"];
}
void CV_BaseGfttDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
void BaseGfttDetectorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
fs << "maxCorners" << runParams[datasetIdx].maxCorners;
fs << "qualityLevel" << runParams[datasetIdx].qualityLevel;
......@@ -616,7 +1160,7 @@ void CV_BaseGfttDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
fs << "k" << runParams[datasetIdx].k;
}
void CV_BaseGfttDetectorTest::setDefaultRunParams( int datasetIdx )
void BaseGfttDetectorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
runParams[datasetIdx].maxCorners = 1500;
runParams[datasetIdx].qualityLevel = 0.01;
......@@ -625,35 +1169,35 @@ void CV_BaseGfttDetectorTest::setDefaultRunParams( int datasetIdx )
runParams[datasetIdx].k = 0.04;
}
class CV_GfttDetectorTest : public CV_BaseGfttDetectorTest
class GfttDetectorQualityTest : public BaseGfttDetectorQualityTest
{
public:
CV_GfttDetectorTest() : CV_BaseGfttDetectorTest( "gftt", "repeatability-gftt-detector" ) {}
GfttDetectorQualityTest() : BaseGfttDetectorQualityTest( "gftt", "quality-gftt-detector" ) {}
};
CV_GfttDetectorTest gfttDetector;
GfttDetectorQualityTest gfttDetectorQuality;
class CV_HarrisDetectorTest : public CV_BaseGfttDetectorTest
class HarrisDetectorQualityTest : public BaseGfttDetectorQualityTest
{
public:
CV_HarrisDetectorTest() : CV_BaseGfttDetectorTest( "harris", "repeatability-harris-detector" )
HarrisDetectorQualityTest() : BaseGfttDetectorQualityTest( "harris", "quality-harris-detector" )
{ useHarrisDetector = true; }
};
CV_HarrisDetectorTest harrisDetector;
HarrisDetectorQualityTest harrisDetectorQuality;
//--------------------------------- MSER detector test --------------------------------------------
class CV_MserDetectorTest : public CV_DetectorRepeatabilityTest
class MserDetectorQualityTest : public DetectorQualityTest
{
public:
CV_MserDetectorTest() : CV_DetectorRepeatabilityTest( "mser", "repeatability-mser-detector" )
MserDetectorQualityTest() : DetectorQualityTest( "mser", "quality-mser-detector" )
{ runParams.resize(DATASETS_COUNT); }
protected:
virtual FeatureDetector* createDetector( int datasetIdx );
virtual void readRunParams( FileNode& fn, int datasetIdx );
virtual void writeRunParams( FileStorage& fs, int datasetIdx );
virtual void setDefaultRunParams( int datasetIdx );
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
virtual void setDefaultDatasetRunParams( int datasetIdx );
struct RunParams
{
......@@ -670,7 +1214,7 @@ protected:
vector<RunParams> runParams;
};
FeatureDetector* CV_MserDetectorTest::createDetector( int datasetIdx )
FeatureDetector* MserDetectorQualityTest::createDetector( int datasetIdx )
{
return new MserFeatureDetector( runParams[datasetIdx].delta,
runParams[datasetIdx].minArea,
......@@ -683,7 +1227,7 @@ FeatureDetector* CV_MserDetectorTest::createDetector( int datasetIdx )
runParams[datasetIdx].edgeBlurSize );
}
void CV_MserDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
void MserDetectorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
runParams[datasetIdx].delta = fn["delta"];
runParams[datasetIdx].minArea = fn["minArea"];
......@@ -696,7 +1240,7 @@ void CV_MserDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
runParams[datasetIdx].edgeBlurSize = fn["edgeBlurSize"];
}
void CV_MserDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
void MserDetectorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
fs << "delta" << runParams[datasetIdx].delta;
fs << "minArea" << runParams[datasetIdx].minArea;
......@@ -709,7 +1253,7 @@ void CV_MserDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
fs << "edgeBlurSize" << runParams[datasetIdx].edgeBlurSize;
}
void CV_MserDetectorTest::setDefaultRunParams( int datasetIdx )
void MserDetectorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
runParams[datasetIdx].delta = 5;
runParams[datasetIdx].minArea = 60;
......@@ -722,20 +1266,20 @@ void CV_MserDetectorTest::setDefaultRunParams( int datasetIdx )
runParams[datasetIdx].edgeBlurSize = 5;
}
CV_MserDetectorTest mserDetector;
MserDetectorQualityTest mserDetectorQuality;
//--------------------------------- STAR detector test --------------------------------------------
class CV_StarDetectorTest : public CV_DetectorRepeatabilityTest
class StarDetectorQualityTest : public DetectorQualityTest
{
public:
CV_StarDetectorTest() : CV_DetectorRepeatabilityTest( "star", "repeatability-star-detector" )
StarDetectorQualityTest() : DetectorQualityTest( "star", "quality-star-detector" )
{ runParams.resize(DATASETS_COUNT); }
protected:
virtual FeatureDetector* createDetector( int datasetIdx );
virtual void readRunParams( FileNode& fn, int datasetIdx );
virtual void writeRunParams( FileStorage& fs, int datasetIdx );
virtual void setDefaultRunParams( int datasetIdx );
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
virtual void setDefaultDatasetRunParams( int datasetIdx );
struct RunParams
{
......@@ -748,7 +1292,7 @@ protected:
vector<RunParams> runParams;
};
FeatureDetector* CV_StarDetectorTest::createDetector( int datasetIdx )
FeatureDetector* StarDetectorQualityTest::createDetector( int datasetIdx )
{
return new StarFeatureDetector( runParams[datasetIdx].maxSize,
runParams[datasetIdx].responseThreshold,
......@@ -757,7 +1301,7 @@ FeatureDetector* CV_StarDetectorTest::createDetector( int datasetIdx )
runParams[datasetIdx].suppressNonmaxSize );
}
void CV_StarDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
void StarDetectorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
runParams[datasetIdx].maxSize = fn["maxSize"];
runParams[datasetIdx].responseThreshold = fn["responseThreshold"];
......@@ -766,7 +1310,7 @@ void CV_StarDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
runParams[datasetIdx].suppressNonmaxSize = fn["suppressNonmaxSize"];
}
void CV_StarDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
void StarDetectorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
fs << "maxSize" << runParams[datasetIdx].maxSize;
fs << "responseThreshold" << runParams[datasetIdx].responseThreshold;
......@@ -775,7 +1319,7 @@ void CV_StarDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
fs << "suppressNonmaxSize" << runParams[datasetIdx].suppressNonmaxSize;
}
void CV_StarDetectorTest::setDefaultRunParams( int datasetIdx )
void StarDetectorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
runParams[datasetIdx].maxSize = 16;
runParams[datasetIdx].responseThreshold = 30;
......@@ -784,20 +1328,20 @@ void CV_StarDetectorTest::setDefaultRunParams( int datasetIdx )
runParams[datasetIdx].suppressNonmaxSize = 5;
}
CV_StarDetectorTest starDetector;
StarDetectorQualityTest starDetectorQuality;
//--------------------------------- SIFT detector test --------------------------------------------
class CV_SiftDetectorTest : public CV_DetectorRepeatabilityTest
class SiftDetectorQualityTest : public DetectorQualityTest
{
public:
CV_SiftDetectorTest() : CV_DetectorRepeatabilityTest( "sift", "repeatability-sift-detector" )
SiftDetectorQualityTest() : DetectorQualityTest( "sift", "quality-sift-detector" )
{ runParams.resize(DATASETS_COUNT); }
protected:
virtual FeatureDetector* createDetector( int datasetIdx );
virtual void readRunParams( FileNode& fn, int datasetIdx );
virtual void writeRunParams( FileStorage& fs, int datasetIdx );
virtual void setDefaultRunParams( int datasetIdx );
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
virtual void setDefaultDatasetRunParams( int datasetIdx );
struct RunParams
{
......@@ -808,7 +1352,7 @@ protected:
vector<RunParams> runParams;
};
FeatureDetector* CV_SiftDetectorTest::createDetector( int datasetIdx )
FeatureDetector* SiftDetectorQualityTest::createDetector( int datasetIdx )
{
return new SiftFeatureDetector( runParams[datasetIdx].detect.threshold,
runParams[datasetIdx].detect.edgeThreshold,
......@@ -818,7 +1362,7 @@ FeatureDetector* CV_SiftDetectorTest::createDetector( int datasetIdx )
runParams[datasetIdx].comm.firstOctave );
}
void CV_SiftDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
void SiftDetectorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
runParams[datasetIdx].detect.threshold = fn["threshold"];
runParams[datasetIdx].detect.edgeThreshold = fn["edgeThreshold"];
......@@ -828,7 +1372,7 @@ void CV_SiftDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
runParams[datasetIdx].comm.firstOctave = fn["firstOctave"];
}
void CV_SiftDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
void SiftDetectorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
fs << "threshold" << runParams[datasetIdx].detect.threshold;
fs << "edgeThreshold" << runParams[datasetIdx].detect.edgeThreshold;
......@@ -838,26 +1382,26 @@ void CV_SiftDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
fs << "firstOctave" << runParams[datasetIdx].comm.firstOctave;
}
void CV_SiftDetectorTest::setDefaultRunParams( int datasetIdx )
void SiftDetectorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
runParams[datasetIdx].detect = SIFT::DetectorParams();
runParams[datasetIdx].comm = SIFT::CommonParams();
}
CV_SiftDetectorTest siftDetector;
SiftDetectorQualityTest siftDetectorQuality;
//--------------------------------- SURF detector test --------------------------------------------
class CV_SurfDetectorTest : public CV_DetectorRepeatabilityTest
class SurfDetectorQualityTest : public DetectorQualityTest
{
public:
CV_SurfDetectorTest() : CV_DetectorRepeatabilityTest( "surf", "repeatability-surf-detector" )
SurfDetectorQualityTest() : DetectorQualityTest( "surf", "quality-surf-detector" )
{ runParams.resize(DATASETS_COUNT); }
protected:
virtual FeatureDetector* createDetector( int datasetIdx );
virtual void readRunParams( FileNode& fn, int datasetIdx );
virtual void writeRunParams( FileStorage& fs, int datasetIdx );
virtual void setDefaultRunParams( int datasetIdx );
virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
virtual void setDefaultDatasetRunParams( int datasetIdx );
struct RunParams
{
......@@ -868,32 +1412,401 @@ protected:
vector<RunParams> runParams;
};
FeatureDetector* CV_SurfDetectorTest::createDetector( int datasetIdx )
// Build a SURF detector configured with this dataset's tuned parameters.
FeatureDetector* SurfDetectorQualityTest::createDetector( int datasetIdx )
{
    const RunParams& rp = runParams[datasetIdx];
    return new SurfFeatureDetector( rp.hessianThreshold, rp.octaves, rp.octaveLayers );
}
void CV_SurfDetectorTest::readRunParams( FileNode& fn, int datasetIdx )
// Read the SURF detector run parameters of one dataset from file storage.
void SurfDetectorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
    RunParams& rp = runParams[datasetIdx];
    rp.hessianThreshold = fn["hessianThreshold"];
    rp.octaves = fn["octaves"];
    rp.octaveLayers = fn["octaveLayers"];
}
void CV_SurfDetectorTest::writeRunParams( FileStorage& fs, int datasetIdx )
// Persist the SURF detector run parameters of one dataset.
void SurfDetectorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
    const RunParams& rp = runParams[datasetIdx];
    fs << "hessianThreshold" << rp.hessianThreshold;
    fs << "octaves" << rp.octaves;
    fs << "octaveLayers" << rp.octaveLayers;
}
void CV_SurfDetectorTest::setDefaultRunParams( int datasetIdx )
void SurfDetectorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
runParams[datasetIdx].hessianThreshold = 400.;
runParams[datasetIdx].octaves = 3;
runParams[datasetIdx].octaveLayers = 4;
}
CV_SurfDetectorTest surfDetector;
SurfDetectorQualityTest surfDetectorQuality;
/****************************************************************************************\
* Descriptors evaluation *
\****************************************************************************************/
const string RECALL = "recall";
const string PRECISION = "precision";
const string KEYPOINTS_FILENAME = "keypointsFilename";
const string PROJECT_KEYPOINTS_FROM_1IMAGE = "projectKeypointsFrom1Image";
const string MATCH_FILTER = "matchFilter";
// Base class of the descriptor quality tests. For every dataset it loads the
// keypoints previously written by a detector quality test, matches descriptors
// between the first and each remaining image, and compares the resulting
// recall/precision against stored reference values.
class DescriptorQualityTest : public BaseQualityTest
{
public:
    enum{ NO_MATCH_FILTER = 0 }; // matchFilter value meaning "keep all matches"
    DescriptorQualityTest( const char* _descriptorName, const char* _testName ) :
            BaseQualityTest( _descriptorName, _testName, "quality-of-descriptor" )
    {
        validQuality.resize(DATASETS_COUNT);
        calcQuality.resize(DATASETS_COUNT);
        commRunParams.resize(DATASETS_COUNT);
    }

protected:
    using BaseQualityTest::readResults;
    using BaseQualityTest::writeResults;
    using BaseQualityTest::processResults;

    virtual string getRunParamsFilename() const;
    virtual string getResultsFilename() const;

    virtual void validQualityClear( int datasetIdx );
    virtual void validQualityCreate( int datasetIdx );
    virtual bool isValidQualityEmpty( int datasetIdx ) const;
    virtual bool isCalcQualityEmpty( int datasetIdx ) const;

    virtual void readResults( FileNode& fn, int datasetIdx, int caseIdx );
    virtual void writeResults( FileStorage& fs, int datasetIdx, int caseIdx ) const;

    virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
    virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
    virtual void setDefaultDatasetRunParams( int datasetIdx );

    // Factory for the concrete descriptor matcher under test (SIFT, SURF, ...).
    virtual GenericDescriptorMatch* createDescriptorMatch( int datasetIdx ) = 0;

    void run( int );
    virtual int processResults( int datasetIdx, int caseIdx );

    // Per-test-case quality metrics of a descriptor.
    struct Quality
    {
        float recall;    // correctMatches / correspondences
        float precision; // correctMatches / (correctMatches + falseMatches)
    };
    vector<vector<Quality> > validQuality; // reference values read from disk
    vector<vector<Quality> > calcQuality;  // values calculated in this run

    // Parameters shared by all descriptor tests, one entry per dataset.
    struct CommonRunParams
    {
        string keypontsFilename;        // file produced by the detector quality test
        bool projectKeypointsFrom1Image; // true: project image-1 keypoints instead of reading detected ones
        int matchFilter; // not used now
    };
    vector<CommonRunParams> commRunParams;
};
// Path of the parameter file: <data path><descriptors dir><alg name><params postfix>.
string DescriptorQualityTest::getRunParamsFilename() const
{
    string filename( ts->get_data_path() );
    filename += DESCRIPTORS_DIR;
    filename += algName;
    filename += PARAMS_POSTFIX;
    return filename;
}
// Path of the reference-results file: <data path><descriptors dir><alg name><results postfix>.
string DescriptorQualityTest::getResultsFilename() const
{
    string filename( ts->get_data_path() );
    filename += DESCRIPTORS_DIR;
    filename += algName;
    filename += RES_POSTFIX;
    return filename;
}
// Drop the reference results of one dataset (used when its data can not be read).
void DescriptorQualityTest::validQualityClear( int datasetIdx )
{
    validQuality[datasetIdx].clear();
}
// Allocate one reference-quality slot per test case for the dataset.
void DescriptorQualityTest::validQualityCreate( int datasetIdx )
{
    validQuality[datasetIdx].resize(TEST_CASE_COUNT);
}
// True when no reference results are available for the dataset.
bool DescriptorQualityTest::isValidQualityEmpty( int datasetIdx ) const
{
    return validQuality[datasetIdx].empty();
}
// True when no results were calculated for the dataset in this run.
bool DescriptorQualityTest::isCalcQualityEmpty( int datasetIdx ) const
{
    return calcQuality[datasetIdx].empty();
}
// Load one test case's reference recall/precision from file storage.
void DescriptorQualityTest::readResults( FileNode& fn, int datasetIdx, int caseIdx )
{
    Quality& q = validQuality[datasetIdx][caseIdx];
    q.recall = fn[RECALL];
    q.precision = fn[PRECISION];
}
// Persist one test case's calculated recall/precision.
void DescriptorQualityTest::writeResults( FileStorage& fs, int datasetIdx, int caseIdx ) const
{
    const Quality& q = calcQuality[datasetIdx][caseIdx];
    fs << RECALL << q.recall;
    fs << PRECISION << q.precision;
}
// Read the parameters common to all descriptor tests for one dataset.
// Fix: the qualifier was written twice ("DescriptorQualityTest::DescriptorQualityTest::");
// legal via the injected class name, but clearly unintended.
void DescriptorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
    commRunParams[datasetIdx].keypontsFilename = (string)fn[KEYPOINTS_FILENAME];
    // Bools are stored as ints in FileStorage; nonzero means enabled.
    commRunParams[datasetIdx].projectKeypointsFrom1Image = (int)fn[PROJECT_KEYPOINTS_FROM_1IMAGE] != 0;
    commRunParams[datasetIdx].matchFilter = (int)fn[MATCH_FILTER];
}
// Persist the parameters common to all descriptor tests for one dataset.
void DescriptorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
    const CommonRunParams& p = commRunParams[datasetIdx];
    fs << KEYPOINTS_FILENAME << p.keypontsFilename;
    fs << PROJECT_KEYPOINTS_FROM_1IMAGE << p.projectKeypointsFrom1Image;
    fs << MATCH_FILTER << p.matchFilter;
}
void DescriptorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
commRunParams[datasetIdx].keypontsFilename = "surf_" + DATASET_NAMES[datasetIdx] + ".xml";
commRunParams[datasetIdx].projectKeypointsFrom1Image = false;
commRunParams[datasetIdx].matchFilter = NO_MATCH_FILTER;
}
// if keyponts from first image are projected on second image using homography matrix
// Evaluate descriptor matches for the case when the second image's keypoints
// are exact projections of the first image's keypoints, so the ground truth is
// known: origIdxs2[i2] is the index of the first-image keypoint that produced
// projected keypoint i2.
//
// matches1to2[i1] is the matcher's chosen index (into the projected set) for
// first-image keypoint i1, or a negative value when no match was proposed.
//
// Outputs: every proposed match is classified into correctMatchCount or
// falseMatchCount; correspondenceCount is the number of possible correspondences.
//
// Fixes vs the previous revision:
//  * the assert contained the meaningless expression "!origIdxs2.empty() > 0";
//  * the match index was stored in a size_t and tested with "> 0", which both
//    skipped the valid index 0 and turned a -1 "no match" marker into a huge
//    positive value, causing an out-of-bounds read of origIdxs2.
void evaluateDescriptors( const std::vector<int>& origIdxs2,
                          const std::vector<int>& matches1to2,
                          int& correctMatchCount, int& falseMatchCount, int& correspondenceCount )
{
    assert( !origIdxs2.empty() && !matches1to2.empty() );

    correspondenceCount = (int)origIdxs2.size();
    correctMatchCount = falseMatchCount = 0;
    for( size_t i1 = 0; i1 < matches1to2.size(); i1++ )
    {
        int i2 = matches1to2[i1];
        if( i2 >= 0 && i2 < (int)origIdxs2.size() ) // negative index means "no match"
        {
            if( origIdxs2[i2] == (int)i1 )
                correctMatchCount++;
            else
                falseMatchCount++;
        }
    }
}
// Run the descriptor evaluation over all datasets: for each dataset the
// keypoints are loaded from the file written by the detector quality test,
// descriptors are computed and matched between the first and every other
// image, and recall/precision are stored in calcQuality.
void DescriptorQualityTest::run( int )
{
    readAllDatasetsRunParams();
    readResults(); // reference values to validate against (if present)

    int notReadDatasets = 0;
    int progress = 0, progressCount = DATASETS_COUNT*TEST_CASE_COUNT;
    for(int di = 0; di < DATASETS_COUNT; di++ )
    {
        // Keypoints are not detected here; they are read from the file
        // produced by the corresponding detector quality test.
        FileStorage keypontsFS( string(ts->get_data_path()) + KEYPOINTS_DIR + commRunParams[di].keypontsFilename,
                                FileStorage::READ );
        vector<Mat> imgs, Hs;
        if( !keypontsFS.isOpened() || !readDataset( DATASET_NAMES[di], Hs, imgs ) )
        {
            calcQuality[di].clear();
            ts->printf( CvTS::LOG, "images or homography matrices of dataset named %s can not be read OR "
                        "keypoints from file %s can not be read\n",
                        DATASET_NAMES[di].c_str(), commRunParams[di].keypontsFilename.c_str() );
            notReadDatasets++;
        }
        else
        {
            calcQuality[di].resize(TEST_CASE_COUNT);

            vector<KeyPoint> keypoints1; vector<EllipticKeyPoint> ekeypoints1;
            readKeypoints( keypontsFS, keypoints1, 0);
            if( !commRunParams[di].projectKeypointsFrom1Image )
                transformToEllipticKeyPoints( keypoints1, ekeypoints1 );
            else
            {
                // Projection mode is not functional yet.
                assert(0);
                // TODO debug!
            }
            for( int ci = 0; ci < TEST_CASE_COUNT; ci++ )
            {
                progress = update_progress( progress, di*TEST_CASE_COUNT + ci, progressCount, 0 );

                vector<KeyPoint> keypoints2; vector<EllipticKeyPoint> ekeypoints2;
                vector<int> origIdxs2; // ground-truth source index per projected keypoint
                if( commRunParams[di].projectKeypointsFrom1Image )
                {
                    // Project image-1 keypoints into image ci+1; the exact
                    // correspondences are then known via origIdxs2.
                    calcKeyPointProjections( keypoints1, Hs[ci], keypoints2 );
                    filterKeyPointsByImageSize( keypoints2, imgs[ci+1].size(), origIdxs2 );
                }
                else
                {
                    readKeypoints( keypontsFS, keypoints2, ci+1 );
                    transformToEllipticKeyPoints( keypoints2, ekeypoints2 );
                }

                // Match descriptors of image 1 against image ci+1.
                Ptr<GenericDescriptorMatch> descMatch = createDescriptorMatch(di);
                descMatch->add( imgs[ci+1], keypoints2 );
                vector<int> matches1to2;
                descMatch->match( imgs[0], keypoints1, matches1to2 );
                // TODO if( commRunParams[di].matchFilter )

                int correctMatchCount, falseMatchCount, correspCount;
                if( commRunParams[di].projectKeypointsFrom1Image )
                    evaluateDescriptors( origIdxs2, matches1to2, correctMatchCount, falseMatchCount, correspCount );
                else
                    evaluateDescriptors( ekeypoints1, ekeypoints2, matches1to2, imgs[0], imgs[ci+1], Hs[ci],
                                         correctMatchCount, falseMatchCount, correspCount );
                calcQuality[di][ci].recall = recall( correctMatchCount, correspCount );
                calcQuality[di][ci].precision = precision( correctMatchCount, falseMatchCount );
            }
        }
    }
    // Fail outright only when nothing at all could be evaluated.
    if( notReadDatasets == DATASETS_COUNT )
    {
        ts->printf(CvTS::LOG, "All datasets were not be read\n");
        ts->set_failed_test_info( CvTS::FAIL_INVALID_TEST_DATA );
    }
    else
        processResults();
}
// Compare calculated quality against the stored reference values for one
// dataset/test-case pair. Accuracy is "bad" when a calculated value drops more
// than rltvEps below the reference (improvements are accepted).
// Returns CvTS::OK or CvTS::FAIL_BAD_ACCURACY.
int DescriptorQualityTest::processResults( int datasetIdx, int caseIdx )
{
    int res = CvTS::OK;
    Quality valid = validQuality[datasetIdx][caseIdx], calc = calcQuality[datasetIdx][caseIdx];

    bool isBadAccuracy;
    const float rltvEps = 0.001f; // float literal avoids double->float narrowing (matches detector test)
    ts->printf(CvTS::LOG, "%s: calc=%f, valid=%f", RECALL.c_str(), calc.recall, valid.recall );
    isBadAccuracy = valid.recall - calc.recall > rltvEps;
    testLog( ts, isBadAccuracy );
    res = isBadAccuracy ? CvTS::FAIL_BAD_ACCURACY : res;

    ts->printf(CvTS::LOG, "%s: calc=%f, valid=%f", PRECISION.c_str(), calc.precision, valid.precision );
    isBadAccuracy = valid.precision - calc.precision > rltvEps;
    testLog( ts, isBadAccuracy );
    res = isBadAccuracy ? CvTS::FAIL_BAD_ACCURACY : res;

    return res;
}
//--------------------------------- SIFT descriptor test --------------------------------------------
// Quality test of the SIFT descriptor; per-dataset parameters mirror
// SiftDescriptorExtractor's constructor arguments.
class SiftDescriptorQualityTest : public DescriptorQualityTest
{
public:
    SiftDescriptorQualityTest() : DescriptorQualityTest( "sift", "quality-sift-descriptor" )
    { runParams.resize(DATASETS_COUNT); }

protected:
    virtual GenericDescriptorMatch* createDescriptorMatch( int datasetIdx );
    virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
    virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
    virtual void setDefaultDatasetRunParams( int datasetIdx );

    // SIFT descriptor parameters for one dataset.
    struct RunParams
    {
        double magnification;
        bool isNormalize;
        int nOctaves;
        int nOctaveLayers;
        int firstOctave;
    };
    vector<RunParams> runParams; // one entry per dataset
};
// Build a SIFT extractor with the dataset's parameters and pair it with a
// brute-force L2 matcher.
GenericDescriptorMatch* SiftDescriptorQualityTest::createDescriptorMatch( int datasetIdx )
{
    const RunParams& rp = runParams[datasetIdx];
    SiftDescriptorExtractor siftExtractor( rp.magnification, rp.isNormalize,
                                           rp.nOctaves, rp.nOctaveLayers, rp.firstOctave );
    BruteForceMatcher<L2<float> > l2Matcher;
    return new VectorDescriptorMatch<SiftDescriptorExtractor, BruteForceMatcher<L2<float> > >(siftExtractor, l2Matcher);
}
// Read the common parameters first, then the SIFT-specific ones.
void SiftDescriptorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
    DescriptorQualityTest::readDatasetRunParams( fn, datasetIdx);
    RunParams& rp = runParams[datasetIdx];
    rp.magnification = fn["magnification"];
    rp.isNormalize = (int)fn["isNormalize"] != 0;
    rp.nOctaves = fn["nOctaves"];
    rp.nOctaveLayers = fn["nOctaveLayers"];
    rp.firstOctave = fn["firstOctave"];
}
// Persist the common parameters first, then the SIFT-specific ones.
void SiftDescriptorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
    DescriptorQualityTest::writeDatasetRunParams( fs, datasetIdx );
    const RunParams& rp = runParams[datasetIdx];
    fs << "magnification" << rp.magnification;
    fs << "isNormalize" << rp.isNormalize;
    fs << "nOctaves" << rp.nOctaves;
    fs << "nOctaveLayers" << rp.nOctaveLayers;
    fs << "firstOctave" << rp.firstOctave;
}
void SiftDescriptorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
DescriptorQualityTest::setDefaultDatasetRunParams( datasetIdx );
runParams[datasetIdx].magnification = SIFT::DescriptorParams::GET_DEFAULT_MAGNIFICATION();
runParams[datasetIdx].isNormalize = SIFT::DescriptorParams::DEFAULT_IS_NORMALIZE;
runParams[datasetIdx].nOctaves = SIFT::CommonParams::DEFAULT_NOCTAVES;
runParams[datasetIdx].nOctaveLayers = SIFT::CommonParams::DEFAULT_NOCTAVE_LAYERS;
runParams[datasetIdx].firstOctave = SIFT::CommonParams::DEFAULT_FIRST_OCTAVE;
}
SiftDescriptorQualityTest siftDescriptorQuality;
//--------------------------------- SURF descriptor test --------------------------------------------
// Quality test of the SURF descriptor; per-dataset parameters mirror
// SurfDescriptorExtractor's constructor arguments.
class SurfDescriptorQualityTest : public DescriptorQualityTest
{
public:
    SurfDescriptorQualityTest() : DescriptorQualityTest( "surf", "quality-surf-descriptor" )
    { runParams.resize(DATASETS_COUNT); }

protected:
    virtual GenericDescriptorMatch* createDescriptorMatch( int datasetIdx );
    virtual void readDatasetRunParams( FileNode& fn, int datasetIdx );
    virtual void writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const;
    virtual void setDefaultDatasetRunParams( int datasetIdx );

    // SURF descriptor parameters for one dataset.
    struct RunParams
    {
        int nOctaves;
        int nOctaveLayers;
        bool extended; // extended (128-element) descriptor flag
    };
    vector<RunParams> runParams; // one entry per dataset
};
// Build a SURF extractor with the dataset's parameters and pair it with a
// brute-force L2 matcher.
GenericDescriptorMatch* SurfDescriptorQualityTest::createDescriptorMatch( int datasetIdx )
{
    const RunParams& rp = runParams[datasetIdx];
    SurfDescriptorExtractor surfExtractor( rp.nOctaves, rp.nOctaveLayers, rp.extended );
    BruteForceMatcher<L2<float> > l2Matcher;
    return new VectorDescriptorMatch<SurfDescriptorExtractor, BruteForceMatcher<L2<float> > >(surfExtractor, l2Matcher);
}
// Read the common parameters first, then the SURF-specific ones.
void SurfDescriptorQualityTest::readDatasetRunParams( FileNode& fn, int datasetIdx )
{
    DescriptorQualityTest::readDatasetRunParams( fn, datasetIdx);
    RunParams& rp = runParams[datasetIdx];
    rp.nOctaves = fn["nOctaves"];
    rp.nOctaveLayers = fn["nOctaveLayers"];
    rp.extended = (int)fn["extended"] != 0;
}
// Persist the common parameters first, then the SURF-specific ones.
void SurfDescriptorQualityTest::writeDatasetRunParams( FileStorage& fs, int datasetIdx ) const
{
    DescriptorQualityTest::writeDatasetRunParams( fs, datasetIdx );
    const RunParams& rp = runParams[datasetIdx];
    fs << "nOctaves" << rp.nOctaves;
    fs << "nOctaveLayers" << rp.nOctaveLayers;
    fs << "extended" << rp.extended;
}
void SurfDescriptorQualityTest::setDefaultDatasetRunParams( int datasetIdx )
{
DescriptorQualityTest::setDefaultDatasetRunParams( datasetIdx );
runParams[datasetIdx].nOctaves = 4;
runParams[datasetIdx].nOctaveLayers = 2;
runParams[datasetIdx].extended = false;
}
SurfDescriptorQualityTest surfDescriptorQuality;
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment