Commit c02b241e authored by catree

Port GMS matching by JiaWang Bian into xfeatures2d module.

parent 670acd99
@@ -122,3 +122,10 @@
year={2004},
publisher={Springer}
}
@inproceedings{Bian2017gms,
title={GMS: Grid-based Motion Statistics for Fast, Ultra-robust Feature Correspondence},
author={JiaWang Bian and Wen-Yan Lin and Yasuyuki Matsushita and Sai-Kit Yeung and Tan Dat Nguyen and Ming-Ming Cheng},
booktitle={IEEE Conference on Computer Vision and Pattern Recognition},
year={2017}
}
@@ -53,6 +53,10 @@ This section describes experimental algorithms for 2d feature detection.
This section describes two popular algorithms for 2d feature detection, SIFT and SURF, that are
known to be patented. Use them at your own risk.
@defgroup xfeatures2d_match Experimental 2D Features Matching Algorithm
This section describes the GMS (Grid-based Motion Statistics) matching strategy.
@}
*/
@@ -941,6 +945,32 @@ CV_EXPORTS void FASTForPointSet( InputArray image, CV_IN_OUT std::vector<KeyPoin
//! @}
//! @addtogroup xfeatures2d_match
//! @{
/** @brief GMS (Grid-based Motion Statistics) feature matching strategy by @cite Bian2017gms .
@param size1 Input size of image1.
@param size2 Input size of image2.
@param keypoints1 Input keypoints of image1.
@param keypoints2 Input keypoints of image2.
@param matches1to2 Input 1-nearest neighbor matches.
@param matchesGMS Matches returned by the GMS matching strategy.
@param withRotation Take rotation transformation into account.
@param withScale Take scale transformation into account.
@param thresholdFactor The higher, the fewer matches.
@note
Since GMS works well when the number of features is large, we recommend using ORB features and setting FastThreshold to 0 to get as many features as possible quickly.
If matching results are not satisfactory, please add more features (we use 10000 features for 640 x 480 images).
If your images have large rotation and scale changes, please set withRotation or withScale to true.
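A minimal usage sketch, assuming the cv and cv::xfeatures2d namespaces are in scope and that img1 and img2 are already loaded (ORB features with a brute-force Hamming matcher, as recommended above):
@code{.cpp}
Ptr<ORB> orb = ORB::create(10000);
orb->setFastThreshold(0);
std::vector<KeyPoint> keypoints1, keypoints2;
Mat descriptors1, descriptors2;
orb->detectAndCompute(img1, noArray(), keypoints1, descriptors1);
orb->detectAndCompute(img2, noArray(), keypoints2, descriptors2);
Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
std::vector<DMatch> matchesAll, matchesGMS;
matcher->match(descriptors1, descriptors2, matchesAll); // 1-nearest-neighbor matches, image1 -> image2
matchGMS(img1.size(), img2.size(), keypoints1, keypoints2, matchesAll, matchesGMS);
@endcode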
*/
CV_EXPORTS void matchGMS( const Size& size1, const Size& size2, const std::vector<KeyPoint>& keypoints1, const std::vector<KeyPoint>& keypoints2,
const std::vector<DMatch>& matches1to2, std::vector<DMatch>& matchesGMS, const bool withRotation = false,
const bool withScale = false, const double thresholdFactor = 6.0 );
//! @}
}
}
#include <iostream>
#include <opencv2/core.hpp>
#include <opencv2/videoio.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgproc.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/flann.hpp>
#include <opencv2/xfeatures2d.hpp>
using namespace cv;
using namespace cv::xfeatures2d;
////////////////////////////////////////////////////
// This program demonstrates the GMS matching strategy.
int main(int argc, char* argv[])
{
const char* keys =
"{ h help | | print help message }"
"{ l left | | specify left (reference) image }"
"{ r right | | specify right (query) image }"
"{ camera | 0 | specify the camera device number }"
"{ nfeatures | 10000 | specify the maximum number of ORB features }"
"{ fastThreshold | 20 | specify the FAST threshold }"
"{ drawSimple | true | do not draw not matched keypoints }"
"{ withRotation | false | take rotation into account }"
"{ withScale | false | take scale into account }";
CommandLineParser cmd(argc, argv, keys);
if (cmd.has("help"))
{
std::cout << "Usage: gms_matcher [options]" << std::endl;
std::cout << "Available options:" << std::endl;
cmd.printMessage();
return EXIT_SUCCESS;
}
Ptr<ORB> orb = ORB::create(cmd.get<int>("nfeatures"));
orb->setFastThreshold(cmd.get<int>("fastThreshold"));
Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
if (!cmd.get<String>("left").empty() && !cmd.get<String>("right").empty())
{
Mat imgL = imread(cmd.get<String>("left"));
Mat imgR = imread(cmd.get<String>("right"));
std::vector<KeyPoint> kpRef, kpCur;
Mat descRef, descCur;
orb->detectAndCompute(imgL, noArray(), kpRef, descRef);
orb->detectAndCompute(imgR, noArray(), kpCur, descCur);
std::vector<DMatch> matchesAll, matchesGMS;
matcher->match(descCur, descRef, matchesAll);
matchGMS(imgR.size(), imgL.size(), kpCur, kpRef, matchesAll, matchesGMS, cmd.get<bool>("withRotation"), cmd.get<bool>("withScale"));
std::cout << "matchesGMS: " << matchesGMS.size() << std::endl;
Mat frameMatches;
if (cmd.get<bool>("drawSimple"))
drawMatches(imgR, kpCur, imgL, kpRef, matchesGMS, frameMatches, Scalar::all(-1), Scalar::all(-1),
std::vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
else
drawMatches(imgR, kpCur, imgL, kpRef, matchesGMS, frameMatches);
imshow("Matches GMS", frameMatches);
waitKey();
}
else
{
std::vector<KeyPoint> kpRef;
Mat descRef;
VideoCapture capture(cmd.get<int>("camera"));
// Camera warm-up
for (int i = 0; i < 10; i++)
{
Mat frame;
capture >> frame;
}
Mat frameRef;
for (;;)
{
Mat frame;
capture >> frame;
if (frameRef.empty())
{
frame.copyTo(frameRef);
orb->detectAndCompute(frameRef, noArray(), kpRef, descRef);
}
TickMeter tm;
tm.start();
std::vector<KeyPoint> kp;
Mat desc;
orb->detectAndCompute(frame, noArray(), kp, desc);
tm.stop();
double t_orb = tm.getTimeMilli();
tm.reset();
tm.start();
std::vector<DMatch> matchesAll, matchesGMS;
matcher->match(desc, descRef, matchesAll);
tm.stop();
double t_match = tm.getTimeMilli();
tm.reset();
tm.start();
matchGMS(frame.size(), frameRef.size(), kp, kpRef, matchesAll, matchesGMS, cmd.get<bool>("withRotation"), cmd.get<bool>("withScale"));
tm.stop();
Mat frameMatches;
if (cmd.get<bool>("drawSimple"))
drawMatches(frame, kp, frameRef, kpRef, matchesGMS, frameMatches, Scalar::all(-1), Scalar::all(-1),
std::vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
else
drawMatches(frame, kp, frameRef, kpRef, matchesGMS, frameMatches);
String label = format("ORB: %.2f ms", t_orb);
putText(frameMatches, label, Point(20, 20), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0,0,255));
label = format("Matching: %.2f ms", t_match);
putText(frameMatches, label, Point(20, 40), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0,0,255));
label = format("GMS matching: %.2f ms", tm.getTimeMilli());
putText(frameMatches, label, Point(20, 60), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0,0,255));
putText(frameMatches, "Press r to reinitialize the reference image.", Point(frameMatches.cols-380, 20), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0,0,255));
putText(frameMatches, "Press esc to quit.", Point(frameMatches.cols-180, 40), FONT_HERSHEY_SIMPLEX, 0.5, Scalar(0,0,255));
imshow("Matches GMS", frameMatches);
int c = waitKey(30);
if (c == 27)
break;
else if (c == 'r')
{
frame.copyTo(frameRef);
orb->detectAndCompute(frameRef, noArray(), kpRef, descRef);
}
}
}
return EXIT_SUCCESS;
}
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
/*********************************************************************
* This is the implementation of the paper
* GMS: Grid-based Motion Statistics for Fast, Ultra-robust Feature Correspondence.
* JiaWang Bian, Wen-Yan Lin, Yasuyuki Matsushita, Sai-Kit Yeung, Tan Dat Nguyen, Ming-Ming Cheng
* IEEE CVPR, 2017
* ProjectPage: http://jwbian.net/gms
*********************************************************************/
#include "precomp.hpp"
#include <algorithm>
using namespace std;
namespace cv
{
namespace xfeatures2d
{
// 8 possible rotations, each one a 3 x 3 index pattern
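// Each pattern gives, for the j-th cell (row-major) of the left 3x3 neighborhood, the 1-based
// row-major position of the corresponding cell in the right 3x3 neighborhood. Pattern 1 is the
// identity; each subsequent pattern rotates the outer ring by one step (45 degrees) while the
// center cell (5) stays fixed. The pattern is selected by rotationType in verifyCellPairs().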
const int mRotationPatterns[8][9] = {
{
1,2,3,
4,5,6,
7,8,9
},
{
4,1,2,
7,5,3,
8,9,6
},
{
7,4,1,
8,5,2,
9,6,3
},
{
8,7,4,
9,5,1,
6,3,2
},
{
9,8,7,
6,5,4,
3,2,1
},
{
6,9,8,
3,5,7,
2,1,4
},
{
3,6,9,
2,5,8,
1,4,7
},
{
2,3,6,
1,5,9,
4,7,8
}
};
// 5 scale levels (ratios applied to the right grid relative to the left grid)
const double mScaleRatios[5] = { 1.0, 1.0 / 2, 1.0 / std::sqrt(2.0), std::sqrt(2.0), 2.0 };
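// When withScale is true, getInlierMask() evaluates every ratio via setScale() and keeps the
// scale hypothesis that yields the largest number of inliers.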
class GMSMatcher
{
public:
// OpenCV keypoints, corresponding image sizes and nearest-neighbor matches
GMSMatcher(const vector<KeyPoint>& vkp1, const Size& size1, const vector<KeyPoint>& vkp2, const Size& size2,
const vector<DMatch>& vDMatches, const double thresholdFactor) : mThresholdFactor(thresholdFactor)
{
// Initialize input
normalizePoints(vkp1, size1, mvP1);
normalizePoints(vkp2, size2, mvP2);
mNumberMatches = vDMatches.size();
convertMatches(vDMatches, mvMatches);
// Initialize grid
mGridSizeLeft = Size(20, 20);
mGridNumberLeft = mGridSizeLeft.width * mGridSizeLeft.height;
// Initialize the neighbor of left grid
mGridNeighborLeft = Mat::zeros(mGridNumberLeft, 9, CV_32SC1);
initalizeNeighbors(mGridNeighborLeft, mGridSizeLeft);
}
~GMSMatcher() {}
// Get Inlier Mask
// Return number of inliers
int getInlierMask(vector<bool> &vbInliers, const bool withRotation = false, const bool withScale = false);
private:
// Normalized Points
vector<Point2f> mvP1, mvP2;
// Matches
vector<pair<int, int> > mvMatches;
// Number of Matches
size_t mNumberMatches;
// Grid Size
Size mGridSizeLeft, mGridSizeRight;
int mGridNumberLeft;
int mGridNumberRight;
// x : left grid idx
// y : right grid idx
// value : how many matches from idx_left to idx_right
Mat mMotionStatistics;
// Number of matched points falling in each left grid cell
vector<int> mNumberPointsInPerCellLeft;
// Index : grid_idx_left
// Value : grid_idx_right
vector<int> mCellPairs;
// Every match has a cell pair
// first : grid_idx_left
// second : grid_idx_right
vector<pair<int, int> > mvMatchPairs;
// Inlier Mask for output
vector<bool> mvbInlierMask;
// 3x3 neighborhood cell indices for every grid cell (one row per cell, -1 outside the grid)
Mat mGridNeighborLeft;
Mat mGridNeighborRight;
double mThresholdFactor;
// Assign Matches to Cell Pairs
void assignMatchPairs(const int GridType);
void convertMatches(const vector<DMatch> &vDMatches, vector<pair<int, int> > &vMatches);
int getGridIndexLeft(const Point2f &pt, const int type);
int getGridIndexRight(const Point2f &pt);
vector<int> getNB9(const int idx, const Size& GridSize);
void initalizeNeighbors(Mat &neighbor, const Size& GridSize);
void normalizePoints(const vector<KeyPoint> &kp, const Size &size, vector<Point2f> &npts);
// Run
int run(const int rotationType);
void setScale(const int scale);
// Verify Cell Pairs
void verifyCellPairs(const int rotationType);
};
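// Pipeline for one (scale, rotation) hypothesis, see run():
// 1. assignMatchPairs(): bin every match into a (left cell, right cell) pair and accumulate the
//    votes in the motion statistics matrix.
// 2. verifyCellPairs(): for each left cell, pick the right cell with the most votes and accept the
//    pair only if its neighborhood support exceeds the GMS threshold.
// This is repeated for the four half-cell-shifted left grids, accumulating the inlier marks.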
void GMSMatcher::assignMatchPairs(const int gridType)
{
for (size_t i = 0; i < mNumberMatches; i++)
{
Point2f &lp = mvP1[mvMatches[i].first];
Point2f &rp = mvP2[mvMatches[i].second];
int lgidx = mvMatchPairs[i].first = getGridIndexLeft(lp, gridType);
int rgidx = -1;
if (gridType == 1)
{
rgidx = mvMatchPairs[i].second = getGridIndexRight(rp);
}
else
{
rgidx = mvMatchPairs[i].second;
}
if (lgidx < 0 || rgidx < 0) continue;
mMotionStatistics.at<int>(lgidx, rgidx)++;
mNumberPointsInPerCellLeft[lgidx]++;
}
}
// Convert OpenCV DMatch to Match (pair<int, int>)
void GMSMatcher::convertMatches(const vector<DMatch> &vDMatches, vector<pair<int, int> > &vMatches)
{
vMatches.resize(mNumberMatches);
for (size_t i = 0; i < mNumberMatches; i++)
vMatches[i] = pair<int, int>(vDMatches[i].queryIdx, vDMatches[i].trainIdx);
}
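// The left grid index is computed for four grid layouts: type 1 is the original 20 x 20 grid,
// while types 2, 3 and 4 shift the grid by half a cell in x, in y, and in both directions.
// Running the algorithm over all four layouts reduces the effect of features falling near cell
// borders.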
int GMSMatcher::getGridIndexLeft(const Point2f &pt, const int type)
{
int x = 0, y = 0;
if (type == 1) {
x = cvFloor(pt.x * mGridSizeLeft.width);
y = cvFloor(pt.y * mGridSizeLeft.height);
}
if (type == 2) {
x = cvFloor(pt.x * mGridSizeLeft.width + 0.5);
y = cvFloor(pt.y * mGridSizeLeft.height);
}
if (type == 3) {
x = cvFloor(pt.x * mGridSizeLeft.width);
y = cvFloor(pt.y * mGridSizeLeft.height + 0.5);
}
if (type == 4) {
x = cvFloor(pt.x * mGridSizeLeft.width + 0.5);
y = cvFloor(pt.y * mGridSizeLeft.height + 0.5);
}
if (x >= mGridSizeLeft.width || y >= mGridSizeLeft.height)
return -1;
return x + y * mGridSizeLeft.width;
}
int GMSMatcher::getGridIndexRight(const Point2f &pt)
{
int x = cvFloor(pt.x * mGridSizeRight.width);
int y = cvFloor(pt.y * mGridSizeRight.height);
return x + y * mGridSizeRight.width;
}
int GMSMatcher::getInlierMask(vector<bool> &vbInliers, const bool withRotation, const bool withScale)
{
int max_inlier = 0;
if (!withScale && !withRotation)
{
setScale(0);
max_inlier = run(1);
vbInliers = mvbInlierMask;
return max_inlier;
}
if (withRotation && withScale)
{
for (int scale = 0; scale < 5; scale++)
{
setScale(scale);
for (int rotationType = 1; rotationType <= 8; rotationType++)
{
int num_inlier = run(rotationType);
if (num_inlier > max_inlier)
{
vbInliers = mvbInlierMask;
max_inlier = num_inlier;
}
}
}
return max_inlier;
}
if (withRotation && !withScale)
{
setScale(0);
for (int rotationType = 1; rotationType <= 8; rotationType++)
{
int num_inlier = run(rotationType);
if (num_inlier > max_inlier)
{
vbInliers = mvbInlierMask;
max_inlier = num_inlier;
}
}
return max_inlier;
}
if (!withRotation && withScale)
{
for (int scale = 0; scale < 5; scale++)
{
setScale(scale);
int num_inlier = run(1);
if (num_inlier > max_inlier)
{
vbInliers = mvbInlierMask;
max_inlier = num_inlier;
}
}
return max_inlier;
}
return max_inlier;
}
// Get the 9 cell indices of the 3x3 neighborhood (-1 for cells outside the grid)
vector<int> GMSMatcher::getNB9(const int idx, const Size& gridSize)
{
vector<int> NB9(9, -1);
int idx_x = idx % gridSize.width;
int idx_y = idx / gridSize.width;
for (int yi = -1; yi <= 1; yi++)
{
for (int xi = -1; xi <= 1; xi++)
{
int idx_xx = idx_x + xi;
int idx_yy = idx_y + yi;
if (idx_xx < 0 || idx_xx >= gridSize.width || idx_yy < 0 || idx_yy >= gridSize.height)
continue;
NB9[xi + 4 + yi * 3] = idx_xx + idx_yy * gridSize.width;
}
}
return NB9;
}
void GMSMatcher::initalizeNeighbors(Mat &neighbor, const Size& gridSize)
{
for (int i = 0; i < neighbor.rows; i++)
{
vector<int> NB9 = getNB9(i, gridSize);
int *data = neighbor.ptr<int>(i);
memcpy(data, &NB9[0], sizeof(int) * 9);
}
}
// Normalize keypoint coordinates to the range [0, 1]
void GMSMatcher::normalizePoints(const vector<KeyPoint> &kp, const Size &size, vector<Point2f> &npts)
{
const size_t numP = kp.size();
const int width = size.width;
const int height = size.height;
npts.resize(numP);
for (size_t i = 0; i < numP; i++)
{
npts[i].x = kp[i].pt.x / width;
npts[i].y = kp[i].pt.y / height;
}
}
int GMSMatcher::run(const int rotationType)
{
mvbInlierMask.assign(mNumberMatches, false);
// Initialize motion statistics
mMotionStatistics = Mat::zeros(mGridNumberLeft, mGridNumberRight, CV_32SC1);
mvMatchPairs.assign(mNumberMatches, pair<int, int>(0, 0));
for (int gridType = 1; gridType <= 4; gridType++)
{
// initialize
mMotionStatistics.setTo(0);
mCellPairs.assign(mGridNumberLeft, -1);
mNumberPointsInPerCellLeft.assign(mGridNumberLeft, 0);
assignMatchPairs(gridType);
verifyCellPairs(rotationType);
// Mark inliers
for (size_t i = 0; i < mNumberMatches; i++)
{
if (mCellPairs[mvMatchPairs[i].first] == mvMatchPairs[i].second)
mvbInlierMask[i] = true;
}
}
return (int) count(mvbInlierMask.begin(), mvbInlierMask.end(), true); //number of inliers
}
void GMSMatcher::setScale(const int scale)
{
// Set Scale
mGridSizeRight.width = cvRound(mGridSizeLeft.width * mScaleRatios[scale]);
mGridSizeRight.height = cvRound(mGridSizeLeft.height * mScaleRatios[scale]);
mGridNumberRight = mGridSizeRight.width * mGridSizeRight.height;
// Initialize the neighbor of right grid
mGridNeighborRight = Mat::zeros(mGridNumberRight, 9, CV_32SC1);
initalizeNeighbors(mGridNeighborRight, mGridSizeRight);
}
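// For each left cell, the candidate right cell is the one receiving the most matches. The pair is
// kept only if the support score summed over the corresponding 3x3 neighborhoods (the right one
// reordered according to rotationType) exceeds thresholdFactor * sqrt(average number of matches
// per neighborhood cell); otherwise the cell is marked as rejected (-2).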
void GMSMatcher::verifyCellPairs(const int rotationType)
{
const int *CurrentRP = mRotationPatterns[rotationType - 1];
for (int i = 0; i < mGridNumberLeft; i++)
{
if (sum(mMotionStatistics.row(i))[0] == 0)
{
mCellPairs[i] = -1;
continue;
}
int max_number = 0;
for (int j = 0; j < mGridNumberRight; j++)
{
int *value = mMotionStatistics.ptr<int>(i);
if (value[j] > max_number)
{
mCellPairs[i] = j;
max_number = value[j];
}
}
int idx_grid_rt = mCellPairs[i];
const int *NB9_lt = mGridNeighborLeft.ptr<int>(i);
const int *NB9_rt = mGridNeighborRight.ptr<int>(idx_grid_rt);
int score = 0;
double thresh = 0;
int numpair = 0;
for (size_t j = 0; j < 9; j++)
{
int ll = NB9_lt[j];
int rr = NB9_rt[CurrentRP[j] - 1];
if (ll == -1 || rr == -1)
continue;
score += mMotionStatistics.at<int>(ll, rr);
thresh += mNumberPointsInPerCellLeft[ll];
numpair++;
}
thresh = mThresholdFactor * std::sqrt(thresh / numpair);
if (score < thresh)
mCellPairs[i] = -2;
}
}
void matchGMS( const Size& size1, const Size& size2, const vector<KeyPoint>& keypoints1, const vector<KeyPoint>& keypoints2,
const vector<DMatch>& matches1to2, vector<DMatch>& matchesGMS, const bool withRotation, const bool withScale,
const double thresholdFactor )
{
GMSMatcher gms(keypoints1, size1, keypoints2, size2, matches1to2, thresholdFactor);
vector<bool> inlierMask;
gms.getInlierMask(inlierMask, withRotation, withScale);
matchesGMS.clear();
for (size_t i = 0; i < inlierMask.size(); i++) {
if (inlierMask[i])
matchesGMS.push_back(matches1to2[i]);
}
}
} //namespace xfeatures2d
} //namespace cv
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "test_precomp.hpp"
using namespace std;
using namespace cv;
using namespace cv::xfeatures2d;
class CV_GMSMatcherTest : public cvtest::BaseTest
{
public:
CV_GMSMatcherTest();
~CV_GMSMatcherTest();
protected:
virtual void run(int);
bool combinations[4][2];
double eps[3][4]; //3 imgs x 4 combinations
double correctMatchDistThreshold;
};
CV_GMSMatcherTest::CV_GMSMatcherTest()
{
combinations[0][0] = false; combinations[0][1] = false;
combinations[1][0] = false; combinations[1][1] = true;
combinations[2][0] = true; combinations[2][1] = false;
combinations[3][0] = true; combinations[3][1] = true;
// Threshold = truncate(min(acc_win32, acc_win64))
eps[0][0] = 0.9313;
eps[0][1] = 0.9223;
eps[0][2] = 0.9313;
eps[0][3] = 0.9223;
eps[1][0] = 0.8199;
eps[1][1] = 0.7964;
eps[1][2] = 0.8199;
eps[1][3] = 0.7964;
eps[2][0] = 0.7098;
eps[2][1] = 0.6659;
eps[2][2] = 0.6939;
eps[2][3] = 0.6457;
correctMatchDistThreshold = 5.0;
}
CV_GMSMatcherTest::~CV_GMSMatcherTest() {}
void CV_GMSMatcherTest::run( int )
{
ts->set_failed_test_info(cvtest::TS::OK);
Mat imgRef = imread(string(ts->get_data_path()) + "detectors_descriptors_evaluation/images_datasets/graf/img1.png");
Ptr<Feature2D> orb = ORB::create(10000);
vector<KeyPoint> keypointsRef, keypointsCur;
Mat descriptorsRef, descriptorsCur;
orb->detectAndCompute(imgRef, noArray(), keypointsRef, descriptorsRef);
vector<DMatch> matchesAll, matchesGMS;
Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
const int startImg = 2;
const int nImgs = 3;
for (int num = startImg; num < startImg+nImgs; num++)
{
string imgPath = string(ts->get_data_path()) + format("detectors_descriptors_evaluation/images_datasets/graf/img%d.png", num);
Mat imgCur = imread(imgPath);
orb->detectAndCompute(imgCur, noArray(), keypointsCur, descriptorsCur);
matcher->match(descriptorsCur, descriptorsRef, matchesAll);
string xml = string(ts->get_data_path()) + format("detectors_descriptors_evaluation/images_datasets/graf/H1to%dp.xml", num);
FileStorage fs(xml, FileStorage::READ);
if (!fs.isOpened())
{
ts->set_failed_test_info(cvtest::TS::FAIL_INVALID_TEST_DATA);
return;
}
Mat H1toCur;
fs[format("H1%d", num)] >> H1toCur;
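// For each (withRotation, withScale) combination, project the reference keypoint of every GMS
// match into the current image with the ground-truth homography and count it as correct when it
// lands within correctMatchDistThreshold pixels of the matched keypoint; the ratio of correct
// matches must reach the per-image, per-combination threshold in eps.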
for (int comb = 0; comb < 4; comb++)
{
matchGMS(imgCur.size(), imgRef.size(), keypointsCur, keypointsRef, matchesAll, matchesGMS, combinations[comb][0], combinations[comb][1]);
int nbCorrectMatches = 0;
for (size_t i = 0; i < matchesGMS.size(); i++)
{
Point2f ptRef = keypointsRef[matchesGMS[i].trainIdx].pt;
Point2f ptCur = keypointsCur[matchesGMS[i].queryIdx].pt;
Mat matRef = (Mat_<double>(3,1) << ptRef.x, ptRef.y, 1);
Mat matTrans = H1toCur * matRef;
Point2f ptTrans( (float) (matTrans.at<double>(0,0)/matTrans.at<double>(2,0)),
(float) (matTrans.at<double>(1,0)/matTrans.at<double>(2,0)));
if (norm(ptTrans-ptCur) < correctMatchDistThreshold)
nbCorrectMatches++;
}
double ratio = nbCorrectMatches / (double) matchesGMS.size();
if (ratio < eps[num-startImg][comb])
{
ts->printf( cvtest::TS::LOG, "Invalid accuracy for image %s and combination withRotation=%d withScale=%d, "
"matches ratio is %f, ratio threshold is %f, distance threshold is %f.\n",
imgPath.substr(imgPath.size()-8).c_str(), combinations[comb][0], combinations[comb][1], ratio,
eps[num-startImg][comb], correctMatchDistThreshold);
ts->set_failed_test_info(cvtest::TS::FAIL_BAD_ACCURACY);
}
}
}
}
TEST(XFeatures2d_GMSMatcher, gms_matcher_regression) { CV_GMSMatcherTest test; test.safe_run(); }