Commit 1a5fcd71 authored by Ievgen Khvedchenia

Refactor of KAZE and AKAZE:

1) Cleaned up unused code
2) Removed the SURF extraction method
3) Enabled threading for KAZE extraction
4) Exposed new properties for runtime configuration (usage sketch below)
parent 220de140
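For orientation, here is a minimal usage sketch of the API after this refactor. It is not part of the commit: the function name describe_with_kaze_akaze, the grayscale input gray and the chosen parameter values are hypothetical, and it assumes the OpenCV 3.0-dev headers as modified on this branch.

#include <opencv2/core.hpp>
#include <opencv2/features2d.hpp>
#include <vector>

// Hypothetical usage sketch: exercise the new descriptor-type enums and the
// extra constructor arguments introduced by this commit.
static void describe_with_kaze_akaze(const cv::Mat& gray)
{
    std::vector<cv::KeyPoint> kpts;
    cv::Mat desc;

    // KAZE: the descriptor type and the extended/upright flags are now explicit.
    cv::KAZE kaze(cv::KAZE::DESCRIPTOR_MSURF, /*_extended=*/false, /*_upright=*/false);
    kaze.detect(gray, kpts);
    kaze.compute(gray, kpts, desc);

    // AKAZE: full-length MLDB binary descriptor (descriptor_size = 0, 3 channels).
    cv::AKAZE akaze(cv::AKAZE::DESCRIPTOR_MLDB, /*_descriptor_size=*/0, /*_descriptor_channels=*/3);
    kpts.clear();
    akaze.detect(gray, kpts);
    akaze.compute(gray, kpts, desc);
}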
@@ -893,7 +893,15 @@ KAZE implementation
 class CV_EXPORTS_W KAZE : public Feature2D
 {
 public:
-    CV_WRAP explicit KAZE(bool _extended = false);
+    /// AKAZE Descriptor Type
+    enum DESCRIPTOR_TYPE {
+        DESCRIPTOR_MSURF = 1,
+        DESCRIPTOR_GSURF = 2
+    };
+    CV_WRAP KAZE();
+    CV_WRAP explicit KAZE(DESCRIPTOR_TYPE type, bool _extended, bool _upright);
     virtual ~KAZE();
@@ -917,7 +925,9 @@ protected:
     void detectImpl(InputArray image, std::vector<KeyPoint>& keypoints, InputArray mask) const;
     void computeImpl(InputArray image, std::vector<KeyPoint>& keypoints, OutputArray descriptors) const;
+    CV_PROP int descriptor;
     CV_PROP bool extended;
+    CV_PROP bool upright;
 };
 /*!
@@ -926,7 +936,16 @@ AKAZE implementation
 class CV_EXPORTS_W AKAZE : public Feature2D
 {
 public:
-    CV_WRAP explicit AKAZE(int _descriptor = 5, int _descriptor_size = 0, int _descriptor_channels = 3);
+    /// AKAZE Descriptor Type
+    enum DESCRIPTOR_TYPE {
+        DESCRIPTOR_KAZE_UPRIGHT = 2, ///< Upright descriptors, not invariant to rotation
+        DESCRIPTOR_KAZE = 3,
+        DESCRIPTOR_MLDB_UPRIGHT = 4, ///< Upright descriptors, not invariant to rotation
+        DESCRIPTOR_MLDB = 5
+    };
+    CV_WRAP AKAZE();
+    CV_WRAP explicit AKAZE(DESCRIPTOR_TYPE _descriptor, int _descriptor_size = 0, int _descriptor_channels = 3);
     virtual ~AKAZE();
@@ -951,8 +970,8 @@ protected:
     void computeImpl(InputArray image, std::vector<KeyPoint>& keypoints, OutputArray descriptors) const;
     void detectImpl(InputArray image, std::vector<KeyPoint>& keypoints, InputArray mask = noArray()) const;
-    CV_PROP int descriptor_channels;
     CV_PROP int descriptor;
+    CV_PROP int descriptor_channels;
     CV_PROP int descriptor_size;
 };
...
@@ -53,10 +53,16 @@ http://www.robesafe.com/personal/pablo.alcantarilla/papers/Alcantarilla13bmvc.pd
 namespace cv
 {
+    AKAZE::AKAZE()
+        : descriptor(DESCRIPTOR_MLDB)
+        , descriptor_channels(3)
+        , descriptor_size(0)
+    {
+    }
-    AKAZE::AKAZE(int _descriptor, int _descriptor_size, int _descriptor_channels)
-        : descriptor_channels(_descriptor_channels)
-        , descriptor(_descriptor)
+    AKAZE::AKAZE(DESCRIPTOR_TYPE _descriptor, int _descriptor_size, int _descriptor_channels)
+        : descriptor(_descriptor)
+        , descriptor_channels(_descriptor_channels)
         , descriptor_size(_descriptor_size)
     {
@@ -70,12 +76,14 @@ namespace cv
     // returns the descriptor size in bytes
     int AKAZE::descriptorSize() const
     {
-        if (descriptor < MLDB_UPRIGHT)
+        switch (descriptor)
         {
+        case cv::AKAZE::DESCRIPTOR_KAZE:
+        case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT:
             return 64;
-        }
-        else
-        {
+        case cv::AKAZE::DESCRIPTOR_MLDB:
+        case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT:
             // We use the full length binary descriptor -> 486 bits
             if (descriptor_size == 0)
             {
@@ -87,32 +95,45 @@
                 // We use the random bit selection length binary descriptor
                 return (int)ceil(descriptor_size / 8.);
             }
+        default:
+            return -1;
         }
     }
     // returns the descriptor type
     int AKAZE::descriptorType() const
    {
-        if (descriptor < MLDB_UPRIGHT)
+        switch (descriptor)
         {
+        case cv::AKAZE::DESCRIPTOR_KAZE:
+        case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT:
             return CV_32F;
-        }
-        else
-        {
+        case cv::AKAZE::DESCRIPTOR_MLDB:
+        case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT:
             return CV_8U;
+        default:
+            return -1;
         }
     }
     // returns the default norm type
     int AKAZE::defaultNorm() const
     {
-        if (descriptor < MLDB_UPRIGHT)
-        {
-            return NORM_L2;
-        }
-        else
+        switch (descriptor)
         {
-            return NORM_HAMMING;
+        case cv::AKAZE::DESCRIPTOR_KAZE:
+        case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT:
+            return cv::NORM_L2;
+        case cv::AKAZE::DESCRIPTOR_MLDB:
+        case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT:
+            return cv::NORM_HAMMING;
+        default:
+            return -1;
         }
     }
@@ -132,6 +153,9 @@ namespace cv
         cv::Mat& desc = descriptors.getMatRef();
         AKAZEOptions options;
+        options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
+        options.descriptor_channels = descriptor_channels;
+        options.descriptor_size = descriptor_size;
         options.img_width = img.cols;
         options.img_height = img.rows;
@@ -164,6 +188,9 @@ namespace cv
         img.convertTo(img1_32, CV_32F, 1.0 / 255.0, 0);
         AKAZEOptions options;
+        options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
+        options.descriptor_channels = descriptor_channels;
+        options.descriptor_size = descriptor_size;
         options.img_width = img.cols;
         options.img_height = img.rows;
@@ -189,6 +216,9 @@ namespace cv
         cv::Mat& desc = descriptors.getMatRef();
         AKAZEOptions options;
+        options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
+        options.descriptor_channels = descriptor_channels;
+        options.descriptor_size = descriptor_size;
         options.img_width = img.cols;
         options.img_height = img.rows;
...
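A quick sanity check on the three accessors above (an illustrative sketch, not code from the commit; check_akaze_descriptor_traits is a hypothetical helper, and the 61-byte figure follows from the 486-bit full-length MLDB layout noted in the comment):

#include <opencv2/features2d.hpp>

// Illustrative sketch: the descriptor-dependent accessors drive matcher selection.
static void check_akaze_descriptor_traits()
{
    cv::AKAZE akaze(cv::AKAZE::DESCRIPTOR_MLDB);   // full-length binary MLDB, 3 channels
    CV_Assert(akaze.descriptorType() == CV_8U);    // per descriptorType() above
    CV_Assert(akaze.descriptorSize() == 61);       // 486 bits -> ceil(486 / 8.0) = 61 bytes
    cv::BFMatcher matcher(akaze.defaultNorm());    // NORM_HAMMING here, NORM_L2 for DESCRIPTOR_KAZE
    (void)matcher;
}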
@@ -10,6 +10,7 @@
 /* ************************************************************************* */
 // OpenCV
 #include "precomp.hpp"
+#include <opencv2/features2d.hpp>
 /* ************************************************************************* */
 /// Lookup table for 2d gaussian (sigma = 2.5) where (0,0) is top left and (6,6) is bottom right
@@ -24,28 +25,16 @@ const float gauss25[7][7] = {
 };
 /* ************************************************************************* */
-/// AKAZE Descriptor Type
-enum DESCRIPTOR_TYPE {
-    SURF_UPRIGHT = 0, ///< Upright descriptors, not invariant to rotation
-    SURF = 1,
-    MSURF_UPRIGHT = 2, ///< Upright descriptors, not invariant to rotation
-    MSURF = 3,
-    MLDB_UPRIGHT = 4, ///< Upright descriptors, not invariant to rotation
-    MLDB = 5
-};
-/* ************************************************************************* */
-/// AKAZE Diffusivities
-enum DIFFUSIVITY_TYPE {
+/// AKAZE configuration options structure
+struct AKAZEOptions {
+    /// AKAZE Diffusivities
+    enum DIFFUSIVITY_TYPE {
     PM_G1 = 0,
     PM_G2 = 1,
     WEICKERT = 2,
     CHARBONNIER = 3
 };
-/* ************************************************************************* */
-/// AKAZE configuration options structure
-struct AKAZEOptions {
     AKAZEOptions()
         : omax(4)
@@ -60,7 +49,7 @@ struct AKAZEOptions {
         , dthreshold(0.001f)
         , min_dthreshold(0.00001f)
-        , descriptor(MLDB)
+        , descriptor(cv::AKAZE::DESCRIPTOR_MLDB)
         , descriptor_size(0)
         , descriptor_channels(3)
         , descriptor_pattern_size(10)
@@ -83,7 +72,7 @@ struct AKAZEOptions {
     float dthreshold;                        ///< Detector response threshold to accept point
     float min_dthreshold;                    ///< Minimum detector threshold to accept a point
-    DESCRIPTOR_TYPE descriptor;              ///< Type of descriptor
+    cv::AKAZE::DESCRIPTOR_TYPE descriptor;   ///< Type of descriptor
     int descriptor_size;                     ///< Size of the descriptor in bits. 0->Full size
     int descriptor_channels;                 ///< Number of channels in the descriptor (1, 2, 3)
     int descriptor_pattern_size;             ///< Actual patch size is 2*pattern_size*point.scale
...
@@ -25,7 +25,8 @@ AKAZEFeatures::AKAZEFeatures(const AKAZEOptions& options) : options_(options) {
     ncycles_ = 0;
     reordering_ = true;
-    if (options_.descriptor_size > 0 && options_.descriptor >= MLDB_UPRIGHT) {
+    if (options_.descriptor_size > 0 && options_.descriptor >= cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT)
+    {
         generateDescriptorSubsample(descriptorSamples_, descriptorBits_, options_.descriptor_size,
             options_.descriptor_pattern_size, options_.descriptor_channels);
     }
@@ -124,16 +125,16 @@ int AKAZEFeatures::Create_Nonlinear_Scale_Space(const cv::Mat& img)
         // Compute the conductivity equation
         switch (options_.diffusivity) {
-        case PM_G1:
+        case AKAZEOptions::PM_G1:
             pm_g1(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
             break;
-        case PM_G2:
+        case AKAZEOptions::PM_G2:
             pm_g2(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
             break;
-        case WEICKERT:
+        case AKAZEOptions::WEICKERT:
            weickert_diffusivity(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
            break;
-        case CHARBONNIER:
+        case AKAZEOptions::CHARBONNIER:
            charbonnier_diffusivity(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options_.kcontrast);
            break;
        default:
@@ -255,11 +256,10 @@ void AKAZEFeatures::Find_Scale_Space_Extrema(std::vector<cv::KeyPoint>& kpts)
     vector<cv::KeyPoint> kpts_aux;
     // Set maximum size
-    if (options_.descriptor == SURF_UPRIGHT || options_.descriptor == SURF ||
-        options_.descriptor == MLDB_UPRIGHT || options_.descriptor == MLDB) {
+    if (options_.descriptor == cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT || options_.descriptor == cv::AKAZE::DESCRIPTOR_MLDB) {
         smax = 10.0f*sqrtf(2.0f);
     }
-    else if (options_.descriptor == MSURF_UPRIGHT || options_.descriptor == MSURF) {
+    else if (options_.descriptor == cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT || options_.descriptor == cv::AKAZE::DESCRIPTOR_KAZE) {
         smax = 12.0f*sqrtf(2.0f);
     }
@@ -684,7 +684,7 @@ private:
 void AKAZEFeatures::Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc)
 {
     // Allocate memory for the matrix with the descriptors
-    if (options_.descriptor < MLDB_UPRIGHT) {
+    if (options_.descriptor < cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT) {
         desc = cv::Mat::zeros((int)kpts.size(), 64, CV_32FC1);
     }
     else {
@@ -699,29 +699,19 @@ void AKAZEFeatures::Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat
         }
     }
-    switch (options_.descriptor) {
-    case SURF_UPRIGHT: // Upright descriptors, not invariant to rotation
-    {
-        cv::parallel_for_(cv::Range(0, (int)kpts.size()), SURF_Descriptor_Upright_64_Invoker(kpts, desc, evolution_));
-    }
-    break;
-    case SURF:
+    switch (options_.descriptor)
     {
-        cv::parallel_for_(cv::Range(0, (int)kpts.size()), SURF_Descriptor_64_Invoker(kpts, desc, evolution_));
-    }
-    break;
-    case MSURF_UPRIGHT: // Upright descriptors, not invariant to rotation
+    case cv::AKAZE::DESCRIPTOR_KAZE_UPRIGHT: // Upright descriptors, not invariant to rotation
     {
         cv::parallel_for_(cv::Range(0, (int)kpts.size()), MSURF_Upright_Descriptor_64_Invoker(kpts, desc, evolution_));
     }
     break;
-    case MSURF:
+    case cv::AKAZE::DESCRIPTOR_KAZE:
     {
         cv::parallel_for_(cv::Range(0, (int)kpts.size()), MSURF_Descriptor_64_Invoker(kpts, desc, evolution_));
     }
     break;
-    case MLDB_UPRIGHT: // Upright descriptors, not invariant to rotation
+    case cv::AKAZE::DESCRIPTOR_MLDB_UPRIGHT: // Upright descriptors, not invariant to rotation
     {
         if (options_.descriptor_size == 0)
             cv::parallel_for_(cv::Range(0, (int)kpts.size()), Upright_MLDB_Full_Descriptor_Invoker(kpts, desc, evolution_, options_));
@@ -729,7 +719,7 @@ void AKAZEFeatures::Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, cv::Mat
             cv::parallel_for_(cv::Range(0, (int)kpts.size()), Upright_MLDB_Descriptor_Subset_Invoker(kpts, desc, evolution_, options_, descriptorSamples_, descriptorBits_));
     }
     break;
-    case MLDB:
+    case cv::AKAZE::DESCRIPTOR_MLDB:
     {
         if (options_.descriptor_size == 0)
             cv::parallel_for_(cv::Range(0, (int)kpts.size()), MLDB_Full_Descriptor_Invoker(kpts, desc, evolution_, options_));
@@ -783,7 +773,7 @@ void AKAZEFeatures::Compute_Main_Orientation(cv::KeyPoint& kpt, const std::vecto
     // Loop slides pi/3 window around feature point
     for (ang1 = 0; ang1 < (float)(2.0 * CV_PI); ang1 += 0.15f) {
-        ang2 = (ang1 + (float)(CV_PI / 3.0) > (float)(2.0*CV_PI) ? ang1 - (float)(5.0*CV_PI / 3.0) : ang1 + (float)(CV_PI / 3.0));
+        ang2 = (ang1 + (float)(CV_PI / 3.0) >(float)(2.0*CV_PI) ? ang1 - (float)(5.0*CV_PI / 3.0) : ang1 + (float)(CV_PI / 3.0));
         sumX = sumY = 0.f;
         for (size_t k = 0; k < Ang.size(); ++k) {
@@ -812,195 +802,6 @@ void AKAZEFeatures::Compute_Main_Orientation(cv::KeyPoint& kpt, const std::vecto
     }
 }
/* ************************************************************************* */
/**
* @brief This method computes the upright descriptor of the provided keypoint
* @param kpt Input keypoint
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 64. No additional
* Gaussian weighting is performed. The descriptor is inspired from Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void SURF_Descriptor_Upright_64_Invoker::Get_SURF_Descriptor_Upright_64(const cv::KeyPoint& kpt, float *desc) const {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
float sample_x = 0.0, sample_y = 0.0;
float fx = 0.0, fy = 0.0, ratio = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int scale = 0, dsize = 0, level = 0;
const std::vector<TEvolution>& evolution = *evolution_;
// Set the descriptor size and the sample and pattern sizes
dsize = 64;
sample_step = 5;
pattern_size = 10;
// Get the information from the keypoint
ratio = (float)(1 << kpt.octave);
scale = fRound(0.5f*kpt.size / ratio);
level = kpt.class_id;
yf = kpt.pt.y / ratio;
xf = kpt.pt.x / ratio;
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dx = dy = mdx = mdy = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
// Get the coordinates of the sample point on the rotated axis
sample_y = yf + l*scale;
sample_x = xf + k*scale;
y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f);
y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Sum the derivatives to the cumulative descriptor
dx += rx;
dy += ry;
mdx += fabs(rx);
mdy += fabs(ry);
}
}
// Add the values to the descriptor vector
desc[dcount++] = dx;
desc[dcount++] = dy;
desc[dcount++] = mdx;
desc[dcount++] = mdy;
// Store the current length^2 of the vector
len += dx*dx + dy*dy + mdx*mdx + mdy*mdy;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
}
/* ************************************************************************* */
/**
* @brief This method computes the descriptor of the provided keypoint given the
* main orientation
* @param kpt Input keypoint
* @param desc Descriptor vector
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 64. No additional
* Gaussian weighting is performed. The descriptor is inspired from Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void SURF_Descriptor_64_Invoker::Get_SURF_Descriptor_64(const cv::KeyPoint& kpt, float *desc) const {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0;
float fx = 0.0, fy = 0.0, ratio = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int scale = 0, dsize = 0, level = 0;
// Set the descriptor size and the sample and pattern sizes
dsize = 64;
sample_step = 5;
pattern_size = 10;
const std::vector<TEvolution>& evolution = *evolution_;
// Get the information from the keypoint
ratio = (float)(1 << kpt.octave);
scale = fRound(0.5f*kpt.size / ratio);
angle = kpt.angle;
level = kpt.class_id;
yf = kpt.pt.y / ratio;
xf = kpt.pt.x / ratio;
co = cos(angle);
si = sin(angle);
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dx = dy = mdx = mdy = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
// Get the coordinates of the sample point on the rotated axis
sample_y = yf + (l*scale*co + k*scale*si);
sample_x = xf + (-l*scale*si + k*scale*co);
y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f);
y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Get the x and y derivatives on the rotated axis
rry = rx*co + ry*si;
rrx = -rx*si + ry*co;
// Sum the derivatives to the cumulative descriptor
dx += rrx;
dy += rry;
mdx += fabs(rrx);
mdy += fabs(rry);
}
}
// Add the values to the descriptor vector
desc[dcount++] = dx;
desc[dcount++] = dy;
desc[dcount++] = mdx;
desc[dcount++] = mdy;
// Store the current length^2 of the vector
len += dx*dx + dy*dy + mdx*mdx + mdy*mdy;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
}
 /* ************************************************************************* */
 /**
 * @brief This method computes the upright descriptor (not rotation invariant) of
@@ -2077,11 +1878,11 @@ inline float get_angle(float x, float y) {
   }
   if (x < 0 && y >= 0) {
-    return static_cast<float>(CV_PI) - atanf(-y / x);
+    return static_cast<float>(CV_PI)-atanf(-y / x);
   }
   if (x < 0 && y < 0) {
-    return static_cast<float>(CV_PI) + atanf(y / x);
+    return static_cast<float>(CV_PI)+atanf(y / x);
   }
   if (x >= 0 && y < 0) {
...
@@ -126,6 +126,8 @@ CV_INIT_ALGORITHM(GFTTDetector, "Feature2D.GFTT",
 ///////////////////////////////////////////////////////////////////////////////////////////////////////////
 CV_INIT_ALGORITHM(KAZE, "Feature2D.KAZE",
+                  obj.info()->addParam(obj, "descriptor", obj.descriptor);
+                  obj.info()->addParam(obj, "upright", obj.upright);
                   obj.info()->addParam(obj, "extended", obj.extended))
 ///////////////////////////////////////////////////////////////////////////////////////////////////////////
...
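The addParam() registrations above are what item 4 of the commit message refers to. As a rough sketch (assuming the Algorithm/info() factory that CV_INIT_ALGORITHM plugs into at this point in OpenCV's history; configure_kaze_by_name is a hypothetical helper), the new fields then become reachable by name at runtime:

#include <opencv2/features2d.hpp>

// Illustrative sketch: runtime configuration through the generic Algorithm
// interface, using the parameter names registered in the hunk above.
static void configure_kaze_by_name()
{
    cv::Ptr<cv::Feature2D> kaze = cv::Algorithm::create<cv::Feature2D>("Feature2D.KAZE");
    kaze->set("descriptor", (int)cv::KAZE::DESCRIPTOR_GSURF);
    kaze->set("upright", true);
    kaze->set("extended", false);
}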
@@ -52,11 +52,20 @@ http://www.robesafe.com/personal/pablo.alcantarilla/papers/Alcantarilla12eccv.pd
 namespace cv
 {
-    KAZE::KAZE(bool _extended /* = false */)
-        : extended(_extended)
+    KAZE::KAZE()
+        : descriptor(DESCRIPTOR_MSURF)
+        , extended(false)
+        , upright(false)
     {
     }
+    KAZE::KAZE(DESCRIPTOR_TYPE type, bool _extended, bool _upright)
+        : descriptor(type)
+        , extended(_extended)
+        , upright(_upright)
+    {
+    }
     KAZE::~KAZE()
     {
@@ -102,7 +111,9 @@ namespace cv
         KAZEOptions options;
         options.img_width = img.cols;
         options.img_height = img.rows;
+        options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
         options.extended = extended;
+        options.upright = upright;
         KAZEFeatures impl(options);
         impl.Create_Nonlinear_Scale_Space(img1_32);
@@ -135,7 +146,9 @@ namespace cv
         KAZEOptions options;
         options.img_width = img.cols;
         options.img_height = img.rows;
+        options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
         options.extended = extended;
+        options.upright = upright;
         KAZEFeatures impl(options);
         impl.Create_Nonlinear_Scale_Space(img1_32);
@@ -161,7 +174,9 @@ namespace cv
         KAZEOptions options;
         options.img_width = img.cols;
         options.img_height = img.rows;
+        options.descriptor = static_cast<DESCRIPTOR_TYPE>(descriptor);
         options.extended = extended;
+        options.upright = upright;
         KAZEFeatures impl(options);
         impl.Create_Nonlinear_Scale_Space(img1_32);
...
@@ -5,74 +5,68 @@
 * @author Pablo F. Alcantarilla
 */
-#ifndef __OPENCV_FEATURES_2D_KAZE_CONFIG_HPP__
-#define __OPENCV_FEATURES_2D_KAZE_CONFIG_HPP__
-//******************************************************************************
-//******************************************************************************
+#pragma once
 // OpenCV Includes
 #include "precomp.hpp"
+#include <opencv2/features2d.hpp>
-//*************************************************************************************
 //*************************************************************************************
-// Some defines
-#define NMAX_CHAR 400
-// Some default options
-static const float DEFAULT_SCALE_OFFSET = 1.60f;              // Base scale offset (sigma units)
-static const float DEFAULT_OCTAVE_MAX = 4.0f;                 // Maximum octave evolution of the image 2^sigma (coarsest scale sigma units)
-static const int DEFAULT_NSUBLEVELS = 4;                      // Default number of sublevels per scale level
-static const float DEFAULT_DETECTOR_THRESHOLD = 0.001f;       // Detector response threshold to accept point
-static const float DEFAULT_MIN_DETECTOR_THRESHOLD = 0.00001f; // Minimum Detector response threshold to accept point
-static const int DEFAULT_DESCRIPTOR_MODE = 1;                 // Descriptor Mode 0->SURF, 1->M-SURF
-static const bool DEFAULT_USE_FED = true;                     // 0->AOS, 1->FED
-static const bool DEFAULT_UPRIGHT = false;                    // Upright descriptors, not invariant to rotation
-static const bool DEFAULT_EXTENDED = false;                   // Extended descriptor, dimension 128
-// Some important configuration variables
-static const float DEFAULT_SIGMA_SMOOTHING_DERIVATIVES = 1.0f;
-static const float DEFAULT_KCONTRAST = 0.01f;
-static const float KCONTRAST_PERCENTILE = 0.7f;
-static const int KCONTRAST_NBINS = 300;
-static const bool COMPUTE_KCONTRAST = true;
-static const int DEFAULT_DIFFUSIVITY_TYPE = 1; // 0 -> PM G1, 1 -> PM G2, 2 -> Weickert
-static const bool USE_CLIPPING_NORMALIZATION = false;
-static const float CLIPPING_NORMALIZATION_RATIO = 1.6f;
-static const int CLIPPING_NORMALIZATION_NITER = 5;
-//*************************************************************************************
-//*************************************************************************************
-struct KAZEOptions {
-    KAZEOptions() {
-        // Load the default options
-        soffset = DEFAULT_SCALE_OFFSET;
-        omax = static_cast<int>(DEFAULT_OCTAVE_MAX);
-        nsublevels = DEFAULT_NSUBLEVELS;
-        dthreshold = DEFAULT_DETECTOR_THRESHOLD;
-        use_fed = DEFAULT_USE_FED;
-        upright = DEFAULT_UPRIGHT;
-        extended = DEFAULT_EXTENDED;
-        descriptor = DEFAULT_DESCRIPTOR_MODE;
-        diffusivity = DEFAULT_DIFFUSIVITY_TYPE;
-        sderivatives = DEFAULT_SIGMA_SMOOTHING_DERIVATIVES;
+struct KAZEOptions {
+    enum DIFFUSIVITY_TYPE {
+        PM_G1 = 0,
+        PM_G2 = 1,
+        WEICKERT = 2
+    };
+    KAZEOptions()
+        : descriptor(cv::KAZE::DESCRIPTOR_MSURF)
+        , diffusivity(PM_G2)
+        , soffset(1.60f)
+        , omax(4)
+        , nsublevels(4)
+        , img_width(0)
+        , img_height(0)
+        , sderivatives(1.0f)
+        , dthreshold(0.001f)
+        , kcontrast(0.01f)
+        , kcontrast_percentille(0.7f)
+        , kcontrast_bins(300)
+        , use_fed(true)
+        , upright(false)
+        , extended(false)
+        , use_clipping_normalilzation(false)
+        , clipping_normalization_ratio(1.6f)
+        , clipping_normalization_niter(5)
+    {
     }
+    cv::KAZE::DESCRIPTOR_TYPE descriptor;
+    DIFFUSIVITY_TYPE diffusivity;
     float soffset;
     int omax;
     int nsublevels;
     int img_width;
     int img_height;
-    int diffusivity;
     float sderivatives;
     float dthreshold;
+    float kcontrast;
+    float kcontrast_percentille;
+    int kcontrast_bins;
     bool use_fed;
     bool upright;
     bool extended;
-    int descriptor;
+    bool use_clipping_normalilzation;
+    float clipping_normalization_ratio;
+    int clipping_normalization_niter;
 };
 struct TEvolution {
@@ -89,8 +83,3 @@ struct TEvolution {
     float sublevel;  // Image sublevel in each octave
     int sigma_size;  // Integer esigma. For computing the feature detector responses
 };
-//*************************************************************************************
-//*************************************************************************************
-#endif
\ No newline at end of file
@@ -36,31 +36,11 @@ using namespace cv::details::kaze;
 * @param options KAZE configuration options
 * @note The constructor allocates memory for the nonlinear scale space
 */
-KAZEFeatures::KAZEFeatures(KAZEOptions& options) {
-    soffset_ = options.soffset;
-    sderivatives_ = options.sderivatives;
-    omax_ = options.omax;
-    nsublevels_ = options.nsublevels;
-    img_width_ = options.img_width;
-    img_height_ = options.img_height;
-    dthreshold_ = options.dthreshold;
-    diffusivity_ = options.diffusivity;
-    descriptor_mode_ = options.descriptor;
-    use_fed_ = options.use_fed;
-    use_upright_ = options.upright;
-    use_extended_ = options.extended;
-    use_normalization = USE_CLIPPING_NORMALIZATION;
-    kcontrast_ = DEFAULT_KCONTRAST;
+KAZEFeatures::KAZEFeatures(KAZEOptions& _options)
+    : options(_options)
+{
     ncycles_ = 0;
     reordering_ = true;
-    //tkcontrast_ = 0.0;
-    //tnlscale_ = 0.0;
-    //tdetector_ = 0.0;
-    //tmderivatives_ = 0.0;
-    //tdresponse_ = 0.0;
-    //tdescriptor_ = 0.0;
     // Now allocate memory for the evolution
     Allocate_Memory_Evolution();
...@@ -75,21 +55,21 @@ KAZEFeatures::KAZEFeatures(KAZEOptions& options) { ...@@ -75,21 +55,21 @@ KAZEFeatures::KAZEFeatures(KAZEOptions& options) {
void KAZEFeatures::Allocate_Memory_Evolution(void) { void KAZEFeatures::Allocate_Memory_Evolution(void) {
// Allocate the dimension of the matrices for the evolution // Allocate the dimension of the matrices for the evolution
for (int i = 0; i <= omax_ - 1; i++) { for (int i = 0; i <= options.omax - 1; i++) {
for (int j = 0; j <= nsublevels_ - 1; j++) { for (int j = 0; j <= options.nsublevels - 1; j++) {
TEvolution aux; TEvolution aux;
aux.Lx = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lx = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Ly = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Ly = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lxx = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lxx = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lxy = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lxy = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lyy = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lyy = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lflow = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lflow = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lt = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lt = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lsmooth = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lsmooth = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Lstep = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Lstep = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.Ldet = cv::Mat::zeros(img_height_, img_width_, CV_32F); aux.Ldet = cv::Mat::zeros(options.img_height, options.img_width, CV_32F);
aux.esigma = soffset_*pow((float)2.0f, (float)(j) / (float)(nsublevels_)+i); aux.esigma = options.soffset*pow((float)2.0f, (float)(j) / (float)(options.nsublevels)+i);
aux.etime = 0.5f*(aux.esigma*aux.esigma); aux.etime = 0.5f*(aux.esigma*aux.esigma);
aux.sigma_size = fRound(aux.esigma); aux.sigma_size = fRound(aux.esigma);
aux.octave = (float)i; aux.octave = (float)i;
...@@ -99,7 +79,7 @@ void KAZEFeatures::Allocate_Memory_Evolution(void) { ...@@ -99,7 +79,7 @@ void KAZEFeatures::Allocate_Memory_Evolution(void) {
} }
// Allocate memory for the FED number of cycles and time steps // Allocate memory for the FED number of cycles and time steps
if (use_fed_) { if (options.use_fed) {
for (size_t i = 1; i < evolution_.size(); i++) { for (size_t i = 1; i < evolution_.size(); i++) {
int naux = 0; int naux = 0;
vector<float> tau; vector<float> tau;
...@@ -113,16 +93,16 @@ void KAZEFeatures::Allocate_Memory_Evolution(void) { ...@@ -113,16 +93,16 @@ void KAZEFeatures::Allocate_Memory_Evolution(void) {
} }
else { else {
// Allocate memory for the auxiliary variables that are used in the AOS scheme // Allocate memory for the auxiliary variables that are used in the AOS scheme
Ltx_ = Mat::zeros(img_width_, img_height_, CV_32F); Ltx_ = Mat::zeros(options.img_width, options.img_height, CV_32F); // TODO? IS IT A BUG???
Lty_ = Mat::zeros(img_height_, img_width_, CV_32F); Lty_ = Mat::zeros(options.img_height, options.img_width, CV_32F);
px_ = Mat::zeros(img_height_, img_width_, CV_32F); px_ = Mat::zeros(options.img_height, options.img_width, CV_32F);
py_ = Mat::zeros(img_height_, img_width_, CV_32F); py_ = Mat::zeros(options.img_height, options.img_width, CV_32F);
ax_ = Mat::zeros(img_height_, img_width_, CV_32F); ax_ = Mat::zeros(options.img_height, options.img_width, CV_32F);
ay_ = Mat::zeros(img_height_, img_width_, CV_32F); ay_ = Mat::zeros(options.img_height, options.img_width, CV_32F);
bx_ = Mat::zeros(img_height_ - 1, img_width_, CV_32F); bx_ = Mat::zeros(options.img_height - 1, options.img_width, CV_32F);
by_ = Mat::zeros(img_height_ - 1, img_width_, CV_32F); by_ = Mat::zeros(options.img_height - 1, options.img_width, CV_32F);
qr_ = Mat::zeros(img_height_ - 1, img_width_, CV_32F); qr_ = Mat::zeros(options.img_height - 1, options.img_width, CV_32F);
qc_ = Mat::zeros(img_height_, img_width_ - 1, CV_32F); qc_ = Mat::zeros(options.img_height, options.img_width - 1, CV_32F);
} }
} }
...@@ -141,35 +121,35 @@ int KAZEFeatures::Create_Nonlinear_Scale_Space(const cv::Mat &img) ...@@ -141,35 +121,35 @@ int KAZEFeatures::Create_Nonlinear_Scale_Space(const cv::Mat &img)
// Copy the original image to the first level of the evolution // Copy the original image to the first level of the evolution
img.copyTo(evolution_[0].Lt); img.copyTo(evolution_[0].Lt);
gaussian_2D_convolution(evolution_[0].Lt, evolution_[0].Lt, 0, 0, soffset_); gaussian_2D_convolution(evolution_[0].Lt, evolution_[0].Lt, 0, 0, options.soffset);
gaussian_2D_convolution(evolution_[0].Lt, evolution_[0].Lsmooth, 0, 0, sderivatives_); gaussian_2D_convolution(evolution_[0].Lt, evolution_[0].Lsmooth, 0, 0, options.sderivatives);
// Firstly compute the kcontrast factor // Firstly compute the kcontrast factor
Compute_KContrast(evolution_[0].Lt, KCONTRAST_PERCENTILE); Compute_KContrast(evolution_[0].Lt, options.kcontrast_percentille);
// Now generate the rest of evolution levels // Now generate the rest of evolution levels
for (size_t i = 1; i < evolution_.size(); i++) { for (size_t i = 1; i < evolution_.size(); i++) {
evolution_[i - 1].Lt.copyTo(evolution_[i].Lt); evolution_[i - 1].Lt.copyTo(evolution_[i].Lt);
gaussian_2D_convolution(evolution_[i - 1].Lt, evolution_[i].Lsmooth, 0, 0, sderivatives_); gaussian_2D_convolution(evolution_[i - 1].Lt, evolution_[i].Lsmooth, 0, 0, options.sderivatives);
// Compute the Gaussian derivatives Lx and Ly // Compute the Gaussian derivatives Lx and Ly
Scharr(evolution_[i].Lsmooth, evolution_[i].Lx, CV_32F, 1, 0, 1, 0, BORDER_DEFAULT); Scharr(evolution_[i].Lsmooth, evolution_[i].Lx, CV_32F, 1, 0, 1, 0, BORDER_DEFAULT);
Scharr(evolution_[i].Lsmooth, evolution_[i].Ly, CV_32F, 0, 1, 1, 0, BORDER_DEFAULT); Scharr(evolution_[i].Lsmooth, evolution_[i].Ly, CV_32F, 0, 1, 1, 0, BORDER_DEFAULT);
// Compute the conductivity equation // Compute the conductivity equation
if (diffusivity_ == 0) { if (options.diffusivity == KAZEOptions::PM_G1) {
pm_g1(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, kcontrast_); pm_g1(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options.kcontrast);
} }
else if (diffusivity_ == 1) { else if (options.diffusivity == KAZEOptions::PM_G2) {
pm_g2(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, kcontrast_); pm_g2(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options.kcontrast);
} }
else if (diffusivity_ == 2) { else if (options.diffusivity == KAZEOptions::WEICKERT) {
weickert_diffusivity(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, kcontrast_); weickert_diffusivity(evolution_[i].Lx, evolution_[i].Ly, evolution_[i].Lflow, options.kcontrast);
} }
// Perform FED n inner steps // Perform FED n inner steps
if (use_fed_) { if (options.use_fed) {
for (int j = 0; j < nsteps_[i - 1]; j++) { for (int j = 0; j < nsteps_[i - 1]; j++) {
nld_step_scalar(evolution_[i].Lt, evolution_[i].Lflow, evolution_[i].Lstep, tsteps_[i - 1][j]); nld_step_scalar(evolution_[i].Lt, evolution_[i].Lflow, evolution_[i].Lstep, tsteps_[i - 1][j]);
} }
...@@ -194,7 +174,7 @@ int KAZEFeatures::Create_Nonlinear_Scale_Space(const cv::Mat &img) ...@@ -194,7 +174,7 @@ int KAZEFeatures::Create_Nonlinear_Scale_Space(const cv::Mat &img)
*/ */
void KAZEFeatures::Compute_KContrast(const cv::Mat &img, const float &kpercentile) void KAZEFeatures::Compute_KContrast(const cv::Mat &img, const float &kpercentile)
{ {
kcontrast_ = compute_k_percentile(img, kpercentile, sderivatives_, KCONTRAST_NBINS, 0, 0); options.kcontrast = compute_k_percentile(img, kpercentile, options.sderivatives, options.kcontrast_bins, 0, 0);
} }
//************************************************************************************* //*************************************************************************************
...@@ -239,9 +219,9 @@ void KAZEFeatures::Compute_Detector_Response(void) ...@@ -239,9 +219,9 @@ void KAZEFeatures::Compute_Detector_Response(void)
for (size_t i = 0; i < evolution_.size(); i++) for (size_t i = 0; i < evolution_.size(); i++)
{ {
for (int ix = 0; ix < img_height_; ix++) for (int ix = 0; ix < options.img_height; ix++)
{ {
for (int jx = 0; jx < img_width_; jx++) for (int jx = 0; jx < options.img_width; jx++)
{ {
lxx = *(evolution_[i].Lxx.ptr<float>(ix)+jx); lxx = *(evolution_[i].Lxx.ptr<float>(ix)+jx);
lxy = *(evolution_[i].Lxy.ptr<float>(ix)+jx); lxy = *(evolution_[i].Lxy.ptr<float>(ix)+jx);
...@@ -376,14 +356,14 @@ void KAZEFeatures::Find_Extremum_Threading(const int& level) { ...@@ -376,14 +356,14 @@ void KAZEFeatures::Find_Extremum_Threading(const int& level) {
float value = 0.0; float value = 0.0;
bool is_extremum = false; bool is_extremum = false;
for (int ix = 1; ix < img_height_ - 1; ix++) { for (int ix = 1; ix < options.img_height - 1; ix++) {
for (int jx = 1; jx < img_width_ - 1; jx++) { for (int jx = 1; jx < options.img_width - 1; jx++) {
is_extremum = false; is_extremum = false;
value = *(evolution_[level].Ldet.ptr<float>(ix)+jx); value = *(evolution_[level].Ldet.ptr<float>(ix)+jx);
// Filter the points with the detector threshold // Filter the points with the detector threshold
if (value > dthreshold_ && value >= DEFAULT_MIN_DETECTOR_THRESHOLD) { if (value > options.dthreshold) {
if (value >= *(evolution_[level].Ldet.ptr<float>(ix)+jx - 1)) { if (value >= *(evolution_[level].Ldet.ptr<float>(ix)+jx - 1)) {
// First check on the same scale // First check on the same scale
if (check_maximum_neighbourhood(evolution_[level].Ldet, 1, value, ix, jx, 1)) { if (check_maximum_neighbourhood(evolution_[level].Ldet, 1, value, ix, jx, 1)) {
...@@ -495,10 +475,10 @@ void KAZEFeatures::Do_Subpixel_Refinement(std::vector<cv::KeyPoint> &kpts) { ...@@ -495,10 +475,10 @@ void KAZEFeatures::Do_Subpixel_Refinement(std::vector<cv::KeyPoint> &kpts) {
if (fabs(*(dst.ptr<float>(0))) <= 1.0f && fabs(*(dst.ptr<float>(1))) <= 1.0f && fabs(*(dst.ptr<float>(2))) <= 1.0f) { if (fabs(*(dst.ptr<float>(0))) <= 1.0f && fabs(*(dst.ptr<float>(1))) <= 1.0f && fabs(*(dst.ptr<float>(2))) <= 1.0f) {
kpts_[i].pt.x += *(dst.ptr<float>(0)); kpts_[i].pt.x += *(dst.ptr<float>(0));
kpts_[i].pt.y += *(dst.ptr<float>(1)); kpts_[i].pt.y += *(dst.ptr<float>(1));
dsc = kpts_[i].octave + (kpts_[i].angle + *(dst.ptr<float>(2))) / ((float)(nsublevels_)); dsc = kpts_[i].octave + (kpts_[i].angle + *(dst.ptr<float>(2))) / ((float)(options.nsublevels));
// In OpenCV the size of a keypoint is the diameter!! // In OpenCV the size of a keypoint is the diameter!!
kpts_[i].size = 2.0f*soffset_*pow((float)2.0f, dsc); kpts_[i].size = 2.0f*options.soffset*pow((float)2.0f, dsc);
kpts_[i].angle = 0.0; kpts_[i].angle = 0.0;
} }
// Set the points to be deleted after the for loop // Set the points to be deleted after the for loop
...@@ -520,142 +500,142 @@ void KAZEFeatures::Do_Subpixel_Refinement(std::vector<cv::KeyPoint> &kpts) { ...@@ -520,142 +500,142 @@ void KAZEFeatures::Do_Subpixel_Refinement(std::vector<cv::KeyPoint> &kpts) {
//************************************************************************************* //*************************************************************************************
//************************************************************************************* //*************************************************************************************
/** class MSURF_Descriptor_Invoker : public cv::ParallelLoopBody
* @brief This method computes the set of descriptors through the nonlinear scale space
* @param kpts Vector of keypoints
* @param desc Matrix with the feature descriptors
*/
void KAZEFeatures::Feature_Description(std::vector<cv::KeyPoint> &kpts, cv::Mat &desc)
{ {
// Allocate memory for the matrix of descriptors public:
if (use_extended_ == true) { MSURF_Descriptor_Invoker(std::vector<cv::KeyPoint> &kpts, cv::Mat &desc, std::vector<TEvolution>& evolution, const KAZEOptions& _options)
desc = Mat::zeros((int)kpts.size(), 128, CV_32FC1); : _kpts(&kpts)
, _desc(&desc)
, _evolution(&evolution)
, options(_options)
{
} }
else {
desc = Mat::zeros((int)kpts.size(), 64, CV_32FC1); virtual ~MSURF_Descriptor_Invoker()
{
} }
if (use_upright_ == true) { void operator() (const cv::Range& range) const
if (use_extended_ == false) { {
if (descriptor_mode_ == 0) { std::vector<cv::KeyPoint> &kpts = *_kpts;
#ifdef _OPENMP cv::Mat &desc = *_desc;
#pragma omp parallel for std::vector<TEvolution> &evolution = *_evolution;
#endif
for (size_t i = 0; i < kpts.size(); i++) { for (int i = range.start; i < range.end; i++)
{
kpts[i].angle = 0.0; kpts[i].angle = 0.0;
Get_SURF_Upright_Descriptor_64(kpts[i], desc.ptr<float>((int)i)); if (options.upright)
} {
}
else if (descriptor_mode_ == 1) {
#ifdef _OPENMP
#pragma omp parallel for
#endif
for (size_t i = 0; i < kpts.size(); i++) {
kpts[i].angle = 0.0; kpts[i].angle = 0.0;
if (options.extended)
Get_MSURF_Upright_Descriptor_128(kpts[i], desc.ptr<float>((int)i));
else
Get_MSURF_Upright_Descriptor_64(kpts[i], desc.ptr<float>((int)i)); Get_MSURF_Upright_Descriptor_64(kpts[i], desc.ptr<float>((int)i));
} }
} else
else if (descriptor_mode_ == 2) { {
#ifdef _OPENMP KAZEFeatures::Compute_Main_Orientation(kpts[i], evolution, options);
#pragma omp parallel for
#endif if (options.extended)
for (size_t i = 0; i < kpts.size(); i++) { Get_MSURF_Descriptor_128(kpts[i], desc.ptr<float>((int)i));
kpts[i].angle = 0.0; else
Get_GSURF_Upright_Descriptor_64(kpts[i], desc.ptr<float>((int)i)); Get_MSURF_Descriptor_64(kpts[i], desc.ptr<float>((int)i));
} }
} }
} }
else private:
void Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc) const;
void Get_MSURF_Descriptor_64(const cv::KeyPoint& kpt, float* desc) const;
void Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc) const;
void Get_MSURF_Descriptor_128(const cv::KeyPoint& kpt, float *desc) const;
std::vector<cv::KeyPoint> * _kpts;
cv::Mat * _desc;
std::vector<TEvolution> * _evolution;
KAZEOptions options;
};
class GSURF_Descriptor_Invoker : public cv::ParallelLoopBody
{
public:
GSURF_Descriptor_Invoker(std::vector<cv::KeyPoint> &kpts, cv::Mat &desc, std::vector<TEvolution>& evolution, const KAZEOptions& _options)
: _kpts(&kpts)
, _desc(&desc)
, _evolution(&evolution)
, options(_options)
{ {
if (descriptor_mode_ == 0) {
#ifdef _OPENMP
#pragma omp parallel for
#endif
for (size_t i = 0; i < kpts.size(); i++) {
kpts[i].angle = 0.0;
Get_SURF_Upright_Descriptor_128(kpts[i], desc.ptr<float>((int)i));
} }
virtual ~GSURF_Descriptor_Invoker()
{
} }
else if (descriptor_mode_ == 1) {
#ifdef _OPENMP void operator() (const cv::Range& range) const
#pragma omp parallel for {
#endif std::vector<cv::KeyPoint> &kpts = *_kpts;
for (size_t i = 0; i < kpts.size(); i++) { cv::Mat &desc = *_desc;
std::vector<TEvolution> &evolution = *_evolution;
for (int i = range.start; i < range.end; i++)
{
kpts[i].angle = 0.0; kpts[i].angle = 0.0;
Get_MSURF_Upright_Descriptor_128(kpts[i], desc.ptr<float>((int)i)); if (options.upright)
} {
}
else if (descriptor_mode_ == 2) {
#ifdef _OPENMP
#pragma omp parallel for
#endif
for (size_t i = 0; i < kpts.size(); i++) {
kpts[i].angle = 0.0; kpts[i].angle = 0.0;
if (options.extended)
Get_GSURF_Upright_Descriptor_128(kpts[i], desc.ptr<float>((int)i)); Get_GSURF_Upright_Descriptor_128(kpts[i], desc.ptr<float>((int)i));
else
Get_GSURF_Upright_Descriptor_64(kpts[i], desc.ptr<float>((int)i));
} }
} else
} {
} KAZEFeatures::Compute_Main_Orientation(kpts[i], evolution, options);
else {
if (use_extended_ == false) { if (options.extended)
if (descriptor_mode_ == 0) { Get_GSURF_Descriptor_128(kpts[i], desc.ptr<float>((int)i));
#ifdef _OPENMP else
#pragma omp parallel for
#endif
for (size_t i = 0; i < kpts.size(); i++) {
Compute_Main_Orientation_SURF(kpts[i]);
Get_SURF_Descriptor_64(kpts[i], desc.ptr<float>((int)i));
}
}
else if (descriptor_mode_ == 1) {
#ifdef _OPENMP
#pragma omp parallel for
#endif
for (size_t i = 0; i < kpts.size(); i++) {
Compute_Main_Orientation_SURF(kpts[i]);
Get_MSURF_Descriptor_64(kpts[i], desc.ptr<float>((int)i));
}
}
else if (descriptor_mode_ == 2) {
#ifdef _OPENMP
#pragma omp parallel for
#endif
for (size_t i = 0; i < kpts.size(); i++) {
Compute_Main_Orientation_SURF(kpts[i]);
Get_GSURF_Descriptor_64(kpts[i], desc.ptr<float>((int)i)); Get_GSURF_Descriptor_64(kpts[i], desc.ptr<float>((int)i));
} }
} }
} }
else {
if (descriptor_mode_ == 0) { private:
#ifdef _OPENMP void Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc) const;
#pragma omp parallel for void Get_GSURF_Descriptor_64(const cv::KeyPoint& kpt, float *desc) const;
#endif void Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc) const;
for (size_t i = 0; i < kpts.size(); i++) { void Get_GSURF_Descriptor_128(const cv::KeyPoint& kpt, float* desc) const;
Compute_Main_Orientation_SURF(kpts[i]);
Get_SURF_Descriptor_128(kpts[i], desc.ptr<float>((int)i)); std::vector<cv::KeyPoint> * _kpts;
} cv::Mat * _desc;
} std::vector<TEvolution> * _evolution;
else if (descriptor_mode_ == 1) { KAZEOptions options;
#ifdef _OPENMP };
#pragma omp parallel for
#endif /**
for (size_t i = 0; i < kpts.size(); i++) { * @brief This method computes the set of descriptors through the nonlinear scale space
Compute_Main_Orientation_SURF(kpts[i]); * @param kpts Vector of keypoints
Get_MSURF_Descriptor_128(kpts[i], desc.ptr<float>((int)i)); * @param desc Matrix with the feature descriptors
} */
} void KAZEFeatures::Feature_Description(std::vector<cv::KeyPoint> &kpts, cv::Mat &desc)
else if (descriptor_mode_ == 2) { {
#ifdef _OPENMP // Allocate memory for the matrix of descriptors
#pragma omp parallel for if (options.extended == true) {
#endif desc = Mat::zeros((int)kpts.size(), 128, CV_32FC1);
for (size_t i = 0; i < kpts.size(); i++) {
Compute_Main_Orientation_SURF(kpts[i]);
Get_GSURF_Descriptor_128(kpts[i], desc.ptr<float>((int)i));
}
}
} }
else {
desc = Mat::zeros((int)kpts.size(), 64, CV_32FC1);
} }
switch (options.descriptor)
{
case cv::KAZE::DESCRIPTOR_MSURF:
cv::parallel_for_(cv::Range(0, (int)kpts.size()), MSURF_Descriptor_Invoker(kpts, desc, evolution_, options));
break;
case cv::KAZE::DESCRIPTOR_GSURF:
cv::parallel_for_(cv::Range(0, (int)kpts.size()), GSURF_Descriptor_Invoker(kpts, desc, evolution_, options));
break;
};
} }
 //*************************************************************************************
@@ -667,7 +647,7 @@ void KAZEFeatures::Feature_Description(std::vector<cv::KeyPoint> &kpts, cv::Mat
 * @note The orientation is computed using a similar approach as described in the
 * original SURF method. See Bay et al., Speeded Up Robust Features, ECCV 2006
 */
-void KAZEFeatures::Compute_Main_Orientation_SURF(cv::KeyPoint &kpt)
+void KAZEFeatures::Compute_Main_Orientation(cv::KeyPoint &kpt, const std::vector<TEvolution>& evolution_, const KAZEOptions& options)
 {
     int ix = 0, iy = 0, idx = 0, s = 0, level = 0;
     float xf = 0.0, yf = 0.0, gweight = 0.0;
@@ -689,7 +669,7 @@ void KAZEFeatures::Compute_Main_Orientation_SURF(cv::KeyPoint &kpt)
             iy = fRound(yf + j*s);
             ix = fRound(xf + i*s);
-            if (iy >= 0 && iy < img_height_ && ix >= 0 && ix < img_width_) {
+            if (iy >= 0 && iy < options.img_height && ix >= 0 && ix < options.img_width) {
                 gweight = gaussian(iy - yf, ix - xf, 2.5f*s);
                 resX[idx] = gweight*(*(evolution_[level].Lx.ptr<float>(iy)+ix));
                 resY[idx] = gweight*(*(evolution_[level].Ly.ptr<float>(iy)+ix));
@@ -739,212 +719,6 @@ void KAZEFeatures::Compute_Main_Orientation_SURF(cv::KeyPoint &kpt)
 //*************************************************************************************
 //*************************************************************************************
/**
* @brief This method computes the upright descriptor (no rotation invariant)
* of the provided keypoint
* @param kpt Input keypoint
* @param desc Descriptor vector
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 64. No additional
* Gaussian weighting is performed. The descriptor is inspired from Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void KAZEFeatures::Get_SURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
{
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, sample_x = 0.0, sample_y = 0.0;
float fx = 0.0, fy = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0;
// Set the descriptor size and the sample and pattern sizes
dsize = 64;
sample_step = 5;
pattern_size = 10;
// Get the information from the keypoint
yf = kpt.pt.y;
xf = kpt.pt.x;
level = kpt.class_id;
scale = fRound(kpt.size / 2.0f);
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dx = dy = mdx = mdy = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
sample_y = k*scale + yf;
sample_x = l*scale + xf;
y1 = (int)(sample_y - .5f);
x1 = (int)(sample_x - .5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_);
y2 = (int)(sample_y + .5f);
x2 = (int)(sample_x + .5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution_[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution_[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Sum the derivatives to the cumulative descriptor
dx += rx;
dy += ry;
mdx += fabs(rx);
mdy += fabs(ry);
}
}
// Add the values to the descriptor vector
desc[dcount++] = dx;
desc[dcount++] = dy;
desc[dcount++] = mdx;
desc[dcount++] = mdy;
// Store the current length^2 of the vector
len += dx*dx + dy*dy + mdx*mdx + mdy*mdy;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
if (use_normalization == true) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO);
}
}
//*************************************************************************************
//*************************************************************************************
/**
* @brief This method computes the descriptor of the provided keypoint given the
* main orientation
* @param kpt Input keypoint
* @param desc Descriptor vector
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 64. No additional
* Gaussian weighting is performed. The descriptor is inspired from Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void KAZEFeatures::Get_SURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0;
float fx = 0.0, fy = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0;
// Set the descriptor size and the sample and pattern sizes
dsize = 64;
sample_step = 5;
pattern_size = 10;
// Get the information from the keypoint
yf = kpt.pt.y;
xf = kpt.pt.x;
scale = fRound(kpt.size / 2.0f);
angle = kpt.angle;
level = kpt.class_id;
co = cos(angle);
si = sin(angle);
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dx = dy = mdx = mdy = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
// Get the coordinates of the sample point on the rotated axis
sample_y = yf + (l*scale*co + k*scale*si);
sample_x = xf + (-l*scale*si + k*scale*co);
y1 = (int)(sample_y - .5f);
x1 = (int)(sample_x - .5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_);
y2 = (int)(sample_y + .5f);
x2 = (int)(sample_x + .5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution_[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution_[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Get the x and y derivatives on the rotated axis
rry = rx*co + ry*si;
rrx = -rx*si + ry*co;
// Sum the derivatives to the cumulative descriptor
dx += rrx;
dy += rry;
mdx += fabs(rrx);
mdy += fabs(rry);
}
}
// Add the values to the descriptor vector
desc[dcount++] = dx;
desc[dcount++] = dy;
desc[dcount++] = mdx;
desc[dcount++] = mdy;
// Store the current length^2 of the vector
len += dx*dx + dy*dy + mdx*mdx + mdy*mdy;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
if (use_normalization == true) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO);
}
}
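
The only difference from the upright variant above is that both the sampling-grid offsets (l, k) and the measured gradient (rx, ry) are rotated by the keypoint orientation before accumulation. A small stand-alone sketch of those two rotations follows; the struct and function names are illustrative only.

// co = cos(angle), si = sin(angle), taken from the keypoint orientation.
struct Vec2f2 { float x, y; };

// Rotate grid offsets (l, k), measured in units of 'scale', around (xf, yf).
static Vec2f2 rotateSample(float xf, float yf, int l, int k, int scale, float co, float si)
{
    Vec2f2 p;
    p.x = xf + (-l * scale * si + k * scale * co);
    p.y = yf + ( l * scale * co + k * scale * si);
    return p;
}

// Rotate the sampled gradient (rx, ry) into the same frame before accumulation.
static Vec2f2 rotateGradient(float rx, float ry, float co, float si)
{
    Vec2f2 g;
    g.x = -rx * si + ry * co;   // rrx in the loop above
    g.y =  rx * co + ry * si;   // rry in the loop above
    return g;
}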
//*************************************************************************************
//*************************************************************************************
/** /**
* @brief This method computes the upright descriptor (not rotation invariant) of * @brief This method computes the upright descriptor (not rotation invariant) of
* the provided keypoint * the provided keypoint
...@@ -954,7 +728,7 @@ void KAZEFeatures::Get_SURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -954,7 +728,7 @@ void KAZEFeatures::Get_SURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
* from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching, * from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching,
* ECCV 2008 * ECCV 2008
*/ */
void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, float *desc) void MSURF_Descriptor_Invoker::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, float *desc) const
{ {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0, gauss_s1 = 0.0, gauss_s2 = 0.0; float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0, gauss_s1 = 0.0, gauss_s2 = 0.0;
float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0; float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0;
...@@ -964,6 +738,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -964,6 +738,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
float fx = 0.0, fy = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0; float fx = 0.0, fy = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Subregion centers for the 4x4 gaussian weighting // Subregion centers for the 4x4 gaussian weighting
float cx = -0.5f, cy = 0.5f; float cx = -0.5f, cy = 0.5f;
...@@ -1013,12 +789,12 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1013,12 +789,12 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
y1 = (int)(sample_y - 0.5f); y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f); x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -1069,8 +845,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1069,8 +845,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -1086,7 +862,7 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1086,7 +862,7 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
* from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching, * from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching,
* ECCV 2008 * ECCV 2008
*/ */
void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) void MSURF_Descriptor_Invoker::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) const
{ {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0, gauss_s1 = 0.0, gauss_s2 = 0.0; float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0, gauss_s1 = 0.0, gauss_s2 = 0.0;
float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0; float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0;
...@@ -1096,6 +872,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1096,6 +872,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
int kx = 0, ky = 0, i = 0, j = 0, dcount = 0; int kx = 0, ky = 0, i = 0, j = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Subregion centers for the 4x4 gaussian weighting // Subregion centers for the 4x4 gaussian weighting
float cx = -0.5f, cy = 0.5f; float cx = -0.5f, cy = 0.5f;
...@@ -1149,12 +927,12 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1149,12 +927,12 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
y1 = fRound(sample_y - 0.5f); y1 = fRound(sample_y - 0.5f);
x1 = fRound(sample_x - 0.5f); x1 = fRound(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -1202,8 +980,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1202,8 +980,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -1219,7 +997,7 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1219,7 +997,7 @@ void KAZEFeatures::Get_MSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
* G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and * G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and
* Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013 * Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013
*/ */
void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, float *desc) void GSURF_Descriptor_Invoker::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, float *desc) const
{ {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0; float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0, len = 0.0, xf = 0.0, yf = 0.0; float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
...@@ -1229,6 +1007,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1229,6 +1007,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0; int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Set the descriptor size and the sample and pattern sizes // Set the descriptor size and the sample and pattern sizes
dsize = 64; dsize = 64;
sample_step = 5; sample_step = 5;
...@@ -1256,12 +1036,12 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1256,12 +1036,12 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
y1 = (int)(sample_y - 0.5f); y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f); x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -1337,8 +1117,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1337,8 +1117,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -1354,7 +1134,7 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa ...@@ -1354,7 +1134,7 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint &kpt, floa
* G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and * G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and
* Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013 * Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013
*/ */
void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) void GSURF_Descriptor_Invoker::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) const
{ {
float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0; float dx = 0.0, dy = 0.0, mdx = 0.0, mdy = 0.0;
float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0, len = 0.0, xf = 0.0, yf = 0.0; float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
...@@ -1364,6 +1144,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1364,6 +1144,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0; int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Set the descriptor size and the sample and pattern sizes // Set the descriptor size and the sample and pattern sizes
dsize = 64; dsize = 64;
sample_step = 5; sample_step = 5;
...@@ -1394,12 +1176,12 @@ void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1394,12 +1176,12 @@ void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
y1 = (int)(sample_y - 0.5f); y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f); x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -1475,257 +1257,10 @@ void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc) ...@@ -1475,257 +1257,10 @@ void KAZEFeatures::Get_GSURF_Descriptor_64(const cv::KeyPoint &kpt, float *desc)
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
}
}
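
Every descriptor variant ends the same way: normalize to unit length, then optionally call clippingDescriptor(). The body of clippingDescriptor() is not part of this diff, so the sketch below only assumes a SIFT-style scheme in which components above the given ratio are clamped and the vector is re-normalized for niter iterations; treat the details as an assumption, not the actual implementation.

#include <cmath>

static void clippingNormalizationSketch(float* desc, int dsize, int niter, float ratio)
{
    for (int it = 0; it < niter; it++) {
        float len = 0.0f;
        for (int i = 0; i < dsize; i++) {
            if (desc[i] >  ratio) desc[i] =  ratio;   // clamp large positive entries
            if (desc[i] < -ratio) desc[i] = -ratio;   // clamp large negative entries
            len += desc[i] * desc[i];
        }
        len = std::sqrt(len);
        if (len > 0.0f) {
            for (int i = 0; i < dsize; i++)
                desc[i] /= len;                       // back to unit length
        }
    }
}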
//*************************************************************************************
//*************************************************************************************
/**
 * @brief This method computes the upright extended descriptor (not rotation invariant)
* of the provided keypoint
* @param kpt Input keypoint
* @param desc Descriptor vector
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 128. No additional
 * Gaussian weighting is performed. The descriptor is inspired by Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void KAZEFeatures::Get_SURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, float *desc)
{
float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, sample_x = 0.0, sample_y = 0.0;
float fx = 0.0, fy = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
float dxp = 0.0, dyp = 0.0, mdxp = 0.0, mdyp = 0.0;
float dxn = 0.0, dyn = 0.0, mdxn = 0.0, mdyn = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0;
// Set the descriptor size and the sample and pattern sizes
dsize = 128;
sample_step = 5;
pattern_size = 10;
// Get the information from the keypoint
yf = kpt.pt.y;
xf = kpt.pt.x;
scale = fRound(kpt.size / 2.0f);
level = kpt.class_id;
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dxp = dxn = mdxp = mdxn = 0.0;
dyp = dyn = mdyp = mdyn = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
sample_y = k*scale + yf;
sample_x = l*scale + xf;
y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_);
y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution_[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution_[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Sum the derivatives to the cumulative descriptor
if (ry >= 0.0) {
dxp += rx;
mdxp += fabs(rx);
}
else {
dxn += rx;
mdxn += fabs(rx);
}
if (rx >= 0.0) {
dyp += ry;
mdyp += fabs(ry);
}
else {
dyn += ry;
mdyn += fabs(ry);
}
}
}
// Add the values to the descriptor vector
desc[dcount++] = dxp;
desc[dcount++] = dxn;
desc[dcount++] = mdxp;
desc[dcount++] = mdxn;
desc[dcount++] = dyp;
desc[dcount++] = dyn;
desc[dcount++] = mdyp;
desc[dcount++] = mdyn;
// Store the current length^2 of the vector
len += dxp*dxp + dxn*dxn + mdxp*mdxp + mdxn*mdxn +
dyp*dyp + dyn*dyn + mdyp*mdyp + mdyn*mdyn;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
} }
if (use_normalization == true) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO);
}
}
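
The extended 128-D variant differs from the 64-D one only in how the sums are accumulated: dx contributions are split by the sign of ry, and dy contributions by the sign of rx, so each subregion stores eight sums instead of four. A condensed sketch of that accumulation (struct and function names are illustrative):

#include <cmath>

struct SubregionSums {
    float dxp, dxn, mdxp, mdxn;   // dx sums split by sign(ry)
    float dyp, dyn, mdyp, mdyn;   // dy sums split by sign(rx)
};

static void accumulateExtended(SubregionSums& s, float rx, float ry)
{
    if (ry >= 0.0f) { s.dxp += rx; s.mdxp += std::fabs(rx); }
    else            { s.dxn += rx; s.mdxn += std::fabs(rx); }
    if (rx >= 0.0f) { s.dyp += ry; s.mdyp += std::fabs(ry); }
    else            { s.dyn += ry; s.mdyn += std::fabs(ry); }
}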
//*************************************************************************************
//*************************************************************************************
/**
* @brief This method computes the extended descriptor of the provided keypoint given the
* main orientation
* @param kpt Input keypoint
* @param desc Descriptor vector
* @note Rectangular grid of 20 s x 20 s. Descriptor Length 128. No additional
 * Gaussian weighting is performed. The descriptor is inspired by Bay et al.,
* Speeded Up Robust Features, ECCV, 2006
*/
void KAZEFeatures::Get_SURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc)
{
float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0;
float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0;
float fx = 0.0, fy = 0.0, res1 = 0.0, res2 = 0.0, res3 = 0.0, res4 = 0.0;
float dxp = 0.0, dyp = 0.0, mdxp = 0.0, mdyp = 0.0;
float dxn = 0.0, dyn = 0.0, mdxn = 0.0, mdyn = 0.0;
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0;
// Set the descriptor size and the sample and pattern sizes
dsize = 128;
sample_step = 5;
pattern_size = 10;
// Get the information from the keypoint
yf = kpt.pt.y;
xf = kpt.pt.x;
scale = fRound(kpt.size / 2.0f);
angle = kpt.angle;
level = kpt.class_id;
co = cos(angle);
si = sin(angle);
// Calculate descriptor for this interest point
for (int i = -pattern_size; i < pattern_size; i += sample_step) {
for (int j = -pattern_size; j < pattern_size; j += sample_step) {
dxp = dxn = mdxp = mdxn = 0.0;
dyp = dyn = mdyp = mdyn = 0.0;
for (int k = i; k < i + sample_step; k++) {
for (int l = j; l < j + sample_step; l++) {
// Get the coordinates of the sample point on the rotated axis
sample_y = yf + (l*scale*co + k*scale*si);
sample_x = xf + (-l*scale*si + k*scale*co);
y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_);
y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_);
fx = sample_x - x1;
fy = sample_y - y1;
res1 = *(evolution_[level].Lx.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Lx.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Lx.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Lx.ptr<float>(y2)+x2);
rx = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
res1 = *(evolution_[level].Ly.ptr<float>(y1)+x1);
res2 = *(evolution_[level].Ly.ptr<float>(y1)+x2);
res3 = *(evolution_[level].Ly.ptr<float>(y2)+x1);
res4 = *(evolution_[level].Ly.ptr<float>(y2)+x2);
ry = (1.0f - fx)*(1.0f - fy)*res1 + fx*(1.0f - fy)*res2 + (1.0f - fx)*fy*res3 + fx*fy*res4;
// Get the x and y derivatives on the rotated axis
rry = rx*co + ry*si;
rrx = -rx*si + ry*co;
// Sum the derivatives to the cumulative descriptor
if (rry >= 0.0) {
dxp += rrx;
mdxp += fabs(rrx);
}
else {
dxn += rrx;
mdxn += fabs(rrx);
}
if (rrx >= 0.0) {
dyp += rry;
mdyp += fabs(rry);
}
else {
dyn += rry;
mdyn += fabs(rry);
}
}
}
// Add the values to the descriptor vector
desc[dcount++] = dxp;
desc[dcount++] = dxn;
desc[dcount++] = mdxp;
desc[dcount++] = mdxn;
desc[dcount++] = dyp;
desc[dcount++] = dyn;
desc[dcount++] = mdyp;
desc[dcount++] = mdyn;
// Store the current length^2 of the vector
len += dxp*dxp + dxn*dxn + mdxp*mdxp + mdxn*mdxn +
dyp*dyp + dyn*dyn + mdyp*mdyp + mdyn*mdyn;
}
}
// convert to unit vector
len = sqrt(len);
for (int i = 0; i < dsize; i++) {
desc[i] /= len;
}
if (use_normalization == true) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO);
}
} }
//************************************************************************************* //*************************************************************************************
...@@ -1740,8 +1275,8 @@ void KAZEFeatures::Get_SURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc) ...@@ -1740,8 +1275,8 @@ void KAZEFeatures::Get_SURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc)
* from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching, * from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching,
* ECCV 2008 * ECCV 2008
*/ */
void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, float *desc) { void MSURF_Descriptor_Invoker::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, float *desc) const
{
float gauss_s1 = 0.0, gauss_s2 = 0.0; float gauss_s1 = 0.0, gauss_s2 = 0.0;
float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0; float rx = 0.0, ry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0;
float sample_x = 0.0, sample_y = 0.0; float sample_x = 0.0, sample_y = 0.0;
...@@ -1755,6 +1290,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -1755,6 +1290,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
// Subregion centers for the 4x4 gaussian weighting // Subregion centers for the 4x4 gaussian weighting
float cx = -0.5f, cy = 0.5f; float cx = -0.5f, cy = 0.5f;
std::vector<TEvolution>& evolution_ = *_evolution;
// Set the descriptor size and the sample and pattern sizes // Set the descriptor size and the sample and pattern sizes
dsize = 128; dsize = 128;
sample_step = 5; sample_step = 5;
...@@ -1804,12 +1341,12 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -1804,12 +1341,12 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
y1 = (int)(sample_y - 0.5f); y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f); x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -1879,8 +1416,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -1879,8 +1416,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -1896,8 +1433,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -1896,8 +1433,8 @@ void KAZEFeatures::Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
* from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching, * from Agrawal et al., CenSurE: Center Surround Extremas for Realtime Feature Detection and Matching,
* ECCV 2008 * ECCV 2008
*/ */
void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc) { void MSURF_Descriptor_Invoker::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc) const
{
float gauss_s1 = 0.0, gauss_s2 = 0.0; float gauss_s1 = 0.0, gauss_s2 = 0.0;
float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0; float rx = 0.0, ry = 0.0, rrx = 0.0, rry = 0.0, len = 0.0, xf = 0.0, yf = 0.0, ys = 0.0, xs = 0.0;
float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0; float sample_x = 0.0, sample_y = 0.0, co = 0.0, si = 0.0, angle = 0.0;
...@@ -1908,6 +1445,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -1908,6 +1445,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
int kx = 0, ky = 0, i = 0, j = 0, dcount = 0; int kx = 0, ky = 0, i = 0, j = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Subregion centers for the 4x4 gaussian weighting // Subregion centers for the 4x4 gaussian weighting
float cx = -0.5f, cy = 0.5f; float cx = -0.5f, cy = 0.5f;
...@@ -1964,12 +1503,12 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -1964,12 +1503,12 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
y1 = fRound(sample_y - 0.5f); y1 = fRound(sample_y - 0.5f);
x1 = fRound(sample_x - 0.5f); x1 = fRound(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -2040,8 +1579,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -2040,8 +1579,8 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -2057,7 +1596,7 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -2057,7 +1596,7 @@ void KAZEFeatures::Get_MSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
* G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and * G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and
* Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013 * Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013
*/ */
void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, float *desc) void GSURF_Descriptor_Invoker::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, float *desc) const
{ {
float len = 0.0, xf = 0.0, yf = 0.0, sample_x = 0.0, sample_y = 0.0; float len = 0.0, xf = 0.0, yf = 0.0, sample_x = 0.0, sample_y = 0.0;
float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0, modg = 0.0; float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0, modg = 0.0;
...@@ -2067,6 +1606,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -2067,6 +1606,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0; int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Set the descriptor size and the sample and pattern sizes // Set the descriptor size and the sample and pattern sizes
dsize = 128; dsize = 128;
sample_step = 5; sample_step = 5;
...@@ -2094,12 +1635,12 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -2094,12 +1635,12 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
y1 = (int)(sample_y - 0.5f); y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f); x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -2193,8 +1734,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -2193,8 +1734,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -2210,7 +1751,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo ...@@ -2210,7 +1751,8 @@ void KAZEFeatures::Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint &kpt, flo
* G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and * G-SURF descriptor as described in Pablo F. Alcantarilla, Luis M. Bergasa and
* Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013 * Andrew J. Davison, Gauge-SURF Descriptors, Image and Vision Computing 31(1), 2013
*/ */
void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc) { void GSURF_Descriptor_Invoker::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc) const
{
float len = 0.0, xf = 0.0, yf = 0.0; float len = 0.0, xf = 0.0, yf = 0.0;
float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0; float rx = 0.0, ry = 0.0, rxx = 0.0, rxy = 0.0, ryy = 0.0;
...@@ -2222,6 +1764,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -2222,6 +1764,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0; int x1 = 0, y1 = 0, x2 = 0, y2 = 0, sample_step = 0, pattern_size = 0, dcount = 0;
int dsize = 0, scale = 0, level = 0; int dsize = 0, scale = 0, level = 0;
std::vector<TEvolution>& evolution_ = *_evolution;
// Set the descriptor size and the sample and pattern sizes // Set the descriptor size and the sample and pattern sizes
dsize = 128; dsize = 128;
sample_step = 5; sample_step = 5;
...@@ -2253,12 +1797,12 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -2253,12 +1797,12 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
y1 = (int)(sample_y - 0.5f); y1 = (int)(sample_y - 0.5f);
x1 = (int)(sample_x - 0.5f); x1 = (int)(sample_x - 0.5f);
checkDescriptorLimits(x1, y1, img_width_, img_height_); checkDescriptorLimits(x1, y1, options.img_width, options.img_height);
y2 = (int)(sample_y + 0.5f); y2 = (int)(sample_y + 0.5f);
x2 = (int)(sample_x + 0.5f); x2 = (int)(sample_x + 0.5f);
checkDescriptorLimits(x2, y2, img_width_, img_height_); checkDescriptorLimits(x2, y2, options.img_width, options.img_height);
fx = sample_x - x1; fx = sample_x - x1;
fy = sample_y - y1; fy = sample_y - y1;
...@@ -2351,8 +1895,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -2351,8 +1895,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
desc[i] /= len; desc[i] /= len;
} }
if (use_normalization == true) { if (options.use_clipping_normalilzation) {
clippingDescriptor(desc, dsize, CLIPPING_NORMALIZATION_NITER, CLIPPING_NORMALIZATION_RATIO); clippingDescriptor(desc, dsize, options.clipping_normalization_niter, options.clipping_normalization_ratio);
} }
} }
...@@ -2371,22 +1915,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc ...@@ -2371,22 +1915,8 @@ void KAZEFeatures::Get_GSURF_Descriptor_128(const cv::KeyPoint &kpt, float *desc
*/ */
void KAZEFeatures::AOS_Step_Scalar(cv::Mat &Ld, const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize) { void KAZEFeatures::AOS_Step_Scalar(cv::Mat &Ld, const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize) {
#ifdef _OPENMP
#pragma omp sections
{
#pragma omp section
{
AOS_Rows(Ldprev,c,stepsize);
}
#pragma omp section
{
AOS_Columns(Ldprev,c,stepsize);
}
}
#else
AOS_Rows(Ldprev, c, stepsize); AOS_Rows(Ldprev, c, stepsize);
AOS_Columns(Ldprev, c, stepsize); AOS_Columns(Ldprev, c, stepsize);
#endif
Ld = 0.5f*(Lty_ + Ltx_.t()); Ld = 0.5f*(Lty_ + Ltx_.t());
} }
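
With the OpenMP sections removed, AOS_Step_Scalar simply runs the row and column solves in sequence and averages them, which corresponds to the standard additive operator splitting (AOS) update of Weickert et al. that this code implements:

$$ L^{k+1} = \frac{1}{2}\left[\left(I - 2\tau\,A_x(L^{k})\right)^{-1} + \left(I - 2\tau\,A_y(L^{k})\right)^{-1}\right] L^{k} $$

Here A_x and A_y are the one-dimensional diffusion operators along rows and columns, tau is the step size, and each tridiagonal system is solved with the Thomas() method declared in the header; Lty_ and Ltx_ hold the two partial solutions that the final line averages.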
......
...@@ -26,22 +26,10 @@ class KAZEFeatures { ...@@ -26,22 +26,10 @@ class KAZEFeatures {
private: private:
KAZEOptions options;
// Parameters of the Nonlinear diffusion class // Parameters of the Nonlinear diffusion class
float soffset_; // Base scale offset
float sderivatives_; // Standard deviation of the Gaussian for the nonlinear diff. derivatives
int omax_; // Maximum octave level
int nsublevels_; // Number of sublevels per octave level
int img_width_; // Width of the original image
int img_height_; // Height of the original image
std::vector<TEvolution> evolution_; // Vector of nonlinear diffusion evolution std::vector<TEvolution> evolution_; // Vector of nonlinear diffusion evolution
float kcontrast_; // The contrast parameter for the scalar nonlinear diffusion
float dthreshold_; // Feature detector threshold response
int diffusivity_; // Diffusivity type, 0->PM G1, 1->PM G2, 2-> Weickert
int descriptor_mode_; // Descriptor mode
bool use_fed_; // Set to true in case we want to use FED for the nonlinear diffusion filtering. Set false for using AOS
bool use_upright_; // Set to true in case we want to use the upright version of the descriptors
bool use_extended_; // Set to true in case we want to use the extended version of the descriptors
bool use_normalization;
// Vector of keypoint vectors for finding extrema in multiple threads // Vector of keypoint vectors for finding extrema in multiple threads
std::vector<std::vector<cv::KeyPoint> > kpts_par_; std::vector<std::vector<cv::KeyPoint> > kpts_par_;
...@@ -52,15 +40,6 @@ private: ...@@ -52,15 +40,6 @@ private:
std::vector<std::vector<float > > tsteps_; // Vector of FED dynamic time steps std::vector<std::vector<float > > tsteps_; // Vector of FED dynamic time steps
std::vector<int> nsteps_; // Vector of number of steps per cycle std::vector<int> nsteps_; // Vector of number of steps per cycle
// Computation times variables in ms
//double tkcontrast_; // Kcontrast factor computation
//double tnlscale_; // Nonlinear Scale space generation
//double tdetector_; // Feature detector
//double tmderivatives_; // Multiscale derivatives computation
//double tdresponse_; // Detector response computation
//double tdescriptor_; // Feature descriptor
//double tsubpixel_; // Subpixel refinement
// Some auxiliary variables used in the AOS step // Some auxiliary variables used in the AOS step
cv::Mat Ltx_, Lty_, px_, py_, ax_, ay_, bx_, by_, qr_, qc_; cv::Mat Ltx_, Lty_, px_, py_, ax_, ay_, bx_, by_, qr_, qc_;
...@@ -75,6 +54,8 @@ public: ...@@ -75,6 +54,8 @@ public:
void Feature_Detection(std::vector<cv::KeyPoint>& kpts); void Feature_Detection(std::vector<cv::KeyPoint>& kpts);
void Feature_Description(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc); void Feature_Description(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc);
static void Compute_Main_Orientation(cv::KeyPoint& kpt, const std::vector<TEvolution>& evolution_, const KAZEOptions& options);
private: private:
// Feature Detection Methods // Feature Detection Methods
...@@ -91,32 +72,6 @@ private: ...@@ -91,32 +72,6 @@ private:
void AOS_Columns(const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize); void AOS_Columns(const cv::Mat &Ldprev, const cv::Mat &c, const float& stepsize);
void Thomas(const cv::Mat &a, const cv::Mat &b, const cv::Mat &Ld, cv::Mat &x); void Thomas(const cv::Mat &a, const cv::Mat &b, const cv::Mat &Ld, cv::Mat &x);
// Feature Description methods
void Compute_Main_Orientation_SURF(cv::KeyPoint& kpt);
// Descriptor Mode -> 0 SURF 64
void Get_SURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
void Get_SURF_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
// Descriptor Mode -> 0 SURF 128
void Get_SURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
void Get_SURF_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
// Descriptor Mode -> 1 M-SURF 64
void Get_MSURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
void Get_MSURF_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
// Descriptor Mode -> 1 M-SURF 128
void Get_MSURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
void Get_MSURF_Descriptor_128(const cv::KeyPoint& kpt, float *desc);
// Descriptor Mode -> 2 G-SURF 64
void Get_GSURF_Upright_Descriptor_64(const cv::KeyPoint& kpt, float* desc);
void Get_GSURF_Descriptor_64(const cv::KeyPoint& kpt, float *desc);
// Descriptor Mode -> 2 G-SURF 128
void Get_GSURF_Upright_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
void Get_GSURF_Descriptor_128(const cv::KeyPoint& kpt, float* desc);
}; };
//************************************************************************************* //*************************************************************************************
......
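
The descriptor methods removed from KAZEFeatures above reappear on dedicated MSURF_/GSURF_Descriptor_Invoker classes, which, together with the commit's "enabled threading" note, suggests the per-keypoint descriptor work is now dispatched through cv::parallel_for_. The sketch below only illustrates that pattern under that assumption; the class layout and the computeOne() hook are hypothetical, not the actual implementation.

#include <opencv2/core/core.hpp>
#include <vector>

class Descriptor_Invoker_Sketch : public cv::ParallelLoopBody
{
public:
    Descriptor_Invoker_Sketch(std::vector<cv::KeyPoint>& kpts, cv::Mat& desc)
        : kpts_(&kpts), desc_(&desc) {}

    // Each worker processes a contiguous range of keypoints.
    void operator()(const cv::Range& range) const
    {
        for (int i = range.start; i < range.end; i++)
            computeOne((*kpts_)[i], desc_->ptr<float>(i));   // one descriptor row per keypoint
    }

private:
    void computeOne(const cv::KeyPoint& /*kpt*/, float* /*desc*/) const
    {
        // e.g. Get_MSURF_Descriptor_64(kpt, desc) in the real invoker
    }

    std::vector<cv::KeyPoint>* kpts_;
    cv::Mat* desc_;
};

// Usage: cv::parallel_for_(cv::Range(0, (int)kpts.size()),
//                          Descriptor_Invoker_Sketch(kpts, desc));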
...@@ -169,12 +169,18 @@ TEST(Features2d_Detector_Keypoints_Dense, validation) ...@@ -169,12 +169,18 @@ TEST(Features2d_Detector_Keypoints_Dense, validation)
TEST(Features2d_Detector_Keypoints_KAZE, validation) TEST(Features2d_Detector_Keypoints_KAZE, validation)
{ {
CV_FeatureDetectorKeypointsTest test(Algorithm::create<FeatureDetector>("Feature2D.KAZE")); CV_FeatureDetectorKeypointsTest test_gsurf(cv::Ptr<FeatureDetector>(new cv::KAZE(cv::KAZE::DESCRIPTOR_GSURF, false, false)));
test.safe_run(); test_gsurf.safe_run();
CV_FeatureDetectorKeypointsTest test_msurf(cv::Ptr<FeatureDetector>(new cv::KAZE(cv::KAZE::DESCRIPTOR_MSURF, false, false)));
test_msurf.safe_run();
} }
TEST(Features2d_Detector_Keypoints_AKAZE, validation) TEST(Features2d_Detector_Keypoints_AKAZE, validation)
{ {
CV_FeatureDetectorKeypointsTest test(Algorithm::create<FeatureDetector>("Feature2D.AKAZE")); CV_FeatureDetectorKeypointsTest test_kaze(cv::Ptr<FeatureDetector>(new cv::AKAZE(cv::AKAZE::DESCRIPTOR_KAZE)));
test.safe_run(); test_kaze.safe_run();
CV_FeatureDetectorKeypointsTest test_mldb(cv::Ptr<FeatureDetector>(new cv::AKAZE(cv::AKAZE::DESCRIPTOR_MLDB)));
test_mldb.safe_run();
} }
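
For reference, a brief usage sketch of the refactored constructors that the updated tests exercise; the image path is a placeholder, and detect()/compute() come from the FeatureDetector/DescriptorExtractor interfaces that Feature2D exposes.

#include <opencv2/opencv.hpp>
#include <vector>

int main()
{
    cv::Mat img = cv::imread("image.png", 0);   // placeholder path, loaded as grayscale

    // KAZE with the G-SURF descriptor, 64-D (non-extended), rotation invariant
    cv::KAZE kaze(cv::KAZE::DESCRIPTOR_GSURF, false, false);

    // AKAZE with the binary M-LDB descriptor at its default size
    cv::AKAZE akaze(cv::AKAZE::DESCRIPTOR_MLDB);

    std::vector<cv::KeyPoint> kpts;
    cv::Mat desc;

    kaze.detect(img, kpts);           // FeatureDetector interface
    kaze.compute(img, kpts, desc);    // DescriptorExtractor interface

    akaze.detect(img, kpts);
    akaze.compute(img, kpts, desc);
    return 0;
}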