Commit a5b56846 authored by Jiri Horner, committed by Alexander Alekhin

Merge pull request #9330 from hrnr:akaze_ocl

[GSOC] Enable OCL for AKAZE (#9330)

* revert e0489cb4 - reenable OCL for AKAZE

* deal with conversion internally in AKAZE

* pass InputArray directly to AKAZE to allow distinguishing input Mat/UMat. deal with conversion there
* ensure that keypoint orientations are always computed. prevents misuse of the internal AKAZE class.

* convert internal AKAZE functions to use InputArray/OutputArray

* make internal functions private in AKAZE

* split OCL and CPU paths in AKAZE

* create 2 separate pyramids, 1 for OCL and 1 for CPU
* template functions that use temporaries to always store them as correct type (UMat/Mat)

* remove variable used only in OCL path

it caused an unused variable warning

* update AKAZE documentation

* run ocl version only when ocl is enabled

* add tests for OCL path in AKAZE

* relax condition for keypoints angle
parent 3a8dbebd
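For context, a minimal usage sketch of the T-API behaviour this merge enables (not part of the patch; the image file name is a placeholder): passing a cv::UMat lets AKAZE route parts of its pipeline through OpenCL, while a cv::Mat keeps the plain CPU path.

```cpp
#include <vector>
#include <opencv2/core/ocl.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/imgcodecs.hpp>

int main()
{
    // Load into a UMat so the OpenCL branch of AKAZE is taken (file name is a placeholder).
    cv::UMat gray;
    cv::imread("graf1.png", cv::IMREAD_GRAYSCALE).copyTo(gray);

    if (cv::ocl::haveOpenCL())
        cv::ocl::setUseOpenCL(true);   // make sure T-API dispatch is enabled

    // Conversion to the internal CV_32F representation now happens inside AKAZE.
    cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();
    std::vector<cv::KeyPoint> keypoints;
    cv::UMat descriptors;
    akaze->detectAndCompute(gray, cv::noArray(), keypoints, descriptors);
    return 0;
}
```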
@@ -658,13 +658,22 @@ public:
CV_WRAP virtual int getDiffusivity() const = 0;
};
/** @brief Class implementing the AKAZE keypoint detector and descriptor extractor, described in @cite ANB13 . :
/** @brief Class implementing the AKAZE keypoint detector and descriptor extractor, described in @cite ANB13.
@note AKAZE descriptors can only be used with KAZE or AKAZE keypoints. Try to avoid using *extract*
and *detect* instead of *operator()* due to performance reasons. .. [ANB13] Fast Explicit Diffusion
for Accelerated Features in Nonlinear Scale Spaces. Pablo F. Alcantarilla, Jesús Nuevo and Adrien
Bartoli. In British Machine Vision Conference (BMVC), Bristol, UK, September 2013.
*/
@details AKAZE descriptors can only be used with KAZE or AKAZE keypoints. This class is thread-safe.
@note When you need descriptors, use Feature2D::detectAndCompute, which
provides better performance. When using Feature2D::detect followed by
Feature2D::compute, the scale space pyramid is computed twice.
@note AKAZE implements T-API. When an image is passed as UMat, some parts of the algorithm
will use OpenCL.
@note [ANB13] Fast Explicit Diffusion for Accelerated Features in Nonlinear
Scale Spaces. Pablo F. Alcantarilla, Jesús Nuevo and Adrien Bartoli. In
British Machine Vision Conference (BMVC), Bristol, UK, September 2013.
*/
class CV_EXPORTS_W AKAZE : public Feature2D
{
public:
......
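To make the new @note concrete, a small illustrative sketch (not part of the diff; `image` is assumed to be any 8-bit grayscale cv::Mat):

```cpp
#include <vector>
#include <opencv2/features2d.hpp>

void describeWithAKAZE(const cv::Mat& image)
{
    cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();
    std::vector<cv::KeyPoint> kpts;
    cv::Mat desc;

    // Discouraged: detect() and compute() each build the nonlinear scale space,
    // so the pyramid is computed twice.
    akaze->detect(image, kpts);
    akaze->compute(image, kpts, desc);

    // Preferred: a single call builds the pyramid once and is therefore faster.
    akaze->detectAndCompute(image, cv::noArray(), kpts, desc);
}
```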
@@ -169,38 +169,25 @@ namespace cv
{
CV_INSTRUMENT_REGION()
Mat img = image.getMat();
if (img.channels() > 1)
cvtColor(image, img, COLOR_BGR2GRAY);
Mat img1_32;
if ( img.depth() == CV_32F )
img1_32 = img;
else if ( img.depth() == CV_8U )
img.convertTo(img1_32, CV_32F, 1.0 / 255.0, 0);
else if ( img.depth() == CV_16U )
img.convertTo(img1_32, CV_32F, 1.0 / 65535.0, 0);
CV_Assert( ! img1_32.empty() );
CV_Assert( ! image.empty() );
AKAZEOptions options;
options.descriptor = descriptor;
options.descriptor_channels = descriptor_channels;
options.descriptor_size = descriptor_size;
options.img_width = img.cols;
options.img_height = img.rows;
options.img_width = image.cols();
options.img_height = image.rows();
options.dthreshold = threshold;
options.omax = octaves;
options.nsublevels = sublevels;
options.diffusivity = diffusivity;
AKAZEFeatures impl(options);
impl.Create_Nonlinear_Scale_Space(img1_32);
impl.Create_Nonlinear_Scale_Space(image);
if (!useProvidedKeypoints)
{
impl.Feature_Detection(keypoints);
impl.Compute_Keypoints_Orientation(keypoints);
}
if (!mask.empty())
@@ -208,7 +195,7 @@ namespace cv
KeyPointsFilter::runByPixelsMask(keypoints, mask.getMat());
}
if( descriptors.needed() )
if(descriptors.needed())
{
impl.Compute_Descriptors(keypoints, descriptors);
......
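The deleted lines above show the input normalisation that this patch moves into the AKAZE implementation. A rough sketch of that conversion for reference (the helper name is invented; the actual internal code is not shown in this hunk):

```cpp
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

// Hypothetical helper mirroring the removed conversion: grayscale first,
// then rescale the depth to CV_32F in the [0, 1] range.
static void toInternalFloat(cv::InputArray src, cv::OutputArray dst)
{
    cv::Mat gray = src.getMat();
    if (gray.channels() > 1)
        cv::cvtColor(gray, gray, cv::COLOR_BGR2GRAY);

    switch (gray.depth())
    {
    case CV_32F: gray.copyTo(dst);                                break;
    case CV_8U:  gray.convertTo(dst, CV_32F, 1.0 / 255.0, 0.0);   break;
    case CV_16U: gray.convertTo(dst, CV_32F, 1.0 / 65535.0, 0.0); break;
    default:     CV_Error(cv::Error::StsUnsupportedFormat, "unsupported image depth");
    }
}
```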
@@ -17,6 +17,7 @@ namespace cv
{
/// A-KAZE nonlinear diffusion filtering evolution
template <typename MatType>
struct Evolution
{
Evolution() {
@@ -29,10 +30,28 @@ struct Evolution
border = 0;
}
Mat Lx, Ly; ///< First order spatial derivatives
Mat Lt; ///< Evolution image
Mat Lsmooth; ///< Smoothed image, used only for computing determinant, released afterwards
Mat Ldet; ///< Detector response
template <typename T>
explicit Evolution(const Evolution<T> &other) {
size = other.size;
etime = other.etime;
esigma = other.esigma;
octave = other.octave;
sublevel = other.sublevel;
sigma_size = other.sigma_size;
octave_ratio = other.octave_ratio;
border = other.border;
other.Lx.copyTo(Lx);
other.Ly.copyTo(Ly);
other.Lt.copyTo(Lt);
other.Lsmooth.copyTo(Lsmooth);
other.Ldet.copyTo(Ldet);
}
MatType Lx, Ly; ///< First order spatial derivatives
MatType Lt; ///< Evolution image
MatType Lsmooth; ///< Smoothed image, used only for computing determinant, released afterwards
MatType Ldet; ///< Detector response
Size size; ///< Size of the layer
float etime; ///< Evolution time
@@ -44,6 +63,11 @@ struct Evolution
int border; ///< Width of border where descriptors cannot be computed
};
typedef Evolution<Mat> MEvolution;
typedef Evolution<UMat> UEvolution;
typedef std::vector<MEvolution> Pyramid;
typedef std::vector<UEvolution> UMatPyramid;
/* ************************************************************************* */
// AKAZE Class Declaration
class AKAZEFeatures {
@@ -51,7 +75,7 @@ class AKAZEFeatures {
private:
AKAZEOptions options_; ///< Configuration options for AKAZE
std::vector<Evolution> evolution_; ///< Vector of nonlinear diffusion evolution
Pyramid evolution_; ///< Vector of nonlinear diffusion evolution
/// FED parameters
int ncycles_; ///< Number of cycles
@@ -64,23 +88,21 @@ private:
cv::Mat descriptorBits_;
cv::Mat bitMask_;
public:
/// Constructor with input arguments
AKAZEFeatures(const AKAZEOptions& options);
/// Scale Space methods
void Allocate_Memory_Evolution();
void Create_Nonlinear_Scale_Space(InputArray img);
void Feature_Detection(std::vector<cv::KeyPoint>& kpts);
void Compute_Determinant_Hessian_Response(void);
void Find_Scale_Space_Extrema(std::vector<Mat>& keypoints_by_layers);
void Do_Subpixel_Refinement(std::vector<Mat>& keypoints_by_layers,
std::vector<KeyPoint>& kpts);
/// Feature description methods
void Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, OutputArray desc);
void Compute_Keypoints_Orientation(std::vector<cv::KeyPoint>& kpts) const;
public:
/// Constructor with input arguments
AKAZEFeatures(const AKAZEOptions& options);
void Create_Nonlinear_Scale_Space(InputArray img);
void Feature_Detection(std::vector<cv::KeyPoint>& kpts);
void Compute_Descriptors(std::vector<cv::KeyPoint>& kpts, OutputArray desc);
};
/* ************************************************************************* */
......
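The templated Evolution above is what allows the CPU and OCL branches to keep two separate pyramids (Pyramid vs. UMatPyramid) with the same layer layout, with the explicit converting constructor deep-copying a layer between the two storage types. A purely illustrative sketch of how such a conversion could look inside the implementation (the function name is hypothetical):

```cpp
// Illustrative only; Pyramid/UMatPyramid/MEvolution/UEvolution are the
// typedefs introduced above, so this snippet lives in the implementation file.
static Pyramid downloadPyramid(const UMatPyramid& uPyr)
{
    Pyramid mPyr;
    mPyr.reserve(uPyr.size());
    for (const UEvolution& layer : uPyr)
        mPyr.push_back(MEvolution(layer)); // converting ctor copies Lx, Ly, Lt, Lsmooth, Ldet
    return mPyr;
}
```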
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html
#include "../test_precomp.hpp"
#include "cvconfig.h"
#include "opencv2/ts/ocl_test.hpp"
#ifdef HAVE_OPENCL
namespace cvtest {
namespace ocl {
#define TEST_IMAGES testing::Values(\
"detectors_descriptors_evaluation/images_datasets/leuven/img1.png",\
"../stitching/a3.png", \
"../stitching/s2.jpg")
PARAM_TEST_CASE(Feature2DFixture, Ptr<Feature2D>, std::string)
{
std::string filename;
Mat image, descriptors;
vector<KeyPoint> keypoints;
UMat uimage, udescriptors;
vector<KeyPoint> ukeypoints;
Ptr<Feature2D> feature;
virtual void SetUp()
{
feature = GET_PARAM(0);
filename = GET_PARAM(1);
image = readImage(filename);
ASSERT_FALSE(image.empty());
image.copyTo(uimage);
OCL_OFF(feature->detect(image, keypoints));
OCL_ON(feature->detect(uimage, ukeypoints));
// note: we use keypoints from CPU for GPU too, to test descriptors separately
OCL_OFF(feature->compute(image, keypoints, descriptors));
OCL_ON(feature->compute(uimage, keypoints, udescriptors));
}
};
OCL_TEST_P(Feature2DFixture, KeypointsSame)
{
EXPECT_EQ(keypoints.size(), ukeypoints.size());
for (size_t i = 0; i < keypoints.size(); ++i)
{
EXPECT_GE(KeyPoint::overlap(keypoints[i], ukeypoints[i]), 0.95);
EXPECT_NEAR(keypoints[i].angle, ukeypoints[i].angle, 0.001);
}
}
OCL_TEST_P(Feature2DFixture, DescriptorsSame)
{
EXPECT_MAT_NEAR(descriptors, udescriptors, 0.001);
}
OCL_INSTANTIATE_TEST_CASE_P(AKAZE, Feature2DFixture,
testing::Combine(testing::Values(AKAZE::create()), TEST_IMAGES));
OCL_INSTANTIATE_TEST_CASE_P(AKAZE_DESCRIPTOR_KAZE, Feature2DFixture,
testing::Combine(testing::Values(AKAZE::create(AKAZE::DESCRIPTOR_KAZE)), TEST_IMAGES));
}//ocl
}//cvtest
#endif //HAVE_OPENCL