    Merge pull request #8869 from hrnr:akaze_part1 · 5f20e802
    Jiri Horner authored
    [GSOC] Speeding-up AKAZE, part #1 (#8869)
    
    * ts: expand arguments before stringifications in CV_ENUM and CV_FLAGS
    
    added protective macros to always force macro expansion of arguments. This allows using CV_ENUM and CV_FLAGS with macro arguments.
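    A hedged sketch of the two-step expansion idiom this refers to (the macro names below are illustrative, not the actual ts macros):

        // Illustrative only: the extra level of indirection makes the preprocessor
        // expand the argument before it is stringified.
        #define STRINGIFY_IMPL(x) #x
        #define STRINGIFY(x)      STRINGIFY_IMPL(x)

        #define MY_VALUE 42
        // STRINGIFY_IMPL(MY_VALUE) -> "MY_VALUE"
        // STRINGIFY(MY_VALUE)      -> "42"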
    
    * feature2d: unify perf test
    
    use the same test for all detectors/descriptors we have.
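    Roughly, one parameterized body now covers every algorithm; a minimal sketch under assumed names (fixture name, image path and algorithm list are illustrative):

        typedef perf::TestBaseWithParam<std::string> feature2d_detect;

        PERF_TEST_P(feature2d_detect, detect, testing::Values("ORB", "AKAZE", "KAZE", "BRISK"))
        {
            cv::Mat img = cv::imread(getDataPath("stitching/a1.png"), cv::IMREAD_GRAYSCALE);
            ASSERT_FALSE(img.empty());

            std::string name = GetParam();
            cv::Ptr<cv::Feature2D> detector;
            if      (name == "ORB")   detector = cv::ORB::create();
            else if (name == "AKAZE") detector = cv::AKAZE::create();
            else if (name == "KAZE")  detector = cv::KAZE::create();
            else                      detector = cv::BRISK::create();

            std::vector<cv::KeyPoint> keypoints;
            TEST_CYCLE() detector->detect(img, keypoints);

            SANITY_CHECK_NOTHING();
        }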
    
    * added AKAZE tests
    
    * features2d: extend perf tests
    
    * add BRISK, KAZE, MSER
    * run all extract tests on AKAZE keypoints, so that the test is more comparable for the speed of extraction (sketched below)
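    The idea, sketched with the public Feature2D API (function name, image path and the choice of extractor are illustrative): detect once with AKAZE, then time only the descriptor computation of each extractor on that shared keypoint set.

        #include <opencv2/features2d.hpp>
        #include <opencv2/imgcodecs.hpp>

        void benchmarkExtractionOnSharedKeypoints()
        {
            cv::Mat img = cv::imread("path/to/image.png", cv::IMREAD_GRAYSCALE);  // placeholder path

            // detect once with AKAZE ...
            std::vector<cv::KeyPoint> akazeKeypoints;
            cv::AKAZE::create()->detect(img, akazeKeypoints);

            // ... then time only the extraction step of each algorithm on those keypoints
            cv::Ptr<cv::Feature2D> extractor = cv::ORB::create();
            std::vector<cv::KeyPoint> kps = akazeKeypoints;  // compute() may drop keypoints, so use a copy
            cv::Mat descriptors;
            extractor->compute(img, kps, descriptors);
        }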
    
    * feature2d: rework opencl perf tests
    
    use the same configuration as the CPU tests (a sketch follows)
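    Hedged sketch of an OpenCL counterpart mirroring the CPU configuration (fixture name, path and algorithm list are illustrative; the OCL_* helpers come from the ts module and run the body with OpenCL enabled):

        typedef ::perf::TestBaseWithParam<std::string> Feature2D_OCL;

        OCL_PERF_TEST_P(Feature2D_OCL, detect, testing::Values("ORB", "AKAZE"))
        {
            cv::UMat img;
            cv::imread(getDataPath("stitching/a1.png"), cv::IMREAD_GRAYSCALE).copyTo(img);

            cv::Ptr<cv::Feature2D> detector =
                GetParam() == "ORB" ? cv::Ptr<cv::Feature2D>(cv::ORB::create())
                                    : cv::Ptr<cv::Feature2D>(cv::AKAZE::create());

            std::vector<cv::KeyPoint> keypoints;
            OCL_TEST_CYCLE() detector->detect(img, keypoints);

            SANITY_CHECK_NOTHING();
        }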
    
    * feature2d: fix descriptors allocation for AKAZE and KAZE
    
    fix crash when descriptors are UMat
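    One common shape for this kind of fix (a sketch, not the actual patch): compute into a plain Mat and let copyTo() fill the caller's OutputArray, which works whether the caller passed a Mat or a UMat.

        void computeDescriptorsSketch(const std::vector<cv::KeyPoint>& kpts, cv::OutputArray descriptors)
        {
            // compute into a temporary Mat first ...
            cv::Mat desc((int)kpts.size(), 64, CV_32F);
            // ... fill desc row by row ...

            // ... then let copyTo allocate and fill the caller's Mat or UMat
            desc.copyTo(descriptors);
        }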
    
    * feature2d: name enum to fix build with older gcc
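    For reference, the kind of change meant here (identifiers are illustrative): giving a previously anonymous enum a name so that older gcc accepts its use in templated code.

        // before: anonymous enum; some older gcc versions reject its values
        // where a named type is expected (e.g. as template arguments)
        // enum { DETECTOR_FAST, DETECTOR_ORB };

        // after: same values, but the enum has a name
        enum DetectorKind { DETECTOR_FAST, DETECTOR_ORB };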
    
    * Revert "ts: expand arguments before stringifications in CV_ENUM and CV_FLAGS"
    
    This reverts commit 19538cac1e45b0cec98190cf06a5ecb07d9b596e.
    
    This wasn't a great idea after all. There are a lot of flags implemented as #define that we don't want to expand.
    
    * feature2d: fix expansion problems with CV_ENUM in perf
    
    * expand arguments before passing them to CV_ENUM; this does not require modifying CV_ENUM itself
    * added include guards to `perf_feature2d.hpp` (both changes are sketched below)
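    A sketch of both points with illustrative names (the guard name and the DETECTORS_LIST macro are placeholders):

        // include guard added to the shared perf header
        #ifndef __OPENCV_PERF_FEATURE2D_HPP__
        #define __OPENCV_PERF_FEATURE2D_HPP__
        // ... shared fixtures, typedefs, CV_ENUM declarations ...
        #endif

        // expanding a macro argument *before* it reaches CV_ENUM, without touching
        // CV_ENUM itself: the extra macro level expands DETECTORS_LIST on substitution
        #define DETECTORS_LIST DETECTOR_ORB, DETECTOR_AKAZE
        #define DECLARE_DETECTOR_ENUM(list) CV_ENUM(DetectorType, list)
        DECLARE_DETECTOR_ENUM(DETECTORS_LIST)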
    
    * feature2d: fix crash in AKAZE when using KAZE descriptors
    
    * out-of-bound access in Get_MSURF_Descriptor_64 (a generic guard is sketched below)
    * this happened reliably when running on provided keypoints (not computed by the same instance)
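    A generic sketch of the kind of guard involved (not the actual Get_MSURF_Descriptor_64 code): clamp sample coordinates to the image before reading.

        #include <algorithm>

        // clamp a sample position to valid image coordinates before dereferencing
        static inline float sampleClamped(const cv::Mat& img, int y, int x)
        {
            y = std::min(std::max(y, 0), img.rows - 1);
            x = std::min(std::max(x, 0), img.cols - 1);
            return img.at<float>(y, x);
        }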
    
    * feature2d: added regression tests for AKAZE
    
    * test with both MLDB and KAZE descriptors (the API selection is sketched below)
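    For context, AKAZE can emit either its binary MLDB descriptors or float KAZE descriptors; selecting the two variants through the public API looks like this (function name and path are placeholders, regression checks omitted):

        void checkBothAkazeDescriptorTypes()
        {
            cv::Mat img = cv::imread("path/to/image.png", cv::IMREAD_GRAYSCALE);  // placeholder path

            cv::Ptr<cv::AKAZE> mldb = cv::AKAZE::create(cv::AKAZE::DESCRIPTOR_MLDB);  // default, binary (CV_8U)
            cv::Ptr<cv::AKAZE> kaze = cv::AKAZE::create(cv::AKAZE::DESCRIPTOR_KAZE);  // float (CV_32F)

            std::vector<cv::KeyPoint> kpts;
            cv::Mat desc;
            mldb->detectAndCompute(img, cv::noArray(), kpts, desc);
            // ... regression checks on kpts/desc would go here ...
            kaze->detectAndCompute(img, cv::noArray(), kpts, desc);
            // ... and here for the KAZE-descriptor variant ...
        }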
    
    * feature2d: do not compute keypoints orientation twice
    
    * always compute keypoint orientation when computing keypoints
    * do not recompute keypoint orientation when computing descriptors
    
    this allows testing detection and extraction separately (sketched below)
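    From the caller's perspective (a sketch using the public API; the function name is illustrative and `img` is assumed to be a loaded grayscale image): detect() now returns keypoints whose angle is already set, and compute() on those keypoints does not re-estimate it, so the two stages can be timed independently.

        void timeDetectAndExtractSeparately(const cv::Mat& img)
        {
            cv::Ptr<cv::AKAZE> akaze = cv::AKAZE::create();

            std::vector<cv::KeyPoint> kpts;
            akaze->detect(img, kpts);        // keypoints come back with .angle assigned

            cv::Mat desc;
            akaze->compute(img, kpts, desc); // orientation is reused, not recomputed
        }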
    
    * features2d: fix crash in AKAZE
    
    * out-of-bound reads near the image edge
    * same as the bug in KAZE descriptors
    
    * feature2d: refactor invariance testing
    
    * split detectors and descriptors tests
    * rewrite to Google Test to simplify debugging (a skeleton is sketched below)
    * add tests for AKAZE and one test for ORB
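    A hedged skeleton of what such a gtest-style rotation-invariance check can look like (test name, image path and the matching step are placeholders, not the actual test code):

        TEST(Features2d_RotationInvariance_Detector_AKAZE, sketch)
        {
            cv::Mat img = cv::imread("path/to/features2d/tsukuba.png", cv::IMREAD_GRAYSCALE);
            ASSERT_FALSE(img.empty());

            cv::Ptr<cv::Feature2D> f2d = cv::AKAZE::create();
            std::vector<cv::KeyPoint> kp0, kp1;
            f2d->detect(img, kp0);

            // rotate the image by 45 degrees around its center and detect again
            cv::Mat R = cv::getRotationMatrix2D(cv::Point2f(img.cols * 0.5f, img.rows * 0.5f), 45, 1.0);
            cv::Mat rotated;
            cv::warpAffine(img, rotated, R, img.size());
            f2d->detect(rotated, kp1);

            // the real test matches the two keypoint sets and checks positions/angles;
            // this skeleton only asserts that both runs produce keypoints
            EXPECT_GT(kp0.size(), 0u);
            EXPECT_GT(kp1.size(), 0u);
        }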
    
    * stitching: add tests with AKAZE feature finder
    
    * added basic stitching cpu and ocl tests
    * fix bug in AKAZE wrapper for stitching pipeline (the underlying lifetime issue is illustrated below) causing lots of
    ! OPENCV warning: getUMat()/getMat() call chain possible problem.
    !                 Base object is dead, while nested/derived object is still alive or processed.
    !                 Please check lifetime of UMat/Mat objects!
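    A minimal illustration of the lifetime problem behind that warning (not the actual wrapper code): a Mat view obtained via getMat() must not outlive the UMat it maps; copying the data out avoids the dangling view.

        // problematic pattern: the returned Mat refers to storage owned by a UMat
        // that is destroyed when the function returns
        cv::Mat badDescriptors()
        {
            cv::UMat udesc(100, 64, CV_32F);
            // ... fill udesc ...
            return udesc.getMat(cv::ACCESS_READ);   // triggers the getUMat()/getMat() warning
        }

        // safe pattern: copy into a Mat that owns its data
        cv::Mat goodDescriptors()
        {
            cv::UMat udesc(100, 64, CV_32F);
            // ... fill udesc ...
            cv::Mat desc;
            udesc.copyTo(desc);
            return desc;
        }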
perf_stich.cpp
#include "perf_precomp.hpp"
#include "opencv2/imgcodecs.hpp"
#include "opencv2/opencv_modules.hpp"

#include "opencv2/core/ocl.hpp"

using namespace std;
using namespace cv;
using namespace perf;
using std::tr1::tuple;
using std::tr1::get;

#define SURF_MATCH_CONFIDENCE 0.65f
#define ORB_MATCH_CONFIDENCE  0.3f
#define WORK_MEGAPIX 0.6

typedef TestBaseWithParam<string> stitch;
typedef TestBaseWithParam<tuple<string, string> > stitchDatasets;

#ifdef HAVE_OPENCV_XFEATURES2D
#define TEST_DETECTORS testing::Values("surf", "orb", "akaze")
#else
#define TEST_DETECTORS testing::Values("orb", "akaze")
#endif
#define AFFINE_DATASETS testing::Values("s", "budapest", "newspaper", "prague")

PERF_TEST_P(stitch, a123, TEST_DETECTORS)
{
    Mat pano;

    vector<Mat> imgs;
    imgs.push_back( imread( getDataPath("stitching/a1.png") ) );
    imgs.push_back( imread( getDataPath("stitching/a2.png") ) );
    imgs.push_back( imread( getDataPath("stitching/a3.png") ) );

    Ptr<detail::FeaturesFinder> featuresFinder = getFeatureFinder(GetParam());

    Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
            ? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
            : makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);

    declare.time(30 * 20).iterations(20);

    while(next())
    {
        Stitcher stitcher = Stitcher::createDefault();
        stitcher.setFeaturesFinder(featuresFinder);
        stitcher.setFeaturesMatcher(featuresMatcher);
        stitcher.setWarper(makePtr<SphericalWarper>());
        stitcher.setRegistrationResol(WORK_MEGAPIX);

        startTimer();
        stitcher.stitch(imgs, pano);
        stopTimer();
    }

    EXPECT_NEAR(pano.size().width, 1182, 50);
    EXPECT_NEAR(pano.size().height, 682, 30);

    SANITY_CHECK_NOTHING();
}

PERF_TEST_P(stitch, b12, TEST_DETECTORS)
{
    Mat pano;

    vector<Mat> imgs;
    imgs.push_back( imread( getDataPath("stitching/b1.png") ) );
    imgs.push_back( imread( getDataPath("stitching/b2.png") ) );

    Ptr<detail::FeaturesFinder> featuresFinder = getFeatureFinder(GetParam());

    Ptr<detail::FeaturesMatcher> featuresMatcher = GetParam() == "orb"
            ? makePtr<detail::BestOf2NearestMatcher>(false, ORB_MATCH_CONFIDENCE)
            : makePtr<detail::BestOf2NearestMatcher>(false, SURF_MATCH_CONFIDENCE);

    declare.time(30 * 20).iterations(20);

    while(next())
    {
        Stitcher stitcher = Stitcher::createDefault();
        stitcher.setFeaturesFinder(featuresFinder);
        stitcher.setFeaturesMatcher(featuresMatcher);
        stitcher.setWarper(makePtr<SphericalWarper>());
        stitcher.setRegistrationResol(WORK_MEGAPIX);

        startTimer();
        stitcher.stitch(imgs, pano);
        stopTimer();
    }

    EXPECT_NEAR(pano.size().width, 1117, 50);
    EXPECT_NEAR(pano.size().height, 642, 30);

    SANITY_CHECK_NOTHING();
}

PERF_TEST_P(stitchDatasets, affine, testing::Combine(AFFINE_DATASETS, TEST_DETECTORS))
{
    string dataset = get<0>(GetParam());
    string detector = get<1>(GetParam());

    Mat pano;
    vector<Mat> imgs;
    int width, height, allowed_diff = 10;
    Ptr<detail::FeaturesFinder> featuresFinder = getFeatureFinder(detector);

    if(dataset == "budapest")
    {
        imgs.push_back(imread(getDataPath("stitching/budapest1.jpg")));
        imgs.push_back(imread(getDataPath("stitching/budapest2.jpg")));
        imgs.push_back(imread(getDataPath("stitching/budapest3.jpg")));
        imgs.push_back(imread(getDataPath("stitching/budapest4.jpg")));
        imgs.push_back(imread(getDataPath("stitching/budapest5.jpg")));
        imgs.push_back(imread(getDataPath("stitching/budapest6.jpg")));
        width = 2313;
        height = 1158;
        // this dataset is big, the results between surf and orb differ slightly,
        // but both are still good
        allowed_diff = 27;
    }
    else if (dataset == "newspaper")
    {
        imgs.push_back(imread(getDataPath("stitching/newspaper1.jpg")));
        imgs.push_back(imread(getDataPath("stitching/newspaper2.jpg")));
        imgs.push_back(imread(getDataPath("stitching/newspaper3.jpg")));
        imgs.push_back(imread(getDataPath("stitching/newspaper4.jpg")));
        width = 1791;
        height = 1136;
        // we need to boost ORB number of features to be able to stitch this dataset
        // SURF works just fine with default settings
        if(detector == "orb")
            featuresFinder = makePtr<detail::OrbFeaturesFinder>(Size(3,1), 3000);
    }
    else if (dataset == "prague")
    {
        imgs.push_back(imread(getDataPath("stitching/prague1.jpg")));
        imgs.push_back(imread(getDataPath("stitching/prague2.jpg")));
        width = 983;
        height = 1759;
    }
    else // dataset == "s"
    {
        imgs.push_back(imread(getDataPath("stitching/s1.jpg")));
        imgs.push_back(imread(getDataPath("stitching/s2.jpg")));
        width = 1815;
        height = 700;
    }

    declare.time(30 * 20).iterations(20);

    while(next())
    {
        Ptr<Stitcher> stitcher = Stitcher::create(Stitcher::SCANS, false);
        stitcher->setFeaturesFinder(featuresFinder);

        if (cv::ocl::useOpenCL())
            cv::theRNG() = cv::RNG(12345); // prevent fails of Windows OpenCL builds (see #8294)

        startTimer();
        stitcher->stitch(imgs, pano);
        stopTimer();
    }

    EXPECT_NEAR(pano.size().width, width, allowed_diff);
    EXPECT_NEAR(pano.size().height, height, allowed_diff);

    SANITY_CHECK_NOTHING();
}