Commit 1d9808e5 authored by Alexander Alekhin, committed by Andrey Pavlenko

TAPI: stitching, replaced Mat->UMat/_Array

parent 8b6c5ade
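Context for the change: the point of the T-API migration is that the stitching pipeline can now carry its images as cv::UMat, so OpenCL-capable devices are used transparently through the InputArray/OutputArray layer. A minimal usage sketch from the caller's side (the file names and the explicit cv::ocl::setUseOpenCL() call are illustrative assumptions, not part of this commit):

#include <opencv2/opencv.hpp>
#include <opencv2/core/ocl.hpp>
#include <opencv2/stitching.hpp>

int main()
{
    cv::ocl::setUseOpenCL(true);                     // assumption: opt in to the OpenCL path

    // Load inputs and copy them into UMats; the rest of the pipeline can then stay on the device.
    std::vector<cv::UMat> imgs(3);
    for (int i = 0; i < 3; ++i)
    {
        cv::Mat m = cv::imread(cv::format("img%d.jpg", i + 1));   // hypothetical file names
        if (m.empty()) return 1;
        m.copyTo(imgs[i]);
    }

    cv::Mat pano;
    cv::Stitcher stitcher = cv::Stitcher::createDefault(false);
    if (stitcher.stitch(imgs, pano) != cv::Stitcher::OK)          // InputArrayOfArrays accepts vector<UMat>
        return 1;
    cv::imwrite("pano.jpg", pano);
    return 0;
}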
......@@ -218,6 +218,9 @@ public:
virtual void release() const;
virtual void clear() const;
virtual void setTo(const _InputArray& value, const _InputArray & mask = _InputArray()) const;
void assign(const UMat& u) const;
void assign(const Mat& m) const;
};
......
......@@ -2592,6 +2592,43 @@ void _OutputArray::setTo(const _InputArray& arr, const _InputArray & mask) const
CV_Error(Error::StsNotImplemented, "");
}
void _OutputArray::assign(const UMat& u) const
{
int k = kind();
if (k == UMAT)
{
*(UMat*)obj = u;
}
else if (k == MAT)
{
u.copyTo(*(Mat*)obj); // TODO check u.getMat()
}
else
{
CV_Error(Error::StsNotImplemented, "");
}
}
void _OutputArray::assign(const Mat& m) const
{
int k = kind();
if (k == UMAT)
{
m.copyTo(*(UMat*)obj); // TODO check m.getUMat()
}
else if (k == MAT)
{
*(Mat*)obj = m;
}
else
{
CV_Error(Error::StsNotImplemented, "");
}
}
static _InputOutputArray _none;
InputOutputArray noArray() { return _none; }
......
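The two assign() overloads added above let a function that produced its result in either a Mat or a UMat hand it back through an OutputArray without knowing which kind the caller bound; Stitcher::composePanorama() further down uses exactly this via pano.assign(pano_). A small sketch with a hypothetical helper:

#include <opencv2/core.hpp>

// Hypothetical helper that always computes into a UMat internally.
static void computeSomething(cv::InputArray src, cv::OutputArray dst)
{
    cv::UMat tmp;
    cv::add(src, src, tmp);   // stand-in for any computation whose result lives in a UMat
    dst.assign(tmp);          // UMat destination: header assignment; Mat destination: copy
}

int main()
{
    cv::Mat  in = cv::Mat::ones(4, 4, CV_8UC1);
    cv::Mat  out_m;
    cv::UMat out_u;
    computeSomething(in, out_m);   // kind() == MAT  -> u.copyTo(*(Mat*)obj)
    computeSomething(in, out_u);   // kind() == UMAT -> *(UMat*)obj = u
    return 0;
}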
......@@ -98,8 +98,8 @@ public:
void setFeaturesMatcher(Ptr<detail::FeaturesMatcher> features_matcher)
{ features_matcher_ = features_matcher; }
const cv::Mat& matchingMask() const { return matching_mask_; }
void setMatchingMask(const cv::Mat &mask)
const cv::UMat& matchingMask() const { return matching_mask_; }
void setMatchingMask(const cv::UMat &mask)
{
CV_Assert(mask.type() == CV_8U && mask.cols == mask.rows);
matching_mask_ = mask.clone();
......@@ -127,14 +127,14 @@ public:
const Ptr<detail::Blender> blender() const { return blender_; }
void setBlender(Ptr<detail::Blender> b) { blender_ = b; }
Status estimateTransform(InputArray images);
Status estimateTransform(InputArray images, const std::vector<std::vector<Rect> > &rois);
Status estimateTransform(InputArrayOfArrays images);
Status estimateTransform(InputArrayOfArrays images, const std::vector<std::vector<Rect> > &rois);
Status composePanorama(OutputArray pano);
Status composePanorama(InputArray images, OutputArray pano);
Status composePanorama(InputArrayOfArrays images, OutputArray pano);
Status stitch(InputArray images, OutputArray pano);
Status stitch(InputArray images, const std::vector<std::vector<Rect> > &rois, OutputArray pano);
Status stitch(InputArrayOfArrays images, OutputArray pano);
Status stitch(InputArrayOfArrays images, const std::vector<std::vector<Rect> > &rois, OutputArray pano);
std::vector<int> component() const { return indices_; }
std::vector<detail::CameraParams> cameras() const { return cameras_; }
......@@ -152,7 +152,7 @@ private:
double conf_thresh_;
Ptr<detail::FeaturesFinder> features_finder_;
Ptr<detail::FeaturesMatcher> features_matcher_;
cv::Mat matching_mask_;
cv::UMat matching_mask_;
Ptr<detail::BundleAdjusterBase> bundle_adjuster_;
bool do_wave_correct_;
detail::WaveCorrectKind wave_correct_kind_;
......@@ -161,12 +161,12 @@ private:
Ptr<detail::SeamFinder> seam_finder_;
Ptr<detail::Blender> blender_;
std::vector<cv::Mat> imgs_;
std::vector<cv::UMat> imgs_;
std::vector<std::vector<cv::Rect> > rois_;
std::vector<cv::Size> full_img_sizes_;
std::vector<detail::ImageFeatures> features_;
std::vector<detail::MatchesInfo> pairwise_matches_;
std::vector<cv::Mat> seam_est_imgs_;
std::vector<cv::UMat> seam_est_imgs_;
std::vector<int> indices_;
std::vector<detail::CameraParams> cameras_;
double work_scale_;
......
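With the switch from InputArray to InputArrayOfArrays, the two-stage Stitcher interface accepts a std::vector<cv::UMat> (or of Mat) directly. A sketch of the split registration/compositing call, assuming the images have already been loaded into UMats:

#include <opencv2/stitching.hpp>

// Hypothetical wrapper around the two-stage API.
cv::Mat stitchTwoStage(const std::vector<cv::UMat>& imgs)
{
    cv::Stitcher stitcher = cv::Stitcher::createDefault(/*try_use_gpu=*/false);

    // Stage 1: registration (features, matching, camera estimation).
    if (stitcher.estimateTransform(imgs) != cv::Stitcher::OK)
        return cv::Mat();

    // Stage 2: compositing; pano may be a Mat or a UMat since it goes through OutputArray.
    cv::Mat pano;
    if (stitcher.composePanorama(pano) != cv::Stitcher::OK)
        return cv::Mat();
    return pano;
}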
......@@ -60,11 +60,11 @@ public:
void prepare(const std::vector<Point> &corners, const std::vector<Size> &sizes);
virtual void prepare(Rect dst_roi);
virtual void feed(const Mat &img, const Mat &mask, Point tl);
virtual void blend(Mat &dst, Mat &dst_mask);
virtual void feed(InputArray img, InputArray mask, Point tl);
virtual void blend(InputOutputArray dst, InputOutputArray dst_mask);
protected:
Mat dst_, dst_mask_;
UMat dst_, dst_mask_;
Rect dst_roi_;
};
......@@ -78,18 +78,18 @@ public:
void setSharpness(float val) { sharpness_ = val; }
void prepare(Rect dst_roi);
void feed(const Mat &img, const Mat &mask, Point tl);
void blend(Mat &dst, Mat &dst_mask);
void feed(InputArray img, InputArray mask, Point tl);
void blend(InputOutputArray dst, InputOutputArray dst_mask);
// Creates weight maps for fixed set of source images by their masks and top-left corners.
// Final image can be obtained by simple weighting of the source images.
Rect createWeightMaps(const std::vector<Mat> &masks, const std::vector<Point> &corners,
std::vector<Mat> &weight_maps);
Rect createWeightMaps(const std::vector<UMat> &masks, const std::vector<Point> &corners,
std::vector<UMat> &weight_maps);
private:
float sharpness_;
Mat weight_map_;
Mat dst_weight_map_;
UMat weight_map_;
UMat dst_weight_map_;
};
inline FeatherBlender::FeatherBlender(float _sharpness) { setSharpness(_sharpness); }
......@@ -104,13 +104,13 @@ public:
void setNumBands(int val) { actual_num_bands_ = val; }
void prepare(Rect dst_roi);
void feed(const Mat &img, const Mat &mask, Point tl);
void blend(Mat &dst, Mat &dst_mask);
void feed(InputArray img, InputArray mask, Point tl);
void blend(InputOutputArray dst, InputOutputArray dst_mask);
private:
int actual_num_bands_, num_bands_;
std::vector<Mat> dst_pyr_laplace_;
std::vector<Mat> dst_band_weights_;
std::vector<UMat> dst_pyr_laplace_;
std::vector<UMat> dst_band_weights_;
Rect dst_roi_final_;
bool can_use_gpu_;
int weight_type_; //CV_32F or CV_16S
......@@ -120,16 +120,16 @@ private:
//////////////////////////////////////////////////////////////////////////////
// Auxiliary functions
void CV_EXPORTS normalizeUsingWeightMap(const Mat& weight, Mat& src);
void CV_EXPORTS normalizeUsingWeightMap(InputArray weight, InputOutputArray src);
void CV_EXPORTS createWeightMap(const Mat& mask, float sharpness, Mat& weight);
void CV_EXPORTS createWeightMap(InputArray mask, float sharpness, InputOutputArray weight);
void CV_EXPORTS createLaplacePyr(const Mat &img, int num_levels, std::vector<Mat>& pyr);
void CV_EXPORTS createLaplacePyrGpu(const Mat &img, int num_levels, std::vector<Mat>& pyr);
void CV_EXPORTS createLaplacePyr(InputArray img, int num_levels, std::vector<UMat>& pyr);
void CV_EXPORTS createLaplacePyrGpu(InputArray img, int num_levels, std::vector<UMat>& pyr);
// Restores source image
void CV_EXPORTS restoreImageFromLaplacePyr(std::vector<Mat>& pyr);
void CV_EXPORTS restoreImageFromLaplacePyrGpu(std::vector<Mat>& pyr);
void CV_EXPORTS restoreImageFromLaplacePyr(std::vector<UMat>& pyr);
void CV_EXPORTS restoreImageFromLaplacePyrGpu(std::vector<UMat>& pyr);
} // namespace detail
} // namespace cv
......
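The blender API now goes through InputArray/InputOutputArray, so warped images and masks can be fed as UMats and the result collected into a UMat. A rough sketch of the prepare/feed/blend cycle using detail::FeatherBlender (the corner offset is a placeholder value):

#include <opencv2/stitching/detail/blenders.hpp>

// Blend two already-warped images with known top-left corners.
cv::UMat blendPair(const cv::UMat& img1, const cv::UMat& mask1,
                   const cv::UMat& img2, const cv::UMat& mask2)
{
    std::vector<cv::Point> corners;
    corners.push_back(cv::Point(0, 0));
    corners.push_back(cv::Point(100, 0));              // placeholder offset of the second image
    std::vector<cv::Size> sizes;
    sizes.push_back(img1.size());
    sizes.push_back(img2.size());

    cv::Ptr<cv::detail::Blender> blender = cv::makePtr<cv::detail::FeatherBlender>(0.02f);
    blender->prepare(corners, sizes);

    // feed() expects CV_16SC3 images, matching the convertTo in Stitcher::composePanorama.
    cv::UMat img1_s, img2_s;
    img1.convertTo(img1_s, CV_16S);
    img2.convertTo(img2_s, CV_16S);
    blender->feed(img1_s, mask1, corners[0]);
    blender->feed(img2_s, mask2, corners[1]);

    cv::UMat result, result_mask;
    blender->blend(result, result_mask);               // 16-bit result; convertTo(CV_8U) for display
    return result;
}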
......@@ -56,29 +56,29 @@ public:
enum { NO, GAIN, GAIN_BLOCKS };
static Ptr<ExposureCompensator> createDefault(int type);
void feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<Mat> &masks);
virtual void feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<std::pair<Mat,uchar> > &masks) = 0;
virtual void apply(int index, Point corner, Mat &image, const Mat &mask) = 0;
void feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<UMat> &masks);
virtual void feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<std::pair<UMat,uchar> > &masks) = 0;
virtual void apply(int index, Point corner, InputOutputArray image, InputArray mask) = 0;
};
class CV_EXPORTS NoExposureCompensator : public ExposureCompensator
{
public:
void feed(const std::vector<Point> &/*corners*/, const std::vector<Mat> &/*images*/,
const std::vector<std::pair<Mat,uchar> > &/*masks*/) { }
void apply(int /*index*/, Point /*corner*/, Mat &/*image*/, const Mat &/*mask*/) { }
void feed(const std::vector<Point> &/*corners*/, const std::vector<UMat> &/*images*/,
const std::vector<std::pair<UMat,uchar> > &/*masks*/) { }
void apply(int /*index*/, Point /*corner*/, InputOutputArray /*image*/, InputArray /*mask*/) { }
};
class CV_EXPORTS GainCompensator : public ExposureCompensator
{
public:
void feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<std::pair<Mat,uchar> > &masks);
void apply(int index, Point corner, Mat &image, const Mat &mask);
void feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<std::pair<UMat,uchar> > &masks);
void apply(int index, Point corner, InputOutputArray image, InputArray mask);
std::vector<double> gains() const;
private:
......@@ -91,9 +91,9 @@ class CV_EXPORTS BlocksGainCompensator : public ExposureCompensator
public:
BlocksGainCompensator(int bl_width = 32, int bl_height = 32)
: bl_width_(bl_width), bl_height_(bl_height) {}
void feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<std::pair<Mat,uchar> > &masks);
void apply(int index, Point corner, Mat &image, const Mat &mask);
void feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<std::pair<UMat,uchar> > &masks);
void apply(int index, Point corner, InputOutputArray image, InputArray mask);
private:
int bl_width_, bl_height_;
......
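ExposureCompensator likewise now consumes vector<UMat> and applies the correction through InputOutputArray, so a single compensator serves both Mat and UMat pipelines. A sketch of feed() followed by apply() on warped images:

#include <opencv2/stitching/detail/exposure_compensate.hpp>

void compensate(const std::vector<cv::Point>& corners,
                std::vector<cv::UMat>& images_warped,
                const std::vector<cv::UMat>& masks_warped)
{
    cv::Ptr<cv::detail::ExposureCompensator> comp =
        cv::detail::ExposureCompensator::createDefault(cv::detail::ExposureCompensator::GAIN);

    // Estimate per-image gains from the overlapping regions.
    comp->feed(corners, images_warped, masks_warped);

    // Apply them in place; the images stay UMats, apply() receives an InputOutputArray.
    for (size_t i = 0; i < images_warped.size(); ++i)
        comp->apply(static_cast<int>(i), corners[i], images_warped[i], masks_warped[i]);
}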
......@@ -60,7 +60,7 @@ struct CV_EXPORTS ImageFeatures
int img_idx;
Size img_size;
std::vector<KeyPoint> keypoints;
Mat descriptors;
UMat descriptors;
};
......@@ -68,12 +68,12 @@ class CV_EXPORTS FeaturesFinder
{
public:
virtual ~FeaturesFinder() {}
void operator ()(const Mat &image, ImageFeatures &features);
void operator ()(const Mat &image, ImageFeatures &features, const std::vector<cv::Rect> &rois);
void operator ()(InputArray image, ImageFeatures &features);
void operator ()(InputArray image, ImageFeatures &features, const std::vector<cv::Rect> &rois);
virtual void collectGarbage() {}
protected:
virtual void find(const Mat &image, ImageFeatures &features) = 0;
virtual void find(InputArray image, ImageFeatures &features) = 0;
};
......@@ -84,7 +84,7 @@ public:
int num_octaves_descr = /*4*/3, int num_layers_descr = /*2*/4);
private:
void find(const Mat &image, ImageFeatures &features);
void find(InputArray image, ImageFeatures &features);
Ptr<FeatureDetector> detector_;
Ptr<DescriptorExtractor> extractor_;
......@@ -97,7 +97,7 @@ public:
OrbFeaturesFinder(Size _grid_size = Size(3,1), int nfeatures=1500, float scaleFactor=1.3f, int nlevels=5);
private:
void find(const Mat &image, ImageFeatures &features);
void find(InputArray image, ImageFeatures &features);
Ptr<ORB> orb;
Size grid_size;
......@@ -114,7 +114,7 @@ public:
void collectGarbage();
private:
void find(const Mat &image, ImageFeatures &features);
void find(InputArray image, ImageFeatures &features);
cuda::GpuMat image_;
cuda::GpuMat gray_image_;
......@@ -151,7 +151,7 @@ public:
MatchesInfo& matches_info) { match(features1, features2, matches_info); }
void operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const cv::Mat &mask = cv::Mat());
const cv::UMat &mask = cv::UMat());
bool isThreadSafe() const { return is_thread_safe_; }
......
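On the feature side, finders take InputArray and store descriptors in a UMat, so frames can be passed as UMats end to end. A sketch with OrbFeaturesFinder; BestOf2NearestMatcher is assumed here as the usual concrete FeaturesMatcher and is not part of this diff:

#include <opencv2/stitching/detail/matchers.hpp>

std::vector<cv::detail::MatchesInfo> matchAll(const std::vector<cv::UMat>& imgs)
{
    cv::detail::OrbFeaturesFinder finder;                 // defaults: 3x1 grid, 1500 features
    std::vector<cv::detail::ImageFeatures> features(imgs.size());
    for (size_t i = 0; i < imgs.size(); ++i)
    {
        finder(imgs[i], features[i]);                     // InputArray accepts a UMat directly
        features[i].img_idx = static_cast<int>(i);
    }
    finder.collectGarbage();

    std::vector<cv::detail::MatchesInfo> pairwise_matches;
    cv::detail::BestOf2NearestMatcher matcher;            // assumed concrete matcher
    matcher(features, pairwise_matches, cv::UMat());      // empty UMat mask = match all pairs
    return pairwise_matches;
}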
......@@ -54,32 +54,32 @@ class CV_EXPORTS SeamFinder
{
public:
virtual ~SeamFinder() {}
virtual void find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks) = 0;
virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks) = 0;
};
class CV_EXPORTS NoSeamFinder : public SeamFinder
{
public:
void find(const std::vector<Mat>&, const std::vector<Point>&, std::vector<Mat>&) {}
void find(const std::vector<UMat>&, const std::vector<Point>&, std::vector<UMat>&) {}
};
class CV_EXPORTS PairwiseSeamFinder : public SeamFinder
{
public:
virtual void find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks);
virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks);
protected:
void run();
virtual void findInPair(size_t first, size_t second, Rect roi) = 0;
std::vector<Mat> images_;
std::vector<UMat> images_;
std::vector<Size> sizes_;
std::vector<Point> corners_;
std::vector<Mat> masks_;
std::vector<UMat> masks_;
};
......@@ -87,7 +87,7 @@ class CV_EXPORTS VoronoiSeamFinder : public PairwiseSeamFinder
{
public:
virtual void find(const std::vector<Size> &size, const std::vector<Point> &corners,
std::vector<Mat> &masks);
std::vector<UMat> &masks);
private:
void findInPair(size_t first, size_t second, Rect roi);
};
......@@ -103,8 +103,8 @@ public:
CostFunction costFunction() const { return costFunc_; }
void setCostFunction(CostFunction val) { costFunc_ = val; }
virtual void find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks);
virtual void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks);
private:
enum ComponentState
......@@ -154,7 +154,7 @@ private:
};
void process(
const Mat &image1, const Mat &image2, Point tl1, Point tl2, Mat &mask1, Mat &mask2);
const Mat &image1, const Mat &image2, Point tl1, Point tl2, Mat &mask1, Mat &mask2);
void findComponents();
......@@ -217,8 +217,8 @@ public:
~GraphCutSeamFinder();
void find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks);
void find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks);
private:
// To avoid GCGraph dependency
......@@ -236,8 +236,8 @@ public:
: cost_type_(cost_type), terminal_cost_(terminal_cost),
bad_region_penalty_(bad_region_penalty) {}
void find(const std::vector<cv::Mat> &src, const std::vector<cv::Point> &corners,
std::vector<cv::Mat> &masks);
void find(const std::vector<cv::UMat> &src, const std::vector<cv::Point> &corners,
std::vector<cv::UMat> &masks);
void findInPair(size_t first, size_t second, Rect roi);
private:
......
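Seam finders now take vector<UMat> for both the warped source images and the masks they update. A brief sketch of running GraphCutSeamFinder on CV_32F warped images (the COST_COLOR constant is assumed from the existing GraphCutSeamFinderBase enum, which this diff does not show):

#include <opencv2/stitching/detail/seam_finders.hpp>

void findSeams(const std::vector<cv::UMat>& images_warped,
               const std::vector<cv::Point>& corners,
               std::vector<cv::UMat>& masks_warped)
{
    // Seam estimation works on float images; convert the warped images first.
    std::vector<cv::UMat> images_warped_f(images_warped.size());
    for (size_t i = 0; i < images_warped.size(); ++i)
        images_warped[i].convertTo(images_warped_f[i], CV_32F);

    cv::Ptr<cv::detail::SeamFinder> seam_finder =
        cv::makePtr<cv::detail::GraphCutSeamFinder>(cv::detail::GraphCutSeamFinderBase::COST_COLOR);
    seam_finder->find(images_warped_f, corners, masks_warped);   // masks are updated in place
}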
......@@ -145,7 +145,7 @@ private:
// Auxiliary functions
CV_EXPORTS bool overlapRoi(Point tl1, Point tl2, Size sz1, Size sz2, Rect &roi);
CV_EXPORTS Rect resultRoi(const std::vector<Point> &corners, const std::vector<Mat> &images);
CV_EXPORTS Rect resultRoi(const std::vector<Point> &corners, const std::vector<UMat> &images);
CV_EXPORTS Rect resultRoi(const std::vector<Point> &corners, const std::vector<Size> &sizes);
CV_EXPORTS Point resultTl(const std::vector<Point> &corners);
......
This diff is collapsed.
......@@ -58,18 +58,18 @@ Ptr<ExposureCompensator> ExposureCompensator::createDefault(int type)
}
void ExposureCompensator::feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<Mat> &masks)
void ExposureCompensator::feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<UMat> &masks)
{
std::vector<std::pair<Mat,uchar> > level_masks;
std::vector<std::pair<UMat,uchar> > level_masks;
for (size_t i = 0; i < masks.size(); ++i)
level_masks.push_back(std::make_pair(masks[i], 255));
feed(corners, images, level_masks);
}
void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<std::pair<Mat,uchar> > &masks)
void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<std::pair<UMat,uchar> > &masks)
{
LOGLN("Exposure compensation...");
#if ENABLE_LOG
......@@ -93,11 +93,11 @@ void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<
Rect roi;
if (overlapRoi(corners[i], corners[j], images[i].size(), images[j].size(), roi))
{
subimg1 = images[i](Rect(roi.tl() - corners[i], roi.br() - corners[i]));
subimg2 = images[j](Rect(roi.tl() - corners[j], roi.br() - corners[j]));
subimg1 = images[i](Rect(roi.tl() - corners[i], roi.br() - corners[i])).getMat(ACCESS_READ);
subimg2 = images[j](Rect(roi.tl() - corners[j], roi.br() - corners[j])).getMat(ACCESS_READ);
submask1 = masks[i].first(Rect(roi.tl() - corners[i], roi.br() - corners[i]));
submask2 = masks[j].first(Rect(roi.tl() - corners[j], roi.br() - corners[j]));
submask1 = masks[i].first(Rect(roi.tl() - corners[i], roi.br() - corners[i])).getMat(ACCESS_READ);
submask2 = masks[j].first(Rect(roi.tl() - corners[j], roi.br() - corners[j])).getMat(ACCESS_READ);
intersect = (submask1 == masks[i].second) & (submask2 == masks[j].second);
N(i, j) = N(j, i) = std::max(1, countNonZero(intersect));
......@@ -145,9 +145,9 @@ void GainCompensator::feed(const std::vector<Point> &corners, const std::vector<
}
void GainCompensator::apply(int index, Point /*corner*/, Mat &image, const Mat &/*mask*/)
void GainCompensator::apply(int index, Point /*corner*/, InputOutputArray image, InputArray /*mask*/)
{
image *= gains_(index, 0);
multiply(image, gains_(index, 0), image);
}
......@@ -160,8 +160,8 @@ std::vector<double> GainCompensator::gains() const
}
void BlocksGainCompensator::feed(const std::vector<Point> &corners, const std::vector<Mat> &images,
const std::vector<std::pair<Mat,uchar> > &masks)
void BlocksGainCompensator::feed(const std::vector<Point> &corners, const std::vector<UMat> &images,
const std::vector<std::pair<UMat,uchar> > &masks)
{
CV_Assert(corners.size() == images.size() && images.size() == masks.size());
......@@ -169,8 +169,8 @@ void BlocksGainCompensator::feed(const std::vector<Point> &corners, const std::v
std::vector<Size> bl_per_imgs(num_images);
std::vector<Point> block_corners;
std::vector<Mat> block_images;
std::vector<std::pair<Mat,uchar> > block_masks;
std::vector<UMat> block_images;
std::vector<std::pair<UMat,uchar> > block_masks;
// Construct blocks for gain compensator
for (int img_idx = 0; img_idx < num_images; ++img_idx)
......@@ -220,8 +220,10 @@ void BlocksGainCompensator::feed(const std::vector<Point> &corners, const std::v
}
void BlocksGainCompensator::apply(int index, Point /*corner*/, Mat &image, const Mat &/*mask*/)
void BlocksGainCompensator::apply(int index, Point /*corner*/, InputOutputArray _image, InputArray /*mask*/)
{
Mat image = _image.getMat();
CV_Assert(image.type() == CV_8UC3);
Mat_<float> gain_map;
......
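A detail worth noting in the exposure_compensate.cpp hunks: a UMat supports the same ROI operator as Mat, but per-element CPU work still needs an explicit map through getMat() with an access flag, which is why the sub-image extraction gains .getMat(ACCESS_READ). A minimal illustration of the pattern:

#include <algorithm>
#include <opencv2/core.hpp>

void roiAccessExample(const cv::UMat& image)
{
    // Take a device-side ROI header; no data transfer happens yet.
    cv::Rect r(0, 0, std::min(16, image.cols), std::min(16, image.rows));
    cv::UMat roi = image(r);

    // Map it for CPU reading; keep the Mat view alive only as long as needed.
    cv::Mat view = roi.getMat(cv::ACCESS_READ);
    double s = cv::sum(view)[0];   // stand-in for any per-pixel CPU work
    (void)s;
}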
......@@ -264,14 +264,14 @@ void GpuMatcher::collectGarbage()
namespace cv {
namespace detail {
void FeaturesFinder::operator ()(const Mat &image, ImageFeatures &features)
void FeaturesFinder::operator ()(InputArray image, ImageFeatures &features)
{
find(image, features);
features.img_size = image.size();
}
void FeaturesFinder::operator ()(const Mat &image, ImageFeatures &features, const std::vector<Rect> &rois)
void FeaturesFinder::operator ()(InputArray image, ImageFeatures &features, const std::vector<Rect> &rois)
{
std::vector<ImageFeatures> roi_features(rois.size());
size_t total_kps_count = 0;
......@@ -279,7 +279,7 @@ void FeaturesFinder::operator ()(const Mat &image, ImageFeatures &features, cons
for (size_t i = 0; i < rois.size(); ++i)
{
find(image(rois[i]), roi_features[i]);
find(image.getUMat()(rois[i]), roi_features[i]);
total_kps_count += roi_features[i].keypoints.size();
total_descriptors_height += roi_features[i].descriptors.rows;
}
......@@ -300,7 +300,7 @@ void FeaturesFinder::operator ()(const Mat &image, ImageFeatures &features, cons
features.keypoints[kp_idx].pt.x += (float)rois[i].x;
features.keypoints[kp_idx].pt.y += (float)rois[i].y;
}
Mat subdescr = features.descriptors.rowRange(
UMat subdescr = features.descriptors.rowRange(
descr_offset, descr_offset + roi_features[i].descriptors.rows);
roi_features[i].descriptors.copyTo(subdescr);
descr_offset += roi_features[i].descriptors.rows;
......@@ -337,9 +337,9 @@ SurfFeaturesFinder::SurfFeaturesFinder(double hess_thresh, int num_octaves, int
}
}
void SurfFeaturesFinder::find(const Mat &image, ImageFeatures &features)
void SurfFeaturesFinder::find(InputArray image, ImageFeatures &features)
{
Mat gray_image;
UMat gray_image;
CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC1));
if(image.type() == CV_8UC3)
{
......@@ -347,7 +347,7 @@ void SurfFeaturesFinder::find(const Mat &image, ImageFeatures &features)
}
else
{
gray_image = image;
gray_image = image.getUMat();
}
if (!surf)
{
......@@ -356,7 +356,7 @@ void SurfFeaturesFinder::find(const Mat &image, ImageFeatures &features)
}
else
{
Mat descriptors;
UMat descriptors;
(*surf)(gray_image, Mat(), features.keypoints, descriptors);
features.descriptors = descriptors.reshape(1, (int)features.keypoints.size());
}
......@@ -368,9 +368,9 @@ OrbFeaturesFinder::OrbFeaturesFinder(Size _grid_size, int n_features, float scal
orb = makePtr<ORB>(n_features * (99 + grid_size.area())/100/grid_size.area(), scaleFactor, nlevels);
}
void OrbFeaturesFinder::find(const Mat &image, ImageFeatures &features)
void OrbFeaturesFinder::find(InputArray image, ImageFeatures &features)
{
Mat gray_image;
UMat gray_image;
CV_Assert((image.type() == CV_8UC3) || (image.type() == CV_8UC4) || (image.type() == CV_8UC1));
......@@ -379,7 +379,7 @@ void OrbFeaturesFinder::find(const Mat &image, ImageFeatures &features)
} else if (image.type() == CV_8UC4) {
cvtColor(image, gray_image, COLOR_BGRA2GRAY);
} else if (image.type() == CV_8UC1) {
gray_image=image;
gray_image = image.getUMat();
} else {
CV_Error(Error::StsUnsupportedFormat, "");
}
......@@ -392,7 +392,8 @@ void OrbFeaturesFinder::find(const Mat &image, ImageFeatures &features)
features.descriptors.release();
std::vector<KeyPoint> points;
Mat descriptors;
Mat _descriptors;
UMat descriptors;
for (int r = 0; r < grid_size.height; ++r)
for (int c = 0; c < grid_size.width; ++c)
......@@ -408,13 +409,13 @@ void OrbFeaturesFinder::find(const Mat &image, ImageFeatures &features)
// << " xl=" << xl << ", xr=" << xr << ", gray_image.data=" << ((size_t)gray_image.data) << ", "
// << "gray_image.dims=" << gray_image.dims << "\n");
Mat gray_image_part=gray_image(Range(yl, yr), Range(xl, xr));
UMat gray_image_part=gray_image(Range(yl, yr), Range(xl, xr));
// LOGLN("OrbFeaturesFinder::find: gray_image_part.empty=" << (gray_image_part.empty()?"true":"false") << ", "
// << " gray_image_part.size()=(" << gray_image_part.size().width << "x" << gray_image_part.size().height << "), "
// << " gray_image_part.dims=" << gray_image_part.dims << ", "
// << " gray_image_part.data=" << ((size_t)gray_image_part.data) << "\n");
(*orb)(gray_image_part, Mat(), points, descriptors);
(*orb)(gray_image_part, UMat(), points, descriptors);
features.keypoints.reserve(features.keypoints.size() + points.size());
for (std::vector<KeyPoint>::iterator kp = points.begin(); kp != points.end(); ++kp)
......@@ -423,8 +424,10 @@ void OrbFeaturesFinder::find(const Mat &image, ImageFeatures &features)
kp->pt.y += yl;
features.keypoints.push_back(*kp);
}
features.descriptors.push_back(descriptors);
_descriptors.push_back(descriptors.getMat(ACCESS_READ));
}
features.descriptors = _descriptors.getUMat(ACCESS_READ);
}
}
......@@ -442,7 +445,7 @@ SurfFeaturesFinderGpu::SurfFeaturesFinderGpu(double hess_thresh, int num_octaves
}
void SurfFeaturesFinderGpu::find(const Mat &image, ImageFeatures &features)
void SurfFeaturesFinderGpu::find(InputArray image, ImageFeatures &features)
{
CV_Assert(image.depth() == CV_8U);
......@@ -499,12 +502,12 @@ const MatchesInfo& MatchesInfo::operator =(const MatchesInfo &other)
//////////////////////////////////////////////////////////////////////////////
void FeaturesMatcher::operator ()(const std::vector<ImageFeatures> &features, std::vector<MatchesInfo> &pairwise_matches,
const Mat &mask)
const UMat &mask)
{
const int num_images = static_cast<int>(features.size());
CV_Assert(mask.empty() || (mask.type() == CV_8U && mask.cols == num_images && mask.rows));
Mat_<uchar> mask_(mask);
Mat_<uchar> mask_(mask.getMat(ACCESS_READ));
if (mask_.empty())
mask_ = Mat::ones(num_images, num_images, CV_8U);
......
......@@ -46,8 +46,8 @@
namespace cv {
namespace detail {
void PairwiseSeamFinder::find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks)
void PairwiseSeamFinder::find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks)
{
LOGLN("Finding seams...");
if (src.size() == 0)
......@@ -84,7 +84,7 @@ void PairwiseSeamFinder::run()
void VoronoiSeamFinder::find(const std::vector<Size> &sizes, const std::vector<Point> &corners,
std::vector<Mat> &masks)
std::vector<UMat> &masks)
{
LOGLN("Finding seams...");
if (sizes.size() == 0)
......@@ -110,7 +110,7 @@ void VoronoiSeamFinder::findInPair(size_t first, size_t second, Rect roi)
Mat submask2(roi.height + 2 * gap, roi.width + 2 * gap, CV_8U);
Size img1 = sizes_[first], img2 = sizes_[second];
Mat mask1 = masks_[first], mask2 = masks_[second];
Mat mask1 = masks_[first].getMat(ACCESS_READ), mask2 = masks_[second].getMat(ACCESS_READ);
Point tl1 = corners_[first], tl2 = corners_[second];
// Cut submasks with some gap
......@@ -160,7 +160,7 @@ void VoronoiSeamFinder::findInPair(size_t first, size_t second, Rect roi)
DpSeamFinder::DpSeamFinder(CostFunction costFunc) : costFunc_(costFunc) {}
void DpSeamFinder::find(const std::vector<Mat> &src, const std::vector<Point> &corners, std::vector<Mat> &masks)
void DpSeamFinder::find(const std::vector<UMat> &src, const std::vector<Point> &corners, std::vector<UMat> &masks)
{
LOGLN("Finding seams...");
#if ENABLE_LOG
......@@ -176,13 +176,18 @@ void DpSeamFinder::find(const std::vector<Mat> &src, const std::vector<Point> &c
for (size_t j = i+1; j < src.size(); ++j)
pairs.push_back(std::make_pair(i, j));
sort(pairs.begin(), pairs.end(), ImagePairLess(src, corners));
{
std::vector<Mat> _src(src.size());
for (size_t i = 0; i < src.size(); ++i) _src[i] = src[i].getMat(ACCESS_READ);
sort(pairs.begin(), pairs.end(), ImagePairLess(_src, corners));
}
std::reverse(pairs.begin(), pairs.end());
for (size_t i = 0; i < pairs.size(); ++i)
{
size_t i0 = pairs[i].first, i1 = pairs[i].second;
process(src[i0], src[i1], corners[i0], corners[i1], masks[i0], masks[i1]);
Mat mask0 = masks[i0].getMat(ACCESS_RW), mask1 = masks[i1].getMat(ACCESS_RW);
process(src[i0].getMat(ACCESS_READ), src[i1].getMat(ACCESS_READ), corners[i0], corners[i1], mask0, mask1);
}
LOGLN("Finding seams, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
......@@ -1055,7 +1060,7 @@ public:
~Impl() {}
void find(const std::vector<Mat> &src, const std::vector<Point> &corners, std::vector<Mat> &masks);
void find(const std::vector<UMat> &src, const std::vector<Point> &corners, std::vector<UMat> &masks);
void findInPair(size_t first, size_t second, Rect roi);
private:
......@@ -1072,8 +1077,8 @@ private:
};
void GraphCutSeamFinder::Impl::find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks)
void GraphCutSeamFinder::Impl::find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks)
{
// Compute gradients
dx_.resize(src.size());
......@@ -1207,10 +1212,10 @@ void GraphCutSeamFinder::Impl::setGraphWeightsColorGrad(
void GraphCutSeamFinder::Impl::findInPair(size_t first, size_t second, Rect roi)
{
Mat img1 = images_[first], img2 = images_[second];
Mat img1 = images_[first].getMat(ACCESS_READ), img2 = images_[second].getMat(ACCESS_READ);
Mat dx1 = dx_[first], dx2 = dx_[second];
Mat dy1 = dy_[first], dy2 = dy_[second];
Mat mask1 = masks_[first], mask2 = masks_[second];
Mat mask1 = masks_[first].getMat(ACCESS_RW), mask2 = masks_[second].getMat(ACCESS_RW);
Point tl1 = corners_[first], tl2 = corners_[second];
const int gap = 10;
......@@ -1309,16 +1314,16 @@ GraphCutSeamFinder::GraphCutSeamFinder(int cost_type, float terminal_cost, float
GraphCutSeamFinder::~GraphCutSeamFinder() {}
void GraphCutSeamFinder::find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks)
void GraphCutSeamFinder::find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks)
{
impl_->find(src, corners, masks);
}
#ifdef HAVE_OPENCV_CUDA
void GraphCutSeamFinderGpu::find(const std::vector<Mat> &src, const std::vector<Point> &corners,
std::vector<Mat> &masks)
void GraphCutSeamFinderGpu::find(const std::vector<UMat> &src, const std::vector<Point> &corners,
std::vector<UMat> &masks)
{
// Compute gradients
dx_.resize(src.size());
......@@ -1350,10 +1355,10 @@ void GraphCutSeamFinderGpu::find(const std::vector<Mat> &src, const std::vector<
void GraphCutSeamFinderGpu::findInPair(size_t first, size_t second, Rect roi)
{
Mat img1 = images_[first], img2 = images_[second];
Mat img1 = images_[first].getMat(ACCESS_READ), img2 = images_[second].getMat(ACCESS_READ);
Mat dx1 = dx_[first], dx2 = dx_[second];
Mat dy1 = dy_[first], dy2 = dy_[second];
Mat mask1 = masks_[first], mask2 = masks_[second];
Mat mask1 = masks_[first].getMat(ACCESS_READ), mask2 = masks_[second].getMat(ACCESS_READ);
Point tl1 = corners_[first], tl2 = corners_[second];
const int gap = 10;
......
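The seam-finder hunks show both access flags in use: source images are mapped with ACCESS_READ, while the masks that findInPair() modifies are mapped with ACCESS_RW so the edits propagate back to the underlying UMat. A small sketch of the read-modify pattern (the thresholding is only an illustration):

#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

void touchMask(std::vector<cv::UMat>& masks, size_t idx)
{
    // ACCESS_RW maps the UMat for reading and writing; changes land back in masks[idx].
    cv::Mat mask = masks[idx].getMat(cv::ACCESS_RW);
    cv::threshold(mask, mask, 127, 255, cv::THRESH_BINARY);   // illustrative in-place edit
}   // the mapping is released when 'mask' goes out of scope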
......@@ -86,15 +86,15 @@ Stitcher Stitcher::createDefault(bool try_use_gpu)
}
Stitcher::Status Stitcher::estimateTransform(InputArray images)
Stitcher::Status Stitcher::estimateTransform(InputArrayOfArrays images)
{
return estimateTransform(images, std::vector<std::vector<Rect> >());
}
Stitcher::Status Stitcher::estimateTransform(InputArray images, const std::vector<std::vector<Rect> > &rois)
Stitcher::Status Stitcher::estimateTransform(InputArrayOfArrays images, const std::vector<std::vector<Rect> > &rois)
{
images.getMatVector(imgs_);
images.getUMatVector(imgs_);
rois_ = rois;
Status status;
......@@ -112,21 +112,21 @@ Stitcher::Status Stitcher::estimateTransform(InputArray images, const std::vecto
Stitcher::Status Stitcher::composePanorama(OutputArray pano)
{
return composePanorama(std::vector<Mat>(), pano);
return composePanorama(std::vector<UMat>(), pano);
}
Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
Stitcher::Status Stitcher::composePanorama(InputArrayOfArrays images, OutputArray pano)
{
LOGLN("Warping images (auxiliary)... ");
std::vector<Mat> imgs;
images.getMatVector(imgs);
std::vector<UMat> imgs;
images.getUMatVector(imgs);
if (!imgs.empty())
{
CV_Assert(imgs.size() == imgs_.size());
Mat img;
UMat img;
seam_est_imgs_.resize(imgs.size());
for (size_t i = 0; i < imgs.size(); ++i)
......@@ -136,8 +136,8 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
seam_est_imgs_[i] = img.clone();
}
std::vector<Mat> seam_est_imgs_subset;
std::vector<Mat> imgs_subset;
std::vector<UMat> seam_est_imgs_subset;
std::vector<UMat> imgs_subset;
for (size_t i = 0; i < indices_.size(); ++i)
{
......@@ -149,17 +149,17 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
imgs_ = imgs_subset;
}
Mat &pano_ = pano.getMatRef();
UMat pano_;
#if ENABLE_LOG
int64 t = getTickCount();
#endif
std::vector<Point> corners(imgs_.size());
std::vector<Mat> masks_warped(imgs_.size());
std::vector<Mat> images_warped(imgs_.size());
std::vector<UMat> masks_warped(imgs_.size());
std::vector<UMat> images_warped(imgs_.size());
std::vector<Size> sizes(imgs_.size());
std::vector<Mat> masks(imgs_.size());
std::vector<UMat> masks(imgs_.size());
// Prepare image masks
for (size_t i = 0; i < imgs_.size(); ++i)
......@@ -185,7 +185,7 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
w->warp(masks[i], K, cameras_[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]);
}
std::vector<Mat> images_warped_f(imgs_.size());
std::vector<UMat> images_warped_f(imgs_.size());
for (size_t i = 0; i < imgs_.size(); ++i)
images_warped[i].convertTo(images_warped_f[i], CV_32F);
......@@ -206,8 +206,8 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
t = getTickCount();
#endif
Mat img_warped, img_warped_s;
Mat dilated_mask, seam_mask, mask, mask_warped;
UMat img_warped, img_warped_s;
UMat dilated_mask, seam_mask, mask, mask_warped;
//double compose_seam_aspect = 1;
double compose_work_aspect = 1;
......@@ -216,7 +216,7 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
double compose_scale = 1;
bool is_compose_scale_set = false;
Mat full_img, img;
UMat full_img, img;
for (size_t img_idx = 0; img_idx < imgs_.size(); ++img_idx)
{
LOGLN("Compositing image #" << indices_[img_idx] + 1);
......@@ -290,7 +290,7 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
dilate(masks_warped[img_idx], dilated_mask, Mat());
resize(dilated_mask, seam_mask, mask_warped.size());
mask_warped = seam_mask & mask_warped;
bitwise_and(seam_mask, mask_warped, mask_warped);
if (!is_blender_prepared)
{
......@@ -302,7 +302,7 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
blender_->feed(img_warped_s, mask_warped, corners[img_idx]);
}
Mat result, result_mask;
UMat result, result_mask;
blender_->blend(result, result_mask);
LOGLN("Compositing, time: " << ((getTickCount() - t) / getTickFrequency()) << " sec");
......@@ -311,11 +311,13 @@ Stitcher::Status Stitcher::composePanorama(InputArray images, OutputArray pano)
// so convert it to avoid user confusing
result.convertTo(pano_, CV_8U);
pano.assign(pano_);
return OK;
}
Stitcher::Status Stitcher::stitch(InputArray images, OutputArray pano)
Stitcher::Status Stitcher::stitch(InputArrayOfArrays images, OutputArray pano)
{
Status status = estimateTransform(images);
if (status != OK)
......@@ -324,7 +326,7 @@ Stitcher::Status Stitcher::stitch(InputArray images, OutputArray pano)
}
Stitcher::Status Stitcher::stitch(InputArray images, const std::vector<std::vector<Rect> > &rois, OutputArray pano)
Stitcher::Status Stitcher::stitch(InputArrayOfArrays images, const std::vector<std::vector<Rect> > &rois, OutputArray pano)
{
Status status = estimateTransform(images, rois);
if (status != OK)
......@@ -346,7 +348,7 @@ Stitcher::Status Stitcher::matchImages()
seam_scale_ = 1;
bool is_work_scale_set = false;
bool is_seam_scale_set = false;
Mat full_img, img;
UMat full_img, img;
features_.resize(imgs_.size());
seam_est_imgs_.resize(imgs_.size());
full_img_sizes_.resize(imgs_.size());
......@@ -420,8 +422,8 @@ Stitcher::Status Stitcher::matchImages()
// Leave only images we are sure are from the same panorama
indices_ = detail::leaveBiggestComponent(features_, pairwise_matches_, (float)conf_thresh_);
std::vector<Mat> seam_est_imgs_subset;
std::vector<Mat> imgs_subset;
std::vector<UMat> seam_est_imgs_subset;
std::vector<UMat> imgs_subset;
std::vector<Size> full_img_sizes_subset;
for (size_t i = 0; i < indices_.size(); ++i)
{
......
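One behavioural detail in composePanorama(): the expression mask_warped = seam_mask & mask_warped is replaced by bitwise_and(), since MatExpr operators exist only for Mat, while bitwise_and() goes through InputArray and therefore also accepts UMat. Equivalent sketch:

#include <opencv2/core.hpp>

// Combine a seam mask with a warp mask without MatExpr operators, so the call
// works for cv::UMat as well as cv::Mat and can stay on the OpenCL device.
void combineMasks(const cv::UMat& seam_mask, cv::UMat& mask_warped)
{
    cv::bitwise_and(seam_mask, mask_warped, mask_warped);
}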
......@@ -113,7 +113,7 @@ bool overlapRoi(Point tl1, Point tl2, Size sz1, Size sz2, Rect &roi)
}
Rect resultRoi(const std::vector<Point> &corners, const std::vector<Mat> &images)
Rect resultRoi(const std::vector<Point> &corners, const std::vector<UMat> &images)
{
std::vector<Size> sizes(images.size());
for (size_t i = 0; i < images.size(); ++i)
......
......@@ -348,7 +348,9 @@ int main(int argc, char* argv[])
int64 app_start_time = getTickCount();
#endif
#if 0
cv::setBreakOnError(true);
#endif
int retval = parseCmdArgs(argc, argv);
if (retval)
......@@ -554,10 +556,10 @@ int main(int argc, char* argv[])
#endif
vector<Point> corners(num_images);
vector<Mat> masks_warped(num_images);
vector<Mat> images_warped(num_images);
vector<UMat> masks_warped(num_images);
vector<UMat> images_warped(num_images);
vector<Size> sizes(num_images);
vector<Mat> masks(num_images);
vector<UMat> masks(num_images);
// Preapre images masks
for (int i = 0; i < num_images; ++i)
......@@ -645,7 +647,7 @@ int main(int argc, char* argv[])
warper->warp(masks[i], K, cameras[i].R, INTER_NEAREST, BORDER_CONSTANT, masks_warped[i]);
}
vector<Mat> images_warped_f(num_images);
vector<UMat> images_warped_f(num_images);
for (int i = 0; i < num_images; ++i)
images_warped[i].convertTo(images_warped_f[i], CV_32F);
......