Commit dbce1558 authored by Alexey Spizhevoy's avatar Alexey Spizhevoy

Refactored motion estimators in stitching module

parent 4a5abc75
...@@ -80,29 +80,61 @@ private: ...@@ -80,29 +80,61 @@ private:
}; };
// Minimizes reprojection error class CV_EXPORTS BundleAdjusterBase : public Estimator
class CV_EXPORTS BundleAdjusterReproj : public Estimator
{ {
public: public:
BundleAdjusterReproj(float conf_thresh = 1.f) : conf_thresh_(conf_thresh) {} double confThresh() const { return conf_thresh_; }
void setConfThresh(double conf_thresh) { conf_thresh_ = conf_thresh; }
private: protected:
void estimate(const std::vector<ImageFeatures> &features, const std::vector<MatchesInfo> &pairwise_matches, BundleAdjusterBase(int num_params_per_cam, int num_errs_per_measurement)
std::vector<CameraParams> &cameras); : num_params_per_cam_(num_params_per_cam),
num_errs_per_measurement_(num_errs_per_measurement)
{ setConfThresh(1.); }
void calcError(Mat &err); // Runs bundle adjustment
void calcJacobian(); virtual void estimate(const std::vector<ImageFeatures> &features,
const std::vector<MatchesInfo> &pairwise_matches,
std::vector<CameraParams> &cameras);
virtual void setUpInitialCameraParams(const std::vector<CameraParams> &cameras) = 0;
virtual void obtainRefinedCameraParams(std::vector<CameraParams> &cameras) const = 0;
virtual void calcError(Mat &err) = 0;
virtual void calcJacobian(Mat &jac) = 0;
int num_images_; int num_images_;
int total_num_matches_; int total_num_matches_;
int num_params_per_cam_;
int num_errs_per_measurement_;
const ImageFeatures *features_; const ImageFeatures *features_;
const MatchesInfo *pairwise_matches_; const MatchesInfo *pairwise_matches_;
Mat cameras_;
// Threshold to filter out poorly matched image pairs
double conf_thresh_;
// Camera parameters matrix (CV_64F)
Mat cam_params_;
// Connected images pairs
std::vector<std::pair<int,int> > edges_; std::vector<std::pair<int,int> > edges_;
};
// Minimizes reprojection error
class CV_EXPORTS BundleAdjusterReproj : public BundleAdjusterBase
{
public:
BundleAdjusterReproj() : BundleAdjusterBase(6, 2) {}
private:
void setUpInitialCameraParams(const std::vector<CameraParams> &cameras);
void obtainRefinedCameraParams(std::vector<CameraParams> &cameras) const;
void calcError(Mat &err);
void calcJacobian(Mat &jac);
float conf_thresh_; Mat err1_, err2_;
Mat err_, err1_, err2_;
Mat J_;
}; };
......
...@@ -155,9 +155,9 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c ...@@ -155,9 +155,9 @@ void HomographyBasedEstimator::estimate(const vector<ImageFeatures> &features, c
////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////
void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features, void BundleAdjusterBase::estimate(const vector<ImageFeatures> &features,
const vector<MatchesInfo> &pairwise_matches, const vector<MatchesInfo> &pairwise_matches,
vector<CameraParams> &cameras) vector<CameraParams> &cameras)
{ {
LOG("Bundle adjustment"); LOG("Bundle adjustment");
int64 t = getTickCount(); int64 t = getTickCount();
...@@ -166,28 +166,9 @@ void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features, ...@@ -166,28 +166,9 @@ void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features,
features_ = &features[0]; features_ = &features[0];
pairwise_matches_ = &pairwise_matches[0]; pairwise_matches_ = &pairwise_matches[0];
// Prepare focals and rotations setUpInitialCameraParams(cameras);
cameras_.create(num_images_ * 6, 1, CV_64F);
SVD svd;
for (int i = 0; i < num_images_; ++i)
{
cameras_.at<double>(i * 6, 0) = cameras[i].focal;
cameras_.at<double>(i * 6 + 1, 0) = cameras[i].ppx;
cameras_.at<double>(i * 6 + 2, 0) = cameras[i].ppy;
svd(cameras[i].R, SVD::FULL_UV);
Mat R = svd.u * svd.vt;
if (determinant(R) < 0)
R *= -1;
Mat rvec;
Rodrigues(R, rvec); CV_Assert(rvec.type() == CV_32F);
cameras_.at<double>(i * 6 + 3, 0) = rvec.at<float>(0, 0);
cameras_.at<double>(i * 6 + 4, 0) = rvec.at<float>(1, 0);
cameras_.at<double>(i * 6 + 5, 0) = rvec.at<float>(2, 0);
}
// Select only consistent image pairs for further adjustment // Leave only consistent image pairs
edges_.clear(); edges_.clear();
for (int i = 0; i < num_images_ - 1; ++i) for (int i = 0; i < num_images_ - 1; ++i)
{ {
...@@ -202,63 +183,53 @@ void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features, ...@@ -202,63 +183,53 @@ void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features,
// Compute number of correspondences // Compute number of correspondences
total_num_matches_ = 0; total_num_matches_ = 0;
for (size_t i = 0; i < edges_.size(); ++i) for (size_t i = 0; i < edges_.size(); ++i)
total_num_matches_ += static_cast<int>(pairwise_matches[edges_[i].first * num_images_ + edges_[i].second].num_inliers); total_num_matches_ += static_cast<int>(pairwise_matches[edges_[i].first * num_images_ +
edges_[i].second].num_inliers);
CvLevMarq solver(num_images_ * 6, total_num_matches_ * 2, CvLevMarq solver(num_images_ * num_params_per_cam_,
total_num_matches_ * num_errs_per_measurement_,
cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1000, DBL_EPSILON)); cvTermCriteria(CV_TERMCRIT_EPS + CV_TERMCRIT_ITER, 1000, DBL_EPSILON));
CvMat matParams = cameras_; Mat err, jac;
CvMat matParams = cam_params_;
cvCopy(&matParams, solver.param); cvCopy(&matParams, solver.param);
int count = 0; int iter = 0;
for(;;) for(;;)
{ {
const CvMat* _param = 0; const CvMat* _param = 0;
CvMat* _J = 0; CvMat* _jac = 0;
CvMat* _err = 0; CvMat* _err = 0;
bool proceed = solver.update(_param, _J, _err); bool proceed = solver.update(_param, _jac, _err);
cvCopy( _param, &matParams ); cvCopy(_param, &matParams);
if( !proceed || !_err ) if (!proceed || !_err)
break; break;
if( _J ) if (_jac)
{ {
calcJacobian(); calcJacobian(jac);
CvMat matJ = J_; CvMat tmp = jac;
cvCopy( &matJ, _J ); cvCopy(&tmp, _jac);
} }
if (_err) if (_err)
{ {
calcError(err_); calcError(err);
LOG("."); LOG(".");
count++; iter++;
CvMat matErr = err_; CvMat tmp = err;
cvCopy( &matErr, _err ); cvCopy(&tmp, _err);
} }
} }
LOGLN(""); LOGLN("");
LOGLN("Bundle adjustment, final error: " << sqrt(err_.dot(err_)) / total_num_matches_); LOGLN("Bundle adjustment, final RMS error: " << sqrt(err.dot(err) / total_num_matches_));
LOGLN("Bundle adjustment, iterations done: " << count); LOGLN("Bundle adjustment, iterations done: " << iter);
// Obtain global motion obtainRefinedCameraParams(cameras);
for (int i = 0; i < num_images_; ++i)
{
cameras[i].focal = cameras_.at<double>(i * 6, 0);
cameras[i].ppx = cameras_.at<double>(i * 6 + 1, 0);
cameras[i].ppy = cameras_.at<double>(i * 6 + 2, 0);
Mat rvec(3, 1, CV_64F);
rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0);
rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0);
rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0);
Rodrigues(rvec, cameras[i].R);
Mat Mf;
cameras[i].R.convertTo(Mf, CV_32F);
cameras[i].R = Mf;
}
// Normalize motion to center image // Normalize motion to center image
Graph span_tree; Graph span_tree;
...@@ -272,6 +243,55 @@ void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features, ...@@ -272,6 +243,55 @@ void BundleAdjusterReproj::estimate(const vector<ImageFeatures> &features,
} }
//////////////////////////////////////////////////////////////////////////////
void BundleAdjusterReproj::setUpInitialCameraParams(const vector<CameraParams> &cameras)
{
cam_params_.create(num_images_ * 6, 1, CV_64F);
SVD svd;
for (int i = 0; i < num_images_; ++i)
{
cam_params_.at<double>(i * 6, 0) = cameras[i].focal;
cam_params_.at<double>(i * 6 + 1, 0) = cameras[i].ppx;
cam_params_.at<double>(i * 6 + 2, 0) = cameras[i].ppy;
svd(cameras[i].R, SVD::FULL_UV);
Mat R = svd.u * svd.vt;
if (determinant(R) < 0)
R *= -1;
Mat rvec;
Rodrigues(R, rvec);
CV_Assert(rvec.type() == CV_32F);
cam_params_.at<double>(i * 6 + 3, 0) = rvec.at<float>(0, 0);
cam_params_.at<double>(i * 6 + 4, 0) = rvec.at<float>(1, 0);
cam_params_.at<double>(i * 6 + 5, 0) = rvec.at<float>(2, 0);
}
}
// Unpacks the refined solver vector cam_params_ back into CameraParams:
// intrinsics (focal, ppx, ppy) plus a rotation rebuilt from the Rodrigues
// vector. Aspect is fixed at 1 — this adjuster does not refine it.
void BundleAdjusterReproj::obtainRefinedCameraParams(vector<CameraParams> &cameras) const
{
    for (int img_idx = 0; img_idx < num_images_; ++img_idx)
    {
        const int base = img_idx * 6;

        cameras[img_idx].focal = cam_params_.at<double>(base, 0);
        cameras[img_idx].ppx = cam_params_.at<double>(base + 1, 0);
        cameras[img_idx].ppy = cam_params_.at<double>(base + 2, 0);
        cameras[img_idx].aspect = 1.;

        Mat rvec(3, 1, CV_64F);
        for (int k = 0; k < 3; ++k)
            rvec.at<double>(k, 0) = cam_params_.at<double>(base + 3 + k, 0);
        Rodrigues(rvec, cameras[img_idx].R);

        // Store the rotation in single precision, matching what the rest
        // of the pipeline works with.
        Mat R32;
        cameras[img_idx].R.convertTo(R32, CV_32F);
        cameras[img_idx].R = R32;
    }
}
void BundleAdjusterReproj::calcError(Mat &err) void BundleAdjusterReproj::calcError(Mat &err)
{ {
err.create(total_num_matches_ * 2, 1, CV_64F); err.create(total_num_matches_ * 2, 1, CV_64F);
...@@ -281,26 +301,26 @@ void BundleAdjusterReproj::calcError(Mat &err) ...@@ -281,26 +301,26 @@ void BundleAdjusterReproj::calcError(Mat &err)
{ {
int i = edges_[edge_idx].first; int i = edges_[edge_idx].first;
int j = edges_[edge_idx].second; int j = edges_[edge_idx].second;
double f1 = cameras_.at<double>(i * 6, 0); double f1 = cam_params_.at<double>(i * 6, 0);
double f2 = cameras_.at<double>(j * 6, 0); double f2 = cam_params_.at<double>(j * 6, 0);
double ppx1 = cameras_.at<double>(i * 6 + 1, 0); double ppx1 = cam_params_.at<double>(i * 6 + 1, 0);
double ppx2 = cameras_.at<double>(j * 6 + 1, 0); double ppx2 = cam_params_.at<double>(j * 6 + 1, 0);
double ppy1 = cameras_.at<double>(i * 6 + 2, 0); double ppy1 = cam_params_.at<double>(i * 6 + 2, 0);
double ppy2 = cameras_.at<double>(j * 6 + 2, 0); double ppy2 = cam_params_.at<double>(j * 6 + 2, 0);
double R1[9]; double R1[9];
Mat R1_(3, 3, CV_64F, R1); Mat R1_(3, 3, CV_64F, R1);
Mat rvec(3, 1, CV_64F); Mat rvec(3, 1, CV_64F);
rvec.at<double>(0, 0) = cameras_.at<double>(i * 6 + 3, 0); rvec.at<double>(0, 0) = cam_params_.at<double>(i * 6 + 3, 0);
rvec.at<double>(1, 0) = cameras_.at<double>(i * 6 + 4, 0); rvec.at<double>(1, 0) = cam_params_.at<double>(i * 6 + 4, 0);
rvec.at<double>(2, 0) = cameras_.at<double>(i * 6 + 5, 0); rvec.at<double>(2, 0) = cam_params_.at<double>(i * 6 + 5, 0);
Rodrigues(rvec, R1_); Rodrigues(rvec, R1_);
double R2[9]; double R2[9];
Mat R2_(3, 3, CV_64F, R2); Mat R2_(3, 3, CV_64F, R2);
rvec.at<double>(0, 0) = cameras_.at<double>(j * 6 + 3, 0); rvec.at<double>(0, 0) = cam_params_.at<double>(j * 6 + 3, 0);
rvec.at<double>(1, 0) = cameras_.at<double>(j * 6 + 4, 0); rvec.at<double>(1, 0) = cam_params_.at<double>(j * 6 + 4, 0);
rvec.at<double>(2, 0) = cameras_.at<double>(j * 6 + 5, 0); rvec.at<double>(2, 0) = cam_params_.at<double>(j * 6 + 5, 0);
Rodrigues(rvec, R2_); Rodrigues(rvec, R2_);
const ImageFeatures& features1 = features_[i]; const ImageFeatures& features1 = features_[i];
...@@ -321,8 +341,8 @@ void BundleAdjusterReproj::calcError(Mat &err) ...@@ -321,8 +341,8 @@ void BundleAdjusterReproj::calcError(Mat &err)
{ {
if (!matches_info.inliers_mask[k]) if (!matches_info.inliers_mask[k])
continue; continue;
const DMatch& m = matches_info.matches[k];
const DMatch& m = matches_info.matches[k];
Point2f p1 = features1.keypoints[m.queryIdx].pt; Point2f p1 = features1.keypoints[m.queryIdx].pt;
Point2f p2 = features2.keypoints[m.trainIdx].pt; Point2f p2 = features2.keypoints[m.trainIdx].pt;
double x = H(0,0)*p1.x + H(0,1)*p1.y + H(0,2); double x = H(0,0)*p1.x + H(0,1)*p1.y + H(0,2);
...@@ -337,9 +357,9 @@ void BundleAdjusterReproj::calcError(Mat &err) ...@@ -337,9 +357,9 @@ void BundleAdjusterReproj::calcError(Mat &err)
} }
void BundleAdjusterReproj::calcJacobian() void BundleAdjusterReproj::calcJacobian(Mat &jac)
{ {
J_.create(total_num_matches_ * 2, num_images_ * 6, CV_64F); jac.create(total_num_matches_ * 2, num_images_ * 6, CV_64F);
double val; double val;
const double step = 1e-4; const double step = 1e-4;
...@@ -348,13 +368,13 @@ void BundleAdjusterReproj::calcJacobian() ...@@ -348,13 +368,13 @@ void BundleAdjusterReproj::calcJacobian()
{ {
for (int j = 0; j < 6; ++j) for (int j = 0; j < 6; ++j)
{ {
val = cameras_.at<double>(i * 6 + j, 0); val = cam_params_.at<double>(i * 6 + j, 0);
cameras_.at<double>(i * 6 + j, 0) = val - step; cam_params_.at<double>(i * 6 + j, 0) = val - step;
calcError(err1_); calcError(err1_);
cameras_.at<double>(i * 6 + j, 0) = val + step; cam_params_.at<double>(i * 6 + j, 0) = val + step;
calcError(err2_); calcError(err2_);
calcDeriv(err1_, err2_, 2 * step, J_.col(i * 6 + j)); calcDeriv(err1_, err2_, 2 * step, jac.col(i * 6 + j));
cameras_.at<double>(i * 6 + j, 0) = val; cam_params_.at<double>(i * 6 + j, 0) = val;
} }
} }
} }
......
...@@ -189,7 +189,8 @@ Stitcher::Status Stitcher::stitch(InputArray imgs_, OutputArray pano_) ...@@ -189,7 +189,8 @@ Stitcher::Status Stitcher::stitch(InputArray imgs_, OutputArray pano_)
LOGLN("Initial intrinsic parameters #" << indices[i]+1 << ":\n " << cameras[i].K()); LOGLN("Initial intrinsic parameters #" << indices[i]+1 << ":\n " << cameras[i].K());
} }
detail::BundleAdjusterReproj adjuster(conf_thresh_); detail::BundleAdjusterReproj adjuster;
adjuster.setConfThresh(conf_thresh_);
adjuster(features, pairwise_matches, cameras); adjuster(features, pairwise_matches, cameras);
// Find median focal length // Find median focal length
......
...@@ -413,7 +413,8 @@ int main(int argc, char* argv[]) ...@@ -413,7 +413,8 @@ int main(int argc, char* argv[])
LOGLN("Initial intrinsics #" << indices[i]+1 << ":\n" << cameras[i].K()); LOGLN("Initial intrinsics #" << indices[i]+1 << ":\n" << cameras[i].K());
} }
BundleAdjusterReproj adjuster(conf_thresh); BundleAdjusterReproj adjuster;
adjuster.setConfThresh(conf_thresh);
adjuster(features, pairwise_matches, cameras); adjuster(features, pairwise_matches, cameras);
// Find median focal length // Find median focal length
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment