/*M///////////////////////////////////////////////////////////////////////////////////////
//
//  IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
//  By downloading, copying, installing or using the software you agree to this license.
//  If you do not agree to this license, do not download, install,
//  copy or use the software.
//
//
//                         License Agreement
//                For Open Source Computer Vision Library
//
// Copyright (C) 2009, PhaseSpace Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
//   * Redistribution's of source code must retain the above copyright notice,
//     this list of conditions and the following disclaimer.
//
//   * Redistribution's in binary form must reproduce the above copyright notice,
//     this list of conditions and the following disclaimer in the documentation
//     and/or other materials provided with the distribution.
//
//   * The names of the copyright holders may not be used to endorse or promote products
//     derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/

#include "precomp.hpp"
#include "opencv2/calib3d.hpp"
#include "opencv2/contrib/compat.hpp"
#include "opencv2/calib3d/calib3d_c.h"
#include <iostream>

using namespace cv;

LevMarqSparse::LevMarqSparse() {
  Vis_index = X = prevP = P = deltaP = err = JtJ_diag = S = hX = NULL;
  U = ea = V = inv_V_star = eb = Yj = NULL;
  num_cams = 0; num_points = 0; num_err_param = 0;
  num_cam_param = 0; num_point_param = 0;
  A = B = W = NULL;
}

LevMarqSparse::~LevMarqSparse() {
  clear();
}

LevMarqSparse::LevMarqSparse(int npoints, // number of points
           int ncameras, // number of cameras
           int nPointParams, // number of params per one point  (3 in case of 3D points)
           int nCameraParams, // number of parameters per one camera
           int nErrParams, // number of parameters in measurement vector
           // for 1 point at one camera (2 in case of 2D projections)
           Mat& visibility, // visibility matrix. rows correspond to points, columns correspond to cameras
           // 1 - point is visible for the camera, 0 - invisible
           Mat& P0, // starting vector of parameters, first cameras then points
           Mat& X_, // measurements, in order of visibility. non visible cases are skipped
           TermCriteria _criteria, // termination criteria

           // callback for estimation of Jacobian matrices
           void (CV_CDECL * _fjac)(int i, int j, Mat& point_params,
                Mat& cam_params, Mat& A, Mat& B, void* data),
           // callback for estimation of backprojection errors
           void (CV_CDECL * _func)(int i, int j, Mat& point_params,
                Mat& cam_params, Mat& estim, void* data),
           void* _data, // user-specific data passed to the callbacks
           BundleAdjustCallback _cb, void* _user_data
           ) {
  Vis_index = X = prevP = P = deltaP = err = JtJ_diag = S = hX = NULL;
  U = ea = V = inv_V_star = eb = Yj = NULL;
  A = B = W = NULL;

  cb = _cb;
  user_data = _user_data;

  run(npoints, ncameras, nPointParams, nCameraParams, nErrParams, visibility,
      P0, X_, _criteria, _fjac, _func, _data);
}

void LevMarqSparse::clear() {
  for( int i = 0; i < num_points; i++ ) {
    for(int j = 0; j < num_cams; j++ ) {
      //CvMat* tmp = ((CvMat**)(A->data.ptr + i * A->step))[j];
      CvMat* tmp = A[j+i*num_cams];
      if (tmp)
  cvReleaseMat( &tmp );

      //tmp = ((CvMat**)(B->data.ptr + i * B->step))[j];
      tmp  = B[j+i*num_cams];
      if (tmp)
  cvReleaseMat( &tmp );

      //tmp = ((CvMat**)(W->data.ptr + j * W->step))[i];
      tmp  = W[j+i*num_cams];
      if (tmp)
  cvReleaseMat( &tmp );
    }
  }
  delete [] A; //cvReleaseMat(&A);
  delete [] B; //cvReleaseMat(&B);
  delete [] W; //cvReleaseMat(&W);
  cvReleaseMat( &Vis_index);

  for( int j = 0; j < num_cams; j++ ) {
    cvReleaseMat( &U[j] );
  }
  delete [] U;

  for( int j = 0; j < num_cams; j++ ) {
    cvReleaseMat( &ea[j] );
  }
  delete [] ea;

  //release V and inv_V_star
  for( int i = 0; i < num_points; i++ ) {
    cvReleaseMat(&V[i]);
    cvReleaseMat(&inv_V_star[i]);
  }
  delete [] V;
  delete [] inv_V_star;

  for( int i = 0; i < num_points; i++ ) {
    cvReleaseMat(&eb[i]);
  }
  delete [] eb;

  for( int i = 0; i < num_points; i++ ) {
    cvReleaseMat(&Yj[i]);
  }
  delete [] Yj;

  cvReleaseMat(&X);
  cvReleaseMat(&prevP);
  cvReleaseMat(&P);
  cvReleaseMat(&deltaP);

  cvReleaseMat(&err);

  cvReleaseMat(&JtJ_diag);
  cvReleaseMat(&S);
  cvReleaseMat(&hX);
}

//A params correspond to Cameras
//B params correspond to Points

//num_cameras  - total number of cameras
//num_points   - total number of points

//num_par_per_camera - number of parameters per camera
//num_par_per_point - number of parameters per point

//num_errors - number of measurements.
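
//In outline, run() and optimize() below build and solve the sparse normal equations of
//Levenberg-Marquardt bundle adjustment. For every visible pair (point i, camera j):
//  A_ij = d(proj_ij)/d(camera_j params)   (num_err_param x num_cam_param)
//  B_ij = d(proj_ij)/d(point_i params)    (num_err_param x num_point_param)
//and the blocks accumulated on each iteration are
//  U_j  = sum_i A_ij^T A_ij,   V_i  = sum_j B_ij^T B_ij,   W_ij = A_ij^T B_ij,
//  ea_j = sum_i A_ij^T e_ij,   eb_i = sum_j B_ij^T e_ij,
//where e_ij = x_ij - proj_ij is the reprojection error of point i in camera j.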

void LevMarqSparse::run( int num_points_, //number of points
       int num_cams_, //number of cameras
       int num_point_param_, //number of params per one point  (3 in case of 3D points)
       int num_cam_param_, //number of parameters per one camera
       int num_err_param_, //number of parameters in measurement vector for 1 point at one camera (2 in case of 2D projections)
       Mat& visibility,   //visibility matrix. rows correspond to points, columns correspond to cameras
       // 1 - point is visible for the camera, 0 - invisible
       Mat& P0, //starting vector of parameters, first cameras then points
       Mat& X_init, //measurements, in order of visibility. non visible cases are skipped
       TermCriteria criteria_init,
       void (*fjac_)(int i, int j, Mat& point_params, Mat& cam_params, Mat& A, Mat& B, void* data),
       void (*func_)(int i, int j, Mat& point_params, Mat& cam_params, Mat& estim, void* data),
       void* data_
       ) { //termination criteria
  //clear();

  func = func_; //assign evaluation function
  fjac = fjac_; //assign jacobian
  data = data_;

  num_cams = num_cams_;
  num_points = num_points_;
  num_err_param = num_err_param_;
  num_cam_param = num_cam_param_;
  num_point_param = num_point_param_;

  //compute all sizes
  int Aij_width = num_cam_param;
  int Aij_height = num_err_param;

  int Bij_width = num_point_param;
  int Bij_height = num_err_param;

  int U_size = Aij_width;
  int V_size = Bij_width;

  int Wij_height = Aij_width;
  int Wij_width = Bij_width;

  //allocate memory for all Aij, Bij, U, V, W

  //allocate num_points*num_cams matrices A

  //Allocate matrix A whose elements are pointers to Aij
  //if Aij is zero (point i is not visible in camera j) then A(i,j) contains NULL
  //A = cvCreateMat( num_points, num_cams, CV_32S /*pointer is stored here*/ );
  //B = cvCreateMat( num_points, num_cams, CV_32S /*pointer is stored here*/ );
  //W = cvCreateMat( num_cams, num_points, CV_32S /*pointer is stored here*/ );

  A = new CvMat* [num_points * num_cams];
  B = new CvMat* [num_points * num_cams];
  W = new CvMat* [num_cams * num_points];
  Vis_index = cvCreateMat( num_points, num_cams, CV_32S /*integer index is stored here*/ );
  //cvSetZero( A );
  //cvSetZero( B );
  //cvSetZero( W );
  cvSet( Vis_index, cvScalar(-1) );

  //fill matrices A and B based on visibility
  CvMat _vis = visibility;
  int index = 0;
  for (int i = 0; i < num_points; i++ ) {
    for (int j = 0; j < num_cams; j++ ) {
      if (((int*)(_vis.data.ptr+ i * _vis.step))[j] ) {
  ((int*)(Vis_index->data.ptr + i * Vis_index->step))[j] = index;
  index += num_err_param;

  //create matrices Aij, Bij
  CvMat* tmp = cvCreateMat(Aij_height, Aij_width, CV_64F );
  //((CvMat**)(A->data.ptr + i * A->step))[j] = tmp;
  cvSet(tmp,cvScalar(1.0,1.0,1.0,1.0));
  A[j+i*num_cams] = tmp;

  tmp = cvCreateMat( Bij_height, Bij_width, CV_64F );
  //((CvMat**)(B->data.ptr + i * B->step))[j] = tmp;
  cvSet(tmp,cvScalar(1.0,1.0,1.0,1.0));
  B[j+i*num_cams] = tmp;

  tmp = cvCreateMat( Wij_height, Wij_width, CV_64F );
  //((CvMat**)(W->data.ptr + j * W->step))[i] = tmp;  //note indices i and j swapped
  cvSet(tmp,cvScalar(1.0,1.0,1.0,1.0));
  W[j+i*num_cams] = tmp;
      } else {
  A[j+i*num_cams] = NULL;
  B[j+i*num_cams] = NULL;
  W[j+i*num_cams] = NULL;
      }
    }
  }

  //allocate U
  U = new CvMat* [num_cams];
  for (int j = 0; j < num_cams; j++ ) {
    U[j] = cvCreateMat( U_size, U_size, CV_64F );
    cvSetZero(U[j]);

  }
  //allocate ea
  ea = new CvMat* [num_cams];
  for (int j = 0; j < num_cams; j++ ) {
    ea[j] = cvCreateMat( U_size, 1, CV_64F );
    cvSetZero(ea[j]);
  }

  //allocate V and inv_V_star
  V = new CvMat* [num_points];
  inv_V_star = new CvMat* [num_points];
  for (int i = 0; i < num_points; i++ ) {
    V[i] = cvCreateMat( V_size, V_size, CV_64F );
    inv_V_star[i] = cvCreateMat( V_size, V_size, CV_64F );
    cvSetZero(V[i]);
    cvSetZero(inv_V_star[i]);
  }

  //allocate eb
  eb = new CvMat* [num_points];
  for (int i = 0; i < num_points; i++ ) {
    eb[i] = cvCreateMat( V_size, 1, CV_64F );
    cvSetZero(eb[i]);
  }

  //allocate Yj
  Yj = new CvMat* [num_points];
  for (int i = 0; i < num_points; i++ ) {
    Yj[i] = cvCreateMat( Wij_height, Wij_width, CV_64F );  //Yij has the same size as Wij
    cvSetZero(Yj[i]);
  }

  //allocate matrix S
  S = cvCreateMat( num_cams * num_cam_param, num_cams * num_cam_param, CV_64F);
  cvSetZero(S);
  JtJ_diag = cvCreateMat( num_cams * num_cam_param + num_points * num_point_param, 1, CV_64F );
  cvSetZero(JtJ_diag);

  //set starting parameters
  CvMat _tmp_ = CvMat(P0);
  prevP = cvCloneMat( &_tmp_ );
  P = cvCloneMat( &_tmp_ );
  deltaP = cvCloneMat( &_tmp_ );

  //set measurements
  _tmp_ = CvMat(X_init);
  X = cvCloneMat( &_tmp_ );
  //create vector for estimated measurements
  hX = cvCreateMat( X->rows, X->cols, CV_64F );
  cvSetZero(hX);
  //create error vector
  err = cvCreateMat( X->rows, X->cols, CV_64F );
  cvSetZero(err);
  ask_for_proj(_vis);
  //compute initial error
  cvSub(X, hX, err );

  /*
    assert(X->rows == hX->rows);
    std::cerr<<"X size = "<<X->rows<<" "<<X->cols<<std::endl;
    std::cerr<<"hX size = "<<hX->rows<<" "<<hX->cols<<std::endl;
    for (int j=0;j<X->rows;j+=2) {
    double Xj1 = *(double*)(X->data.ptr + j * X->step);
    double hXj1 = *(double*)(hX->data.ptr + j * hX->step);
    double err1 = *(double*)(err->data.ptr + j * err->step);
    double Xj2 = *(double*)(X->data.ptr + (j+1) * X->step);
    double hXj2 = *(double*)(hX->data.ptr + (j+1) * hX->step);
    double err2 = *(double*)(err->data.ptr + (j+1) * err->step);
    std::cerr<<"("<<Xj1<<","<<Xj2<<") -> ("<<hXj1<<","<<hXj2<<"). err = ("<<err1<<","<<err2<<")"<<std::endl;
    }
  */

  prevErrNorm = cvNorm( err, 0,  CV_L2 );
  //    std::cerr<<"prevErrNorm = "<<prevErrNorm<<std::endl;
  iters = 0;
  criteria = criteria_init;

  optimize(_vis);

  ask_for_proj(_vis,true);
  cvSub(X, hX, err );
  errNorm = cvNorm( err, 0,  CV_L2 );
}

void LevMarqSparse::ask_for_proj(CvMat &/*_vis*/,bool once) {
    (void)once;
    //given parameter P, compute measurement hX
    int ind = 0;
    for (int i = 0; i < num_points; i++ ) {
        CvMat point_mat;
        cvGetSubRect( P, &point_mat, cvRect( 0, num_cams * num_cam_param + num_point_param * i, 1, num_point_param ));
        for (int j = 0; j < num_cams; j++ ) {
            //CvMat* Aij = ((CvMat**)(A->data.ptr + A->step * i))[j];
            CvMat* Aij = A[j+i*num_cams];
            if (Aij ) { //visible
                CvMat cam_mat;
                cvGetSubRect( P, &cam_mat, cvRect( 0, j * num_cam_param, 1, num_cam_param ));
                CvMat measur_mat;
                cvGetSubRect( hX, &measur_mat, cvRect( 0, ind * num_err_param, 1, num_err_param ));
                Mat _point_mat = cv::cvarrToMat(&point_mat), _cam_mat = cv::cvarrToMat(&cam_mat), _measur_mat = cv::cvarrToMat(&measur_mat);
                func( i, j, _point_mat, _cam_mat, _measur_mat, data);
                assert( ind*num_err_param == ((int*)(Vis_index->data.ptr + i * Vis_index->step))[j]);
                ind+=1;
            }
        }
    }
}

//iteratively asks for Jacobians for every camera_point pair
void LevMarqSparse::ask_for_projac(CvMat &/*_vis*/)   //should be evaluated at point prevP
{
    // compute jacobians Aij and Bij
    for (int i = 0; i < num_points; i++ )
    {
        CvMat point_mat;
        cvGetSubRect( prevP, &point_mat, cvRect( 0, num_cams * num_cam_param + num_point_param * i, 1, num_point_param ));

        //CvMat** A_line = (CvMat**)(A->data.ptr + A->step * i);
        //CvMat** B_line = (CvMat**)(B->data.ptr + B->step * i);
        for( int j = 0; j < num_cams; j++ )
        {
            //CvMat* Aij = A_line[j];
            //if( Aij ) //Aij is not zero
            CvMat* Aij = A[j+i*num_cams];
            CvMat* Bij = B[j+i*num_cams];
            if(Aij)
            {
                //CvMat** A_line = (CvMat**)(A->data.ptr + A->step * i);
                //CvMat** B_line = (CvMat**)(B->data.ptr + B->step * i);

                //CvMat* Aij = A_line[j];
                //CvMat* Aij = ((CvMat**)(A->data.ptr + A->step * i))[j];

                CvMat cam_mat;
                cvGetSubRect( prevP, &cam_mat, cvRect( 0, j * num_cam_param, 1, num_cam_param ));

                //CvMat* Bij = B_line[j];
                //CvMat* Bij = ((CvMat**)(B->data.ptr + B->step * i))[j];
                Mat _point_mat = cv::cvarrToMat(&point_mat), _cam_mat = cv::cvarrToMat(&cam_mat), _Aij = cv::cvarrToMat(Aij), _Bij = cv::cvarrToMat(Bij);
                (*fjac)(i, j, _point_mat, _cam_mat, _Aij, _Bij, data);
            }
        }
    }
}
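
//Each iteration of optimize() performs one damped (Levenberg-Marquardt) Gauss-Newton step:
//  1. augment the diagonals of U_j and V_i with lambda (giving U*_j, V*_i);
//  2. form Y_ij = W_ij (V*_i)^-1 and the reduced camera system (Schur complement)
//       S_jk = delta_jk U*_j - sum_i Y_ij W_ik^T,    e_j = ea_j - sum_i Y_ij eb_i;
//  3. solve S * delta_a = e for the camera updates (Cholesky);
//  4. back-substitute the point updates delta_b_i = (V*_i)^-1 (eb_i - sum_j W_ij^T delta_a_j);
//  5. if the reprojection error dropped, accept the step and decrease lambda,
//     otherwise restore the diagonals, increase lambda and retry.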

void LevMarqSparse::optimize(CvMat &_vis) { //main function that runs minimization
  bool done = false;

  CvMat* YWt = cvCreateMat( num_cam_param, num_cam_param, CV_64F ); //this matrix used to store Yij*Wik'
  CvMat* E = cvCreateMat( S->height, 1 , CV_64F ); //this is right part of system with S
  cvSetZero(YWt);
  cvSetZero(E);

  while(!done) {
    // compute jacobians Aij and Bij
    ask_for_projac(_vis);
    int invisible_count=0;
    //compute U_j  and  ea_j
    for (int j = 0; j < num_cams; j++ ) {
      cvSetZero(U[j]);
      cvSetZero(ea[j]);
      //sum over i (number of points)
      for (int i = 0; i < num_points; i++ ) {
  //get Aij
  //CvMat* Aij = ((CvMat**)(A->data.ptr + A->step * i))[j];
  CvMat* Aij = A[j+i*num_cams];
  if (Aij ) {
    //Uj+= AijT*Aij
    cvGEMM( Aij, Aij, 1, U[j], 1, U[j], CV_GEMM_A_T );
    //ea_j += AijT * e_ij
    CvMat eij;

    int index = ((int*)(Vis_index->data.ptr + i * Vis_index->step))[j];

    cvGetSubRect( err, &eij, cvRect( 0, index, 1, Aij->height  ) ); //width of transposed Aij
    cvGEMM( Aij, &eij, 1, ea[j], 1, ea[j], CV_GEMM_A_T );
  }
  else
    invisible_count++;
      }
    } //U_j and ea_j computed for all j

    //    if (!(iters%100))
    {
      int nviz = X->rows / num_err_param;
      double e2 = prevErrNorm*prevErrNorm, e2n = e2 / nviz;
      std::cerr<<"Iteration: "<<iters<<", normError: "<<e2<<" ("<<e2n<<")"<<std::endl;
    }
    if (cb)
      cb(iters, prevErrNorm, user_data);
    //compute V_i  and  eb_i
    for (int i = 0; i < num_points; i++ ) {
      cvSetZero(V[i]);
      cvSetZero(eb[i]);

      //sum over j (number of cameras)
      for( int j = 0; j < num_cams; j++ ) {
  //get Bij
  //CvMat* Bij = ((CvMat**)(B->data.ptr + B->step * i))[j];
  CvMat* Bij = B[j+i*num_cams];
  if (Bij ) {
    //Vi+= BijT*Bij
    cvGEMM( Bij, Bij, 1, V[i], 1, V[i], CV_GEMM_A_T );

    //eb_i += BijT * e_ij
    int index = ((int*)(Vis_index->data.ptr + i * Vis_index->step))[j];

    CvMat eij;
    cvGetSubRect( err, &eij, cvRect( 0, index, 1, Bij->height  ) ); //width of transposed Bij
    cvGEMM( Bij, &eij, 1, eb[i], 1, eb[i], CV_GEMM_A_T );
  }
      }
    } //V_i and eb_i computed for all i

      //compute W_ij
    for( int i = 0; i < num_points; i++ ) {
      for( int j = 0; j < num_cams; j++ ) {
  //CvMat* Aij = ((CvMat**)(A->data.ptr + A->step * i))[j];
  CvMat* Aij = A[j+i*num_cams];
  if( Aij ) { //visible
    //CvMat* Bij = ((CvMat**)(B->data.ptr + B->step * i))[j];
    CvMat* Bij = B[j+i*num_cams];
    //CvMat* Wij = ((CvMat**)(W->data.ptr + W->step * j))[i];
    CvMat* Wij = W[j+i*num_cams];

    //multiply
    cvGEMM( Aij, Bij, 1, NULL, 0, Wij, CV_GEMM_A_T );
  }
      }
    } //Wij computed

      //backup diagonal of JtJ before we start augmenting it
    {
      CvMat dia;
      CvMat subr;
      for( int j = 0; j < num_cams; j++ ) {
  cvGetDiag(U[j], &dia);
  cvGetSubRect(JtJ_diag, &subr,
         cvRect(0, j*num_cam_param, 1, num_cam_param ));
  cvCopy( &dia, &subr );
      }
      for( int i = 0; i < num_points; i++ ) {
  cvGetDiag(V[i], &dia);
  cvGetSubRect(JtJ_diag, &subr,
         cvRect(0, num_cams*num_cam_param + i * num_point_param, 1, num_point_param ));
  cvCopy( &dia, &subr );
      }
    }

    if( iters == 0 ) {
      //initialize lambda. It is set to 1e-3 * average diagonal element in JtJ
      double average_diag = 0;
      for( int j = 0; j < num_cams; j++ ) {
  average_diag += cvTrace( U[j] ).val[0];
      }
      for( int i = 0; i < num_points; i++ ) {
  average_diag += cvTrace( V[i] ).val[0];
      }
      average_diag /= (num_cams*num_cam_param + num_points * num_point_param );

      //      lambda = 1e-3 * average_diag;
      lambda = 1e-3 * average_diag;
      //note: this hard-coded value overrides the average-diagonal estimate computed above
      lambda = 0.245560;
    }

    //now we are going to find good step and make it
    for(;;) {
      //augmentation of diagonal
      for(int j = 0; j < num_cams; j++ ) {
  CvMat diag;
  cvGetDiag( U[j], &diag );
#if 1
  cvAddS( &diag, cvScalar( lambda ), &diag );
#else
  cvScale( &diag, &diag, 1 + lambda );
#endif
      }
      for(int i = 0; i < num_points; i++ ) {
  CvMat diag;
  cvGetDiag( V[i], &diag );
#if 1
  cvAddS( &diag, cvScalar( lambda ), &diag );
#else
  cvScale( &diag, &diag, 1 + lambda );
#endif
      }
      bool error = false;
      //compute inv(V*)
      bool inverted_ok = true;
      for(int i = 0; i < num_points; i++ ) {
  double det = cvInvert( V[i], inv_V_star[i] );

  if( fabs(det) <= FLT_EPSILON )  {
    inverted_ok = false;
    std::cerr<<"V["<<i<<"] failed"<<std::endl;
    break;
  } //means we did wrong augmentation, try to choose different lambda
      }

      if( inverted_ok ) {
  cvSetZero( E );
  //loop through cameras, compute upper diagonal blocks of matrix S
  for( int j = 0; j < num_cams; j++ ) {
    //compute Yij = Wij (V*_i)^-1  for all i   (if Wij exists/nonzero)
    for( int i = 0; i < num_points; i++ ) {
      //
      //CvMat* Wij = ((CvMat**)(W->data.ptr + W->step * j))[i];
      CvMat* Wij = W[j+i*num_cams];
      if( Wij ) {
        cvMatMul( Wij, inv_V_star[i], Yj[i] );
      }
    }

    //compute Sjk   for k>=j  (because Sjk = Skj)
    for( int k = j; k < num_cams; k++ ) {
      cvSetZero( YWt );
      for( int i = 0; i < num_points; i++ ) {
        //check that both Wij and Wik exist
        // CvMat* Wij = ((CvMat**)(W->data.ptr + W->step * j))[i];
        CvMat* Wij = W[j+i*num_cams];
        //CvMat* Wik = ((CvMat**)(W->data.ptr + W->step * k))[i];
        CvMat* Wik = W[k+i*num_cams];

        if( Wij && Wik ) {
    //multiply YWt += Yj[i]*Wik'
    cvGEMM( Yj[i], Wik, 1, YWt, 1, YWt, CV_GEMM_B_T  ); //transpose Wik
        }
      }

      //copy result to matrix S

      CvMat Sjk;
      //extract submat
      cvGetSubRect( S, &Sjk, cvRect( k * num_cam_param, j * num_cam_param, num_cam_param, num_cam_param ));


      //if j==k, add diagonal
      if( j != k ) {
        //just copy with minus
        cvScale( YWt, &Sjk, -1 ); //if we set initial S to zero then we can use cvSub( Sjk, YWt, Sjk);
      } else {
        //add diagonal value

        //subtract YWt from augmented Uj
        cvSub( U[j], YWt, &Sjk );
      }
    }

    //compute right part of equation involving matrix S
    // e_j=ea_j - \sum_i Y_ij eb_i
    {
      CvMat e_j;

      //select submat
      cvGetSubRect( E, &e_j, cvRect( 0, j * num_cam_param, 1, num_cam_param ) );

      for( int i = 0; i < num_points; i++ ) {
        //CvMat* Wij = ((CvMat**)(W->data.ptr + W->step * j))[i];
        CvMat* Wij = W[j+i*num_cams];
        if( Wij )
    cvMatMulAdd( Yj[i], eb[i], &e_j, &e_j );
      }

      cvSub( ea[j], &e_j, &e_j );
    }

  }
  //fill below diagonal elements of matrix S
  cvCompleteSymm( S,  0  ); //from upper to lower triangle; could also be done by nonzero blocks or during the upper-diagonal computation

  //Solve linear system  S * deltaP_a = E
  CvMat dpa;
  cvGetSubRect( deltaP, &dpa, cvRect(0, 0, 1, S->width ) );
  int res = cvSolve( S, E, &dpa, CV_CHOLESKY );

  if( res ) { //system solved ok
    //compute db_i
    for( int i = 0; i < num_points; i++ ) {
      CvMat dbi;
      cvGetSubRect( deltaP, &dbi, cvRect( 0, dpa.height + i * num_point_param, 1, num_point_param ) );

      // compute \sum_j W_ij^T da_j
      for( int j = 0; j < num_cams; j++ ) {
        //get Wij
        //CvMat* Wij = ((CvMat**)(W->data.ptr + W->step * j))[i];
        CvMat* Wij = W[j+i*num_cams];
        if( Wij ) {
    //get da_j
    CvMat daj;
    cvGetSubRect( &dpa, &daj, cvRect( 0, j * num_cam_param, 1, num_cam_param ));
    cvGEMM( Wij, &daj, 1, &dbi, 1, &dbi, CV_GEMM_A_T  ); //transpose Wij
        }
      }
      //finalize dbi
      cvSub( eb[i], &dbi, &dbi );
      cvMatMul(inv_V_star[i], &dbi, &dbi );  //here we get final dbi
    }  //now we computed whole deltaP

    //add deltaP to prevP to obtain the new parameter vector P
    cvAdd( prevP, deltaP, P );

    //evaluate  function with new parameters
    ask_for_proj(_vis); // func( P, hX );

    //compute error
    errNorm = cvNorm( X, hX, CV_L2 );

  } else {
    error = true;
  }
      } else {
  error = true;
      }
      //check solution
      if( error || // singularities somewhere
    errNorm > prevErrNorm )  { //step was not accepted
  //increase lambda and reject change
  lambda *= 10;
  {
    int nviz = X->rows / num_err_param;
    double e2 = errNorm*errNorm, e2_prev = prevErrNorm*prevErrNorm;
    double e2n = e2/nviz, e2n_prev = e2_prev/nviz;
    std::cerr<<"move failed: lambda = "<<lambda<<", e2 = "<<e2<<" ("<<e2n<<") > "<<e2_prev<<" ("<<e2n_prev<<")"<<std::endl;
  }

  //restore diagonal from backup
  {
    CvMat dia;
    CvMat subr;
    for( int j = 0; j < num_cams; j++ ) {
      cvGetDiag(U[j], &dia);
      cvGetSubRect(JtJ_diag, &subr,
       cvRect(0, j*num_cam_param, 1, num_cam_param ));
      cvCopy( &subr, &dia );
    }
    for( int i = 0; i < num_points; i++ ) {
      cvGetDiag(V[i], &dia);
      cvGetSubRect(JtJ_diag, &subr,
       cvRect(0, num_cams*num_cam_param + i * num_point_param, 1, num_point_param ));
      cvCopy( &subr, &dia );
    }
  }
      } else {  //all is ok
  //accept change and decrease lambda
  lambda /= 10;
  lambda = MAX(lambda, 1e-16);
  std::cerr<<"decreasing lambda to "<<lambda<<std::endl;
  prevErrNorm = errNorm;

  //compute new projection error vector
  cvSub(  X, hX, err );
  break;
      }
    }
    iters++;

    double param_change_norm = cvNorm(P, prevP, CV_RELATIVE_L2);
    //check termination criteria
    if( (criteria.type&CV_TERMCRIT_ITER && iters > criteria.max_iter ) ||
  (criteria.type&CV_TERMCRIT_EPS && param_change_norm < criteria.epsilon) ) {
      //      std::cerr<<"relative norm change "<<param_change_norm<<" lower than eps "<<criteria.epsilon<<", stopping"<<std::endl;
      done = true;
      break;
    } else {
      //copy new params and continue iterations
      cvCopy( P, prevP );
    }
  }
  cvReleaseMat(&YWt);
  cvReleaseMat(&E);
}

//Utilities

static void fjac(int /*i*/, int /*j*/, CvMat *point_params, CvMat* cam_params, CvMat* A, CvMat* B, void* /*data*/) {
  //compute jacobian per camera parameters (i.e. Aij)
  //take i-th point 3D current coordinates

  CvMat _Mi;
  cvReshape(point_params, &_Mi, 3, 1 );

  CvMat* _mp = cvCreateMat(1, 1, CV_64FC2 ); //projection of the point

  //split camera params into different matrices
  CvMat _ri, _ti, _k;
  cvGetRows( cam_params, &_ri, 0, 3 );
  cvGetRows( cam_params, &_ti, 3, 6 );

  double intr_data[9] = {0, 0, 0, 0, 0, 0, 0, 0, 1};
  intr_data[0] = cam_params->data.db[6];
  intr_data[4] = cam_params->data.db[7];
  intr_data[2] = cam_params->data.db[8];
  intr_data[5] = cam_params->data.db[9];

  CvMat _A = cvMat(3,3, CV_64F, intr_data );

  CvMat _dpdr, _dpdt, _dpdf, _dpdc, _dpdk;

  bool have_dk = cam_params->height > 10; //distortion coefficients present?

  cvGetCols( A, &_dpdr, 0, 3 );
  cvGetCols( A, &_dpdt, 3, 6 );
  cvGetCols( A, &_dpdf, 6, 8 );
  cvGetCols( A, &_dpdc, 8, 10 );

  if( have_dk ) {
    cvGetRows( cam_params, &_k, 10, cam_params->height );
    cvGetCols( A, &_dpdk, 10, A->width );
  }
  cvProjectPoints2(&_Mi, &_ri, &_ti, &_A, have_dk ? &_k : NULL, _mp, &_dpdr, &_dpdt,
       &_dpdf, &_dpdc, have_dk ? &_dpdk : NULL, 0);

  cvReleaseMat( &_mp );

  //compute jacobian for point params
  //compute dMeasure/dPoint3D

  // x = (r11 * X + r12 * Y + r13 * Z + t1)
  // y = (r21 * X + r22 * Y + r23 * Z + t2)
  // z = (r31 * X + r32 * Y + r33 * Z + t3)

  // x' = x/z
  // y' = y/z

  //d(x') = ( dx*z - x*dz)/(z*z)
  //d(y') = ( dy*z - y*dz)/(z*z)

  //g = 1 + k1*r_2 + k2*r_4 + k3*r_6
  //r_2 = x'*x' + y'*y'

  //d(r_2) = 2*x'*dx' + 2*y'*dy'

  //dg = k1* d(r_2) + k2*2*r_2*d(r_2) + k3*3*r_2*r_2*d(r_2)

  //x" = x'*g + 2*p1*x'*y' + p2(r_2+2*x'_2)
  //y" = y'*g + p1(r_2+2*y'_2) + 2*p2*x'*y'

  //d(x") = d(x') * g + x' * d(g) + 2*p1*( d(x')*y' + x'*dy') + p2*(d(r_2) + 2*2*x'* dx')
  //d(y") = d(y') * g + y' * d(g) + 2*p2*( d(x')*y' + x'*dy') + p1*(d(r_2) + 2*2*y'* dy')

  // u = fx*( x") + cx
  // v = fy*( y") + cy

  // du = fx * d(x")  = fx * ( dx*z - x*dz)/ (z*z)
  // dv = fy * d(y")  = fy * ( dy*z - y*dz)/ (z*z)

  // dx/dX = r11,  dx/dY = r12, dx/dZ = r13
  // dy/dX = r21,  dy/dY = r22, dy/dZ = r23
  // dz/dX = r31,  dz/dY = r32, dz/dZ = r33

  // du/dX = fx*(r11*z-x*r31)/(z*z)
  // du/dY = fx*(r12*z-x*r32)/(z*z)
  // du/dZ = fx*(r13*z-x*r33)/(z*z)

  // dv/dX = fy*(r21*z-y*r31)/(z*z)
  // dv/dY = fy*(r22*z-y*r32)/(z*z)
  // dv/dZ = fy*(r23*z-y*r33)/(z*z)

  //get rotation matrix
  double R[9], t[3], fx = intr_data[0], fy = intr_data[4];
  CvMat _R = cvMat( 3, 3, CV_64F, R );
  cvRodrigues2(&_ri, &_R);

  double X,Y,Z;
  X = point_params->data.db[0];
  Y = point_params->data.db[1];
  Z = point_params->data.db[2];

  t[0] = _ti.data.db[0];
  t[1] = _ti.data.db[1];
  t[2] = _ti.data.db[2];

  //compute x,y,z
  double x = R[0] * X + R[1] * Y + R[2] * Z + t[0];
  double y = R[3] * X + R[4] * Y + R[5] * Z + t[1];
  double z = R[6] * X + R[7] * Y + R[8] * Z + t[2];

#if 1
  //compute x',y'
  double x_strike = x/z;
  double y_strike = y/z;
  //compute dx',dy'  matrix
  //
  //    dx'/dX  dx'/dY dx'/dZ    =
  //    dy'/dX  dy'/dY dy'/dZ

  double coeff[6] = { z, 0, -x,
          0, z, -y };
  CvMat coeffmat = cvMat( 2, 3, CV_64F, coeff );

  CvMat* dstrike_dbig = cvCreateMat(2,3,CV_64F);
  cvMatMul(&coeffmat, &_R, dstrike_dbig);
  cvScale(dstrike_dbig, dstrike_dbig, 1/(z*z) );

  if( have_dk ) {
    double strike_[2] = {x_strike, y_strike};
    CvMat strike = cvMat(1, 2, CV_64F, strike_);

    //compute r_2
    double r_2 = x_strike*x_strike + y_strike*y_strike;
    double r_4 = r_2*r_2;
    double r_6 = r_4*r_2;

    //compute d(r_2)/dbig
    CvMat* dr2_dbig = cvCreateMat(1,3,CV_64F);
    cvMatMul( &strike, dstrike_dbig, dr2_dbig);
    cvScale( dr2_dbig, dr2_dbig, 2 );

    double& k1 = _k.data.db[0];
    double& k2 = _k.data.db[1];
    double& p1 = _k.data.db[2];
    double& p2 = _k.data.db[3];
    double k3 = 0;

    if( _k.cols*_k.rows == 5 ) {
      k3 = _k.data.db[4];
    }
    //compute dg/dbig
    double dg_dr2 = k1 + k2*2*r_2 + k3*3*r_4;
    double g = 1+k1*r_2+k2*r_4+k3*r_6;

    CvMat* dg_dbig = cvCreateMat(1,3,CV_64F);
    cvScale( dr2_dbig, dg_dbig, dg_dr2 );

    CvMat* tmp = cvCreateMat( 2, 3, CV_64F );
    CvMat* dstrike2_dbig = cvCreateMat( 2, 3, CV_64F );

    double c[4] = { g+2*p1*y_strike+4*p2*x_strike,       2*p1*x_strike,
        2*p2*y_strike,                 g+2*p2*x_strike + 4*p1*y_strike };

    CvMat coeffmat2 = cvMat(2,2,CV_64F, c );

    cvMatMul(&coeffmat2, dstrike_dbig, dstrike2_dbig );

    cvGEMM( &strike, dg_dbig, 1, NULL, 0, tmp, CV_GEMM_A_T );
    cvAdd( dstrike2_dbig, tmp, dstrike2_dbig );

    double p[2] = { p2, p1 };
    CvMat pmat = cvMat(2, 1, CV_64F, p );

    cvMatMul( &pmat, dr2_dbig ,tmp);
    cvAdd( dstrike2_dbig, tmp, dstrike2_dbig );

    cvCopy( dstrike2_dbig, B );

    cvReleaseMat(&dr2_dbig);
    cvReleaseMat(&dg_dbig);

    cvReleaseMat(&tmp);
    cvReleaseMat(&dstrike2_dbig);
  } else {
    cvCopy(dstrike_dbig, B);
  }
  cvReleaseMat(&dstrike_dbig); //no longer needed in either branch
  //multiply by fx, fy
  CvMat row;
  cvGetRows( B, &row, 0, 1 );
  cvScale( &row, &row, fx );

  cvGetRows( B, &row, 1, 2 );
  cvScale( &row, &row, fy );

#else

  double k = fx/(z*z);

  cvmSet( B, 0, 0, k*(R[0]*z-x*R[6]));
  cvmSet( B, 0, 1, k*(R[1]*z-x*R[7]));
  cvmSet( B, 0, 2, k*(R[2]*z-x*R[8]));

  k = fy/(z*z);

  cvmSet( B, 1, 0, k*(R[3]*z-y*R[6]));
  cvmSet( B, 1, 1, k*(R[4]*z-y*R[7]));
  cvmSet( B, 1, 2, k*(R[5]*z-y*R[8]));

#endif

};
static void func(int /*i*/, int /*j*/, CvMat *point_params, CvMat* cam_params, CvMat* estim, void* /*data*/) {
  //just do projections
  CvMat _Mi;
  cvReshape( point_params, &_Mi, 3, 1 );

  CvMat* _mp = cvCreateMat(1, 1, CV_64FC2 ); //projection of the point
  CvMat* _mp2 = cvCreateMat(1, 2, CV_64F ); //projection of the point

  //split camera params into different matrices
  CvMat _ri, _ti, _k;

  cvGetRows( cam_params, &_ri, 0, 3 );
  cvGetRows( cam_params, &_ti, 3, 6 );

  double intr_data[9] = {0, 0, 0, 0, 0, 0, 0, 0, 1};
  intr_data[0] = cam_params->data.db[6];
  intr_data[4] = cam_params->data.db[7];
  intr_data[2] = cam_params->data.db[8];
  intr_data[5] = cam_params->data.db[9];

  CvMat _A = cvMat(3,3, CV_64F, intr_data );

  //int cn = CV_MAT_CN(_Mi.type);

  bool have_dk = cam_params->height > 10; //distortion coefficients present?

  if( have_dk ) {
    cvGetRows( cam_params, &_k, 10, cam_params->height );
  }
  cvProjectPoints2( &_Mi, &_ri, &_ti, &_A, have_dk ? &_k : NULL, _mp, NULL, NULL,
        NULL, NULL, NULL, 0);
  //    std::cerr<<"_mp = "<<_mp->data.db[0]<<","<<_mp->data.db[1]<<std::endl;
  //
  _mp2->data.db[0] = _mp->data.db[0];
  _mp2->data.db[1] = _mp->data.db[1];
  cvTranspose( _mp2, estim );
  cvReleaseMat( &_mp );
  cvReleaseMat( &_mp2 );
};

static void fjac_new(int i, int j, Mat& point_params, Mat& cam_params, Mat& A, Mat& B, void* data) {
  CvMat _point_params = point_params, _cam_params = cam_params, _Al = A, _Bl = B;
  fjac(i,j, &_point_params, &_cam_params, &_Al, &_Bl, data);
};

static void func_new(int i, int j, Mat& point_params, Mat& cam_params, Mat& estim, void* data)  {
  CvMat _point_params = point_params, _cam_params = cam_params, _estim = estim;
  func(i,j,&_point_params,&_cam_params,&_estim,data);
};
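
//bundleAdjust() packs the parameter vector P as
//  [ camera_0 | camera_1 | ... | camera_(ncameras-1) | point_0 | ... | point_(npoints-1) ],
//where each camera block is
//  [ rvec(3) | tvec(3) | fx fy | cx cy | distortion coefficients (numdist values, if any) ]
//and each point block holds the 3D coordinates (X, Y, Z) of one point.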

void LevMarqSparse::bundleAdjust( std::vector<Point3d>& points, //positions of points in global coordinate system (input and output)
          const std::vector<std::vector<Point2d> >& imagePoints, //projections of 3d points for every camera
          const std::vector<std::vector<int> >& visibility, //visibility of 3d points for every camera
          std::vector<Mat>& cameraMatrix, //intrinsic matrices of all cameras (input and output)
          std::vector<Mat>& R, //rotation matrices of all cameras (input and output)
          std::vector<Mat>& T, //translation vector of all cameras (input and output)
          std::vector<Mat>& distCoeffs, //distortion coefficients of all cameras (input and output)
          const TermCriteria& criteria,
          BundleAdjustCallback cb, void* user_data) {
  //,enum{MOTION_AND_STRUCTURE,MOTION,STRUCTURE})
  int num_points = (int)points.size();
  int num_cameras = (int)cameraMatrix.size();

  CV_Assert( imagePoints.size() == (size_t)num_cameras &&
       visibility.size() == (size_t)num_cameras &&
       R.size() == (size_t)num_cameras &&
       T.size() == (size_t)num_cameras &&
       (distCoeffs.size() == (size_t)num_cameras || distCoeffs.size() == 0) );

  int numdist = distCoeffs.size() ? (distCoeffs[0].rows * distCoeffs[0].cols) : 0;

  int num_cam_param = 3 /* rotation vector */ + 3 /* translation vector */
    + 2 /* fx, fy */ + 2 /* cx, cy */ + numdist;

  int num_point_param = 3;

  //collect camera parameters into vector
  Mat params( num_cameras * num_cam_param + num_points * num_point_param, 1, CV_64F );

  //fill camera params
  for( int i = 0; i < num_cameras; i++ ) {
    //rotation
    Mat rot_vec; Rodrigues( R[i], rot_vec );
    Mat dst = params.rowRange(i*num_cam_param, i*num_cam_param+3);
    rot_vec.copyTo(dst);

    //translation
    dst = params.rowRange(i*num_cam_param + 3, i*num_cam_param+6);
    T[i].copyTo(dst);

    //intrinsic camera matrix
    double* intr_data = (double*)cameraMatrix[i].data;
    double* intr = (double*)(params.data + params.step * (i*num_cam_param+6));
    //focals
    intr[0] = intr_data[0];  //fx
    intr[1] = intr_data[4];  //fy
    //center of projection
    intr[2] = intr_data[2];  //cx
    intr[3] = intr_data[5];  //cy

    //add distortion if exists
    if( distCoeffs.size() ) {
      dst = params.rowRange(i*num_cam_param + 10, i*num_cam_param+10+numdist);
      distCoeffs[i].copyTo(dst);
    }
  }

  //fill point params
  Mat ptparams(num_points, 1, CV_64FC3, params.data + num_cameras*num_cam_param*params.step);
  Mat _points(points);
  CV_Assert(_points.size() == ptparams.size() && _points.type() == ptparams.type());
  _points.copyTo(ptparams);

  //convert visibility vectors to visibility matrix
  Mat vismat(num_points, num_cameras, CV_32S);
  for( int i = 0; i < num_cameras; i++ ) {
    //get the column of vismat that corresponds to camera i
    Mat col = vismat.col(i);
    Mat((int)visibility[i].size(), 1, vismat.type(), (void*)&visibility[i][0]).copyTo( col );
  }

  int num_proj = countNonZero(vismat); //total number of points projections

  //collect measurements
  Mat X(num_proj*2,1,CV_64F); //measurement vector

  int counter = 0;
  for(int i = 0; i < num_points; i++ ) {
    for(int j = 0; j < num_cameras; j++ ) {
      //check visibility
      if( visibility[j][i] ) {
  //extract point and put it into the measurement vector
  Point2d p = imagePoints[j][i];
  ((double*)(X.data))[counter] = p.x;
  ((double*)(X.data))[counter+1] = p.y;
  assert(p.x != -1 || p.y != -1);
  counter+=2;
      }
    }
  }

  LevMarqSparse levmar( num_points, num_cameras, num_point_param, num_cam_param, 2, vismat, params, X,
      TermCriteria(criteria), fjac_new, func_new, NULL,
      cb, user_data);
  //extract results
  //fill point params
  /*Mat final_points(num_points, 1, CV_64FC3,
    levmar.P->data.db + num_cameras*num_cam_param *levmar.P->step);
    CV_Assert(_points.size() == final_points.size() && _points.type() == final_points.type());
    final_points.copyTo(_points);*/

  points.clear();
  for( int i = 0; i < num_points; i++ ) {
    CvMat point_mat;
    cvGetSubRect( levmar.P, &point_mat, cvRect( 0, levmar.num_cams * levmar.num_cam_param+ levmar.num_point_param * i, 1, levmar.num_point_param ));
    CvScalar x = cvGet2D(&point_mat,0,0); CvScalar y = cvGet2D(&point_mat,1,0); CvScalar z = cvGet2D(&point_mat,2,0);
    points.push_back(Point3d(x.val[0],y.val[0],z.val[0]));
    //std::cerr<<"point"<<points[points.size()-1].x<<","<<points[points.size()-1].y<<","<<points[points.size()-1].z<<std::endl;
  }
  //fill camera params
  //R.clear();T.clear();cameraMatrix.clear();
  Mat levmarP = cv::cvarrToMat(levmar.P);
  for( int i = 0; i < num_cameras; i++ ) {
    //rotation
    Mat rot_vec = levmarP.rowRange(i*num_cam_param, i*num_cam_param+3);
    Rodrigues( rot_vec, R[i] );
    //translation
    levmarP.rowRange(i*num_cam_param + 3, i*num_cam_param+6).copyTo(T[i]);

    //intrinsic camera matrix
    double* intr_data = (double*)cameraMatrix[i].data;
    double* intr = (double*)(levmarP.data +levmarP.step * (i*num_cam_param+6));
    //focals
    intr_data[0] = intr[0];  //fx
    intr_data[4] = intr[1];  //fy
    //center of projection
    intr_data[2] = intr[2];  //cx
    intr_data[5] = intr[3];  //cy

    //add distortion if exists
    if( distCoeffs.size() ) {
      levmarP.rowRange(i*num_cam_param + 10, i*num_cam_param+10+numdist).copyTo(distCoeffs[i]);
    }
  }
}
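
/* A rough usage sketch of LevMarqSparse::bundleAdjust(); the names and sizes below are
   illustrative only. Every camera needs an initial estimate of its intrinsics, pose and
   (optionally) distortion, and visibility[j][i] tells whether point i is seen by camera j:

     int ncameras = ...;                                   // number of views
     std::vector<cv::Point3d> points3d = ...;              // initial 3D point estimates
     std::vector<std::vector<cv::Point2d> > imagePoints(ncameras); // 2D projections per camera
     std::vector<std::vector<int> > visibility(ncameras);  // 1 - visible, 0 - invisible
     std::vector<cv::Mat> K(ncameras), R(ncameras), T(ncameras), dist(ncameras);

     cv::LevMarqSparse::bundleAdjust(points3d, imagePoints, visibility, K, R, T, dist,
                                     cv::TermCriteria(cv::TermCriteria::MAX_ITER +
                                                      cv::TermCriteria::EPS, 30, 1e-12));

   On return points3d, K, R, T and dist hold the refined values. */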