Commit d6bbcd68 authored by Alexander Alekhin

Merge pull request #1621 from alalek:replace_cv_errornoreturn

parents a4e8622c 1248ebeb
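The pattern applied throughout the diff below: CV_Error() throws a cv::Exception and, in the matching OpenCV version, is annotated as noreturn, so the CV_ErrorNoReturn alias and the unreachable return/break statements that used to follow the error calls can be dropped. A minimal sketch of the before/after pattern (the function name and message here are illustrative only, not taken from the patch):

#include <opencv2/core.hpp>

// Hypothetical helper, shown only to illustrate the change this PR applies.
static int checkedShift(int i)
{
    if (i < 0 || i > 7)
    {
        // Before: CV_ErrorNoReturn(cv::Error::StsBadArg, "invalid shift");
        //         return -1;   // unreachable, kept only to silence compiler warnings
        CV_Error(cv::Error::StsBadArg, "invalid shift");  // throws cv::Exception, never returns
    }
    return 1 << i;
}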
@@ -143,7 +143,6 @@ Ptr<Object> OR_pascalImp::parseAnnotation(const string &path, const string &id)
         case XML_ERROR_FILE_NOT_FOUND:
             error_message = "XML file not found! " + error_message;
             CV_Error(Error::StsParseError, error_message);
-            return annotation;
         default:
             CV_Error(Error::StsParseError, error_message);
             break;
@@ -79,8 +79,7 @@ bool FacemarkKazemiImpl::convertToUnit(Rect r,Mat &warp){
 bool FacemarkKazemiImpl::setMeanExtreme(){
     if(meanshape.empty()){
         String error_message = "Model not loaded properly.No mean shape found.Aborting...";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     for(size_t i=0;i<meanshape.size();i++){
         if(meanshape[i].x>maxmeanx)
@@ -98,7 +97,7 @@ bool FacemarkKazemiImpl::calcMeanShape (vector< vector<Point2f> >& trainlandmark
     //clear the loaded meanshape
     if(trainimages.empty()||trainlandmarks.size()!=trainimages.size()) {
         // throw error if no data (or simply return -1?)
-        CV_ErrorNoReturn(Error::StsBadArg, "Number of images is not equal to corresponding landmarks. Aborting...");
+        CV_Error(Error::StsBadArg, "Number of images is not equal to corresponding landmarks. Aborting...");
     }
     meanshape.clear();
     vector<Mat> finalimages;
@@ -165,7 +164,7 @@ bool FacemarkKazemiImpl::scaleData( vector< vector<Point2f> > & trainlandmarks,
 {
     if(trainimages.empty()||trainimages.size()!=trainlandmarks.size()){
         // throw error if no data (or simply return -1?)
-        CV_ErrorNoReturn(Error::StsBadArg, "The data is not loaded properly by train function. Aborting...");
+        CV_Error(Error::StsBadArg, "The data is not loaded properly by train function. Aborting...");
     }
     float scalex,scaley;
     //scale all images and their landmarks according to input size
@@ -228,8 +228,7 @@ bool loadTrainingData(vector<String> filename,vector< vector<Point2f> >
         f1.open(filename[j].c_str(),ios::in);
         if(!f1.is_open()){
             cout<<filename[j]<<endl;
-            CV_ErrorNoReturn(Error::StsError, "File can't be opened for reading!");
-            return false;
+            CV_Error(Error::StsError, "File can't be opened for reading!");
         }
         //get the path of the image whose landmarks have to be detected
         getline(f1,img);
@@ -307,10 +307,8 @@ static Mat histc(InputArray _src, int minVal, int maxVal, bool normed)
         case CV_32FC1:
             return histc_(src, minVal, maxVal, normed);
             break;
-        default:
-            CV_Error(Error::StsUnmatchedFormats, "This type is not implemented yet."); break;
     }
-    return Mat();
+    CV_Error(Error::StsUnmatchedFormats, "This type is not implemented yet.");
 }
@@ -113,8 +113,7 @@ bool FacemarkKazemiImpl:: getBestSplit(vector<Point2f> pixel_coordinates, vector
 {
     if(samples[0].shapeResiduals.size()!=samples[0].current_shape.size()){
         String error_message = "Error while generating split.Residuals are not complete.Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     //This vector stores the matrices where each matrix represents
     //sum of the residuals of shapes of samples which go to the left
@@ -222,13 +221,11 @@ bool FacemarkKazemiImpl :: generateSplit(queue<node_info>& curr,vector<Point2f>
 bool FacemarkKazemiImpl :: buildRegtree(regtree& tree,vector<training_sample>& samples,vector<Point2f> pixel_coordinates){
     if(samples.size()==0){
         String error_message = "Error while building regression tree.Empty samples. Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     if(pixel_coordinates.size()==0){
         String error_message = "Error while building regression tree.No pixel coordinates. Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     queue<node_info> curr;
     node_info parent;
@@ -291,8 +288,7 @@ unsigned long FacemarkKazemiImpl::divideSamples (splitr split,vector<training_sa
 {
     if(samples.size()==0){
         String error_message = "Error while dividing samples. Sample array empty. Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return 0;
+        CV_Error(Error::StsBadArg, error_message);
     }
     unsigned long i = start;
     training_sample temp;
@@ -54,8 +54,7 @@ bool FacemarkKazemiImpl::setTrainingParameters(String filename){
     fs.open(filename, FileStorage::READ);
     if (!fs.isOpened())
     {   String error_message = "Error while opening configuration file.Aborting..";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     int cascade_depth_;
     int tree_depth_;
@@ -105,8 +104,7 @@ unsigned long FacemarkKazemiImpl:: getNearestLandmark(Point2f pixel)
     if(meanshape.empty()) {
         // throw error if no data (or simply return -1?)
         String error_message = "The data is not loaded properly by train function. Aborting...";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     float dist=float(INT_MAX);
     unsigned long index =0;
@@ -122,8 +120,7 @@ unsigned long FacemarkKazemiImpl:: getNearestLandmark(Point2f pixel)
 bool FacemarkKazemiImpl :: getRelativePixels(vector<Point2f> sample,vector<Point2f>& pixel_coordinates,std::vector<int> nearest){
     if(sample.size()!=meanshape.size()){
         String error_message = "Error while finding relative shape. Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     Mat transform_mat;
     transform_mat = estimateRigidTransform(meanshape,sample,false);
@@ -146,8 +143,7 @@ bool FacemarkKazemiImpl :: getRelativePixels(vector<Point2f> sample,vector<Point
 bool FacemarkKazemiImpl::getPixelIntensities(Mat img,vector<Point2f> pixel_coordinates,vector<int>& pixel_intensities,Rect face){
     if(pixel_coordinates.size()==0){
         String error_message = "No pixel coordinates found. Aborting.....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     Mat transform_mat;
     convertToActual(face,transform_mat);
@@ -259,13 +255,11 @@ bool FacemarkKazemiImpl :: saveModel(String filename){
     ofstream f(filename.c_str(),ios::binary);
     if(!f.is_open()){
         String error_message = "Error while opening file to write model. Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     if(loaded_forests.size()!=loaded_pixel_coordinates.size()){
         String error_message = "Incorrect training data. Aborting....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     string s("cascade_depth");
     uint64_t len = s.size();
@@ -306,14 +300,12 @@ void FacemarkKazemiImpl::training(String imageList, String groundTruth){
     imageList.clear();
     groundTruth.clear();
     String error_message = "Less arguments than required";
-    CV_ErrorNoReturn(Error::StsBadArg, error_message);
-    return ;
+    CV_Error(Error::StsBadArg, error_message);
 }
 bool FacemarkKazemiImpl::training(vector<Mat>& images, vector< vector<Point2f> >& landmarks,string filename,Size scale,string modelFilename){
     if(!setTrainingParameters(filename)){
         String error_message = "Error while loading training parameters";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     vector<Rect> rectangles;
     scaleData(landmarks,images,scale);
@@ -321,8 +313,7 @@ bool FacemarkKazemiImpl::training(vector<Mat>& images, vector< vector<Point2f> >
     if(images.size()!=landmarks.size()){
         // throw error if no data (or simply return -1?)
         String error_message = "The data is not loaded properly. Aborting training function....";
-        CV_ErrorNoReturn(Error::StsBadArg, error_message);
-        return false;
+        CV_Error(Error::StsBadArg, error_message);
     }
     vector<training_sample> samples;
     getTestCoordinates();
@@ -337,13 +337,8 @@ int BinaryDescriptor::descriptorSize() const
 /* power function with error management */
 static inline int get2Pow( int i )
 {
-  if( i >= 0 && i <= 7 )
-    return 1 << i;
-  else
-  {
-    CV_Error( Error::StsBadArg, "Invalid power argument" );
-    return -1;
-  }
+  CV_DbgAssert(i >= 0 && i <= 7);
+  return 1 << i;
 }
 
 /* compute Gaussian pyramids */
@@ -66,7 +66,6 @@ static inline int getLabel(int quantized)
     case 128: return 7;
     default:
       CV_Error(Error::StsBadArg, "Invalid value of quantized parameter");
-      return -1; //avoid warning
   }
 }
@@ -58,7 +58,7 @@ Ptr<TrackerGOTURN> TrackerGOTURN::create(const TrackerGOTURN::Params &parameters
     return Ptr<gtr::TrackerGOTURNImpl>(new gtr::TrackerGOTURNImpl(parameters));
 #else
     (void)(parameters);
-    CV_ErrorNoReturn(cv::Error::StsNotImplemented , "to use GOTURN, the tracking module needs to be built with opencv_dnn !");
+    CV_Error(cv::Error::StsNotImplemented , "to use GOTURN, the tracking module needs to be built with opencv_dnn !");
 #endif
 }
 Ptr<TrackerGOTURN> TrackerGOTURN::create()
@@ -73,7 +73,6 @@ bool Tracker::init( InputArray image, const Rect2d& boundingBox )
   if( model == 0 )
   {
     CV_Error( -1, "The model is not initialized" );
-    return false;
   }
 
   if( initTracker )
@@ -89,7 +89,6 @@ void TrackerBoostingModel::responseToConfidenceMap( const std::vector<Mat>& resp
   if( currentSample.empty() )
   {
     CV_Error( -1, "The samples in Model estimation are empty" );
-    return;
   }
 
   for ( size_t i = 0; i < currentSample.size(); i++ )
@@ -90,7 +90,6 @@ Ptr<TrackerFeature> TrackerFeature::create( const String& trackerFeatureType )
   }
 
   CV_Error( -1, "Tracker feature type not supported" );
-  return Ptr<TrackerFeature>();
 }
 
 String TrackerFeature::getClassName() const
@@ -67,7 +67,6 @@ void TrackerMILModel::responseToConfidenceMap( const std::vector<Mat>& responses
   if( currentSample.empty() )
  {
     CV_Error( -1, "The samples in Model estimation are empty" );
-    return;
   }
 
   for ( size_t i = 0; i < responses.size(); i++ )
@@ -112,7 +112,6 @@ bool TrackerModel::runStateEstimator()
   if( stateEstimator == 0 )
   {
     CV_Error( -1, "Tracker state estimator is not setted" );
-    return false;
   }
   Ptr<TrackerTargetState> targetState = stateEstimator->estimate( confidenceMaps );
   if( targetState == 0 )
@@ -83,7 +83,6 @@ Ptr<TrackerSamplerAlgorithm> TrackerSamplerAlgorithm::create( const String& trac
   }
 
   CV_Error( -1, "Tracker sampler algorithm type not supported" );
-  return Ptr<TrackerSamplerAlgorithm>();
 }
 
 String TrackerSamplerAlgorithm::getClassName() const
@@ -85,7 +85,6 @@ Ptr<TrackerStateEstimator> TrackerStateEstimator::create( const String& trackeSt
   }
 
   CV_Error( -1, "Tracker state estimator type not supported" );
-  return Ptr<TrackerStateEstimator>();
 }
 
 String TrackerStateEstimator::getClassName() const
@@ -1794,12 +1794,9 @@ bool VocData::getClassifierGroundTruthImage(const string& obj_class, const strin
     {
         //image found, so return corresponding ground truth
        return m_classifier_gt_all_present[std::distance(m_classifier_gt_all_ids.begin(),it)] != 0;
-    } else {
-        string err_msg = "could not find classifier ground truth for image '" + id + "' and class '" + obj_class + "'";
-        CV_Error(Error::StsError,err_msg.c_str());
     }
-    return true;
+    string err_msg = "could not find classifier ground truth for image '" + id + "' and class '" + obj_class + "'";
+    CV_Error(Error::StsError,err_msg.c_str());
 }
 
 //-------------------------------------------------------------------
@@ -204,10 +204,8 @@ namespace cv
                 return distanceL5(points1, idx1, points2, idx2);
             case PCTSignatures::L_INFINITY:
                 return distanceLInfinity(points1, idx1, points2, idx2);
-            default:
-                CV_Error(Error::StsBadArg, "Distance function not implemented!");
-                return -1;
             }
+            CV_Error(Error::StsBadArg, "Distance function not implemented!");
         }
     }
 }
@@ -117,10 +117,8 @@ namespace cv
                 return gaussianSimilarity(distancefunction, similarityParameter, points1, idx1, points2, idx2);
             case PCTSignatures::HEURISTIC:
                 return heuristicSimilarity(distancefunction, similarityParameter, points1, idx1, points2, idx2);
-            default:
-                CV_Error(Error::StsNotImplemented, "Similarity function not implemented!");
-                return -1;
             }
+            CV_Error(Error::StsNotImplemented, "Similarity function not implemented!");
         }
     }
 }
@@ -51,7 +51,7 @@ using namespace cv::cuda;
 cv::cuda::SURF_CUDA::SURF_CUDA() { throw_no_cuda(); }
 cv::cuda::SURF_CUDA::SURF_CUDA(double, int, int, bool, float, bool) { throw_no_cuda(); }
-int cv::cuda::SURF_CUDA::descriptorSize() const { throw_no_cuda(); return 0;}
+int cv::cuda::SURF_CUDA::descriptorSize() const { throw_no_cuda(); }
 void cv::cuda::SURF_CUDA::uploadKeypoints(const std::vector<KeyPoint>&, GpuMat&) { throw_no_cuda(); }
 void cv::cuda::SURF_CUDA::downloadKeypoints(const GpuMat&, std::vector<KeyPoint>&) { throw_no_cuda(); }
 void cv::cuda::SURF_CUDA::downloadDescriptors(const GpuMat&, std::vector<float>&) { throw_no_cuda(); }
@@ -480,7 +480,6 @@ Ptr<StereoMatcher> createRightMatcher(Ptr<StereoMatcher> matcher_left)
     else
     {
         CV_Error(Error::StsBadArg, "createRightMatcher supports only StereoBM and StereoSGBM");
-        return Ptr<StereoMatcher>();
     }
 }
@@ -56,6 +56,7 @@ void from32FTo32S(Mat &img, Mat &outImg, int nI, float *mapping)
 {
     int rows = img.rows, cols = img.cols;
     size_t alls = (size_t)rows * cols;
+    CV_Assert(alls < INT_MAX);
     CV_Assert(img.isContinuous());
     float *imgPtr = img.ptr<float>();
@@ -66,7 +67,7 @@ void from32FTo32S(Mat &img, Mat &outImg, int nI, float *mapping)
     for (size_t i = 0; i < alls; i++)
     {
         pairFI& d = data[i];
-        d.second = i;
+        d.second = (int)i;
         d.first = imgPtr[i];
     }
@@ -128,7 +129,7 @@ void from32FTo32S(Mat &img, Mat &outImg, int nI, float *mapping)
             mapping[cnt] = data[(baseI+i-1)>>1].first; //median
             cnt++;
             base = data[i].first;
-            baseI = i;
+            baseI = (int)i;
         }
         retImgPtr[data[i].second] = cnt;
     }