Commit f58d4027 authored by lluis

update to new OpenCV 3.0 ml API

parent 1cbeb8ce
Three additional file diffs in this commit are too large to be displayed inline.
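The one diff shown inline below is the ERStat filtering/grouping implementation, where every use of the legacy CvBoost class is ported to the OpenCV 3.0 cv::ml interface. As a minimal, hedged sketch of the recurring pattern (the classifier file name and feature values here are placeholders, not the module's actual data):

    #include <opencv2/ml.hpp>
    using namespace cv;
    using namespace cv::ml;

    // Hedged sketch of the migration pattern applied throughout this diff.
    static float rawBoostVotes()
    {
        // OpenCV 2.4 style (removed by this commit):
        //   CvBoost boost;
        //   boost.load("classifier.xml", "boost");
        //   float votes = boost.predict(Mat(sample), Mat(), Range::all(), false, true);
        //
        // OpenCV 3.0 style (introduced by this commit):
        Ptr<Boost> boost = StatModel::load<Boost>("classifier.xml"); // empty Ptr on failure
        CV_Assert(!boost.empty());
        Mat sample = (Mat_<float>(1,4) << 1.2f, 0.5f, 0.f, 1.f);     // one sample per row
        return boost->predict(sample, noArray(),
                              DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT); // raw boosted sum, not a label
    }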
@@ -61,6 +61,7 @@ namespace text
 {
 using namespace std;
+using namespace cv::ml;
 // Deletes a tree of ERStat regions starting at root. Used only
 // internally to this implementation.
@@ -178,7 +179,7 @@ public:
     double eval(const ERStat& stat);
 private:
-    CvBoost boost;
+    Ptr<Boost> boost;
 };
 // default 2nd stage classifier
@@ -194,7 +195,7 @@ public:
     double eval(const ERStat& stat);
 private:
-    CvBoost boost;
+    Ptr<Boost> boost;
 };
@@ -1016,22 +1017,27 @@ ERClassifierNM1::ERClassifierNM1(const string& filename)
 {
     if (ifstream(filename.c_str()))
-        boost.load( filename.c_str(), "boost" );
+    {
+        boost = StatModel::load<Boost>( filename.c_str() );
+        if( boost.empty() )
+        {
+            cout << "Could not read the classifier " << filename.c_str() << endl;
+            CV_Error(Error::StsBadArg, "Could not read the default classifier!");
+        }
+    }
     else
-        CV_Error(CV_StsBadArg, "Default classifier file not found!");
+        CV_Error(Error::StsBadArg, "Default classifier file not found!");
 }
 double ERClassifierNM1::eval(const ERStat& stat)
 {
     //Classify
-    float arr[] = {0,(float)(stat.rect.width)/(stat.rect.height), // aspect ratio
+    Mat sample = (Mat_<float>(1,4) << (float)(stat.rect.width)/(stat.rect.height), // aspect ratio
                       sqrt((float)(stat.area))/stat.perimeter, // compactness
                       (float)(1-stat.euler), //number of holes
-                      stat.med_crossings};
+                      stat.med_crossings);
-    vector<float> sample (arr, arr + sizeof(arr) / sizeof(arr[0]) );
-    float votes = boost.predict( Mat(sample), Mat(), Range::all(), false, true );
+    float votes = boost->predict( sample, noArray(), DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT);
     // Logistic Correction returns a probability value (in the range(0,1))
     return (double)1-(double)1/(1+exp(-2*votes));
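In the new API the eval() methods request the raw sum of the boosted weak classifiers (DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT) rather than a class label, and keep the existing logistic correction that squashes that sum into a probability in (0,1). A small, hedged sketch of the same step in isolation (the helper name is illustrative, not part of the module):

    #include <cmath>
    #include <opencv2/ml.hpp>
    using namespace cv;
    using namespace cv::ml;

    // Illustrative helper: reproduce the predict + logistic-correction step used
    // by ERClassifierNM1::eval() / ERClassifierNM2::eval() above.
    static double boostSumToProbability(const Ptr<Boost>& model, const Mat& rowSample)
    {
        // PREDICT_SUM | RAW_OUTPUT -> sum of weak-classifier responses, not a class label
        float votes = model->predict(rowSample, noArray(),
                                     DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT);
        return 1.0 - 1.0 / (1.0 + std::exp(-2.0 * votes)); // logistic correction into (0,1)
    }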
@@ -1042,23 +1048,28 @@ double ERClassifierNM1::eval(const ERStat& stat)
 ERClassifierNM2::ERClassifierNM2(const string& filename)
 {
     if (ifstream(filename.c_str()))
-        boost.load( filename.c_str(), "boost" );
+    {
+        boost = StatModel::load<Boost>( filename.c_str() );
+        if( boost.empty() )
+        {
+            cout << "Could not read the classifier " << filename.c_str() << endl;
+            CV_Error(Error::StsBadArg, "Could not read the default classifier!");
+        }
+    }
     else
-        CV_Error(CV_StsBadArg, "Default classifier file not found!");
+        CV_Error(Error::StsBadArg, "Default classifier file not found!");
 }
 double ERClassifierNM2::eval(const ERStat& stat)
 {
     //Classify
-    float arr[] = {0,(float)(stat.rect.width)/(stat.rect.height), // aspect ratio
+    Mat sample = (Mat_<float>(1,7) << (float)(stat.rect.width)/(stat.rect.height), // aspect ratio
                       sqrt((float)(stat.area))/stat.perimeter, // compactness
                       (float)(1-stat.euler), //number of holes
                       stat.med_crossings, stat.hole_area_ratio,
-                      stat.convex_hull_ratio, stat.num_inflexion_points};
+                      stat.convex_hull_ratio, stat.num_inflexion_points);
-    vector<float> sample (arr, arr + sizeof(arr) / sizeof(arr[0]) );
-    float votes = boost.predict( Mat(sample), Mat(), Range::all(), false, true );
+    float votes = boost->predict( sample, noArray(), DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT);
     // Logistic Correction returns a probability value (in the range(0,1))
     return (double)1-(double)1/(1+exp(-2*votes));
@@ -1397,7 +1408,7 @@ static double NFA(int n, int k, double p, double logNT)
     /* check parameters */
     if( n<0 || k<0 || k>n || p<=0.0 || p>=1.0 )
     {
-        CV_Error(CV_StsBadArg, "erGrouping wrong n, k or p values in NFA call!");
+        CV_Error(Error::StsBadArg, "erGrouping wrong n, k or p values in NFA call!");
     }
     /* trivial cases */
@@ -2137,15 +2148,15 @@ static int linkage_vector(double *X, int N, int dim, double * Z, unsigned char m
 } // try
 catch (const bad_alloc&)
 {
-    CV_Error(CV_StsNoMem, "Not enough Memory for erGrouping hierarchical clustering structures!");
+    CV_Error(Error::StsNoMem, "Not enough Memory for erGrouping hierarchical clustering structures!");
 }
 catch(const exception&)
 {
-    CV_Error(CV_StsError, "Uncaught exception in erGrouping!");
+    CV_Error(Error::StsError, "Uncaught exception in erGrouping!");
 }
 catch(...)
 {
-    CV_Error(CV_StsError, "C++ exception (unknown reason) in erGrouping!");
+    CV_Error(Error::StsError, "C++ exception (unknown reason) in erGrouping!");
 }
 return 0;
 }
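The other recurring change in this commit is mechanical: the C-style status codes passed to CV_Error are replaced by their namespaced counterparts in cv::Error, while the macro itself is unchanged. A minimal sketch of the renaming (the function and message here are arbitrary examples):

    #include <opencv2/core.hpp>

    // CV_StsBadArg -> Error::StsBadArg, CV_StsNoMem -> Error::StsNoMem,
    // CV_StsError -> Error::StsError; CV_Error() still throws a cv::Exception.
    static void rejectBadArgument()
    {
        CV_Error(cv::Error::StsBadArg, "example: invalid argument");
    }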
@@ -2206,7 +2217,7 @@ public:
 private:
     double minProbability;
-    CvBoost group_boost;
+    Ptr<Boost> group_boost;
     vector<ERFeatures> &regions;
     Size imsize;
@@ -2230,9 +2241,16 @@ MaxMeaningfulClustering::MaxMeaningfulClustering(unsigned char _method, unsigned
     minProbability = _minProbability;
     if (ifstream(filename.c_str()))
-        group_boost.load( filename.c_str(), "boost" );
+    {
+        group_boost = StatModel::load<Boost>( filename.c_str() );
+        if( group_boost.empty() )
+        {
+            cout << "Could not read the classifier " << filename.c_str() << endl;
+            CV_Error(Error::StsBadArg, "Could not read the default classifier!");
+        }
+    }
     else
-        CV_Error(CV_StsBadArg, "erGrouping: Default classifier file not found!");
+        CV_Error(Error::StsBadArg, "erGrouping: Default classifier file not found!");
 }
@@ -2242,7 +2260,7 @@ void MaxMeaningfulClustering::operator()(double *data, unsigned int num, int dim
     double *Z = (double*)malloc(((num-1)*4) * sizeof(double)); // we need 4 floats foreach sample merge.
     if (Z == NULL)
-        CV_Error(CV_StsNoMem, "Not enough Memory for erGrouping hierarchical clustering structures!");
+        CV_Error(Error::StsNoMem, "Not enough Memory for erGrouping hierarchical clustering structures!");
     linkage_vector(data, (int)num, dim, Z, method, metric);
@@ -2541,7 +2559,6 @@ double MaxMeaningfulClustering::probability(vector<int> &cluster)
         return 0.;
     vector<float> sample;
-    sample.push_back(0);
     sample.push_back((float)cluster.size());
     Mat diameters ( (int)cluster.size(), 1, CV_32F, 1 );
@@ -2723,7 +2740,7 @@ double MaxMeaningfulClustering::probability(vector<int> &cluster)
     sample.push_back((float)mean[0]);
     sample.push_back((float)std[0]);
-    float votes_group = group_boost.predict( Mat(sample), Mat(), Range::all(), false, true );
+    float votes_group = group_boost->predict( Mat(sample), noArray(), DTrees::PREDICT_SUM | StatModel::RAW_OUTPUT);
     return (double)1-(double)1/(1+exp(-2*votes_group));
 }
@@ -3021,7 +3038,7 @@ static void erGroupingGK(InputArray _image, InputArrayOfArrays _src, vector<vect
     // assert correct image type
     CV_Assert( channel.type() == CV_8UC1 );
-    CV_Assert( !regions.at(c).empty() );
+    //CV_Assert( !regions.at(c).empty() );
     if ( regions.at(c).size() < 3 )
         continue;
@@ -3039,7 +3056,7 @@ static void erGroupingGK(InputArray _image, InputArrayOfArrays _src, vector<vect
     int dim = 7; //dimensionality of feature space
     double *data = (double*)malloc(dim*N * sizeof(double));
     if (data == NULL)
-        CV_Error(CV_StsNoMem, "Not enough Memory for erGrouping hierarchical clustering structures!");
+        CV_Error(Error::StsNoMem, "Not enough Memory for erGrouping hierarchical clustering structures!");
     //Learned weights
     float weight_param1 = 1.00f;