cv::error(Error::StsBadArg,"predict: classifier should be trained first","cv::ml::LogisticRegression::predict",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"classifier should be trained first");
}
if(data.type()!=CV_32F)
{
cv::error(Error::StsBadArg,"predict: data must be of floating type","cv::ml::LogisticRegression::predict",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"data must be of floating type");
}
// add a column of ones
...
...
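The predict() hunk above is representative of the whole change: the long-form cv::error() call, which spells out the function name, __FILE__ and __LINE__ by hand, is collapsed into the CV_Error macro. In core/base.hpp the macro is defined roughly as

    #define CV_Error( code, msg ) cv::error( code, msg, CV_Func, __FILE__, __LINE__ )

so the call site shrinks to the error code and message, and the reported function name can no longer drift out of sync with the surrounding code when a method is renamed or moved.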
@@ -322,12 +337,12 @@ cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const c
// implements batch gradient descent
if(this->params.alpha<=0)
{
cv::error(Error::StsBadArg,"compute_batch_gradient: check training parameters for the classifier","cv::ml::LogisticRegression::compute_batch_gradient",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"check training parameters for the classifier");
}
if(this->params.num_iters<=0)
{
cv::error(Error::StsBadArg,"compute_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::LogisticRegression::compute_batch_gradient",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"number of iterations cannot be zero or a negative number");
}
int llambda = 0;
...
...
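compute_batch_gradient() runs plain full-batch gradient descent on the logistic loss, which is why both the learning rate (alpha) and the iteration count are validated before the loop starts. For reference, each iteration boils down to one step of the following form; batch_step and its parameter names are an illustrative sketch, not the member fields used in lrs.cpp, and regularization is omitted:

    #include <opencv2/core.hpp>

    // one full-batch descent step on the logistic loss (illustrative only)
    static cv::Mat batch_step(const cv::Mat& data, const cv::Mat& labels,
                              const cv::Mat& theta, double alpha)
    {
        cv::Mat h;
        cv::exp(-(data * theta), h);               // exp(-X*theta)
        cv::divide(1.0, 1.0 + h, h);               // sigmoid: 1 / (1 + exp(-X*theta))
        cv::Mat grad = data.t() * (h - labels) / (double)data.rows;
        return theta - alpha * grad;               // alpha is the learning rate checked above
    }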
@@ -352,7 +367,7 @@ cv::Mat LogisticRegression::compute_batch_gradient(const cv::Mat& _data, const c
if(cvIsNaN(ccost))
{
cv::error(Error::StsBadArg,"compute_batch_gradient: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::compute_batch_gradient",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"check training parameters. Invalid training classifier");
}
pcal_b=calc_sigmoid((_data*theta_p)-_labels);
...
...
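The NaN guard exists because the cross-entropy cost takes logarithms of the sigmoid output: with an overly large learning rate or badly scaled data the predictions saturate to exactly 0 or 1, log() returns -inf, and products such as 0 * log(0) turn the summed cost into NaN. A hedged sketch of the quantity being checked (log_loss is an illustrative helper, not the function used in lrs.cpp):

    #include <opencv2/core.hpp>

    // cross-entropy cost for predictions h in (0,1); saturation drives it to NaN
    static double log_loss(const cv::Mat& h, const cv::Mat& labels)
    {
        cv::Mat lh, lnh;
        cv::log(h, lh);                                   // log(h)
        cv::log(1.0 - h, lnh);                            // log(1 - h)
        double cost = -cv::sum(labels.mul(lh) + (1.0 - labels).mul(lnh))[0] / h.rows;
        return cost;                                      // cvIsNaN(cost) flags a diverging run
    }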
@@ -397,12 +412,12 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co
cv::error(Error::StsBadArg,"compute_mini_batch_gradient: check training parameters for the classifier","cv::ml::LogisticRegression::compute_mini_batch_gradient",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"check training parameters for the classifier");
}
if(this->params.num_iters<=0)
{
cv::error(Error::StsBadArg,"compute_mini_batch_gradient: number of iterations cannot be zero or a negative number","cv::ml::LogisticRegression::compute_mini_batch_gradient",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"number of iterations cannot be zero or a negative number");
}
cv::Mat pcal_a;
...
...
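compute_mini_batch_gradient() repeats the same alpha and num_iters validation; the substantive edit comes in the next hunk, where term_crit.max_iter (the legacy CvTermCriteria spelling) becomes term_crit.maxCount, the member that cv::TermCriteria actually exposes. For context, a criterion of this kind would typically be constructed as follows (the values are illustrative, not the defaults used by the classifier):

    // iteration-count criterion; 100 stands in for the configured num_iters
    cv::TermCriteria term_crit(cv::TermCriteria::COUNT, 100, 0.0);
    int iters = term_crit.maxCount;   // the member the patched loop now reads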
@@ -418,7 +433,7 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co
lambda_l=1;
}
-for(int i = 0;this->params.term_crit.max_iter;i++)
+for(int i = 0;this->params.term_crit.maxCount;i++)
{
if(j+size_b<=_data.rows)
{
...
...
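Aside from the renamed field, the mini-batch loop differs from the batch version mainly in how the data is fed in: each pass updates theta from a window of size_b consecutive rows (the configured mini-batch size), and the j + size_b bounds check above diverts the final, shorter chunk to a separate branch. A hedged sketch of that slicing, reusing the batch_step helper outlined earlier (both are illustrative, not the code in lrs.cpp):

    // take the next chunk of size_b rows and advance the window
    cv::Mat data_b   = data.rowRange(j, j + size_b);
    cv::Mat labels_b = labels.rowRange(j, j + size_b);
    theta = batch_step(data_b, labels_b, theta, alpha);
    j += size_b;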
@@ -438,7 +453,7 @@ cv::Mat LogisticRegression::compute_mini_batch_gradient(const cv::Mat& _data, co
if(cvIsNaN(ccost)==1)
{
cv::error(Error::StsBadArg,"compute_mini_batch_gradient: check training parameters. Invalid training classifier","cv::ml::LogisticRegression::compute_mini_batch_gradient",__FILE__,__LINE__);
CV_Error(CV_StsBadArg,"check training parameters. Invalid training classifier");