Commit c20ff6ce authored by Vadim Pisarevsky

made everything compile and even run somehow

parent 10b60f8d
-set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_ml opencv_imgproc opencv_photo opencv_objdetect opencv_imgcodecs opencv_videoio opencv_highgui opencv_calib3d opencv_video opencv_features2d)
+set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_imgproc opencv_objdetect opencv_imgcodecs opencv_highgui opencv_calib3d opencv_features2d)
 ocv_check_dependencies(${OPENCV_TRAINCASCADE_DEPS})
 if(NOT OCV_DEPENDENCIES_FOUND)
@@ -10,13 +10,10 @@ project(traincascade)
 ocv_include_directories("${CMAKE_CURRENT_SOURCE_DIR}" "${OpenCV_SOURCE_DIR}/include/opencv")
 ocv_include_modules(${OPENCV_TRAINCASCADE_DEPS})
-set(traincascade_files traincascade.cpp
-    cascadeclassifier.cpp cascadeclassifier.h
-    boost.cpp boost.h features.cpp traincascade_features.h
-    haarfeatures.cpp haarfeatures.h
-    lbpfeatures.cpp lbpfeatures.h
-    HOGfeatures.cpp HOGfeatures.h
-    imagestorage.cpp imagestorage.h)
+file(GLOB SRCS *.cpp)
+file(GLOB HDRS *.h*)
+set(traincascade_files ${SRCS} ${HDRS})
 set(the_target opencv_traincascade)
 add_executable(${the_target} ${traincascade_files})
@@ -2,7 +2,7 @@
 #define _OPENCV_BOOST_H_
 #include "traincascade_features.h"
-#include "ml.h"
+#include "old_ml.hpp"
 struct CvCascadeBoostParams : CvBoostParams
 {
@@ -7,8 +7,6 @@
 #include "lbpfeatures.h"
 #include "HOGfeatures.h" //new
 #include "boost.h"
-#include "cv.h"
-#include "cxcore.h"
 #define CC_CASCADE_FILENAME "cascade.xml"
 #define CC_PARAMS_FILENAME "params.xml"
#include "opencv2/core.hpp"
#include "cv.h"
#include "cascadeclassifier.h"
using namespace std;
@@ -2,9 +2,6 @@
 #define _OPENCV_FEATURES_H_
 #include "imagestorage.h"
-#include "cxcore.h"
-#include "cv.h"
-#include "ml.h"
 #include <stdio.h>
 #define FEATURES "features"
@@ -135,7 +135,7 @@ public:
     virtual Mat getCatMap() const = 0;

     virtual void setTrainTestSplit(int count, bool shuffle=true) = 0;
-    virtual void setTrainTestSplitRatio(float ratio, bool shuffle=true) = 0;
+    virtual void setTrainTestSplitRatio(double ratio, bool shuffle=true) = 0;
     virtual void shuffleTrainTest() = 0;

     static Mat getSubVector(const Mat& vec, const Mat& idx);
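setTrainTestSplitRatio now takes a double. A minimal usage sketch of the TrainData API shown in this hunk, assuming nothing beyond it; the sample/response matrices and the helper name are illustrative placeholders:

#include "opencv2/ml.hpp"
using namespace cv;
using namespace cv::ml;

// Hypothetical helper: 'samples' holds one sample per row, 'responses' the labels.
void splitData(const Mat& samples, const Mat& responses)
{
    Ptr<TrainData> data = TrainData::create(samples, ROW_SAMPLE, responses);
    data->setTrainTestSplitRatio(0.8);  // 80% train / 20% test; 'shuffle' defaults to true
    data->shuffleTrainTest();           // re-shuffle the existing split explicitly
}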
@@ -156,7 +156,6 @@ class CV_EXPORTS_W StatModel : public Algorithm
 {
 public:
     enum { UPDATE_MODEL = 1, RAW_OUTPUT=1, COMPRESSED_INPUT=2, PREPROCESSED_INPUT=4 };
-    virtual ~StatModel();

     virtual void clear();
     virtual int getVarCount() const = 0;
@@ -164,16 +163,30 @@ public:
     virtual bool isTrained() const = 0;
     virtual bool isClassifier() const = 0;
-    virtual bool train( const Ptr<TrainData>& trainData, int flags=0 ) = 0;
+    virtual bool train( const Ptr<TrainData>& trainData, int flags=0 );
+    virtual bool train( InputArray samples, int layout, InputArray responses );
     virtual float calcError( const Ptr<TrainData>& data, bool test, OutputArray resp ) const;
     virtual float predict( InputArray samples, OutputArray results=noArray(), int flags=0 ) const = 0;

     template<typename _Tp> static Ptr<_Tp> load(const String& filename)
     {
         FileStorage fs(filename, FileStorage::READ);
-        Ptr<_Tp> p = _Tp::create();
-        p->read(fs.getFirstTopLevelNode());
-        return p->isTrained() ? p : Ptr<_Tp>();
+        Ptr<_Tp> model = _Tp::create();
+        model->read(fs.getFirstTopLevelNode());
+        return model->isTrained() ? model : Ptr<_Tp>();
     }

+    template<typename _Tp> static Ptr<_Tp> train(const Ptr<TrainData>& data, const typename _Tp::Params& p, int flags=0)
+    {
+        Ptr<_Tp> model = _Tp::create(p);
+        return !model.empty() && model->train(data, flags) ? model : Ptr<_Tp>();
+    }
+
+    template<typename _Tp> static Ptr<_Tp> train(InputArray samples, int layout, InputArray responses,
+                                                 const typename _Tp::Params& p, int flags=0)
+    {
+        Ptr<_Tp> model = _Tp::create(p);
+        return !model.empty() && model->train(TrainData::create(samples, layout, responses), flags) ? model : Ptr<_Tp>();
+    }

     virtual void save(const String& filename) const;
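The new static train<_Tp> helpers pair with the existing load<_Tp>. A hedged sketch of the intended call pattern; SVM stands in for any _Tp with a nested Params and a create(Params) factory (its actual Params fields are not part of this diff, and the file name is illustrative):

#include "opencv2/ml.hpp"
using namespace cv;
using namespace cv::ml;

void trainSaveLoad(const Mat& samples, const Mat& responses)
{
    // One-liner training; returns an empty Ptr if training fails.
    Ptr<SVM> svm = StatModel::train<SVM>(samples, ROW_SAMPLE, responses, SVM::Params());
    if( !svm.empty() )
        svm->save("model.xml");

    // load<_Tp> reads the first top-level node and checks isTrained().
    Ptr<SVM> restored = StatModel::load<SVM>("model.xml");
}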
@@ -192,11 +205,17 @@ public:
 class CV_EXPORTS_W NormalBayesClassifier : public StatModel
 {
 public:
-    virtual ~NormalBayesClassifier();
+    class CV_EXPORTS_W_MAP Params
+    {
+    public:
+        Params();
+    };
     virtual float predictProb( InputArray inputs, OutputArray outputs,
                                OutputArray outputProbs, int flags=0 ) const = 0;
+    virtual void setParams(const Params& params) = 0;
+    virtual Params getParams() const = 0;

-    static Ptr<NormalBayesClassifier> create();
+    static Ptr<NormalBayesClassifier> create(const Params& params=Params());
 };
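A hedged sketch of the reworked NormalBayesClassifier entry point; the data matrices are placeholders, and Params carries no fields per the declaration above:

#include "opencv2/ml.hpp"
using namespace cv;
using namespace cv::ml;

void bayesExample(const Mat& trainSamples, const Mat& trainResponses, const Mat& testSamples)
{
    Ptr<NormalBayesClassifier> nb = NormalBayesClassifier::create(NormalBayesClassifier::Params());
    nb->train(trainSamples, ROW_SAMPLE, trainResponses); // convenience overload from StatModel
    Mat outputs, outputProbs;
    nb->predictProb(testSamples, outputs, outputProbs);  // per-class probabilities
}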
/****************************************************************************************\
@@ -207,13 +226,21 @@ public:
 class CV_EXPORTS_W KNearest : public StatModel
 {
 public:
-    virtual void setDefaultK(int k) = 0;
-    virtual int getDefaultK() const = 0;
+    class CV_EXPORTS_W_MAP Params
+    {
+    public:
+        Params(int defaultK=10, bool isclassifier=true);
+        int defaultK;
+        bool isclassifier;
+    };
+    virtual void setParams(const Params& p) = 0;
+    virtual Params getParams() const = 0;

     virtual float findNearest( InputArray samples, int k,
                                OutputArray results,
                                OutputArray neighborResponses=noArray(),
                                OutputArray dist=noArray() ) const = 0;
-    static Ptr<KNearest> create(bool isclassifier=true);
+    static Ptr<KNearest> create(const Params& params=Params());
 };
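Likewise for KNearest, whose bool-argument factory gives way to a Params-based one. A sketch under the declaration above; k and the data names are illustrative:

#include "opencv2/ml.hpp"
using namespace cv;
using namespace cv::ml;

void knnExample(const Mat& trainSamples, const Mat& trainLabels, const Mat& querySamples)
{
    Ptr<KNearest> knn = KNearest::create(KNearest::Params(5, true)); // defaultK=5, classifier mode
    knn->train(trainSamples, ROW_SAMPLE, trainLabels);
    Mat results, neighborResponses, dists;
    knn->findNearest(querySamples, 5, results, neighborResponses, dists);
}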
/****************************************************************************************\
@@ -247,7 +274,6 @@ public:
     class CV_EXPORTS Kernel : public Algorithm
     {
     public:
-        virtual ~Kernel();
         virtual int getType() const = 0;
         virtual void calc( int vcount, int n, const float* vecs, const float* another, float* results ) = 0;
     };
@@ -261,8 +287,6 @@ public:
     // SVM params type
     enum { C=0, GAMMA=1, P=2, NU=3, COEF=4, DEGREE=5 };
-    virtual ~SVM();
-
     virtual bool trainAuto( const Ptr<TrainData>& data, int kFold = 10,
                     ParamGrid Cgrid = SVM::getDefaultGrid(SVM::C),
                     ParamGrid gammaGrid = SVM::getDefaultGrid(SVM::GAMMA),
@@ -399,8 +423,6 @@ public:
         int subsetOfs;
     };
-    virtual ~DTrees();
-
     virtual void setDParams(const Params& p);
     virtual Params getDParams() const;
@@ -464,7 +486,6 @@ public:
     // Boosting type
     enum { DISCRETE=0, REAL=1, LOGIT=2, GENTLE=3 };
-    virtual ~Boost();
     virtual Params getBParams() const = 0;
     virtual void setBParams(const Params& p) = 0;
@@ -491,7 +512,6 @@ public:
     };
     enum {SQUARED_LOSS=0, ABSOLUTE_LOSS, HUBER_LOSS=3, DEVIANCE_LOSS};
-    virtual ~GBTrees();
     virtual void setK(int k) = 0;
@@ -513,10 +533,16 @@ public:
     struct CV_EXPORTS_W_MAP Params
     {
         Params();
-        Params( TermCriteria termCrit, int trainMethod, double param1, double param2=0 );
+        Params( const Mat& layerSizes, int activateFunc, double fparam1, double fparam2,
+                TermCriteria termCrit, int trainMethod, double param1, double param2=0 );

         enum { BACKPROP=0, RPROP=1 };

+        CV_PROP_RW Mat layerSizes;
+        CV_PROP_RW int activateFunc;
+        CV_PROP_RW double fparam1;
+        CV_PROP_RW double fparam2;
+
         CV_PROP_RW TermCriteria termCrit;
         CV_PROP_RW int trainMethod;
@@ -527,23 +553,17 @@ public:
         CV_PROP_RW double rpDW0, rpDWPlus, rpDWMinus, rpDWMin, rpDWMax;
     };
-    virtual ~ANN_MLP();
-
     // possible activation functions
     enum { IDENTITY = 0, SIGMOID_SYM = 1, GAUSSIAN = 2 };

     // available training flags
     enum { UPDATE_WEIGHTS = 1, NO_INPUT_SCALE = 2, NO_OUTPUT_SCALE = 4 };

     virtual Mat getLayerSizes() const = 0;
     virtual Mat getWeights(int layerIdx) const = 0;
+    virtual void setParams(const Params& p) = 0;
+    virtual Params getParams() const = 0;

-    static Ptr<ANN_MLP> create(InputArray layerSizes=noArray(),
-                               const Params& params=Params(),
-                               int activateFunc=ANN_MLP::SIGMOID_SYM,
-                               double fparam1=0, double fparam2=0);
+    static Ptr<ANN_MLP> create(const Params& params=Params());
 };
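ANN_MLP construction now goes through Params as well, with the layer topology moved into the struct. A hedged sketch of the new path; the topology and training constants are illustrative, not from the commit:

#include "opencv2/ml.hpp"
using namespace cv;
using namespace cv::ml;

void mlpExample(const Mat& trainSamples, const Mat& trainResponses)
{
    Mat layerSizes = (Mat_<int>(3, 1) << 2, 5, 1);  // 2 inputs, 5 hidden, 1 output
    ANN_MLP::Params params(layerSizes, ANN_MLP::SIGMOID_SYM, 0, 0,
                           TermCriteria(TermCriteria::MAX_ITER + TermCriteria::EPS, 300, 1e-4),
                           ANN_MLP::Params::BACKPROP, 0.001);
    Ptr<ANN_MLP> net = ANN_MLP::create(params);
    net->train(trainSamples, ROW_SAMPLE, trainResponses);
}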
/****************************************************************************************\
@@ -379,7 +379,7 @@ public:
             tempCatOfs.push_back(ofs);
             std::copy(labels.begin(), labels.end(), std::back_inserter(tempCatMap));
         }
-        else if( haveMissing )
+        else
         {
             tempCatOfs.push_back(Vec2i(0, 0));
             /*Mat missing_i = layout == ROW_SAMPLE ? missing.col(i) : missing.row(i);
@@ -741,9 +741,9 @@ public:
             CV_Error( CV_StsBadArg, "type of some variables is not specified" );
     }

-    void setTrainTestSplitRatio(float ratio, bool shuffle)
+    void setTrainTestSplitRatio(double ratio, bool shuffle)
     {
-        CV_Assert( 0 <= ratio && ratio <= 1 );
+        CV_Assert( 0. <= ratio && ratio <= 1. );
         setTrainTestSplit(cvRound(getNSamples()*ratio), shuffle);
     }
@@ -50,7 +50,6 @@ ParamGrid::ParamGrid(double _minVal, double _maxVal, double _logStep)
     logStep = std::max(_logStep, 1.);
 }
-StatModel::~StatModel() {}

 void StatModel::clear() {}
 int StatModel::getVarCount() const { return 0; }
@@ -61,6 +60,11 @@ bool StatModel::train( const Ptr<TrainData>&, int )
     return false;
 }

+bool StatModel::train( InputArray samples, int layout, InputArray responses )
+{
+    return train(TrainData::create(samples, layout, responses));
+}
+
 float StatModel::calcError( const Ptr<TrainData>& data, bool testerr, OutputArray _resp ) const
 {
     Mat samples = data->getSamples();
@@ -43,7 +43,7 @@
 namespace cv {
 namespace ml {

-NormalBayesClassifier::~NormalBayesClassifier() {}
+NormalBayesClassifier::Params::Params() {}

 class NormalBayesClassifierImpl : public NormalBayesClassifier
 {
@@ -53,6 +53,9 @@ public:
         nallvars = 0;
     }

+    void setParams(const Params&) {}
+    Params getParams() const { return Params(); }
+
     bool train( const Ptr<TrainData>& trainData, int flags )
     {
         const float min_variation = FLT_EPSILON;
@@ -452,7 +455,7 @@ public:
 };

-Ptr<NormalBayesClassifier> NormalBayesClassifier::create()
+Ptr<NormalBayesClassifier> NormalBayesClassifier::create(const Params&)
 {
     Ptr<NormalBayesClassifierImpl> p = makePtr<NormalBayesClassifierImpl>();
     return p;
@@ -48,8 +48,6 @@ namespace ml {
 using std::vector;

-DTrees::~DTrees() {}
-
 void DTrees::setDParams(const DTrees::Params&)
 {
     CV_Error(CV_StsNotImplemented, "");