Commit 432b7cb2 authored by Vadim Pisarevsky

Merge pull request #3032 from vpisarev:refactor_ml2

parents dbedc63c cb9b88e6
-set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_ml opencv_imgproc opencv_photo opencv_objdetect opencv_imgcodecs opencv_videoio opencv_highgui opencv_calib3d opencv_video opencv_features2d)
+set(OPENCV_TRAINCASCADE_DEPS opencv_core opencv_imgproc opencv_objdetect opencv_imgcodecs opencv_highgui opencv_calib3d opencv_features2d)
 ocv_check_dependencies(${OPENCV_TRAINCASCADE_DEPS})
 if(NOT OCV_DEPENDENCIES_FOUND)
@@ -10,13 +10,10 @@ project(traincascade)
 ocv_include_directories("${CMAKE_CURRENT_SOURCE_DIR}" "${OpenCV_SOURCE_DIR}/include/opencv")
 ocv_include_modules(${OPENCV_TRAINCASCADE_DEPS})
-set(traincascade_files traincascade.cpp
-    cascadeclassifier.cpp cascadeclassifier.h
-    boost.cpp boost.h features.cpp traincascade_features.h
-    haarfeatures.cpp haarfeatures.h
-    lbpfeatures.cpp lbpfeatures.h
-    HOGfeatures.cpp HOGfeatures.h
-    imagestorage.cpp imagestorage.h)
+file(GLOB SRCS *.cpp)
+file(GLOB HDRS *.h*)
+
+set(traincascade_files ${SRCS} ${HDRS})
 set(the_target opencv_traincascade)
 add_executable(${the_target} ${traincascade_files})
...
@@ -2,7 +2,7 @@
 #define _OPENCV_BOOST_H_
 #include "traincascade_features.h"
-#include "ml.h"
+#include "old_ml.hpp"
 struct CvCascadeBoostParams : CvBoostParams
 {
...
@@ -7,8 +7,6 @@
 #include "lbpfeatures.h"
 #include "HOGfeatures.h" //new
 #include "boost.h"
-#include "cv.h"
-#include "cxcore.h"
 #define CC_CASCADE_FILENAME "cascade.xml"
 #define CC_PARAMS_FILENAME "params.xml"
...
#include "opencv2/core.hpp" #include "opencv2/core.hpp"
#include "cv.h"
#include "cascadeclassifier.h" #include "cascadeclassifier.h"
using namespace std; using namespace std;
......
@@ -2,9 +2,6 @@
 #define _OPENCV_FEATURES_H_
 #include "imagestorage.h"
-#include "cxcore.h"
-#include "cv.h"
-#include "ml.h"
 #include <stdio.h>
 #define FEATURES "features"
...
@@ -845,7 +845,6 @@ For convenience, the following types from the OpenCV C API already have such a s
 that calls the appropriate release function:
 * ``CvCapture``
-* :ocv:struct:`CvDTreeSplit`
 * :ocv:struct:`CvFileStorage`
 * ``CvHaarClassifierCascade``
 * :ocv:struct:`CvMat`
...
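The list above comes from the ``Ptr`` documentation: for these C API types a deleter specialization already exists, so wrapping the raw pointer in ``cv::Ptr`` releases it automatically. A minimal illustrative sketch for ``CvMat`` (the snippet is an assumption-based example, not code from this commit):

.. code-block:: cpp

    #include "opencv2/core.hpp"
    #include "opencv2/core/core_c.h"   // legacy C API: CvMat, cvCreateMat, cvSet

    int main()
    {
        // The existing deleter specialization for CvMat calls cvReleaseMat()
        // when the last Ptr referencing the matrix goes out of scope.
        cv::Ptr<CvMat> m(cvCreateMat(10, 10, CV_32F));
        cvSet(m.get(), cvScalar(1.0));   // pass the raw pointer where the C API expects CvArr*
        return 0;                        // no explicit cvReleaseMat() call needed
    }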
@@ -1557,13 +1557,17 @@ static void _SVDcompute( InputArray _aarr, OutputArray _w,
     {
         if( !at )
         {
-            transpose(temp_u, _u);
-            temp_v.copyTo(_vt);
+            if( _u.needed() )
+                transpose(temp_u, _u);
+            if( _vt.needed() )
+                temp_v.copyTo(_vt);
         }
         else
         {
-            transpose(temp_v, _u);
-            temp_u.copyTo(_vt);
+            if( _u.needed() )
+                transpose(temp_v, _u);
+            if( _vt.needed() )
+                temp_u.copyTo(_vt);
         }
     }
 }
...
@@ -1221,7 +1221,7 @@ static bool IPPMorphReplicate(int op, const Mat &src, Mat &dst, const Mat &kerne
         IPP_MORPH_CASE(CV_32FC3, 32f_C3R, 32f);
         IPP_MORPH_CASE(CV_32FC4, 32f_C4R, 32f);
         default:
-            return false;
+            ;
         }
 #undef IPP_MORPH_CASE
@@ -1253,14 +1253,11 @@ static bool IPPMorphReplicate(int op, const Mat &src, Mat &dst, const Mat &kerne
         IPP_MORPH_CASE(CV_32FC3, 32f_C3R, 32f);
         IPP_MORPH_CASE(CV_32FC4, 32f_C4R, 32f);
         default:
-            return false;
+            ;
         }
 #undef IPP_MORPH_CASE
-#if defined(__GNUC__) && __GNUC__ == 4 && __GNUC_MINOR__ >= 8
-        return false; /// It disables false positive warning in GCC 4.8 and further
-#endif
     }
+    return false;
 }
 static bool IPPMorphOp(int op, InputArray _src, OutputArray _dst,
...
@@ -6,7 +6,7 @@ if(IOS OR NOT PYTHON_EXECUTABLE OR NOT ANT_EXECUTABLE OR NOT (JNI_FOUND OR (ANDR
 endif()
 set(the_description "The java bindings")
-ocv_add_module(java BINDINGS opencv_core opencv_imgproc OPTIONAL opencv_objdetect opencv_features2d opencv_video opencv_imgcodecs opencv_videoio opencv_ml opencv_calib3d opencv_photo opencv_nonfree opencv_contrib)
+ocv_add_module(java BINDINGS opencv_core opencv_imgproc OPTIONAL opencv_objdetect opencv_features2d opencv_video opencv_imgcodecs opencv_videoio opencv_calib3d opencv_photo opencv_nonfree opencv_contrib)
 ocv_module_include_directories("${CMAKE_CURRENT_SOURCE_DIR}/generator/src/cpp")
 if(NOT ANDROID)
...
@@ -2,7 +2,7 @@
 from __future__ import print_function
 import os, sys, re, string, fnmatch
-allmodules = ["core", "flann", "imgproc", "ml", "imgcodecs", "videoio", "highgui", "video", "features2d", "calib3d", "objdetect", "legacy", "contrib", "cuda", "androidcamera", "java", "python", "stitching", "ts", "photo", "nonfree", "videostab", "softcascade", "superres"]
+allmodules = ["core", "flann", "imgproc", "imgcodecs", "videoio", "highgui", "video", "features2d", "calib3d", "objdetect", "legacy", "contrib", "cuda", "androidcamera", "java", "python", "stitching", "ts", "photo", "nonfree", "videostab", "softcascade", "superres"]
 verbose = False
 show_warnings = True
 show_errors = True
...
@@ -14,10 +14,6 @@
 # include "opencv2/video.hpp"
 #endif
-#ifdef HAVE_OPENCV_ML
-# include "opencv2/ml.hpp"
-#endif
 #ifdef HAVE_OPENCV_CONTRIB
 # include "opencv2/contrib.hpp"
 #endif
@@ -41,10 +37,7 @@ JNI_OnLoad(JavaVM* vm, void* )
 #ifdef HAVE_OPENCV_VIDEO
     init &= cv::initModule_video();
 #endif
-#ifdef HAVE_OPENCV_ML
-    init &= cv::initModule_ml();
-#endif
 #ifdef HAVE_OPENCV_CONTRIB
     init &= cv::initModule_contrib();
 #endif
...
Extremely randomized trees
==========================

Extremely randomized trees were introduced by Pierre Geurts, Damien Ernst and Louis Wehenkel in the article "Extremely randomized trees", 2006 [http://citeseerx.ist.psu.edu/viewdoc/download?doi=10.1.1.65.7485&rep=rep1&type=pdf]. The algorithm for growing Extremely randomized trees is similar to :ref:`Random Trees` (Random Forest), but there are two differences:

#. Extremely randomized trees do not apply the bagging procedure to construct a set of training samples for each tree. The same input training set is used to train all trees.

#. Extremely randomized trees pick a node split entirely at random (both the variable index and the splitting value are chosen randomly), whereas Random Forest finds the best split (by variable index and splitting value) among a random subset of variables.

CvERTrees
----------
.. ocv:class:: CvERTrees : public CvRTrees

The class implements the Extremely randomized trees algorithm. ``CvERTrees`` is inherited from :ocv:class:`CvRTrees` and has the same interface, so see the description of the :ocv:class:`CvRTrees` class for details. The training parameters of Extremely randomized trees are set with the same :ocv:struct:`CvRTParams` structure.
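Since ``CvERTrees`` is driven through the ``CvRTrees`` interface, a minimal usage sketch against the 2.4-style ml API is shown below; the header name, data shapes, and parameter values are illustrative assumptions and not part of this documentation (in this commit the legacy classes move to the private ``old_ml.hpp``):

.. code-block:: cpp

    #include "opencv2/core/core.hpp"
    #include "opencv2/ml/ml.hpp"   // 2.4-style header with CvERTrees and CvRTParams

    int main()
    {
        // Illustrative data: 100 row samples, 5 ordered features, two integer class labels.
        cv::Mat trainData(100, 5, CV_32F), responses(100, 1, CV_32F);
        cv::randu(trainData, cv::Scalar(0.), cv::Scalar(1.));
        for( int i = 0; i < responses.rows; i++ )
            responses.at<float>(i) = (float)(i % 2);

        // Mark all input variables as ordered and the response as categorical.
        cv::Mat varType(trainData.cols + 1, 1, CV_8U, cv::Scalar(CV_VAR_ORDERED));
        varType.at<uchar>(trainData.cols) = CV_VAR_CATEGORICAL;

        // The same parameter structure as for Random Trees.
        CvRTParams params(10,    // max_depth
                          2,     // min_sample_count
                          0.f,   // regression_accuracy
                          false, // use_surrogates
                          16,    // max_categories
                          0,     // priors
                          false, // calc_var_importance
                          0,     // nactive_vars (0 means sqrt(number of variables))
                          100,   // maximal number of trees in the forest
                          0.01f, // forest accuracy
                          CV_TERMCRIT_ITER | CV_TERMCRIT_EPS);

        CvERTrees ertrees;
        ertrees.train(trainData, CV_ROW_SAMPLE, responses,
                      cv::Mat(), cv::Mat(), varType, cv::Mat(), params);

        float prediction = ertrees.predict(trainData.row(0));
        (void)prediction;
        return 0;
    }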
@@ -15,9 +15,7 @@ Most of the classification and regression algorithms are implemented as C++ clas
     support_vector_machines
     decision_trees
     boosting
-    gradient_boosted_trees
     random_trees
-    ertrees
     expectation_maximization
     neural_networks
     mldata
@@ -9,55 +9,26 @@ This simple classification model assumes that feature vectors from each class ar
 .. [Fukunaga90] K. Fukunaga. *Introduction to Statistical Pattern Recognition*. second ed., New York: Academic Press, 1990.
-CvNormalBayesClassifier
+NormalBayesClassifier
 -----------------------
-.. ocv:class:: CvNormalBayesClassifier : public CvStatModel
+.. ocv:class:: NormalBayesClassifier : public StatModel
 Bayes classifier for normally distributed data.
-CvNormalBayesClassifier::CvNormalBayesClassifier
-------------------------------------------------
-Default and training constructors.
-.. ocv:function:: CvNormalBayesClassifier::CvNormalBayesClassifier()
-.. ocv:function:: CvNormalBayesClassifier::CvNormalBayesClassifier( const Mat& trainData, const Mat& responses, const Mat& varIdx=Mat(), const Mat& sampleIdx=Mat() )
-.. ocv:function:: CvNormalBayesClassifier::CvNormalBayesClassifier( const CvMat* trainData, const CvMat* responses, const CvMat* varIdx=0, const CvMat* sampleIdx=0 )
-.. ocv:pyfunction:: cv2.NormalBayesClassifier([trainData, responses[, varIdx[, sampleIdx]]]) -> <NormalBayesClassifier object>
-The constructors follow the conventions of :ocv:func:`CvStatModel::CvStatModel`. See :ocv:func:`CvStatModel::train` for parameter descriptions.
-CvNormalBayesClassifier::train
-------------------------------
-Trains the model.
-.. ocv:function:: bool CvNormalBayesClassifier::train( const Mat& trainData, const Mat& responses, const Mat& varIdx=Mat(), const Mat& sampleIdx=Mat(), bool update=false )
-.. ocv:function:: bool CvNormalBayesClassifier::train( const CvMat* trainData, const CvMat* responses, const CvMat* varIdx=0, const CvMat* sampleIdx=0, bool update=false )
-.. ocv:pyfunction:: cv2.NormalBayesClassifier.train(trainData, responses[, varIdx[, sampleIdx[, update]]]) -> retval
-:param update: Identifies whether the model should be trained from scratch (``update=false``) or should be updated using the new training data (``update=true``).
-The method trains the Normal Bayes classifier. It follows the conventions of the generic :ocv:func:`CvStatModel::train` approach with the following limitations:
-* Only the ``CV_ROW_SAMPLE`` data layout is supported.
-* Input variables are all ordered.
-* The output variable is categorical, which means that elements of ``responses`` must be integer numbers, though the vector may have the ``CV_32FC1`` type.
-* Missing measurements are not supported.
-CvNormalBayesClassifier::predict
---------------------------------
+NormalBayesClassifier::create
+-----------------------------
+Creates an empty model.
+.. ocv:function:: Ptr<NormalBayesClassifier> NormalBayesClassifier::create(const NormalBayesClassifier::Params& params=Params())
+:param params: The model parameters. There are none so far; the structure is used as a placeholder for possible extensions.
+Use ``StatModel::train`` to train the model, ``StatModel::train<NormalBayesClassifier>(traindata, params)`` to create and train the model, and ``StatModel::load<NormalBayesClassifier>(filename)`` to load a pre-trained model.
+NormalBayesClassifier::predictProb
+----------------------------------
 Predicts the response for sample(s).
-.. ocv:function:: float CvNormalBayesClassifier::predict( const Mat& samples, Mat* results=0, Mat* results_prob=0 ) const
-.. ocv:function:: float CvNormalBayesClassifier::predict( const CvMat* samples, CvMat* results=0, CvMat* results_prob=0 ) const
-.. ocv:pyfunction:: cv2.NormalBayesClassifier.predict(samples) -> retval, results
-The method estimates the most probable classes for input vectors. Input vectors (one or more) are stored as rows of the matrix ``samples``. In case of multiple input vectors, there should be one output vector ``results``. The predicted class for a single input vector is returned by the method. The vector ``results_prob`` contains the output probabilities corresponding to each element of ``result``.
-The function is parallelized with the TBB library.
+.. ocv:function:: float NormalBayesClassifier::predictProb( InputArray inputs, OutputArray outputs, OutputArray outputProbs, int flags=0 ) const
+The method estimates the most probable classes for input vectors. Input vectors (one or more) are stored as rows of the matrix ``inputs``. In case of multiple input vectors, there should be one output vector ``outputs``. The predicted class for a single input vector is returned by the method. The vector ``outputProbs`` contains the output probabilities corresponding to each element of ``result``.
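To make the reworked interface concrete, here is a minimal sketch that relies only on the calls named above (``NormalBayesClassifier::create`` parameters, ``StatModel::train<NormalBayesClassifier>``, ``predictProb``); the ``cv::ml::TrainData`` helper, the header name, and the data shapes are assumptions about this intermediate state of the refactored ml module:

.. code-block:: cpp

    #include "opencv2/core.hpp"
    #include "opencv2/ml.hpp"   // assumed header of the refactored ml module

    using namespace cv;
    using namespace cv::ml;

    int main()
    {
        // Illustrative data: 100 row samples, 4 ordered features, three integer class labels.
        Mat samples(100, 4, CV_32F), responses(100, 1, CV_32S);
        randu(samples, Scalar(0.), Scalar(1.));
        for( int i = 0; i < responses.rows; i++ )
            responses.at<int>(i) = i % 3;

        // Create and train the model in one call, as described above.
        Ptr<TrainData> traindata = TrainData::create(samples, ROW_SAMPLE, responses);
        Ptr<NormalBayesClassifier> nb =
            StatModel::train<NormalBayesClassifier>(traindata, NormalBayesClassifier::Params());

        // Predicted classes and per-class probabilities for a few input vectors.
        Mat outputs, outputProbs;
        nb->predictProb(samples.rowRange(0, 5), outputs, outputProbs);
        return 0;
    }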
@@ -2,6 +2,8 @@
 #include "precomp.hpp"
 #include <time.h>
+#if 0
 #define pCvSeq CvSeq*
 #define pCvDTreeNode CvDTreeNode*
@@ -1359,3 +1361,5 @@ float CvGBTrees::predict( const cv::Mat& sample, const cv::Mat& _missing,
     return predict(&_sample, _missing.empty() ? 0 : &miss, 0,
         slice==cv::Range::all() ? CV_WHOLE_SEQ : cvSlice(slice.start, slice.end), k);
 }
+#endif
#include "test_precomp.hpp" #include "test_precomp.hpp"
#if 0
#include <string> #include <string>
#include <fstream> #include <fstream>
#include <iostream> #include <iostream>
...@@ -284,3 +286,5 @@ void CV_GBTreesTest::run(int) ...@@ -284,3 +286,5 @@ void CV_GBTreesTest::run(int)
///////////////////////////////////////////////////////////////////////////// /////////////////////////////////////////////////////////////////////////////
TEST(ML_GBTrees, regression) { CV_GBTreesTest test; test.safe_run(); } TEST(ML_GBTrees, regression) { CV_GBTreesTest test; test.safe_run(); }
#endif
@@ -65,7 +65,7 @@ int CV_AMLTest::run_test_case( int testCaseIdx )
     for (int k = 0; k < icount; k++)
     {
 #endif
-        data.mix_train_and_test_idx();
+        data->shuffleTrainTest();
         code = train( testCaseIdx );
 #ifdef GET_STAT
         float case_result = get_error();
@@ -101,9 +101,10 @@ int CV_AMLTest::validate_test_results( int testCaseIdx )
     {
         resultNode["mean"] >> mean;
         resultNode["sigma"] >> sigma;
-        float curErr = get_error( testCaseIdx, CV_TEST_ERROR );
+        model->save(format("/Users/vp/tmp/dtree/testcase_%02d.cur.yml", testCaseIdx));
+        float curErr = get_test_error( testCaseIdx );
         const int coeff = 4;
-        ts->printf( cvtest::TS::LOG, "Test case = %d; test error = %f; mean error = %f (diff=%f), %d*sigma = %f",
+        ts->printf( cvtest::TS::LOG, "Test case = %d; test error = %f; mean error = %f (diff=%f), %d*sigma = %f\n",
             testCaseIdx, curErr, mean, abs( curErr - mean), coeff, coeff*sigma );
         if ( abs( curErr - mean) > coeff*sigma )
         {
@@ -125,6 +126,6 @@ int CV_AMLTest::validate_test_results( int testCaseIdx )
 TEST(ML_DTree, regression) { CV_AMLTest test( CV_DTREE ); test.safe_run(); }
 TEST(ML_Boost, regression) { CV_AMLTest test( CV_BOOST ); test.safe_run(); }
 TEST(ML_RTrees, regression) { CV_AMLTest test( CV_RTREES ); test.safe_run(); }
-TEST(ML_ERTrees, regression) { CV_AMLTest test( CV_ERTREES ); test.safe_run(); }
+TEST(DISABLED_ML_ERTrees, regression) { CV_AMLTest test( CV_ERTREES ); test.safe_run(); }
 /* End of file. */