Commit eeb786fc
Authored Aug 03, 2014 by Vadim Pisarevsky
fixed compile warnings from MSVC; fixed warnings in Python bindings; added some debugging code
Parent: 9cb79b9a
Showing 10 changed files with 30 additions and 30 deletions
modules/ml/include/opencv2/ml.hpp    +6  -6
modules/ml/src/ann_mlp.cpp           +4  -5
modules/ml/src/boost.cpp             +4  -2
modules/ml/src/data.cpp              +4  -4
modules/ml/src/inner_functions.cpp   +1  -1
modules/ml/src/knearest.cpp          +1  -1
modules/ml/src/precomp.hpp           +4  -4
modules/ml/src/rtrees.cpp            +1  -2
modules/ml/src/svm.cpp               +1  -1
modules/ml/src/tree.cpp              +4  -4
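
Most of the +30/-30 churn below follows a single pattern: an implicit numeric conversion that MSVC flags at higher warning levels (signed/unsigned mismatch, possible loss of data) is spelled out with an explicit cast. A minimal stand-alone sketch of that idiom, with invented variable names rather than the actual ml code:

    #include <opencv2/core.hpp>
    #include <cstdio>

    // Illustrative only: the variable names are invented, but the casts mirror
    // the ones added across this commit to keep MSVC quiet at /W4.
    int main()
    {
        // cv::RNG keeps a uint64 state; seeding with a plain -1 triggers a
        // signed/unsigned conversion warning, so the seed is cast explicitly
        // while keeping the same all-ones seed value.
        cv::RNG rng((uint64)-1);

        // double -> float: storing a double expression in a float is a
        // "possible loss of data" warning unless the conversion is spelled out.
        double err = 3.0, n = 7.0;
        float errPercent = (float)(err / n * 100);

        // int -> uchar narrowing, again made explicit.
        int tp = 1;
        uchar tp8 = (uchar)tp;

        std::printf("%d %.2f %u\n", rng.uniform(0, 2), errPercent, (unsigned)tp8);
        return 0;
    }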
modules/ml/include/opencv2/ml.hpp
@@ -205,7 +205,7 @@ public:
 class CV_EXPORTS_W NormalBayesClassifier : public StatModel
 {
 public:
-    class CV_EXPORTS_W_MAP Params
+    class CV_EXPORTS_W Params
     {
     public:
         Params();
@@ -231,8 +231,8 @@ public:
     public:
         Params(int defaultK=10, bool isclassifier=true);
-        int defaultK;
-        bool isclassifier;
+        CV_PROP_RW int defaultK;
+        CV_PROP_RW bool isclassifier;
     };
     virtual void setParams(const Params& p) = 0;
     virtual Params getParams() const = 0;
@@ -328,9 +328,9 @@ public:
         explicit Params(int nclusters=DEFAULT_NCLUSTERS, int covMatType=EM::COV_MAT_DIAGONAL,
                         const TermCriteria& termCrit=TermCriteria(TermCriteria::COUNT+TermCriteria::EPS,
                                                                   EM::DEFAULT_MAX_ITERS, 1e-6));
-        int nclusters;
-        int covMatType;
-        TermCriteria termCrit;
+        CV_PROP_RW int nclusters;
+        CV_PROP_RW int covMatType;
+        CV_PROP_RW TermCriteria termCrit;
     };
     virtual void setParams(const Params& p) = 0;
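
The ml.hpp hunks drop the CV_EXPORTS_W_MAP marker in favour of CV_EXPORTS_W and tag each Params field with CV_PROP_RW. In plain C++ these macros reduce to CV_EXPORTS or to nothing; they are annotations read by the Python binding generator, which is where the "warnings in Python bindings" came from. A hypothetical class annotated in the same style (the names below are invented for illustration, not part of OpenCV):

    #include <opencv2/core.hpp>
    #include <cstdio>

    // Hypothetical class: it only shows the annotation style the commit applies
    // to the ml Params structs. The bindings' header parser turns CV_PROP_RW
    // members into read/write properties on the wrapped class.
    namespace demo
    {
        class CV_EXPORTS_W ToyParams
        {
        public:
            CV_WRAP ToyParams() : defaultK(10), isclassifier(true) {}

            CV_PROP_RW int  defaultK;      // becomes a read/write property in the wrapper
            CV_PROP_RW bool isclassifier;  // likewise
        };
    }

    int main()
    {
        demo::ToyParams p;
        p.defaultK = 32;                   // from C++ these stay ordinary fields
        std::printf("%d %d\n", p.defaultK, (int)p.isclassifier);
        return 0;
    }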
modules/ml/src/ann_mlp.cpp
@@ -123,7 +123,7 @@ public:
     void clear()
     {
         min_val = max_val = min_val1 = max_val1 = 0.;
-        rng = RNG(-1);
+        rng = RNG((uint64)-1);
         weights.clear();
         trained = false;
     }
@@ -300,7 +300,7 @@ public:
         {
             int maxIdx[] = { 0, 0 };
             minMaxIdx(outputs, 0, 0, 0, maxIdx);
-            return maxIdx[0] + maxIdx[1];
+            return (float)(maxIdx[0] + maxIdx[1]);
         }

         return 0.f;
@@ -702,9 +702,8 @@ public:
             train_backprop( inputs, outputs, sw, termcrit ) :
             train_rprop( inputs, outputs, sw, termcrit );
-        trained = true;
-        return trained;
+        trained = iter > 0;
+        return iter;
     }

     int train_backprop( const Mat& inputs, const Mat& outputs, const Mat& _sw, TermCriteria termCrit )
modules/ml/src/boost.cpp
@@ -220,12 +220,13 @@ public:
     void updateWeightsAndTrim( int treeidx, vector<int>& sidx )
     {
+        putchar('<');
         int i, n = (int)w->sidx.size();
         int nvars = (int)varIdx.size();
         double sumw = 0., C = 1.;
-        cv::AutoBuffer<double> buf(n*3 + nvars);
+        cv::AutoBuffer<double> buf(n + nvars);
         double* result = buf;
-        float* sbuf = (float*)(result + n*3);
+        float* sbuf = (float*)(result + n);
         Mat sample(1, nvars, CV_32F, sbuf);

         int predictFlags = bparams.boostType == Boost::DISCRETE ? (PREDICT_MAX_VOTE | RAW_OUTPUT) : PREDICT_SUM;
         predictFlags |= COMPRESSED_INPUT;
@@ -373,6 +374,7 @@ public:
             if( w->sample_weights[si] >= threshold )
                 sidx.push_back(si);
         }
+        putchar('>'); fflush(stdout);
     }

     float predictTrees( const Range& range, const Mat& sample, int flags0 ) const
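
The putchar('<') / putchar('>') pair added to updateWeightsAndTrim() is part of the "some debugging code" from the commit message: a cheap progress marker around an expensive step, with fflush(stdout) so the character appears immediately even when stdout is block-buffered (the matching '.' marker is removed from rtrees.cpp further down). A self-contained sketch of the idiom, using a made-up work function:

    #include <cstdio>

    // Stand-in for an expensive step such as updateWeightsAndTrim(); the body
    // is just busy work so the example has something to wait on.
    static void expensiveStep()
    {
        volatile double acc = 0.;
        for (int i = 0; i < 1000000; i++)
            acc += i * 1e-9;
    }

    int main()
    {
        for (int iter = 0; iter < 5; iter++)
        {
            std::putchar('<');     // entering the step
            std::fflush(stdout);   // push the marker out now, not when the program exits
            expensiveStep();
            std::putchar('>');     // step finished
            std::fflush(stdout);
        }
        std::putchar('\n');
        return 0;
    }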
modules/ml/src/data.cpp
@@ -310,7 +310,7 @@ public:
             varType.create(1, nvars, CV_8U);
             varType = Scalar::all(VAR_ORDERED);
             if( noutputvars == 1 )
-                varType.at<uchar>(ninputvars) = responses.type() < CV_32F ? VAR_CATEGORICAL : VAR_ORDERED;
+                varType.at<uchar>(ninputvars) = (uchar)(responses.type() < CV_32F ? VAR_CATEGORICAL : VAR_ORDERED);
         }

         if( noutputvars > 1 )
@@ -558,7 +558,7 @@ public:
             if( tp == VAR_MISSED )
                 haveMissed = true;
             rowvals.push_back(val);
-            rowtypes.push_back(tp);
+            rowtypes.push_back((uchar)tp);
             token = strtok(NULL, delimiters);
             if( !token )
                 break;
@@ -880,7 +880,7 @@ public:
             if( s )
             {
                 j = s[i];
-                CV_DbgAssert( 0 <= j && j < nsamples );
+                CV_Assert( 0 <= j && j < nsamples );
             }
             values[i] = src[j*sstep];
             if( values[i] == MISSED_VAL )
@@ -955,7 +955,7 @@ public:
             if( vptr )
             {
                 j = vptr[i];
-                CV_DbgAssert( 0 <= j && j < nvars );
+                CV_Assert( 0 <= j && j < nvars );
             }
             buf[i] = src[j*vstep];
         }
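
Two of the data.cpp hunks replace CV_DbgAssert with CV_Assert on index bounds checks. CV_DbgAssert compiles away in release builds, so those checks only ran in debug; CV_Assert always evaluates its condition and throws cv::Exception on failure, which fits the debugging intent of this commit. A small sketch of the difference (the out-of-range index here is deliberate and hypothetical):

    #include <opencv2/core.hpp>
    #include <cstdio>

    int main()
    {
        int nsamples = 10;
        int j = 11;   // deliberately out of range for the demonstration

        // CV_DbgAssert(0 <= j && j < nsamples);  // no-op in a release build
        try
        {
            CV_Assert(0 <= j && j < nsamples);    // always checked, throws on failure
        }
        catch (const cv::Exception& e)
        {
            std::printf("caught: %s\n", e.what());
        }
        return 0;
    }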
modules/ml/src/inner_functions.cpp
@@ -108,7 +108,7 @@ float StatModel::calcError( const Ptr<TrainData>& data, bool testerr, OutputArra
     if( _resp.needed() )
         resp.copyTo(_resp);

-    return err / n * (isclassifier ? 100 : 1);
+    return (float)(err / n * (isclassifier ? 100 : 1));
 }

 void StatModel::save(const String& filename) const
modules/ml/src/knearest.cpp
@@ -173,7 +173,7 @@ public:
             }

             float result = 0.f;
-            float inv_scale = 1./k;
+            float inv_scale = 1.f/k;

             for( testidx = 0; testidx < testcount; testidx++ )
             {
modules/ml/src/precomp.hpp
@@ -111,7 +111,7 @@ namespace ml
         termCrit.type |= TermCriteria::EPS;
         termCrit.epsilon = epsilon;
     }
-    int iters = (double)fn["iterations"];
+    int iters = (int)fn["iterations"];
     if( iters > 0 )
     {
         termCrit.type |= TermCriteria::COUNT;
@@ -134,7 +134,7 @@ namespace ml
     }

     int class_idx;
-    int Tn;
+    double Tn;
     double value;
     int parent;
@@ -164,7 +164,7 @@ namespace ml
     }

     int varIdx;
-    int inversed;
+    bool inversed;
     float quality;
     int next;
     float c;
@@ -179,7 +179,7 @@ namespace ml
     vector<WNode> wnodes;
     vector<WSplit> wsplits;
     vector<int> wsubsets;
-    vector<int> cv_Tn;
+    vector<double> cv_Tn;
     vector<double> cv_node_risk;
     vector<double> cv_node_error;
     vector<int> cv_labels;
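
The first precomp.hpp hunk reads the "iterations" entry through FileNode's int conversion instead of converting to double and letting it narrow implicitly, which is what MSVC warned about. A minimal sketch of reading termination criteria from a FileStorage node; the in-memory document and its keys are made up for the example:

    #include <opencv2/core.hpp>
    #include <cstdio>

    int main()
    {
        // Build a tiny settings document in memory; the "term" map and its keys
        // are illustrative, not the exact layout the ml module uses on disk.
        cv::FileStorage out(".yml", cv::FileStorage::WRITE | cv::FileStorage::MEMORY);
        out << "term" << "{" << "iterations" << 100 << "epsilon" << 1e-6 << "}";
        cv::String doc = out.releaseAndGetString();

        cv::FileStorage in(doc, cv::FileStorage::READ | cv::FileStorage::MEMORY);
        cv::FileNode fn = in["term"];

        cv::TermCriteria termCrit;
        int iters = (int)fn["iterations"];   // direct int conversion, no double detour
        double eps = (double)fn["epsilon"];
        if (iters > 0)
        {
            termCrit.type |= cv::TermCriteria::COUNT;
            termCrit.maxCount = iters;
        }
        if (eps > 0)
        {
            termCrit.type |= cv::TermCriteria::EPS;
            termCrit.epsilon = eps;
        }
        std::printf("maxCount=%d epsilon=%g\n", termCrit.maxCount, termCrit.epsilon);
        return 0;
    }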
modules/ml/src/rtrees.cpp
@@ -90,7 +90,7 @@ public:
     {
         DTreesImpl::clear();
         oobError = 0.;
-        rng = RNG(-1);
+        rng = RNG((uint64)-1);
     }

     const vector<int>& getActiveVars()
@@ -177,7 +177,6 @@ public:
         for( treeidx = 0; treeidx < ntrees; treeidx++ )
         {
-            putchar('.'); fflush(stdout);
             for( i = 0; i < n; i++ )
                 oobmask[i] = (uchar)1;
modules/ml/src/svm.cpp
@@ -1587,7 +1587,7 @@ public:
                         bool balanced )
     {
         int svmType = params.svmType;
-        RNG rng(-1);
+        RNG rng((uint64)-1);

         if( svmType == ONE_CLASS )
             // current implementation of "auto" svm does not support the 1-class case.
modules/ml/src/tree.cpp
@@ -730,7 +730,7 @@ DTreesImpl::WSplit DTreesImpl::findSplitOrdClass( int vi, const vector<int>& _si
     {
         split.varIdx = vi;
         split.c = (values[sorted_idx[best_i]] + values[sorted_idx[best_i+1]])*0.5f;
-        split.inversed = 0;
+        split.inversed = false;
         split.quality = (float)best_val;
     }
     return split;
@@ -744,12 +744,12 @@ void DTreesImpl::clusterCategories( const double* vectors, int n, int m, double*
     cv::AutoBuffer<double> buf(n + k);
     double* v_weights = buf, *c_weights = buf + n;
     bool modified = true;
-    RNG r(-1);
+    RNG r((uint64)-1);

     // assign labels randomly
     for( i = 0; i < n; i++ )
     {
-        int sum = 0;
+        double sum = 0;
         const double* v = vectors + i*m;
         labels[i] = i < k ? i : r.uniform(0, k);
@@ -1063,7 +1063,7 @@ DTreesImpl::WSplit DTreesImpl::findSplitOrdReg( int vi, const vector<int>& _sidx
     {
         split.varIdx = vi;
         split.c = (values[sorted_idx[best_i]] + values[sorted_idx[best_i+1]])*0.5f;
-        split.inversed = 0;
+        split.inversed = false;
         split.quality = (float)best_val;
     }
     return split;