Commit dd8de0c4 authored Jan 09, 2013 by marina.kolpakova
move soft cascade octave to ml module
parent a0e93d04
Showing 5 changed files with 497 additions and 409 deletions
apps/sft/include/sft/octave.hpp          +8   -65
apps/sft/octave.cpp                      +6   -340
apps/sft/sft.cpp                         +3   -3
modules/ml/include/opencv2/ml/ml.hpp     +66  -1
modules/ml/src/octave.cpp                +414 -0
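Taken together, the hunks below move the soft-cascade training octave out of the standalone sft app and into the ml module: cv::FeaturePool loses its pure-virtual destructor, an abstract cv::Dataset interface is added, cv::Octave replaces sft::Octave, and the app keeps only thin ScaledDataset/ICFFeaturePool wrappers. A minimal sketch of how a caller might drive the relocated trainer after this commit, assuming the post-commit headers; the helper name trainOneOctave, the weak count of 1024, and the output node name "octave" are illustrative assumptions, not part of the diff:

#include <opencv2/ml/ml.hpp>   // cv::Octave, cv::FeaturePool, cv::Dataset after this commit
#include <string>

// Sketch only: train one octave of a soft cascade with the relocated cv::Octave.
// The concrete Dataset/FeaturePool instances are assumed to come from the
// application (apps/sft supplies ScaledDataset and ICFFeaturePool).
static bool trainOneOctave(const cv::Dataset* dataset, const cv::FeaturePool* pool,
                           const cv::Rect& bbox, int npositives, int nnegatives,
                           int logScale, int shrinkage, const std::string& cascadePath)
{
    cv::Octave boost(bbox, npositives, nnegatives, logScale, shrinkage);

    // treeDepth must be 2 (asserted by cv::Octave::train); 1024 weaks is an assumed value.
    if (!boost.train(dataset, pool, /*weaks=*/1024, /*treeDepth=*/2))
        return false;

    cv::Mat thresholds;
    boost.setRejectThresholds(thresholds);   // DBP rejection thresholds

    // Same CvFileStorage-based dump that apps/sft/sft.cpp performs; "octave" is a placeholder name.
    CvFileStorage* fout = cvOpenFileStorage(cascadePath.c_str(), 0, CV_STORAGE_WRITE);
    boost.write(fout, "octave");
    cvReleaseFileStorage(&fout);
    return true;
}

apps/sft/sft.cpp (below) follows the same pattern, but takes the bounding box, sample counts and weak count from its config object.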
apps/sft/include/sft/octave.hpp
...
...
@@ -155,6 +155,9 @@ private:
void write( cv::FileStorage& fs, const std::string&, const ICF& f);

std::ostream& operator<<(std::ostream& out, const ICF& m);

+using cv::FeaturePool;
+using cv::Dataset;

class ICFFeaturePool : public cv::FeaturePool
{
public:
...
...
@@ -184,79 +187,20 @@ private:
};

-using cv::FeaturePool;
-
-class Dataset
+class ScaledDataset : public Dataset
{
public:
-    typedef enum {POSITIVE = 1, NEGATIVE = 2} SampleType;
-
-    Dataset(const sft::string& path, const int octave);
+    ScaledDataset(const sft::string& path, const int octave);

-    cv::Mat get(SampleType type, int idx) const;
-    int available(SampleType type) const;
+    virtual cv::Mat get(SampleType type, int idx) const;
+    virtual int available(SampleType type) const;
+
+    virtual ~ScaledDataset();

private:
    svector pos;
    svector neg;
};

-// used for traning single octave scale
-class Octave : cv::Boost
-{
-public:
-    enum
-    {
-        // Direct backward pruning. (Cha Zhang and Paul Viola)
-        DBP = 1,
-        // Multiple instance pruning. (Cha Zhang and Paul Viola)
-        MIP = 2,
-        // Originally proposed by L. bourdev and J. brandt
-        HEURISTIC = 4
-    };
-
-    Octave(cv::Rect boundingBox, int npositives, int nnegatives, int logScale, int shrinkage);
-    virtual ~Octave();
-
-    virtual bool train(const Dataset& dataset, const FeaturePool* pool, int weaks, int treeDepth);
-    virtual float predict( const Mat& _sample, Mat& _votes, bool raw_mode, bool return_sum ) const;
-    virtual void setRejectThresholds(cv::Mat& thresholds);
-
-    virtual void write( CvFileStorage* fs, string name) const;
-    virtual void write( cv::FileStorage &fs, const FeaturePool* pool, const Mat& thresholds) const;
-
-    int logScale;
-
-protected:
-    virtual bool train(const cv::Mat& trainData, const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
-                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
-                       const cv::Mat& missingDataMask=cv::Mat());
-
-    void processPositives(const Dataset& dataset, const FeaturePool* pool);
-    void generateNegatives(const Dataset& dataset, const FeaturePool* pool);
-
-    float predict( const Mat& _sample, const cv::Range range) const;
-
-private:
-    void traverse(const CvBoostTree* tree, cv::FileStorage& fs, int& nfeatures, int* used, const double* th) const;
-    virtual void initial_weights(double (&p)[2]);
-
-    cv::Rect boundingBox;
-
-    int npositives;
-    int nnegatives;
-
-    int shrinkage;
-
-    Mat integrals;
-    Mat responses;
-
-    CvBoostParams params;
-
-    Mat trainData;
-};
}
#endif
\ No newline at end of file
apps/sft/octave.cpp
...
...
@@ -46,343 +46,6 @@
#include <glob.h>
#include <queue>
-// ============ Octave ============ //
-sft::Octave::Octave(cv::Rect bb, int np, int nn, int ls, int shr)
-: logScale(ls), boundingBox(bb), npositives(np), nnegatives(nn), shrinkage(shr)
-{
-    int maxSample = npositives + nnegatives;
-    responses.create(maxSample, 1, CV_32FC1);
-
-    CvBoostParams _params;
-    {
-        // tree params
-        _params.max_categories       = 10;
-        _params.max_depth            = 2;
-        _params.cv_folds             = 0;
-        _params.truncate_pruned_tree = false;
-        _params.use_surrogates       = false;
-        _params.use_1se_rule         = false;
-        _params.regression_accuracy  = 1.0e-6;
-
-        // boost params
-        _params.boost_type           = CvBoost::GENTLE;
-        _params.split_criteria       = CvBoost::SQERR;
-        _params.weight_trim_rate     = 0.95;
-
-        // simple defaults
-        _params.min_sample_count     = 2;
-        _params.weak_count           = 1;
-    }
-
-    params = _params;
-}
-
-sft::Octave::~Octave(){}
-
-bool sft::Octave::train( const cv::Mat& _trainData, const cv::Mat& _responses, const cv::Mat& varIdx,
-       const cv::Mat& sampleIdx, const cv::Mat& varType, const cv::Mat& missingDataMask)
-{
-    bool update = false;
-    return cv::Boost::train(_trainData, CV_COL_SAMPLE, _responses, varIdx, sampleIdx, varType, missingDataMask,
-        params, update);
-}
-
-void sft::Octave::setRejectThresholds(cv::Mat& thresholds)
-{
-    dprintf("set thresholds according to DBP strategy\n");
-
-    // labels desided by classifier
-    cv::Mat desisions(responses.cols, responses.rows, responses.type());
-    float* dptr = desisions.ptr<float>(0);
-
-    // mask of samples satisfying the condition
-    cv::Mat ppmask(responses.cols, responses.rows, CV_8UC1);
-    uchar* mptr = ppmask.ptr<uchar>(0);
-
-    int nsamples = npositives + nnegatives;
-
-    cv::Mat stab;
-
-    for (int si = 0; si < nsamples; ++si)
-    {
-        float decision = dptr[si] = predict(trainData.col(si), stab, false, false);
-        mptr[si] = cv::saturate_cast<uchar>((uint)( (responses.ptr<float>(si)[0] == 1.f) && (decision == 1.f)));
-    }
-
-    int weaks = weak->total;
-    thresholds.create(1, weaks, CV_64FC1);
-    double* thptr = thresholds.ptr<double>(0);
-
-    cv::Mat traces(weaks, nsamples, CV_64FC1, cv::Scalar::all(FLT_MAX));
-
-    for (int w = 0; w < weaks; ++w)
-    {
-        double* rptr = traces.ptr<double>(w);
-        for (int si = 0; si < nsamples; ++si)
-        {
-            cv::Range curr(0, w + 1);
-            if (mptr[si])
-            {
-                float trace = predict(trainData.col(si), curr);
-                rptr[si] = trace;
-            }
-        }
-
-        double mintrace = 0.;
-        cv::minMaxLoc(traces.row(w), &mintrace);
-        thptr[w] = mintrace;
-    }
-}
-
-namespace {
-using namespace sft;
-}
-
-void sft::Octave::processPositives(const Dataset& dataset, const FeaturePool* pool)
-{
-    int w = boundingBox.width;
-    int h = boundingBox.height;
-
-    integrals.create(pool->size(), (w / shrinkage + 1) * (h / shrinkage * 10 + 1), CV_32SC1);
-
-    int total = 0;
-    // for (svector::const_iterator it = dataset.pos.begin(); it != dataset.pos.end(); ++it)
-    for (int curr = 0; curr < dataset.available(Dataset::POSITIVE); ++curr)
-    {
-        cv::Mat sample = dataset.get(Dataset::POSITIVE, curr);
-
-        cv::Mat channels = integrals.row(total).reshape(0, h / shrinkage * 10 + 1);
-        sample = sample(boundingBox);
-
-        pool->preprocess(sample, channels);
-        responses.ptr<float>(total)[0] = 1.f;
-
-        if (++total >= npositives) break;
-    }
-
-    dprintf("Processing positives finished:\n\trequested %d positives, collected %d samples.\n", npositives, total);
-
-    npositives = total;
-    nnegatives = cvRound(nnegatives * total / (double)npositives);
-}
-
-void sft::Octave::generateNegatives(const Dataset& dataset, const FeaturePool* pool)
-{
-    // ToDo: set seed, use offsets
-    sft::Random::engine eng(65633343L);
-    sft::Random::engine idxEng(764224349868L);
-
-    // int w = boundingBox.width;
-    int h = boundingBox.height;
-
-    int nimages = dataset.available(Dataset::NEGATIVE);
-    sft::Random::uniform iRand(0, nimages - 1);
-
-    int total = 0;
-    Mat sum;
-    for (int i = npositives; i < nnegatives + npositives; ++total)
-    {
-        int curr = iRand(idxEng);
-
-        Mat frame = dataset.get(Dataset::NEGATIVE, curr);
-
-        int maxW = frame.cols - 2 * boundingBox.x - boundingBox.width;
-        int maxH = frame.rows - 2 * boundingBox.y - boundingBox.height;
-
-        sft::Random::uniform wRand(0, maxW - 1);
-        sft::Random::uniform hRand(0, maxH - 1);
-
-        int dx = wRand(eng);
-        int dy = hRand(eng);
-
-        frame = frame(cv::Rect(dx, dy, boundingBox.width, boundingBox.height));
-
-        cv::Mat channels = integrals.row(i).reshape(0, h / shrinkage * 10 + 1);
-        pool->preprocess(frame, channels);
-
-        dprintf("generated %d %d\n", dx, dy);
-
-        // // if (predict(sum))
-        {
-            responses.ptr<float>(i)[0] = 0.f;
-            ++i;
-        }
-    }
-
-    dprintf("Processing negatives finished:\n\trequested %d negatives, viewed %d samples.\n", nnegatives, total);
-}
-
-template <typename T> int sgn(T val) {
-    return (T(0) < val) - (val < T(0));
-}
-
-void sft::Octave::traverse(const CvBoostTree* tree, cv::FileStorage& fs, int& nfeatures, int* used, const double* th) const
-{
-    std::queue<const CvDTreeNode*> nodes;
-    nodes.push( tree->get_root());
-    const CvDTreeNode* tempNode;
-    int leafValIdx = 0;
-    int internalNodeIdx = 1;
-    float* leafs = new float[(int)pow(2.f, get_params().max_depth)];
-
-    fs << "{";
-    fs << "treeThreshold" << *th;
-    fs << "internalNodes" << "[";
-    while (!nodes.empty())
-    {
-        tempNode = nodes.front();
-        CV_Assert( tempNode->left );
-        if ( !tempNode->left->left && !tempNode->left->right)
-        {
-            leafs[-leafValIdx] = (float)tempNode->left->value;
-            fs << leafValIdx--;
-        }
-        else
-        {
-            nodes.push( tempNode->left );
-            fs << internalNodeIdx++;
-        }
-
-        CV_Assert( tempNode->right );
-        if ( !tempNode->right->left && !tempNode->right->right)
-        {
-            leafs[-leafValIdx] = (float)tempNode->right->value;
-            fs << leafValIdx--;
-        }
-        else
-        {
-            nodes.push( tempNode->right );
-            fs << internalNodeIdx++;
-        }
-
-        int fidx = tempNode->split->var_idx;
-        fs << nfeatures;
-        used[nfeatures++] = fidx;
-
-        fs << tempNode->split->ord.c;
-
-        nodes.pop();
-    }
-    fs << "]";
-
-    fs << "leafValues" << "[";
-    for (int ni = 0; ni < -leafValIdx; ni++)
-        fs << leafs[ni];
-    fs << "]";
-
-    fs << "}";
-}
-
-void sft::Octave::write( cv::FileStorage &fso, const FeaturePool* pool, const Mat& thresholds) const
-{
-    CV_Assert(!thresholds.empty());
-    cv::Mat used( 1, weak->total * (pow(2, params.max_depth) - 1), CV_32SC1);
-    int* usedPtr = used.ptr<int>(0);
-    int nfeatures = 0;
-    fso << "{"
-        << "scale" << logScale
-        << "weaks" << weak->total
-        << "trees" << "[";
-        // should be replased with the H.L. one
-        CvSeqReader reader;
-        cvStartReadSeq( weak, &reader);
-
-        for(int i = 0; i < weak->total; i++ )
-        {
-            CvBoostTree* tree;
-            CV_READ_SEQ_ELEM( tree, reader );
-
-            traverse(tree, fso, nfeatures, usedPtr, thresholds.ptr<double>(0) + i);
-        }
-    fso << "]";
-    // features
-
-    fso << "features" << "[";
-    for (int i = 0; i < nfeatures; ++i)
-        pool->write(fso, usedPtr[i]);
-    fso << "]"
-        << "}";
-}
-
-void sft::Octave::initial_weights(double (&p)[2])
-{
-    double n = data->sample_count;
-    p[0] = n / (2. * (double)(nnegatives));
-    p[1] = n / (2. * (double)(npositives));
-}
-
-bool sft::Octave::train(const Dataset& dataset, const FeaturePool* pool, int weaks, int treeDepth)
-{
-    CV_Assert(treeDepth == 2);
-    CV_Assert(weaks > 0);
-
-    params.max_depth  = treeDepth;
-    params.weak_count = weaks;
-
-    // 1. fill integrals and classes
-    processPositives(dataset, pool);
-    generateNegatives(dataset, pool);
-
-    // 2. only sumple case (all features used)
-    int nfeatures = pool->size();
-    cv::Mat varIdx(1, nfeatures, CV_32SC1);
-    int* ptr = varIdx.ptr<int>(0);
-
-    for (int x = 0; x < nfeatures; ++x)
-        ptr[x] = x;
-
-    // 3. only sumple case (all samples used)
-    int nsamples = npositives + nnegatives;
-    cv::Mat sampleIdx(1, nsamples, CV_32SC1);
-    ptr = sampleIdx.ptr<int>(0);
-
-    for (int x = 0; x < nsamples; ++x)
-        ptr[x] = x;
-
-    // 4. ICF has an orderable responce.
-    cv::Mat varType(1, nfeatures + 1, CV_8UC1);
-    uchar* uptr = varType.ptr<uchar>(0);
-    for (int x = 0; x < nfeatures; ++x)
-        uptr[x] = CV_VAR_ORDERED;
-    uptr[nfeatures] = CV_VAR_CATEGORICAL;
-
-    trainData.create(nfeatures, nsamples, CV_32FC1);
-    for (int fi = 0; fi < nfeatures; ++fi)
-    {
-        float* dptr = trainData.ptr<float>(fi);
-        for (int si = 0; si < nsamples; ++si)
-        {
-            dptr[si] = pool->apply(fi, si, integrals);
-        }
-    }
-
-    cv::Mat missingMask;
-
-    bool ok = train(trainData, responses, varIdx, sampleIdx, varType, missingMask);
-    if (!ok)
-        std::cout << "ERROR: tree can not be trained " << std::endl;
-    return ok;
-}
-
-float sft::Octave::predict( const Mat& _sample, Mat& _votes, bool raw_mode, bool return_sum ) const
-{
-    CvMat sample = _sample, votes = _votes;
-    return CvBoost::predict(&sample, 0, (_votes.empty())? 0 : &votes, CV_WHOLE_SEQ, raw_mode, return_sum);
-}
-
-float sft::Octave::predict( const Mat& _sample, const cv::Range range) const
-{
-    CvMat sample = _sample;
-    return CvBoost::predict(&sample, 0, 0, range, false, true);
-}
-
-void sft::Octave::write( CvFileStorage* fs, string name) const
-{
-    CvBoost::write(fs, name.c_str());
-}

// ========= FeaturePool ========= //

sft::ICFFeaturePool::ICFFeaturePool(cv::Size m, int n) : FeaturePool(), model(m), nfeatures(n)
...
...
@@ -499,7 +162,7 @@ void glob(const string& path, svector& ret)
// in the default case data folders should be alligned as following:
// 1. positives: <train or test path>/octave_<octave number>/pos/*.png
// 2. negatives: <train or test path>/octave_<octave number>/neg/*.png
-Dataset::Dataset(const string& path, const int oct)
+ScaledDataset::ScaledDataset(const string& path, const int oct)
{
    dprintf("%s\n", "get dataset file names...");
...
...
@@ -514,13 +177,15 @@ Dataset::Dataset(const string& path, const int oct)
    CV_Assert(neg.size() != size_t(0));
}

-cv::Mat Dataset::get(SampleType type, int idx) const
+cv::Mat ScaledDataset::get(SampleType type, int idx) const
{
    const std::string& src = (type == POSITIVE)? pos[idx] : neg[idx];
    return cv::imread(src);
}

-int Dataset::available(SampleType type) const
+int ScaledDataset::available(SampleType type) const
{
    return (int)((type == POSITIVE)? pos.size() : neg.size());
}
+
+ScaledDataset::~ScaledDataset(){}
\ No newline at end of file
apps/sft/sft.cpp
...
...
@@ -127,12 +127,12 @@ int main(int argc, char** argv)
        cv::Rect boundingBox = cfg.bbox(it);
        std::cout << "Object bounding box" << boundingBox << std::endl;

-        sft::Octave boost(boundingBox, npositives, nnegatives, *it, shrinkage);
+        cv::Octave boost(boundingBox, npositives, nnegatives, *it, shrinkage);

        std::string path = cfg.trainPath;
-        sft::Dataset dataset(path, boost.logScale);
+        sft::ScaledDataset dataset(path, boost.logScale);

-        if (boost.train(dataset, &pool, cfg.weaks, cfg.treeDepth))
+        if (boost.train(&dataset, &pool, cfg.weaks, cfg.treeDepth))
        {
            CvFileStorage* fout = cvOpenFileStorage(cfg.resPath(it).c_str(), 0, CV_STORAGE_WRITE);
            boost.write(fout, cfg.cascadeName);
...
...
modules/ml/include/opencv2/ml/ml.hpp
...
...
@@ -2142,7 +2142,72 @@ public:
    virtual void preprocess(const Mat& frame, Mat& integrals) const = 0;

-    virtual ~FeaturePool() = 0;
+    virtual ~FeaturePool();
};

+class Dataset
+{
+public:
+    typedef enum {POSITIVE = 1, NEGATIVE = 2} SampleType;
+
+    virtual cv::Mat get(SampleType type, int idx) const = 0;
+    virtual int available(SampleType type) const = 0;
+    virtual ~Dataset();
+};
+
+// used for traning single octave scale
+class Octave : cv::Boost
+{
+public:
+    enum
+    {
+        // Direct backward pruning. (Cha Zhang and Paul Viola)
+        DBP = 1,
+        // Multiple instance pruning. (Cha Zhang and Paul Viola)
+        MIP = 2,
+        // Originally proposed by L. bourdev and J. brandt
+        HEURISTIC = 4
+    };
+
+    Octave(cv::Rect boundingBox, int npositives, int nnegatives, int logScale, int shrinkage);
+    virtual ~Octave();
+
+    virtual bool train(const Dataset* dataset, const FeaturePool* pool, int weaks, int treeDepth);
+    virtual float predict( const Mat& _sample, Mat& _votes, bool raw_mode, bool return_sum ) const;
+    virtual void setRejectThresholds(cv::Mat& thresholds);
+
+    virtual void write( CvFileStorage* fs, string name) const;
+    virtual void write( cv::FileStorage &fs, const FeaturePool* pool, const Mat& thresholds) const;
+
+    int logScale;
+
+protected:
+    virtual bool train(const cv::Mat& trainData, const cv::Mat& responses, const cv::Mat& varIdx=cv::Mat(),
+                       const cv::Mat& sampleIdx=cv::Mat(), const cv::Mat& varType=cv::Mat(),
+                       const cv::Mat& missingDataMask=cv::Mat());
+
+    void processPositives(const Dataset* dataset, const FeaturePool* pool);
+    void generateNegatives(const Dataset* dataset, const FeaturePool* pool);
+
+    float predict( const Mat& _sample, const cv::Range range) const;
+
+private:
+    void traverse(const CvBoostTree* tree, cv::FileStorage& fs, int& nfeatures, int* used, const double* th) const;
+    virtual void initial_weights(double (&p)[2]);
+
+    cv::Rect boundingBox;
+
+    int npositives;
+    int nnegatives;
+
+    int shrinkage;
+
+    Mat integrals;
+    Mat responses;
+
+    CvBoostParams params;
+
+    Mat trainData;
+};
}
...
...
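The ml.hpp hunk above makes cv::Dataset a small abstract interface (a SampleType enum plus get, available and a virtual destructor) that cv::Octave consumes through a pointer, so sample storage can live outside the ml module. A hedged sketch of what an alternative implementation could look like; MemoryDataset is a hypothetical name and is not part of this commit, which only ships the file-backed ScaledDataset in apps/sft:

#include <opencv2/ml/ml.hpp>
#include <vector>

// Hypothetical in-memory implementation of the cv::Dataset interface introduced here.
class MemoryDataset : public cv::Dataset
{
public:
    virtual cv::Mat get(SampleType type, int idx) const
    {
        return (type == POSITIVE) ? pos[idx] : neg[idx];
    }

    virtual int available(SampleType type) const
    {
        return (int)((type == POSITIVE) ? pos.size() : neg.size());
    }

    virtual ~MemoryDataset() {}

    std::vector<cv::Mat> pos;   // positive samples, already cropped
    std::vector<cv::Mat> neg;   // full negative frames to sample from
};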
modules/ml/src/octave.cpp
...
...
@@ -41,5 +41,419 @@
//M*/
#include "precomp.hpp"
#include <queue>
#define WITH_DEBUG_OUT
#if defined WITH_DEBUG_OUT
# include <stdio.h>
# define dprintf(format, ...) \
do { printf(format, ##__VA_ARGS__); } while (0)
#else
# define dprintf(format, ...)
#endif
#if defined(_MSC_VER) && _MSC_VER >= 1600
# include <random>
namespace sft
{
struct Random
{
    typedef std::mt19937 engine;
    typedef std::uniform_int<int> uniform;
};
}
#elif (__GNUC__) && __GNUC__ > 3 && __GNUC_MINOR__ > 1
# if defined (__cplusplus) && __cplusplus > 201100L
# include <random>
namespace sft
{
struct Random
{
    typedef std::mt19937 engine;
    typedef std::uniform_int<int> uniform;
};
}
# else
# include <tr1/random>
namespace sft
{
struct Random
{
    typedef std::tr1::mt19937 engine;
    typedef std::tr1::uniform_int<int> uniform;
};
}
# endif
#else
#include <opencv2/core/core.hpp>
namespace rnd
{
typedef cv::RNG engine;

template<typename T>
struct uniform_int
{
    uniform_int(const int _min, const int _max) : min(_min), max(_max) {}

    T operator() (engine& eng, const int bound) const
    {
        return (T)eng.uniform(min, bound);
    }

    T operator() (engine& eng) const
    {
        return (T)eng.uniform(min, max);
    }

private:
    int min;
    int max;
};
}

namespace sft
{
struct Random
{
    typedef rnd::engine engine;
    typedef rnd::uniform_int<int> uniform;
};
}
#endif
cv::FeaturePool::~FeaturePool(){}
cv::Dataset::~Dataset(){}

cv::Octave::Octave(cv::Rect bb, int np, int nn, int ls, int shr)
: logScale(ls), boundingBox(bb), npositives(np), nnegatives(nn), shrinkage(shr)
{
    int maxSample = npositives + nnegatives;
    responses.create(maxSample, 1, CV_32FC1);

    CvBoostParams _params;
    {
        // tree params
        _params.max_categories       = 10;
        _params.max_depth            = 2;
        _params.cv_folds             = 0;
        _params.truncate_pruned_tree = false;
        _params.use_surrogates       = false;
        _params.use_1se_rule         = false;
        _params.regression_accuracy  = 1.0e-6;

        // boost params
        _params.boost_type           = CvBoost::GENTLE;
        _params.split_criteria       = CvBoost::SQERR;
        _params.weight_trim_rate     = 0.95;

        // simple defaults
        _params.min_sample_count     = 2;
        _params.weak_count           = 1;
    }

    params = _params;
}

cv::Octave::~Octave(){}

bool cv::Octave::train( const cv::Mat& _trainData, const cv::Mat& _responses, const cv::Mat& varIdx,
       const cv::Mat& sampleIdx, const cv::Mat& varType, const cv::Mat& missingDataMask)
{
    bool update = false;
    return cv::Boost::train(_trainData, CV_COL_SAMPLE, _responses, varIdx, sampleIdx, varType, missingDataMask,
        params, update);
}

void cv::Octave::setRejectThresholds(cv::Mat& thresholds)
{
    dprintf("set thresholds according to DBP strategy\n");

    // labels desided by classifier
    cv::Mat desisions(responses.cols, responses.rows, responses.type());
    float* dptr = desisions.ptr<float>(0);

    // mask of samples satisfying the condition
    cv::Mat ppmask(responses.cols, responses.rows, CV_8UC1);
    uchar* mptr = ppmask.ptr<uchar>(0);

    int nsamples = npositives + nnegatives;

    cv::Mat stab;

    for (int si = 0; si < nsamples; ++si)
    {
        float decision = dptr[si] = predict(trainData.col(si), stab, false, false);
        mptr[si] = cv::saturate_cast<uchar>((uint)( (responses.ptr<float>(si)[0] == 1.f) && (decision == 1.f)));
    }

    int weaks = weak->total;
    thresholds.create(1, weaks, CV_64FC1);
    double* thptr = thresholds.ptr<double>(0);

    cv::Mat traces(weaks, nsamples, CV_64FC1, cv::Scalar::all(FLT_MAX));

    for (int w = 0; w < weaks; ++w)
    {
        double* rptr = traces.ptr<double>(w);
        for (int si = 0; si < nsamples; ++si)
        {
            cv::Range curr(0, w + 1);
            if (mptr[si])
            {
                float trace = predict(trainData.col(si), curr);
                rptr[si] = trace;
            }
        }

        double mintrace = 0.;
        cv::minMaxLoc(traces.row(w), &mintrace);
        thptr[w] = mintrace;
    }
}

void cv::Octave::processPositives(const Dataset* dataset, const FeaturePool* pool)
{
    int w = boundingBox.width;
    int h = boundingBox.height;

    integrals.create(pool->size(), (w / shrinkage + 1) * (h / shrinkage * 10 + 1), CV_32SC1);

    int total = 0;
    // for (svector::const_iterator it = dataset.pos.begin(); it != dataset.pos.end(); ++it)
    for (int curr = 0; curr < dataset->available(Dataset::POSITIVE); ++curr)
    {
        cv::Mat sample = dataset->get(Dataset::POSITIVE, curr);

        cv::Mat channels = integrals.row(total).reshape(0, h / shrinkage * 10 + 1);
        sample = sample(boundingBox);

        pool->preprocess(sample, channels);
        responses.ptr<float>(total)[0] = 1.f;

        if (++total >= npositives) break;
    }

    dprintf("Processing positives finished:\n\trequested %d positives, collected %d samples.\n", npositives, total);

    npositives = total;
    nnegatives = cvRound(nnegatives * total / (double)npositives);
}

void cv::Octave::generateNegatives(const Dataset* dataset, const FeaturePool* pool)
{
    // ToDo: set seed, use offsets
    sft::Random::engine eng(65633343L);
    sft::Random::engine idxEng(764224349868L);

    // int w = boundingBox.width;
    int h = boundingBox.height;

    int nimages = dataset->available(Dataset::NEGATIVE);
    sft::Random::uniform iRand(0, nimages - 1);

    int total = 0;
    Mat sum;
    for (int i = npositives; i < nnegatives + npositives; ++total)
    {
        int curr = iRand(idxEng);

        Mat frame = dataset->get(Dataset::NEGATIVE, curr);

        int maxW = frame.cols - 2 * boundingBox.x - boundingBox.width;
        int maxH = frame.rows - 2 * boundingBox.y - boundingBox.height;

        sft::Random::uniform wRand(0, maxW - 1);
        sft::Random::uniform hRand(0, maxH - 1);

        int dx = wRand(eng);
        int dy = hRand(eng);

        frame = frame(cv::Rect(dx, dy, boundingBox.width, boundingBox.height));

        cv::Mat channels = integrals.row(i).reshape(0, h / shrinkage * 10 + 1);
        pool->preprocess(frame, channels);

        dprintf("generated %d %d\n", dx, dy);

        // // if (predict(sum))
        {
            responses.ptr<float>(i)[0] = 0.f;
            ++i;
        }
    }

    dprintf("Processing negatives finished:\n\trequested %d negatives, viewed %d samples.\n", nnegatives, total);
}

template <typename T> int sgn(T val) {
    return (T(0) < val) - (val < T(0));
}

void cv::Octave::traverse(const CvBoostTree* tree, cv::FileStorage& fs, int& nfeatures, int* used, const double* th) const
{
    std::queue<const CvDTreeNode*> nodes;
    nodes.push( tree->get_root());
    const CvDTreeNode* tempNode;
    int leafValIdx = 0;
    int internalNodeIdx = 1;
    float* leafs = new float[(int)pow(2.f, get_params().max_depth)];

    fs << "{";
    fs << "treeThreshold" << *th;
    fs << "internalNodes" << "[";
    while (!nodes.empty())
    {
        tempNode = nodes.front();
        CV_Assert( tempNode->left );
        if ( !tempNode->left->left && !tempNode->left->right)
        {
            leafs[-leafValIdx] = (float)tempNode->left->value;
            fs << leafValIdx--;
        }
        else
        {
            nodes.push( tempNode->left );
            fs << internalNodeIdx++;
        }

        CV_Assert( tempNode->right );
        if ( !tempNode->right->left && !tempNode->right->right)
        {
            leafs[-leafValIdx] = (float)tempNode->right->value;
            fs << leafValIdx--;
        }
        else
        {
            nodes.push( tempNode->right );
            fs << internalNodeIdx++;
        }

        int fidx = tempNode->split->var_idx;
        fs << nfeatures;
        used[nfeatures++] = fidx;

        fs << tempNode->split->ord.c;

        nodes.pop();
    }
    fs << "]";

    fs << "leafValues" << "[";
    for (int ni = 0; ni < -leafValIdx; ni++)
        fs << leafs[ni];
    fs << "]";

    fs << "}";
}

void cv::Octave::write( cv::FileStorage &fso, const FeaturePool* pool, const Mat& thresholds) const
{
    CV_Assert(!thresholds.empty());
    cv::Mat used( 1, weak->total * (pow(2, params.max_depth) - 1), CV_32SC1);
    int* usedPtr = used.ptr<int>(0);
    int nfeatures = 0;
    fso << "{"
        << "scale" << logScale
        << "weaks" << weak->total
        << "trees" << "[";
        // should be replased with the H.L. one
        CvSeqReader reader;
        cvStartReadSeq( weak, &reader);

        for(int i = 0; i < weak->total; i++ )
        {
            CvBoostTree* tree;
            CV_READ_SEQ_ELEM( tree, reader );

            traverse(tree, fso, nfeatures, usedPtr, thresholds.ptr<double>(0) + i);
        }
    fso << "]";
    // features

    fso << "features" << "[";
    for (int i = 0; i < nfeatures; ++i)
        pool->write(fso, usedPtr[i]);
    fso << "]"
        << "}";
}

void cv::Octave::initial_weights(double (&p)[2])
{
    double n = data->sample_count;
    p[0] = n / (2. * (double)(nnegatives));
    p[1] = n / (2. * (double)(npositives));
}

bool cv::Octave::train(const Dataset* dataset, const FeaturePool* pool, int weaks, int treeDepth)
{
    CV_Assert(treeDepth == 2);
    CV_Assert(weaks > 0);

    params.max_depth  = treeDepth;
    params.weak_count = weaks;

    // 1. fill integrals and classes
    processPositives(dataset, pool);
    generateNegatives(dataset, pool);

    // 2. only sumple case (all features used)
    int nfeatures = pool->size();
    cv::Mat varIdx(1, nfeatures, CV_32SC1);
    int* ptr = varIdx.ptr<int>(0);

    for (int x = 0; x < nfeatures; ++x)
        ptr[x] = x;

    // 3. only sumple case (all samples used)
    int nsamples = npositives + nnegatives;
    cv::Mat sampleIdx(1, nsamples, CV_32SC1);
    ptr = sampleIdx.ptr<int>(0);

    for (int x = 0; x < nsamples; ++x)
        ptr[x] = x;

    // 4. ICF has an orderable responce.
    cv::Mat varType(1, nfeatures + 1, CV_8UC1);
    uchar* uptr = varType.ptr<uchar>(0);
    for (int x = 0; x < nfeatures; ++x)
        uptr[x] = CV_VAR_ORDERED;
    uptr[nfeatures] = CV_VAR_CATEGORICAL;

    trainData.create(nfeatures, nsamples, CV_32FC1);
    for (int fi = 0; fi < nfeatures; ++fi)
    {
        float* dptr = trainData.ptr<float>(fi);
        for (int si = 0; si < nsamples; ++si)
        {
            dptr[si] = pool->apply(fi, si, integrals);
        }
    }

    cv::Mat missingMask;

    bool ok = train(trainData, responses, varIdx, sampleIdx, varType, missingMask);
    if (!ok)
        std::cout << "ERROR: tree can not be trained " << std::endl;
    return ok;
}

float cv::Octave::predict( const Mat& _sample, Mat& _votes, bool raw_mode, bool return_sum ) const
{
    CvMat sample = _sample, votes = _votes;
    return CvBoost::predict(&sample, 0, (_votes.empty())? 0 : &votes, CV_WHOLE_SEQ, raw_mode, return_sum);
}

float cv::Octave::predict( const Mat& _sample, const cv::Range range) const
{
    CvMat sample = _sample;
    return CvBoost::predict(&sample, 0, 0, range, false, true);
}

void cv::Octave::write( CvFileStorage* fs, string name) const
{
    CvBoost::write(fs, name.c_str());
}