Commit e7f491ae, authored Dec 14, 2010 by Kirill Kornyakov
CascadeClassifier refactored. Most of the members and methods are private now.
parent e7cf541f
Showing 3 changed files with 200 additions and 142 deletions (+200, -142):

modules/objdetect/include/opencv2/objdetect/objdetect.hpp  (+54, -22)
modules/objdetect/src/cascadedetect.cpp                    (+134, -108)
modules/traincascade/boost.cpp                             (+12, -12)
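The wrapped public entry points keep their signatures through the refactoring (load, empty, detectMultiScale, as declared in the objdetect.hpp hunk below), so existing callers should not need changes. A minimal usage sketch against that interface; the cascade file name and image path are placeholders, not part of the commit.

// Usage sketch (not part of the commit); file names are placeholders.
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <vector>
#include <cstdio>

int main()
{
    cv::CascadeClassifier cascade;
    if( !cascade.load("haarcascade_frontalface_alt.xml") || cascade.empty() )
        return -1;

    cv::Mat image = cv::imread("lena.png", 0);   // load as single-channel grayscale
    if( image.empty() )
        return -1;

    std::vector<cv::Rect> objects;
    cascade.detectMultiScale(image, objects, 1.1, 3, 0, cv::Size(30, 30));
    std::printf("found %d object(s)\n", (int)objects.size());
    return 0;
}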
modules/objdetect/include/opencv2/objdetect/objdetect.hpp
@@ -278,6 +278,7 @@ class CV_EXPORTS FeatureEvaluator
 public:
     enum { HAAR = 0, LBP = 1 };
     virtual ~FeatureEvaluator();
     virtual bool read(const FileNode& node);
     virtual Ptr<FeatureEvaluator> clone() const;
     virtual int getFeatureType() const;
@@ -296,6 +297,55 @@ template<> CV_EXPORTS void Ptr<CvHaarClassifierCascade>::delete_obj();
 class CV_EXPORTS_W CascadeClassifier
 {
 public:
+    CV_WRAP CascadeClassifier();
+    CV_WRAP CascadeClassifier( const string& filename );
+    virtual ~CascadeClassifier();
+
+    CV_WRAP virtual bool empty() const;
+    CV_WRAP bool load( const string& filename );
+    bool read( const FileNode& node );
+    CV_WRAP void detectMultiScale( const Mat& image, CV_OUT vector<Rect>& objects,
+                                   double scaleFactor=1.1, int minNeighbors=3,
+                                   int flags=0, Size minSize=Size(), Size maxSize=Size() );
+
+    bool isOldFormatCascade() const;
+    virtual Size getOriginalWindowSize() const;
+    int getFeatureType() const;
+    bool setImage( const Mat& );
+
+protected:
+    virtual bool detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
+                                    int stripSize, int yStep, double factor, vector<Rect>& candidates );
+
+private:
+    enum { BOOST = 0 };
+    enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2, FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 };
+
+    friend class CascadeClassifierInvoker;
+
+    template<class FEval>
+    friend int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator );
+
+    template<class FEval>
+    friend int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator );
+
+    template<class FEval>
+    friend int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator );
+
+    template<class FEval>
+    friend int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &featureEvaluator );
+
+    bool setImage( Ptr<FeatureEvaluator>&, const Mat& );
+    int runAt( Ptr<FeatureEvaluator>&, Point );
+
+    class Data
+    {
+    public:
         struct CV_EXPORTS DTreeNode
         {
             int featureIdx;

@@ -316,26 +366,7 @@ public:
             float threshold;
         };

-    enum { BOOST = 0 };
-    enum { DO_CANNY_PRUNING = 1, SCALE_IMAGE = 2, FIND_BIGGEST_OBJECT = 4, DO_ROUGH_SEARCH = 8 };
-    CV_WRAP CascadeClassifier();
-    CV_WRAP CascadeClassifier( const string& filename );
-    ~CascadeClassifier();
-    CV_WRAP bool empty() const;
-    CV_WRAP bool load( const string& filename );
-    bool read( const FileNode& node );
-    CV_WRAP void detectMultiScale( const Mat& image, CV_OUT vector<Rect>& objects,
-                                   double scaleFactor=1.1, int minNeighbors=3,
-                                   int flags=0, Size minSize=Size(), Size maxSize=Size() );
-    bool setImage( Ptr<FeatureEvaluator>&, const Mat& );
-    int runAt( Ptr<FeatureEvaluator>&, Point );
+        bool read(const FileNode& node);

         bool isStumpBased;

@@ -349,12 +380,13 @@ public:
         vector<DTreeNode> nodes;
         vector<float> leaves;
         vector<int> subsets;
     };

-    Ptr<FeatureEvaluator> feval;
+    Data data;
+    Ptr<FeatureEvaluator> featureEvaluator;
     Ptr<CvHaarClassifierCascade> oldCascade;
 };

 //////////////// HOG (Histogram-of-Oriented-Gradients) Descriptor and Object Detector //////////////

 struct CV_EXPORTS_W HOGDescriptor
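Because the stage tables and training parameters now sit inside the private Data member, code outside the class reads them through the new accessors instead of touching fields directly. A small illustrative helper, hypothetical and not part of the commit:

#include <opencv2/objdetect/objdetect.hpp>
#include <cstdio>

// Hypothetical helper: everything it needs comes through the accessors
// declared in the refactored class above.
void printCascadeInfo( const cv::CascadeClassifier& cascade )
{
    cv::Size win = cascade.getOriginalWindowSize();
    std::printf( "training window %dx%d, old format: %d, feature type: %d\n",
                 win.width, win.height,
                 (int)cascade.isOldFormatCascade(),
                 cascade.getFeatureType() );
}

int main()
{
    cv::CascadeClassifier cascade;
    if( cascade.load("haarcascade_frontalface_alt.xml") )   // placeholder path
        printCascadeInfo( cascade );
    return 0;
}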
modules/objdetect/src/cascadedetect.cpp
@@ -258,6 +258,7 @@ public:
     { return featuresPtr[featureIdx].calc(offset) * varianceNormFactor; }
     virtual double calcOrd( int featureIdx ) const
     { return (*this)(featureIdx); }
 private:
     Size origWinSize;
     Ptr<vector<Feature> > features;

@@ -440,6 +441,7 @@ bool HaarEvaluator::setWindow( Point pt )
         nf = 1.;
     varianceNormFactor = 1./nf;
     offset = (int)pOffset;
     return true;
 }

@@ -614,7 +616,7 @@ CascadeClassifier::~CascadeClassifier()
 bool CascadeClassifier::empty() const
 {
-    return oldCascade.empty() && stages.empty();
+    return oldCascade.empty() && data.stages.empty();
 }

 bool CascadeClassifier::load( const string& filename )
@@ -635,31 +637,31 @@ bool CascadeClassifier::load(const string& filename)
 }

 template<class FEval>
-inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
-    int si, nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::DTree* cascadeWeaks = &cascade.classifiers[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
-    for( si = 0; si < nstages; si++ )
+    for( int si = 0; si < nstages; si++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[si];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
         double sum = 0;
         for( wi = 0; wi < ntrees; wi++ )
         {
-            CascadeClassifier::DTree& weak = cascadeWeaks[stage.first + wi];
+            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
             int idx = 0, root = nodeOfs;
             do
             {
-                CascadeClassifier::DTreeNode& node = cascadeNodes[root + idx];
-                double val = feval(node.featureIdx);
+                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                double val = featureEvaluator(node.featureIdx);
                 idx = val < node.threshold ? node.left : node.right;
             }
             while( idx > 0 );
@@ -674,32 +676,32 @@ inline int predictOrdered( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_f
 }

 template<class FEval>
-inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
-    int si, nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    size_t subsetSize = (cascade.ncategories + 31)/32;
-    int* cascadeSubsets = &cascade.subsets[0];
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::DTree* cascadeWeaks = &cascade.classifiers[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
-    for( si = 0; si < nstages; si++ )
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::DTree* cascadeWeaks = &cascade.data.classifiers[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+    for( int si = 0; si < nstages; si++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[si];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
         double sum = 0;
         for( wi = 0; wi < ntrees; wi++ )
         {
-            CascadeClassifier::DTree& weak = cascadeWeaks[stage.first + wi];
+            CascadeClassifier::Data::DTree& weak = cascadeWeaks[stage.first + wi];
             int idx = 0, root = nodeOfs;
             do
             {
-                CascadeClassifier::DTreeNode& node = cascadeNodes[root + idx];
-                int c = feval(node.featureIdx);
+                CascadeClassifier::Data::DTreeNode& node = cascadeNodes[root + idx];
+                int c = featureEvaluator(node.featureIdx);
                 const int* subset = &cascadeSubsets[(root + idx)*subsetSize];
                 idx = (subset[c>>5] & (1 << (c & 31))) ? node.left : node.right;
             }
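The categorical predictors above decide a branch with subset[c>>5] & (1 << (c & 31)): each node stores one bit per feature category, packed into (ncategories + 31)/32 ints, and bit c selects left or right. A standalone sketch of just that bit test, with made-up values:

// Illustrative sketch only; the category count and index are invented.
#include <vector>
#include <cstdio>

int main()
{
    const int ncategories = 256;                  // e.g. 8-bit LBP codes
    const int subsetSize  = (ncategories + 31)/32;
    std::vector<int> subset(subsetSize, 0);

    int c = 77;                                   // category index returned by the evaluator
    subset[c >> 5] |= 1 << (c & 31);              // mark category 77 in the node's bitmask

    bool goLeft = (subset[c >> 5] & (1 << (c & 31))) != 0;
    std::printf("category %d -> %s branch\n", c, goLeft ? "left" : "right");
    return 0;
}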
@@ -715,25 +717,25 @@ inline int predictCategorical( CascadeClassifier& cascade, Ptr<FeatureEvaluator>
 }

 template<class FEval>
-inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];

-    int nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     for( int stageIdx = 0; stageIdx < nstages; stageIdx++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[stageIdx];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[stageIdx];
         double sum = 0.0;
         int ntrees = stage.ntrees;
         for( int i = 0; i < ntrees; i++, nodeOfs++, leafOfs += 2 )
         {
-            CascadeClassifier::DTreeNode& node = cascadeNodes[nodeOfs];
-            double value = feval(node.featureIdx);
+            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            double value = featureEvaluator(node.featureIdx);
             sum += cascadeLeaves[ value < node.threshold ? leafOfs : leafOfs + 1 ];
         }

@@ -745,27 +747,27 @@ inline int predictOrderedStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator
 }

 template<class FEval>
-inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_feval )
+inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvaluator> &_featureEvaluator )
 {
-    int si, nstages = (int)cascade.stages.size();
+    int nstages = (int)cascade.data.stages.size();
     int nodeOfs = 0, leafOfs = 0;
-    FEval& feval = (FEval&)*_feval;
-    size_t subsetSize = (cascade.ncategories + 31)/32;
-    int* cascadeSubsets = &cascade.subsets[0];
-    float* cascadeLeaves = &cascade.leaves[0];
-    CascadeClassifier::DTreeNode* cascadeNodes = &cascade.nodes[0];
-    CascadeClassifier::Stage* cascadeStages = &cascade.stages[0];
-    for( si = 0; si < nstages; si++ )
+    FEval& featureEvaluator = (FEval&)*_featureEvaluator;
+    size_t subsetSize = (cascade.data.ncategories + 31)/32;
+    int* cascadeSubsets = &cascade.data.subsets[0];
+    float* cascadeLeaves = &cascade.data.leaves[0];
+    CascadeClassifier::Data::DTreeNode* cascadeNodes = &cascade.data.nodes[0];
+    CascadeClassifier::Data::Stage* cascadeStages = &cascade.data.stages[0];
+    for( int si = 0; si < nstages; si++ )
     {
-        CascadeClassifier::Stage& stage = cascadeStages[si];
+        CascadeClassifier::Data::Stage& stage = cascadeStages[si];
         int wi, ntrees = stage.ntrees;
         double sum = 0;
         for( wi = 0; wi < ntrees; wi++ )
         {
-            CascadeClassifier::DTreeNode& node = cascadeNodes[nodeOfs];
-            int c = feval(node.featureIdx);
+            CascadeClassifier::Data::DTreeNode& node = cascadeNodes[nodeOfs];
+            int c = featureEvaluator(node.featureIdx);
             const int* subset = &cascadeSubsets[nodeOfs*subsetSize];
             sum += cascadeLeaves[ subset[c>>5] & (1 << (c & 31)) ? leafOfs : leafOfs + 1 ];
             nodeOfs++;
@@ -780,43 +782,30 @@ inline int predictCategoricalStump( CascadeClassifier& cascade, Ptr<FeatureEvalu
 int CascadeClassifier::runAt( Ptr<FeatureEvaluator>& featureEvaluator, Point pt )
 {
     CV_Assert( oldCascade.empty() );
     /*if( !oldCascade.empty() )
         return cvRunHaarClassifierCascade(oldCascade, pt, 0);*/
-    assert( featureType == FeatureEvaluator::HAAR ||
-            featureType == FeatureEvaluator::LBP );
+    assert( data.featureType == FeatureEvaluator::HAAR ||
+            data.featureType == FeatureEvaluator::LBP );
     return !featureEvaluator->setWindow(pt) ? -1 :
-           isStumpBased ? ( featureType == FeatureEvaluator::HAAR ?
+           data.isStumpBased ? ( data.featureType == FeatureEvaluator::HAAR ?
                 predictOrderedStump<HaarEvaluator>( *this, featureEvaluator ) :
                 predictCategoricalStump<LBPEvaluator>( *this, featureEvaluator ) ) :
-           ( featureType == FeatureEvaluator::HAAR ?
+           ( data.featureType == FeatureEvaluator::HAAR ?
                 predictOrdered<HaarEvaluator>( *this, featureEvaluator ) :
                 predictCategorical<LBPEvaluator>( *this, featureEvaluator ) );
 }

 bool CascadeClassifier::setImage( Ptr<FeatureEvaluator>& featureEvaluator, const Mat& image )
 {
     /*if( !oldCascade.empty() )
     {
         Mat sum(image.rows+1, image.cols+1, CV_32S);
         Mat tilted(image.rows+1, image.cols+1, CV_32S);
         Mat sqsum(image.rows+1, image.cols+1, CV_64F);
         integral(image, sum, sqsum, tilted);
         CvMat _sum = sum, _sqsum = sqsum, _tilted = tilted;
         cvSetImagesForHaarClassifierCascade( oldCascade, &_sum, &_sqsum, &_tilted, 1. );
         return true;
     }*/
-    return empty() ? false : featureEvaluator->setImage(image, origWinSize);
+    return empty() ? false : featureEvaluator->setImage(image, data.origWinSize);
 }

 struct CascadeClassifierInvoker
 {
     CascadeClassifierInvoker( CascadeClassifier& _cc, Size _sz1, int _stripSize, int _yStep,
                               double _factor, ConcurrentRectVector& _vec )
     {
         classifier = &_cc;
-        processingAreaSize = _sz1;
+        processingRectSize = _sz1;
         stripSize = _stripSize;
         yStep = _yStep;
         scalingFactor = _factor;

@@ -825,14 +814,14 @@ struct CascadeClassifierInvoker
     void operator()(const BlockedRange& range) const
     {
-        Ptr<FeatureEvaluator> evaluator = classifier->feval->clone();
-        Size winSize(cvRound(classifier->origWinSize.width * scalingFactor), cvRound(classifier->origWinSize.height * scalingFactor));
+        Ptr<FeatureEvaluator> evaluator = classifier->featureEvaluator->clone();
+        Size winSize(cvRound(classifier->data.origWinSize.width * scalingFactor), cvRound(classifier->data.origWinSize.height * scalingFactor));

         int y1 = range.begin() * stripSize;
-        int y2 = min(range.end() * stripSize, processingAreaSize.height);
+        int y2 = min(range.end() * stripSize, processingRectSize.height);
         for( int y = y1; y < y2; y += yStep )
         {
-            for( int x = 0; x < processingAreaSize.width; x += yStep )
+            for( int x = 0; x < processingRectSize.width; x += yStep )
             {
                 int result = classifier->runAt(evaluator, Point(x, y));
                 if( result > 0 )
@@ -846,14 +835,46 @@ struct CascadeClassifierInvoker
     CascadeClassifier* classifier;
     ConcurrentRectVector* rectangles;
-    Size processingAreaSize;
+    Size processingRectSize;
     int stripSize, yStep;
     double scalingFactor;
 };

 struct getRect { Rect operator ()(const CvAvgComp& e) const { return e.rect; } };

+bool CascadeClassifier::detectSingleScale( const Mat& image, int stripCount, Size processingRectSize,
+                                           int stripSize, int yStep, double factor, vector<Rect>& candidates )
+{
+    if( !featureEvaluator->setImage( image, data.origWinSize ) )
+        return false;
+
+    ConcurrentRectVector concurrentCandidates;
+    parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker( *this, processingRectSize, stripSize, yStep, factor, concurrentCandidates));
+    candidates.insert( candidates.end(), concurrentCandidates.begin(), concurrentCandidates.end() );
+
+    return true;
+}
+
+bool CascadeClassifier::isOldFormatCascade() const
+{
+    return !oldCascade.empty();
+}
+
+int CascadeClassifier::getFeatureType() const
+{
+    return featureEvaluator->getFeatureType();
+}
+
+Size CascadeClassifier::getOriginalWindowSize() const
+{
+    return data.origWinSize;
+}
+
+bool CascadeClassifier::setImage(const Mat& image)
+{
+    featureEvaluator->setImage(image, data.origWinSize);
+}
+
 void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& objects,
                                           double scaleFactor, int minNeighbors,
                                           int flags, Size minObjectSize, Size maxObjectSize )

@@ -865,7 +886,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
     if( empty() )
         return;

-    if( !oldCascade.empty() )
+    if( isOldFormatCascade() )
     {
         MemStorage storage(cvCreateMemStorage(0));
         CvMat _image = image;
@@ -892,41 +913,41 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
     }

     Mat imageBuffer(image.rows + 1, image.cols + 1, CV_8U);
-    ConcurrentRectVector candidates;
+    vector<Rect> candidates;

     for( double factor = 1; ; factor *= scaleFactor )
     {
-        int stripCount, stripSize;
-        Size windowSize( cvRound(origWinSize.width*factor), cvRound(origWinSize.height*factor) );
+        Size originalWindowSize = getOriginalWindowSize();
+        Size windowSize( cvRound(originalWindowSize.width*factor), cvRound(originalWindowSize.height*factor) );
         Size scaledImageSize( cvRound( grayImage.cols/factor ), cvRound( grayImage.rows/factor ) );
-        Size processingAreaSize( scaledImageSize.width - origWinSize.width, scaledImageSize.height - origWinSize.height );
+        Size processingRectSize( scaledImageSize.width - originalWindowSize.width, scaledImageSize.height - originalWindowSize.height );

-        if( processingAreaSize.width <= 0 || processingAreaSize.height <= 0 )
+        if( processingRectSize.width <= 0 || processingRectSize.height <= 0 )
             break;
         if( windowSize.width > maxObjectSize.width || windowSize.height > maxObjectSize.height )
             break;
         if( windowSize.width < minObjectSize.width || windowSize.height < minObjectSize.height )
             continue;

+        Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
+        resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR );

         int yStep = factor > 2. ? 1 : 2;
+        int stripCount, stripSize;
 #ifdef HAVE_TBB
         const int PTS_PER_THREAD = 1000;
-        stripCount = ((processingAreaSize.width/yStep)*(processingAreaSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD;
+        stripCount = ((processingRectSize.width/yStep)*(processingRectSize.height + yStep-1)/yStep + PTS_PER_THREAD/2)/PTS_PER_THREAD;
         stripCount = std::min(std::max(stripCount, 1), 100);
-        stripSize = (((processingAreaSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep;
+        stripSize = (((processingRectSize.height + stripCount - 1)/stripCount + yStep-1)/yStep)*yStep;
 #else
         stripCount = 1;
-        stripSize = processingAreaSize.height;
+        stripSize = processingRectSize.height;
 #endif

-        Mat scaledImage( scaledImageSize, CV_8U, imageBuffer.data );
-        resize( grayImage, scaledImage, scaledImageSize, 0, 0, CV_INTER_LINEAR );
-        if( !feval->setImage( scaledImage, origWinSize ) )
+        if( !detectSingleScale( scaledImage, stripCount, processingRectSize, stripSize, yStep, factor, candidates ) )
             break;
-        parallel_for(BlockedRange(0, stripCount), CascadeClassifierInvoker( *this, processingAreaSize, stripSize, yStep, factor, candidates ));
     }

     objects.resize(candidates.size());
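With TBB enabled, the loop above splits the rows of processingRectSize into strips so that each BlockedRange chunk covers roughly PTS_PER_THREAD candidate windows, clamped to at most 100 strips. A standalone sketch of that arithmetic, using made-up sizes:

// Illustrative sketch of the strip partition used above; the sizes are invented.
#include <algorithm>
#include <cstdio>

int main()
{
    const int PTS_PER_THREAD = 1000;
    int processingWidth = 640, processingHeight = 480;   // hypothetical processingRectSize
    int yStep = 2;                                        // window step at this scale

    int stripCount = ((processingWidth/yStep)*(processingHeight + yStep - 1)/yStep
                      + PTS_PER_THREAD/2) / PTS_PER_THREAD;
    stripCount = std::min(std::max(stripCount, 1), 100);
    int stripSize = (((processingHeight + stripCount - 1)/stripCount + yStep - 1)/yStep)*yStep;

    std::printf("stripCount = %d, stripSize = %d rows\n", stripCount, stripSize);
    return 0;
}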
@@ -935,8 +956,7 @@ void CascadeClassifier::detectMultiScale( const Mat& image, vector<Rect>& object
     groupRectangles( objects, minNeighbors, GROUP_EPS );
 }

-bool CascadeClassifier::read(const FileNode& root)
+bool CascadeClassifier::Data::read(const FileNode& root)
 {
     // load stage params
     string stageTypeStr = (string)root[CC_STAGE_TYPE];

@@ -1000,6 +1020,7 @@ bool CascadeClassifier::read(const FileNode& root)
             FileNode leafValues = fnw[CC_LEAF_VALUES];
             if( internalNodes.empty() || leafValues.empty() )
                 return false;
             DTree tree;
             tree.nodeCount = (int)internalNodes.size()/nodeStep;
             classifiers.push_back(tree);
@@ -1009,47 +1030,52 @@ bool CascadeClassifier::read(const FileNode& root)
             if( subsetSize > 0 )
                 subsets.reserve(subsets.size() + tree.nodeCount*subsetSize);

-            FileNodeIterator it2 = internalNodes.begin(), it2_end = internalNodes.end();
-            for( ; it2 != it2_end; ) // nodes
+            FileNodeIterator internalNodesIter = internalNodes.begin(), internalNodesEnd = internalNodes.end();
+            for( ; internalNodesIter != internalNodesEnd; ) // nodes
             {
                 DTreeNode node;
-                node.left = (int)*it2; ++it2;
-                node.right = (int)*it2; ++it2;
-                node.featureIdx = (int)*it2; ++it2;
+                node.left = (int)*internalNodesIter; ++internalNodesIter;
+                node.right = (int)*internalNodesIter; ++internalNodesIter;
+                node.featureIdx = (int)*internalNodesIter; ++internalNodesIter;
                 if( subsetSize > 0 )
                 {
-                    for( int j = 0; j < subsetSize; j++, ++it2 )
-                        subsets.push_back((int)*it2);
+                    for( int j = 0; j < subsetSize; j++, ++internalNodesIter )
+                        subsets.push_back((int)*internalNodesIter);
                     node.threshold = 0.f;
                 }
                 else
                 {
-                    node.threshold = (float)*it2; ++it2;
+                    node.threshold = (float)*internalNodesIter; ++internalNodesIter;
                 }
                 nodes.push_back(node);
             }

-            it2 = leafValues.begin(), it2_end = leafValues.end();
-            for( ; it2 != it2_end; ++it2 ) // leaves
-                leaves.push_back((float)*it2);
+            internalNodesIter = leafValues.begin(), internalNodesEnd = leafValues.end();
+            for( ; internalNodesIter != internalNodesEnd; ++internalNodesIter ) // leaves
+                leaves.push_back((float)*internalNodesIter);
         }
     }

     return true;
 }

+bool CascadeClassifier::read(const FileNode& root)
+{
+    if( !data.read(root) )
+        return false;
+
     // load features
-    feval = FeatureEvaluator::create(featureType);
-    fn = root[CC_FEATURES];
+    featureEvaluator = FeatureEvaluator::create(data.featureType);
+    FileNode fn = root[CC_FEATURES];
     if( fn.empty() )
         return false;
-    return feval->read(fn);
+    return featureEvaluator->read(fn);
 }

 template<> void Ptr<CvHaarClassifierCascade>::delete_obj()
 { cvReleaseHaarClassifierCascade(&obj); }

 } // namespace cv

 /* End of file. */
modules/traincascade/boost.cpp
@@ -474,9 +474,9 @@ float CvCascadeBoostTrainData::getVarValue( int vi, int si )
 struct FeatureIdxOnlyPrecalc
 {
-    FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _feval, CvMat* _buf, int _sample_count, bool _is_buf_16u )
+    FeatureIdxOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, int _sample_count, bool _is_buf_16u )
     {
-        feval = _feval;
+        featureEvaluator = _featureEvaluator;
         sample_count = _sample_count;
         udst = (unsigned short*)_buf->data.s;
         idst = _buf->data.i;

@@ -490,7 +490,7 @@ struct FeatureIdxOnlyPrecalc
     {
         for( int si = 0; si < sample_count; si++ )
         {
-            valCachePtr[si] = (*feval)( fi, si );
+            valCachePtr[si] = (*featureEvaluator)( fi, si );
             if ( is_buf_16u )
                 *(udst + fi*sample_count + si) = (unsigned short)si;
             else

@@ -502,7 +502,7 @@ struct FeatureIdxOnlyPrecalc
             icvSortIntAux( idst + fi*sample_count, sample_count, valCachePtr );
         }
     }
-    const CvFeatureEvaluator* feval;
+    const CvFeatureEvaluator* featureEvaluator;
     int sample_count;
     int* idst;
     unsigned short* udst;

@@ -511,9 +511,9 @@ struct FeatureIdxOnlyPrecalc
 struct FeatureValAndIdxPrecalc
 {
-    FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _feval, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u )
+    FeatureValAndIdxPrecalc( const CvFeatureEvaluator* _featureEvaluator, CvMat* _buf, Mat* _valCache, int _sample_count, bool _is_buf_16u )
     {
-        feval = _feval;
+        featureEvaluator = _featureEvaluator;
         valCache = _valCache;
         sample_count = _sample_count;
         udst = (unsigned short*)_buf->data.s;

@@ -526,7 +526,7 @@ struct FeatureValAndIdxPrecalc
     {
         for( int si = 0; si < sample_count; si++ )
         {
-            valCache->at<float>( fi, si ) = (*feval)( fi, si );
+            valCache->at<float>( fi, si ) = (*featureEvaluator)( fi, si );
             if ( is_buf_16u )
                 *(udst + fi*sample_count + si) = (unsigned short)si;
             else

@@ -538,7 +538,7 @@ struct FeatureValAndIdxPrecalc
             icvSortIntAux( idst + fi*sample_count, sample_count, valCache->ptr<float>(fi) );
         }
     }
-    const CvFeatureEvaluator* feval;
+    const CvFeatureEvaluator* featureEvaluator;
     Mat* valCache;
     int sample_count;
     int* idst;

@@ -548,9 +548,9 @@ struct FeatureValAndIdxPrecalc
 struct FeatureValOnlyPrecalc
 {
-    FeatureValOnlyPrecalc( const CvFeatureEvaluator* _feval, Mat* _valCache, int _sample_count )
+    FeatureValOnlyPrecalc( const CvFeatureEvaluator* _featureEvaluator, Mat* _valCache, int _sample_count )
    {
-        feval = _feval;
+        featureEvaluator = _featureEvaluator;
         valCache = _valCache;
         sample_count = _sample_count;
     }

@@ -558,9 +558,9 @@ struct FeatureValOnlyPrecalc
     {
         for ( int fi = range.begin(); fi < range.end(); fi++)
             for( int si = 0; si < sample_count; si++ )
-                valCache->at<float>( fi, si ) = (*feval)( fi, si );
+                valCache->at<float>( fi, si ) = (*featureEvaluator)( fi, si );
     }
-    const CvFeatureEvaluator* feval;
+    const CvFeatureEvaluator* featureEvaluator;
     Mat* valCache;
     int sample_count;
 };