Commit 74c87a26 in opencv
authored Feb 25, 2016 by Marina Noskova
Delete function areClassesEmpty().
parent 068677ad
Showing 4 changed files with 12 additions and 46 deletions:

modules/ml/include/opencv2/ml.hpp    +2   -5
modules/ml/src/svmsgd.cpp            +10  -38
modules/ml/test/test_svmsgd.cpp      +0   -2
samples/cpp/train_svmsgd.cpp         +0   -1
modules/ml/include/opencv2/ml.hpp

@@ -1507,7 +1507,7 @@ public:
 SVMSGD provides a fast and easy-to-use implementation of the SVM classifier using the Stochastic Gradient Descent approach,
 as presented in @cite bottou2010large.
-The classifier has 5 parameters. These are
+The classifier has following parameters:
 - model type,
 - margin type,
 - margin regularization (\f$\lambda\f$),

@@ -1567,11 +1567,8 @@ To use SVMSGD algorithm do as follows:
 // Create empty object
 cv::Ptr<SVMSGD> svmsgd = SVMSGD::create();
-// Set parameters
-svmsgd->setOptimalParameters();
 // Train the Stochastic Gradient Descent SVM
-SvmSgd->train(trainData);
+svmsgd->train(trainData);
 // Predict labels for the new samples
 svmsgd->predict(samples, responses);
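For orientation, the documented workflow above expands to roughly the following self-contained sketch. The toy samples and responses are invented for illustration, and the comment about default parameters reflects the constructor change later in this commit (SVMSGDImpl() now calls setOptimalParameters()); only create(), train() and predict() come from the documentation snippet itself.

#include <opencv2/core.hpp>
#include <opencv2/ml.hpp>

int main()
{
    using namespace cv;
    using namespace cv::ml;

    // Toy data: four 2-D samples, one row per sample, CV_32F as SVMSGD expects.
    Mat samples = (Mat_<float>(4, 2) << 0.f, 0.f,
                                        0.f, 1.f,
                                        1.f, 0.f,
                                        1.f, 1.f);
    // One response per sample; values >= 0 are treated as the positive class.
    Mat responses = (Mat_<float>(4, 1) << 1.f, 1.f, -1.f, -1.f);
    Ptr<TrainData> trainData = TrainData::create(samples, ROW_SAMPLE, responses);

    // Create empty object (after this commit the constructor already applies
    // setOptimalParameters(), so no separate setup call is needed).
    Ptr<SVMSGD> svmsgd = SVMSGD::create();

    // Train the Stochastic Gradient Descent SVM.
    svmsgd->train(trainData);

    // Predict labels for new samples.
    Mat testSamples = (Mat_<float>(1, 2) << 0.f, 0.5f);
    Mat predictions;
    svmsgd->predict(testSamples, predictions);

    return 0;
}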
modules/ml/src/svmsgd.cpp

@@ -99,8 +99,6 @@ public:
 private:
     void updateWeights(InputArray sample, bool isPositive, float stepSize, Mat &weights);
 
-    std::pair<bool,bool> areClassesEmpty(Mat responses);
-
     void writeParams( FileStorage &fs ) const;
 
     void readParams( const FileNode &fn );

@@ -138,26 +136,6 @@ Ptr<SVMSGD> SVMSGD::create()
     return makePtr<SVMSGDImpl>();
 }
 
-std::pair<bool,bool> SVMSGDImpl::areClassesEmpty(Mat responses)
-{
-    CV_Assert(responses.cols == 1 || responses.rows == 1);
-    std::pair<bool,bool> emptyInClasses(true, true);
-    int limitIndex = responses.rows;
-
-    for (int index = 0; index < limitIndex; index++)
-    {
-        if (isPositive(responses.at<float>(index)))
-            emptyInClasses.first = false;
-        else
-            emptyInClasses.second = false;
-
-        if (!emptyInClasses.first && !emptyInClasses.second)
-            break;
-    }
-
-    return emptyInClasses;
-}
-
 void SVMSGDImpl::normalizeSamples(Mat &samples, Mat &average, float &multiplier)
 {
     int featuresCount = samples.cols;
@@ -248,16 +226,20 @@ bool SVMSGDImpl::train(const Ptr<TrainData>& data, int)
     int featureCount = trainSamples.cols;
 
     Mat trainResponses = data->getTrainResponses();        // (trainSamplesCount x 1) matrix
 
-    std::pair<bool,bool> areEmpty = areClassesEmpty(trainResponses);
+    CV_Assert(trainResponses.rows == trainSamples.rows);
 
-    if ( areEmpty.first && areEmpty.second )
+    if (trainResponses.empty())
     {
         return false;
     }
 
-    if ( areEmpty.first || areEmpty.second )
+    int positiveCount = countNonZero(trainResponses >= 0);
+    int negativeCount = countNonZero(trainResponses < 0);
+
+    if ( positiveCount <= 0 || negativeCount <= 0 )
     {
         weights_ = Mat::zeros(1, featureCount, CV_32F);
-        shift_ = areEmpty.first ? -1.f : 1.f;
+        shift_ = (positiveCount > 0) ? 1.f : -1.f;
 
         return true;
     }
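The countNonZero checks introduced above are what replace areClassesEmpty(): an element-wise comparison on the response matrix yields a mask, and counting its non-zero entries tells how many samples fall in each class. A minimal standalone sketch of that check follows; the responses matrix is made up for illustration, and only cv::countNonZero and the comparison operators from core OpenCV are used.

#include <opencv2/core.hpp>
#include <iostream>

int main()
{
    using namespace cv;

    // Made-up single-column response matrix; as in SVMSGDImpl::train(),
    // values >= 0 count as the positive class and values < 0 as the negative class.
    Mat responses = (Mat_<float>(5, 1) << 1.f, -1.f, 1.f, 1.f, -1.f);

    // Each comparison produces an 8-bit mask (255 where the condition holds),
    // so countNonZero gives the number of samples in that class.
    int positiveCount = countNonZero(responses >= 0);
    int negativeCount = countNonZero(responses < 0);

    if (positiveCount <= 0 || negativeCount <= 0)
    {
        // Degenerate case handled in train(): with only one class present,
        // a constant classifier (zero weights, fixed shift) suffices.
        std::cout << "one class is empty, shift = "
                  << ((positiveCount > 0) ? 1.f : -1.f) << std::endl;
    }
    else
    {
        std::cout << positiveCount << " positive and "
                  << negativeCount << " negative samples" << std::endl;
    }
    return 0;
}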
@@ -340,7 +322,7 @@ float SVMSGDImpl::predict( InputArray _samples, OutputArray _results, int ) const
     int nSamples = samples.rows;
     cv::Mat results;
 
-    CV_Assert( samples.cols == weights_.cols && samples.type() == CV_32F );
+    CV_Assert( samples.cols == weights_.cols && samples.type() == CV_32FC1 );
 
     if( _results.needed() )
     {
@@ -498,17 +480,7 @@ void SVMSGDImpl::clear()
 SVMSGDImpl::SVMSGDImpl()
 {
     clear();
-
-    params.svmsgdType = -1;
-    params.marginType = -1;
-
-    // Parameters for learning
-    params.marginRegularization = 0;       // regularization
-    params.initialStepSize = 0;            // learning rate (ideally should be large at beginning and decay each iteration)
-    params.stepDecreasingPower = 0;
-
-    TermCriteria _termCrit(TermCriteria::COUNT + TermCriteria::EPS, 0, 0);
-    params.termCrit = _termCrit;
+    setOptimalParameters();
 }
 
 void SVMSGDImpl::setOptimalParameters(int svmsgdType, int marginType)
modules/ml/test/test_svmsgd.cpp

@@ -182,8 +182,6 @@ void CV_SVMSGDTrainTest::run( int /*start_from*/ )
 {
     cv::Ptr<SVMSGD> svmsgd = SVMSGD::create();
 
-    svmsgd->setOptimalParameters();
-
     svmsgd->train(data);
 
     Mat responses;
samples/cpp/train_svmsgd.cpp

@@ -46,7 +46,6 @@ void addPointRetrainAndRedraw(Data &data, int x, int y, int response);
 bool doTrain( const Mat samples, const Mat responses, Mat &weights, float &shift)
 {
     cv::Ptr<SVMSGD> svmsgd = SVMSGD::create();
-    svmsgd->setOptimalParameters();
 
     cv::Ptr<TrainData> trainData = TrainData::create(samples, cv::ml::ROW_SAMPLE, responses);
     svmsgd->train( trainData );