Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in / Register
Toggle navigation
O
opencv
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Packages
Packages
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
submodule
opencv
Commits
64aaa6e1
Commit
64aaa6e1
authored
Oct 04, 2013
by
Rahul Kavi
Committed by
Maksim Shabunin
Aug 18, 2014
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
updated test for logistic regression after changes to LogisticRegression class
parent
d5ad4f32
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
23 additions
and
33 deletions
+23
-33
test_lr.cpp
modules/ml/test/test_lr.cpp
+23
-33
No files found.
modules/ml/test/test_lr.cpp
View file @
64aaa6e1
...
...
@@ -73,9 +73,8 @@ static bool calculateError( const Mat& _p_labels, const Mat& _o_labels, float& e
-    CV_Assert( _p_labels_temp.total() == _o_labels_temp.total() );
+    CV_Assert( _p_labels_temp.rows == _o_labels_temp.rows );
-    Mat result = (_p_labels_temp == _o_labels_temp)/255;
-    accuracy = (float)cv::sum(result)[0]/result.rows;
+    accuracy = (float)cv::countNonZero(_p_labels_temp == _o_labels_temp)/_p_labels_temp.rows;
     error = 1 - accuracy;
     return true;
 }
...
...
@@ -133,25 +132,23 @@ void CV_LRTest::run( int /*start_from*/ )
     3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3);
-    CvLR_TrainParams params = CvLR_TrainParams();
     Mat responses1, responses2;
     float error = 0.0f;
-    CvLR_TrainParams params1 = CvLR_TrainParams();
-    CvLR_TrainParams params2 = CvLR_TrainParams();
+    LogisticRegressionParams params1 = LogisticRegressionParams();
+    LogisticRegressionParams params2 = LogisticRegressionParams();
     params1.alpha = 1.0;
     params1.num_iters = 10001;
-    params1.norm = CvLR::REG_L2;
-    // params1.debug = 1;
+    params1.norm = LogisticRegression::REG_L2;
     params1.regularized = 1;
-    params1.train_method = CvLR::BATCH;
-    params1.minibatchsize = 10;
+    params1.train_method = LogisticRegression::BATCH;
+    params1.mini_batch_size = 10;

     // run LR classifier train classifier
     data.convertTo(data, CV_32FC1);
     labels.convertTo(labels, CV_32FC1);
-    CvLR lr1(data, labels, params1);
+    LogisticRegression lr1(data, labels, params1);

     // predict using the same data
     lr1.predict(data, responses1);
...
...
@@ -164,7 +161,6 @@ void CV_LRTest::run( int /*start_from*/ )
         ts->printf(cvtest::TS::LOG, "Bad prediction labels\n");
         test_code = cvtest::TS::FAIL_INVALID_OUTPUT;
     }
     else if(error > 0.05f)
     {
         ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error);
...
...
@@ -173,14 +169,13 @@ void CV_LRTest::run( int /*start_from*/ )
     params2.alpha = 1.0;
     params2.num_iters = 9000;
-    params2.norm = CvLR::REG_L2;
-    // params2.debug = 1;
+    params2.norm = LogisticRegression::REG_L2;
     params2.regularized = 1;
-    params2.train_method = CvLR::MINI_BATCH;
-    params2.minibatchsize = 10;
+    params2.train_method = LogisticRegression::MINI_BATCH;
+    params2.mini_batch_size = 10;

     // now train using mini batch gradient descent
-    CvLR lr2(data, labels, params2);
+    LogisticRegression lr2(data, labels, params2);
     lr2.predict(data, responses2);
     responses2.convertTo(responses2, CV_32S);
...
...
@@ -191,7 +186,6 @@ void CV_LRTest::run( int /*start_from*/ )
         ts->printf(cvtest::TS::LOG, "Bad prediction labels\n");
         test_code = cvtest::TS::FAIL_INVALID_OUTPUT;
     }
     else if(error > 0.06f)
     {
         ts->printf(cvtest::TS::LOG, "Bad accuracy of (%f)\n", error);
...
...
@@ -257,7 +251,7 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ )
     3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3);
-    CvLR_TrainParams params = CvLR_TrainParams();
+    // LogisticRegressionParams params = LogisticRegressionParams();
     Mat responses1, responses2;
     Mat learnt_mat1, learnt_mat2;
...
...
@@ -265,28 +259,26 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ )
     float errorCount = 0.0;
-    CvLR_TrainParams params1 = CvLR_TrainParams();
-    CvLR_TrainParams params2 = CvLR_TrainParams();
+    LogisticRegressionParams params1 = LogisticRegressionParams();
     params1.alpha = 1.0;
     params1.num_iters = 10001;
-    params1.norm = CvLR::REG_L2;
-    // params1.debug = 1;
+    params1.norm = LogisticRegression::REG_L2;
     params1.regularized = 1;
-    params1.train_method = CvLR::BATCH;
-    params1.minibatchsize = 10;
+    params1.train_method = LogisticRegression::BATCH;
+    params1.mini_batch_size = 10;

     data.convertTo(data, CV_32FC1);
     labels.convertTo(labels, CV_32FC1);

     // run LR classifier train classifier
-    CvLR lr1(data, labels, params1);
-    CvLR lr2;
-    learnt_mat1 = lr1.get_learnt_mat();
+    LogisticRegression lr1(data, labels, params1);
+    LogisticRegression lr2;
+    learnt_mat1 = lr1.get_learnt_thetas();
     lr1.predict(data, responses1);

     // now save the classifier
     // Write out
     string filename = cv::tempfile(".xml");
     try
     {
...
...
@@ -312,10 +304,9 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ )
         lr2.predict(data, responses2);
-        learnt_mat2 = lr2.get_learnt_mat();
+        learnt_mat2 = lr2.get_learnt_thetas();
         // compare difference in prediction outputs before and after loading from disk
-        pred_result1 = (responses1 == responses2)/255;
+        CV_Assert(responses1.rows == responses2.rows);

         // compare difference in learnt matrices before and after loading from disk
         comp_learnt_mats = (learnt_mat1 == learnt_mat2);
...
...
@@ -326,10 +317,9 @@ void CV_LRTest_SaveLoad::run( int /*start_from*/ )
     // compare difference in prediction outputs and stored inputs
     // check if there is any difference between computed learnt mat and retrieved mat
-    errorCount += 1 - (float)cv::sum(pred_result1)[0]/pred_result1.rows;
+    errorCount += 1 - (float)cv::countNonZero(responses1 == responses2)/responses1.rows;
     errorCount += 1 - (float)cv::sum(comp_learnt_mats)[0]/comp_learnt_mats.rows;
     if(errorCount>0)
     {
         ts->printf( cvtest::TS::LOG, "Different prediction results before writing and after reading (errorCount=%d).\n", errorCount );
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment