Skip to content
Projects
Groups
Snippets
Help
Loading...
Sign in / Register
Toggle navigation
O
opencv_contrib
Project
Project
Details
Activity
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Packages
Packages
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
submodule
opencv_contrib
Commits
5072cc69
Commit
5072cc69
authored
Jul 19, 2015
by
Wangyida
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
Add feature extraction codes using Caffe in Class DataTrans
parent
db396b9f
Hide whitespace changes
Inline
Side-by-side
Showing
5 changed files
with
158 additions
and
18 deletions
+158
-18
CMakeLists.txt
modules/cnn_3dobj/CMakeLists.txt
+2
-1
README.md
modules/cnn_3dobj/README.md
+1
-1
cnn_3dobj.hpp
modules/cnn_3dobj/include/opencv2/cnn_3dobj.hpp
+38
-10
cnn_image2db.cpp
modules/cnn_3dobj/src/cnn_image2db.cpp
+88
-0
precomp.hpp
modules/cnn_3dobj/src/precomp.hpp
+29
-6
No files found.
modules/cnn_3dobj/CMakeLists.txt
View file @
5072cc69
set
(
the_description
"CNN for 3D object recognition and pose estimation including a completed Sphere View on 3D objects"
)
ocv_define_module
(
cnn_3dobj opencv_core opencv_imgproc opencv_viz opencv_highgui OPTIONAL WRAP python
)
ocv_define_module
(
cnn_3dobj opencv_core opencv_imgproc opencv_viz opencv_highgui caffe protobuf leveldb glog OPTIONAL WRAP python
)
target_link_libraries
(
cnn_3dobj caffe protobuf leveldb glog
)
modules/cnn_3dobj/README.md
View file @
5072cc69
...
...
@@ -4,7 +4,7 @@ Building Process:
$ cd
<opencv
_source_directory
>
$ mkdir build
$ cd build
$ cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D WITH_TBB=ON -D BUILD_NEW_PYTHON_SUPPORT=O
N
-D WITH_V4L=ON -D WITH_QT=ON -D WITH_OPENGL=ON -D WITH_VTK=ON -D OPENCV_EXTRA_MODULES_PATH=
<opencv
_contrib
>
/modules ..
$ cmake -D CMAKE_BUILD_TYPE=RELEASE -D CMAKE_INSTALL_PREFIX=/usr/local -D WITH_TBB=ON -D BUILD_NEW_PYTHON_SUPPORT=O
FF
-D WITH_V4L=ON -D WITH_QT=ON -D WITH_OPENGL=ON -D WITH_VTK=ON -D OPENCV_EXTRA_MODULES_PATH=
<opencv
_contrib
>
/modules ..
$ make -j4
$ sudo make install
$ cd
<opencv
_contrib
>
/modules/cnn_3dobj/samples
...
...
modules/cnn_3dobj/include/opencv2/cnn_3dobj.hpp
View file @
5072cc69
...
...
@@ -45,25 +45,39 @@ the use of this software, even if advised of the possibility of such damage.
#ifndef __OPENCV_CNN_3DOBJ_HPP__
#define __OPENCV_CNN_3DOBJ_HPP__
#ifdef __cplusplus
#include <glog/logging.h>
#include <leveldb/db.h>
#include <caffe/proto/caffe.pb.h>
#include <opencv2/calib3d.hpp>
#include <opencv2/viz/vizcore.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/highgui/highgui_c.h>
//#include <opencv2/imgproc/imgproc.hpp>
#include <opencv/cv.h>
#include <opencv/cxcore.h>
#include <string>
#include <fstream>
#include <vector>
#include <stdio.h>
#include <math.h>
#include <iostream>
#include <string>
#include <set>
#include <string.h>
#include <stdlib.h>
#include <tr1/memory>
#include <dirent.h>
#include <glog/logging.h>
#include <google/protobuf/text_format.h>
#include <leveldb/db.h>
#include <opencv2/calib3d.hpp>
#include <opencv2/viz/vizcore.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/highgui/highgui_c.h>
#define CPU_ONLY
#include <caffe/blob.hpp>
#include <caffe/common.hpp>
#include <caffe/net.hpp>
#include <caffe/proto/caffe.pb.h>
#include <caffe/util/io.hpp>
#include <caffe/vision_layers.hpp>
using
std
::
string
;
using
caffe
::
Blob
;
using
caffe
::
Caffe
;
using
caffe
::
Datum
;
using
caffe
::
Net
;
/** @defgroup cnn_3dobj CNN based on Caffe aiming at 3D object recognition and pose estimation
*/
namespace
cv
...
...
@@ -131,10 +145,24 @@ class CV_EXPORTS_W DataTrans
public
:
DataTrans
();
CV_WRAP
void
list_dir
(
const
char
*
path
,
std
::
vector
<
string
>&
files
,
bool
r
);
/** @brief Use directory of the file including images starting with an int label as the name of each image.
*/
CV_WRAP
string
get_classname
(
string
path
);
/** @brief
*/
CV_WRAP
int
get_labelid
(
string
fileName
);
/** @brief Get the label of each image.
*/
CV_WRAP
void
loadimg
(
string
path
,
char
*
buffer
,
bool
is_color
);
CV_WRAP
void
convert
(
string
imgdir
,
string
outputdb
,
string
attachdir
,
int
channel
,
int
width
,
int
height
);
/** @brief Load images.
*/
CV_WRAP
void
convert
(
string
imgdir
,
string
outputdb
,
string
attachdir
,
int
channel
,
int
width
,
int
height
);
/** @brief Convert a set of images into a leveldb database for CNN training.
*/
template
<
typename
Dtype
>
CV_WRAP
std
::
vector
<
cv
::
Mat
>
feature_extraction_pipeline
(
std
::
string
pretrained_binary_proto
,
std
::
string
feature_extraction_proto
,
std
::
string
save_feature_dataset_names
,
std
::
string
extract_feature_blob_names
,
int
num_mini_batches
,
std
::
string
device
,
int
dev_id
);
/** @brief Extract features into a binary file and a vector<cv::Mat> for classification. The model proto and network proto are needed; all images under the file root will be used for feature extraction.
*/
};
//! @}
}}
...
...
modules/cnn_3dobj/src/cnn_image2db.cpp
View file @
5072cc69
...
...
@@ -147,4 +147,92 @@ namespace cnn_3dobj
writefile
.
close
();
};
template
<
typename
Dtype
>
std
::
vector
<
cv
::
Mat
>
feature_extraction_pipeline
(
std
::
string
pretrained_binary_proto
,
std
::
string
feature_extraction_proto
,
std
::
string
save_feature_dataset_names
,
std
::
string
extract_feature_blob_names
,
int
num_mini_batches
,
std
::
string
device
,
int
dev_id
)
{
if
(
strcmp
(
device
.
c_str
(),
"GPU"
)
==
0
)
{
LOG
(
ERROR
)
<<
"Using GPU"
;
int
device_id
=
0
;
if
(
strcmp
(
device
.
c_str
(),
"GPU"
)
==
0
)
{
device_id
=
dev_id
;
CHECK_GE
(
device_id
,
0
);
}
LOG
(
ERROR
)
<<
"Using Device_id="
<<
device_id
;
Caffe
::
SetDevice
(
device_id
);
Caffe
::
set_mode
(
Caffe
::
GPU
);
}
else
{
LOG
(
ERROR
)
<<
"Using CPU"
;
Caffe
::
set_mode
(
Caffe
::
CPU
);
}
boost
::
shared_ptr
<
Net
<
Dtype
>
>
feature_extraction_net
(
new
Net
<
Dtype
>
(
feature_extraction_proto
,
caffe
::
TEST
));
feature_extraction_net
->
CopyTrainedLayersFrom
(
pretrained_binary_proto
);
std
::
vector
<
std
::
string
>
blob_names
;
blob_names
.
push_back
(
extract_feature_blob_names
);
std
::
vector
<
std
::
string
>
dataset_names
;
dataset_names
.
push_back
(
save_feature_dataset_names
);
CHECK_EQ
(
blob_names
.
size
(),
dataset_names
.
size
())
<<
" the number of blob names and dataset names must be equal"
;
size_t
num_features
=
blob_names
.
size
();
for
(
size_t
i
=
0
;
i
<
num_features
;
i
++
)
{
CHECK
(
feature_extraction_net
->
has_blob
(
blob_names
[
i
]))
<<
"Unknown feature blob name "
<<
blob_names
[
i
]
<<
" in the network "
<<
feature_extraction_proto
;
}
std
::
vector
<
FILE
*>
files
;
for
(
size_t
i
=
0
;
i
<
num_features
;
++
i
)
{
LOG
(
INFO
)
<<
"Opening file "
<<
dataset_names
[
i
];
FILE
*
temp
=
fopen
(
dataset_names
[
i
].
c_str
(),
"wb"
);
files
.
push_back
(
temp
);
}
LOG
(
ERROR
)
<<
"Extacting Features"
;
Datum
datum
;
std
::
vector
<
cv
::
Mat
>
featureVec
;
std
::
vector
<
Blob
<
float
>*>
input_vec
;
std
::
vector
<
int
>
image_indices
(
num_features
,
0
);
for
(
int
batch_index
=
0
;
batch_index
<
num_mini_batches
;
++
batch_index
)
{
feature_extraction_net
->
Forward
(
input_vec
);
for
(
int
i
=
0
;
i
<
num_features
;
++
i
)
{
const
boost
::
shared_ptr
<
Blob
<
Dtype
>
>
feature_blob
=
feature_extraction_net
->
blob_by_name
(
blob_names
[
i
]);
int
batch_size
=
feature_blob
->
num
();
int
dim_features
=
feature_blob
->
count
()
/
batch_size
;
if
(
batch_index
==
0
)
{
int
fea_num
=
batch_size
*
num_mini_batches
;
fwrite
(
&
dim_features
,
sizeof
(
int
),
1
,
files
[
i
]);
fwrite
(
&
fea_num
,
sizeof
(
int
),
1
,
files
[
i
]);
}
const
Dtype
*
feature_blob_data
;
for
(
int
n
=
0
;
n
<
batch_size
;
++
n
)
{
feature_blob_data
=
feature_blob
->
cpu_data
()
+
feature_blob
->
offset
(
n
);
fwrite
(
feature_blob_data
,
sizeof
(
Dtype
),
dim_features
,
files
[
i
]);
for
(
int
dim
=
0
;
dim
<
dim_features
;
dim
++
)
{
cv
::
Mat
tempfeat
=
cv
::
Mat
(
1
,
dim_features
,
CV_32FC1
);
tempfeat
.
at
<
Dtype
>
(
0
,
dim
)
=
*
(
feature_blob_data
++
);
featureVec
.
push_back
(
tempfeat
);
}
++
image_indices
[
i
];
if
(
image_indices
[
i
]
%
1000
==
0
)
{
LOG
(
ERROR
)
<<
"Extracted features of "
<<
image_indices
[
i
]
<<
" query images for feature blob "
<<
blob_names
[
i
];
}
}
// for (int n = 0; n < batch_size; ++n)
}
// for (int i = 0; i < num_features; ++i)
}
// for (int batch_index = 0; batch_index < num_mini_batches; ++batch_index)
// write the last batch
for
(
int
i
=
0
;
i
<
num_features
;
++
i
)
{
fclose
(
files
[
i
]);
}
LOG
(
ERROR
)
<<
"Successfully extracted the features!"
;
return
featureVec
;
};
}}
modules/cnn_3dobj/src/precomp.hpp
View file @
5072cc69
...
...
@@ -43,14 +43,37 @@ the use of this software, even if advised of the possibility of such damage.
#define __OPENCV_CNN_3DOBJ_PRECOMP_HPP__
#include <opencv2/cnn_3dobj.hpp>
#include <opencv2/calib3d.hpp>
#include <opencv2/viz/vizcore.hpp>
#include <opencv2/highgui.hpp>
#include <iostream>
#include <vector>
#include <opencv/cv.h>
#include <opencv/cxcore.h>
#include <string>
#include <fstream>
#include <vector>
#include <stdio.h>
#include <iostream>
#include <math.h>
#include <iostream>
#include <set>
#include <string.h>
#include <stdlib.h>
#include <tr1/memory>
#include <dirent.h>
#include <glog/logging.h>
#include <google/protobuf/text_format.h>
#include <leveldb/db.h>
#include <opencv2/calib3d.hpp>
#include <opencv2/viz/vizcore.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/highgui/highgui_c.h>
#define CPU_ONLY
#include <caffe/blob.hpp>
#include <caffe/common.hpp>
#include <caffe/net.hpp>
#include <caffe/proto/caffe.pb.h>
#include <caffe/util/io.hpp>
#include <caffe/vision_layers.hpp>
using
std
::
string
;
using
caffe
::
Blob
;
using
caffe
::
Caffe
;
using
caffe
::
Datum
;
using
caffe
::
Net
;
#endif
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment