Commit 8aa47cac authored by Wangyida

add training shell script in cnn_3dobj/samples/build

parent 109da10f
name: "lfw_siamese"
input: "data"
input_dim: 10000
input_dim: 1
input_dim: 150
input_dim: 130
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 20
kernel_size: 5
stride: 1
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
convolution_param {
num_output: 50
kernel_size: 5
stride: 1
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool2"
top: "ip1"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 500
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "ip1"
top: "ip1"
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 10
}
}
layer {
name: "feat"
type: "InnerProduct"
bottom: "ip2"
top: "feat"
param {
lr_mult: 1
}
param {
lr_mult: 2
}
inner_product_param {
num_output: 2
}
}
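
The single-branch definition above is the deploy/feature-extraction counterpart of the triplet training net further down: a 1-channel 150x130 input mapped to a 2-D "feat" descriptor. As a rough illustration of how such a deploy file could be used once training has produced a snapshot (file names are placeholders, and loading through OpenCV's dnn Caffe importer is an assumption, not part of this commit):

#include <opencv2/dnn.hpp>
#include <opencv2/imgcodecs.hpp>

int main()
{
    // Placeholder file names; the solver below would write snapshots such as
    // lfw_triplet_iter_50000.caffemodel under examples/triplet/.
    cv::dnn::Net net = cv::dnn::readNetFromCaffe("lfw_siamese.prototxt",
                                                 "lfw_triplet_iter_50000.caffemodel");
    cv::Mat img = cv::imread("view.png", cv::IMREAD_GRAYSCALE);
    // 1 x 1 x 150 x 130 input, scaled by 1/256 as in the training transform_param.
    cv::Mat blob = cv::dnn::blobFromImage(img, 0.00390625, cv::Size(130, 150));
    net.setInput(blob);
    cv::Mat feat = net.forward("feat");   // 1 x 2 descriptor for this view
    return 0;
}
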
# The train/test net protocol buffer definition
net: "examples/triplet/lfw_triplet_train_test.prototxt"
# test_iter specifies how many forward passes the test should carry out.
# In the case of lfw, we have test batch size 100 and 100 test iterations,
# covering the full 10,000 testing images.
test_iter: 100
# Carry out testing every 500 training iterations.
test_interval: 500
# The base learning rate, momentum and the weight decay of the network.
base_lr: 0.01
momentum: 0.9
weight_decay: 0.0000
# The learning rate policy
lr_policy: "inv"
gamma: 0.0001
power: 0.75
# Display every 100 iterations
display: 100
# The maximum number of iterations
max_iter: 50000
# snapshot intermediate results
snapshot: 5000
snapshot_prefix: "examples/triplet/lfw_triplet"
# solver mode: CPU or GPU
solver_mode: CPU
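
The "inv" policy above decays the learning rate as base_lr * (1 + gamma * iter)^(-power), which is Caffe's standard formula for this policy. A small sketch of the resulting schedule for the values in this solver:

#include <cmath>
#include <cstdio>

// lr(iter) = base_lr * (1 + gamma * iter)^(-power), the "inv" policy above.
static double inv_lr(double base_lr, double gamma, double power, int iter)
{
    return base_lr * std::pow(1.0 + gamma * iter, -power);
}

int main()
{
    // base_lr = 0.01, gamma = 0.0001, power = 0.75 as in the solver
    std::printf("iter 0:     %f\n", inv_lr(0.01, 0.0001, 0.75, 0));       // 0.010000
    std::printf("iter 10000: %f\n", inv_lr(0.01, 0.0001, 0.75, 10000));   // ~0.005946
    std::printf("iter 50000: %f\n", inv_lr(0.01, 0.0001, 0.75, 50000));   // ~0.002609
    return 0;
}
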
name: "lfw_triplet_train_test"
layer {
name: "triplet_data"
type: "Data"
top: "triplet_data"
top: "sim"
include {
phase: TRAIN
}
transform_param {
scale: 0.00390625
}
data_param {
source: "examples/triplet/lfw_triplet_train_leveldb"
batch_size: 64
}
}
layer {
name: "triplet_data"
type: "Data"
top: "triplet_data"
top: "sim"
include {
phase: TEST
}
transform_param {
scale: 0.00390625
}
data_param {
source: "examples/triplet/lfw_triplet_test_leveldb"
batch_size: 100
}
}
layer {
name: "slice_triplet"
type: "Slice"
bottom: "triplet_data"
top: "data"
top: "data_true"
top: "data_false"
slice_param {
slice_dim: 1
slice_point: 1
slice_point: 2
}
}
layer {
name: "conv1"
type: "Convolution"
bottom: "data"
top: "conv1"
param {
name: "conv1_w"
lr_mult: 1
}
param {
name: "conv1_b"
lr_mult: 2
}
convolution_param {
num_output: 20
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1"
type: "Pooling"
bottom: "conv1"
top: "pool1"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2"
type: "Convolution"
bottom: "pool1"
top: "conv2"
param {
name: "conv2_w"
lr_mult: 1
}
param {
name: "conv2_b"
lr_mult: 2
}
convolution_param {
num_output: 50
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool2"
type: "Pooling"
bottom: "conv2"
top: "pool2"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1"
type: "InnerProduct"
bottom: "pool2"
top: "ip1"
param {
name: "ip1_w"
lr_mult: 1
}
param {
name: "ip1_b"
lr_mult: 2
}
inner_product_param {
num_output: 500
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu1"
type: "ReLU"
bottom: "ip1"
top: "ip1"
}
layer {
name: "ip2"
type: "InnerProduct"
bottom: "ip1"
top: "ip2"
param {
name: "ip2_w"
lr_mult: 1
}
param {
name: "ip2_b"
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "feat"
type: "InnerProduct"
bottom: "ip2"
top: "feat"
param {
name: "feat_w"
lr_mult: 1
}
param {
name: "feat_b"
lr_mult: 2
}
inner_product_param {
num_output: 2
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "conv1_true"
type: "Convolution"
bottom: "data_true"
top: "conv1_true"
param {
name: "conv1_w"
lr_mult: 1
}
param {
name: "conv1_b"
lr_mult: 2
}
convolution_param {
num_output: 20
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1_true"
type: "Pooling"
bottom: "conv1_true"
top: "pool1_true"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2_true"
type: "Convolution"
bottom: "pool1_true"
top: "conv2_true"
param {
name: "conv2_w"
lr_mult: 1
}
param {
name: "conv2_b"
lr_mult: 2
}
convolution_param {
num_output: 50
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool2_true"
type: "Pooling"
bottom: "conv2_true"
top: "pool2_true"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1_true"
type: "InnerProduct"
bottom: "pool2_true"
top: "ip1_true"
param {
name: "ip1_w"
lr_mult: 1
}
param {
name: "ip1_b"
lr_mult: 2
}
inner_product_param {
num_output: 500
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu1_true"
type: "ReLU"
bottom: "ip1_true"
top: "ip1_true"
}
layer {
name: "ip2_true"
type: "InnerProduct"
bottom: "ip1_true"
top: "ip2_true"
param {
name: "ip2_w"
lr_mult: 1
}
param {
name: "ip2_b"
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "feat_true"
type: "InnerProduct"
bottom: "ip2_true"
top: "feat_true"
param {
name: "feat_w"
lr_mult: 1
}
param {
name: "feat_b"
lr_mult: 2
}
inner_product_param {
num_output: 2
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "conv1_false"
type: "Convolution"
bottom: "data_false"
top: "conv1_false"
param {
name: "conv1_w"
lr_mult: 1
}
param {
name: "conv1_b"
lr_mult: 2
}
convolution_param {
num_output: 20
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool1_false"
type: "Pooling"
bottom: "conv1_false"
top: "pool1_false"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "conv2_false"
type: "Convolution"
bottom: "pool1_false"
top: "conv2_false"
param {
name: "conv2_w"
lr_mult: 1
}
param {
name: "conv2_b"
lr_mult: 2
}
convolution_param {
num_output: 50
kernel_size: 5
stride: 1
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "pool2_false"
type: "Pooling"
bottom: "conv2_false"
top: "pool2_false"
pooling_param {
pool: MAX
kernel_size: 2
stride: 2
}
}
layer {
name: "ip1_false"
type: "InnerProduct"
bottom: "pool2_false"
top: "ip1_false"
param {
name: "ip1_w"
lr_mult: 1
}
param {
name: "ip1_b"
lr_mult: 2
}
inner_product_param {
num_output: 500
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "relu1_false"
type: "ReLU"
bottom: "ip1_false"
top: "ip1_false"
}
layer {
name: "ip2_false"
type: "InnerProduct"
bottom: "ip1_false"
top: "ip2_false"
param {
name: "ip2_w"
lr_mult: 1
}
param {
name: "ip2_b"
lr_mult: 2
}
inner_product_param {
num_output: 10
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "feat_false"
type: "InnerProduct"
bottom: "ip2_false"
top: "feat_false"
param {
name: "feat_w"
lr_mult: 1
}
param {
name: "feat_b"
lr_mult: 2
}
inner_product_param {
num_output: 2
weight_filler {
type: "xavier"
}
bias_filler {
type: "constant"
}
}
}
layer {
name: "loss"
type: "TripletLoss"
bottom: "feat"
bottom: "feat_true"
bottom: "feat_false"
bottom: "sim"
top: "loss"
triplet_loss_param {
margin: 0.2
}
}
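
The TripletLoss layer above consumes the anchor ("feat"), positive ("feat_true") and negative ("feat_false") descriptors produced by the three weight-shared branches (note the shared conv1_w/ip1_w/... param names). Its exact implementation lives in the author's cnn_triplet Caffe branch; purely as an illustration, a common hinge-style formulation with a margin looks like the following sketch (assumed, not taken from that branch):

#include <algorithm>
#include <cstdio>
#include <vector>

// Generic margin-based triplet loss:
//   loss = max(0, ||a - p||^2 - ||a - n||^2 + margin)
// Illustrative only; the actual TripletLoss layer is implemented in the
// referenced cnn_triplet Caffe branch.
static double sq_dist(const std::vector<double>& x, const std::vector<double>& y)
{
    double d = 0.0;
    for (size_t i = 0; i < x.size(); ++i)
        d += (x[i] - y[i]) * (x[i] - y[i]);
    return d;
}

static double triplet_loss(const std::vector<double>& anchor,
                           const std::vector<double>& positive,
                           const std::vector<double>& negative,
                           double margin)
{
    return std::max(0.0, sq_dist(anchor, positive) - sq_dist(anchor, negative) + margin);
}

int main()
{
    std::vector<double> a(2, 0.0), p(2, 0.0), n(2, 0.0);
    p[0] = 0.1;  // positive close to the anchor
    n[0] = 1.0;  // negative far from the anchor
    std::printf("%f\n", triplet_loss(a, p, n, 0.2));  // max(0, 0.01 - 1.0 + 0.2) = 0
    return 0;
}
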
#!/usr/bin/env sh
# This script builds the triplet branch of Caffe, renders sphere-view images
# for each .ply model, converts them into leveldb format and launches training.
git clone -b cnn_triplet https://github.com/Wangyida/caffe.git
cd caffe
mkdir build
cd build
cmake -DCMAKE_INSTALL_PREFIX=/usr/local ..
make -j4
make test
make install
# back to cnn_3dobj/samples/build and build the samples (sphereview_test)
cd ../..
cmake ..
make -j4
./sphereview_test -radius=250 -ite_depth=4 -plymodel=../ape.ply -imagedir=../data/images_ape/ -labeldir=../data/label_ape.txt -num_class=3 -label_class=0
./sphereview_test -radius=250 -ite_depth=4 -plymodel=../duck.ply -imagedir=../data/images_duck/ -labeldir=../data/label_duck.txt -num_class=3 -label_class=1
./sphereview_test -radius=250 -ite_depth=4 -plymodel=../cat.ply -imagedir=../data/images_cat/ -labeldir=../data/label_cat.txt -num_class=3 -label_class=2
echo "Creating leveldb..."
rm -rf ./linemod_triplet_train_leveldb
rm -rf ./linemod_triplet_test_leveldb
convert_lfw_triplet_data \
./binary_image_train \
./binary_label_train \
./linemod_triplet_train_leveldb
convert_lfw_triplet_data \
./binary_image_test \
./binary_label_test \
./linemod_triplet_test_leveldb
echo "Done."
caffe train --solver=examples/triplet/lfw_triplet_solver.prototxt
0 0.707107 0.707107
0.211325 0.57735 0.788675
-0.211325 0.57735 0.788675
0.408248 0.408248 0.816497
0 0.447214 0.894427
0.211325 0.57735 0.788675
-0.408248 0.408248 0.816497
-0.211325 0.57735 0.788675
0.211325 0.57735 0.788675
-0.211325 0.57735 0.788675
0.707107 0 0.707107
0.382683 0 0.92388
0.57735 0.211325 0.788675
0 0 1
0.214187 0.214187 0.953021
0.57735 0.211325 0.788675
0.57735 0.211325 0.788675
-0.707107 0 0.707107
-0.57735 0.211325 0.788675
-0.382683 0 0.92388
-0.214187 0.214187 0.953021
-0.57735 0.211325 0.788675
-0.57735 0.211325 0.788675
-0.707107 0.707107 0
-0.57735 0.788675 0.211325
-0.408248 0.816497 0.408248
-0.211325 0.788675 0.57735
0 0.707107 -0.707107
0 0.92388 -0.382683
0 1 4.21468e-08
0 0.92388 0.382683
0.211325 0.788675 -0.57735
0 0.92388 -0.382683
0.408248 0.816497 -0.408248
0.214186 0.953021 -0.214187
0 1 0
0.707107 0.707107 0
0.57735 0.788675 0.211325
0.57735 0.788675 -0.211325
0.408248 0.816497 0.408248
0.447214 0.894427 0
0 0.92388 0.382683
0.211325 0.788675 0.57735
0.214186 0.953021 0.214187
0.788675 0.57735 0.211325
0.816497 0.408248 0.408248
0.788675 0.211325 0.57735
0.707107 -0.707107 0
0.788675 -0.57735 0.211325
0.92388 -0.382683 0
0.816497 -0.408248 0.408248
0.953021 -0.214186 0.214187
1 4.21468e-08 0
0.788675 -0.211325 0.57735
0.894427 1.63234e-08 0.447214
0.92388 0.382683 0
0.953021 0.214187 0.214187
0.707107 0 -0.707107
0.788675 -0.211325 -0.57735
0.57735 0.211325 -0.788675
0.816497 -0.408248 -0.408248
0.408248 0.408248 -0.816497
0.57735 0.211325 -0.788675
0.57735 0.211325 -0.788675
0.816497 -0.408248 -0.408248
0.788675 -0.57735 -0.211325
0.211325 0.57735 -0.788675
0.408248 0.408248 -0.816497
0.788675 0.211325 -0.57735
0.816497 0.408248 -0.408248
0.639602 0.426401 -0.639602
0.788675 0.57735 -0.211325
0.408248 0.816497 -0.408248
0.639602 0.639602 -0.426401
0.211325 0.57735 -0.788675
0.211325 0.788675 -0.57735
0.426401 0.639602 -0.639602
0.211325 0.57735 -0.788675
0.211325 0.57735 -0.788675
-0.707107 0 -0.707107
-0.382683 0 -0.92388
0 0 -1
0.382683 0 -0.92388
0 -0.707107 -0.707107
0.211325 -0.57735 -0.788675
0.408248 -0.408248 -0.816497
0.211325 -0.57735 -0.788675
0.211325 -0.57735 -0.788675
0.57735 -0.211325 -0.788675
0.57735 -0.211325 -0.788675
0.57735 -0.211325 -0.788675
0.788675 -0.57735 -0.211325
0.57735 -0.788675 -0.211325
0.639602 -0.639602 -0.426401
0.408248 -0.816497 -0.408248
0.639602 -0.426401 -0.639602
0.211325 -0.788675 -0.57735
0.426401 -0.639602 -0.639602
0 -0.707107 0.707107
0.211325 -0.788675 0.57735
0 -0.92388 0.382683
0.408248 -0.816497 0.408248
0.214187 -0.953021 0.214186
0 -1 -4.21468e-08
0.57735 -0.788675 0.211325
0.447214 -0.894427 -1.63234e-08
0 -0.92388 -0.382683
0.214187 -0.953021 -0.214187
-0.707107 -0.707107 0
-0.57735 -0.788675 0.211325
-0.408248 -0.816497 0.408248
-0.211325 -0.788675 0.57735
-0.57735 -0.211325 0.788675
-0.408248 -0.408248 0.816497
-0.57735 -0.211325 0.788675
-0.57735 -0.211325 0.788675
-0.211325 -0.57735 0.788675
-0.211325 -0.57735 0.788675
-0.211325 -0.57735 0.788675
0.57735 -0.211325 0.788675
0.408248 -0.408248 0.816497
0.57735 -0.211325 0.788675
0.57735 -0.211325 0.788675
0.211325 -0.57735 0.788675
0.211325 -0.57735 0.788675
0.211325 -0.57735 0.788675
0.788675 -0.211325 0.57735
0.816497 -0.408248 0.408248
0.57735 0.211325 0.788675
-nan -nan -nan
-nan -nan -nan
-nan -nan -nan
-nan -nan -nan
-nan -nan -nan
-nan -nan -nan
-nan -nan -nan
-nan -nan -nan
0.806898 7.62273e-09 0.59069
0.788675 -0.211325 0.57735
0.874728 0.208721 0.437364
0.894427 1.63234e-08 0.447214
0.849313 0.106637 0.517007
0.809511 -0.31246 0.497052
0.874728 -0.208721 0.437364
0.849313 -0.106637 0.517007
0.83147 0.55557 0
0.92388 0.382683 0
0.867278 0.486184 0.10702
0.988185 0.108374 0.108374
0.980785 0.19509 0
0.953021 0.214187 0.214187
0.947388 0.301277 0.108113
0.895337 0.314938 0.314938
0.888679 0.403872 0.217112
0.935925 -0.108508 0.335068
0.935925 0.108508 0.335068
0.975663 2.28828e-08 0.219275
0.707107 0 -0.707107
0.754344 -0.106574 -0.64777
0.64777 0.106574 -0.754344
0.788675 -0.211325 -0.57735
0.57735 0.211325 -0.788675
0.816497 -0.408248 -0.408248
0.809511 -0.31246 -0.497052
0.408248 0.408248 -0.816497
0.497052 0.31246 -0.809511
0.57735 0.211325 -0.788675
0.57735 0.211325 -0.788675
0.64777 0.106574 -0.754344
0.707107 -7.71341e-09 -0.707107
0.788675 -0.57735 -0.211325
0.754344 -0.64777 -0.106574
0.816497 -0.408248 -0.408248
0.809511 -0.497052 -0.31246
0.788675 -0.57735 -0.211325
0.809511 -0.31246 -0.497052
0.809511 -0.497052 -0.31246
0.106574 0.64777 -0.754344
0.211325 0.57735 -0.788675
0.211325 0.57735 -0.788675
0.31246 0.497052 -0.809511
0.408248 0.408248 -0.816497
0.211325 0.57735 -0.788675
0.211325 0.57735 -0.788675
0.497052 0.31246 -0.809511
0.31246 0.497052 -0.809511
0.497052 0.31246 -0.809511
0.754344 0.106574 -0.64777
0.788675 0.211325 -0.57735
0.690768 0.213724 -0.690768
0.816497 0.408248 -0.408248
0.735924 0.421839 -0.529592
0.809511 0.31246 -0.497052
0.639602 0.426401 -0.639602
0.720687 0.321787 -0.614055
0.529592 0.421839 -0.735924
0.614055 0.321787 -0.720687
0.754344 0.64777 -0.106574
0.690768 0.690768 -0.213724
0.788675 0.57735 -0.211325
0.408248 0.816497 -0.408248
0.529592 0.735924 -0.421839
0.497052 0.809511 -0.31246
0.639602 0.639602 -0.426401
0.614055 0.720687 -0.321787
0.809511 0.497052 -0.31246
0.735924 0.529592 -0.421839
0.720687 0.614055 -0.321787
0.106574 0.64777 -0.754344
0.106574 0.754344 -0.64777
0.213724 0.690768 -0.690768
0.211325 0.788675 -0.57735
0.421839 0.529592 -0.735924
0.31246 0.497052 -0.809511
0.426401 0.639602 -0.639602
0.321787 0.614055 -0.720687
0.31246 0.809511 -0.497052
0.421839 0.735924 -0.529592
0.321787 0.720687 -0.614055
0.646997 0.539164 -0.539164
0.539164 0.646997 -0.539164
0.539164 0.539164 -0.646997
-0.707107 0 -0.707107
-0.55557 0 -0.83147
-0.382683 0 -0.92388
0 0 -1
-0.19509 0 -0.980785
0.55557 0 -0.83147
0.382683 0 -0.92388
0.19509 0 -0.980785
0 -0.707107 -0.707107
0.106574 -0.64777 -0.754344
0.211325 -0.57735 -0.788675
0.408248 -0.408248 -0.816497
0.31246 -0.497052 -0.809511
0.31246 -0.497052 -0.809511
0.211325 -0.57735 -0.788675
0.106574 -0.64777 -0.754344
0.64777 -0.106574 -0.754344
0.57735 -0.211325 -0.788675
0.57735 -0.211325 -0.788675
0.497052 -0.31246 -0.809511
0.754344 -0.64777 -0.106574
0.64777 -0.754344 -0.106574
0.690768 -0.690768 -0.213724
0.57735 -0.788675 -0.211325
0.735924 -0.529592 -0.421839
0.639602 -0.639602 -0.426401
0.720687 -0.614055 -0.321787
0.408248 -0.816497 -0.408248
0.497052 -0.809511 -0.31246
0.529592 -0.735924 -0.421839
0.614055 -0.720687 -0.321787
0.690768 -0.213724 -0.690768
0.529592 -0.421839 -0.735924
0.639602 -0.426401 -0.639602
0.614055 -0.321787 -0.720687
0.735924 -0.421839 -0.529592
0.720687 -0.321787 -0.614055
0.106574 -0.754344 -0.64777
0.211325 -0.788675 -0.57735
0.213724 -0.690768 -0.690768
0.421839 -0.735924 -0.529592
0.31246 -0.809511 -0.497052
0.426401 -0.639602 -0.639602
0.321787 -0.720687 -0.614055
0.421839 -0.529592 -0.735924
0.321787 -0.614055 -0.720687
0.646997 -0.539164 -0.539164
0.539164 -0.539164 -0.646997
0.539164 -0.646997 -0.539164
0 -0.707107 0.707107
0.106574 -0.754344 0.64777
0 -0.83147 0.55557
0.211325 -0.788675 0.57735
0.10702 -0.867278 0.486184
0 -0.92388 0.382683
0.408248 -0.816497 0.408248
0.314938 -0.895337 0.314938
0.31246 -0.809511 0.497052
0.214187 -0.953021 0.214186
0.217112 -0.888679 0.403872
0 -1 -4.21468e-08
0 -0.980785 0.19509
0.108374 -0.988185 0.108374
0.108113 -0.947388 0.301277
0.64777 -0.754344 0.106574
0.59069 -0.806898 -7.62273e-09
0.57735 -0.788675 0.211325
0.437364 -0.874728 -0.208721
0.447214 -0.894427 -1.63234e-08
0.517007 -0.849313 -0.106637
0.517007 -0.849313 -0.106637
0.517007 -0.849313 -0.106637
0.497052 -0.809511 0.31246
0.437364 -0.874728 0.208721
0.517007 -0.849313 0.106637
0.517007 -0.849313 0.106637
0.517007 -0.849313 0.106637
0 -0.83147 -0.55557
0 -0.92388 -0.382683
0.10702 -0.867278 -0.486184
0.108374 -0.988185 -0.108374
0 -0.980785 -0.19509
0.214187 -0.953021 -0.214187
0.108113 -0.947388 -0.301277
0.314938 -0.895337 -0.314938
0.217112 -0.888679 -0.403872
0.335068 -0.935925 0.108508
0.335068 -0.935925 -0.108508
0.219275 -0.975663 -2.28828e-08
-0.707107 -0.707107 0
-0.64777 -0.754344 0.106574
-0.57735 -0.788675 0.211325
-0.408248 -0.816497 0.408248
-0.497052 -0.809511 0.31246
-0.106574 -0.754344 0.64777
-0.211325 -0.788675 0.57735
-0.31246 -0.809511 0.497052
-0.64777 -0.106574 0.754344
-0.57735 -0.211325 0.788675
-0.408248 -0.408248 0.816497
-0.497052 -0.31246 0.809511
-0.497052 -0.31246 0.809511
-0.57735 -0.211325 0.788675
-0.64777 -0.106574 0.754344
-0.106574 -0.64777 0.754344
-0.211325 -0.57735 0.788675
-0.211325 -0.57735 0.788675
-0.31246 -0.497052 0.809511
0.64777 -0.106574 0.754344
0.57735 -0.211325 0.788675
0.57735 -0.211325 0.788675
0.408248 -0.408248 0.816497
0.497052 -0.31246 0.809511
0.106574 -0.64777 0.754344
0.211325 -0.57735 0.788675
0.31246 -0.497052 0.809511
0.211325 -0.57735 0.788675
0.106574 -0.64777 0.754344
0.31246 -0.497052 0.809511
0.754344 -0.106574 0.64777
0.57735 0.211325 0.788675
0.788675 -0.211325 0.57735
0.816497 -0.408248 0.408248
0.754344 -0.106574 0.64777
0.809511 -0.31246 0.497052
0.57735 0.211325 0.788675
0.211325 0.57735 0.788675
0.31246 0.497052 0.809511
0.497052 0.31246 0.809511
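
The three-component rows above appear to be the unit camera-position vectors generated by the icosphere subdivision in sphereview_test (controlled by -ite_depth); the command-line help below describes the refinement as adding points on the middle of the sphere's edges and re-adjusting the radius. A minimal sketch of that midpoint step, assuming unit-sphere vertices (this is not the IcoSphere code from the module):

#include <cmath>
#include <cstdio>

struct Vec3 { double x, y, z; };

// One icosphere refinement step for a single edge: take the midpoint of two
// unit-sphere vertices and normalize it back onto the sphere. Illustrative
// only; cv::cnn_3dobj::IcoSphere has its own implementation.
static Vec3 midpointOnSphere(const Vec3& a, const Vec3& b)
{
    Vec3 m = { (a.x + b.x) / 2.0, (a.y + b.y) / 2.0, (a.z + b.z) / 2.0 };
    double len = std::sqrt(m.x * m.x + m.y * m.y + m.z * m.z);
    m.x /= len; m.y /= len; m.z /= len;
    return m;
}

int main()
{
    Vec3 a = {0.0, 0.707107, 0.707107};
    Vec3 b = {0.0, 0.0, 1.0};
    Vec3 m = midpointOnSphere(a, b);
    std::printf("%g %g %g\n", m.x, m.y, m.z);  // ~0 0.382683 0.92388, values seen above
    return 0;
}
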
@@ -41,6 +41,10 @@
 using namespace cv;
 using namespace std;
 using namespace cv::cnn_3dobj;
+uint32_t swap_endian(uint32_t val) {
+val = ((val << 8) & 0xFF00FF00) | ((val >> 8) & 0xFF00FF);
+return (val << 16) | (val >> 16);
+}
 Point3d getCenter(string plymodel)
 {
 char* path_model=(char*)plymodel.data();
@@ -79,13 +83,17 @@ void createHeader(int num_item, int rows, int cols, const char* headerPath)
 std::ofstream headerImg(headerPathimg, ios::out|ios::binary);
 std::ofstream headerLabel(headerPathlab, ios::out|ios::binary);
 int headerimg[4] = {2051,num_item,rows,cols};
+for (int i=0; i<4; i++)
+headerimg[i] = swap_endian(headerimg[i]);
 int headerlabel[2] = {2049,num_item};
+for (int i=0; i<2; i++)
+headerlabel[i] = swap_endian(headerlabel[i]);
 headerImg.write(reinterpret_cast<const char*>(headerimg), sizeof(int)*4);
 headerImg.close();
 headerLabel.write(reinterpret_cast<const char*>(headerlabel), sizeof(int)*2);
 headerLabel.close();
 };
-void writeBinaryfile(string filename, const char* binaryPath, const char* headerPath, int num_item)
+void writeBinaryfile(string filename, const char* binaryPath, const char* headerPath, int num_item, int label_class)
 {
 int isrgb = 0;
 cv::Mat ImgforBin = cv::imread(filename, isrgb);
@@ -122,7 +130,7 @@ void writeBinaryfile(string filename, const char* binaryPath, const char* header
 {
 img_file.write(reinterpret_cast<const char*>(ImgforBin.ptr(r)), ImgforBin.cols*ImgforBin.elemSize());
 }
-unsigned char templab = 0;
+unsigned char templab = (unsigned char)label_class;
 lab_file << templab;
 }
 else
@@ -136,19 +144,21 @@ void writeBinaryfile(string filename, const char* binaryPath, const char* header
 {
 img_file.write(reinterpret_cast<const char*>(ImgforBin.ptr(r)), ImgforBin.cols*ImgforBin.elemSize());
 }
-unsigned char templab = 0;
+unsigned char templab = (unsigned char)label_class;
 lab_file << templab;
 }
 img_file.close();
 lab_file.close();
 };
 int main(int argc, char *argv[]){
-const String keys = "{help | | demo :$ ./sphereview_test -radius=250 -ite_depth=1 -plymodel=../ape.ply -imagedir=../data/images_ape/ -labeldir=../data/label_ape.txt, then press 'q' to run the demo for images generation when you see the gray background and a coordinate.}"
+const String keys = "{help | | demo :$ ./sphereview_test -radius=250 -ite_depth=2 -plymodel=../ape.ply -imagedir=../data/images_ape/ -labeldir=../data/label_ape.txt -num_class=2 -label_class=0, then press 'q' to run the demo for images generation when you see the gray background and a coordinate.}"
 "{radius | 250 | Distanse from camera to object, used for adjust view for the reason that differet scale of .ply model.}"
 "{ite_depth | 1 | Iteration of sphere generation, we add points on the middle of lines of sphere and adjust the radius suit for the original radius.}"
 "{plymodel | ../ape.ply | path of the '.ply' file for image rendering. }"
 "{imagedir | ../data/images_ape/ | path of the generated images for one particular .ply model. }"
-"{labeldir | ../data/label_ape.txt | path of the generated images for one particular .ply model. }";
+"{labeldir | ../data/label_ape.txt | path of the generated images for one particular .ply model. }"
+"{num_class | 2 | total number of classes of models}"
+"{label_class | 0 | class label of current .ply model}";
 cv::CommandLineParser parser(argc, argv, keys);
 parser.about("Demo for Sphere View data generation");
 if (parser.has("help"))
@@ -161,6 +171,8 @@ int main(int argc, char *argv[]){
 string plymodel = parser.get<string>("plymodel");
 string imagedir = parser.get<string>("imagedir");
 string labeldir = parser.get<string>("labeldir");
+int num_class = parser.get<int>("num_class");
+int label_class = parser.get<int>("label_class");
 cv::cnn_3dobj::IcoSphere ViewSphere(10,ite_depth);
 std::vector<cv::Point3d> campos = ViewSphere.CameraPos;
 std::fstream imglabel;
@@ -180,7 +192,6 @@ int main(int argc, char *argv[]){
 /// Let's assume camera has the following properties
 Point3d cam_focal_point = getCenter(plymodel);
 Point3d cam_y_dir(0.0f,0.0f,1.0f);
-int num_obj = 1;
 const char* headerPath = "./header_for_";
 const char* binaryPath = "./binary_";
 createHeader((int)campos.size(), 250, 250, headerPath);
@@ -225,7 +236,7 @@ int main(int argc, char *argv[]){
 filename = imagedir + filename;
 filename += ".png";
 myWindow.saveScreenshot(filename);
-writeBinaryfile(filename, binaryPath, headerPath,(int)campos.size()*num_obj);
+writeBinaryfile(filename, binaryPath, headerPath,(int)campos.size()*num_class, label_class);
 }
 return 1;
 };
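
For context on the header changes above: the magic numbers 2051 and 2049, combined with swap_endian, mean the generated binary_image_* / binary_label_* files follow the big-endian MNIST/IDX layout. A sketch of reading such a header back (the file name is a placeholder, not part of the commit):

#include <cstdint>
#include <cstdio>
#include <fstream>

static uint32_t swap_endian(uint32_t val)
{
    val = ((val << 8) & 0xFF00FF00) | ((val >> 8) & 0xFF00FF);
    return (val << 16) | (val >> 16);
}

int main()
{
    // Placeholder path; createHeader above derives the real names from "./header_for_".
    std::ifstream in("header_for_image", std::ios::binary);
    uint32_t hdr[4];
    in.read(reinterpret_cast<char*>(hdr), sizeof(hdr));
    for (int i = 0; i < 4; ++i)
        hdr[i] = swap_endian(hdr[i]);   // stored big-endian, convert back on a little-endian host
    std::printf("magic=%u items=%u rows=%u cols=%u\n",
                (unsigned)hdr[0], (unsigned)hdr[1], (unsigned)hdr[2], (unsigned)hdr[3]);
    return 0;
}
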