Commit 95d432db authored by Maksim Shabunin

Download cache

parent 38758066
@@ -41,7 +41,7 @@ function(append_if_exist OUTPUT_LIST)
set(${OUTPUT_LIST} ${${OUTPUT_LIST}} PARENT_SCOPE)
endfunction()
set(PROTOBUF_ROOT "${PROTOBUF_CPP_PATH}/protobuf-3.1.0")
set(PROTOBUF_ROOT "${PROTOBUF_CPP_ROOT}")
if(MSVC)
set(ATOMICOPS_INTERNALS ${PROTOBUF_ROOT}/src/google/protobuf/stubs/atomicops_internals_x86_msvc.cc)
@@ -2,6 +2,11 @@ if(WINRT)
ocv_module_disable(dnn)
endif()
include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/OpenCVFindLibProtobuf.cmake)
if(NOT DEFINED PROTOBUF_INCLUDE_DIR)
ocv_module_disable(opencv_dnn)
endif()
set(the_description "Deep neural network module. It allows loading models from different frameworks and running forward passes")
ocv_add_module(dnn opencv_core opencv_imgproc WRAP python matlab)
@@ -35,7 +40,20 @@ endif()
# ----------------------------------------------------------------------------
# Resolve libprotobuf dependency
# ----------------------------------------------------------------------------
include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/OpenCVFindLibProtobuf.cmake)
if(NOT PROTOBUF_FOUND)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/protobuf "${PROTOBUF_CPP_PATH}")
add_definitions(-DHAVE_PROTOBUF=1)
# suppress warnings in autogenerated caffe.pb.* files
ocv_warnings_disable(CMAKE_CXX_FLAGS
-Wunused-parameter -Wundef -Wignored-qualifiers -Wno-enum-compare
-Wdeprecated-declarations
/wd4125 /wd4267 /wd4127 /wd4244 /wd4512 /wd4702
/wd4456 /wd4510 /wd4610 /wd4800
-wd858 -wd2196
)
endif()
ocv_source_group("Src\\protobuf" FILES ${PROTOBUF_SRCS} ${PROTOBUF_HDRS})
ocv_module_include_directories(include ${PROTOBUF_INCLUDE_DIR})
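Whichever branch runs, the find script leaves the same PROTOBUF_LIBRARIES and PROTOBUF_INCLUDE_DIR variables behind, so module code consumes system and bundled protobuf identically. A minimal consumer sketch (illustrative only, not part of this commit):

    # Link the resolved protobuf into the module target; same call for both branches.
    target_link_libraries(${the_module} ${PROTOBUF_LIBRARIES})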
@@ -45,59 +63,14 @@ ocv_add_samples()
ocv_add_accuracy_tests()
ocv_add_perf_tests()
# ----------------------------------------------------------------------------
# Download pre-trained models for complex testing on GoogLeNet, AlexNet, Inception and ENet
# ----------------------------------------------------------------------------
OCV_OPTION(${the_module}_DOWNLOAD_MODELS "Use GoogLeNet Caffe model for testing" OFF IF BUILD_TESTS AND DEFINED ENV{OPENCV_TEST_DATA_PATH})
if(BUILD_TESTS AND DEFINED ENV{OPENCV_TEST_DATA_PATH} AND (DOWNLOAD_EXTERNAL_TEST_DATA OR ${the_module}_DOWNLOAD_MODELS))
add_custom_command( TARGET opencv_test_${name} POST_BUILD
COMMAND ${CMAKE_COMMAND} -Dmodel=GoogleNet -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_model.cmake)
add_custom_command( TARGET opencv_test_${name} POST_BUILD
COMMAND ${CMAKE_COMMAND} -Dmodel=Alexnet -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_model.cmake)
add_custom_command( TARGET opencv_test_${name} POST_BUILD
COMMAND ${CMAKE_COMMAND} -Dmodel=Inception -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_model.cmake)
add_custom_command( TARGET opencv_test_${name} POST_BUILD
COMMAND ${CMAKE_COMMAND} -Dmodel=Enet -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_model.cmake)
add_definitions(-DENABLE_CAFFE_MODEL_TESTS=1)
add_definitions(-DENABLE_CAFFE_ALEXNET_TEST=1)
add_definitions(-DENABLE_TF_INCEPTION_TESTS=1)
add_definitions(-DENABLE_TORCH_ENET_TESTS=1)
endif()
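download_model.cmake below also defines VGG16, voc-fcn32s, and Fcn entries that are not wired up here; hooking one in would mirror the commands above. A sketch for the Fcn entry, assuming the ENABLE_CAFFE_FCN_TEST guard used by the tests:

    # Hypothetical: download the FCN model and enable its guarded test.
    add_custom_command( TARGET opencv_test_${name} POST_BUILD
        COMMAND ${CMAKE_COMMAND} -Dmodel=Fcn -P ${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_model.cmake)
    add_definitions(-DENABLE_CAFFE_FCN_TEST=1)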
# ----------------------------------------------------------------------------
# Torch7 importer of blobs and models, produced by Torch.nn module
# ----------------------------------------------------------------------------
OCV_OPTION(${the_module}_BUILD_TORCH_IMPORTER "Build Torch model importer" ON)
if(${the_module}_BUILD_TORCH_IMPORTER)
message(STATUS "Torch importer has been enabled. To run the tests you have to install Torch "
"('th' executable should be available) "
"and generate testdata using opencv_extra/testdata/dnn/generate_torch_models.py script.")
add_definitions(-DENABLE_TORCH_IMPORTER=1)
ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4702 /wd4127 /wd4267) # suppress warnings in original torch files
if(NOT DEFINED HAVE_TORCH_EXE)
execute_process(COMMAND th ${CMAKE_CURRENT_SOURCE_DIR}/testdata/dnn/torch/torch_nn_echo.lua RESULT_VARIABLE TORCH_EXE_STATUS)
set(HAVE_TORCH_EXE OFF)
if(${TORCH_EXE_STATUS} EQUAL 0)
set(HAVE_TORCH_EXE ON)
endif()
set(HAVE_TORCH_EXE ${HAVE_TORCH_EXE} CACHE INTERNAL "Have torch binary")
endif()
endif()
# ----------------------------------------------------------------------------
# Generating test data for Torch importer
# ----------------------------------------------------------------------------
OCV_OPTION(${the_module}_BUILD_TORCH_TESTS "Build Torch tests (installed torch7 with nn module is required)" ON IF BUILD_TESTS AND ${the_module}_BUILD_TORCH_IMPORTER AND HAVE_TORCH_EXE)
if(${the_module}_BUILD_TORCH_TESTS)
if(NOT DEFINED ENV{OPENCV_TEST_DATA_PATH})
message(FATAL_ERROR "OPENCV_TEST_DATA_PATH environment variable was not specified")
endif()
if(NOT HAVE_TORCH_EXE)
message(FATAL_ERROR "Torch executable \"th\" not found or nn module not found")
endif()
add_custom_command( TARGET opencv_test_${name} POST_BUILD
COMMAND th ${CMAKE_CURRENT_SOURCE_DIR}/testdata/dnn/torch/torch_gen_test_data.lua
WORKING_DIRECTORY $ENV{OPENCV_TEST_DATA_PATH}/dnn/torch )
add_definitions(-DENABLE_TORCH_TESTS=1)
endif()
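The POST_BUILD hook above regenerates the Torch test data on every build; a one-off run at configure time would look like this (a sketch, reusing the pattern of the HAVE_TORCH_EXE probe above):

    # Hypothetical configure-time alternative to the POST_BUILD hook.
    execute_process(COMMAND th ${CMAKE_CURRENT_SOURCE_DIR}/testdata/dnn/torch/torch_gen_test_data.lua
                    WORKING_DIRECTORY $ENV{OPENCV_TEST_DATA_PATH}/dnn/torch)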
@@ -27,10 +27,22 @@ endif()
if(PROTOBUF_FOUND)
# nothing
else()
include(${CMAKE_CURRENT_LIST_DIR}/download_protobuf.cmake)
add_subdirectory(${CMAKE_CURRENT_SOURCE_DIR}/3rdparty/protobuf ${CMAKE_BINARY_DIR}/3rdparty/protobuf)
set(PROTOBUF_CPP_PATH "${OpenCV_BINARY_DIR}/3rdparty/protobuf")
set(PROTOBUF_CPP_ROOT "${PROTOBUF_CPP_PATH}/protobuf-3.1.0")
ocv_download(FILENAME "protobuf-cpp-3.1.0.tar.gz"
HASH "bd5e3eed635a8d32e2b99658633815ef"
URL
"${OPENCV_PROTOBUF_URL}"
"$ENV{OPENCV_PROTOBUF_URL}"
"https://github.com/google/protobuf/releases/download/v3.1.0/"
DESTINATION_DIR "${PROTOBUF_CPP_PATH}"
STATUS res
UNPACK RELATIVE_URL)
if(NOT res)
return()
endif()
set(PROTOBUF_LIBRARIES libprotobuf)
set(PROTOBUF_INCLUDE_DIR ${PROTOBUF_CPP_PATH}/protobuf-3.1.0/src)
set(PROTOBUF_INCLUDE_DIR "${PROTOBUF_CPP_ROOT}/src")
endif()
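Because the GitHub base URL is combined with RELATIVE_URL, the file name is appended to whichever base URL is tried, so OPENCV_PROTOBUF_URL (CMake or environment variable) can redirect the download to a mirror. A sketch with a hypothetical local path:

    # Hypothetical offline mirror that already holds protobuf-cpp-3.1.0.tar.gz;
    # usually passed on the command line: -DOPENCV_PROTOBUF_URL=file:///opt/mirror/protobuf/v3.1.0/
    set(OPENCV_PROTOBUF_URL "file:///opt/mirror/protobuf/v3.1.0/" CACHE STRING "protobuf download mirror")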
if(NOT UPDATE_PROTO_FILES)
@@ -43,14 +55,3 @@ if(NOT UPDATE_PROTO_FILES)
list(APPEND PROTOBUF_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/misc/caffe)
list(APPEND PROTOBUF_INCLUDE_DIR ${CMAKE_CURRENT_SOURCE_DIR}/misc/tensorflow)
endif()
add_definitions(-DHAVE_PROTOBUF=1)
# suppress warnings in autogenerated caffe.pb.* files
ocv_warnings_disable(CMAKE_CXX_FLAGS
-Wunused-parameter -Wundef -Wignored-qualifiers -Wno-enum-compare
-Wdeprecated-declarations
/wd4125 /wd4267 /wd4127 /wd4244 /wd4512 /wd4702
/wd4456 /wd4510 /wd4610 /wd4800
-wd858 -wd2196
)
set(GoogleNet_url "http://dl.caffe.berkeleyvision.org/bvlc_googlenet.caffemodel")
set(GoogleNet_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/bvlc_googlenet.caffemodel")
set(GoogleNet_sha "405fc5acd08a3bb12de8ee5e23a96bec22f08204")
set(VGG16_url "http://www.robots.ox.ac.uk/~vgg/software/very_deep/caffe/VGG_ILSVRC_16_layers.caffemodel")
set(VGG16_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/VGG_ILSVRC_16_layers.caffemodel")
set(voc-fcn32s_url "http://dl.caffe.berkeleyvision.org/fcn32s-heavy-pascal.caffemodel")
set(voc-fcn32s_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/fcn32s-heavy-pascal.caffemodel")
set(Alexnet_url "http://dl.caffe.berkeleyvision.org/bvlc_alexnet.caffemodel")
set(Alexnet_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/bvlc_alexnet.caffemodel")
set(Alexnet_sha "9116a64c0fbe4459d18f4bb6b56d647b63920377")
set(Inception_url "https://github.com/petewarden/tf_ios_makefile_example/raw/master/data/tensorflow_inception_graph.pb")
set(Inception_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/tensorflow_inception_graph.pb")
set(Enet_url "https://www.dropbox.com/sh/dywzk3gyb12hpe5/AABoUwqQGWvClUu27Z1EWeu9a/model-best.net?dl=0")
set(Enet_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/Enet-model-best.net")
set(Fcn_url "http://dl.caffe.berkeleyvision.org/fcn8s-heavy-pascal.caffemodel")
set(Fcn_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/fcn8s-heavy-pascal.caffemodel")
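# Each model is a <name>_url/<name>_dst pair with an optional <name>_sha for
# hash-checked downloads; ${${model}_url} below dereferences twice, so
# -Dmodel=GoogleNet selects ${GoogleNet_url}.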
if(NOT model)
set(model "GoogleNet")
endif()
message(STATUS "Downloading ${${model}_url} to ${${model}_dst}")
if(NOT EXISTS ${${model}_dst})
if(DEFINED ${model}_sha)
file(DOWNLOAD ${${model}_url} ${${model}_dst} SHOW_PROGRESS EXPECTED_HASH SHA1=${${model}_sha} STATUS status_vec)
else()
file(DOWNLOAD ${${model}_url} ${${model}_dst} SHOW_PROGRESS STATUS status_vec)
endif()
list(GET status_vec 0 status)
list(GET status_vec 1 status_msg)
if(status EQUAL 0)
message(STATUS "Ok! ${status_msg}")
else()
message(STATUS "Fail! ${status_msg}")
endif()
endif()
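Extending the table needs only another url/dst pair (the names below are hypothetical, not models used by this commit):

    set(SqueezeNet_url "http://example.com/squeezenet_v1.1.caffemodel")
    set(SqueezeNet_dst "$ENV{OPENCV_TEST_DATA_PATH}/dnn/squeezenet_v1.1.caffemodel")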
set(PROTOBUF_CPP_NAME "libprotobuf")
set(PROTOBUF_CPP_DOWNLOAD_HASH "bd5e3eed635a8d32e2b99658633815ef")
set(PROTOBUF_CPP_PATH "${CMAKE_CURRENT_BINARY_DIR}/3rdparty/protobuf/sources") # /protobuf-3.1.0 subdirectory
set(OPENCV_PROTOBUF_CPP_DOWNLOAD_URL ${OPENCV_PROTOBUF_URL};$ENV{OPENCV_PROTOBUF_URL};https://github.com/google/protobuf/releases/download/)
function(ocv_protobuf_download file ID)
if(DEFINED ${ID}_DOWNLOADED_HASH
AND ${ID}_DOWNLOADED_HASH STREQUAL ${ID}_DOWNLOAD_HASH
AND EXISTS ${${ID}_PATH})
# Files have been downloaded and checked by the previous CMake run
return()
else()
if(EXISTS ${${ID}_PATH})
message(STATUS "${${ID}_NAME}: Removing previous unpacked files: ${${ID}_PATH}")
file(REMOVE_RECURSE ${${ID}_PATH})
endif()
endif()
unset(${ID}_DOWNLOADED_HASH CACHE)
file(MAKE_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/.download)
file(WRITE "${CMAKE_CURRENT_SOURCE_DIR}/.download/.gitignore" "*\n")
ocv_download(PACKAGE ${file}
HASH ${${ID}_DOWNLOAD_HASH}
URL ${OPENCV_${ID}_DOWNLOAD_URL}
DOWNLOAD_DIR ${CMAKE_CURRENT_SOURCE_DIR}/.download)
set(${ID}_ARCHIVE "${DOWNLOAD_PACKAGE_LOCATION}")
ocv_assert(EXISTS "${${ID}_ARCHIVE}")
ocv_assert(NOT EXISTS "${${ID}_PATH}")
file(MAKE_DIRECTORY ${${ID}_PATH})
ocv_assert(EXISTS "${${ID}_PATH}")
file(WRITE "${${ID}_PATH}/.gitignore" "*\n")
message(STATUS "${${ID}_NAME}: Unpacking ${file} to ${${ID}_PATH}...")
execute_process(COMMAND ${CMAKE_COMMAND} -E tar xz "${${ID}_ARCHIVE}"
WORKING_DIRECTORY "${${ID}_PATH}"
RESULT_VARIABLE __result)
if(NOT __result EQUAL 0)
message(FATAL_ERROR "${${ID}_NAME}: Failed to unpack ${ID} archive from ${${ID}_ARCHIVE} to ${${ID}_PATH} with error ${__result}")
endif()
ocv_assert(EXISTS "${${ID}_PATH}")
set(${ID}_DOWNLOADED_HASH "${${ID}_DOWNLOAD_HASH}" CACHE INTERNAL "${ID} hash")
#message(STATUS "${${ID}_NAME}: Successfully downloaded")
endfunction()
ocv_protobuf_download(v3.1.0/protobuf-cpp-3.1.0.tar.gz PROTOBUF_CPP)
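This deleted helper hand-rolls what the new ocv_download(... UNPACK) call in OpenCVFindLibProtobuf.cmake now does in one step. An approximation in plain CMake, with a hypothetical cache path:

    set(_archive "${CMAKE_BINARY_DIR}/downloads/protobuf-cpp-3.1.0.tar.gz") # hypothetical location
    file(DOWNLOAD
         "https://github.com/google/protobuf/releases/download/v3.1.0/protobuf-cpp-3.1.0.tar.gz"
         "${_archive}"
         EXPECTED_HASH MD5=bd5e3eed635a8d32e2b99658633815ef
         STATUS _status SHOW_PROGRESS)
    list(GET _status 0 _code)
    if(_code EQUAL 0)
      file(MAKE_DIRECTORY "${PROTOBUF_CPP_PATH}")
      execute_process(COMMAND ${CMAKE_COMMAND} -E tar xzf "${_archive}"
                      WORKING_DIRECTORY "${PROTOBUF_CPP_PATH}")
    endif()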
@@ -74,15 +74,13 @@ TEST(Test_Caffe, read_googlenet)
}
}
#if defined(ENABLE_CAFFE_MODEL_TESTS)
#if defined(ENABLE_CAFFE_ALEXNET_TEST) //AlexNet is disabled now
TEST(Reproducibility_AlexNet, Accuracy)
{
Net net;
{
Ptr<Importer> importer = createCaffeImporter(_tf("bvlc_alexnet.prototxt"), _tf("bvlc_alexnet.caffemodel"));
const string proto = findDataFile("dnn/bvlc_alexnet.prototxt", false);
const string model = findDataFile("dnn/bvlc_alexnet.caffemodel", false);
Ptr<Importer> importer = createCaffeImporter(proto, model);
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
@@ -104,15 +102,13 @@ TEST(Reproducibility_AlexNet, Accuracy)
normAssert(ref, out);
}
#endif
#if defined(ENABLE_CAFFE_FCN_TEST)
TEST(Reproducibility_FCN, Accuracy)
{
Net net;
{
Ptr<Importer> importer = createCaffeImporter(_tf("fcn8s-heavy-pascal.prototxt"), _tf("fcn8s-heavy-pascal.caffemodel"));
const string proto = findDataFile("dnn/fcn8s-heavy-pascal.prototxt", false);
const string model = findDataFile("dnn/fcn8s-heavy-pascal.caffemodel", false);
Ptr<Importer> importer = createCaffeImporter(proto, model);
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
@@ -135,8 +131,4 @@ TEST(Reproducibility_FCN, Accuracy)
normAssert(ref, out);
}
#endif
#endif
}
@@ -39,7 +39,6 @@
//
//M*/
#if defined(ENABLE_CAFFE_MODEL_TESTS)
#include "test_precomp.hpp"
#include "npy_blob.hpp"
#include <opencv2/core/ocl.hpp>
@@ -61,7 +60,9 @@ static void launchGoogleNetTest()
{
Net net;
{
Ptr<Importer> importer = createCaffeImporter(_tf("bvlc_googlenet.prototxt"), _tf("bvlc_googlenet.caffemodel"));
const string proto = findDataFile("dnn/bvlc_googlenet.prototxt", false);
const string model = findDataFile("dnn/bvlc_googlenet.caffemodel", false);
Ptr<Importer> importer = createCaffeImporter(proto, model);
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
@@ -91,4 +92,3 @@ OCL_TEST(Reproducibility_GoogLeNet, Accuracy)
}
}
#endif
@@ -9,8 +9,6 @@
Test for Tensorflow models loading
*/
#if defined(ENABLE_TF_INCEPTION_TESTS)
#include "test_precomp.hpp"
#include "npy_blob.hpp"
@@ -30,7 +28,8 @@ TEST(Test_TensorFlow, read_inception)
{
Net net;
{
Ptr<Importer> importer = createTensorflowImporter(_tf("tensorflow_inception_graph.pb"));
const string model = findDataFile("dnn/tensorflow_inception_graph.pb", false);
Ptr<Importer> importer = createTensorflowImporter(model);
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
@@ -54,7 +53,8 @@ TEST(Test_TensorFlow, inception_accuracy)
{
Net net;
{
Ptr<Importer> importer = createTensorflowImporter(_tf("tensorflow_inception_graph.pb"));
const string model = findDataFile("dnn/tensorflow_inception_graph.pb", false);
Ptr<Importer> importer = createTensorflowImporter(model);
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
@@ -76,5 +76,3 @@ TEST(Test_TensorFlow, inception_accuracy)
}
}
#endif
@@ -39,8 +39,8 @@
//
//M*/
#if defined(ENABLE_TORCH_IMPORTER) && ENABLE_TORCH_IMPORTER
#if defined(ENABLE_TORCH_TESTS) && ENABLE_TORCH_TESTS
#ifdef ENABLE_TORCH_IMPORTER
#include "test_precomp.hpp"
#include "npy_blob.hpp"
@@ -158,13 +158,12 @@ TEST(Torch_Importer, net_cadd_table)
runTorchNet("net_cadd_table");
}
#if defined(ENABLE_TORCH_ENET_TESTS)
TEST(Torch_Importer, ENet_accuracy)
{
Net net;
{
Ptr<Importer> importer = createTorchImporter(_tf("Enet-model-best.net", false));
const string model = findDataFile("dnn/Enet-model-best.net", false);
Ptr<Importer> importer = createTorchImporter(model, true);
ASSERT_TRUE(importer != NULL);
importer->populateNet(net);
}
@@ -182,8 +181,6 @@ TEST(Torch_Importer, ENet_accuracy)
normAssert(ref, out);
}
#endif
}
#endif
#endif
name: "test_Convolution"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "Convolution"
convolution_param
{
group: 3
num_output: 12
pad_h: 0
pad_w: 1
kernel_h: 4
kernel_w: 5
stride_h: 2
stride_w: 3
weight_filler{
type: 'uniform'
min: -1
max: 1
}
bias_filler {
type: 'uniform'
min: -1
max: 1
}
}
name: "output"
bottom: "input"
top: "output"
}
\ No newline at end of file
name: "test_Convolution"
input: "input"
input_dim: 2
input_dim: 12
input_dim: 36
input_dim: 37
layer {
type: "Deconvolution"
convolution_param
{
group: 3
num_output: 12
pad_h: 0
pad_w: 1
kernel_h: 4
kernel_w: 5
stride_h: 2
stride_w: 3
weight_filler{
type: 'uniform'
min: -1
max: 1
}
bias_filler {
type: 'uniform'
min: -1
max: 1
}
}
name: "output"
bottom: "input"
top: "output"
}
\ No newline at end of file
name: "test_InnerProduct"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "InnerProduct"
inner_product_param
{
axis: 3
num_output: 2
weight_filler{
type: 'uniform'
min: -1
max: 1
}
bias_filler {
type: 'uniform'
min: -1
max: 1
}
}
name: "output"
bottom: "input"
top: "output"
}
\ No newline at end of file
name: "test_LRN_channels"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "LRN"
lrn_param {
norm_region: ACROSS_CHANNELS;
local_size: 5
alpha: 1.1
beta: 0.75
}
name: "output"
bottom: "input"
top: "output"
}
name: "test_LRN_spatial"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "LRN"
lrn_param {
norm_region: WITHIN_CHANNEL;
local_size: 5
alpha: 0.9
beta: 0.75
}
name: "output"
bottom: "input"
top: "output"
}
name: "test_MVN_channels"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "MVN"
mvn_param {
eps: 0.1
across_channels: false
normalize_variance: true
}
name: "output"
bottom: "input"
top: "output"
}
name: "test_Pooling_max"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "Pooling"
pooling_param
{
pool: AVE
pad_h: 2
pad_w: 1
kernel_h: 3
kernel_w: 5
stride_h: 2
stride_w: 1
}
name: "output"
bottom: "input"
top: "output"
}
\ No newline at end of file
name: "test_Pooling_max"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "Pooling"
pooling_param
{
pool: MAX
pad_h: 2
pad_w: 1
kernel_h: 3
kernel_w: 5
stride_h: 2
stride_w: 1
}
name: "output"
bottom: "input"
top: "output"
}
\ No newline at end of file
name: "test_Softmax"
input: "input"
input_dim: 2
input_dim: 6
input_dim: 75
input_dim: 113
layer {
type: "Softmax"
name: "output"
bottom: "input"
top: "output"
}
\ No newline at end of file
name: "test_reshape_splice_split"
input: "input"
layer{
type: "Split"
name: "dummy_split"
bottom: "input"
top: "dummy_split_0"
top: "dummy_split_1"
}
layer{
type: "Slice"
name: "dummy_slice_0"
bottom: "dummy_split_0"
slice_param{
slice_point: 1
slice_point: 2
}
top: "dummy_slice_0_0"
top: "dummy_slice_0_1"
top: "dummy_slice_0_2"
}
layer{
type: "Slice"
name: "dummy_slice_1"
bottom: "dummy_split_1"
slice_param{
slice_point: 1
slice_point: 2
}
top: "dummy_slice_1_0"
top: "dummy_slice_1_1"
top: "dummy_slice_1_2"
}
layer{
type: "Sigmoid"
name: "alter_sliced_split"
bottom: "dummy_slice_1_2"
top: "dummy_slice_1_2"
}
layer{
type: "Concat"
name: "dummy_concat"
bottom: "dummy_slice_0_0"
bottom: "dummy_slice_1_1"
bottom: "dummy_slice_0_2"
top: "dummy_concat"
}
layer{
type: "Reshape"
name: "dummy_reshape"
bottom: "dummy_concat"
reshape_param{
shape{
dim: 0
dim: 1
dim: 1
dim: -1
dim: 1
}
axis: 1
num_axes: 1
}
top: "dummy_reshape"
}
layer{
type: "Flatten"
name: "dummy_reshape_undo"
bottom: "dummy_reshape"
top: "dummy_reshape_undo"
}
layer{
type: "Split"
name: "output"
bottom: "dummy_reshape_undo"
top: "output"
}
\ No newline at end of file
# coding: utf-8
import sys, os, glob
CAFFE_ROOT = "/home/vitaliy/opencv/caffe/"
sys.path.insert(0, CAFFE_ROOT + 'python')
import numpy as np
import caffe
#import cv2
def get_cafe_output(inp_blob, proto_name, caffemodel_name):
caffe.set_mode_cpu()
net = caffe.Net(proto_name, caffe.TEST)
#net.blobs['input'].reshape(*inp_blob.shape)
net.blobs['input'].data[...] = inp_blob
net.forward()
out_blob = net.blobs['output'].data[...];
if net.params.get('output'):
print "Params count:", len(net.params['output'])
net.save(caffemodel_name)
return out_blob
if __name__ == '__main__':
proto_filenames = glob.glob("layer_*.prototxt")
for proto_filename in proto_filenames:
proto_filename = os.path.basename(proto_filename)
proto_basename = os.path.splitext(proto_filename)[0]
cfmod_basename = proto_basename + ".caffemodel"
npy_filename = proto_basename + ".npy"
inp_blob_name = proto_basename + ".input.npy"
inp_blob = np.load(inp_blob_name) if os.path.exists(inp_blob_name) else np.load('blob.npy')
print "\nGenerate data for:"
print cfmod_basename, inp_blob.shape
out_blob = get_cafe_output(inp_blob, proto_filename, cfmod_basename)
print out_blob.shape
np.save(npy_filename, out_blob)
require 'nn'
function fill_net(net)
if net.modules then
for i = 1, #net.modules do
fill_net(net.modules[i])
end
end
if net.weight then
net.weight = torch.rand(net.weight:size())
end
if net.bias then
net.bias = torch.rand(net.bias:size())
end
if net.train then
net.train = 0
end
end
function save(net, input, label)
fill_net(net)
output = net:forward(input)
--torch.save(label .. '_net.dat', net)
torch.save(label .. '_net.txt', net, 'ascii')
--torch.save(label .. '_input.dat', input)
torch.save(label .. '_input.txt', input, 'ascii')
--torch.save(label .. '_output.dat', output)
torch.save(label .. '_output.txt', output, 'ascii')
return net
end
local net_simple = nn.Sequential()
net_simple:add(nn.ReLU())
net_simple:add(nn.SpatialConvolution(3,64, 11,7, 3,4, 3,2))
net_simple:add(nn.SpatialMaxPooling(4,5, 3,2, 1,2))
net_simple:add(nn.Sigmoid())
save(net_simple, torch.Tensor(2, 3, 25, 35), 'net_simple')
local net_pool_max = nn.Sequential()
net_pool_max:add(nn.SpatialMaxPooling(4,5, 3,2, 1,2):ceil()) --TODO: add ceil and floor modes
local net = save(net_pool_max, torch.rand(2, 3, 50, 30), 'net_pool_max')
torch.save('net_pool_max_output_2.txt', net.modules[1].indices - 1, 'ascii')
local net_pool_ave = nn.Sequential()
net_pool_ave:add(nn.SpatialAveragePooling(4,5, 2,1, 1,2))
save(net_pool_ave, torch.rand(2, 3, 50, 30), 'net_pool_ave')
local net_conv = nn.Sequential()
net_conv:add(nn.SpatialConvolution(3,64, 11,7, 3,4, 3,2))
save(net_conv, torch.rand(1, 3, 50, 60), 'net_conv')
local net_reshape = nn.Sequential()
net_reshape:add(nn.Reshape(5, 4, 3, 2))
save(net_reshape, torch.rand(2, 3, 4, 5), 'net_reshape')
local net_reshape_batch = nn.Sequential()
net_reshape_batch:add(nn.Reshape(5, 4, 3, true))
save(net_reshape_batch, torch.rand(2, 3, 4, 5), 'net_reshape_batch')
save(nn.Linear(7, 3), torch.rand(13, 7), 'net_linear_2d')
local net_parallel = nn.Parallel(4, 2)
net_parallel:add(nn.Sigmoid())
net_parallel:add(nn.Tanh())
save(net_parallel, torch.rand(2, 6, 4, 2), 'net_parallel')
local net_concat = nn.Concat(2)
net_concat:add(nn.ReLU())
net_concat:add(nn.Tanh())
net_concat:add(nn.Sigmoid())
save(net_concat, torch.rand(2, 6, 4, 3) - 0.5, 'net_concat')
local net_deconv = nn.Sequential()
net_deconv:add(nn.SpatialFullConvolution(3, 9, 4, 5, 1, 2, 0, 1, 0, 1))
save(net_deconv, torch.rand(2, 3, 4, 3) - 0.5, 'net_deconv')
local net_batch_norm = nn.Sequential()
net_batch_norm:add(nn.SpatialBatchNormalization(4, 1e-3))
save(net_batch_norm, torch.rand(1, 4, 5, 6) - 0.5, 'net_batch_norm')
local net_prelu = nn.Sequential()
net_prelu:add(nn.PReLU(5))
save(net_prelu, torch.rand(1, 5, 40, 50) - 0.5, 'net_prelu')
local net_cadd_table = nn.Sequential()
local sum = nn.ConcatTable()
sum:add(nn.Identity()):add(nn.Identity())
net_cadd_table:add(sum):add(nn.CAddTable())
save(net_cadd_table, torch.rand(1, 5, 40, 50) - 0.5, 'net_cadd_table')
\ No newline at end of file
require 'nn'
print("nn module exists!")
\ No newline at end of file
set(the_description "Contributed/Experimental Algorithms for Salient 2D Features Detection")
ocv_define_module(xfeatures2d opencv_core opencv_imgproc opencv_features2d opencv_calib3d OPTIONAL opencv_shape opencv_cudaarithm WRAP python java)
include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_vgg.cmake)
include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/download_boostdesc.cmake)
set(DOWNLOAD_DIR "${OpenCV_BINARY_DIR}/downloads/xfeatures2d")
download_boost_descriptors("${DOWNLOAD_DIR}" boost_status)
download_vgg_descriptors("${DOWNLOAD_DIR}" vgg_status)
if(NOT boost_status OR NOT vgg_status)
ocv_module_disable(xfeatures2d)
endif()
ocv_module_include_directories("${DOWNLOAD_DIR}")
function(download_boost_descriptors dst_dir status_var)
set(OPENCV_3RDPARTY_COMMIT "34e4206aef44d50e6bbcd0ab06354b52e7466d26")
set(FILE_HASH_BOOSTDESC_BGM "0ea90e7a8f3f7876d450e4149c97c74f")
set(FILE_HASH_BOOSTDESC_BGM_BI "232c966b13651bd0e46a1497b0852191")
set(FILE_HASH_BOOSTDESC_BGM_HD "324426a24fa56ad9c5b8e3e0b3e5303e")
set(FILE_HASH_BOOSTDESC_BINBOOST_064 "202e1b3e9fec871b04da31f7f016679f")
set(FILE_HASH_BOOSTDESC_BINBOOST_128 "98ea99d399965c03d555cef3ea502a0b")
set(FILE_HASH_BOOSTDESC_BINBOOST_256 "e6dcfa9f647779eb1ce446a8d759b6ea")
set(FILE_HASH_BOOSTDESC_LBGM "0ae0675534aa318d9668f2a179c2a052")
set(ids BGM BGM_BI BGM_HD BINBOOST_064 BINBOOST_128 BINBOOST_256 LBGM)
set(name_BGM boostdesc_bgm.i)
set(name_BGM_BI boostdesc_bgm_bi.i)
set(name_BGM_HD boostdesc_bgm_hd.i)
set(name_BINBOOST_064 boostdesc_binboost_064.i)
set(name_BINBOOST_128 boostdesc_binboost_128.i)
set(name_BINBOOST_256 boostdesc_binboost_256.i)
set(name_LBGM boostdesc_lbgm.i)
set(hash_BGM "0ea90e7a8f3f7876d450e4149c97c74f")
set(hash_BGM_BI "232c966b13651bd0e46a1497b0852191")
set(hash_BGM_HD "324426a24fa56ad9c5b8e3e0b3e5303e")
set(hash_BINBOOST_064 "202e1b3e9fec871b04da31f7f016679f")
set(hash_BINBOOST_128 "98ea99d399965c03d555cef3ea502a0b")
set(hash_BINBOOST_256 "e6dcfa9f647779eb1ce446a8d759b6ea")
set(hash_LBGM "0ae0675534aa318d9668f2a179c2a052")
set(BOOSTDESC_DOWNLOAD_URL ${OPENCV_CONTRIB_BOOSTDESC_URL};$ENV{OPENCV_CONTRIB_BOOSTDESC_URL};https://raw.githubusercontent.com/opencv/opencv_3rdparty/${OPENCV_3RDPARTY_COMMIT}/)
function(boostdesc_download file id)
message(STATUS "Check contents of ${file} ...")
ocv_download(PACKAGE ${file}
HASH ${FILE_HASH_${id}}
URL ${BOOSTDESC_DOWNLOAD_URL}
DESTINATION_DIR ${CMAKE_CURRENT_LIST_DIR}/../src
DOWNLOAD_DIR ${CMAKE_CURRENT_LIST_DIR}/.download)
set(${status_var} TRUE PARENT_SCOPE)
foreach(id ${ids})
ocv_download(FILENAME ${name_${id}}
HASH ${hash_${id}}
URL
"${OPENCV_BOOSTDESC_URL}"
"$ENV{OPENCV_BOOSTDESC_URL}"
"https://raw.githubusercontent.com/opencv/opencv_3rdparty/${OPENCV_3RDPARTY_COMMIT}/"
DESTINATION_DIR ${dst_dir}
RELATIVE_URL
STATUS res)
if(NOT res)
set(${status_var} FALSE PARENT_SCOPE)
endif()
endforeach()
endfunction()
boostdesc_download(boostdesc_bgm.i BOOSTDESC_BGM)
boostdesc_download(boostdesc_bgm_bi.i BOOSTDESC_BGM_BI)
boostdesc_download(boostdesc_bgm_hd.i BOOSTDESC_BGM_HD)
boostdesc_download(boostdesc_binboost_064.i BOOSTDESC_BINBOOST_064)
boostdesc_download(boostdesc_binboost_128.i BOOSTDESC_BINBOOST_128)
boostdesc_download(boostdesc_binboost_256.i BOOSTDESC_BINBOOST_256)
boostdesc_download(boostdesc_lbgm.i BOOSTDESC_LBGM)
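Call-site sketch for the reworked helper (mirroring the xfeatures2d CMakeLists hunk above): the status output lets the module disable itself when any descriptor fails to download.

    download_boost_descriptors("${OpenCV_BINARY_DIR}/downloads/xfeatures2d" boost_status)
    if(NOT boost_status)
      ocv_module_disable(xfeatures2d)
    endif()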
function(download_vgg_descriptors dst_dir status_var)
set(OPENCV_3RDPARTY_COMMIT "fccf7cd6a4b12079f73bbfb21745f9babcd4eb1d")
set(FILE_HASH_VGG_48 "e8d0dcd54d1bcfdc29203d011a797179")
set(FILE_HASH_VGG_64 "7126a5d9a8884ebca5aea5d63d677225")
set(FILE_HASH_VGG_80 "7cd47228edec52b6d82f46511af325c5")
set(FILE_HASH_VGG_120 "151805e03568c9f490a5e3a872777b75")
set(ids VGG_48 VGG_64 VGG_80 VGG_120)
set(name_VGG_48 "vgg_generated_48.i")
set(name_VGG_64 "vgg_generated_64.i")
set(name_VGG_80 "vgg_generated_80.i")
set(name_VGG_120 "vgg_generated_120.i")
set(hash_VGG_48 "e8d0dcd54d1bcfdc29203d011a797179")
set(hash_VGG_64 "7126a5d9a8884ebca5aea5d63d677225")
set(hash_VGG_80 "7cd47228edec52b6d82f46511af325c5")
set(hash_VGG_120 "151805e03568c9f490a5e3a872777b75")
set(VGG_DOWNLOAD_URL ${OPENCV_CONTRIB_VGG_URL};$ENV{OPENCV_CONTRIB_VGG_URL};https://raw.githubusercontent.com/opencv/opencv_3rdparty/${OPENCV_3RDPARTY_COMMIT}/)
function(vgg_download file id)
message(STATUS "Check contents of ${file} ...")
ocv_download(PACKAGE ${file}
HASH ${FILE_HASH_${id}}
URL ${VGG_DOWNLOAD_URL}
DESTINATION_DIR ${CMAKE_CURRENT_LIST_DIR}/../src
DOWNLOAD_DIR ${CMAKE_CURRENT_LIST_DIR}/.download)
set(${status_var} TRUE PARENT_SCOPE)
foreach(id ${ids})
ocv_download(FILENAME ${name_${id}}
HASH ${hash_${id}}
URL
"${OPENCV_VGGDESC_URL}"
"$ENV{OPENCV_VGGDESC_URL}"
"https://raw.githubusercontent.com/opencv/opencv_3rdparty/${OPENCV_3RDPARTY_COMMIT}/"
DESTINATION_DIR "${dst_dir}"
RELATIVE_URL
STATUS res)
if(NOT res)
set(${status_var} FALSE PARENT_SCOPE)
endif()
endforeach()
endfunction()
vgg_download(vgg_generated_48.i VGG_48)
vgg_download(vgg_generated_64.i VGG_64)
vgg_download(vgg_generated_80.i VGG_80)
vgg_download(vgg_generated_120.i VGG_120)