Commit 44a8e818 authored by Vitaliy Lyudvichenko's avatar Vitaliy Lyudvichenko

Add native OpenBLAS and Atlas BLAS support and fix some warnings

parent 4cb108ef
...@@ -15,55 +15,35 @@ ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4701) ...@@ -15,55 +15,35 @@ ocv_warnings_disable(CMAKE_CXX_FLAGS /wd4701)
# Resolve libprotobuf dependency # Resolve libprotobuf dependency
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
include(cmake/OpenCVFindLibProtobuf.cmake) include(cmake/OpenCVFindLibProtobuf.cmake)
ocv_glob_module_sources(${PROTOBUF_SRCS} ${PROTOBUF_HDRS})
ocv_source_group("Src\\protobuf" FILES ${PROTOBUF_SRCS} ${PROTOBUF_HDRS}) ocv_source_group("Src\\protobuf" FILES ${PROTOBUF_SRCS} ${PROTOBUF_HDRS})
ocv_module_include_directories(include ${PROTOBUF_INCLUDE_DIR}) ocv_module_include_directories(include ${PROTOBUF_INCLUDE_DIR})
# ----------------------------------------------------------------------------
# Try to find BLAS libraries
# ----------------------------------------------------------------------------
OCV_OPTION(${the_module}_WITH_BLAS "Use external BLAS library to speedup processing" OFF)
include(cmake/OpenCVFindCBLAS.cmake)
ocv_glob_module_sources(${PROTOBUF_SRCS} ${PROTOBUF_HDRS} ${CBLAS_H_PATH})
ocv_create_module(${PROTOBUF_LIBRARIES}) ocv_create_module(${PROTOBUF_LIBRARIES})
ocv_add_samples() ocv_add_samples()
ocv_add_accuracy_tests() ocv_add_accuracy_tests()
ocv_add_perf_tests() ocv_add_perf_tests()
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
# Find BLAS library # Link BLAS
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
OCV_OPTION(${the_module}_WITH_BLAS "Use external BLAS library to speedup processing" OFF) if(${the_module}_WITH_BLAS AND HAVE_BLAS)
if(${the_module}_WITH_BLAS) add_definitions(-DHAVE_CBLAS=1)
set(BLAS_CBLAS_H "cblas.h")
include(cmake/OpenCVFindMKL.cmake)
if(MKL_FOUND)
set(BLAS_INCLUDE_DIR ${MKL_INCLUDE_DIRS})
set(BLAS_CBLAS_H "mkl_cblas.h" )
set(BLAS_LIBRARIES ${MKL_LIBRARIES} )
set(BLAS_BINARIES "" )
add_definitions(${MKL_CXX_FLAGS})
endif()
set(BLAS_PREF ${the_module}_BLAS)
set(${BLAS_PREF}_INCLUDE_DIR ${BLAS_INCLUDE_DIR} CACHE PATH "Path to BLAS include dir")
set(${BLAS_PREF}_CBLAS_H ${BLAS_CBLAS_H} CACHE STRING "Name of cblas.h")
set(${BLAS_PREF}_LIBRARIES ${BLAS_LIBRARIES} CACHE FILEPATH "Path to BLAS libraries that will be linked with ${the_module} module")
set(${BLAS_PREF}_BINARIES ${BLAS_BINARIES} CACHE FILEPATH "Path to BLAS binaries (.so, .dll) that will be installed with ${the_module} module")
set(CBLAS_H ${${the_module}_BLAS_INCLUDE_DIR}/${${BLAS_PREF}_CBLAS_H})
if(${BLAS_PREF}_INCLUDE_DIR AND NOT EXISTS ${CBLAS_H})
message(WARNING "cblas.h at \"${CBLAS_H}\" not found")
endif()
ocv_module_include_directories(${${the_module}_BLAS_INCLUDE_DIR}) ocv_module_include_directories(${${the_module}_BLAS_INCLUDE_DIR})
list(APPEND OPENCV_MODULE_${the_module}_DEPS_EXT ${${the_module}_BLAS_LIBRARIES}) ocv_add_dependencies(${the_module} ${${the_module}_BLAS_LIBRARIES})
target_link_libraries(${the_module} ${${the_module}_BLAS_LIBRARIES})
add_definitions(-DHAVE_CBLAS)
message(CMAKE_CURRENT_BINARY_DIR=${CMAKE_CURRENT_BINARY_DIR})
add_custom_command(TARGET ${the_module} PRE_BUILD #OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/cblas.h
COMMAND ${CMAKE_COMMAND} ARGS -E echo \"\#include <${${BLAS_PREF}_CBLAS_H}>\" > ${CMAKE_CURRENT_BINARY_DIR}/cblas.h
COMMENT "Adding proxy cblas.h header")
if(${the_module}_BLAS_BINARIES) if(${the_module}_BLAS_BINARIES)
ocv_install_target(${the_module} EXPORT ${the_module}_BLAS_BINARIES ocv_install_target(${the_module} EXPORT ${the_module}_BLAS_BINARIES
RUNTIME DESTINATION ${OPENCV_BIN_INSTALL_PATH} COMPONENT libs) RUNTIME DESTINATION ${OPENCV_BIN_INSTALL_PATH} COMPONENT libs)
endif() endif()
else()
add_definitions(-DHAVE_CBLAS=0)
endif() endif()
# ---------------------------------------------------------------------------- # ----------------------------------------------------------------------------
......
#COPYRIGHT
#
#All contributions by the University of California:
#Copyright (c) 2014, 2015, The Regents of the University of California (Regents)
#All rights reserved.
#
#All other contributions:
#Copyright (c) 2014, 2015, the respective contributors
#All rights reserved.
#
#Caffe uses a shared copyright model: each contributor holds copyright over
#their contributions to Caffe. The project versioning records all such
#contribution and copyright details. If a contributor wants to further mark
#their specific copyright on a particular contribution, they should indicate
#their copyright solely in the commit message of the change when it is
#committed.
#
#LICENSE
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
#ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
#ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#CONTRIBUTION AGREEMENT
#
#By contributing to the BVLC/caffe repository through pull-request, comment,
#or otherwise, the contributor releases their content to the
#license and copyright terms herein.
# Find the Atlas (and Lapack) libraries
#
# The following variables are optionally searched for defaults
# Atlas_ROOT_DIR: Base directory where all Atlas components are found
#
# The following are set after configuration is done:
# Atlas_FOUND
# Atlas_INCLUDE_DIRS
# Atlas_LIBRARIES
# Atlas_LIBRARY_DIRS
# ---- Search locations -------------------------------------------------------
# Header candidates: distro-specific atlas directories first, then the
# user-supplied Atlas_ROOT_DIR environment variable (root and include/).
# Order matters: find_path() returns the first match.
set(Atlas_INCLUDE_SEARCH_PATHS
    /usr/include/atlas
    /usr/include/atlas-base
    $ENV{Atlas_ROOT_DIR}
    $ENV{Atlas_ROOT_DIR}/include)

# Library candidates mirror the header layout (root and lib/ under
# Atlas_ROOT_DIR, plus the usual distro directories).
set(Atlas_LIB_SEARCH_PATHS
    /usr/lib/atlas
    /usr/lib/atlas-base
    $ENV{Atlas_ROOT_DIR}
    $ENV{Atlas_ROOT_DIR}/lib)
# Locate the Atlas headers (CBLAS and CLAPACK interfaces).
find_path(Atlas_CBLAS_INCLUDE_DIR NAMES cblas.h PATHS ${Atlas_INCLUDE_SEARCH_PATHS})
find_path(Atlas_CLAPACK_INCLUDE_DIR NAMES clapack.h PATHS ${Atlas_INCLUDE_SEARCH_PATHS})

# Locate the libraries; threaded variants (pt* / *_r) are listed first so they
# are preferred when present.
find_library(Atlas_CBLAS_LIBRARY NAMES ptcblas_r ptcblas cblas_r cblas PATHS ${Atlas_LIB_SEARCH_PATHS})
find_library(Atlas_BLAS_LIBRARY NAMES atlas_r atlas PATHS ${Atlas_LIB_SEARCH_PATHS})
find_library(Atlas_LAPACK_LIBRARY NAMES alapack_r alapack lapack_atlas PATHS ${Atlas_LIB_SEARCH_PATHS})

# Every entry below must be resolved for Atlas to be considered found.
set(LOOKED_FOR
  Atlas_CBLAS_INCLUDE_DIR
  Atlas_CLAPACK_INCLUDE_DIR

  Atlas_CBLAS_LIBRARY
  Atlas_BLAS_LIBRARY
  Atlas_LAPACK_LIBRARY
)

include(FindPackageHandleStandardArgs)
# Sets ATLAS_FOUND (uppercase; newer CMake also sets Atlas_FOUND) when all of
# the LOOKED_FOR variables are valid.
find_package_handle_standard_args(Atlas DEFAULT_MSG ${LOOKED_FOR})

if(ATLAS_FOUND)
  # Keep the historical singular name for existing consumers, and also provide
  # the plural form that the module header documents.
  set(Atlas_INCLUDE_DIR ${Atlas_CBLAS_INCLUDE_DIR} ${Atlas_CLAPACK_INCLUDE_DIR})
  set(Atlas_INCLUDE_DIRS ${Atlas_INCLUDE_DIR})
  # Link order follows upstream Caffe: lapack, then cblas, then core atlas.
  set(Atlas_LIBRARIES ${Atlas_LAPACK_LIBRARY} ${Atlas_CBLAS_LIBRARY} ${Atlas_BLAS_LIBRARY})
  mark_as_advanced(${LOOKED_FOR})
  message(STATUS "Found Atlas (include: ${Atlas_CBLAS_INCLUDE_DIR}, library: ${Atlas_BLAS_LIBRARY})")
endif()
\ No newline at end of file
# - Find the MKL libraries
# Modified from Armadillo's ARMA_FindMKL.cmake
# This module defines
# MKL_INCLUDE_DIR, the directory for the MKL headers
# MKL_LIB_DIR, the directory for the MKL library files
# MKL_COMPILER_LIB_DIR, the directory for the MKL compiler library files
# MKL_LIBRARIES, the libraries needed to use Intel's implementation of BLAS & LAPACK.
# MKL_FOUND, If false, do not try to use MKL; if true, the macro definition USE_MKL is added.
# Set the include path
# TODO: what if MKL is not installed in /opt/intel/mkl?
# try to find at /opt/intel/mkl
# in windows, try to find MKL at C:/Program Files (x86)/Intel/Composer XE/mkl
# Default MKL root. An MKLROOT_PATH environment variable, when defined on
# Windows, takes precedence over the conventional install prefix; on other
# platforms the stock /opt/intel location is cached directly.
if(WIN32)
  if(NOT DEFINED ENV{MKLROOT_PATH})
    set(MKLROOT_PATH "C:/Program Files (x86)/IntelSWTools/compilers_and_libraries/windows" CACHE PATH "Where the MKL are stored")
  endif()
else()
  set(MKLROOT_PATH "/opt/intel" CACHE PATH "Where the MKL are stored")
endif()
# Presence probe: MKL is considered installed when <root>/mkl exists.
# On 64-bit pointer platforms the intel64 layout is selected; the ILP64
# (64-bit integer) interface is enabled only when Armadillo reports
# long-long BLAS integers.
if(EXISTS ${MKLROOT_PATH}/mkl)
  set(MKL_FOUND TRUE)
  message("MKL is found at ${MKLROOT_PATH}/mkl")
  if(CMAKE_SIZEOF_VOID_P EQUAL 8)
    set(USE_MKL_64BIT On)
    if(ARMADILLO_FOUND AND ARMADILLO_BLAS_LONG_LONG)
      set(USE_MKL_64BIT_LIB On)
      add_definitions(-DMKL_ILP64)
      message("MKL is linked against ILP64 interface ... ")
    endif()
  else()
    set(USE_MKL_64BIT Off)
  endif()
else()
  set(MKL_FOUND FALSE)
  message("MKL is NOT found ... ")
endif()
# Assemble include dir, library dirs and the MKL link line.
# Layer order follows Intel's documented scheme: interface -> threading -> core.
if(MKL_FOUND)
  set(MKL_INCLUDE_DIR "${MKLROOT_PATH}/mkl/include")
  add_definitions(-DUSE_MKL)

  if(USE_MKL_64BIT)
    set(MKL_LIB_DIR "${MKLROOT_PATH}/mkl/lib/intel64")
    set(MKL_COMPILER_LIB_DIR "${MKLROOT_PATH}/compiler/lib/intel64"
                             "${MKLROOT_PATH}/lib/intel64")
    # Interface layer: ILP64 (64-bit ints) vs LP64 (32-bit ints). The library
    # name is the same on Windows and Unix.
    if(USE_MKL_64BIT_LIB)
      list(APPEND MKL_LIBRARIES mkl_intel_ilp64)
    else()
      list(APPEND MKL_LIBRARIES mkl_intel_lp64)
    endif()
  else()
    set(MKL_LIB_DIR "${MKLROOT_PATH}/mkl/lib/ia32")
    set(MKL_COMPILER_LIB_DIR "${MKLROOT_PATH}/compiler/lib/ia32"
                             "${MKLROOT_PATH}/lib/ia32")
    # 32-bit interface library differs between platforms (cdecl on Windows).
    if(WIN32)
      list(APPEND MKL_LIBRARIES mkl_intel_c)
    else()
      list(APPEND MKL_LIBRARIES mkl_intel)
    endif()
  endif()

  # Threading layer and core, plus the Intel OpenMP runtime on Windows.
  if(WIN32)
    list(APPEND MKL_LIBRARIES mkl_intel_thread mkl_core libiomp5md)
  else()
    list(APPEND MKL_LIBRARIES mkl_gnu_thread mkl_core)
  endif()
endif()
# Report results and export the directories. NOTE(review): these are
# directory-scoped commands — every target declared after this module is
# included inherits the MKL include and link directories.
if(MKL_FOUND)
  if(NOT MKL_FIND_QUIETLY)
    message(STATUS "Found MKL libraries: ${MKL_LIBRARIES}")
    message(STATUS "MKL_INCLUDE_DIR: ${MKL_INCLUDE_DIR}")
    message(STATUS "MKL_LIB_DIR: ${MKL_LIB_DIR}")
    message(STATUS "MKL_COMPILER_LIB_DIR: ${MKL_COMPILER_LIB_DIR}")
  endif()
  include_directories(${MKL_INCLUDE_DIR})
  link_directories(${MKL_LIB_DIR} ${MKL_COMPILER_LIB_DIR})
elseif(MKL_FIND_REQUIRED)
  message(FATAL_ERROR "Could not find MKL libraries")
endif()
\ No newline at end of file
#COPYRIGHT
#
#All contributions by the University of California:
#Copyright (c) 2014, 2015, The Regents of the University of California (Regents)
#All rights reserved.
#
#All other contributions:
#Copyright (c) 2014, 2015, the respective contributors
#All rights reserved.
#
#Caffe uses a shared copyright model: each contributor holds copyright over
#their contributions to Caffe. The project versioning records all such
#contribution and copyright details. If a contributor wants to further mark
#their specific copyright on a particular contribution, they should indicate
#their copyright solely in the commit message of the change when it is
#committed.
#
#LICENSE
#
#Redistribution and use in source and binary forms, with or without
#modification, are permitted provided that the following conditions are met:
#
#1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
#THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
#ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
#WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
#DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
#ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
#(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
#LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
#ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
#(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
#SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
#CONTRIBUTION AGREEMENT
#
#By contributing to the BVLC/caffe repository through pull-request, comment,
#or otherwise, the contributor releases their content to the
#license and copyright terms herein.
# Candidate directories for the OpenBLAS headers. System-wide locations are
# searched first; the OpenBLAS_HOME environment variable (root and include/)
# is appended last. find_path() returns the first hit, so order matters.
set(Open_BLAS_INCLUDE_SEARCH_PATHS
    /usr/include
    /usr/include/openblas
    /usr/include/openblas-base
    /usr/local/include
    /usr/local/include/openblas
    /usr/local/include/openblas-base
    /opt/OpenBLAS/include
    $ENV{OpenBLAS_HOME}
    $ENV{OpenBLAS_HOME}/include)
# Candidate directories for the OpenBLAS library. Both the OpenBLAS and
# OpenBLAS_HOME environment variables are honoured (root and lib/ subdir).
set(Open_BLAS_LIB_SEARCH_PATHS
        /lib/
        /lib/openblas-base
        /lib64/
        /usr/lib
        /usr/lib/openblas-base
        /usr/lib64
        /usr/local/lib
        /usr/local/lib64
        /opt/OpenBLAS/lib
        $ENV{OpenBLAS}      # fixed: a stray "cd" was appended here upstream, yielding a bogus "<root>cd" path
        $ENV{OpenBLAS}/lib
        $ENV{OpenBLAS_HOME}
        $ENV{OpenBLAS_HOME}/lib
 )
find_path(OpenBLAS_INCLUDE_DIR NAMES cblas.h PATHS ${Open_BLAS_INCLUDE_SEARCH_PATHS})
find_library(OpenBLAS_LIB NAMES openblas PATHS ${Open_BLAS_LIB_SEARCH_PATHS})

# Start optimistic, then veto on each missing piece.
set(OpenBLAS_FOUND ON)

# Headers are mandatory.
if(NOT OpenBLAS_INCLUDE_DIR)
  set(OpenBLAS_FOUND OFF)
  message(STATUS "Could not find OpenBLAS include. Turning OpenBLAS_FOUND off")
endif()

# The library itself is mandatory.
if(NOT OpenBLAS_LIB)
  set(OpenBLAS_FOUND OFF)
  message(STATUS "Could not find OpenBLAS lib. Turning OpenBLAS_FOUND off")
endif()

# Report, or abort when the find was REQUIRED.
if(OpenBLAS_FOUND)
  if(NOT OpenBLAS_FIND_QUIETLY)
    message(STATUS "Found OpenBLAS libraries: ${OpenBLAS_LIB}")
    message(STATUS "Found OpenBLAS include: ${OpenBLAS_INCLUDE_DIR}")
  endif()
elseif(OpenBLAS_FIND_REQUIRED)
  message(FATAL_ERROR "Could not find OpenBLAS")
endif()

mark_as_advanced(
  OpenBLAS_INCLUDE_DIR
  OpenBLAS_LIB
  OpenBLAS
)
\ No newline at end of file
# Searches for file NAME inside the given DIRS only (NO_DEFAULT_PATH suppresses
# CMake's built-in search locations) and leaves the result in the normal
# variable VAR. find_path() writes to the cache, so the cached value is copied
# into a plain variable and the cache entry is evicted — this way every
# invocation performs a fresh lookup instead of reusing a stale cached result.
macro(_find_file_in_dirs VAR NAME DIRS)
find_path(${VAR} ${NAME} ${DIRS} NO_DEFAULT_PATH) # sets cache var ${VAR} (or ${VAR}-NOTFOUND)
set(${VAR} ${${VAR}}) # promote the cache value to a normal variable
unset(${VAR} CACHE) # drop the cache entry so re-runs re-search
endmacro()
# ----------------------------------------------------------------------------
# BLAS auto-detection for ${the_module}.
# Probes, in priority order: user-supplied ("Custom") settings, Intel MKL,
# OpenBLAS, and (on UNIX) Atlas. The first successful probe wins; HAVE_BLAS
# records which backend was selected ("" means none found).
# Outputs: ${the_module}_BLAS_{INCLUDE_DIR,CBLAS_H,LIBRARIES,BINARIES} cache
# entries and, when a backend is available, a rule generating a proxy cblas.h.
# ----------------------------------------------------------------------------
if(${the_module}_WITH_BLAS)
set(_bp ${the_module}_BLAS) #prefix for blas variables
set(BLAS_CBLAS_H "cblas.h")
set(HAVE_BLAS "")
# Highest priority: a fully user-specified BLAS (all three cache entries set).
if(NOT HAVE_BLAS) #check custom BLAS from user input
if(${_bp}_INCLUDE_DIR AND ${_bp}_LIBRARIES AND ${_bp}_CBLAS_H)
set(HAVE_BLAS "Custom")
endif()
endif()
# Next: Intel MKL; its CBLAS header has a non-standard name.
if(NOT HAVE_BLAS)
include(cmake/OpenCVFindMKL.cmake)
if(MKL_FOUND)
set(BLAS_INCLUDE_DIR ${MKL_INCLUDE_DIRS})
set(BLAS_LIBRARIES ${MKL_LIBRARIES} )
set(BLAS_CBLAS_H "mkl_cblas.h" )
set(HAVE_BLAS "MKL")
endif()
endif()
# Next: OpenBLAS (uses the standard cblas.h name).
if(NOT HAVE_BLAS)
include(cmake/FindOpenBLAS.cmake)
if(OpenBLAS_FOUND)
set(BLAS_INCLUDE_DIR ${OpenBLAS_INCLUDE_DIR} )
set(BLAS_LIBRARIES ${OpenBLAS_LIB} )
set(HAVE_BLAS "OpenBLAS")
endif()
endif()
# Last resort (UNIX only): Atlas. FindAtlas.cmake sets the uppercase
# ATLAS_FOUND via find_package_handle_standard_args.
if(NOT HAVE_BLAS AND UNIX)
include(cmake/FindAtlas.cmake)
if(ATLAS_FOUND)
set(BLAS_INCLUDE_DIR ${Atlas_INCLUDE_DIR})
set(BLAS_LIBRARIES ${Atlas_LIBRARIES} )
set(HAVE_BLAS "Atlas")
endif()
endif()
# For any auto-detected (non-"Custom") backend, publish the detection results
# into the user-visible cache entries. FORCE deliberately overwrites previous
# values so re-runs track the currently detected backend; "Custom" settings
# are left untouched.
if(NOT HAVE_BLAS OR NOT (HAVE_BLAS STREQUAL "Custom"))
set(${_bp}_INCLUDE_DIR ${BLAS_INCLUDE_DIR} CACHE PATH "Path to BLAS include dir" FORCE)
set(${_bp}_CBLAS_H ${BLAS_CBLAS_H} CACHE STRING "Alternative name of cblas.h" FORCE)
set(${_bp}_LIBRARIES ${BLAS_LIBRARIES} CACHE FILEPATH "Path to BLAS libraries that will be linked with ${the_module} module" FORCE)
set(${_bp}_BINARIES ${BLAS_BINARIES} CACHE FILEPATH "Path to BLAS binaries (.so, .dll) that will be installed with ${the_module} module" FORCE)
endif()
# Generate a proxy cblas.h in the build tree that simply includes the real
# CBLAS header, so module sources can always '#include "cblas.h"' regardless
# of the backend's actual header name.
if(HAVE_BLAS) #adding proxy cblas.h header
_find_file_in_dirs(CBLAS_H_PATH ${${_bp}_CBLAS_H} ${${_bp}_INCLUDE_DIR})
if(NOT CBLAS_H_PATH)
message(WARNING "CBLAS header '${${_bp}_CBLAS_H}' not found into '${${_bp}_INCLUDE_DIR}'")
endif()
add_custom_command(OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/cblas.h #TARGET ${the_module} PRE_BUILD
COMMAND ${CMAKE_COMMAND} ARGS -E echo "\#include \"${CBLAS_H_PATH}\"" > ${CMAKE_CURRENT_BINARY_DIR}/cblas.h
COMMENT "Adding proxy cblas.h header")
endif()
endif()
\ No newline at end of file
...@@ -69,7 +69,7 @@ set(MKL_INCLUDE_HEADERS ${MKL_INCLUDE_DIRS}/mkl.h ${MKL_INCLUDE_DIRS}/mkl_versio ...@@ -69,7 +69,7 @@ set(MKL_INCLUDE_HEADERS ${MKL_INCLUDE_DIRS}/mkl.h ${MKL_INCLUDE_DIRS}/mkl_versio
if(CMAKE_CXX_SIZEOF_DATA_PTR EQUAL 8) if(CMAKE_CXX_SIZEOF_DATA_PTR EQUAL 8)
set(MKL_X64 1) set(MKL_X64 1)
set(MKL_ARCH "intel64") set(MKL_ARCH "intel64")
include(CheckTypeSize) include(CheckTypeSize)
CHECK_TYPE_SIZE(int _sizeof_int) CHECK_TYPE_SIZE(int _sizeof_int)
if (_sizeof_int EQUAL 4) if (_sizeof_int EQUAL 4)
...@@ -107,7 +107,7 @@ find_package_handle_standard_args(MKL MKL_INCLUDE_HEADERS MKL_LIBRARIES) ...@@ -107,7 +107,7 @@ find_package_handle_standard_args(MKL MKL_INCLUDE_HEADERS MKL_LIBRARIES)
if(MKL_FOUND) if(MKL_FOUND)
get_mkl_version(${MKL_INCLUDE_DIRS}/mkl_version.h) get_mkl_version(${MKL_INCLUDE_DIRS}/mkl_version.h)
message(STATUS "Found MKL ${MKL_VERSION_STR} at: ${MKL_ROOT_DIR}") message(STATUS "Found MKL ${MKL_VERSION_STR} at: ${MKL_ROOT_DIR}")
set(HAVE_MKL ON CACHE BOOL "True if MKL found") set(HAVE_MKL ON CACHE BOOL "True if MKL found")
set(MKL_ROOT_DIR ${MKL_ROOT_DIR} CACHE PATH "Path to MKL directory") set(MKL_ROOT_DIR ${MKL_ROOT_DIR} CACHE PATH "Path to MKL directory")
set(MKL_INCLUDE_DIRS ${MKL_INCLUDE_DIRS} CACHE PATH "Path to MKL include directory") set(MKL_INCLUDE_DIRS ${MKL_INCLUDE_DIRS} CACHE PATH "Path to MKL include directory")
...@@ -120,4 +120,4 @@ if(MKL_FOUND) ...@@ -120,4 +120,4 @@ if(MKL_FOUND)
endif() endif()
else() else()
endif() endif()
\ No newline at end of file
...@@ -73,11 +73,11 @@ namespace dnn ...@@ -73,11 +73,11 @@ namespace dnn
*/ */
//! LSTM recurrent layer //! LSTM recurrent layer
class LSTMLayer : public Layer class CV_EXPORTS_W LSTMLayer : public Layer
{ {
public: public:
/** Creates instance of LSTM layer */ /** Creates instance of LSTM layer */
CV_EXPORTS_W static Ptr<LSTMLayer> create(); static Ptr<LSTMLayer> create();
/** Set trained weights for LSTM layer. /** Set trained weights for LSTM layer.
LSTM behavior on each step is defined by current input, previous output, previous cell state and learned weights. LSTM behavior on each step is defined by current input, previous output, previous cell state and learned weights.
...@@ -122,15 +122,15 @@ namespace dnn ...@@ -122,15 +122,15 @@ namespace dnn
Size of the last dimension of @f$x_t@f$ must be @f$N_x@f$, (@f$N_h@f$ for @f$h_{t-1}@f$ and @f$N_c@f$ for @f$c_{t-1}@f$). Size of the last dimension of @f$x_t@f$ must be @f$N_x@f$, (@f$N_h@f$ for @f$h_{t-1}@f$ and @f$N_c@f$ for @f$c_{t-1}@f$).
Sizes of remainder dimensions could be any, but thay must be consistent among @f$x_t@f$, @f$h_{t-1}@f$ and @f$c_{t-1}@f$. Sizes of remainder dimensions could be any, but thay must be consistent among @f$x_t@f$, @f$h_{t-1}@f$ and @f$c_{t-1}@f$.
*/ */
CV_EXPORTS_W void forward(std::vector<Blob*> &input, std::vector<Blob> &output); void forward(std::vector<Blob*> &input, std::vector<Blob> &output);
}; };
//! Classical recurrent layer //! Classical recurrent layer
class RNNLayer : public Layer class CV_EXPORTS_W RNNLayer : public Layer
{ {
public: public:
/** Creates instance of RNNLayer */ /** Creates instance of RNNLayer */
CV_EXPORTS_W static Ptr<RNNLayer> create(); static Ptr<RNNLayer> create();
/** Setups learned weights. /** Setups learned weights.
...@@ -146,7 +146,7 @@ namespace dnn ...@@ -146,7 +146,7 @@ namespace dnn
@param Who is @f$ W_{xo} @f$ matrix @param Who is @f$ W_{xo} @f$ matrix
@param bo is @f$ b_{o} @f$ vector @param bo is @f$ b_{o} @f$ vector
*/ */
CV_EXPORTS_W virtual void setWeights(const Blob &Whh, const Blob &Wxh, const Blob &bh, const Blob &Who, const Blob &bo) = 0; virtual void setWeights(const Blob &Whh, const Blob &Wxh, const Blob &bh, const Blob &Who, const Blob &bo) = 0;
/** Accepts two inputs @f$x_t@f$ and @f$h_{t-1}@f$ and compute two outputs @f$o_t@f$ and @f$h_t@f$. /** Accepts two inputs @f$x_t@f$ and @f$h_{t-1}@f$ and compute two outputs @f$o_t@f$ and @f$h_t@f$.
......
...@@ -55,7 +55,7 @@ namespace dnn ...@@ -55,7 +55,7 @@ namespace dnn
/** @brief Lightweight class for storing and processing a shape of blob (or anything else). */ /** @brief Lightweight class for storing and processing a shape of blob (or anything else). */
struct BlobShape struct BlobShape
{ {
BlobShape(); //!< Returns @ref all(4, 1) BlobShape(); //!< Creates [1, 1, 1, 1] shape @todo Make more clearer behavior.
BlobShape(int s0); //!< Creates 1-dim shape [@p s0] BlobShape(int s0); //!< Creates 1-dim shape [@p s0]
BlobShape(int s0, int s1); //!< @overload BlobShape(int s0, int s1); //!< @overload
BlobShape(int s0, int s1, int s2); //!< @overload BlobShape(int s0, int s1, int s2); //!< @overload
...@@ -117,7 +117,7 @@ namespace dnn ...@@ -117,7 +117,7 @@ namespace dnn
* The class is realized as a wrapper over @ref cv::Mat and @ref cv::UMat. * The class is realized as a wrapper over @ref cv::Mat and @ref cv::UMat.
* It will support methods for switching and logical synchronization between CPU and GPU. * It will support methods for switching and logical synchronization between CPU and GPU.
*/ */
class CV_EXPORTS Blob class CV_EXPORTS_W Blob
{ {
public: public:
explicit Blob(); explicit Blob();
......
...@@ -226,11 +226,11 @@ inline size_t Blob::total(int startAxis, int endAxis) const ...@@ -226,11 +226,11 @@ inline size_t Blob::total(int startAxis, int endAxis) const
CV_Assert(0 <= startAxis && startAxis <= endAxis && endAxis <= dims()); CV_Assert(0 <= startAxis && startAxis <= endAxis && endAxis <= dims());
size_t size = 1; //fix: assume that slice isn't empty size_t cnt = 1; //fix: assume that slice isn't empty
for (int i = startAxis; i < endAxis; i++) for (int i = startAxis; i < endAxis; i++)
size *= (size_t)sizes()[i]; cnt *= (size_t)sizes()[i];
return size; return cnt;
} }
...@@ -360,9 +360,9 @@ inline Blob &Blob::shareFrom(const Blob &blob) ...@@ -360,9 +360,9 @@ inline Blob &Blob::shareFrom(const Blob &blob)
return *this; return *this;
} }
inline Blob &Blob::reshape(const BlobShape &shape) inline Blob &Blob::reshape(const BlobShape &newShape)
{ {
m = m.reshape(1, shape.dims(), shape.ptr()); m = m.reshape(1, newShape.dims(), newShape.ptr());
return *this; return *this;
} }
......
...@@ -95,10 +95,10 @@ private: ...@@ -95,10 +95,10 @@ private:
AutoBuffer<int64, 1> *pi; AutoBuffer<int64, 1> *pi;
AutoBuffer<double, 1> *pd; AutoBuffer<double, 1> *pd;
AutoBuffer<String, 1> *ps; AutoBuffer<String, 1> *ps;
void *p; void *pv;
}; };
DictValue(int _type, void *_p) : type(_type), p(_p) {} DictValue(int _type, void *_p) : type(_type), pv(_p) {}
void release(); void release();
}; };
......
...@@ -59,7 +59,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity. ...@@ -59,7 +59,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
* This function automatically called on most of OpenCV builds, * This function automatically called on most of OpenCV builds,
* but you need to call it manually on some specific configurations (iOS for example). * but you need to call it manually on some specific configurations (iOS for example).
*/ */
CV_EXPORTS void initModule(); CV_EXPORTS_W void initModule();
/** @brief This class provides all data needed to initialize layer. /** @brief This class provides all data needed to initialize layer.
* *
...@@ -81,7 +81,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity. ...@@ -81,7 +81,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
* Each class, derived from Layer, must implement allocate() methods to declare own outputs and forward() to compute outputs. * Each class, derived from Layer, must implement allocate() methods to declare own outputs and forward() to compute outputs.
* Also before using the new layer into networks you must register your layer by using one of @ref dnnLayerFactory "LayerFactory" macros. * Also before using the new layer into networks you must register your layer by using one of @ref dnnLayerFactory "LayerFactory" macros.
*/ */
class CV_EXPORTS Layer class CV_EXPORTS_W Layer
{ {
public: public:
...@@ -134,7 +134,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity. ...@@ -134,7 +134,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
* *
* This class supports reference counting of its instances, i. e. copies point to the same instance. * This class supports reference counting of its instances, i. e. copies point to the same instance.
*/ */
class CV_EXPORTS Net class CV_EXPORTS_W Net
{ {
public: public:
...@@ -178,6 +178,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity. ...@@ -178,6 +178,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
* @see setNetInputs(), Layer::inputNameToIndex(), Layer::outputNameToIndex() * @see setNetInputs(), Layer::inputNameToIndex(), Layer::outputNameToIndex()
*/ */
void connect(String outPin, String inpPin); void connect(String outPin, String inpPin);
/** @brief Connects #@p outNum output of the first layer to #@p inNum input of the second layer. /** @brief Connects #@p outNum output of the first layer to #@p inNum input of the second layer.
* @param outLayerId identifier of the first layer * @param outLayerId identifier of the first layer
* @param inpLayerId identifier of the second layer * @param inpLayerId identifier of the second layer
...@@ -185,6 +186,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity. ...@@ -185,6 +186,7 @@ namespace dnn //! This namespace is used for dnn module functionlaity.
* @param inpNum number of the second layer input * @param inpNum number of the second layer input
*/ */
void connect(int outLayerId, int outNum, int inpLayerId, int inpNum); void connect(int outLayerId, int outNum, int inpLayerId, int inpNum);
/** @brief Sets ouputs names of the network input pseudo layer. /** @brief Sets ouputs names of the network input pseudo layer.
* *
* Each net always has special own the network input pseudo layer with id=0. * Each net always has special own the network input pseudo layer with id=0.
......
...@@ -86,7 +86,7 @@ inline DictValue DictValue::get<DictValue>(int idx) const ...@@ -86,7 +86,7 @@ inline DictValue DictValue::get<DictValue>(int idx) const
template<> template<>
inline int64 DictValue::get<int64>(int idx) const inline int64 DictValue::get<int64>(int idx) const
{ {
CV_Assert(idx == -1 && size() == 1 || idx >= 0 && idx < size()); CV_Assert((idx == -1 && size() == 1) || (idx >= 0 && idx < size()));
idx = (idx == -1) ? 0 : idx; idx = (idx == -1) ? 0 : idx;
if (type == Param::INT) if (type == Param::INT)
...@@ -131,7 +131,7 @@ inline bool DictValue::get<bool>(int idx) const ...@@ -131,7 +131,7 @@ inline bool DictValue::get<bool>(int idx) const
template<> template<>
inline double DictValue::get<double>(int idx) const inline double DictValue::get<double>(int idx) const
{ {
CV_Assert(idx == -1 && size() == 1 || idx >= 0 && idx < size()); CV_Assert((idx == -1 && size() == 1) || (idx >= 0 && idx < size()));
idx = (idx == -1) ? 0 : idx; idx = (idx == -1) ? 0 : idx;
if (type == Param::REAL) if (type == Param::REAL)
...@@ -159,7 +159,7 @@ template<> ...@@ -159,7 +159,7 @@ template<>
inline String DictValue::get<String>(int idx) const inline String DictValue::get<String>(int idx) const
{ {
CV_Assert(isString()); CV_Assert(isString());
CV_Assert(idx == -1 && ps->size() == 1 || idx >= 0 && idx < (int)ps->size()); CV_Assert((idx == -1 && ps->size() == 1) || (idx >= 0 && idx < (int)ps->size()));
return (*ps)[(idx == -1) ? 0 : idx]; return (*ps)[(idx == -1) ? 0 : idx];
} }
......
...@@ -131,10 +131,10 @@ class _LayerStaticRegisterer ...@@ -131,10 +131,10 @@ class _LayerStaticRegisterer
String type; String type;
public: public:
_LayerStaticRegisterer(const String &type, LayerFactory::Constuctor constuctor) _LayerStaticRegisterer(const String &layerType, LayerFactory::Constuctor layerConstuctor)
{ {
this->type = type; this->type = layerType;
LayerFactory::registerLayer(type, constuctor); LayerFactory::registerLayer(layerType, layerConstuctor);
} }
~_LayerStaticRegisterer() ~_LayerStaticRegisterer()
......
...@@ -26,12 +26,12 @@ typedef tuple<Size, InpShapeNumOut, GroupSize, StrideSize> ConvParam; //kernel_s ...@@ -26,12 +26,12 @@ typedef tuple<Size, InpShapeNumOut, GroupSize, StrideSize> ConvParam; //kernel_s
typedef TestBaseWithParam<ConvParam> ConvolutionPerfTest; typedef TestBaseWithParam<ConvParam> ConvolutionPerfTest;
PERF_TEST_P( ConvolutionPerfTest, perf, Combine( PERF_TEST_P( ConvolutionPerfTest, perf, Combine(
Values(Size(1, 1), Size(3, 3), Size(5, 5), Size(11, 11)), Values(Size(1, 1), Size(3, 3), Size(5, 5), Size(11, 11)),
Values(make_pair(BlobShape(1, 4, 224, 224), 64), Values(make_pair(BlobShape(1, 4, 224, 224), 64),
make_pair(BlobShape(1, 64, 112, 122), 128), make_pair(BlobShape(1, 64, 112, 122), 128),
make_pair(BlobShape(1, 256, 28, 28), 512)), make_pair(BlobShape(1, 256, 28, 28), 512)),
GroupSize::all(), GroupSize::all(),
StrideSize::all()) StrideSize::all())
) )
{ {
RNG rng(0); RNG rng(0);
...@@ -77,4 +77,4 @@ PERF_TEST_P( ConvolutionPerfTest, perf, Combine( ...@@ -77,4 +77,4 @@ PERF_TEST_P( ConvolutionPerfTest, perf, Combine(
SANITY_CHECK_NOTHING(); SANITY_CHECK_NOTHING();
} }
} }
\ No newline at end of file
...@@ -141,7 +141,7 @@ namespace dnn ...@@ -141,7 +141,7 @@ namespace dnn
Mat dstMat(outGroupCn, outH*outW, outBlob.type(), outBlob.ptr(n, g*outGroupCn)); Mat dstMat(outGroupCn, outH*outW, outBlob.type(), outBlob.ptr(n, g*outGroupCn));
gemmCPU(kerMat, colMat, 1, dstMat, 0); gemmCPU(kerMat, colMat, 1, dstMat, 0);
if (bias) if (bias)
{ {
float *biasPtr = blobs[1].ptrf() + g*outGroupCn; float *biasPtr = blobs[1].ptrf() + g*outGroupCn;
...@@ -179,9 +179,9 @@ namespace dnn ...@@ -179,9 +179,9 @@ namespace dnn
#endif // HAVE_OPENCL #endif // HAVE_OPENCL
if (inpBlob.type() == CV_32F) if (inpBlob.type() == CV_32F)
im2col_CpuPBody<float>::run((float*)srcPtr, inpGroupCn, inpH, inpW, kerH, kerW, padH, padW, strideH, strideW, (float *)colMat.ptr()); im2col_CpuPBody<float>::run((float*)srcPtr, inpGroupCn, inpH, inpW, kerH, kerW, padH, padW, strideH, strideW, colMat.ptr<float>());
if (inpBlob.type() == CV_64F) if (inpBlob.type() == CV_64F)
im2col_CpuPBody<double>::run((double*)srcPtr, inpGroupCn, inpH, inpW, kerH, kerW, padH, padW, strideH, strideW, (double*)colMat.ptr()); im2col_CpuPBody<double>::run((double*)srcPtr, inpGroupCn, inpH, inpW, kerH, kerW, padH, padW, strideH, strideW, colMat.ptr<double>());
} }
void ConvolutionLayer::computeInpOutShape(const Blob &inpBlob) void ConvolutionLayer::computeInpOutShape(const Blob &inpBlob)
......
...@@ -63,8 +63,8 @@ class ElementWiseLayer : public Layer ...@@ -63,8 +63,8 @@ class ElementWiseLayer : public Layer
template<typename Dtype> template<typename Dtype>
class PBody : public cv::ParallelLoopBody class PBody : public cv::ParallelLoopBody
{ {
Dtype *data;
Func &func; Func &func;
Dtype *data;
public: public:
PBody(Blob &blob, Func &func_) : PBody(Blob &blob, Func &func_) :
......
...@@ -66,7 +66,7 @@ public: ...@@ -66,7 +66,7 @@ public:
{ {
CV_Assert(Wh.dims() == 2 && Wx.dims() == 2); CV_Assert(Wh.dims() == 2 && Wx.dims() == 2);
CV_Assert(Wh.size(0) == Wx.size(0) && Wh.size(0) % 4 == 0); CV_Assert(Wh.size(0) == Wx.size(0) && Wh.size(0) % 4 == 0);
CV_Assert(Wh.size(0) == bias.total()); CV_Assert(Wh.size(0) == (int)bias.total());
blobs.resize(3); blobs.resize(3);
blobs[0] = Wh; blobs[0] = Wh;
...@@ -78,14 +78,14 @@ public: ...@@ -78,14 +78,14 @@ public:
{ {
CV_Assert(blobs.size() == 3); CV_Assert(blobs.size() == 3);
Blob &Wh = blobs[0], &Wx = blobs[1]; Blob &Wh = blobs[0], &Wx = blobs[1];
nH = Wh.size(1); nH = Wh.size(1);
nX = Wx.size(1); nX = Wx.size(1);
nC = Wh.size(0) / 4; nC = Wh.size(0) / 4;
CV_Assert(input.size() >= 1 && input.size() <= 3); CV_Assert(input.size() >= 1 && input.size() <= 3);
CV_Assert(input[0]->size(-1) == nX); CV_Assert(input[0]->size(-1) == nX);
BlobShape inpShape = input[0]->shape(); BlobShape inpShape = input[0]->shape();
numSamples = input[0]->total(0, input[0]->dims()-1); numSamples = input[0]->total(0, input[0]->dims()-1);
...@@ -162,12 +162,12 @@ public: ...@@ -162,12 +162,12 @@ public:
Mat gateF = gatesDiv(Range(1*numSamples, 2*numSamples), Range::all()); Mat gateF = gatesDiv(Range(1*numSamples, 2*numSamples), Range::all());
Mat gateO = gatesDiv(Range(2*numSamples, 3*numSamples), Range::all()); Mat gateO = gatesDiv(Range(2*numSamples, 3*numSamples), Range::all());
Mat gateG = gatesDiv(Range(3*numSamples, 4*numSamples), Range::all()); Mat gateG = gatesDiv(Range(3*numSamples, 4*numSamples), Range::all());
sigmoid(getesIFO); sigmoid(getesIFO);
tanh(gateG, gateG); tanh(gateG, gateG);
cv::add(gateF.mul(cPrev), gateI.mul(gateG), cCurr); cv::add(gateF.mul(cPrev), gateI.mul(gateG), cCurr);
tanh(cCurr, hCurr); tanh(cCurr, hCurr);
cv::multiply(gateO, hCurr, hCurr); cv::multiply(gateO, hCurr, hCurr);
...@@ -207,8 +207,8 @@ public: ...@@ -207,8 +207,8 @@ public:
void setWeights(const Blob &W_hh, const Blob &W_xh, const Blob &b_h, const Blob &W_ho, const Blob &b_o) void setWeights(const Blob &W_hh, const Blob &W_xh, const Blob &b_h, const Blob &W_ho, const Blob &b_o)
{ {
CV_Assert(W_hh.dims() == 2 && W_xh.dims() == 2); CV_Assert(W_hh.dims() == 2 && W_xh.dims() == 2);
CV_Assert(W_hh.size(0) == W_xh.size(0) && W_hh.size(0) == W_hh.size(1) && b_h.total() == W_xh.size(0)); CV_Assert(W_hh.size(0) == W_xh.size(0) && W_hh.size(0) == W_hh.size(1) && (int)b_h.total() == W_xh.size(0));
CV_Assert(W_ho.size(0) == b_o.total()); CV_Assert(W_ho.size(0) == (int)b_o.total());
CV_Assert(W_ho.size(1) == W_hh.size(1)); CV_Assert(W_ho.size(1) == W_hh.size(1));
//TODO: Check type //TODO: Check type
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment