Commit 26087e28 authored by Alexander Alekhin

Merge remote-tracking branch 'upstream/3.4' into merge-3.4

parents 17ad33c9 2c3f02c8
......@@ -216,7 +216,7 @@
#endif
/* Number of bits in a file offset, on hosts where this is settable. */
#define _FILE_OFFSET_BITS @FILE_OFFSET_BITS@
//disabled for OpenCV CMakeLists.txt: #define _FILE_OFFSET_BITS @FILE_OFFSET_BITS@
/* Define to `__inline__' or `__inline' if that's what the C compiler
calls it, or to nothing if 'inline' is not supported under any name. */
......
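The hunk above comments out the `_FILE_OFFSET_BITS` define in what looks like a bundled third-party config-header template, presumably because OpenCV's own CMakeLists already controls large-file support and a second definition would clash. As an aside (not part of the patch), this define is the standard knob that widens `off_t` to 64 bits on 32-bit glibc targets, which is easy to verify with a probe like the following, assuming a POSIX toolchain:

```cpp
// Probe sketch: compile once normally and once with -D_FILE_OFFSET_BITS=64
// on a 32-bit Linux target to watch off_t grow from 4 to 8 bytes.
#include <cstdio>
#include <sys/types.h>

int main()
{
    std::printf("sizeof(off_t) = %zu\n", sizeof(off_t));
    return 0;
}
```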
......@@ -181,12 +181,24 @@ if(CV_GCC OR CV_CLANG)
string(REPLACE "-ffunction-sections" "" ${flags} "${${flags}}")
string(REPLACE "-fdata-sections" "" ${flags} "${${flags}}")
endforeach()
elseif(NOT ((IOS OR ANDROID) AND NOT BUILD_SHARED_LIBS) AND NOT MSVC)
# Remove unreferenced functions: function level linking
add_extra_compiler_option(-ffunction-sections)
add_extra_compiler_option(-fdata-sections)
if(NOT APPLE AND NOT OPENCV_SKIP_GC_SECTIONS)
set(OPENCV_EXTRA_EXE_LINKER_FLAGS "${OPENCV_EXTRA_EXE_LINKER_FLAGS} -Wl,--gc-sections")
else()
if(MSVC)
# TODO: Clang/C2 is not supported
elseif(((IOS OR ANDROID) AND NOT BUILD_SHARED_LIBS) AND NOT OPENCV_FORCE_FUNCTIONS_SECTIONS)
# don't create separate sections for functions/data, reduce package size
else()
# Remove unreferenced functions: function level linking
add_extra_compiler_option(-ffunction-sections)
add_extra_compiler_option(-fdata-sections)
if(NOT OPENCV_SKIP_GC_SECTIONS)
if(APPLE)
set(OPENCV_EXTRA_EXE_LINKER_FLAGS "${OPENCV_EXTRA_EXE_LINKER_FLAGS} -Wl,-dead_strip")
set(OPENCV_EXTRA_SHARED_LINKER_FLAGS "${OPENCV_EXTRA_SHARED_LINKER_FLAGS} -Wl,-dead_strip")
else()
set(OPENCV_EXTRA_EXE_LINKER_FLAGS "${OPENCV_EXTRA_EXE_LINKER_FLAGS} -Wl,--gc-sections")
set(OPENCV_EXTRA_SHARED_LINKER_FLAGS "${OPENCV_EXTRA_SHARED_LINKER_FLAGS} -Wl,--gc-sections")
endif()
endif()
endif()
endif()
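For context on what the reorganised branch above controls: `-ffunction-sections`/`-fdata-sections` place every function and data object in its own section, which lets the linker's `--gc-sections` (or `-Wl,-dead_strip` with Apple's linker, as used above) discard whatever is never referenced; on static iOS/Android builds the extra sections mostly inflate package size, hence the opt-out. A minimal sketch, assuming a GCC/Clang toolchain (file and symbol names are illustrative):

```cpp
// demo.cpp -- build with:
//   g++ -O2 -ffunction-sections -fdata-sections demo.cpp -Wl,--gc-sections -o demo
// unused() lands in its own .text.<name> section and becomes eligible for
// removal at link time because nothing references it.
#include <cstdio>

void used()   { std::puts("kept: referenced from main"); }
void unused() { std::puts("dropped: never referenced"); }

int main()
{
    used();
    return 0;
}
```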
......@@ -281,6 +293,15 @@ if((CV_GCC OR CV_CLANG)
add_extra_compiler_option(-fvisibility-inlines-hidden)
endif()
# workaround gcc bug for aligned ld/st
# https://github.com/opencv/opencv/issues/13211
if((PPC64LE AND NOT CMAKE_CROSSCOMPILING) OR OPENCV_FORCE_COMPILER_CHECK_VSX_ALIGNED)
ocv_check_runtime_flag("${CPU_BASELINE_FLAGS}" "OPENCV_CHECK_VSX_ALIGNED" "${OpenCV_SOURCE_DIR}/cmake/checks/runtime/cpu_vsx_aligned.cpp")
if(NOT OPENCV_CHECK_VSX_ALIGNED)
add_extra_compiler_option_force(-DCV_COMPILER_VSX_BROKEN_ALIGNED)
endif()
endif()
# combine all "extra" options
if(NOT OPENCV_SKIP_EXTRA_COMPILER_FLAGS)
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} ${OPENCV_EXTRA_FLAGS} ${OPENCV_EXTRA_C_FLAGS}")
......
......@@ -480,6 +480,44 @@ macro(ocv_check_flag_support lang flag varname base_options)
ocv_check_compiler_flag("${_lang}" "${base_options} ${flag}" ${${varname}} ${ARGN})
endmacro()
macro(ocv_check_runtime_flag flag result)
set(_fname "${ARGN}")
if(NOT DEFINED ${result})
file(RELATIVE_PATH _rname "${CMAKE_SOURCE_DIR}" "${_fname}")
message(STATUS "Performing Runtime Test ${result} (check file: ${_rname})")
try_run(exec_return compile_result
"${CMAKE_BINARY_DIR}"
"${_fname}"
CMAKE_FLAGS "-DCMAKE_EXE_LINKER_FLAGS=${CMAKE_EXE_LINKER_FLAGS}" # CMP0056 do this on new CMake
COMPILE_DEFINITIONS "${flag}"
OUTPUT_VARIABLE OUTPUT)
if(${compile_result})
if(exec_return EQUAL 0)
set(${result} 1 CACHE INTERNAL "Runtime Test ${result}")
message(STATUS "Performing Runtime Test ${result} - Success")
else()
message(STATUS "Performing Runtime Test ${result} - Failed(${exec_return})")
set(${result} 0 CACHE INTERNAL "Runtime Test ${result}")
endif()
else()
set(${result} 0 CACHE INTERNAL "Runtime Test ${result}")
message(STATUS "Performing Runtime Test ${result} - Compiling Failed")
endif()
if(NOT ${result})
file(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
"Runtime Test failed:\n"
" source file: '${_fname}'\n"
" check option: '${flag}'\n"
" exec return: ${exec_return}\n"
"===== BUILD AND RUNTIME LOG =====\n"
"${OUTPUT}\n"
"===== END =====\n\n")
endif()
endif()
endmacro()
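In short, `ocv_check_runtime_flag` compiles the given source with the compile definitions under test, executes it via `try_run()`, and caches a 1/0 result keyed by the test name; failures are appended to CMakeError.log. Any probe source just has to follow the exit-code contract, roughly like this hypothetical skeleton (the real VSX probe used above is shown further down):

```cpp
// Hypothetical probe skeleton for ocv_check_runtime_flag: built with the
// definitions under test, then executed; exit code 0 => test passes,
// non-zero => test fails (the value shows up in the "Failed(<code>)" status).
int main()
{
    const bool feature_ok = true;   // placeholder for the real runtime check
    return feature_ok ? 0 : 1;
}
```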
# turns off warnings
macro(ocv_warnings_disable)
if(NOT ENABLE_NOISY_WARNINGS)
......
// check sanity of vsx aligned ld/st
// https://github.com/opencv/opencv/issues/13211
#include <altivec.h>
#define vsx_ld vec_vsx_ld
#define vsx_st vec_vsx_st
template<typename T>
static void fill(T& d, int from = 0, int to = 16)
{
for (int i = from; i < to; i++)
d[i] = i;
}
template<typename T, typename Tvec>
static bool check_data(T& d, Tvec& v, int from = 0, int to = 16)
{
for (int i = from; i < to; i++)
{
if (d[i] != vec_extract(v, i))
return false;
}
return true;
}
int main()
{
unsigned char __attribute__ ((aligned (16))) rbuf[16];
unsigned char __attribute__ ((aligned (16))) wbuf[16];
__vector unsigned char a;
// 1- check aligned load and store
fill(rbuf);
a = vec_ld(0, rbuf);
if (!check_data(rbuf, a))
return 1;
vec_st(a, 0, wbuf);
if (!check_data(wbuf, a))
return 11;
// 2- check mixing aligned load and unaligned store
a = vec_ld(0, rbuf);
vsx_st(a, 0, wbuf);
if (!check_data(wbuf, a))
return 2;
// 3- check mixing unaligned load and aligned store
a = vsx_ld(0, rbuf);
vec_st(a, 0, wbuf);
if (!check_data(wbuf, a))
return 3;
return 0;
}
\ No newline at end of file
......@@ -258,8 +258,16 @@ inline void v_store_low(_Tp* ptr, const _Tpvec& a) \
inline void v_store_high(_Tp* ptr, const _Tpvec& a) \
{ vec_st_h8(a.val, ptr); }
#define OPENCV_HAL_IMPL_VSX_LOADSTORE(_Tpvec, _Tp) \
OPENCV_HAL_IMPL_VSX_LOADSTORE_C(_Tpvec, _Tp, vsx_ld, vec_ld, vsx_st, vec_st)
// working around gcc bug for aligned ld/st
// if the runtime check for vec_ld/st fails, we fall back to unaligned ld/st
// https://github.com/opencv/opencv/issues/13211
#ifdef CV_COMPILER_VSX_BROKEN_ALIGNED
#define OPENCV_HAL_IMPL_VSX_LOADSTORE(_Tpvec, _Tp) \
OPENCV_HAL_IMPL_VSX_LOADSTORE_C(_Tpvec, _Tp, vsx_ld, vsx_ld, vsx_st, vsx_st)
#else
#define OPENCV_HAL_IMPL_VSX_LOADSTORE(_Tpvec, _Tp) \
OPENCV_HAL_IMPL_VSX_LOADSTORE_C(_Tpvec, _Tp, vsx_ld, vec_ld, vsx_st, vec_st)
#endif
OPENCV_HAL_IMPL_VSX_LOADSTORE(v_uint8x16, uchar)
OPENCV_HAL_IMPL_VSX_LOADSTORE(v_int8x16, schar)
......
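The workaround simply routes the "aligned" load/store slots of `OPENCV_HAL_IMPL_VSX_LOADSTORE_C` to the unaligned `vsx_ld`/`vsx_st` wrappers when `CV_COMPILER_VSX_BROKEN_ALIGNED` is defined, so universal-intrinsic aligned accesses never hit the miscompiled `vec_ld`/`vec_st` path. A standalone sketch of the same substitution (the `OCV_*` macro names are made up for illustration, they are not OpenCV's):

```cpp
// Requires a POWER toolchain with VSX (e.g. g++ -mvsx). Sketch only.
#include <altivec.h>

#ifdef CV_COMPILER_VSX_BROKEN_ALIGNED
// aligned entry points silently use the unaligned VSX intrinsics
#  define OCV_LD_ALIGNED(p)     vec_vsx_ld(0, (p))
#  define OCV_ST_ALIGNED(v, p)  vec_vsx_st((v), 0, (p))
#else
#  define OCV_LD_ALIGNED(p)     vec_ld(0, (p))
#  define OCV_ST_ALIGNED(v, p)  vec_st((v), 0, (p))
#endif

int main()
{
    unsigned char __attribute__((aligned(16))) buf[16] = {0};
    __vector unsigned char v = OCV_LD_ALIGNED(buf);
    OCV_ST_ALIGNED(v, buf);
    return 0;
}
```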
......@@ -280,8 +280,8 @@ public:
dstData[box_index + 0] = (x + logistic_activate(srcData[box_index + 0])) / cols;
dstData[box_index + 1] = (y + logistic_activate(srcData[box_index + 1])) / rows;
dstData[box_index + 2] = exp(srcData[box_index + 2]) * biasData[2 * a] / hNorm;
dstData[box_index + 3] = exp(srcData[box_index + 3]) * biasData[2 * a + 1] / wNorm;
dstData[box_index + 2] = exp(srcData[box_index + 2]) * biasData[2 * a] / wNorm;
dstData[box_index + 3] = exp(srcData[box_index + 3]) * biasData[2 * a + 1] / hNorm;
int class_index = index_sample_offset + index * cell_size + 5;
for (int j = 0; j < classes; ++j) {
......
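The one-line fix above swaps the normalisation factors so that the decoded box width (index 2, scaled by the anchor's first bias) is divided by `wNorm` and the height (index 3, second bias) by `hNorm`, instead of the other way round. A hedged, self-contained sketch of the corrected decode (names are illustrative; `logistic_activate` is assumed to be the usual sigmoid):

```cpp
#include <cmath>

struct Box { float x, y, w, h; };

static float logistic(float v) { return 1.f / (1.f + std::exp(-v)); }

// Darknet-region-style box decode after the fix: width pairs with wNorm,
// height with hNorm (previously they were crossed).
Box decodeBox(const float* src, const float* anchors, int a,
              int cellX, int cellY, int cols, int rows,
              float wNorm, float hNorm)
{
    Box b;
    b.x = (cellX + logistic(src[0])) / cols;
    b.y = (cellY + logistic(src[1])) / rows;
    b.w = std::exp(src[2]) * anchors[2 * a]     / wNorm;
    b.h = std::exp(src[3]) * anchors[2 * a + 1] / hNorm;
    return b;
}
```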
......@@ -48,6 +48,9 @@ endif()
if(HAVE_JASPER)
ocv_include_directories(${JASPER_INCLUDE_DIR})
list(APPEND GRFMT_LIBS ${JASPER_LIBRARIES})
if(OPENCV_IO_FORCE_JASPER)
add_definitions(-DOPENCV_IMGCODECS_FORCE_JASPER=1)
endif()
endif()
if(HAVE_OPENEXR)
......@@ -148,4 +151,7 @@ if(NOT BUILD_opencv_world)
endif()
ocv_add_accuracy_tests()
if(TARGET opencv_test_imgcodecs AND HAVE_JASPER AND "$ENV{OPENCV_IO_ENABLE_JASPER}")
ocv_target_compile_definitions(opencv_test_imgcodecs PRIVATE OPENCV_IMGCODECS_ENABLE_JASPER_TESTS=1)
endif()
ocv_add_perf_tests()
......@@ -45,6 +45,9 @@
#ifdef HAVE_JASPER
#include <sstream>
#include <opencv2/core/utils/configuration.private.hpp>
#include <opencv2/core/utils/logger.hpp>
#include "grfmt_jpeg2000.hpp"
#include "opencv2/imgproc.hpp"
......@@ -71,7 +74,36 @@ struct JasperInitializer
~JasperInitializer() { jas_cleanup(); }
};
static JasperInitializer initialize_jasper;
static JasperInitializer& _initJasper()
{
static JasperInitializer initialize_jasper;
return initialize_jasper;
}
static bool isJasperEnabled()
{
static const bool PARAM_ENABLE_JASPER = utils::getConfigurationParameterBool("OPENCV_IO_ENABLE_JASPER",
#ifdef OPENCV_IMGCODECS_FORCE_JASPER
true
#else
false
#endif
);
return PARAM_ENABLE_JASPER;
}
static JasperInitializer& initJasper()
{
if (isJasperEnabled())
{
return _initJasper();
}
else
{
const char* message = "imgcodecs: Jasper (JPEG-2000) codec is disabled. You can enable it via 'OPENCV_IO_ENABLE_JASPER' option. Refer for details and cautions here: https://github.com/opencv/opencv/issues/14058";
CV_LOG_WARNING(NULL, message);
CV_Error(Error::StsNotImplemented, message);
}
}
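With this change the Jasper-backed JPEG-2000 codec is lazily initialised and disabled by default (unless the build sets `OPENCV_IMGCODECS_FORCE_JASPER`); using it while disabled raises `StsNotImplemented`. Since the flag is read through `utils::getConfigurationParameterBool`, a typical way to opt back in is the `OPENCV_IO_ENABLE_JASPER` environment variable, roughly like this usage sketch (the file name is illustrative):

```cpp
#include <cstdlib>
#include <opencv2/imgcodecs.hpp>

int main()
{
    // Opt in before the first JPEG-2000 decode touches the Jasper backend;
    // see https://github.com/opencv/opencv/issues/14058 for why it is off by default.
#if defined(_WIN32)
    _putenv_s("OPENCV_IO_ENABLE_JASPER", "1");
#else
    setenv("OPENCV_IO_ENABLE_JASPER", "1", /*overwrite=*/1);
#endif
    cv::Mat img = cv::imread("sample.jp2", cv::IMREAD_UNCHANGED);
    return img.empty() ? 1 : 0;
}
```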
/////////////////////// Jpeg2KDecoder ///////////////////
......@@ -91,6 +123,7 @@ Jpeg2KDecoder::~Jpeg2KDecoder()
ImageDecoder Jpeg2KDecoder::newDecoder() const
{
initJasper();
return makePtr<Jpeg2KDecoder>();
}
......@@ -98,12 +131,14 @@ void Jpeg2KDecoder::close()
{
if( m_stream )
{
CV_Assert(isJasperEnabled());
jas_stream_close( (jas_stream_t*)m_stream );
m_stream = 0;
}
if( m_image )
{
CV_Assert(isJasperEnabled());
jas_image_destroy( (jas_image_t*)m_image );
m_image = 0;
}
......@@ -112,6 +147,7 @@ void Jpeg2KDecoder::close()
bool Jpeg2KDecoder::readHeader()
{
CV_Assert(isJasperEnabled());
bool result = false;
close();
......@@ -178,6 +214,8 @@ static void Jpeg2KDecoder_close(Jpeg2KDecoder* ptr)
bool Jpeg2KDecoder::readData( Mat& img )
{
CV_Assert(isJasperEnabled());
Ptr<Jpeg2KDecoder> close_this(this, Jpeg2KDecoder_close);
bool result = false;
bool color = img.channels() > 1;
......@@ -320,6 +358,8 @@ bool Jpeg2KDecoder::readComponent8u( uchar *data, void *_buffer,
int step, int cmpt,
int maxval, int offset, int ncmpts )
{
CV_Assert(isJasperEnabled());
jas_matrix_t* buffer = (jas_matrix_t*)_buffer;
jas_image_t* image = (jas_image_t*)m_image;
int xstart = jas_image_cmpttlx( image, cmpt );
......@@ -384,6 +424,8 @@ bool Jpeg2KDecoder::readComponent16u( unsigned short *data, void *_buffer,
int step, int cmpt,
int maxval, int offset, int ncmpts )
{
CV_Assert(isJasperEnabled());
jas_matrix_t* buffer = (jas_matrix_t*)_buffer;
jas_image_t* image = (jas_image_t*)m_image;
int xstart = jas_image_cmpttlx( image, cmpt );
......@@ -459,6 +501,7 @@ Jpeg2KEncoder::~Jpeg2KEncoder()
ImageEncoder Jpeg2KEncoder::newEncoder() const
{
initJasper();
return makePtr<Jpeg2KEncoder>();
}
......@@ -470,6 +513,7 @@ bool Jpeg2KEncoder::isFormatSupported( int depth ) const
bool Jpeg2KEncoder::write( const Mat& _img, const std::vector<int>& params )
{
CV_Assert(isJasperEnabled());
int width = _img.cols, height = _img.rows;
int depth = _img.depth(), channels = _img.channels();
depth = depth == CV_8U ? 8 : 16;
......@@ -541,6 +585,8 @@ bool Jpeg2KEncoder::write( const Mat& _img, const std::vector<int>& params )
bool Jpeg2KEncoder::writeComponent8u( void *__img, const Mat& _img )
{
CV_Assert(isJasperEnabled());
jas_image_t* img = (jas_image_t*)__img;
int w = _img.cols, h = _img.rows, ncmpts = _img.channels();
jas_matrix_t *row = jas_matrix_create( 1, w );
......@@ -565,6 +611,8 @@ bool Jpeg2KEncoder::writeComponent8u( void *__img, const Mat& _img )
bool Jpeg2KEncoder::writeComponent16u( void *__img, const Mat& _img )
{
CV_Assert(isJasperEnabled());
jas_image_t* img = (jas_image_t*)__img;
int w = _img.cols, h = _img.rows, ncmpts = _img.channels();
jas_matrix_t *row = jas_matrix_create( 1, w );
......
......@@ -71,7 +71,7 @@ TEST_P(Imgcodecs_FileMode, regression)
const string all_images[] =
{
#ifdef HAVE_JASPER
#if defined(HAVE_JASPER) && defined(OPENCV_IMGCODECS_ENABLE_JASPER_TESTS)
"readwrite/Rome.jp2",
"readwrite/Bretagne2.jp2",
"readwrite/Bretagne2.jp2",
......
......@@ -105,7 +105,7 @@ const string exts[] = {
#ifdef HAVE_JPEG
"jpg",
#endif
#ifdef HAVE_JASPER
#if defined(HAVE_JASPER) && defined(OPENCV_IMGCODECS_ENABLE_JASPER_TESTS)
"jp2",
#endif
#if 0 /*defined HAVE_OPENEXR && !defined __APPLE__*/
......
......@@ -899,7 +899,7 @@ public:
int count = inputs.rows;
int iter = -1, max_iter = termCrit.maxCount*count;
double epsilon = termCrit.epsilon*count;
double epsilon = (termCrit.type & CV_TERMCRIT_EPS) ? termCrit.epsilon*count : 0;
int l_count = layer_count();
int ivcount = layer_sizes[0];
......
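The ANN_MLP change above stops a `COUNT`-only termination criterion from accidentally inheriting a non-zero epsilon: the epsilon threshold is now applied only when the caller actually requested an `EPS`-based criterion. A small sketch of the same guard pattern (the helper name is hypothetical):

```cpp
#include <opencv2/core.hpp>

// Mirrors the fix: honour termCrit.epsilon only if EPS was requested,
// otherwise training should stop purely on the iteration count.
static double effectiveEpsilon(const cv::TermCriteria& tc, int sampleCount)
{
    return (tc.type & cv::TermCriteria::EPS) ? tc.epsilon * sampleCount : 0.0;
}

// e.g. TermCriteria(TermCriteria::MAX_ITER, 1000, 0) yields an effective epsilon of 0.
```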
......@@ -135,10 +135,14 @@ if(NOT OPENCV_SKIP_PYTHON_LOADER AND DEFINED OPENCV_PYTHON_INSTALL_PATH)
set(OPENCV_PYTHON_INSTALL_PATH_SETUPVARS "${OPENCV_PYTHON_INSTALL_PATH}" CACHE INTERNAL "")
endif()
if(NOT " ${PYTHON}" STREQUAL " PYTHON" AND DEFINED OPENCV_${PYTHON}_INSTALL_PATH)
set(__python_binary_install_path "${OPENCV_${PYTHON}_INSTALL_PATH}")
elseif(OPENCV_SKIP_PYTHON_LOADER AND DEFINED ${PYTHON}_PACKAGES_PATH)
set(__python_binary_install_path "${${PYTHON}_PACKAGES_PATH}")
if(OPENCV_SKIP_PYTHON_LOADER)
if(DEFINED OPENCV_${PYTHON}_INSTALL_PATH)
set(__python_binary_install_path "${OPENCV_${PYTHON}_INSTALL_PATH}")
elseif(DEFINED ${PYTHON}_PACKAGES_PATH)
set(__python_binary_install_path "${${PYTHON}_PACKAGES_PATH}")
else()
message(FATAL_ERROR "Specify 'OPENCV_${PYTHON}_INSTALL_PATH' variable")
endif()
else()
ocv_assert(DEFINED OPENCV_PYTHON_INSTALL_PATH)
set(__python_binary_install_path "${OPENCV_PYTHON_INSTALL_PATH}/${__python_loader_subdir}python-${${PYTHON}_VERSION_MAJOR}.${${PYTHON}_VERSION_MINOR}")
......
......@@ -5,11 +5,10 @@ include("${OpenCV_BINARY_DIR}/opencv_python_config.cmake")
if(NOT DEFINED OpenCV_SOURCE_DIR)
message(FATAL_ERROR "Missing OpenCV_SOURCE_DIR")
endif()
if(NOT OPENCV_PYTHON_INSTALL_PATH)
if(NOT DEFINED OPENCV_PYTHON_STANDALONE_INSTALL_PATH)
message(FATAL_ERROR "Missing OPENCV_PYTHON_STANDALONE_INSTALL_PATH / OPENCV_PYTHON_INSTALL_PATH")
endif()
if(DEFINED OPENCV_PYTHON_STANDALONE_INSTALL_PATH)
set(OPENCV_PYTHON_INSTALL_PATH "${OPENCV_PYTHON_STANDALONE_INSTALL_PATH}")
elseif(NOT OPENCV_PYTHON_INSTALL_PATH)
message(FATAL_ERROR "Missing OPENCV_PYTHON_STANDALONE_INSTALL_PATH / OPENCV_PYTHON_INSTALL_PATH")
endif()
include("${OpenCV_SOURCE_DIR}/cmake/OpenCVUtils.cmake")
......
......@@ -60,108 +60,6 @@
#include <stdlib.h>
#include <string.h>
static dc1394error_t adaptBufferStereoLocal(dc1394video_frame_t *in, dc1394video_frame_t *out)
{
uint32_t bpp;
// buffer position is not changed. Size is doubled in Y
out->size[0] = in->size[0];
out->size[1] = in->size[1] * 2;
out->position[0] = in->position[0];
out->position[1] = in->position[1];
// color coding is set to mono8 or raw8.
switch (in->color_coding)
{
case DC1394_COLOR_CODING_RAW16:
out->color_coding = DC1394_COLOR_CODING_RAW8;
break;
case DC1394_COLOR_CODING_MONO16:
case DC1394_COLOR_CODING_YUV422:
out->color_coding = DC1394_COLOR_CODING_MONO8;
break;
default:
return DC1394_INVALID_COLOR_CODING;
}
// keep the color filter value in all cases. if the format is not raw it will not be further used anyway
out->color_filter = in->color_filter;
// the output YUV byte order must be already set if the buffer is YUV422 at the output
// if the output is not YUV we don't care about this field.
// Hence nothing to do.
// we always convert to 8bits (at this point) we can safely set this value to 8.
out->data_depth = 8;
// don't know what to do with stride... >>>> TODO: STRIDE SHOULD BE TAKEN INTO ACCOUNT... <<<<
// out->stride=??
// the video mode should not change. Color coding and other stuff can be accessed in specific fields of this struct
out->video_mode = in->video_mode;
// padding is kept:
out->padding_bytes = in->padding_bytes;
// image bytes changes: >>>> TODO: STRIDE SHOULD BE TAKEN INTO ACCOUNT... <<<<
dc1394_get_color_coding_bit_size(out->color_coding, &bpp);
out->image_bytes = (out->size[0] * out->size[1] * bpp) / 8;
// total is image_bytes + padding_bytes
out->total_bytes = out->image_bytes + out->padding_bytes;
// bytes-per-packet and packets_per_frame are internal data that can be kept as is.
out->packet_size = in->packet_size;
out->packets_per_frame = in->packets_per_frame;
// timestamp, frame_behind, id and camera are copied too:
out->timestamp = in->timestamp;
out->frames_behind = in->frames_behind;
out->camera = in->camera;
out->id = in->id;
// verify memory allocation:
if (out->total_bytes > out->allocated_image_bytes)
{
free(out->image);
out->image = (uint8_t*)malloc(out->total_bytes * sizeof(uint8_t));
out->allocated_image_bytes = out->total_bytes;
}
// Copy padding bytes:
memcpy(&(out->image[out->image_bytes]), &(in->image[in->image_bytes]), out->padding_bytes);
out->little_endian = DC1394_FALSE; // not used before 1.32 is out.
out->data_in_padding = DC1394_FALSE; // not used before 1.32 is out.
return DC1394_SUCCESS;
}
static dc1394error_t dc1394_deinterlace_stereo_frames_fixed(dc1394video_frame_t *in,
dc1394video_frame_t *out, dc1394stereo_method_t method)
{
if((in->color_coding == DC1394_COLOR_CODING_RAW16) ||
(in->color_coding == DC1394_COLOR_CODING_MONO16) ||
(in->color_coding == DC1394_COLOR_CODING_YUV422))
{
switch (method)
{
case DC1394_STEREO_METHOD_INTERLACED:
adaptBufferStereoLocal(in, out);
//FIXED by AB:
// dc1394_deinterlace_stereo(in->image, out->image, in->size[0], in->size[1]);
dc1394_deinterlace_stereo(in->image, out->image, out->size[0], out->size[1]);
break;
case DC1394_STEREO_METHOD_FIELD:
adaptBufferStereoLocal(in, out);
memcpy(out->image, in->image, out->image_bytes);
break;
}
return DC1394_INVALID_STEREO_METHOD;
}
else
return DC1394_FUNCTION_NOT_SUPPORTED;
}
struct CvDC1394
{
CvDC1394();
......@@ -540,10 +438,7 @@ bool CvCaptureCAM_DC1394_v2_CPP::grabFrame()
if (nimages == 2)
{
fs = (dc1394video_frame_t*)calloc(1, sizeof(*fs));
//dc1394_deinterlace_stereo_frames(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);
dc1394_deinterlace_stereo_frames_fixed(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);
dc1394_deinterlace_stereo_frames(dcFrame, fs, DC1394_STEREO_METHOD_INTERLACED);
dc1394_capture_enqueue(dcCam, dcFrame); // release the captured frame as soon as possible
dcFrame = 0;
if (!fs->image)
......