Commit 3fb6617d authored by Alexander Alekhin

Merge remote-tracking branch 'upstream/3.4' into merge-3.4

parents 3133bb49 a105f569
@@ -29,6 +29,16 @@ macro(ippiw_debugmsg MESSAGE)
message(STATUS "${MESSAGE}")
endif()
endmacro()
macro(ippiw_done)
foreach(__file ${IPP_IW_LICENSE_FILES})
if(EXISTS "${__file}")
ocv_install_3rdparty_licenses(ippiw "${__file}")
endif()
endforeach()
return()
endmacro()
file(TO_CMAKE_PATH "${IPPROOT}" IPPROOT)

# This function detects Intel IPP Integration Wrappers version by analyzing .h file
@@ -81,7 +91,7 @@ macro(ippiw_setup PATH BUILD)
if(EXISTS "${FILE}")
set(HAVE_IPP_IW_LL 1)
endif()
- return()
+ ippiw_done()
else()
ippiw_debugmsg("sources\tno")
endif()
@@ -120,7 +130,7 @@ macro(ippiw_setup PATH BUILD)
if(EXISTS "${FILE}")
set(HAVE_IPP_IW_LL 1)
endif()
- return()
+ ippiw_done()
else()
ippiw_debugmsg("binaries\tno")
endif()
@@ -147,14 +157,12 @@ if(BUILD_IPP_IW)
ippiw_setup("${OpenCV_SOURCE_DIR}/3rdparty/ippiw" 1)
set(IPPIW_ROOT "${IPPROOT}/../iw")
- ocv_install_3rdparty_licenses(ippiw
-     "${IPPIW_ROOT}/../support.txt"
-     "${IPPIW_ROOT}/../third-party-programs.txt")
- if(WIN32)
-   ocv_install_3rdparty_licenses(ippiw "${IPPIW_ROOT}/../EULA.rtf")
- else()
-   ocv_install_3rdparty_licenses(ippiw "${IPPIW_ROOT}/../EULA.txt")
- endif()
+ set(IPP_IW_LICENSE_FILES ${IPP_IW_LICENSE_FILES_EXTRA}
+     "${IPPIW_ROOT}/../support.txt"
+     "${IPPIW_ROOT}/../third-party-programs.txt"
+     "${IPPIW_ROOT}/../EULA.rtf"
+     "${IPPIW_ROOT}/../EULA.txt"
+ )
# Package sources
get_filename_component(__PATH "${IPPROOT}/../iw/" ABSOLUTE)
@@ -167,10 +175,11 @@ if(BUILD_IPP_IW)
include("${OpenCV_SOURCE_DIR}/3rdparty/ippicv/ippicv.cmake")
download_ippicv(TEMP_ROOT)
set(IPPIW_ROOT "${TEMP_ROOT}/iw/")
- ocv_install_3rdparty_licenses(ippiw
-     "${IPPIW_ROOT}/../EULA.txt"
-     "${IPPIW_ROOT}/../support.txt"
-     "${IPPIW_ROOT}/../third-party-programs.txt")
+ set(IPP_IW_LICENSE_FILES ${IPP_IW_LICENSE_FILES_EXTRA}
+     "${IPPIW_ROOT}/../EULA.txt"
+     "${IPPIW_ROOT}/../support.txt"
+     "${IPPIW_ROOT}/../third-party-programs.txt"
+ )
ippiw_setup("${IPPIW_ROOT}" 1)
endif()
......
@@ -442,6 +442,12 @@ void transform(InputArray _src, OutputArray _dst, InputArray _mtx)
_dst.create( src.size(), CV_MAKETYPE(depth, dcn) );
Mat dst = _dst.getMat();
if (src.data == dst.data) // inplace case
{
CV_Assert(scn == dcn);
src = src.clone(); // TODO Add performance warning
}
int mtype = depth == CV_32S || depth == CV_64F ? CV_64F : CV_32F;
AutoBuffer<double> _mbuf;
double* mbuf;
......
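Note: the hunk above makes transform() clone its input when it is asked to work in place. For reference only (not part of this commit), a minimal sketch of such an in-place call, assuming the standard cv::transform API and equal input/output channel counts as the added CV_Assert requires:

#include <opencv2/core.hpp>

int main()
{
    // 3-channel points transformed by a 3x3 matrix, writing back into the same Mat.
    cv::Mat pts(1, 4, CV_32FC3, cv::Scalar(1, 2, 3));
    cv::Mat mtx = cv::Mat::eye(3, 3, CV_32F);

    // src.data == dst.data inside transform(), so the input is now cloned first;
    // the extra copy sidesteps any aliasing between source and destination buffers
    // (hence the TODO about adding a performance warning).
    cv::transform(pts, pts, mtx);
    return 0;
}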
@@ -215,7 +215,7 @@ class dnn_test(NewOpenCVTests):
testScores, testBoxes, 0.5, scoresDiff, iouDiff)

def test_async(self):
- timeout = 500*10**6 # in nanoseconds (500ms)
+ timeout = 10*1000*10**6 # in nanoseconds (10 sec)
testdata_required = bool(os.environ.get('OPENCV_DNN_TEST_REQUIRE_TESTDATA', False))
proto = self.find_dnn_file('dnn/layers/layer_convolution.prototxt', required=testdata_required)
model = self.find_dnn_file('dnn/layers/layer_convolution.caffemodel', required=testdata_required)
......
@@ -329,7 +329,7 @@ TEST_P(Test_Darknet_nets, TinyYoloVoc)
}

#ifdef HAVE_INF_ENGINE
- static const std::chrono::milliseconds async_timeout(500);
+ static const std::chrono::milliseconds async_timeout(10000);

typedef testing::TestWithParam<tuple<std::string, Target> > Test_Darknet_nets_async;
TEST_P(Test_Darknet_nets_async, Accuracy)
......
@@ -758,6 +758,12 @@ TEST_P(Eltwise, Accuracy)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE, CV_TEST_TAG_DNN_SKIP_IE_2019R1, CV_TEST_TAG_DNN_SKIP_IE_2019R1_1);
#endif
#if defined(INF_ENGINE_RELEASE)
if (backendId == DNN_BACKEND_INFERENCE_ENGINE && targetId == DNN_TARGET_OPENCL &&
op == "sum" && numConv == 1 && !weighted)
applyTestTag(CV_TEST_TAG_DNN_SKIP_IE_OPENCL, CV_TEST_TAG_DNN_SKIP_IE);
#endif
Net net;
std::vector<int> convLayerIds(numConv);
......
@@ -363,7 +363,7 @@ TEST(Net, forwardAndRetrieve)
}

#ifdef HAVE_INF_ENGINE
- static const std::chrono::milliseconds async_timeout(500);
+ static const std::chrono::milliseconds async_timeout(10000);

// This test runs network in synchronous mode for different inputs and then
// runs the same model asynchronously for the same inputs.
......
@@ -40,7 +40,7 @@
//M*/
//
- // Loading and saving IPL images.
+ // Loading and saving images.
//

#include "precomp.hpp"
@@ -393,7 +393,6 @@ static void ApplyExifOrientation(const Mat& buf, Mat& img)
*      LOAD_MAT=2
*    }
* @param[in] mat Reference to C++ Mat object (If LOAD_MAT)
- * @param[in] scale_denom Scale value
*
*/
static bool
@@ -459,11 +458,11 @@ imread_( const String& filename, int flags, Mat& mat )
int type = decoder->type();
if( (flags & IMREAD_LOAD_GDAL) != IMREAD_LOAD_GDAL && flags != IMREAD_UNCHANGED )
{
- if( (flags & CV_LOAD_IMAGE_ANYDEPTH) == 0 )
+ if( (flags & IMREAD_ANYDEPTH) == 0 )
type = CV_MAKETYPE(CV_8U, CV_MAT_CN(type));
- if( (flags & CV_LOAD_IMAGE_COLOR) != 0 ||
-    ((flags & CV_LOAD_IMAGE_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
+ if( (flags & IMREAD_COLOR) != 0 ||
+    ((flags & IMREAD_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
type = CV_MAKETYPE(CV_MAT_DEPTH(type), 3);
else
type = CV_MAKETYPE(CV_MAT_DEPTH(type), 1);
@@ -558,11 +557,11 @@ imreadmulti_(const String& filename, int flags, std::vector<Mat>& mats)
int type = decoder->type();
if( (flags & IMREAD_LOAD_GDAL) != IMREAD_LOAD_GDAL && flags != IMREAD_UNCHANGED )
{
- if ((flags & CV_LOAD_IMAGE_ANYDEPTH) == 0)
+ if ((flags & IMREAD_ANYDEPTH) == 0)
type = CV_MAKETYPE(CV_8U, CV_MAT_CN(type));
if ((flags & CV_LOAD_IMAGE_COLOR) != 0 ||
-    ((flags & CV_LOAD_IMAGE_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1))
+    ((flags & IMREAD_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1))
type = CV_MAKETYPE(CV_MAT_DEPTH(type), 3);
else
type = CV_MAKETYPE(CV_MAT_DEPTH(type), 1);
@@ -791,11 +790,11 @@ imdecode_( const Mat& buf, int flags, Mat& mat )
int type = decoder->type();
if( (flags & IMREAD_LOAD_GDAL) != IMREAD_LOAD_GDAL && flags != IMREAD_UNCHANGED )
{
- if( (flags & CV_LOAD_IMAGE_ANYDEPTH) == 0 )
+ if( (flags & IMREAD_ANYDEPTH) == 0 )
type = CV_MAKETYPE(CV_8U, CV_MAT_CN(type));
- if( (flags & CV_LOAD_IMAGE_COLOR) != 0 ||
-    ((flags & CV_LOAD_IMAGE_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
+ if( (flags & IMREAD_COLOR) != 0 ||
+    ((flags & IMREAD_ANYCOLOR) != 0 && CV_MAT_CN(type) > 1) )
type = CV_MAKETYPE(CV_MAT_DEPTH(type), 3);
else
type = CV_MAKETYPE(CV_MAT_DEPTH(type), 1);
......
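The three hunks above swap the legacy C-era flag constants (CV_LOAD_IMAGE_ANYDEPTH, CV_LOAD_IMAGE_COLOR, CV_LOAD_IMAGE_ANYCOLOR) for their cv::ImreadModes counterparts. A minimal usage sketch with the modern flags, for illustration only (the file name is an assumption):

#include <opencv2/imgcodecs.hpp>

int main()
{
    // Keep the file's native bit depth and channel count instead of forcing 8-bit BGR.
    cv::Mat img = cv::imread("input.tif", cv::IMREAD_ANYDEPTH | cv::IMREAD_ANYCOLOR);
    return img.empty() ? 1 : 0;
}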
@@ -741,13 +741,13 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
CV_Assert( ssize.width > 0 && ssize.height > 0 &&
           std::abs(dsize.width*2 - ssize.width) <= 2 &&
           std::abs(dsize.height*2 - ssize.height) <= 2 );
- int k, x, sy0 = -PD_SZ/2, sy = sy0, width0 = std::min((ssize.width-PD_SZ/2-1)/2 + 1, dsize.width);
+ int sy0 = -PD_SZ/2, sy = sy0, width0 = std::min((ssize.width-PD_SZ/2-1)/2 + 1, dsize.width);
- for( x = 0; x <= PD_SZ+1; x++ )
+ for (int x = 0; x <= PD_SZ+1; x++)
{
int sx0 = borderInterpolate(x - PD_SZ/2, ssize.width, borderType)*cn;
int sx1 = borderInterpolate(x + width0*2 - PD_SZ/2, ssize.width, borderType)*cn;
- for( k = 0; k < cn; k++ )
+ for (int k = 0; k < cn; k++)
{
tabL[x*cn + k] = sx0 + k;
tabR[x*cn + k] = sx1 + k;
@@ -758,10 +758,10 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
dsize.width *= cn;
width0 *= cn;
- for( x = 0; x < dsize.width; x++ )
+ for (int x = 0; x < dsize.width; x++)
tabM[x] = (x/cn)*2*cn + x % cn;
- for( int y = 0; y < dsize.height; y++ )
+ for (int y = 0; y < dsize.height; y++)
{
T* dst = _dst.ptr<T>(y);
WT *row0, *row1, *row2, *row3, *row4;
@@ -772,15 +772,13 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
WT* row = buf + ((sy - sy0) % PD_SZ)*bufstep;
int _sy = borderInterpolate(sy, ssize.height, borderType);
const T* src = _src.ptr<T>(_sy);
- int limit = cn;
- const int* tab = tabL;
- for( x = 0;;)
- {
-     for( ; x < limit; x++ )
-     {
-         row[x] = src[tab[x+cn*2]]*6 + (src[tab[x+cn]] + src[tab[x+cn*3]])*4 +
-                  src[tab[x]] + src[tab[x+cn*4]];
-     }
+ do {
+     int x = 0;
+     for( ; x < cn; x++ )
+     {
+         row[x] = src[tabL[x+cn*2]]*6 + (src[tabL[x+cn]] + src[tabL[x+cn*3]])*4 +
+                  src[tabL[x]] + src[tabL[x+cn*4]];
+     }

if( x == dsize.width )
@@ -840,18 +838,22 @@ pyrDown_( const Mat& _src, Mat& _dst, int borderType )
}
}
-     limit = dsize.width;
-     tab = tabR - x;
- }
+     // tabR
+     for (int x_ = 0; x < dsize.width; x++, x_++)
+     {
+         row[x] = src[tabR[x_+cn*2]]*6 + (src[tabR[x_+cn]] + src[tabR[x_+cn*3]])*4 +
+                  src[tabR[x_]] + src[tabR[x_+cn*4]];
+     }
+ } while (0);
}

// do vertical convolution and decimation and write the result to the destination image
- for( k = 0; k < PD_SZ; k++ )
+ for (int k = 0; k < PD_SZ; k++)
rows[k] = buf + ((y*2 - PD_SZ/2 + k - sy0) % PD_SZ)*bufstep;
row0 = rows[0]; row1 = rows[1]; row2 = rows[2]; row3 = rows[3]; row4 = rows[4];
- x = PyrDownVecV<WT, T>(rows, dst, dsize.width);
- for( ; x < dsize.width; x++ )
+ int x = PyrDownVecV<WT, T>(rows, dst, dsize.width);
+ for (; x < dsize.width; x++ )
dst[x] = castOp(row2[x]*6 + (row1[x] + row3[x])*4 + row0[x] + row4[x]);
}
}
......
@@ -112,7 +112,7 @@ imgproc = {'': ['Canny', 'GaussianBlur', 'Laplacian', 'HoughLines', 'HoughLinesP
'goodFeaturesToTrack','grabCut','initUndistortRectifyMap', 'integral','integral2', 'isContourConvex', 'line', \
'matchShapes', 'matchTemplate','medianBlur', 'minAreaRect', 'minEnclosingCircle', 'moments', 'morphologyEx', \
'pointPolygonTest', 'putText','pyrDown','pyrUp','rectangle','remap', 'resize','sepFilter2D','threshold', \
- 'undistort','warpAffine','warpPerspective','watershed', \
+ 'undistort','warpAffine','warpPerspective','warpPolar','watershed', \
'fillPoly', 'fillConvexPoly'],
'CLAHE': ['apply', 'collectGarbage', 'getClipLimit', 'getTilesGridSize', 'setClipLimit', 'setTilesGridSize']}
@@ -171,7 +171,7 @@ aruco = {'': ['detectMarkers', 'drawDetectedMarkers', 'drawAxis', 'estimatePoseS
'aruco_CharucoBoard': ['create', 'draw'],
}

- calib3d = {'': ['findHomography','calibrateCameraExtended', 'drawFrameAxes', 'getDefaultNewCameraMatrix', 'initUndistortRectifyMap']}
+ calib3d = {'': ['findHomography', 'calibrateCameraExtended', 'drawFrameAxes', 'estimateAffine2D', 'getDefaultNewCameraMatrix', 'initUndistortRectifyMap', 'Rodrigues']}

def makeWhiteList(module_list):
wl = {}
......
@@ -41,3 +41,51 @@ QUnit.test('findHomography', function(assert) {
assert.ok(mat instanceof cv.Mat);
});
QUnit.test('Rodrigues', function(assert) {
// Converts a rotation matrix to a rotation vector and vice versa
// data64F is the output array
const rvec0 = cv.matFromArray(1, 3, cv.CV_64F, [1,1,1]);
let rMat0 = new cv.Mat();
let rvec1 = new cv.Mat();
// Args: input Mat, output Mat. The function mutates the output Mat, so the function does not return anything.
// cv.Rodrigues (InputArray=src, OutputArray=dst, jacobian=0)
// https://docs.opencv.org/2.4/modules/calib3d/doc/camera_calibration_and_3d_reconstruction.html#void%20Rodrigues(InputArray%20src,%20OutputArray%20dst,%20OutputArray%20jacobian)
// vec to Mat, starting number is 3 long and each element is 1.
cv.Rodrigues(rvec0, rMat0);
assert.ok(rMat0.data64F.length == 9);
assert.ok(0.23 > rMat0.data64F[0] > 0.22);
// convert Mat to Vec, should be same as what we started with, 3 long and each item should be a 1.
cv.Rodrigues(rMat0, rvec1);
assert.ok(rvec1.data64F.length == 3);
assert.ok(1.01 > rvec1.data64F[0] > 0.9);
// Answer should be around 1: 0.9999999999999999
});
QUnit.test('estimateAffine2D', function(assert) {
const inputs = cv.matFromArray(4, 1, cv.CV_32FC2, [
1, 1,
80, 0,
0, 80,
80, 80
]);
const outputs = cv.matFromArray(4, 1, cv.CV_32FC2, [
21, 51,
70, 77,
40, 40,
10, 70
]);
const M = cv.estimateAffine2D(inputs, outputs);
assert.ok(M instanceof cv.Mat);
assert.deepEqual(Array.from(M.data), [
23, 55, 97, 126, 87, 139, 227, 63, 0, 0,
0, 0, 0, 0, 232, 191, 71, 246, 12, 68,
165, 35, 53, 64, 99, 56, 27, 66, 14, 254,
212, 63, 103, 102, 102, 102, 102, 102, 182, 191,
195, 252, 174, 22, 55, 97, 73, 64
]);
});
@@ -960,3 +960,20 @@ QUnit.test('test_filter', function(assert) {
src.delete();
}
});
QUnit.test('warpPolar', function(assert) {
const lines = new cv.Mat(255, 255, cv.CV_8U, new cv.Scalar(0));
for (let r = 0; r < lines.rows; r++) {
lines.row(r).setTo(new cv.Scalar(r));
}
cv.warpPolar(lines, lines, { width: 5, height: 5 }, new cv.Point(2, 2), 3,
cv.INTER_CUBIC | cv.WARP_FILL_OUTLIERS | cv.WARP_INVERSE_MAP);
assert.ok(lines instanceof cv.Mat);
assert.deepEqual(Array.from(lines.data), [
159, 172, 191, 210, 223,
146, 159, 191, 223, 236,
128, 128, 0, 0, 0,
109, 96, 64, 32, 19,
96, 83, 64, 45, 32
]);
});
@@ -40,9 +40,10 @@ protected:
bool testBypassRoute(vector<Point2f> hull, int start, int finish);
inline double getCosVectors(Point2f a, Point2f b, Point2f c);
- Mat barcode, bin_barcode, straight_barcode;
+ Mat barcode, bin_barcode, resized_barcode, resized_bin_barcode, straight_barcode;
vector<Point2f> localization_points, transformation_points;
double eps_vertical, eps_horizontal, coeff_expansion;
enum resize_direction { ZOOMING, SHRINKING, UNCHANGED } purpose;
};
@@ -50,24 +51,36 @@ void QRDetect::init(const Mat& src, double eps_vertical_, double eps_horizontal_
{
CV_TRACE_FUNCTION();
CV_Assert(!src.empty());
- barcode = src.clone();
const double min_side = std::min(src.size().width, src.size().height);
if (min_side < 512.0)
{
purpose = ZOOMING;
coeff_expansion = 512.0 / min_side;
const int width = cvRound(src.size().width * coeff_expansion);
const int height = cvRound(src.size().height * coeff_expansion);
Size new_size(width, height);
resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
}
else if (min_side > 512.0)
{
purpose = SHRINKING;
coeff_expansion = min_side / 512.0;
const int width = cvRound(src.size().width / coeff_expansion);
const int height = cvRound(src.size().height / coeff_expansion);
Size new_size(width, height);
resize(src, resized_barcode, new_size, 0, 0, INTER_AREA);
}
else
{
purpose = UNCHANGED;
coeff_expansion = 1.0;
barcode = src;
}
eps_vertical = eps_vertical_;
eps_horizontal = eps_horizontal_;
adaptiveThreshold(barcode, bin_barcode, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 83, 2);
adaptiveThreshold(resized_barcode, resized_bin_barcode, 255, ADAPTIVE_THRESH_GAUSSIAN_C, THRESH_BINARY, 83, 2);
}
@@ -140,11 +153,18 @@ vector<Point2f> QRDetect::separateVerticalLines(const vector<Vec3d> &list_lines)
{
CV_TRACE_FUNCTION();
vector<Vec3d> result;
- int temp_length = 0;
+ int temp_length;
vector<Point2f> point2f_result;
uint8_t next_pixel;
vector<double> test_lines;

for (int coeff_epsilon = 1; coeff_epsilon < 10; coeff_epsilon++)
{
result.clear();
temp_length = 0;
point2f_result.clear();

for (size_t pnt = 0; pnt < list_lines.size(); pnt++)
{
const int x = cvRound(list_lines[pnt][0] + list_lines[pnt][2] * 0.5);
@@ -199,20 +219,31 @@ vector<Point2f> QRDetect::separateVerticalLines(const vector<Vec3d> &list_lines)
else { weight += fabs((test_lines[i] / length) - 3.0/14.0); }
}

- if(weight < eps_horizontal)
+ if(weight < eps_horizontal * coeff_epsilon)
{
result.push_back(list_lines[pnt]);
}
}
}

if (result.size() > 2)
- vector<Point2f> point2f_result;
{
for (size_t i = 0; i < result.size(); i++)
{
point2f_result.push_back(
    Point2f(static_cast<float>(result[i][0] + result[i][2] * 0.5),
            static_cast<float>(result[i][1])));
}
vector<Point2f> centers;
Mat labels;
double compactness;
compactness = kmeans(point2f_result, 3, labels,
             TermCriteria( TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1),
             3, KMEANS_PP_CENTERS, centers);
if (compactness == 0) { continue; }
if (compactness > 0) { break; }
}
}

return point2f_result;
}
@@ -316,10 +347,46 @@ bool QRDetect::localization()
vector<Vec3d> list_lines_x = searchHorizontalLines();
if( list_lines_x.empty() ) { return false; }
vector<Point2f> list_lines_y = separateVerticalLines(list_lines_x);
- if( list_lines_y.size() < 3 ) { return false; }
+ if( list_lines_y.empty() ) { return false; }

vector<Point2f> centers;
Mat labels;
kmeans(list_lines_y, 3, labels,
TermCriteria( TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1),
3, KMEANS_PP_CENTERS, localization_points);
fixationPoints(localization_points);
bool square_flag = false, local_points_flag = false;
double triangle_sides[3];
triangle_sides[0] = norm(localization_points[0] - localization_points[1]);
triangle_sides[1] = norm(localization_points[1] - localization_points[2]);
triangle_sides[2] = norm(localization_points[2] - localization_points[0]);
double triangle_perim = (triangle_sides[0] + triangle_sides[1] + triangle_sides[2]) / 2;
double square_area = sqrt((triangle_perim * (triangle_perim - triangle_sides[0])
* (triangle_perim - triangle_sides[1])
* (triangle_perim - triangle_sides[2]))) * 2;
double img_square_area = bin_barcode.cols * bin_barcode.rows;
if (square_area > (img_square_area * 0.2))
{
square_flag = true;
}
if (localization_points.size() != 3)
{
local_points_flag = true;
}
if ((square_flag || local_points_flag) && purpose == SHRINKING)
{
localization_points.clear();
bin_barcode = resized_bin_barcode.clone();
list_lines_x = searchHorizontalLines();
if( list_lines_x.empty() ) { return false; }
list_lines_y = separateVerticalLines(list_lines_x);
if( list_lines_y.empty() ) { return false; }
kmeans(list_lines_y, 3, labels,
       TermCriteria( TermCriteria::EPS + TermCriteria::COUNT, 10, 0.1),
       3, KMEANS_PP_CENTERS, localization_points);
@@ -327,7 +394,18 @@ bool QRDetect::localization()
fixationPoints(localization_points);
if (localization_points.size() != 3) { return false; }

- if (coeff_expansion > 1.0)
+ const int width = cvRound(bin_barcode.size().width * coeff_expansion);
const int height = cvRound(bin_barcode.size().height * coeff_expansion);
Size new_size(width, height);
Mat intermediate;
resize(bin_barcode, intermediate, new_size, 0, 0, INTER_LINEAR);
bin_barcode = intermediate.clone();
for (size_t i = 0; i < localization_points.size(); i++)
{
localization_points[i] *= coeff_expansion;
}
}
if (purpose == ZOOMING)
{
const int width = cvRound(bin_barcode.size().width / coeff_expansion);
const int height = cvRound(bin_barcode.size().height / coeff_expansion);
@@ -475,6 +553,13 @@ bool QRDetect::computeTransformationPoints()
vector<Point2f> quadrilateral = getQuadrilateral(transformation_points);
transformation_points = quadrilateral;
int width = bin_barcode.size().width;
int height = bin_barcode.size().height;
for (size_t i = 0; i < transformation_points.size(); i++)
{
if ((cvRound(transformation_points[i].x) > width) ||
(cvRound(transformation_points[i].y) > height)) { return false; }
}
return true;
}
@@ -826,9 +911,27 @@ protected:
void QRDecode::init(const Mat &src, const vector<Point2f> &points)
{
CV_TRACE_FUNCTION();
vector<Point2f> bbox = points;
double coeff_expansion;
const int min_side = std::min(src.size().width, src.size().height);
if (min_side > 512)
{
coeff_expansion = min_side / 512;
const int width = cvRound(src.size().width / coeff_expansion);
const int height = cvRound(src.size().height / coeff_expansion);
Size new_size(width, height);
resize(src, original, new_size, 0, 0, INTER_AREA);
for (size_t i = 0; i < bbox.size(); i++)
{
bbox[i] /= static_cast<float>(coeff_expansion);
}
}
else
{
original = src.clone();
}
- intermediate = Mat::zeros(src.size(), CV_8UC1);
- original_points = points;
+ intermediate = Mat::zeros(original.size(), CV_8UC1);
+ original_points = bbox;
version = 0;
version_size = 0;
test_perspective_size = 251;
......
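The objdetect changes above teach QRDetect to downscale oversized inputs (the new SHRINKING branch) and retry localization on the resized image, so callers can pass full-resolution frames directly. A minimal detection sketch using the public API that the tests below exercise (the image path is an assumption):

#include <opencv2/imgcodecs.hpp>
#include <opencv2/objdetect.hpp>
#include <string>
#include <vector>

int main()
{
    cv::Mat img = cv::imread("qr_sample.png", cv::IMREAD_GRAYSCALE);
    if (img.empty()) return 1;

    cv::QRCodeDetector qrcode;
    std::vector<cv::Point> corners;
    cv::Mat straight;
    // Decoding requires OpenCV built with quirc (HAVE_QUIRC); detection alone does not.
    std::string info = qrcode.detectAndDecode(img, corners, straight);
    return corners.empty() ? 1 : 0;
}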
@@ -15,6 +15,12 @@ std::string qrcode_images_name[] = {
"russian.jpg", "kanji.jpg", "link_github_ocv.jpg", "link_ocv.jpg", "link_wiki_cv.jpg"
};
std::string qrcode_images_close[] = {
"close_1.png", "close_2.png", "close_3.png", "close_4.png", "close_5.png"
};
std::string qrcode_images_monitor[] = {
"monitor_1.png", "monitor_2.png", "monitor_3.png", "monitor_4.png", "monitor_5.png"
};
// #define UPDATE_QRCODE_TEST_DATA
#ifdef UPDATE_QRCODE_TEST_DATA

@@ -51,6 +57,83 @@ TEST(Objdetect_QRCode, generate_test_data)
file_config.release();
}
TEST(Objdetect_QRCode_Close, generate_test_data)
{
const std::string root = "qrcode/close/";
const std::string dataset_config = findDataFile(root + "dataset_config.json");
FileStorage file_config(dataset_config, FileStorage::WRITE);
file_config << "close_images" << "[";
size_t close_count = sizeof(qrcode_images_close) / sizeof(qrcode_images_close[0]);
for (size_t i = 0; i < close_count; i++)
{
file_config << "{:" << "image_name" << qrcode_images_close[i];
std::string image_path = findDataFile(root + qrcode_images_close[i]);
std::vector<Point> corners;
Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
std::string decoded_info;
ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
const double min_side = std::min(src.size().width, src.size().height);
double coeff_expansion = 1024.0 / min_side;
const int width = cvRound(src.size().width * coeff_expansion);
const int height = cvRound(src.size().height * coeff_expansion);
Size new_size(width, height);
resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
EXPECT_TRUE(detectQRCode(barcode, corners));
#ifdef HAVE_QUIRC
EXPECT_TRUE(decodeQRCode(barcode, corners, decoded_info, straight_barcode));
#endif
file_config << "x" << "[:";
for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].x; }
file_config << "]";
file_config << "y" << "[:";
for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].y; }
file_config << "]";
file_config << "info" << decoded_info;
file_config << "}";
}
file_config << "]";
file_config.release();
}
TEST(Objdetect_QRCode_Monitor, generate_test_data)
{
const std::string root = "qrcode/monitor/";
const std::string dataset_config = findDataFile(root + "dataset_config.json");
FileStorage file_config(dataset_config, FileStorage::WRITE);
file_config << "monitor_images" << "[";
size_t monitor_count = sizeof(qrcode_images_monitor) / sizeof(qrcode_images_monitor[0]);
for (size_t i = 0; i < monitor_count; i++)
{
file_config << "{:" << "image_name" << qrcode_images_monitor[i];
std::string image_path = findDataFile(root + qrcode_images_monitor[i]);
std::vector<Point> corners;
Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
std::string decoded_info;
ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
const double min_side = std::min(src.size().width, src.size().height);
double coeff_expansion = 1024.0 / min_side;
const int width = cvRound(src.size().width * coeff_expansion);
const int height = cvRound(src.size().height * coeff_expansion);
Size new_size(width, height);
resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
EXPECT_TRUE(detectQRCode(barcode, corners));
#ifdef HAVE_QUIRC
EXPECT_TRUE(decodeQRCode(barcode, corners, decoded_info, straight_barcode));
#endif
file_config << "x" << "[:";
for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].x; }
file_config << "]";
file_config << "y" << "[:";
for (size_t j = 0; j < corners.size(); j++) { file_config << corners[j].y; }
file_config << "]";
file_config << "info" << decoded_info;
file_config << "}";
}
file_config << "]";
file_config.release();
}
#else

typedef testing::TestWithParam< std::string > Objdetect_QRCode;

@@ -113,9 +196,139 @@ TEST_P(Objdetect_QRCode, regression)
}
}

- INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode, testing::ValuesIn(qrcode_images_name));
+ typedef testing::TestWithParam< std::string > Objdetect_QRCode_Close;
TEST_P(Objdetect_QRCode_Close, regression)
{
const std::string name_current_image = GetParam();
const std::string root = "qrcode/close/";
const int pixels_error = 3;
std::string image_path = findDataFile(root + name_current_image);
Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
const double min_side = std::min(src.size().width, src.size().height);
double coeff_expansion = 1024.0 / min_side;
const int width = cvRound(src.size().width * coeff_expansion);
const int height = cvRound(src.size().height * coeff_expansion);
Size new_size(width, height);
resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
std::vector<Point> corners;
std::string decoded_info;
QRCodeDetector qrcode;
#ifdef HAVE_QUIRC
decoded_info = qrcode.detectAndDecode(barcode, corners, straight_barcode);
ASSERT_FALSE(corners.empty());
ASSERT_FALSE(decoded_info.empty());
#else
ASSERT_TRUE(qrcode.detect(src, corners));
#endif
const std::string dataset_config = findDataFile(root + "dataset_config.json");
FileStorage file_config(dataset_config, FileStorage::READ);
ASSERT_TRUE(file_config.isOpened()) << "Can't read validation data: " << dataset_config;
{
FileNode images_list = file_config["close_images"];
size_t images_count = static_cast<size_t>(images_list.size());
ASSERT_GT(images_count, 0u) << "Can't find validation data entries in 'test_images': " << dataset_config;
for (size_t index = 0; index < images_count; index++)
{
FileNode config = images_list[(int)index];
std::string name_test_image = config["image_name"];
if (name_test_image == name_current_image)
{
for (int i = 0; i < 4; i++)
{
int x = config["x"][i];
int y = config["y"][i];
EXPECT_NEAR(x, corners[i].x, pixels_error);
EXPECT_NEAR(y, corners[i].y, pixels_error);
}
#ifdef HAVE_QUIRC
std::string original_info = config["info"];
EXPECT_EQ(decoded_info, original_info);
#endif
return; // done
}
}
std::cerr
<< "Not found results for '" << name_current_image
<< "' image in config file:" << dataset_config << std::endl
<< "Re-run tests with enabled UPDATE_QRCODE_TEST_DATA macro to update test data."
<< std::endl;
}
}
typedef testing::TestWithParam< std::string > Objdetect_QRCode_Monitor;
TEST_P(Objdetect_QRCode_Monitor, regression)
{
const std::string name_current_image = GetParam();
const std::string root = "qrcode/monitor/";
const int pixels_error = 3;
std::string image_path = findDataFile(root + name_current_image);
Mat src = imread(image_path, IMREAD_GRAYSCALE), barcode, straight_barcode;
ASSERT_FALSE(src.empty()) << "Can't read image: " << image_path;
const double min_side = std::min(src.size().width, src.size().height);
double coeff_expansion = 1024.0 / min_side;
const int width = cvRound(src.size().width * coeff_expansion);
const int height = cvRound(src.size().height * coeff_expansion);
Size new_size(width, height);
resize(src, barcode, new_size, 0, 0, INTER_LINEAR);
std::vector<Point> corners;
std::string decoded_info;
QRCodeDetector qrcode;
#ifdef HAVE_QUIRC
decoded_info = qrcode.detectAndDecode(barcode, corners, straight_barcode);
ASSERT_FALSE(corners.empty());
ASSERT_FALSE(decoded_info.empty());
#else
ASSERT_TRUE(qrcode.detect(src, corners));
#endif
const std::string dataset_config = findDataFile(root + "dataset_config.json");
FileStorage file_config(dataset_config, FileStorage::READ);
ASSERT_TRUE(file_config.isOpened()) << "Can't read validation data: " << dataset_config;
{
FileNode images_list = file_config["monitor_images"];
size_t images_count = static_cast<size_t>(images_list.size());
ASSERT_GT(images_count, 0u) << "Can't find validation data entries in 'test_images': " << dataset_config;
for (size_t index = 0; index < images_count; index++)
{
FileNode config = images_list[(int)index];
std::string name_test_image = config["image_name"];
if (name_test_image == name_current_image)
{
for (int i = 0; i < 4; i++)
{
int x = config["x"][i];
int y = config["y"][i];
EXPECT_NEAR(x, corners[i].x, pixels_error);
EXPECT_NEAR(y, corners[i].y, pixels_error);
}
#ifdef HAVE_QUIRC
std::string original_info = config["info"];
EXPECT_EQ(decoded_info, original_info);
#endif
return; // done
}
}
std::cerr
<< "Not found results for '" << name_current_image
<< "' image in config file:" << dataset_config << std::endl
<< "Re-run tests with enabled UPDATE_QRCODE_TEST_DATA macro to update test data."
<< std::endl;
}
}
INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode, testing::ValuesIn(qrcode_images_name));
INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode_Close, testing::ValuesIn(qrcode_images_close));
INSTANTIATE_TEST_CASE_P(/**/, Objdetect_QRCode_Monitor, testing::ValuesIn(qrcode_images_monitor));
TEST(Objdetect_QRCode_basic, not_found_qrcode)
{
......
@@ -2946,6 +2946,22 @@ int videoInput::start(int deviceID, videoDevice *VD){
DebugPrintOut("SETUP: Device is setup and ready to capture.\n\n");
VD->readyToCapture = true;
// check for optional saving the direct show graph to a file
const char* graph_filename = getenv("OPENCV_DSHOW_SAVEGRAPH_FILENAME");
if (graph_filename) {
size_t filename_len = strlen(graph_filename);
std::vector<WCHAR> wfilename(filename_len + 1);
size_t len = mbstowcs(&wfilename[0], graph_filename, filename_len + 1);
CV_Assert(len == filename_len);
HRESULT res = SaveGraphFile(VD->pGraph, &wfilename[0]);
if (SUCCEEDED(res)) {
DebugPrintOut("Saved DSHOW graph to %s\n", graph_filename);
} else {
DebugPrintOut("Failed to save DSHOW graph to %s\n", graph_filename);
}
}
//Release filters - seen someone else do this
//looks like it solved the freezes
......
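The cap_dshow hunk above adds an optional dump of the DirectShow filter graph, keyed off the OPENCV_DSHOW_SAVEGRAPH_FILENAME environment variable read with getenv(). A hedged, Windows-only sketch of how it might be exercised; the destination path, the camera index, and the assumption that the variable must be set before the capture is opened are all illustrative:

#include <stdlib.h>
#include <opencv2/videoio.hpp>

int main()
{
    // Set the variable before the DirectShow graph is built (Windows CRT _putenv).
    _putenv("OPENCV_DSHOW_SAVEGRAPH_FILENAME=C:\\temp\\capture_graph.grf");

    // Open a camera through the DSHOW backend; videoInput::start() will then
    // attempt to save the graph to that path and log success or failure.
    cv::VideoCapture cap(0, cv::CAP_DSHOW);
    return cap.isOpened() ? 0 : 1;
}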