Commit bf0075a5 authored by Alexander Alekhin's avatar Alexander Alekhin

Merge remote-tracking branch 'upstream/3.4' into merge-3.4

parents d442856f 2a32a5ad
//! [charucohdr]
#include <opencv2/aruco/charuco.hpp>
//! [charucohdr]
#include <opencv2/highgui.hpp>
#include <iostream>
#include <string>
namespace {
const char* about = "A tutorial code on charuco board creation and detection of charuco board with and without camera calibration";
const char* keys = "{c | | Put value of c=1 to create charuco board;\nc=2 to detect charuco board without camera calibration;\nc=3 to detect charuco board with camera calibration and Pose Estimation}";
}
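// Example invocations of the compiled sample, matching the "keys" string above
// (the binary name below is illustrative only, not fixed by this file):
//   example_charuco_create_detect -c=1   -> create the ChArUco board image
//   example_charuco_create_detect -c=2   -> detect a ChArUco board without camera calibration
//   example_charuco_create_detect -c=3   -> detect a ChArUco board with camera calibration and pose estimation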
void createBoard();
void detectCharucoBoardWithCalibrationPose();
void detectCharucoBoardWithoutCalibration();
static bool readCameraParameters(std::string filename, cv::Mat& camMatrix, cv::Mat& distCoeffs)
{
    cv::FileStorage fs(filename, cv::FileStorage::READ);
    if (!fs.isOpened())
        return false;
    fs["camera_matrix"] >> camMatrix;
    fs["distortion_coefficients"] >> distCoeffs;
    return true;
}
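// Illustrative sketch only (not part of the original sample): readCameraParameters() above expects a
// cv::FileStorage document containing "camera_matrix" and "distortion_coefficients". A hypothetical
// helper that writes such a file could look like this; the intrinsics below are placeholders and
// real values would come from calibrating the camera:
static void writeCameraParametersSketch(const std::string& filename)
{
    cv::FileStorage fs(filename, cv::FileStorage::WRITE);
    if (!fs.isOpened())
        return;
    cv::Mat camMatrix = (cv::Mat_<double>(3, 3) << 800, 0, 320,
                                                     0, 800, 240,
                                                     0,   0,   1);  // placeholder intrinsics
    cv::Mat distCoeffs = cv::Mat::zeros(1, 5, CV_64FC1);            // placeholder distortion coefficients
    fs << "camera_matrix" << camMatrix;
    fs << "distortion_coefficients" << distCoeffs;
}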
void createBoard()
{
    cv::Ptr<cv::aruco::Dictionary> dictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
    //! [createBoard]
    cv::Ptr<cv::aruco::CharucoBoard> board = cv::aruco::CharucoBoard::create(5, 7, 0.04f, 0.02f, dictionary);
    cv::Mat boardImage;
    board->draw(cv::Size(600, 500), boardImage, 10, 1);
    //! [createBoard]
    cv::imwrite("BoardImage.jpg", boardImage);
}
//! [detwcp]
void detectCharucoBoardWithCalibrationPose()
{
    cv::VideoCapture inputVideo;
    inputVideo.open(0);
    //! [matdiscoff]
    cv::Mat cameraMatrix, distCoeffs;
    std::string filename = "calib.txt";
    bool readOk = readCameraParameters(filename, cameraMatrix, distCoeffs);
    //! [matdiscoff]
    if (!readOk) {
        std::cerr << "Invalid camera file" << std::endl;
    } else {
        //! [dictboard]
        cv::Ptr<cv::aruco::Dictionary> dictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
        cv::Ptr<cv::aruco::CharucoBoard> board = cv::aruco::CharucoBoard::create(5, 7, 0.04f, 0.02f, dictionary);
        cv::Ptr<cv::aruco::DetectorParameters> params = cv::aruco::DetectorParameters::create();
        //! [dictboard]
        while (inputVideo.grab()) {
            //! [inputImg]
            cv::Mat image;
            //! [inputImg]
            cv::Mat imageCopy;
            inputVideo.retrieve(image);
            image.copyTo(imageCopy);
            //! [midcornerdet]
            std::vector<int> markerIds;
            std::vector<std::vector<cv::Point2f> > markerCorners;
            cv::aruco::detectMarkers(image, board->dictionary, markerCorners, markerIds, params);
            //! [midcornerdet]
            // if at least one marker detected
            if (markerIds.size() > 0) {
                cv::aruco::drawDetectedMarkers(imageCopy, markerCorners, markerIds);
                //! [charidcor]
                std::vector<cv::Point2f> charucoCorners;
                std::vector<int> charucoIds;
                cv::aruco::interpolateCornersCharuco(markerCorners, markerIds, image, board, charucoCorners, charucoIds, cameraMatrix, distCoeffs);
                //! [charidcor]
                // if at least one charuco corner detected
                if (charucoIds.size() > 0) {
                    cv::Scalar color = cv::Scalar(255, 0, 0);
                    //! [detcor]
                    cv::aruco::drawDetectedCornersCharuco(imageCopy, charucoCorners, charucoIds, color);
                    //! [detcor]
                    cv::Vec3d rvec, tvec;
                    //! [pose]
                    // cv::aruco::estimatePoseCharucoBoard(charucoCorners, charucoIds, board, cameraMatrix, distCoeffs, rvec, tvec);
                    //! [pose]
                    bool valid = cv::aruco::estimatePoseCharucoBoard(charucoCorners, charucoIds, board, cameraMatrix, distCoeffs, rvec, tvec);
                    // if charuco pose is valid
                    if (valid)
                        cv::aruco::drawAxis(imageCopy, cameraMatrix, distCoeffs, rvec, tvec, 0.1f);
                }
            }
            cv::imshow("out", imageCopy);
            char key = (char)cv::waitKey(30);
            if (key == 27)
                break;
        }
    }
}
//! [detwcp]
//! [detwc]
void detectCharucoBoardWithoutCalibration()
{
    cv::VideoCapture inputVideo;
    inputVideo.open(0);
    cv::Ptr<cv::aruco::Dictionary> dictionary = cv::aruco::getPredefinedDictionary(cv::aruco::DICT_6X6_250);
    cv::Ptr<cv::aruco::CharucoBoard> board = cv::aruco::CharucoBoard::create(5, 7, 0.04f, 0.02f, dictionary);
    cv::Ptr<cv::aruco::DetectorParameters> params = cv::aruco::DetectorParameters::create();
    params->cornerRefinementMethod = cv::aruco::CORNER_REFINE_NONE;
    while (inputVideo.grab()) {
        cv::Mat image, imageCopy;
        inputVideo.retrieve(image);
        image.copyTo(imageCopy);
        std::vector<int> markerIds;
        std::vector<std::vector<cv::Point2f> > markerCorners;
        cv::aruco::detectMarkers(image, board->dictionary, markerCorners, markerIds, params);
        // or
        // cv::aruco::detectMarkers(image, dictionary, markerCorners, markerIds, params);
        // if at least one marker detected
        if (markerIds.size() > 0) {
            cv::aruco::drawDetectedMarkers(imageCopy, markerCorners, markerIds);
            //! [charidcorwc]
            std::vector<cv::Point2f> charucoCorners;
            std::vector<int> charucoIds;
            cv::aruco::interpolateCornersCharuco(markerCorners, markerIds, image, board, charucoCorners, charucoIds);
            //! [charidcorwc]
            // if at least one charuco corner detected
            if (charucoIds.size() > 0)
                cv::aruco::drawDetectedCornersCharuco(imageCopy, charucoCorners, charucoIds, cv::Scalar(255, 0, 0));
        }
        cv::imshow("out", imageCopy);
        char key = (char)cv::waitKey(30);
        if (key == 27)
            break;
    }
}
//! [detwc]
int main(int argc, char* argv[])
{
    cv::CommandLineParser parser(argc, argv, keys);
    parser.about(about);
    if (argc < 2) {
        parser.printMessage();
        return 0;
    }
    int choose = parser.get<int>("c");
    switch (choose) {
    case 1:
        createBoard();
        std::cout << "An image named BoardImage.jpg is generated in the current working directory" << std::endl;
        break;
    case 2:
        detectCharucoBoardWithoutCalibration();
        break;
    case 3:
        detectCharucoBoardWithCalibrationPose();
        break;
    default:
        break;
    }
    return 0;
}
@@ -75,20 +75,21 @@ public:
      * @param nbrOfSmallBins Number of bins between 0 and "histThresh". Default value is 10.
      * @param nbrOfLargeBins Number of bins between "histThresh" and 32*pi*pi (highest edge reliability value). Default value is 5.
      */
-    struct CV_EXPORTS Params
+    struct CV_EXPORTS_W_SIMPLE Params
     {
-        Params();
-        int width;
-        int height;
-        float histThresh;
-        int nbrOfSmallBins;
-        int nbrOfLargeBins;
+        CV_WRAP Params();
+        CV_PROP_RW int width;
+        CV_PROP_RW int height;
+        CV_PROP_RW float histThresh;
+        CV_PROP_RW int nbrOfSmallBins;
+        CV_PROP_RW int nbrOfLargeBins;
     };
     /**
      * @brief Constructor
      * @param parameters HistogramPhaseUnwrapping parameters HistogramPhaseUnwrapping::Params: width,height of the phase map and histogram characteristics.
      */
+    CV_WRAP
     static Ptr<HistogramPhaseUnwrapping> create( const HistogramPhaseUnwrapping::Params &parameters =
                                                  HistogramPhaseUnwrapping::Params() );
...
#ifdef HAVE_OPENCV_PHASE_UNWRAPPING
typedef cv::phase_unwrapping::HistogramPhaseUnwrapping::Params HistogramPhaseUnwrapping_Params;
#endif
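// Illustrative usage sketch (an assumption, not part of this commit): the CV_EXPORTS_W_SIMPLE,
// CV_WRAP and CV_PROP_RW markers added above expose the existing C++ API, which is used like this:
//
//     cv::phase_unwrapping::HistogramPhaseUnwrapping::Params params;
//     params.width = 800;                       // width of the wrapped phase map
//     params.height = 600;                      // height of the wrapped phase map
//     cv::Ptr<cv::phase_unwrapping::HistogramPhaseUnwrapping> unwrapper =
//         cv::phase_unwrapping::HistogramPhaseUnwrapping::create(params);
//     cv::Mat wrapped(600, 800, CV_32FC1);      // wrapped phase values in [-pi, pi] go here
//     cv::Mat unwrapped;
//     unwrapper->unwrapPhaseMap(wrapped, unwrapped);
//
// From Python, the bound names would presumably follow the same pattern as the structured_light
// test below (e.g. cv.phase_unwrapping_HistogramPhaseUnwrapping_Params()), though the exact
// binding names are an assumption here.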
@@ -712,7 +712,10 @@ void HistogramPhaseUnwrapping_Impl::addIncrement( OutputArray unwrappedPhaseMap
     int rows = params.height;
     int cols = params.width;
     if( uPhaseMap.empty() )
+    {
         uPhaseMap.create(rows, cols, CV_32FC1);
+        uPhaseMap = Scalar::all(0);
+    }
     int nbrOfPixels = static_cast<int>(pixels.size());
     for( int i = 0; i < nbrOfPixels; ++i )
     {
...
@@ -119,7 +119,7 @@ public:
      * @param shadowMask Mask used to discard shadow regions.
      */
     CV_WRAP
-    virtual void unwrapPhaseMap( InputArrayOfArrays wrappedPhaseMap,
+    virtual void unwrapPhaseMap( InputArray wrappedPhaseMap,
                                  OutputArray unwrappedPhaseMap,
                                  cv::Size camSize,
                                  InputArray shadowMask = noArray() ) = 0;
...
#!/usr/bin/env python

# Python 2/3 compatibility
from __future__ import print_function

import os, numpy
import unittest

import cv2 as cv

from tests_common import NewOpenCVTests

class structured_light_test(NewOpenCVTests):

    def test_unwrap(self):
        paramsPsp = cv.structured_light_SinusoidalPattern_Params()
        paramsFtp = cv.structured_light_SinusoidalPattern_Params()
        paramsFaps = cv.structured_light_SinusoidalPattern_Params()
        paramsPsp.methodId = cv.structured_light.PSP
        paramsFtp.methodId = cv.structured_light.FTP
        paramsFaps.methodId = cv.structured_light.FAPS

        sinusPsp = cv.structured_light.SinusoidalPattern_create(paramsPsp)
        sinusFtp = cv.structured_light.SinusoidalPattern_create(paramsFtp)
        sinusFaps = cv.structured_light.SinusoidalPattern_create(paramsFaps)

        captures = []
        for i in range(0, 3):
            capture = self.get_sample('/cv/structured_light/data/capture_sin_%d.jpg' % i, cv.IMREAD_GRAYSCALE)
            if capture is None:
                raise unittest.SkipTest("Missing files with test data")
            captures.append(capture)

        rows, cols = captures[0].shape

        unwrappedPhaseMapPspRef = self.get_sample('/cv/structured_light/data/unwrappedPspTest.jpg',
                                                  cv.IMREAD_GRAYSCALE)
        unwrappedPhaseMapFtpRef = self.get_sample('/cv/structured_light/data/unwrappedFtpTest.jpg',
                                                  cv.IMREAD_GRAYSCALE)
        unwrappedPhaseMapFapsRef = self.get_sample('/cv/structured_light/data/unwrappedFapsTest.jpg',
                                                   cv.IMREAD_GRAYSCALE)

        wrappedPhaseMap, shadowMask = sinusPsp.computePhaseMap(captures)
        unwrappedPhaseMap = sinusPsp.unwrapPhaseMap(wrappedPhaseMap, (cols, rows), shadowMask=shadowMask)
        unwrappedPhaseMap8 = unwrappedPhaseMap*1 + 128
        unwrappedPhaseMap8 = numpy.uint8(unwrappedPhaseMap8)

        sumOfDiff = 0
        count = 0
        for i in range(rows):
            for j in range(cols):
                ref = int(unwrappedPhaseMapPspRef[i, j])
                comp = int(unwrappedPhaseMap8[i, j])
                sumOfDiff += (ref - comp)
                count += 1
        ratio = sumOfDiff/float(count)
        self.assertLessEqual(ratio, 0.2)

        wrappedPhaseMap, shadowMask = sinusFtp.computePhaseMap(captures)
        unwrappedPhaseMap = sinusFtp.unwrapPhaseMap(wrappedPhaseMap, (cols, rows), shadowMask=shadowMask)
        unwrappedPhaseMap8 = unwrappedPhaseMap*1 + 128
        unwrappedPhaseMap8 = numpy.uint8(unwrappedPhaseMap8)

        sumOfDiff = 0
        count = 0
        for i in range(rows):
            for j in range(cols):
                ref = int(unwrappedPhaseMapFtpRef[i, j])
                comp = int(unwrappedPhaseMap8[i, j])
                sumOfDiff += (ref - comp)
                count += 1
        ratio = sumOfDiff/float(count)
        self.assertLessEqual(ratio, 0.2)

        wrappedPhaseMap, shadowMask = sinusFaps.computePhaseMap(captures)
        unwrappedPhaseMap = sinusFaps.unwrapPhaseMap(wrappedPhaseMap, (cols, rows), shadowMask=shadowMask)
        unwrappedPhaseMap8 = unwrappedPhaseMap*1 + 128
        unwrappedPhaseMap8 = numpy.uint8(unwrappedPhaseMap8)

        sumOfDiff = 0
        count = 0
        for i in range(rows):
            for j in range(cols):
                ref = int(unwrappedPhaseMapFapsRef[i, j])
                comp = int(unwrappedPhaseMap8[i, j])
                sumOfDiff += (ref - comp)
                count += 1
        ratio = sumOfDiff/float(count)
        self.assertLessEqual(ratio, 0.2)

if __name__ == '__main__':
    NewOpenCVTests.bootstrap()