Commit b68df415 authored by Vadim Pisarevsky, committed by OpenCV Buildbot

Merge pull request #135 from nevion:master

parents 47963f00 4cb25e95
......@@ -118,6 +118,38 @@ These values are proved to be invariants to the image scale, rotation, and refle
.. seealso:: :ocv:func:`matchShapes`
connectedComponents
-----------------------
Computes the connected components labeled image of a boolean image ``image`` with 4- or 8-way connectivity. Returns N, the total number of labels in the range [0, N-1], where 0 represents the background label. ``ltype`` specifies the output label image type, an important consideration based on the total number of labels or, alternatively, the total number of pixels in the source image. A brief usage sketch follows the parameter descriptions below.
.. ocv:function:: int connectedComponents(InputArray image, OutputArray labels, int connectivity = 8, int ltype=CV_32S)
.. ocv:function:: int connectedComponentsWithStats(InputArray image, OutputArray labels, OutputArray stats, OutputArray centroids, int connectivity = 8, int ltype=CV_32S)
:param image: the image to be labeled
:param labels: destination labeled image
:param connectivity: 8 or 4, for 8-way or 4-way connectivity respectively
:param ltype: output image label type. Currently ``CV_32S`` and ``CV_16U`` are supported.
:param stats: statistics output for each label, including the background label. Statistics are accessed via ``stats(label, COLUMN)``, where the available columns are listed below.
* **CC_STAT_LEFT** The leftmost (x) coordinate, which is the inclusive start of the bounding box in the horizontal direction.
* **CC_STAT_TOP** The topmost (y) coordinate, which is the inclusive start of the bounding box in the vertical direction.
* **CC_STAT_WIDTH** The horizontal size of the bounding box.
* **CC_STAT_HEIGHT** The vertical size of the bounding box.
* **CC_STAT_AREA** The total area (in pixels) of the connected component.
:param centroids: floating point centroid (x, y) output for each label, including the background label.
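A brief usage sketch (illustrative only: the file name and threshold are placeholders, and the statistics matrix is assumed to be ``CV_32S`` with centroids stored as ``CV_64F``) ::

    #include "opencv2/imgproc/imgproc.hpp"
    #include "opencv2/highgui/highgui.hpp"
    #include <cstdio>
    using namespace cv;

    int main()
    {
        Mat src = imread("blobs.png", 0);   // hypothetical 8-bit grayscale input
        if( src.empty() )
            return -1;
        Mat bw = src > 128;                 // boolean mask: nonzero pixels are foreground
        Mat labels, stats, centroids;
        int nLabels = connectedComponentsWithStats(bw, labels, stats, centroids, 8, CV_32S);
        for( int label = 1; label < nLabels; ++label ) // label 0 is the background
        {
            printf("component %d: area=%d centroid=(%.1f, %.1f)\n", label,
                   stats.at<int>(label, CC_STAT_AREA),
                   centroids.at<double>(label, 0), centroids.at<double>(label, 1));
        }
        return 0;
    }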
findContours
----------------
......
......@@ -1102,6 +1102,20 @@ enum { TM_SQDIFF=0, TM_SQDIFF_NORMED=1, TM_CCORR=2, TM_CCORR_NORMED=3, TM_CCOEFF
CV_EXPORTS_W void matchTemplate( InputArray image, InputArray templ,
OutputArray result, int method );
enum { CC_STAT_LEFT=0, CC_STAT_TOP=1, CC_STAT_WIDTH=2, CC_STAT_HEIGHT=3, CC_STAT_AREA=4, CC_STAT_MAX = 5};
// computes the connected components labeled image of boolean image ``image``
// with 4 or 8 way connectivity - returns N, the total
// number of labels [0, N-1] where 0 represents the background label.
// ltype specifies the output label image type, an important
// consideration based on the total number of labels or
// alternatively the total number of pixels in the source image.
CV_EXPORTS_W int connectedComponents(InputArray image, OutputArray labels,
int connectivity = 8, int ltype=CV_32S);
CV_EXPORTS_W int connectedComponentsWithStats(InputArray image, OutputArray labels,
OutputArray stats, OutputArray centroids,
int connectivity = 8, int ltype=CV_32S);
//! mode of the contour retrieval algorithm
enum
{
......
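As the header comment above notes, ``ltype`` should be chosen from the worst-case number of labels, which can never exceed the number of pixels in the source image. A minimal sketch of that choice (not part of this patch; the helper name is illustrative):

#include "opencv2/imgproc/imgproc.hpp"
using namespace cv;

// Illustrative helper: CV_16U is sufficient when the pixel count guarantees
// fewer than 65536 labels; CV_32S is the safe default otherwise.
static int labelMask(const Mat& bw, Mat& labels)
{
    int ltype = (bw.total() < ((size_t)1 << 16)) ? CV_16U : CV_32S;
    return connectedComponents(bw, labels, 8, ltype);
}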
/*M///////////////////////////////////////////////////////////////////////////////////////
//
// IMPORTANT: READ BEFORE DOWNLOADING, COPYING, INSTALLING OR USING.
//
// By downloading, copying, installing or using the software you agree to this license.
// If you do not agree to this license, do not download, install,
// copy or use the software.
//
//
// License Agreement
// For Open Source Computer Vision Library
//
// Copyright (C) 2000-2008, Intel Corporation, all rights reserved.
// Copyright (C) 2009, Willow Garage Inc., all rights reserved.
// Third party copyrights are property of their respective owners.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistribution's of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistribution's in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * The name of the copyright holders may not be used to endorse or promote products
// derived from this software without specific prior written permission.
//
// This software is provided by the copyright holders and contributors "as is" and
// any express or implied warranties, including, but not limited to, the implied
// warranties of merchantability and fitness for a particular purpose are disclaimed.
// In no event shall the Intel Corporation or contributors be liable for any direct,
// indirect, incidental, special, exemplary, or consequential damages
// (including, but not limited to, procurement of substitute goods or services;
// loss of use, data, or profits; or business interruption) however caused
// and on any theory of liability, whether in contract, strict liability,
// or tort (including negligence or otherwise) arising in any way out of
// the use of this software, even if advised of the possibility of such damage.
//
//M*/
#include "test_precomp.hpp"
#include <string>
using namespace cv;
using namespace std;
class CV_ConnectedComponentsTest : public cvtest::BaseTest
{
public:
CV_ConnectedComponentsTest();
~CV_ConnectedComponentsTest();
protected:
void run(int);
};
CV_ConnectedComponentsTest::CV_ConnectedComponentsTest() {}
CV_ConnectedComponentsTest::~CV_ConnectedComponentsTest() {}
void CV_ConnectedComponentsTest::run( int /* start_from */)
{
string exp_path = string(ts->get_data_path()) + "connectedcomponents/ccomp_exp.png";
Mat exp = imread(exp_path, 0);
Mat orig = imread(string(ts->get_data_path()) + "connectedcomponents/concentric_circles.png", 0);
if (orig.empty())
{
ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_TEST_DATA );
return;
}
Mat bw = orig > 128;
Mat labelImage;
int nLabels = connectedComponents(bw, labelImage, 8, CV_32S);
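// every label produced must lie in the valid range reported by connectedComponents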
for(int r = 0; r < labelImage.rows; ++r){
for(int c = 0; c < labelImage.cols; ++c){
int l = labelImage.at<int>(r, c);
bool pass = l >= 0 && l < nLabels; // labels must lie in [0, nLabels-1]
if(!pass){
ts->set_failed_test_info( cvtest::TS::FAIL_INVALID_OUTPUT );
return;
}
}
}
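// (re)create the reference labeling when the test data is missing or its size does not match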
if( exp.empty() || orig.size() != exp.size() )
{
imwrite(exp_path, labelImage);
exp = labelImage;
}
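// the foreground/background partition of the labeling must match the reference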
if (0 != norm(labelImage > 0, exp > 0, NORM_INF))
{
ts->set_failed_test_info( cvtest::TS::FAIL_MISMATCH );
return;
}
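// labels must be contiguous: the largest label value equals nLabels-1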
if (nLabels != norm(labelImage, NORM_INF)+1)
{
ts->set_failed_test_info( cvtest::TS::FAIL_MISMATCH );
return;
}
ts->set_failed_test_info(cvtest::TS::OK);
}
TEST(Imgproc_ConnectedComponents, regression) { CV_ConnectedComponentsTest test; test.safe_run(); }
......@@ -410,7 +410,7 @@ static bool pyopencv_to(PyObject* obj, bool& value, const char* name = "<unknown
static PyObject* pyopencv_from(size_t value)
{
return PyLong_FromUnsignedLong((unsigned long)value);
return PyLong_FromSize_t(value);
}
static bool pyopencv_to(PyObject* obj, size_t& value, const char* name = "<unknown>")
......@@ -497,9 +497,16 @@ static bool pyopencv_to(PyObject* obj, float& value, const char* name = "<unknow
static PyObject* pyopencv_from(int64 value)
{
return PyFloat_FromDouble((double)value);
return PyLong_FromLongLong(value);
}
#if !defined(__LP64__)
static PyObject* pyopencv_from(uint64 value)
{
return PyLong_FromUnsignedLongLong(value);
}
#endif
static PyObject* pyopencv_from(const string& value)
{
return PyString_FromString(value.empty() ? "" : value.c_str());
......
......@@ -11,25 +11,21 @@ int threshval = 100;
static void on_trackbar(int, void*)
{
Mat bw = threshval < 128 ? (img < threshval) : (img > threshval);
vector<vector<Point> > contours;
vector<Vec4i> hierarchy;
findContours( bw, contours, hierarchy, CV_RETR_CCOMP, CV_CHAIN_APPROX_SIMPLE );
Mat dst = Mat::zeros(img.size(), CV_8UC3);
if( !contours.empty() && !hierarchy.empty() )
{
// iterate through all the top-level contours,
// draw each connected component with its own random color
int idx = 0;
for( ; idx >= 0; idx = hierarchy[idx][0] )
{
Scalar color( (rand()&255), (rand()&255), (rand()&255) );
drawContours( dst, contours, idx, color, CV_FILLED, 8, hierarchy );
}
Mat labelImage(img.size(), CV_32S);
int nLabels = connectedComponents(bw, labelImage, 8);
std::vector<Vec3b> colors(nLabels);
colors[0] = Vec3b(0, 0, 0);//background
for(int label = 1; label < nLabels; ++label){
colors[label] = Vec3b( (rand()&255), (rand()&255), (rand()&255) );
}
Mat dst(img.size(), CV_8UC3);
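// paint every pixel with the color assigned to its component label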
for(int r = 0; r < dst.rows; ++r){
for(int c = 0; c < dst.cols; ++c){
int label = labelImage.at<int>(r, c);
Vec3b &pixel = dst.at<Vec3b>(r, c);
pixel = colors[label];
}
}
imshow( "Connected Components", dst );
}
......@@ -45,14 +41,14 @@ static void help()
const char* keys =
{
"{@image |stuff.jpg|image for converting to a grayscale}"
"{@image|stuff.jpg|image for converting to a grayscale}"
};
int main( int argc, const char** argv )
{
help();
CommandLineParser parser(argc, argv, keys);
string inputImage = parser.get<string>(1);
string inputImage = parser.get<string>("@image");
img = imread(inputImage.c_str(), 0);
if(img.empty())
......