Commit ad896ae6 authored by Maria Dimashova

refactored OpenNI integration

parent 5c3495a0
@@ -775,37 +775,11 @@ endif()
############################### OpenNI ################################
set(HAVE_OPENNI FALSE)
- set(HAVE_PRIME_SENSOR_FOR_OPENNI FALSE)
+ set(HAVE_OPENNI_PRIME_SENSOR_MODULE FALSE)
if(WITH_OPENNI)
- # find OpenNI library
- unset(OPENNI_LIBRARY CACHE)
- unset(PRIME_SENSOR_MODULES_FOR_OPENNI CACHE)
- find_library(OPENNI_LIBRARY "OpenNI" PATHS "/usr/lib" "c:/Program Files/OpenNI/Lib" DOC "OpenNI library")
- if(OPENNI_LIBRARY)
- set(HAVE_OPENNI TRUE)
- # set OpenNI include directory
- if(WIN32)
- set(OPENNI_INCLUDE_DIR "c:/Program Files/OpenNI/Include")
- elseif(UNIX OR APPLE)
- set(OPENNI_INCLUDE_DIR "/usr/include/ni")
- endif()
- # the check: are PrimeSensor Modules for OpenNI installed?
- if(WIN32)
- find_file(PRIME_SENSOR_MODULES_FOR_OPENNI "XnCore.dll" PATHS "c:/Program Files/Prime Sense/Sensor/Bin" DOC "Core library of PrimeSensor Modules for OpenNI")
- elseif(UNIX OR APPLE)
- find_library(PRIME_SENSOR_MODULES_FOR_OPENNI "XnCore" PATHS "/usr/lib" DOC "Core library of PrimeSensor Modules for OpenNI")
- endif()
- if(PRIME_SENSOR_MODULES_FOR_OPENNI)
- set(HAVE_PRIME_SENSOR_FOR_OPENNI TRUE)
- endif()
- endif() #if(OPENNI_LIBRARY)
- endif() #if(WITH_OPENNI)
+ include(OpenCVFindOpenNI.cmake)
+ endif()
############################## Eigen2 ##############################
@@ -1383,11 +1357,11 @@ else()
message(STATUS " OpenEXR: NO")
endif()
- if(NOT HAVE_OPENNI OR HAVE_PRIME_SENSOR_FOR_OPENNI)
+ if(NOT HAVE_OPENNI OR HAVE_OPENNI_PRIME_SENSOR_MODULE)
message(STATUS " OpenNI: ${HAVE_OPENNI}")
else()
- message(STATUS " OpenNI: ${HAVE_OPENNI} (WARNING: PrimeSensor Modules for OpenNI are not installed.)")
+ message(STATUS " OpenNI: ${HAVE_OPENNI} (WARNING: PrimeSensor Modules for OpenNI are not installed (not found in OPENNI_PRIME_SENSOR_MODULE_BIN_DIR).)")
- endif() #if(NOT HAVE_OPENNI OR HAVE_PRIME_SENSOR_FOR_OPENNI)
+ endif()
if(UNIX AND NOT APPLE)
message(STATUS "")
......
# Main variables:
# OPENNI_LIBRARY and OPENNI_INCLUDES to link OpenCV modules with OpenNI
# HAVE_OPENNI for conditional compilation OpenCV with/without OpenNI
set(OPENNI_LIB_DESCR "Path to the directory of OpenNI libraries" CACHE INTERNAL "Description" )
set(OPENNI_INCLUDE_DESCR "Path to the directory of OpenNI includes" CACHE INTERNAL "Description" )
set(OPENNI_PRIME_SENSOR_MODULE_BIN_DESCR "Path to the directory of PrimeSensor Module binaries" CACHE INTERNAL "Description" )
if(NOT "${OPENNI_LIB_DIR}" STREQUAL "${OPENNI_LIB_DIR_INTERNAL}")
unset(OPENNI_LIBRARY CACHE)
endif()
if(NOT "${OPENNI_INCLUDE_DIR}" STREQUAL "${OPENNI_INCLUDE_DIR_INTERNAL}")
unset(OPENNI_INCLUDES CACHE)
endif()
if(NOT "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}" STREQUAL "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}")
unset(OPENNI_PRIME_SENSOR_MODULE CACHE)
endif()
if(NOT OPENNI_LIB_DIR OR OPENNI_LIB_DIR STREQUAL "")
if(WIN32)
set(OPENNI_LIB_DIR "c:/Program Files/OpenNI/Lib" CACHE PATH ${OPENNI_LIB_DESCR})
elseif(UNIX OR APPLE)
set(OPENNI_LIB_DIR "/usr/lib" CACHE PATH ${OPENNI_LIB_DESCR})
endif()
endif()
if(NOT OPENNI_INCLUDE_DIR OR OPENNI_INCLUDE_DIR STREQUAL "")
if(WIN32)
set(OPENNI_INCLUDE_DIR "c:/Program Files/OpenNI/Include" CACHE PATH ${OPENNI_INCLUDE_DESCR})
elseif(UNIX OR APPLE)
set(OPENNI_INCLUDE_DIR "/usr/include/ni" CACHE PATH ${OPENNI_INCLUDE_DESCR})
endif()
endif()
if(NOT OPENNI_PRIME_SENSOR_MODULE_BIN_DIR OR OPENNI_PRIME_SENSOR_MODULE_BIN_DIR STREQUAL "")
if(WIN32)
set(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR "c:/Program Files/Prime Sense/Sensor/Bin" CACHE PATH ${OPENNI_PRIME_SENSOR_MODULE_BIN_DESCR})
elseif(UNIX OR APPLE)
set(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR "/usr/lib" CACHE PATH ${OPENNI_PRIME_SENSOR_MODULE_BIN_DESCR})
endif()
endif()
find_library(OPENNI_LIBRARY "OpenNI" PATHS ${OPENNI_LIB_DIR} DOC "OpenNI library" NO_DEFAULT_PATH)
find_path(OPENNI_INCLUDES "XnCppWrapper.h" PATHS ${OPENNI_INCLUDE_DIR} DOC "OpenNI c++ interface header" NO_DEFAULT_PATH)
if(OPENNI_LIBRARY AND OPENNI_INCLUDES)
set(HAVE_OPENNI TRUE)
# the check: are PrimeSensor Modules for OpenNI installed?
if(WIN32)
find_file(OPENNI_PRIME_SENSOR_MODULE "XnCore.dll" PATHS ${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR} DOC "Core library of PrimeSensor Modules for OpenNI" NO_DEFAULT_PATH)
elseif(UNIX OR APPLE)
find_library(OPENNI_PRIME_SENSOR_MODULE "XnCore" PATHS ${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR} DOC "Core library of PrimeSensor Modules for OpenNI" NO_DEFAULT_PATH)
endif()
if(OPENNI_PRIME_SENSOR_MODULE)
set(HAVE_OPENNI_PRIME_SENSOR_MODULE TRUE)
endif()
endif() #if(OPENNI_LIBRARY AND OPENNI_INCLUDES)
if(OPENNI_LIBRARY)
set(OPENNI_LIB_DIR_INTERNAL "${OPENNI_LIB_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI_LIB_DIR was set successfully." FORCE)
else()
set(OPENNI_LIB_DIR "${OPENNI_LIB_DIR}-NOTFOUND or does not have OpenNI libraries" CACHE PATH ${OPENNI_LIB_DESCR} FORCE)
endif()
if(OPENNI_INCLUDES)
set(OPENNI_INCLUDE_DIR_INTERNAL "${OPENNI_INCLUDE_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI_INCLUDE_DIR was set successfully." FORCE)
else()
set(OPENNI_INCLUDE_DIR "${OPENNI_INCLUDE_DIR}-NOTFOUND or does not have OpenNI includes" CACHE PATH ${OPENNI_INCLUDE_DESCR} FORCE)
endif()
if(OPENNI_PRIME_SENSOR_MODULE)
set(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR_INTERNAL "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}" CACHE INTERNAL "This is the value of the last time OPENNI_PRIME_SENSOR_MODULE_BIN_DIR was set successfully." FORCE)
else()
set(OPENNI_PRIME_SENSOR_MODULE_BIN_DIR "${OPENNI_PRIME_SENSOR_MODULE_BIN_DIR}-NOTFOUND or does not have PrimeSensor Module binaries" CACHE PATH ${OPENNI_PRIME_SENSOR_MODULE_BIN_DESCR} FORCE)
endif()
mark_as_advanced(FORCE OPENNI_PRIME_SENSOR_MODULE)
mark_as_advanced(FORCE OPENNI_LIBRARY)
mark_as_advanced(FORCE OPENNI_INCLUDES)
@@ -13,8 +13,7 @@ output can be retrieved by using familiar interface of \texttt{VideoCapture}.\pa
In order to use Kinect with OpenCV you should do the following preliminary steps:\newline
1) Install OpenNI library and PrimeSensor Module for OpenNI from here \url{http://www.openni.org/downloadfiles}.
- The installation should be done to default folders listed in the instruc-
- tions of these products:
+ The installation should be done to default folders listed in the instructions of these products:
\begin{lstlisting}
OpenNI:
Linux & MacOSX:
@@ -25,12 +24,13 @@ OpenNI:
Includes into: c:/Program Files/OpenNI/Include
PrimeSensor Module:
Linux & MacOSX:
- Libs into: /usr/lib
Bins into: /usr/bin
Windows:
- Libs into: c:/Program Files/Prime Sense/Sensor/Lib
Bins into: c:/Program Files/Prime Sense/Sensor/Bin
\end{lstlisting}
+ If one or both products were installed to other folders, the user should change the corresponding CMake variables
+ (\texttt{OPENNI\_LIB\_DIR}, \texttt{OPENNI\_INCLUDE\_DIR} and/or
+ \texttt{OPENNI\_PRIME\_SENSOR\_MODULE\_BIN\_DIR}).\newline
2) Configure OpenCV with OpenNI support by setting \texttt{WITH\_OPENNI} flag in CMake. If OpenNI
is found in default install folders OpenCV will be built with OpenNI library regardless of whether
PrimeSensor Module is found or not. If PrimeSensor Module was not found you will get a warning
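For orientation, here is a minimal sketch (not part of the commit) of the VideoCapture-based usage this documentation describes, assuming OpenCV was configured with WITH_OPENNI and a Kinect is connected; the constants are the CV_CAP_OPENNI_* values added by this commit:

```cpp
#include <opencv2/highgui/highgui.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture capture( CV_CAP_OPENNI ); // open the Kinect via the OpenNI backend
    if( !capture.isOpened() )
    {
        std::cout << "Can not open a Kinect device." << std::endl;
        return -1;
    }

    cv::Mat depthMap, bgrImage, show;
    for(;;)
    {
        if( !capture.grab() )
            break;
        // Each output map is addressed by its type id when retrieving.
        capture.retrieve( depthMap, CV_CAP_OPENNI_DEPTH_MAP ); // CV_16UC1, depth in mm
        capture.retrieve( bgrImage, CV_CAP_OPENNI_BGR_IMAGE ); // CV_8UC3
        depthMap.convertTo( show, CV_8UC1, 0.05 );             // scale only for display
        cv::imshow( "depth map", show );
        cv::imshow( "rgb image", bgrImage );
        if( cv::waitKey( 30 ) >= 0 )
            break;
    }
    return 0;
}
```

The kinect_maps.cpp sample further down in this commit does the same thing with more options.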
......
@@ -334,7 +334,42 @@ enum
CV_CAP_PROP_CONVERT_RGB =16,
CV_CAP_PROP_WHITE_BALANCE =17,
CV_CAP_PROP_RECTIFICATION =18,
- CV_CAP_PROP_MONOCROME =19
+ CV_CAP_PROP_MONOCROME =19,
// OpenNI map generators
CV_CAP_OPENNI_DEPTH_GENERATOR = 0,
CV_CAP_OPENNI_IMAGE_GENERATOR = 1 << 31,
CV_CAP_OPENNI_GENERATORS_MASK = 1 << 31,
// Properties of cameras available through OpenNI interfaces
CV_CAP_PROP_OPENNI_OUTPUT_MODE = 20,
CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH = 21, // in mm
CV_CAP_PROP_OPENNI_BASELINE = 22, // in mm
CV_CAP_PROP_OPENNI_FOCAL_LENGTH = 23, // in pixels
CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_OPENNI_OUTPUT_MODE,
CV_CAP_OPENNI_DEPTH_GENERATOR_BASELINE = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_BASELINE,
CV_CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = CV_CAP_OPENNI_DEPTH_GENERATOR + CV_CAP_PROP_OPENNI_FOCAL_LENGTH
};
enum
{
// Data given from depth generator.
CV_CAP_OPENNI_DEPTH_MAP = 0, // Depth values in mm (CV_16UC1)
CV_CAP_OPENNI_POINT_CLOUD_MAP = 1, // XYZ in meters (CV_32FC3)
CV_CAP_OPENNI_DISPARITY_MAP = 2, // Disparity in pixels (CV_8UC1)
CV_CAP_OPENNI_DISPARITY_MAP_32F = 3, // Disparity in pixels (CV_32FC1)
CV_CAP_OPENNI_VALID_DEPTH_MASK = 4, // CV_8UC1
// Data given from RGB image generator.
CV_CAP_OPENNI_BGR_IMAGE = 5,
CV_CAP_OPENNI_GRAY_IMAGE = 6
};
// Supported output modes of OpenNI image generator
enum
{
CV_CAP_OPENNI_VGA_30HZ = 0,
CV_CAP_OPENNI_SXGA_15HZ = 1
};
/* retrieve or set capture properties */
@@ -369,50 +404,6 @@ CVAPI(int) cvWriteFrame( CvVideoWriter* writer, const IplImage* image );
/* close video file writer */
CVAPI(void) cvReleaseVideoWriter( CvVideoWriter** writer );
enum
{
// Data given from depth generator.
OPENNI_DEPTH_MAP = 0, // Depth values in mm (CV_16UC1)
OPENNI_POINT_CLOUD_MAP = 1, // XYZ in meters (CV_32FC3)
OPENNI_DISPARITY_MAP = 2, // Disparity in pixels (CV_8UC1)
OPENNI_DISPARITY_MAP_32F = 3, // Disparity in pixels (CV_32FC1)
OPENNI_VALID_DEPTH_MASK = 4, // CV_8UC1
// Data given from RGB image generator.
OPENNI_BGR_IMAGE = 5,
OPENNI_GRAY_IMAGE = 6
};
// OpenNI map generators
enum
{
OPENNI_DEPTH_GENERATOR = 0,
OPENNI_IMAGE_GENERATOR = 1 << 31
};
// Properties of cameras avalible through OpenNI interfaces
// (additional to ones begining from CV_CAP_PROP_...)
enum
{
OPENNI_OUTPUT_MODE = 20,
OPENNI_FRAME_MAX_DEPTH = 21, // in mm
OPENNI_BASELINE = 22, // in mm
OPENNI_FOCAL_LENGTH = 23, // in pixels
OPENNI_IMAGE_GENERATOR_OUTPUT_MODE = OPENNI_IMAGE_GENERATOR + OPENNI_OUTPUT_MODE,
OPENNI_DEPTH_GENERATOR_BASELINE = OPENNI_DEPTH_GENERATOR + OPENNI_BASELINE,
OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH = OPENNI_DEPTH_GENERATOR + OPENNI_FOCAL_LENGTH
};
// Supported output modes of OpenNI image generator
enum
{
OPENNI_VGA_30HZ = 0,
OPENNI_SXGA_15HZ = 1
};
const int OPENNI_BAD_DEPTH_VAL = 0;
const int OPENNI_BAD_DISP_VAL = 0;
/****************************************************************************************\
*                            Obsolete functions/synonyms                                *
\****************************************************************************************/
......
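As a quick illustration of the constants added above, a hedged sketch (not part of the commit) of how a generator flag combines with a property id through VideoCapture::get/set; all names are the CV_CAP_OPENNI_* values defined in this header:

```cpp
#include <opencv2/highgui/highgui.hpp>
#include <iostream>

int main()
{
    cv::VideoCapture capture( CV_CAP_OPENNI );

    // A generator-specific property is addressed by adding the generator flag to the
    // property id. CV_CAP_OPENNI_DEPTH_GENERATOR is 0, so a bare property id goes to
    // the depth generator; the image generator needs its (1 << 31) prefix.
    double depthWidth = capture.get( CV_CAP_PROP_FRAME_WIDTH );
    double imageWidth = capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR + CV_CAP_PROP_FRAME_WIDTH );
    double baseline   = capture.get( CV_CAP_OPENNI_DEPTH_GENERATOR_BASELINE );     // mm
    double focal      = capture.get( CV_CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH ); // pixels

    // Switch the RGB image generator to SXGA@15Hz (VGA@30Hz is the default mode).
    capture.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_SXGA_15HZ );

    std::cout << depthWidth << " " << imageWidth << " "
              << baseline << " " << focal << std::endl;
    return 0;
}
```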
@@ -84,13 +84,15 @@ const std::string XMLConfig =
class CvCapture_OpenNI : public CvCapture
{
public:
static const int INVALID_PIXEL_VAL = 0;
CvCapture_OpenNI();
virtual ~CvCapture_OpenNI();
- virtual double getProperty(int);
+ virtual double getProperty(int propIdx);
- virtual bool setProperty(int, double);
+ virtual bool setProperty(int probIdx, double propVal);
virtual bool grabFrame();
- virtual IplImage* retrieveFrame(int);
+ virtual IplImage* retrieveFrame(int outputType);
bool isOpened() const;
@@ -116,10 +118,10 @@ protected:
void readCamerasParams();
- double getDepthGeneratorProperty(int);
+ double getDepthGeneratorProperty(int propIdx);
- bool setDepthGeneratorProperty(int, double);
+ bool setDepthGeneratorProperty(int propIdx, double propVal);
- double getImageGeneratorProperty(int);
+ double getImageGeneratorProperty(int propIdx);
- bool setImageGeneratorProperty(int, double);
+ bool setImageGeneratorProperty(int propIdx, double propVal);
// OpenNI context
xn::Context context;
...@@ -135,12 +137,12 @@ protected: ...@@ -135,12 +137,12 @@ protected:
XnMapOutputMode imageOutputMode; XnMapOutputMode imageOutputMode;
// Cameras settings: // Cameras settings:
#if 1 // TODO find in OpenNI function to convert z->disparity and remove fields "baseline" and depthFocalLength_VGA
// Distance between IR projector and IR camera (in meters) // Distance between IR projector and IR camera (in meters)
XnDouble baseline; XnDouble baseline;
// Focal length for the IR camera in VGA resolution (in pixels) // Focal length for the IR camera in VGA resolution (in pixels)
XnUInt64 depthFocalLength_VGA; XnUInt64 depthFocalLength_VGA;
#endif
// The value for shadow (occluded pixels) // The value for shadow (occluded pixels)
XnUInt64 shadowValue; XnUInt64 shadowValue;
// The value for pixels without a valid disparity measurement // The value for pixels without a valid disparity measurement
...@@ -230,7 +232,6 @@ CvCapture_OpenNI::~CvCapture_OpenNI() ...@@ -230,7 +232,6 @@ CvCapture_OpenNI::~CvCapture_OpenNI()
void CvCapture_OpenNI::readCamerasParams() void CvCapture_OpenNI::readCamerasParams()
{ {
#if 1
XnDouble pixelSize = 0; XnDouble pixelSize = 0;
if( depthGenerator.GetRealProperty( "ZPPS", pixelSize ) != XN_STATUS_OK ) if( depthGenerator.GetRealProperty( "ZPPS", pixelSize ) != XN_STATUS_OK )
CV_Error( CV_StsError, "Could not read pixel size!" ); CV_Error( CV_StsError, "Could not read pixel size!" );
...@@ -239,8 +240,8 @@ void CvCapture_OpenNI::readCamerasParams() ...@@ -239,8 +240,8 @@ void CvCapture_OpenNI::readCamerasParams()
pixelSize *= 2.0; // in mm pixelSize *= 2.0; // in mm
// focal length of IR camera in pixels for VGA resolution // focal length of IR camera in pixels for VGA resolution
XnUInt64 zpd; // in mm XnUInt64 zeroPlanDistance; // in mm
if( depthGenerator.GetIntProperty( "ZPD", zpd ) != XN_STATUS_OK ) if( depthGenerator.GetIntProperty( "ZPD", zeroPlanDistance ) != XN_STATUS_OK )
CV_Error( CV_StsError, "Could not read virtual plane distance!" ); CV_Error( CV_StsError, "Could not read virtual plane distance!" );
if( depthGenerator.GetRealProperty( "LDDIS", baseline ) != XN_STATUS_OK ) if( depthGenerator.GetRealProperty( "LDDIS", baseline ) != XN_STATUS_OK )
...@@ -250,14 +251,13 @@ void CvCapture_OpenNI::readCamerasParams() ...@@ -250,14 +251,13 @@ void CvCapture_OpenNI::readCamerasParams()
baseline *= 10; baseline *= 10;
// focal length from mm -> pixels (valid for 640x480) // focal length from mm -> pixels (valid for 640x480)
depthFocalLength_VGA = (XnUInt64)((double)zpd / (double)pixelSize); depthFocalLength_VGA = (XnUInt64)((double)zeroPlanDistance / (double)pixelSize);
#endif
if( depthGenerator.GetIntProperty( "ShadowValue", shadowValue ) != XN_STATUS_OK ) if( depthGenerator.GetIntProperty( "ShadowValue", shadowValue ) != XN_STATUS_OK )
CV_Error( CV_StsError, "Could not read shadow value!" ); CV_Error( CV_StsError, "Could not read property \"ShadowValue\"!" );
if( depthGenerator.GetIntProperty("NoSampleValue", noSampleValue ) != XN_STATUS_OK ) if( depthGenerator.GetIntProperty("NoSampleValue", noSampleValue ) != XN_STATUS_OK )
CV_Error( CV_StsError, "Could not read no sample value!" ); CV_Error( CV_StsError, "Could not read property \"NoSampleValue\"!" );
} }
double CvCapture_OpenNI::getProperty( int propIdx ) double CvCapture_OpenNI::getProperty( int propIdx )
@@ -266,13 +266,19 @@ double CvCapture_OpenNI::getProperty( int propIdx )
if( isOpened() )
{
- if( propIdx & OPENNI_IMAGE_GENERATOR )
+ int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;
+ if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
{
- propValue = getImageGeneratorProperty( propIdx ^ OPENNI_IMAGE_GENERATOR );
+ propValue = getImageGeneratorProperty( purePropIdx );
}
- else // depth generator (by default, OPENNI_DEPTH_GENERATOR == 0)
+ else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
{
- propValue = getDepthGeneratorProperty( propIdx /*^ OPENNI_DEPTH_GENERATOR*/ );
+ propValue = getDepthGeneratorProperty( purePropIdx );
+ }
+ else
+ {
+ CV_Error( CV_StsError, "Unsupported generator prefix!" );
}
}
@@ -284,13 +290,19 @@ bool CvCapture_OpenNI::setProperty( int propIdx, double propValue )
bool res = false;
if( isOpened() )
{
- if( propIdx & OPENNI_IMAGE_GENERATOR )
+ int purePropIdx = propIdx & ~CV_CAP_OPENNI_GENERATORS_MASK;
+ if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_IMAGE_GENERATOR )
{
- res = setImageGeneratorProperty( propIdx ^ OPENNI_IMAGE_GENERATOR, propValue );
+ res = setImageGeneratorProperty( purePropIdx, propValue );
}
- else // depth generator (by default, OPENNI_DEPTH_GENERATOR == 0)
+ else if( (propIdx & CV_CAP_OPENNI_GENERATORS_MASK) == CV_CAP_OPENNI_DEPTH_GENERATOR )
{
- res = setDepthGeneratorProperty( propIdx /*^ OPENNI_DEPTH_GENERATOR*/, propValue );
+ res = setDepthGeneratorProperty( purePropIdx, propValue );
+ }
+ else
+ {
+ CV_Error( CV_StsError, "Unsupported generator prefix!" );
}
}
...@@ -313,13 +325,13 @@ double CvCapture_OpenNI::getDepthGeneratorProperty( int propIdx ) ...@@ -313,13 +325,13 @@ double CvCapture_OpenNI::getDepthGeneratorProperty( int propIdx )
case CV_CAP_PROP_FPS : case CV_CAP_PROP_FPS :
res = depthOutputMode.nFPS; res = depthOutputMode.nFPS;
break; break;
case OPENNI_FRAME_MAX_DEPTH : case CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH :
res = depthGenerator.GetDeviceMaxDepth(); res = depthGenerator.GetDeviceMaxDepth();
break; break;
case OPENNI_BASELINE : case CV_CAP_PROP_OPENNI_BASELINE :
res = baseline; res = baseline;
break; break;
case OPENNI_FOCAL_LENGTH : case CV_CAP_PROP_OPENNI_FOCAL_LENGTH :
res = depthFocalLength_VGA; res = depthFocalLength_VGA;
break; break;
default : default :
@@ -333,6 +345,7 @@ bool CvCapture_OpenNI::setDepthGeneratorProperty( int propIdx, double propValue
{
CV_Assert( depthGenerator.IsValid() );
CV_Error( CV_StsBadArg, "Depth generator does not support such parameter for setting.\n");
+ return false;
}
double CvCapture_OpenNI::getImageGeneratorProperty( int propIdx )
...@@ -366,15 +379,15 @@ bool CvCapture_OpenNI::setImageGeneratorProperty( int propIdx, double propValue ...@@ -366,15 +379,15 @@ bool CvCapture_OpenNI::setImageGeneratorProperty( int propIdx, double propValue
XnMapOutputMode newImageOutputMode = imageOutputMode; XnMapOutputMode newImageOutputMode = imageOutputMode;
switch( propIdx ) switch( propIdx )
{ {
case OPENNI_OUTPUT_MODE : case CV_CAP_PROP_OPENNI_OUTPUT_MODE :
switch( cvRound(propValue) ) switch( cvRound(propValue) )
{ {
case OPENNI_VGA_30HZ : case CV_CAP_OPENNI_VGA_30HZ :
newImageOutputMode.nXRes = XN_VGA_X_RES; newImageOutputMode.nXRes = XN_VGA_X_RES;
newImageOutputMode.nYRes = XN_VGA_Y_RES; newImageOutputMode.nYRes = XN_VGA_Y_RES;
newImageOutputMode.nFPS = 30; newImageOutputMode.nFPS = 30;
break; break;
case OPENNI_SXGA_15HZ : case CV_CAP_OPENNI_SXGA_15HZ :
newImageOutputMode.nXRes = XN_SXGA_X_RES; newImageOutputMode.nXRes = XN_SXGA_X_RES;
newImageOutputMode.nYRes = XN_SXGA_Y_RES; newImageOutputMode.nYRes = XN_SXGA_Y_RES;
newImageOutputMode.nFPS = 15; newImageOutputMode.nFPS = 15;
@@ -402,7 +415,7 @@ bool CvCapture_OpenNI::grabFrame()
if( !isOpened() )
return false;
- XnStatus status = context.WaitAnyUpdateAll();
+ XnStatus status = context.WaitAndUpdateAll();
if( status != XN_STATUS_OK )
return false;
...@@ -426,7 +439,7 @@ inline void getDepthMapFromMetaData( const xn::DepthMetaData& depthMetaData, cv: ...@@ -426,7 +439,7 @@ inline void getDepthMapFromMetaData( const xn::DepthMetaData& depthMetaData, cv:
cv::Mat badMask = (depthMap == noSampleValue) | (depthMap == shadowValue) | (depthMap == 0); cv::Mat badMask = (depthMap == noSampleValue) | (depthMap == shadowValue) | (depthMap == 0);
// mask the pixels with invalid depth // mask the pixels with invalid depth
depthMap.setTo( cv::Scalar::all( OPENNI_BAD_DEPTH_VAL ), badMask ); depthMap.setTo( cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL ), badMask );
} }
IplImage* CvCapture_OpenNI::retrieveDepthMap() IplImage* CvCapture_OpenNI::retrieveDepthMap()
...@@ -434,9 +447,9 @@ IplImage* CvCapture_OpenNI::retrieveDepthMap() ...@@ -434,9 +447,9 @@ IplImage* CvCapture_OpenNI::retrieveDepthMap()
if( depthMetaData.XRes() <= 0 || depthMetaData.YRes() <= 0 ) if( depthMetaData.XRes() <= 0 || depthMetaData.YRes() <= 0 )
return 0; return 0;
getDepthMapFromMetaData( depthMetaData, outputMaps[OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue ); getDepthMapFromMetaData( depthMetaData, outputMaps[CV_CAP_OPENNI_DEPTH_MAP].mat, noSampleValue, shadowValue );
return outputMaps[OPENNI_DEPTH_MAP].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_DEPTH_MAP].getIplImagePtr();
} }
IplImage* CvCapture_OpenNI::retrievePointCloudMap() IplImage* CvCapture_OpenNI::retrievePointCloudMap()
...@@ -449,7 +462,7 @@ IplImage* CvCapture_OpenNI::retrievePointCloudMap() ...@@ -449,7 +462,7 @@ IplImage* CvCapture_OpenNI::retrievePointCloudMap()
getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );
const float badPoint = 0; const float badPoint = 0;
cv::Mat XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) ); cv::Mat pointCloud_XYZ( rows, cols, CV_32FC3, cv::Scalar::all(badPoint) );
for( int y = 0; y < rows; y++ ) for( int y = 0; y < rows; y++ )
{ {
...@@ -458,7 +471,7 @@ IplImage* CvCapture_OpenNI::retrievePointCloudMap() ...@@ -458,7 +471,7 @@ IplImage* CvCapture_OpenNI::retrievePointCloudMap()
unsigned short d = depth.at<unsigned short>(y, x); unsigned short d = depth.at<unsigned short>(y, x);
// Check for invalid measurements // Check for invalid measurements
if( d == OPENNI_BAD_DEPTH_VAL ) // not valid if( d == CvCapture_OpenNI::INVALID_PIXEL_VAL ) // not valid
continue; continue;
XnPoint3D proj, real; XnPoint3D proj, real;
...@@ -466,13 +479,13 @@ IplImage* CvCapture_OpenNI::retrievePointCloudMap() ...@@ -466,13 +479,13 @@ IplImage* CvCapture_OpenNI::retrievePointCloudMap()
proj.Y = y; proj.Y = y;
proj.Z = d; proj.Z = d;
depthGenerator.ConvertProjectiveToRealWorld(1, &proj, &real); depthGenerator.ConvertProjectiveToRealWorld(1, &proj, &real);
XYZ.at<cv::Point3f>(y,x) = cv::Point3f( real.X*0.001f, real.Y*0.001f, real.Z*0.001f); // from mm to meters pointCloud_XYZ.at<cv::Point3f>(y,x) = cv::Point3f( real.X*0.001f, real.Y*0.001f, real.Z*0.001f); // from mm to meters
} }
} }
outputMaps[OPENNI_POINT_CLOUD_MAP].mat = XYZ; outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].mat = pointCloud_XYZ;
return outputMaps[OPENNI_POINT_CLOUD_MAP].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_POINT_CLOUD_MAP].getIplImagePtr();
} }
void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp, XnDouble baseline, XnUInt64 F, void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp, XnDouble baseline, XnUInt64 F,
...@@ -488,13 +501,13 @@ void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp ...@@ -488,13 +501,13 @@ void computeDisparity_32F( const xn::DepthMetaData& depthMetaData, cv::Mat& disp
float mult = baseline /*mm*/ * F /*pixels*/; float mult = baseline /*mm*/ * F /*pixels*/;
disp.create( depth.size(), CV_32FC1); disp.create( depth.size(), CV_32FC1);
disp = cv::Scalar::all( OPENNI_BAD_DISP_VAL ); disp = cv::Scalar::all( CvCapture_OpenNI::INVALID_PIXEL_VAL );
for( int y = 0; y < disp.rows; y++ ) for( int y = 0; y < disp.rows; y++ )
{ {
for( int x = 0; x < disp.cols; x++ ) for( int x = 0; x < disp.cols; x++ )
{ {
unsigned short curDepth = depth.at<unsigned short>(y,x); unsigned short curDepth = depth.at<unsigned short>(y,x);
if( curDepth != OPENNI_BAD_DEPTH_VAL ) if( curDepth != CvCapture_OpenNI::INVALID_PIXEL_VAL )
disp.at<float>(y,x) = mult / curDepth; disp.at<float>(y,x) = mult / curDepth;
} }
} }
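For reference, the conversion implemented by computeDisparity_32F above is the standard stereo relation disparity(y, x) = B * F / Z(y, x), where B is the IR projector-to-camera baseline in mm, F the depth camera focal length in pixels (valid for 640x480), and Z(y, x) the measured depth in mm, as read in readCamerasParams; pixels with invalid depth keep CvCapture_OpenNI::INVALID_PIXEL_VAL.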
...@@ -508,9 +521,9 @@ IplImage* CvCapture_OpenNI::retrieveDisparityMap() ...@@ -508,9 +521,9 @@ IplImage* CvCapture_OpenNI::retrieveDisparityMap()
cv::Mat disp32; cv::Mat disp32;
computeDisparity_32F( depthMetaData, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue ); computeDisparity_32F( depthMetaData, disp32, baseline, depthFocalLength_VGA, noSampleValue, shadowValue );
disp32.convertTo( outputMaps[OPENNI_DISPARITY_MAP].mat, CV_8UC1 ); disp32.convertTo( outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].mat, CV_8UC1 );
return outputMaps[OPENNI_DISPARITY_MAP].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP].getIplImagePtr();
} }
IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F() IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F()
...@@ -518,9 +531,9 @@ IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F() ...@@ -518,9 +531,9 @@ IplImage* CvCapture_OpenNI::retrieveDisparityMap_32F()
if( depthMetaData.XRes() <= 0 || depthMetaData.YRes() <= 0 ) if( depthMetaData.XRes() <= 0 || depthMetaData.YRes() <= 0 )
return 0; return 0;
computeDisparity_32F( depthMetaData, outputMaps[OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue ); computeDisparity_32F( depthMetaData, outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].mat, baseline, depthFocalLength_VGA, noSampleValue, shadowValue );
return outputMaps[OPENNI_DISPARITY_MAP_32F].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_DISPARITY_MAP_32F].getIplImagePtr();
} }
IplImage* CvCapture_OpenNI::retrieveValidDepthMask() IplImage* CvCapture_OpenNI::retrieveValidDepthMask()
...@@ -531,9 +544,9 @@ IplImage* CvCapture_OpenNI::retrieveValidDepthMask() ...@@ -531,9 +544,9 @@ IplImage* CvCapture_OpenNI::retrieveValidDepthMask()
cv::Mat depth; cv::Mat depth;
getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue ); getDepthMapFromMetaData( depthMetaData, depth, noSampleValue, shadowValue );
outputMaps[OPENNI_VALID_DEPTH_MASK].mat = depth != OPENNI_BAD_DEPTH_VAL; outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].mat = depth != CvCapture_OpenNI::INVALID_PIXEL_VAL;
return outputMaps[OPENNI_VALID_DEPTH_MASK].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_VALID_DEPTH_MASK].getIplImagePtr();
} }
inline void getBGRImageFromMetaData( const xn::ImageMetaData& imageMetaData, cv::Mat& bgrImage ) inline void getBGRImageFromMetaData( const xn::ImageMetaData& imageMetaData, cv::Mat& bgrImage )
...@@ -555,9 +568,9 @@ IplImage* CvCapture_OpenNI::retrieveBGRImage() ...@@ -555,9 +568,9 @@ IplImage* CvCapture_OpenNI::retrieveBGRImage()
if( imageMetaData.XRes() <= 0 || imageMetaData.YRes() <= 0 ) if( imageMetaData.XRes() <= 0 || imageMetaData.YRes() <= 0 )
return 0; return 0;
getBGRImageFromMetaData( imageMetaData, outputMaps[OPENNI_BGR_IMAGE].mat ); getBGRImageFromMetaData( imageMetaData, outputMaps[CV_CAP_OPENNI_BGR_IMAGE].mat );
return outputMaps[OPENNI_BGR_IMAGE].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_BGR_IMAGE].getIplImagePtr();
} }
IplImage* CvCapture_OpenNI::retrieveGrayImage() IplImage* CvCapture_OpenNI::retrieveGrayImage()
...@@ -569,41 +582,41 @@ IplImage* CvCapture_OpenNI::retrieveGrayImage() ...@@ -569,41 +582,41 @@ IplImage* CvCapture_OpenNI::retrieveGrayImage()
cv::Mat rgbImage; cv::Mat rgbImage;
getBGRImageFromMetaData( imageMetaData, rgbImage ); getBGRImageFromMetaData( imageMetaData, rgbImage );
cv::cvtColor( rgbImage, outputMaps[OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY ); cv::cvtColor( rgbImage, outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].mat, CV_BGR2GRAY );
return outputMaps[OPENNI_GRAY_IMAGE].getIplImagePtr(); return outputMaps[CV_CAP_OPENNI_GRAY_IMAGE].getIplImagePtr();
} }
IplImage* CvCapture_OpenNI::retrieveFrame( int dataType ) IplImage* CvCapture_OpenNI::retrieveFrame( int outputType )
{ {
IplImage* image = 0; IplImage* image = 0;
CV_Assert( dataType < outputTypesCount && dataType >= 0); CV_Assert( outputType < outputTypesCount && outputType >= 0);
if( dataType == OPENNI_DEPTH_MAP ) if( outputType == CV_CAP_OPENNI_DEPTH_MAP )
{ {
image = retrieveDepthMap(); image = retrieveDepthMap();
} }
else if( dataType == OPENNI_POINT_CLOUD_MAP ) else if( outputType == CV_CAP_OPENNI_POINT_CLOUD_MAP )
{ {
image = retrievePointCloudMap(); image = retrievePointCloudMap();
} }
else if( dataType == OPENNI_DISPARITY_MAP ) else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP )
{ {
image = retrieveDisparityMap(); image = retrieveDisparityMap();
} }
else if( dataType == OPENNI_DISPARITY_MAP_32F ) else if( outputType == CV_CAP_OPENNI_DISPARITY_MAP_32F )
{ {
image = retrieveDisparityMap_32F(); image = retrieveDisparityMap_32F();
} }
else if( dataType == OPENNI_VALID_DEPTH_MASK ) else if( outputType == CV_CAP_OPENNI_VALID_DEPTH_MASK )
{ {
image = retrieveValidDepthMask(); image = retrieveValidDepthMask();
} }
else if( dataType == OPENNI_BGR_IMAGE ) else if( outputType == CV_CAP_OPENNI_BGR_IMAGE )
{ {
image = retrieveBGRImage(); image = retrieveBGRImage();
} }
else if( dataType == OPENNI_GRAY_IMAGE ) else if( outputType == CV_CAP_OPENNI_GRAY_IMAGE )
{ {
image = retrieveGrayImage(); image = retrieveGrayImage();
} }
......
@@ -6,9 +6,22 @@
using namespace cv;
using namespace std;
- #define COLORIZED_DISP 1
- #define IMAGE_GENERATOR_VGA_30HZ 1
- #define FIXED_MAX_DISP 0
void help()
{
cout << "\nThis program demonstrates usage of Kinect sensor.\n"
"The user gets some of the supported output images.\n"
"\nAll supported output map types:\n"
"1.) Data given from depth generator\n"
" OPENNI_DEPTH_MAP - depth values in mm (CV_16UC1)\n"
" OPENNI_POINT_CLOUD_MAP - XYZ in meters (CV_32FC3)\n"
" OPENNI_DISPARITY_MAP - disparity in pixels (CV_8UC1)\n"
" OPENNI_DISPARITY_MAP_32F - disparity in pixels (CV_32FC1)\n"
" OPENNI_VALID_DEPTH_MASK - mask of valid pixels (not ocluded, not shaded etc.) (CV_8UC1)\n"
"2.) Data given from RGB image generator\n"
" OPENNI_BGR_IMAGE - color image (CV_8UC3)\n"
" OPENNI_GRAY_IMAGE - gray image (CV_8UC1)\n"
<< endl;
}
void colorizeDisparity( const Mat& gray, Mat& rgb, double maxDisp=-1.f, float S=1.f, float V=1.f )
{
@@ -59,45 +72,98 @@ void colorizeDisparity( const Mat& gray, Mat& rgb, double maxDisp=-1.f, float S=
}
}
void help()
{
cout << "\nThis program demonstrates usage of Kinect sensor.\n"
"The user gets some of the supported output images.\n"
"\nAll supported output map types:\n"
"1.) Data given from depth generator\n"
" OPENNI_DEPTH_MAP - depth values in mm (CV_16UC1)\n"
" OPENNI_POINT_CLOUD_MAP - XYZ in meters (CV_32FC3)\n"
" OPENNI_DISPARITY_MAP - disparity in pixels (CV_8UC1)\n"
" OPENNI_DISPARITY_MAP_32F - disparity in pixels (CV_32FC1)\n"
" OPENNI_VALID_DEPTH_MASK - mask of valid pixels (not ocluded, not shaded etc.) (CV_8UC1)\n"
"2.) Data given from RGB image generator\n"
" OPENNI_BGR_IMAGE - color image (CV_8UC3)\n"
" OPENNI_GRAY_IMAGE - gray image (CV_8UC1)\n"
<< endl;
}
float getMaxDisparity( VideoCapture& capture )
{
- #if FIXED_MAX_DISP
const int minDistance = 400; // mm
- float b = capture.get( OPENNI_DEPTH_GENERATOR_BASELINE ); // mm
+ float b = capture.get( CV_CAP_OPENNI_DEPTH_GENERATOR_BASELINE ); // mm
- float F = capture.get( OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH ); // pixels
+ float F = capture.get( CV_CAP_OPENNI_DEPTH_GENERATOR_FOCAL_LENGTH ); // pixels
return b * F / minDistance;
- #else
- return -1;
- #endif
}

void printCommandLineParams()
{
cout << "-cd Colorized disparity? (0 or 1; 1 by default) Ignored if disparity map is not selected to show." << endl;
cout << "-fmd Fixed max disparity? (0 or 1; 0 by default) Ignored if disparity map is not colorized (-cd 0)." << endl;
cout << "-sxga SXGA resolution of image? (0 or 1; 0 by default) Ignored if rgb image or gray image are not selected to show." << endl;
cout << " If -sxga is 0 then vga resolution will be set by default." << endl;
cout << "-m Mask to set which output images are need. It is a string of size 5. Each element of this is '0' or '1' and" << endl;
cout << " determine: is depth map, disparity map, valid pixels mask, rgb image, gray image need or not (correspondently)?" << endl ;
cout << " By default -m 01010 i.e. disparity map and rgb image will be shown." << endl ;
}
void parseCommandLine( int argc, char* argv[], bool& isColorizeDisp, bool& isFixedMaxDisp, bool& isSetSXGA, bool retrievedImageFlags[] )
{
// set default values
isColorizeDisp = true;
isFixedMaxDisp = false;
isSetSXGA = false;
retrievedImageFlags[0] = false;
retrievedImageFlags[1] = true;
retrievedImageFlags[2] = false;
retrievedImageFlags[3] = true;
retrievedImageFlags[4] = false;
if( argc == 1 )
{
help();
}
else
{
for( int i = 1; i < argc; i++ )
{
if( !strcmp( argv[i], "--help" ) )
{
printCommandLineParams();
exit(0);
}
else if( !strcmp( argv[i], "-cd" ) )
{
isColorizeDisp = atoi(argv[++i]) == 0 ? false : true;
}
else if( !strcmp( argv[i], "-fmd" ) )
{
isFixedMaxDisp = atoi(argv[++i]) == 0 ? false : true;
}
else if( !strcmp( argv[i], "-sxga" ) )
{
isSetSXGA = atoi(argv[++i]) == 0 ? false : true;
}
else if( !strcmp( argv[i], "-m" ) )
{
string mask( argv[++i] );
if( mask.size() != 5)
CV_Error( CV_StsBadArg, "Incorrect length of -m argument string" );
int val = atoi(mask.c_str());
int l = 100000, r = 10000, sum = 0;
for( int i = 0; i < 5; i++ )
{
retrievedImageFlags[i] = ((val % l) / r ) == 0 ? false : true;
l /= 10; r /= 10;
if( retrievedImageFlags[i] ) sum++;
}
if( sum == 0 )
{
cout << "No one output image is selected." << endl;
exit(0);
}
}
}
}
} }
/*
* To work with Kinect the user must install OpenNI library and PrimeSensorModule for OpenNI and
* configure OpenCV with the WITH_OPENNI flag ON (using CMake).
*/
- int main()
+ int main( int argc, char* argv[] )
{
- help();
+ bool isColorizeDisp, isFixedMaxDisp, isSetSXGA;
bool retrievedImageFlags[5];
parseCommandLine( argc, argv, isColorizeDisp, isFixedMaxDisp, isSetSXGA, retrievedImageFlags );
cout << "Kinect opening ..." << endl;
- VideoCapture capture(0); // or CV_CAP_OPENNI
+ VideoCapture capture( CV_CAP_OPENNI );
cout << "done." << endl;
if( !capture.isOpened() )
@@ -106,23 +172,22 @@ int main()
return -1;
}
- #if IMAGE_GENERATOR_VGA_30HZ
+ if( isSetSXGA )
- capture.set( OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, OPENNI_VGA_30HZ ); // default
+ capture.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_SXGA_15HZ );
- #else
+ else
- capture.set( OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, OPENNI_SXGA_15HZ );
+ capture.set( CV_CAP_OPENNI_IMAGE_GENERATOR_OUTPUT_MODE, CV_CAP_OPENNI_VGA_30HZ ); // default
- #endif
// Print some available Kinect settings.
cout << "\nDepth generator output mode:" << endl <<
"FRAME_WIDTH " << capture.get( CV_CAP_PROP_FRAME_WIDTH ) << endl <<
"FRAME_HEIGHT " << capture.get( CV_CAP_PROP_FRAME_HEIGHT ) << endl <<
- "FRAME_MAX_DEPTH " << capture.get( OPENNI_FRAME_MAX_DEPTH ) << " mm" << endl <<
+ "FRAME_MAX_DEPTH " << capture.get( CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH ) << " mm" << endl <<
"FPS " << capture.get( CV_CAP_PROP_FPS ) << endl;
cout << "\nImage generator output mode:" << endl <<
- "FRAME_WIDTH " << capture.get( OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FRAME_WIDTH ) << endl <<
+ "FRAME_WIDTH " << capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FRAME_WIDTH ) << endl <<
- "FRAME_HEIGHT " << capture.get( OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FRAME_HEIGHT ) << endl <<
+ "FRAME_HEIGHT " << capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FRAME_HEIGHT ) << endl <<
- "FPS " << capture.get( OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FPS ) << endl;
+ "FPS " << capture.get( CV_CAP_OPENNI_IMAGE_GENERATOR+CV_CAP_PROP_FPS ) << endl;
for(;;)
{
@@ -139,33 +204,36 @@ int main()
}
else
{
- if( capture.retrieve( depthMap, OPENNI_DEPTH_MAP ) )
+ if( retrievedImageFlags[0] && capture.retrieve( depthMap, CV_CAP_OPENNI_DEPTH_MAP ) )
{
const float scaleFactor = 0.05f;
Mat show; depthMap.convertTo( show, CV_8UC1, scaleFactor );
imshow( "depth map", show );
}
- if( capture.retrieve( disparityMap, OPENNI_DISPARITY_MAP ) )
+ if( retrievedImageFlags[1] && capture.retrieve( disparityMap, CV_CAP_OPENNI_DISPARITY_MAP ) )
{
if( isColorizeDisp )
{
- #if COLORIZED_DISP
+ // colorized disparity for more visibility
Mat colorDisparityMap;
- colorizeDisparity( disparityMap, colorDisparityMap, getMaxDisparity( capture ) );
+ colorizeDisparity( disparityMap, colorDisparityMap, isFixedMaxDisp ? getMaxDisparity(capture) : -1 );
Mat validColorDisparityMap;
- colorDisparityMap.copyTo( validColorDisparityMap, disparityMap != OPENNI_BAD_DISP_VAL );
+ colorDisparityMap.copyTo( validColorDisparityMap, disparityMap != 0 );
imshow( "colorized disparity map", validColorDisparityMap );
- #else // original disparity
+ }
+ else
+ {
imshow( "original disparity map", disparityMap );
- #endif
+ }
}
- if( capture.retrieve( validDepthMap, OPENNI_VALID_DEPTH_MASK ) )
+ if( retrievedImageFlags[2] && capture.retrieve( validDepthMap, CV_CAP_OPENNI_VALID_DEPTH_MASK ) )
imshow( "valid depth mask", validDepthMap );
- if( capture.retrieve( bgrImage, OPENNI_BGR_IMAGE ) )
+ if( retrievedImageFlags[3] && capture.retrieve( bgrImage, CV_CAP_OPENNI_BGR_IMAGE ) )
imshow( "rgb image", bgrImage );
- if( capture.retrieve( grayImage, OPENNI_GRAY_IMAGE ) )
+ if( retrievedImageFlags[4] && capture.retrieve( grayImage, CV_CAP_OPENNI_GRAY_IMAGE ) )
imshow( "gray image", grayImage );
}
......