Commit d0964ab0 authored by Pavel Rojtberg's avatar Pavel Rojtberg

ovis: add Ogre3D based visualizer module

parent 6848eb63
......@@ -42,6 +42,8 @@ $ cmake -D OPENCV_EXTRA_MODULES_PATH=<opencv_contrib>/modules -D BUILD_opencv_<r
- **optflow**: Optical Flow -- Algorithms for running and evaluating deepflow, simpleflow, sparsetodenseflow and motion templates (silhouette flow).
- **ovis**: OGRE 3D Visualiser -- allows you to render 3D data using the OGRE 3D engine.
- **plot**: Plotting -- The plot module allows you to easily plot data in 1D or 2D.
- **reg**: Image Registration -- Pixels based image registration for precise alignment. Follows the paper "Image Alignment and Stitching: A Tutorial", by Richard Szeliski.
......
set(the_description "OGRE 3D Visualiser.")

find_package(OGRE 1.10 QUIET)

if(NOT OGRE_FOUND)
  message(STATUS "Module opencv_ovis disabled because OGRE3D was not found.")
  ocv_module_disable(ovis)
endif()

# the original had an unbalanced closing brace after OGRE_INCLUDE_DIRS, which
# appended a literal '}' to the last include directory
include_directories(${OGRE_INCLUDE_DIRS})
link_directories(${OGRE_LIBRARY_DIRS})

ocv_define_module(ovis opencv_core opencv_imgproc opencv_calib3d WRAP python)
ocv_warnings_disable(CMAKE_CXX_FLAGS -Wunused-parameter)
ocv_target_link_libraries(${the_module} ${OGRE_LIBRARIES})
\ No newline at end of file
OVIS Module
===========
The OVIS module allows you to render 3D data using the OGRE 3D engine and obtain the rendering as a cv::Mat.
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef _OPENCV_OVIS_H_
#define _OPENCV_OVIS_H_
#include <opencv2/core.hpp>
/**
@defgroup ovis OGRE 3D Visualiser
*/
namespace cv {
namespace ovis {
//! @addtogroup ovis
//! @{
enum SceneSettings
{
    /// the window will use a separate scene. The scene will be shared otherwise.
    SCENE_SEPERATE = 1,
    /// allow the user to control the camera.
    SCENE_INTERACTIVE = 2,
    /// draw coordinate system crosses for debugging
    SCENE_SHOW_CS_CROSS = 4
};
/// material properties that can be changed via @ref setMaterialProperty
enum MaterialProperty
{
    /// size of rendered points (relevant e.g. for point cloud meshes)
    MATERIAL_POINT_SIZE,
    /// opacity of the material
    MATERIAL_OPACITY,
    /// emissive (self-illumination) colour
    MATERIAL_EMISSIVE,
    /// texture of the material — presumably set by name via the String
    /// overload of setMaterialProperty (implementation not visible here)
    MATERIAL_TEXTURE
};
/**
 * A 3D viewport and the associated scene
 *
 * All methods are pure virtual; instances are created via @ref createWindow.
 */
class CV_EXPORTS_W WindowScene {
public:
    virtual ~WindowScene();

    /**
     * set window background to custom image
     *
     * creates a texture named "<title>_Background"
     * @param image the image to display as background
     */
    CV_WRAP virtual void setBackground(InputArray image) = 0;

    /**
     * place an entity of a mesh in the scene
     * @param name entity name
     * @param meshname mesh name
     * @param rot Rodrigues vector or 3x3 rotation matrix
     * @param tvec translation
     */
    CV_WRAP virtual void createEntity(const String& name, const String& meshname,
                                      InputArray tvec = noArray(), InputArray rot = noArray()) = 0;

    /**
     * convenience method to visualize a camera position
     *
     * the entity uses a material with the same name that can be used to change the line color.
     * @param name entity name
     * @param K intrinsic matrix
     * @param imsize image size
     * @param zFar far plane in camera coordinates
     * @param rot Rodrigues vector or 3x3 rotation matrix
     * @param tvec translation
     * @return the extents of the Frustum at far plane, where the top left corner denotes the principal
     * point offset
     */
    CV_WRAP virtual Rect2d createCameraEntity(const String& name, InputArray K, const Size& imsize,
                                              float zFar, InputArray tvec = noArray(),
                                              InputArray rot = noArray()) = 0;

    /**
     * creates a point light in the scene
     * @param name entity name
     * @param rot Rodrigues vector or 3x3 rotation matrix
     * @param tvec translation
     * @param diffuseColor colour of the diffuse light component
     * @param specularColor colour of the specular light component
     */
    CV_WRAP virtual void createLightEntity(const String& name, InputArray tvec = noArray(),
                                           InputArray rot = noArray(),
                                           const Scalar& diffuseColor = Scalar::all(1),
                                           const Scalar& specularColor = Scalar::all(1)) = 0;

    /**
     * update entity pose by transformation in the parent coordinate space. (pre-rotation)
     * @param name entity name
     * @param rot Rodrigues vector or 3x3 rotation matrix
     * @param tvec translation
     */
    CV_WRAP virtual void updateEntityPose(const String& name, InputArray tvec = noArray(),
                                          InputArray rot = noArray()) = 0;

    /**
     * set entity pose in the world coordinate space.
     * @param name entity name
     * @param rot Rodrigues vector or 3x3 rotation matrix
     * @param tvec translation
     * @param invert use the inverse of the given pose
     */
    CV_WRAP virtual void setEntityPose(const String& name, InputArray tvec = noArray(),
                                       InputArray rot = noArray(), bool invert = false) = 0;

    /**
     * read back image of last call to @ref renderOneFrame
     * @param frame output rendering
     */
    CV_WRAP virtual void getScreenshot(OutputArray frame) = 0;

    /**
     * convenience method to force the "up" axis to stay fixed
     *
     * works with both programmatic changes and SCENE_INTERACTIVE
     * @param useFixed whether to enforce the fixed yaw axis
     * @param up the axis to be fixed
     */
    CV_WRAP virtual void fixCameraYawAxis(bool useFixed, InputArray up = noArray()) = 0;

    /**
     * Sets the current camera pose
     * @param rot Rodrigues vector or 3x3 rotation matrix
     * @param tvec translation
     * @param invert use the inverse of the given pose
     */
    CV_WRAP virtual void setCameraPose(InputArray tvec = noArray(), InputArray rot = noArray(),
                                       bool invert = false) = 0;

    /**
     * convenience method to orient the camera to a specific entity
     * @param target entity name
     * @param offset offset from entity centre
     */
    CV_WRAP virtual void setCameraLookAt(const String& target, InputArray offset = noArray()) = 0;

    /**
     * Retrieves the current camera pose
     * @param R 3x3 rotation matrix
     * @param tvec translation vector
     * @param invert return the inverted pose
     */
    CV_WRAP virtual void getCameraPose(OutputArray R = noArray(), OutputArray tvec = noArray(),
                                       bool invert = false) = 0;

    /**
     * set intrinsics of the camera
     * @param K intrinsic matrix
     * @param imsize image size
     */
    CV_WRAP virtual void setCameraIntrinsics(InputArray K, const Size& imsize) = 0;
};
/**
 * Add an additional resource location that is searched for meshes, textures and materials
 *
 * must be called before the first createWindow. If the given path does not exist, retries inside
 * Ogre Media Directory.
 * @param path folder or Zip archive.
 */
CV_EXPORTS_W void addResourceLocation(const String& path);

/**
 * create a new rendering window/ viewport
 * @param title window title
 * @param size size of the window
 * @param flags a combination of @see SceneSettings
 */
CV_EXPORTS_W Ptr<WindowScene> createWindow(const String& title, const Size& size,
                                           int flags = SCENE_INTERACTIVE);

/**
 * update all windows
 * @return true if this function can be called again (i.e. continue rendering). false otherwise.
 */
CV_EXPORTS_W bool renderOneFrame();

/**
 * set the property of a material to the given value
 * @param name material name
 * @param prop property @ref MaterialProperty
 * @param value the value
 */
CV_EXPORTS_W void setMaterialProperty(const String& name, int prop, const Scalar& value);

/// @overload
CV_EXPORTS_W void setMaterialProperty(const String& name, int prop, const String& value);

/**
 * create a 2D plane, X right, Y down, Z up
 *
 * creates a material and a texture with the same name
 * @param name name of the mesh
 * @param size size in world units
 * @param image optional texture
 */
CV_EXPORTS_W void createPlaneMesh(const String& name, const Size2f& size, InputArray image = noArray());

/**
 * creates a point cloud mesh
 *
 * creates a material with the same name
 * @param name name of the mesh
 * @param vertices float vector of positions
 * @param colors uchar vector of colors
 */
CV_EXPORTS_W void createPointCloudMesh(const String& name, InputArray vertices, InputArray colors = noArray());

/**
 * creates a grid
 *
 * creates a material with the same name
 * @param name name of the mesh
 * @param size extents of the grid
 * @param segments number of segments per side
 */
CV_EXPORTS_W void createGridMesh(const String& name, const Size2f& size, const Size& segments = Size(1, 1));

//! @}
}
}

#endif
import cv2
import numpy as np

# aruco marker setup: show the marker so it can be printed/displayed
adict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_4X4_50)
cv2.imshow("marker", cv2.aruco.drawMarker(adict, 0, 400))

# random calibration data. your mileage may vary.
imsize = (800, 600)
K = cv2.getDefaultNewCameraMatrix(np.diag([800, 800, 1]), imsize, True)

# AR scene
cv2.ovis.addResourceLocation("packs/Sinbad.zip")  # shipped with Ogre

win = cv2.ovis.createWindow("arucoAR", imsize, flags=0)
win.createEntity("figure", "Sinbad.mesh", (0, 0, -5), (-1.57, 0, 0))
win.createLightEntity("sun", (0, 0, -100))

# video capture
cap = cv2.VideoCapture(0)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, imsize[0])
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, imsize[1])

while cv2.ovis.renderOneFrame():
    # check the grab result: on a failed read img is None and would crash
    # setBackground/detectMarkers below
    ok, img = cap.read()
    if not ok:
        break
    win.setBackground(img)
    corners, ids = cv2.aruco.detectMarkers(img, adict)[:2]

    cv2.waitKey(1)

    if ids is None:
        continue

    # marker side length of 5 world units; camera pose is the inverted marker pose
    rvecs, tvecs = cv2.aruco.estimatePoseSingleMarkers(corners, 5, K, None)[:2]
    win.setCameraPose(tvecs[0].ravel(), rvecs[0].ravel(), invert=True)
import cv2
import numpy as np

# pull in externally defined meshes (shipped with Ogre)
cv2.ovis.addResourceLocation("packs/Sinbad.zip")

# camera intrinsics used by both scenes
imsize = (800, 600)
K = np.diag([800, 800, 1])
K[:2, 2] = (400, 100)  # offset pp

# observer scene: a god's-eye view showing the ground, the figure and a
# frustum marker for the interactive camera
owin = cv2.ovis.createWindow("VR", imsize)
cv2.ovis.createGridMesh("ground", (10, 10), (10, 10))
owin.createEntity("ground", "ground", rot=(1.57, 0, 0))
owin.createCameraEntity("cam", K, imsize, 5)
owin.createEntity("figure", "Sinbad.mesh", (0, -5, 0))  # externally defined mesh
owin.createLightEntity("sun", (0, 0, -100))

# interaction scene: separate scene with a user-controllable camera
iwin = cv2.ovis.createWindow("AR", imsize, cv2.ovis.SCENE_SEPERATE | cv2.ovis.SCENE_INTERACTIVE)
iwin.createEntity("figure", "Sinbad.mesh", (0, -5, 0))
iwin.createLightEntity("sun", (0, 0, -100))
iwin.setCameraIntrinsics(K, imsize)

while cv2.ovis.renderOneFrame():
    # mirror the interactive camera's pose onto the frustum marker in the
    # observer scene
    R, t = iwin.getCameraPose()
    owin.setEntityPose("cam", t, R)
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#include "precomp.hpp"
namespace cv
{
namespace ovis
{
using namespace Ogre;
void createPlaneMesh(const String& name, const Size2f& size, InputArray image)
{
CV_Assert(_app);
// material
MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME);
Pass* rpass = mat->getTechniques()[0]->getPasses()[0];
rpass->setCullingMode(CULL_NONE);
rpass->setEmissive(ColourValue::White);
if (!image.empty())
{
_createTexture(name, image.getMat());
rpass->createTextureUnitState(name);
}
// plane
MovablePlane plane(Vector3::UNIT_Z, 0);
MeshPtr mesh = MeshManager::getSingleton().createPlane(
name, RESOURCEGROUP_NAME, plane, size.width, size.height, 1, 1, true, 1, 1, 1, Vector3::UNIT_Y);
mesh->getSubMesh(0)->setMaterialName(name);
}
void createPointCloudMesh(const String& name, InputArray vertices, InputArray colors)
{
int color_type = colors.type();
CV_Assert(_app, vertices.type() == CV_32FC3 && vertices.isContinuous(),
colors.empty() || color_type == CV_8UC3 || color_type == CV_8UC4);
// material
MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME);
Pass* rpass = mat->getTechniques()[0]->getPasses()[0];
rpass->setEmissive(ColourValue::White);
// mesh
MeshPtr mesh = MeshManager::getSingleton().createManual(name, RESOURCEGROUP_NAME);
SubMesh* sub = mesh->createSubMesh();
sub->useSharedVertices = true;
sub->operationType = RenderOperation::OT_POINT_LIST;
sub->setMaterialName(name);
int n = vertices.rows();
mesh->sharedVertexData = new VertexData();
mesh->sharedVertexData->vertexCount = n;
VertexDeclaration* decl = mesh->sharedVertexData->vertexDeclaration;
// vertex data
HardwareBufferManager& hbm = HardwareBufferManager::getSingleton();
Mat _vertices = vertices.getMat();
int source = 0;
HardwareVertexBufferSharedPtr hwbuf;
decl->addElement(source, 0, VET_FLOAT3, VES_POSITION);
hwbuf = hbm.createVertexBuffer(decl->getVertexSize(source), n, HardwareBuffer::HBU_STATIC_WRITE_ONLY);
hwbuf->writeData(0, hwbuf->getSizeInBytes(), _vertices.ptr(), true);
mesh->sharedVertexData->vertexBufferBinding->setBinding(source, hwbuf);
// color data
if (!colors.empty())
{
mat->setLightingEnabled(false);
source += 1;
Mat col4;
cvtColor(colors, col4, color_type == CV_8UC3 ? COLOR_BGR2RGBA : COLOR_BGRA2RGBA);
decl->addElement(source, 0, VET_COLOUR, VES_DIFFUSE);
hwbuf =
hbm.createVertexBuffer(decl->getVertexSize(source), n, HardwareBuffer::HBU_STATIC_WRITE_ONLY);
hwbuf->writeData(0, hwbuf->getSizeInBytes(), col4.ptr(), true);
mesh->sharedVertexData->vertexBufferBinding->setBinding(source, hwbuf);
rpass->setVertexColourTracking(TVC_DIFFUSE);
}
AxisAlignedBox bounds(AxisAlignedBox::EXTENT_NULL);
for (int i = 0; i < n; i++)
{
Vec3f v = _vertices.at<Vec3f>(i);
bounds.merge(Vector3(v[0], v[1], v[2]));
}
mesh->_setBounds(bounds);
}
void createGridMesh(const String& name, const Size2f& size, const Size& segments)
{
CV_Assert(_app, !segments.empty());
// material
MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME);
Pass* rpass = mat->getTechniques()[0]->getPasses()[0];
rpass->setEmissive(ColourValue::White);
// mesh
MeshPtr mesh = MeshManager::getSingleton().createManual(name, RESOURCEGROUP_NAME);
SubMesh* sub = mesh->createSubMesh();
sub->useSharedVertices = true;
sub->operationType = RenderOperation::OT_LINE_LIST;
sub->setMaterialName(name);
int n = (segments.width + 1) * 2 + (segments.height + 1) * 2;
mesh->sharedVertexData = new VertexData();
mesh->sharedVertexData->vertexCount = n;
VertexDeclaration* decl = mesh->sharedVertexData->vertexDeclaration;
// vertex data
HardwareBufferManager& hbm = HardwareBufferManager::getSingleton();
int source = 0;
HardwareVertexBufferSharedPtr hwbuf;
decl->addElement(source, 0, VET_FLOAT2, VES_POSITION);
hwbuf = hbm.createVertexBuffer(decl->getVertexSize(source), n, HardwareBuffer::HBU_STATIC_WRITE_ONLY);
mesh->sharedVertexData->vertexBufferBinding->setBinding(source, hwbuf);
Vector2 step = Vector2(size.width, size.height) / Vector2(segments.width, segments.height);
Vec2f* data = (Vec2f*)hwbuf->lock(HardwareBuffer::HBL_DISCARD);
for (int i = 0; i < segments.width + 1; i++)
{
data[i * 2] = Vec2f(-size.width / 2 + step.x * i, -size.height / 2);
data[i * 2 + 1] = Vec2f(-size.width / 2 + step.x * i, size.height / 2);
}
data += (segments.width + 1) * 2;
for (int i = 0; i < segments.height + 1; i++)
{
data[i * 2] = Vec2f(-size.width / 2, -size.height / 2 + step.y * i);
data[i * 2 + 1] = Vec2f(size.width / 2, -size.height / 2 + step.y * i);
}
hwbuf->unlock();
Vector3 sz(size.width, size.height, 0);
mesh->_setBounds(AxisAlignedBox(-sz/2, sz/2));
}
}
}
This diff is collapsed.
// This file is part of OpenCV project.
// It is subject to the license terms in the LICENSE file found in the top-level directory
// of this distribution and at http://opencv.org/license.html.
#ifndef __OPENCV_PRECOMP_H__
#define __OPENCV_PRECOMP_H__

#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>
#include "opencv2/ovis.hpp"
#include "opencv2/opencv_modules.hpp"

#include <Ogre.h>

namespace cv {
namespace ovis {
// opaque application state shared by the module's translation units;
// defined in the implementation file (not visible here)
struct Application;
extern Ptr<Application> _app;

// name of the Ogre resource group used for all ovis-created resources
extern const char* RESOURCEGROUP_NAME;

// uploads `image` as an Ogre texture registered under `name`
// (used e.g. by createPlaneMesh to attach an image texture)
void _createTexture(const String& name, Mat image);
}
}

#endif
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment