diff --git a/modules/README.md b/modules/README.md index d0cde4b83..54ba6cab0 100644 --- a/modules/README.md +++ b/modules/README.md @@ -42,6 +42,8 @@ $ cmake -D OPENCV_EXTRA_MODULES_PATH=/modules -D BUILD_opencv_ + +/** +@defgroup ovis OGRE 3D Visualiser +*/ + +namespace cv { +namespace ovis { +//! @addtogroup ovis +//! @{ + +enum SceneSettings +{ + /// the window will use a separate scene. The scene will be shared otherwise. + SCENE_SEPERATE = 1, + /// allow the user to control the camera. + SCENE_INTERACTIVE = 2, + /// draw coordinate system crosses for debugging + SCENE_SHOW_CS_CROSS = 4 +}; + +enum MaterialProperty +{ + MATERIAL_POINT_SIZE, + MATERIAL_OPACITY, + MATERIAL_EMISSIVE, + MATERIAL_TEXTURE +}; + +/** + * A 3D viewport and the associated scene + */ +class CV_EXPORTS_W WindowScene { +public: + virtual ~WindowScene(); + + /** + * set the window background to a custom image + * + * creates a texture named "_Background" + * @param image the new background image + */ + CV_WRAP virtual void setBackground(InputArray image) = 0; + + /** + * place an entity of a mesh in the scene + * @param name entity name + * @param meshname mesh name + * @param rot Rodrigues vector or 3x3 rotation matrix + * @param tvec translation + */ + CV_WRAP virtual void createEntity(const String& name, const String& meshname, + InputArray tvec = noArray(), InputArray rot = noArray()) = 0; + + /** + * convenience method to visualize a camera position + * + * the entity uses a material with the same name that can be used to change the line color. + * @param name entity name + * @param K intrinsic matrix + * @param imsize image size + * @param zFar far plane in camera coordinates + * @param rot Rodrigues vector or 3x3 rotation matrix + * @param tvec translation + * @return the extents of the frustum at the far plane, where the top left corner denotes the principal + * point offset + */ + CV_WRAP virtual Rect2d createCameraEntity(const String& name, InputArray K, const Size& imsize, + float zFar, InputArray tvec = noArray(), + InputArray rot = noArray()) = 0; + + /** + * creates a point light in the scene + * @param name entity name + * @param rot Rodrigues vector or 3x3 rotation matrix + * @param tvec translation + * @param diffuseColor diffuse color of the light + * @param specularColor specular color of the light + */ + CV_WRAP virtual void createLightEntity(const String& name, InputArray tvec = noArray(), + InputArray rot = noArray(), + const Scalar& diffuseColor = Scalar::all(1), + const Scalar& specularColor = Scalar::all(1)) = 0; + + /** + * update the entity pose by the given transformation in the parent coordinate space (pre-rotation) + * @param name entity name + * @param rot Rodrigues vector or 3x3 rotation matrix + * @param tvec translation + */ + CV_WRAP virtual void updateEntityPose(const String& name, InputArray tvec = noArray(), + InputArray rot = noArray()) = 0; + + /** + * set the entity pose in the world coordinate space.
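+ *
+ * in contrast to @ref updateEntityPose, this overwrites the current pose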
+ * @param name entity name + * @param rot Rodrigues vector or 3x3 rotation matrix + * @param tvec translation + * @param invert use the inverse of the given pose + */ + CV_WRAP virtual void setEntityPose(const String& name, InputArray tvec = noArray(), + InputArray rot = noArray(), bool invert = false) = 0; + + /** + * read back the image of the last call to @ref renderOneFrame + */ + CV_WRAP virtual void getScreenshot(OutputArray frame) = 0; + + /** + * convenience method to force the "up" axis to stay fixed + * + * works with both programmatic changes and SCENE_INTERACTIVE + * @param useFixed whether to enforce the fixed yaw axis + * @param up the axis to be fixed + */ + CV_WRAP virtual void fixCameraYawAxis(bool useFixed, InputArray up = noArray()) = 0; + + /** + * Sets the current camera pose + * @param rot Rodrigues vector or 3x3 rotation matrix + * @param tvec translation + * @param invert use the inverse of the given pose + */ + CV_WRAP virtual void setCameraPose(InputArray tvec = noArray(), InputArray rot = noArray(), + bool invert = false) = 0; + + /** + * convenience method to orient the camera to a specific entity + * @param target entity name + * @param offset offset from entity centre + */ + CV_WRAP virtual void setCameraLookAt(const String& target, InputArray offset = noArray()) = 0; + + /** + * Retrieves the current camera pose + * @param R 3x3 rotation matrix + * @param tvec translation vector + * @param invert return the inverted pose + */ + CV_WRAP virtual void getCameraPose(OutputArray R = noArray(), OutputArray tvec = noArray(), + bool invert = false) = 0; + + /** + * set intrinsics of the camera + * @param K intrinsic matrix + * @param imsize image size + */ + CV_WRAP virtual void setCameraIntrinsics(InputArray K, const Size& imsize) = 0; +}; + +/** + * Add an additional resource location that is searched for meshes, textures and materials + * + * must be called before the first createWindow call. If the given path does not exist, the lookup is retried inside + * the Ogre Media Directory. + * @param path folder or Zip archive. + */ +CV_EXPORTS_W void addResourceLocation(const String& path); + +/** + * create a new rendering window/viewport + * @param title window title + * @param size size of the window + * @param flags a combination of @ref SceneSettings flags + */ +CV_EXPORTS_W Ptr<WindowScene> createWindow(const String& title, const Size& size, + int flags = SCENE_INTERACTIVE); + +/** + * update all windows + * @return true if this function can be called again (i.e. continue rendering), false otherwise.
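+ *
+ * a minimal render loop might look like this (sketch; the window title and size are arbitrary):
+ * @code
+ * Ptr<ovis::WindowScene> win = ovis::createWindow("demo", Size(640, 480));
+ * while (ovis::renderOneFrame())
+ * {
+ *     // update entity or camera poses here, e.g. via win->setCameraPose(...)
+ * }
+ * @endcode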
+ */ +CV_EXPORTS_W bool renderOneFrame(); + +/** + * set the property of a material to the given value + * @param name material name + * @param prop property @ref MaterialProperty + * @param value the value + */ +CV_EXPORTS_W void setMaterialProperty(const String& name, int prop, const Scalar& value); + +/// @overload +CV_EXPORTS_W void setMaterialProperty(const String& name, int prop, const String& value); + +/** + * create a 2D plane, X right, Y down, Z up + * + * creates a material and a texture with the same name + * @param name name of the mesh + * @param size size in world units + * @param image optional texture + */ +CV_EXPORTS_W void createPlaneMesh(const String& name, const Size2f& size, InputArray image = noArray()); + +/** + * creates a point cloud mesh + * + * creates a material with the same name + * @param name name of the mesh + * @param vertices float vector of positions + * @param colors uchar vector of colors + */ +CV_EXPORTS_W void createPointCloudMesh(const String& name, InputArray vertices, InputArray colors = noArray()); + +/** + * creates a grid + * + * creates a material with the same name + * @param name name of the mesh + * @param size extents of the grid + * @param segments number of segments per side + */ +CV_EXPORTS_W void createGridMesh(const String& name, const Size2f& size, const Size& segments = Size(1, 1)); +//! @} +} +} + +#endif diff --git a/modules/ovis/samples/aruco_ar_demo.py b/modules/ovis/samples/aruco_ar_demo.py new file mode 100644 index 000000000..ae3ba764f --- /dev/null +++ b/modules/ovis/samples/aruco_ar_demo.py @@ -0,0 +1,35 @@ +import cv2 +import numpy as np + +# aruco +adict = cv2.aruco.Dictionary_get(cv2.aruco.DICT_4X4_50) +cv2.imshow("marker", cv2.aruco.drawMarker(adict, 0, 400)) + +# random calibration data. your mileage may vary. 
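+# for a real camera, replace K below with the matrix obtained from an actual
+# calibration (e.g. cv2.calibrateCamera)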
+imsize = (800, 600) +K = cv2.getDefaultNewCameraMatrix(np.diag([800, 800, 1]), imsize, True) + +# AR scene +cv2.ovis.addResourceLocation("packs/Sinbad.zip") # shipped with Ogre + +win = cv2.ovis.createWindow("arucoAR", imsize, flags=0) +win.createEntity("figure", "Sinbad.mesh", (0, 0, -5), (-1.57, 0, 0)) +win.createLightEntity("sun", (0, 0, -100)) + +# video capture +cap = cv2.VideoCapture(0) +cap.set(cv2.CAP_PROP_FRAME_WIDTH, imsize[0]) +cap.set(cv2.CAP_PROP_FRAME_HEIGHT, imsize[1]) + +while cv2.ovis.renderOneFrame(): + img = cap.read()[1] + win.setBackground(img) + corners, ids = cv2.aruco.detectMarkers(img, adict)[:2] + + cv2.waitKey(1) + + if ids is None: + continue + + rvecs, tvecs = cv2.aruco.estimatePoseSingleMarkers(corners, 5, K, None)[:2] + win.setCameraPose(tvecs[0].ravel(), rvecs[0].ravel(), invert=True) diff --git a/modules/ovis/samples/ovis_demo.py b/modules/ovis/samples/ovis_demo.py new file mode 100644 index 000000000..5dc643e9a --- /dev/null +++ b/modules/ovis/samples/ovis_demo.py @@ -0,0 +1,28 @@ +import cv2 +import numpy as np + +# add some external resources +cv2.ovis.addResourceLocation("packs/Sinbad.zip") + +# camera intrinsics +imsize = (800, 600) +K = np.diag([800, 800, 1]) +K[:2, 2] = (400, 100) # offset pp + +# observer scene +owin = cv2.ovis.createWindow("VR", imsize) +cv2.ovis.createGridMesh("ground", (10, 10), (10, 10)) +owin.createEntity("ground", "ground", rot=(1.57, 0, 0)) +owin.createCameraEntity("cam", K, imsize, 5) +owin.createEntity("figure", "Sinbad.mesh", (0, -5, 0)) # externally defined mesh +owin.createLightEntity("sun", (0, 0, -100)) + +# interaction scene +iwin = cv2.ovis.createWindow("AR", imsize, cv2.ovis.SCENE_SEPERATE | cv2.ovis.SCENE_INTERACTIVE) +iwin.createEntity("figure", "Sinbad.mesh", (0, -5, 0)) +iwin.createLightEntity("sun", (0, 0, -100)) +iwin.setCameraIntrinsics(K, imsize) + +while cv2.ovis.renderOneFrame(): + R, t = iwin.getCameraPose() + owin.setEntityPose("cam", t, R) diff --git a/modules/ovis/src/meshes.cpp b/modules/ovis/src/meshes.cpp new file mode 100644 index 000000000..c8cd481be --- /dev/null +++ b/modules/ovis/src/meshes.cpp @@ -0,0 +1,156 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
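+//
+// mesh factory functions (plane, point cloud, grid); each of them also
+// creates an Ogre material with the same name as the mesh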
+ +#include "precomp.hpp" + +namespace cv +{ +namespace ovis +{ +using namespace Ogre; + +void createPlaneMesh(const String& name, const Size2f& size, InputArray image) +{ + CV_Assert(_app); + + // material + MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME); + + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + rpass->setCullingMode(CULL_NONE); + rpass->setEmissive(ColourValue::White); + + if (!image.empty()) + { + _createTexture(name, image.getMat()); + rpass->createTextureUnitState(name); + } + + // plane + MovablePlane plane(Vector3::UNIT_Z, 0); + MeshPtr mesh = MeshManager::getSingleton().createPlane( + name, RESOURCEGROUP_NAME, plane, size.width, size.height, 1, 1, true, 1, 1, 1, Vector3::UNIT_Y); + mesh->getSubMesh(0)->setMaterialName(name); +} + +void createPointCloudMesh(const String& name, InputArray vertices, InputArray colors) +{ + int color_type = colors.type(); + CV_Assert(_app, vertices.type() == CV_32FC3 && vertices.isContinuous(), + colors.empty() || color_type == CV_8UC3 || color_type == CV_8UC4); + + // material + MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME); + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + rpass->setEmissive(ColourValue::White); + + // mesh + MeshPtr mesh = MeshManager::getSingleton().createManual(name, RESOURCEGROUP_NAME); + SubMesh* sub = mesh->createSubMesh(); + sub->useSharedVertices = true; + sub->operationType = RenderOperation::OT_POINT_LIST; + sub->setMaterialName(name); + + int n = vertices.rows(); + + mesh->sharedVertexData = new VertexData(); + mesh->sharedVertexData->vertexCount = n; + VertexDeclaration* decl = mesh->sharedVertexData->vertexDeclaration; + + // vertex data + HardwareBufferManager& hbm = HardwareBufferManager::getSingleton(); + + Mat _vertices = vertices.getMat(); + + int source = 0; + HardwareVertexBufferSharedPtr hwbuf; + + decl->addElement(source, 0, VET_FLOAT3, VES_POSITION); + hwbuf = hbm.createVertexBuffer(decl->getVertexSize(source), n, HardwareBuffer::HBU_STATIC_WRITE_ONLY); + hwbuf->writeData(0, hwbuf->getSizeInBytes(), _vertices.ptr(), true); + mesh->sharedVertexData->vertexBufferBinding->setBinding(source, hwbuf); + + // color data + if (!colors.empty()) + { + mat->setLightingEnabled(false); + source += 1; + + Mat col4; + cvtColor(colors, col4, color_type == CV_8UC3 ? 
COLOR_BGR2RGBA : COLOR_BGRA2RGBA); + + decl->addElement(source, 0, VET_COLOUR, VES_DIFFUSE); + hwbuf = + hbm.createVertexBuffer(decl->getVertexSize(source), n, HardwareBuffer::HBU_STATIC_WRITE_ONLY); + hwbuf->writeData(0, hwbuf->getSizeInBytes(), col4.ptr(), true); + mesh->sharedVertexData->vertexBufferBinding->setBinding(source, hwbuf); + + rpass->setVertexColourTracking(TVC_DIFFUSE); + } + + AxisAlignedBox bounds(AxisAlignedBox::EXTENT_NULL); + for (int i = 0; i < n; i++) + { + Vec3f v = _vertices.at<Vec3f>(i); + bounds.merge(Vector3(v[0], v[1], v[2])); + } + mesh->_setBounds(bounds); +} + +void createGridMesh(const String& name, const Size2f& size, const Size& segments) +{ + CV_Assert(_app, !segments.empty()); + + // material + MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME); + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + rpass->setEmissive(ColourValue::White); + + // mesh + MeshPtr mesh = MeshManager::getSingleton().createManual(name, RESOURCEGROUP_NAME); + SubMesh* sub = mesh->createSubMesh(); + sub->useSharedVertices = true; + sub->operationType = RenderOperation::OT_LINE_LIST; + sub->setMaterialName(name); + + int n = (segments.width + 1) * 2 + (segments.height + 1) * 2; + + mesh->sharedVertexData = new VertexData(); + mesh->sharedVertexData->vertexCount = n; + VertexDeclaration* decl = mesh->sharedVertexData->vertexDeclaration; + + // vertex data + HardwareBufferManager& hbm = HardwareBufferManager::getSingleton(); + + int source = 0; + HardwareVertexBufferSharedPtr hwbuf; + decl->addElement(source, 0, VET_FLOAT2, VES_POSITION); + hwbuf = hbm.createVertexBuffer(decl->getVertexSize(source), n, HardwareBuffer::HBU_STATIC_WRITE_ONLY); + mesh->sharedVertexData->vertexBufferBinding->setBinding(source, hwbuf); + + Vector2 step = Vector2(size.width, size.height) / Vector2(segments.width, segments.height); + + Vec2f* data = (Vec2f*)hwbuf->lock(HardwareBuffer::HBL_DISCARD); + + for (int i = 0; i < segments.width + 1; i++) + { + data[i * 2] = Vec2f(-size.width / 2 + step.x * i, -size.height / 2); + data[i * 2 + 1] = Vec2f(-size.width / 2 + step.x * i, size.height / 2); + } + + data += (segments.width + 1) * 2; + + for (int i = 0; i < segments.height + 1; i++) + { + data[i * 2] = Vec2f(-size.width / 2, -size.height / 2 + step.y * i); + data[i * 2 + 1] = Vec2f(size.width / 2, -size.height / 2 + step.y * i); + } + + hwbuf->unlock(); + + Vector3 sz(size.width, size.height, 0); + mesh->_setBounds(AxisAlignedBox(-sz/2, sz/2)); +} +} +} diff --git a/modules/ovis/src/ovis.cpp b/modules/ovis/src/ovis.cpp new file mode 100644 index 000000000..bf71993cd --- /dev/null +++ b/modules/ovis/src/ovis.cpp @@ -0,0 +1,580 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. 
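+//
+// core implementation: the shared Ogre application context, the WindowScene
+// backend and the OpenCV <-> Ogre coordinate conversion helpers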
+ +#include "precomp.hpp" + +#include <OgreApplicationContext.h> +#include <OgreCameraMan.h> +#include <OgreRectangle2D.h> + +#include <opencv2/calib3d.hpp> + +namespace cv +{ +namespace ovis +{ +using namespace Ogre; + +const char* RESOURCEGROUP_NAME = "OVIS"; +Ptr<Application> _app; + +static const char* RENDERSYSTEM_NAME = "OpenGL 3+ Rendering Subsystem"; +static std::vector<String> _extraResourceLocations; + +// convert from OpenCV to Ogre coordinates: +// rotation by 180° around x axis +static Matrix3 toOGRE = Matrix3(1, 0, 0, 0, -1, 0, 0, 0, -1); +static Vector2 toOGRE_SS = Vector2(1, -1); + +WindowScene::~WindowScene() {} + +void _createTexture(const String& name, Mat image) +{ + TextureManager& texMgr = TextureManager::getSingleton(); + TexturePtr tex = texMgr.getByName(name, RESOURCEGROUP_NAME); + + Image im; + im.loadDynamicImage(image.ptr(), image.cols, image.rows, 1, PF_BYTE_BGR); + + if (tex) + { + // update + PixelBox box = im.getPixelBox(); + tex->getBuffer()->blitFromMemory(box, box); + return; + } + + texMgr.loadImage(name, RESOURCEGROUP_NAME, im); +} + +static void _convertRT(InputArray rot, InputArray tvec, Quaternion& q, Vector3& t, + bool invert = false) +{ + CV_Assert(rot.empty() || rot.rows() == 3 || rot.size() == Size(3, 3), + tvec.empty() || tvec.rows() == 3); + + q = Quaternion::IDENTITY; + t = Vector3::ZERO; + + if (!rot.empty()) + { + Mat _R; + + if (rot.size() == Size(3, 3)) + { + _R = rot.getMat(); + } + else + { + Rodrigues(rot, _R); + } + + Matrix3 R; + _R.copyTo(Mat_<Real>(3, 3, R[0])); + q = Quaternion(toOGRE * R); + + if (invert) + { + q = q.Inverse(); + } + } + + if (!tvec.empty()) + { + tvec.copyTo(Mat_<Real>(3, 1, t.ptr())); + t = toOGRE * t; + + if(invert) + { + t = q * -t; + } + } +} + +static void _setCameraIntrinsics(Camera* cam, InputArray _K, const Size& imsize) +{ + CV_Assert(_K.size() == Size(3, 3)); + + cam->setAspectRatio(float(imsize.width) / imsize.height); + + Matx33f K = _K.getMat(); + + float fovy = atan2(K(1, 2), K(1, 1)) + atan2(imsize.height - K(1, 2), K(1, 1)); + cam->setFOVy(Radian(fovy)); + + Vec2f pp_offset = Vec2f(0.5, 0.5) - Vec2f(K(0, 2) / imsize.width, K(1, 2) / imsize.height); + cam->setFrustumOffset(toOGRE_SS * Vector2(pp_offset.val)); +} + +static SceneNode* _getSceneNode(SceneManager* sceneMgr, const String& name) +{ + MovableObject* mo = NULL; + + try + { + mo = sceneMgr->getCamera(name); + } + catch (ItemIdentityException&) + { + // ignore + } + + try + { + if (!mo) + mo = sceneMgr->getLight(name); + } + catch (ItemIdentityException&) + { + // ignore + } + + if (!mo) + mo = sceneMgr->getEntity(name); + + return mo->getParentSceneNode(); +} + +struct Application : public OgreBites::ApplicationContext +{ + Ogre::SceneManager* sceneMgr; + Ogre::String title; + uint32_t w; + uint32_t h; + + Application(const Ogre::String& _title, const Size& sz) + : OgreBites::ApplicationContext("ovis", false), sceneMgr(NULL), title(_title), w(sz.width), + h(sz.height) + { + } + + void setupInput(bool /*grab*/) + { + // empty impl to show cursor + } + + bool oneTimeConfig() + { + Ogre::RenderSystem* rs = getRoot()->getRenderSystemByName(RENDERSYSTEM_NAME); + CV_Assert(rs); + getRoot()->setRenderSystem(rs); + return true; + } + + OgreBites::NativeWindowPair createWindow(const Ogre::String& name, uint32_t _w, uint32_t _h, + NameValuePairList miscParams = NameValuePairList()) + { + Ogre::String _name = name; + if (!sceneMgr) + { + _w = w; + _h = h; + _name = title; + } + miscParams["FSAA"] = "4"; + miscParams["vsync"] = "true"; + + return 
OgreBites::ApplicationContext::createWindow(_name, _w, _h, miscParams); + } + + void locateResources() + { + OgreBites::ApplicationContext::locateResources(); + ResourceGroupManager& rgm = ResourceGroupManager::getSingleton(); + rgm.createResourceGroup(RESOURCEGROUP_NAME); + + for (size_t i = 0; i < _extraResourceLocations.size(); i++) + { + String loc = _extraResourceLocations[i]; + String type = StringUtil::endsWith(loc, ".zip") ? "Zip" : "FileSystem"; + + if (!FileSystemLayer::fileExists(loc)) + { + loc = FileSystemLayer::resolveBundlePath(getDefaultMediaDir() + "/" + loc); + } + + rgm.addResourceLocation(loc, type, RESOURCEGROUP_NAME); + } + } + + void setup() + { + OgreBites::ApplicationContext::setup(); + + MaterialManager& matMgr = MaterialManager::getSingleton(); + matMgr.setDefaultTextureFiltering(TFO_ANISOTROPIC); + matMgr.setDefaultAnisotropy(16); + } +}; + +class WindowSceneImpl : public WindowScene, public OgreBites::InputListener +{ + String title; + Root* root; + SceneManager* sceneMgr; + SceneNode* camNode; + RenderWindow* rWin; + Ptr<OgreBites::CameraMan> camman; + Ptr<Rectangle2D> bgplane; + +public: + WindowSceneImpl(Ptr<Application> app, const String& _title, const Size& sz, int flags) + : title(_title), root(app->getRoot()) + { + if (!app->sceneMgr) + { + flags |= SCENE_SEPERATE; + } + + if (flags & SCENE_SEPERATE) + { + sceneMgr = root->createSceneManager("DefaultSceneManager", title); + RTShader::ShaderGenerator& shadergen = RTShader::ShaderGenerator::getSingleton(); + shadergen.addSceneManager(sceneMgr); // must be done before we do anything with the scene + + sceneMgr->setAmbientLight(ColourValue(.1, .1, .1)); + _createBackground(); + } + else + { + sceneMgr = app->sceneMgr; + } + + if(flags & SCENE_SHOW_CS_CROSS) + { + sceneMgr->setDisplaySceneNodes(true); + } + + Camera* cam = sceneMgr->createCamera(title); + cam->setNearClipDistance(0.5); + cam->setAutoAspectRatio(true); + camNode = sceneMgr->getRootSceneNode()->createChildSceneNode(); + camNode->attachObject(cam); + + if (flags & SCENE_INTERACTIVE) + { + camman.reset(new OgreBites::CameraMan(camNode)); + camman->setStyle(OgreBites::CS_ORBIT); + } + + if (!app->sceneMgr) + { + app->sceneMgr = sceneMgr; + rWin = app->getRenderWindow(); + app->addInputListener(this); + + if (camman) + app->addInputListener(camman.get()); + } + else + { + OgreBites::NativeWindowPair nwin = app->createWindow(title, sz.width, sz.height); + rWin = nwin.render; + if (camman) + app->addInputListener(nwin.native, camman.get()); + + app->addInputListener(nwin.native, this); + } + + rWin->addViewport(cam); + } + + void setBackground(InputArray image) + { + CV_Assert(image.type() == CV_8UC3, bgplane); + + String name = sceneMgr->getName() + "_Background"; + + _createTexture(name, image.getMat()); + + // correct for pixel centers + Vector2 pc(0.5 / image.cols(), 0.5 / image.rows()); + bgplane->setUVs(pc, Vector2(pc[0], 1 - pc[1]), Vector2(1 - pc[0], pc[1]), Vector2(1, 1) - pc); + + Pass* rpass = bgplane->getMaterial()->getBestTechnique()->getPasses()[0]; + rpass->getTextureUnitStates()[0]->setTextureName(name); + } + + void createEntity(const String& name, const String& meshname, InputArray tvec, InputArray rot) + { + Entity* ent = sceneMgr->createEntity(name, meshname, RESOURCEGROUP_NAME); + + Quaternion q; + Vector3 t; + _convertRT(rot, tvec, q, t); + SceneNode* node = sceneMgr->getRootSceneNode()->createChildSceneNode(t, q); + node->attachObject(ent); + } + + Rect2d createCameraEntity(const String& name, InputArray K, const Size& imsize, 
float zFar, + InputArray tvec, InputArray rot) + { + MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME); + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + rpass->setEmissive(ColourValue::White); + + Camera* cam = sceneMgr->createCamera(name); + cam->setMaterial(mat); + + cam->setVisible(true); + cam->setDebugDisplayEnabled(true); + cam->setNearClipDistance(1e-9); + cam->setFarClipDistance(zFar); + + _setCameraIntrinsics(cam, K, imsize); + + Quaternion q; + Vector3 t; + _convertRT(rot, tvec, q, t); + SceneNode* node = sceneMgr->getRootSceneNode()->createChildSceneNode(t, q); + node->attachObject(cam); + + RealRect ext = cam->getFrustumExtents(); + float scale = zFar / cam->getNearClipDistance(); // convert to ext at zFar + + return Rect2d(toOGRE_SS[0] * (ext.right - ext.width() / 2) * scale, + toOGRE_SS[1] * (ext.bottom - ext.height() / 2) * scale, ext.width() * scale, + ext.height() * scale); + } + + void createLightEntity(const String& name, InputArray tvec, InputArray rot, const Scalar& diffuseColour, + const Scalar& specularColour) + { + Light* light = sceneMgr->createLight(name); + light->setDirection(Vector3::NEGATIVE_UNIT_Z); + // convert to BGR + light->setDiffuseColour(ColourValue(diffuseColour[2], diffuseColour[1], diffuseColour[0])); + light->setSpecularColour(ColourValue(specularColour[2], specularColour[1], specularColour[0])); + + Quaternion q; + Vector3 t; + _convertRT(rot, tvec, q, t); + SceneNode* node = sceneMgr->getRootSceneNode()->createChildSceneNode(t, q); + node->attachObject(light); + } + + void updateEntityPose(const String& name, InputArray tvec, InputArray rot) + { + SceneNode* node = _getSceneNode(sceneMgr, name); + Quaternion q; + Vector3 t; + _convertRT(rot, tvec, q, t); + node->rotate(q, Ogre::Node::TS_LOCAL); + node->translate(t, Ogre::Node::TS_LOCAL); + } + + void setEntityPose(const String& name, InputArray tvec, InputArray rot, bool invert) + { + SceneNode* node = _getSceneNode(sceneMgr, name); + Quaternion q; + Vector3 t; + _convertRT(rot, tvec, q, t, invert); + node->setOrientation(q); + node->setPosition(t); + } + + void _createBackground() + { + String name = "_" + sceneMgr->getName() + "_DefaultBackground"; + + Mat_<Vec3b> img = (Mat_<Vec3b>(2, 1) << Vec3b(2, 1, 1), Vec3b(240, 120, 120)); + _createTexture(name, img); + + MaterialPtr mat = MaterialManager::getSingleton().create(name, RESOURCEGROUP_NAME); + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + rpass->setLightingEnabled(false); + rpass->setDepthCheckEnabled(false); + rpass->setDepthWriteEnabled(false); + rpass->createTextureUnitState(name); + + bgplane.reset(new Rectangle2D(true)); + bgplane->setCorners(-1.0, 1.0, 1.0, -1.0); + + // correct for pixel centers + Vector2 pc(0.5 / img.cols, 0.5 / img.rows); + bgplane->setUVs(pc, Vector2(pc[0], 1 - pc[1]), Vector2(1 - pc[0], pc[1]), Vector2(1, 1) - pc); + + bgplane->setMaterial(mat); + bgplane->setRenderQueueGroup(RENDER_QUEUE_BACKGROUND); + bgplane->setBoundingBox(AxisAlignedBox(AxisAlignedBox::BOX_INFINITE)); + + sceneMgr->getRootSceneNode()->createChildSceneNode()->attachObject(bgplane.get()); + } + + void getScreenshot(OutputArray frame) + { + frame.create(rWin->getHeight(), rWin->getWidth(), CV_8UC3); + + Mat out = frame.getMat(); + PixelBox pb(rWin->getWidth(), rWin->getHeight(), 1, PF_BYTE_BGR, out.ptr()); + rWin->copyContentsToMemory(pb, pb); + } + + bool keyPressed(const OgreBites::KeyboardEvent& evt) + { + if (evt.keysym.sym == SDLK_ESCAPE) + root->queueEndRendering(); + + return true; + } 
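+    // (ESC above queues end-of-rendering, which makes renderOneFrame() return false)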
+ + void fixCameraYawAxis(bool useFixed, InputArray _up) + { + Vector3 up = Vector3::UNIT_Y; + if (!_up.empty()) + { + _up.copyTo(Mat_<Real>(3, 1, up.ptr())); + up = toOGRE * up; + } + + Camera* cam = sceneMgr->getCamera(title); + cam->getParentSceneNode()->setFixedYawAxis(useFixed, up); + } + + void setCameraPose(InputArray tvec, InputArray rot, bool invert) + { + Camera* cam = sceneMgr->getCamera(title); + + SceneNode* node = cam->getParentSceneNode(); + Quaternion q; + Vector3 t; + _convertRT(rot, tvec, q, t, invert); + + if (!rot.empty()) + node->setOrientation(q); + + if (!tvec.empty()) + node->setPosition(t); + } + + void getCameraPose(OutputArray R, OutputArray tvec, bool invert) + { + Camera* cam = sceneMgr->getCamera(title); + SceneNode* node = cam->getParentSceneNode(); + + Matrix3 _R; + node->getOrientation().ToRotationMatrix(_R); + + if (invert) + { + _R = _R.Transpose(); + } + + if (tvec.needed()) + { + Vector3 _tvec = node->getPosition(); + + if (invert) + { + _tvec = _R * -_tvec; + } + + _tvec = toOGRE.Transpose() * _tvec; + Mat_<Real>(3, 1, _tvec.ptr()).copyTo(tvec); + } + + if (R.needed()) + { + _R = toOGRE.Transpose() * _R; + Mat_<Real>(3, 3, _R[0]).copyTo(R); + } + } + + void setCameraIntrinsics(InputArray K, const Size& imsize) + { + Camera* cam = sceneMgr->getCamera(title); + _setCameraIntrinsics(cam, K, imsize); + } + + void setCameraLookAt(const String& target, InputArray offset) + { + SceneNode* cam = sceneMgr->getCamera(title)->getParentSceneNode(); + SceneNode* tgt = sceneMgr->getEntity(target)->getParentSceneNode(); + + Vector3 _offset = Vector3::ZERO; + + if (!offset.empty()) + { + offset.copyTo(Mat_<Real>(3, 1, _offset.ptr())); + _offset = toOGRE * _offset; + } + + cam->lookAt(tgt->_getDerivedPosition() + _offset, Ogre::Node::TS_WORLD); + } +}; + +CV_EXPORTS_W void addResourceLocation(const String& path) { _extraResourceLocations.push_back(path); } + +Ptr<WindowScene> createWindow(const String& title, const Size& size, int flags) +{ + if (!_app) + { + _app = makePtr<Application>(title.c_str(), size); + _app->initApp(); + } + + return makePtr<WindowSceneImpl>(_app, title, size, flags); +} + +CV_EXPORTS_W bool renderOneFrame() +{ + CV_Assert(_app); + + _app->getRoot()->renderOneFrame(); + return not _app->getRoot()->endRenderingQueued(); +} + +void setMaterialProperty(const String& name, int prop, const Scalar& val) +{ + CV_Assert(_app); + + MaterialPtr mat = MaterialManager::getSingleton().getByName(name, RESOURCEGROUP_NAME); + + CV_Assert(mat); + + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + ColourValue col; + + switch (prop) + { + case MATERIAL_POINT_SIZE: + rpass->setPointSize(val[0]); + break; + case MATERIAL_OPACITY: + col = rpass->getDiffuse(); + col.a = val[0]; + rpass->setDiffuse(col); + rpass->setSceneBlending(SBT_TRANSPARENT_ALPHA); + rpass->setDepthWriteEnabled(false); + break; + case MATERIAL_EMISSIVE: + col = ColourValue(val[2], val[1], val[0]) / 255; // BGR as uchar + col.saturate(); + rpass->setEmissive(col); + break; + default: + CV_Error(Error::StsBadArg, "invalid or non Scalar property"); + break; + } +} + +void setMaterialProperty(const String& name, int prop, const String& value) +{ + CV_Assert(prop == MATERIAL_TEXTURE, _app); + + MaterialPtr mat = MaterialManager::getSingleton().getByName(name, RESOURCEGROUP_NAME); + CV_Assert(mat); + + Pass* rpass = mat->getTechniques()[0]->getPasses()[0]; + + if (rpass->getTextureUnitStates().empty()) + { + rpass->createTextureUnitState(value); + return; + } + + 
rpass->getTextureUnitStates()[0]->setTextureName(value); +} +} +} diff --git a/modules/ovis/src/precomp.hpp b/modules/ovis/src/precomp.hpp new file mode 100644 index 000000000..3111ee60d --- /dev/null +++ b/modules/ovis/src/precomp.hpp @@ -0,0 +1,25 @@ +// This file is part of OpenCV project. +// It is subject to the license terms in the LICENSE file found in the top-level directory +// of this distribution and at http://opencv.org/license.html. + +#ifndef __OPENCV_PRECOMP_H__ +#define __OPENCV_PRECOMP_H__ + +#include <opencv2/core.hpp> +#include <opencv2/imgproc.hpp> +#include "opencv2/ovis.hpp" +#include "opencv2/opencv_modules.hpp" + +#include <Ogre.h> + +namespace cv { +namespace ovis { +struct Application; +extern Ptr<Application> _app; + +extern const char* RESOURCEGROUP_NAME; +void _createTexture(const String& name, Mat image); +} +} + +#endif