Visualization Library 2.0.0

A lightweight C++ OpenGL middleware for 2D/3D graphics


Stereo Rendering With Anaglyphs

This tutorial demonstrates how simple it is to set up stereo rendering with red/cyan anaglyphs using Visualization Library.
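The idea is to render the scene twice per frame, once per eye: a StereoCamera derives a left and a right camera from the regular mono camera, each eye gets its own Rendering writing to the same framebuffer, and a ColorMask render state restricts the left pass to the red channel and the right pass to the green and blue channels, while the right pass clears only the depth buffer so the two images are composited. The condensed sketch below is only an outline of this setup, using the same VL calls that appear in the full listing that follows:

  mStereoCamera->setMonoCamera( mMonoCamera.get() );
  mStereoCamera->setLeftCamera( mLeftRendering->camera() );
  mStereoCamera->setRightCamera( mRightRendering->camera() );
  // left eye -> red channel only, right eye -> green and blue (cyan)
  mLeftRendering->renderer()->overriddenDefaultRenderStates().push_back( RenderStateSlot( new ColorMask(true, false, false), -1 ) );
  mRightRendering->renderer()->overriddenDefaultRenderStates().push_back( RenderStateSlot( new ColorMask(false, true, true), -1 ) );
  mRightRendering->renderer()->setClearFlags( CF_CLEAR_DEPTH );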

[From App_Stereo.cpp]

#include "BaseDemo.hpp"
using namespace vl;
class App_Stereo: public BaseDemo
{
public:
  virtual String appletInfo()
  {
    return BaseDemo::appletInfo();
  }

  void updateScene()
  {
    /* update the left and right cameras to reflect the movement of the mono camera */
    mStereoCamera->updateLeftRightCameras();

    /* animate the rotating spheres */
    mRootTransform->setLocalMatrix( mat4::getRotation( Time::currentTime() * 45, 0, 1, 0 ) );
    mRootTransform->computeWorldMatrixRecursive();
  }
  void initEvent()
  {
    Log::notify(appletInfo());

    /* save for later */
    OpenGLContext* gl_context = rendering()->as<Rendering>()->renderer()->framebuffer()->openglContext();

    /* let the left and right cameras follow the mono camera */
    mMonoCamera = rendering()->as<Rendering>()->camera();
    mStereoCamera = new StereoCamera;
    mStereoCamera->setMonoCamera( mMonoCamera.get() );

    /* install two renderings, one for the left eye and one for the right */
    mLeftRendering  = new Rendering;
    mRightRendering = new Rendering;
    mMainRendering  = new RenderingTree;
    mMainRendering->subRenderings()->push_back( mLeftRendering.get() );
    mMainRendering->subRenderings()->push_back( mRightRendering.get() );
    setRendering( mMainRendering.get() );

    /* let the left and right renderings share the same scene */
    mLeftRendering->sceneManagers()->push_back( sceneManager() );
    mRightRendering->sceneManagers()->push_back( sceneManager() );

    /* let the left and right renderings write to the same framebuffer */
    mLeftRendering->renderer()->setFramebuffer( gl_context->framebuffer() );
    mRightRendering->renderer()->setFramebuffer( gl_context->framebuffer() );

    /* assign the cameras of the left and right rendering to the StereoCamera;
       the viewport is automatically taken from the mono camera */
    mStereoCamera->setLeftCamera( mLeftRendering->camera() );
    mStereoCamera->setRightCamera( mRightRendering->camera() );

    /* set adequate eye separation and convergence */
    mStereoCamera->setConvergence(20);
    mStereoCamera->setEyeSeparation(1);
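    /* note: good convergence/eye separation values depend on the scale of the scene;
       loadModel() below recomputes them from the scene's bounding sphere */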
    /* setup color masks for red (left) / cyan (right) glasses:
       the left rendering writes only the red channel, the right one only green and blue */
    mLeftRendering->renderer()->overriddenDefaultRenderStates().push_back( RenderStateSlot( new ColorMask(true, false, false), -1 ) );
    mRightRendering->renderer()->overriddenDefaultRenderStates().push_back( RenderStateSlot( new ColorMask(false, true, true), -1 ) );

    /* the right rendering clears only the depth buffer, not the color buffer,
       so its image is composited over the left one */
    mRightRendering->renderer()->setClearFlags( CF_CLEAR_DEPTH );
    /* let the trackball rotate the mono camera */
    trackball()->setCamera( mMonoCamera.get() );
    trackball()->setTransform(NULL);

    /* populate the scene */
    setupScene();
  }
  // populates the scene
  void setupScene()
  {
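    /* a single light and an EnableSet (depth test + lighting) shared by the effects created below */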
    ref<Light> camera_light = new Light;

    ref<EnableSet> enables = new EnableSet;
    enables->enable(EN_DEPTH_TEST);
    enables->enable(EN_LIGHTING);

    ref<Effect> sphere_fx = new Effect;
    sphere_fx->shader()->setEnableSet( enables.get() );
    sphere_fx->shader()->gocMaterial()->setDiffuse( gray );
    sphere_fx->shader()->setRenderState( camera_light.get(), 0 );

    ref<Effect> fx = new Effect;
    fx->shader()->setEnableSet( enables.get() );
    fx->shader()->gocMaterial()->setDiffuse( gray );
    fx->shader()->setRenderState( camera_light.get(), 0 );

    mRootTransform = new Transform;

    // central sphere
    ref<Geometry> sphere = makeUVSphere( vec3(0,0,0), 4 );
    sphere->computeNormals();
    sceneManager()->tree()->addActor( sphere.get(), sphere_fx.get(), mRootTransform.get() );

    // rotating spheres
    float count = 10;
    for(size_t i=0; i<count; ++i)
    {
      ref<Geometry> satellite = makeUVSphere( vec3(7,0,0), 2.5 );
      satellite->computeNormals();
      ref<Transform> child_transform = new Transform;
      mRootTransform->addChild( child_transform.get() );
      child_transform->setLocalMatrix( mat4::getRotation( 360.0f * (i / count), 0, 1, 0 ) );
      sceneManager()->tree()->addActor( satellite.get(), fx.get(), child_transform.get() );
    }
  }
  void resizeEvent(int w, int h)
  {
    /* update the viewport of the main camera */
    mMonoCamera->viewport()->setWidth ( w );
    mMonoCamera->viewport()->setHeight( h );

    /* update the left and right cameras since the viewport has changed */
    mStereoCamera->updateLeftRightCameras();
  }
  void loadModel(const std::vector<String>& files)
  {
    // resets the scene
    sceneManager()->tree()->actors()->clear();

    for(unsigned int ifile=0; ifile<files.size(); ++ifile)
    {
      ref<ResourceDatabase> resource_db = loadResource( files[ifile], true );
      if (!resource_db || resource_db->count<Actor>() == 0)
      {
        Log::error("No data found.\n");
        continue;
      }

      std::vector< ref<Actor> > actors;
      resource_db->get<Actor>(actors);
      for(unsigned iact=0; iact<actors.size(); ++iact)
      {
        ref<Actor> actor = actors[iact].get();
        // define a reasonable Shader
        actor->effect()->shader()->setRenderState( new Light, 0 );
        actor->effect()->shader()->gocLightModel()->setTwoSide(true);
        // add the actor to the scene
        sceneManager()->tree()->addActor( actor.get() );
      }
    }

    // position the camera to nicely see the objects in the scene
    trackball()->adjustView( sceneManager(), vec3(0,0,1)/*direction*/, vec3(0,1,0)/*up*/, 1.0f/*bias*/ );

    /* try to adjust the convergence and eye separation to reasonable values */
    sceneManager()->computeBounds();
    real convergence = sceneManager()->boundingSphere().radius() / 2;
    real eye_separation = convergence / 20;
    mStereoCamera->setConvergence(convergence);
    mStereoCamera->setEyeSeparation(eye_separation);
    Log::notify( Say("Convergence = %n\n") << convergence );
    Log::notify( Say("Eye separation = %n\n") << eye_separation );
  }
  // load the files dropped onto the window
  void fileDroppedEvent(const std::vector<String>& files) { loadModel(files); }
protected:
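  /* kept as members so the renderings, cameras and transforms stay alive for the whole lifetime of the applet */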
  ref<RenderingTree> mMainRendering;
  ref<Rendering> mLeftRendering;
  ref<Rendering> mRightRendering;
  ref<Rendering> mCompositingRendering;
  ref<Camera> mMonoCamera;
  ref<StereoCamera> mStereoCamera;
  ref<Transform> mRootTransform;
};
// Have fun!
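As loadModel() above illustrates, sensible stereo parameters depend on the scale of the scene: setting the convergence to about half the radius of the scene's bounding sphere and the eye separation to roughly 1/20 of the convergence is a reasonable starting point, which you can then tweak for a stronger or weaker depth effect.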