Commit fdc6494d authored by Robert Menzel

added SimpleRiftController

parent c8d60cf8
/**
*
* Includes the Oculus Rift LibOVR but tries to suppress as many compiler warnings
* as possible.
*
*/
#ifdef ACGL_USE_OCULUS_RIFT
/////////////////////////////////////////////////////////////////////////////////////
// ignore compiler warnings from LibOVR:
//
#ifdef _MSC_VER
#pragma warning( push )
#pragma warning ( disable : 4201 )
#pragma warning ( disable : 4100 )
#pragma warning ( disable : 4996 )
#pragma warning ( disable : 4244 )
#endif
#if (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 6)) || (__GNUC__ > 4))
#define COMPILER_IS_GCC_4_6_OR_NEWER
#endif
#ifdef __clang__
// clang/llvm:
# pragma clang diagnostic push
# pragma clang diagnostic ignored "-Wuninitialized"
# pragma clang diagnostic ignored "-Wunused-parameter"
#elif defined __GNUC__
# ifdef COMPILER_IS_GCC_4_6_OR_NEWER
// gcc >= 4.6:
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wtype-limits"
# pragma GCC diagnostic ignored "-Wstrict-aliasing"
# pragma GCC diagnostic ignored "-Wattributes"
# pragma GCC diagnostic ignored "-Wreorder"
# endif
// gcc:
# pragma GCC diagnostic ignored "-Wunused-parameter"
#endif
//
/////////////////////////////////////////////////////////////////////////////////////
#include <OVR.h>
#include <OVRVersion.h>
/////////////////////////////////////////////////////////////////////////////////////
// reactivate compiler warnings:
//
#ifdef __clang__
// clang/llvm:
# pragma clang diagnostic pop
#elif defined COMPILER_IS_GCC_4_6_OR_NEWER
// gcc >= 4.6:
# pragma GCC diagnostic pop
#endif
#ifdef _MSC_VER
#pragma warning( pop )
#endif
//
/////////////////////////////////////////////////////////////////////////////////////
#endif // ACGL_USE_OCULUS_RIFT
#pragma once
/**
* IMPORTANT:
*
* This class needs the LibOVR version 0.2.4 or higher to work.
* Headers of this lib need to be placed in the search path.
*
* In addition ACGL_USE_OCULUS_RIFT has to be defined.
*
*/
#include <ACGL/ACGL.hh>
#ifdef ACGL_USE_OCULUS_RIFT
#include <ACGL/Math/Math.hh>
#include <ACGL/Scene/HMDCamera.hh>
#include <ACGL/OpenGL/Objects/Texture.hh>
#include <ACGL/OpenGL/Managers.hh>
#include <ACGL/HardwareSupport/OVRWrapper.hh>
namespace ACGL{
namespace HardwareSupport{
/**
* This class provides access to the Oculus Rift. It can read out the orientation and control an HMDCamera
* based on this.
* Distorted rendering is provided in two ways:
* * renderDistorted( texture ) if the input is one side-by-side rendered image
* * renderDistorted( texture, texture ) if the inputs are two separate textures, one per eye
*
* Alternatively the application can implement the distortion on its own (e.g. to add other effects in the
* same pass). For this the needed parameters are provided.
*
* Use the camera provided by this class (getCamera) or provide your own (attachCamera).
* This class needs to use an HMDCamera, which is derived from GenericCamera!
*/
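//
// A minimal usage sketch (assumptions: 'windowIsOpen', 'renderSceneSideBySide' and
// 'sideBySideTexture' are provided by the application, they are not part of this class):
//
//   ACGL::HardwareSupport::SimpleRiftController rift;  // connects to Rift 0
//   while (windowIsOpen) {
//       rift.updateCamera();                           // pull the HMD orientation into the camera
//       renderSceneSideBySide( rift.getCamera() );     // application side: render both eyes side-by-side
//       rift.renderDistorted( sideBySideTexture );     // distortion pass into framebuffer 0
//   }
//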
class SimpleRiftController
{
public:
/**
* _riftnumber: which device to use in case multiple are attached
* _performAutomaticMagneticCalibration: try to calibrate the magnetometer to reduce drift;
*     the user has to look into at least four very different directions
*     for this to work.
*/
SimpleRiftController( uint32_t _riftnumber = 0, bool _performAutomaticMagneticCalibration = true );
~SimpleRiftController();
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Camera and sensor handling:
//
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
//! attach an external camera to manipulate (see updateCamera)
//! by default the SimpleRiftController already has a camera which can be used as well
//! only one camera can be attached at a time
void attachCamera( ACGL::Scene::SharedHMDCamera _camera );
ACGL::Scene::SharedHMDCamera getCamera() { return mCamera; }
//! Query the orientation of the Rift and set it as the camera's orientation.
//! This will do nothing if no Rift is attached (so the camera can still get controlled
//! e.g. by a mouse)!
void updateCamera();
//! returns the current orientation of the device as a rotation matrix.
//! this can be used as an alternative to updateCamera if the attached camera should not be used.
glm::mat3 getCurrentRotation();
//! start the automatic calibration process, by default this is done in the constructor
void startMagneticCalibration();
//! delete the calibration data and work uncalibrated. A new calibration can be started later if desired
void deactivateMagneticDriftCorrection();
//! check if the calibration worked
bool magneticCalibrationDone();
//! sets the amount of seconds to predict the head movements into the future
//! default is 0.03f, should be no more than the rendering latency!
void setPrediction( float _seconds );
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// RAW parameters for distortion rendering:
//
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
//! Default is 1.0, larger values result in higher FoVs and larger areas of the Rift being used.
//! Note that - depending on the lenses used - the user will not be able to see the whole screen.
//! Often 1.75 is enough.
//! Increase the offscreen rendering viewport accordingly to counter the decreased image quality.
void setDistortionScaleFactor( float _f );
float getDistortionScaleFactor() { return mDistortionScaleFactor; }
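//
// A sketch of how this is typically combined with a larger offscreen render target
// ('fbo' is an assumed application-side framebuffer object, not part of this class):
//   rift.setDistortionScaleFactor( 1.75f );
//   glm::uvec2 screen = rift.getPhysicalScreenResolution();
//   fbo->resize( uint32_t( screen.x * 1.75f ), uint32_t( screen.y * 1.75f ) );
//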
//! x,y are the values for the left eye. z,w the values for the right eye
glm::vec4 getLensCenter();
//! x,y are the values for the left eye. z,w the values for the right eye
glm::vec4 getScreenCenter();
//! x,y are the values for both eyes, ignore z,w
glm::vec4 getScale();
//! x,y are the values for both eyes, ignore z,w
glm::vec4 getScaleIn();
//! the four distortion parameters are the same for both eyes
glm::vec4 getHmdWarpParam();
//! the four chromatic aberration parameters are the same for both eyes
glm::vec4 getChromAbParam();
//! the full physical screen resolution, offscreen rendering should get performed at a higher resolution!
//! 'full' means it's the size used for both eyes!
glm::uvec2 getPhysicalScreenResolution();
//! returns the stereo projection from the stored camera adjusted for the Rift
//! returns nonsense in case no camera was set
glm::mat4 getProjectionMatrixFromCamera();
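//
// Sketch for applications that implement the distortion pass themselves; the uniform names
// below mirror the ones the integrated renderDistorted() uses ('myProgram' is assumed to be
// the application's own distortion shader program):
//   myProgram->use();
//   myProgram->setUniform( "uLensCenter",   getLensCenter() );
//   myProgram->setUniform( "uScreenCenter", getScreenCenter() );
//   myProgram->setUniform( "uScale",        getScale() );
//   myProgram->setUniform( "uScaleIn",      getScaleIn() );
//   myProgram->setUniform( "uHmdWarpParam", getHmdWarpParam() );
//   myProgram->setUniform( "uChromAbParam", getChromAbParam() );
//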
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// Optional integrated distortion rendering:
//
// If it is used, make sure the RiftDistort* shader files are located where the
// ShaderProgramFileManager can find them.
// These functions may set texture units 0..3 and render to framebuffer 0 (which will get bound) using a viewport
// matching the physical dimensions of the Rift!
//
///////////////////////////////////////////////////////////////////////////////////////////////////////////////
void renderDistorted( ACGL::OpenGL::ConstSharedTexture2D _sideBySideTexture );
void renderDistorted( ACGL::OpenGL::ConstSharedTexture2D _leftTexture, ACGL::OpenGL::ConstSharedTexture2D _rightTexture );
void renderDistorted( ACGL::OpenGL::ConstSharedShaderProgram _program );
bool getSuccessfulConnected() { return mSuccessfulConnected; }
//! activate and deactivate the distortion, only works if the renderers above are used, does not change the raw
//! distortion parameters!
void setDistortion( bool _value ) { mUseDistortion = _value; }
bool getDistortion() { return mUseDistortion; }
//! activate and deactivate the chromatic aberration correction (true to correct the aberration); this only works if the
//! renderers above are used and does not change the raw distortion parameters!
void setChromaticAberation( bool _value ) { mUseChromaticAberation = _value; }
bool getChromaticAberation() { return mUseChromaticAberation; }
//! Sets the size of the final rendering. This should be the size of the window to render into.
void setOutputViewportSize( glm::uvec2 _size ) { mOutputViewport = _size; }
//! Defines the current HMD orientation as "no rotation".
//! Can be used to "reset" the orientation.
//! Note: if the user is just looking in the wrong direction, use setNeutralYaw(); if e.g. looking up should be
//! neutral (lying on the ground), this is the way to go.
void setNeutralPosition();
//! Will define the current view direction as the neutral direction but only takes yaw into account.
//! Will also reset the neutral position.
//! Basically works as if the Rift was started in the current orientation
void setNeutralYaw();
private:
ACGL::OpenGL::ConstSharedShaderProgram mDistortShaderSideBySide;
ACGL::OpenGL::ConstSharedShaderProgram mDistortShaderTwoTextures;
bool mUseDistortion;
bool mUseChromaticAberation;
glm::uvec2 mOutputViewport; // if it's 0,0 -> use the Rift's screen dimensions!
private:
glm::vec4 getShaderValue( int v );
void updateCameraFoV();
bool mSuccessfulConnected;
ACGL::Scene::SharedHMDCamera mCamera;
float mDistortionScaleFactor;
// handles to the rift:
OVR::Ptr<OVR::DeviceManager> mORManager;
OVR::Ptr<OVR::HMDDevice> mORDevice;
OVR::Ptr<OVR::SensorDevice> mORSensor;
OVR::SensorFusion mORSensorFusion;
OVR::HMDInfo mORHMDInfo;
OVR::Util::MagCalibration mMagneticCalibration;
// all rotations are relative to the one the Rift started with:
OVR::Quatf mInverseNeutralRotation; // as quaternion
float mPredictionTime;
};
}
}
#endif // ACGL_USE_OCULUS_RIFT
#include <ACGL/HardwareSupport/SimpleRiftController.hh>
#include <ACGL/OpenGL/Creator/ShaderProgramCreator.hh>
#ifdef ACGL_USE_OCULUS_RIFT
using namespace OVR;
using namespace ACGL;
using namespace ACGL::Utils;
using namespace ACGL::Scene;
using namespace ACGL::HardwareSupport;
using namespace std;
SimpleRiftController::SimpleRiftController( uint32_t _riftnumber, bool _performAutomaticMagneticCalibration )
{
mSuccessfulConnected = false;
mDistortionScaleFactor = 1.0f;
// default values from the first devkit:
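// (these also act as a fallback so the distortion parameters stay usable if no Rift gets connected below)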
mORHMDInfo.HResolution = 1280;
mORHMDInfo.VResolution = 800;
mORHMDInfo.HScreenSize = 0.14976f;
mORHMDInfo.VScreenSize = 0.0935f;
mORHMDInfo.VScreenCenter = mORHMDInfo.VScreenSize*0.5f;
mORHMDInfo.DistortionK[0] = 1.0f;
mORHMDInfo.DistortionK[1] = 0.22f;
mORHMDInfo.DistortionK[2] = 0.24f;
mORHMDInfo.DistortionK[3] = 0.0f; // the fourth coefficient stays zero; getHmdWarpParam() reads all four
mORHMDInfo.EyeToScreenDistance = 0.041f;
mORHMDInfo.ChromaAbCorrection[0] = 0.996f;
mORHMDInfo.ChromaAbCorrection[1] = -0.004f;
mORHMDInfo.ChromaAbCorrection[2] = 1.014f;
mORHMDInfo.ChromaAbCorrection[3] = 0.0f;
mORHMDInfo.LensSeparationDistance = 0.0635f;
mORHMDInfo.InterpupillaryDistance = 0.064f;
mORHMDInfo.DisplayDeviceName[0] = 0;
mUseDistortion = true;
mUseChromaticAberation = true;
mPredictionTime = -1.0f;
mCamera = SharedHMDCamera(); // set to NULL
attachCamera( SharedHMDCamera( new HMDCamera() ) ); // attach a blank camera
// set a good default viewport:
glm::uvec2 viewport = getPhysicalScreenResolution();
mCamera->resize( viewport.x/2, viewport.y );
updateCameraFoV(); // indirectly based on the viewport
debug() << "try to connect to Oculus Rift via SDK " << OVR_VERSION_STRING << endl;
if (_riftnumber != 0) {
error() << "opening any other Rift than ID 0 is not supported yet! Trying to open Rift nr 0" << endl;
}
System::Init( Log::ConfigureDefaultLog( LogMask_All) );
mORManager = *DeviceManager::Create();
if (!mORManager) {
error() << "could not create a Rift Device Manager" << endl;
return;
}
mORDevice = *mORManager->EnumerateDevices<HMDDevice>().CreateDevice();
if (!mORDevice) {
error() << "could not create a Rift device" << endl;
return;
}
debug() << "found HMD" << endl;
if (!mORDevice->GetDeviceInfo( &mORHMDInfo )) {
error() << "could not get HMD device info" << endl;
return;
}
mORSensor = *mORDevice->GetSensor();
if (!mORSensor) {
error() << "could not get sensor of HMD" << endl;
#ifdef __linux
error() << "do you have read/write permissions of /dev/hidraw* ?" << endl;
#endif
return;
}
mORSensorFusion.AttachToSensor( mORSensor );
mSuccessfulConnected = true;
if (_performAutomaticMagneticCalibration) {
startMagneticCalibration();
}
}
SimpleRiftController::~SimpleRiftController()
{
// setting the reference-counted pointers to NULL will call the object destructors:
debug() << "disconnecting from Oculus Rift..." << endl;
mORSensor = NULL;
mORDevice = NULL;
mORManager = NULL;
mORSensorFusion.AttachToSensor( NULL );
System::Destroy(); // Oculus Rift
}
void SimpleRiftController::startMagneticCalibration()
{
deactivateMagneticDriftCorrection(); // in case there was an old calibration already
mMagneticCalibration.BeginAutoCalibration( mORSensorFusion );
}
bool SimpleRiftController::magneticCalibrationDone()
{
return mMagneticCalibration.IsCalibrated();
}
void SimpleRiftController::deactivateMagneticDriftCorrection()
{
mMagneticCalibration.ClearCalibration( mORSensorFusion );
}
void SimpleRiftController::setPrediction( float _seconds )
{
if ( _seconds >= 0.0f ) {
mORSensorFusion.SetPrediction( _seconds );
} else {
mORSensorFusion.SetPrediction( 0.0f, false );
}
mPredictionTime = _seconds;
}
glm::mat4 SimpleRiftController::getProjectionMatrixFromCamera()
{
if (!mCamera) return glm::mat4();
return mCamera->getProjectionMatrix();
}
glm::mat3 riftMatrixToGLM( const Matrix4f &_mat4 )
{
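// OVR stores M[row][column] (row-major) while glm::mat3 is indexed as [column][row],
// so copying M[i][j] into glmMat3[i][j] effectively transposes the upper 3x3 block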
glm::mat3 glmMat3;
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
glmMat3[i][j] = _mat4.M[i][j];
}
}
return glmMat3;
}
void SimpleRiftController::attachCamera( ACGL::Scene::SharedHMDCamera _camera )
{
mCamera = _camera;
mCamera->setInterpupillaryDistance( mORHMDInfo.InterpupillaryDistance );
updateCameraFoV();
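// the lenses are not centered above the middle of each screen half, so the projection has to be
// shifted horizontally; the shift is converted from meters to normalized device coordinates: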
float viewCenterH = mORHMDInfo.HScreenSize * 0.25f;
float eyeProjectionShift = viewCenterH - mORHMDInfo.LensSeparationDistance * 0.5f;
float offsetX = 4.0f * eyeProjectionShift / mORHMDInfo.HScreenSize;
mCamera->setProjectionCenterOffset( glm::vec2(offsetX, 0.0f));
mCamera->setNeckToEyeVerticalDistance( 0.12f );
mCamera->setNeckToEyeHorizontalDistance( 0.08f );
}
glm::mat3 SimpleRiftController::getCurrentRotation()
{
// magnetic calibration:
if (mMagneticCalibration.IsAutoCalibrating()) {
mMagneticCalibration.UpdateAutoCalibration( mORSensorFusion );
if (mMagneticCalibration.IsCalibrated()) {
Vector3f mc = mMagneticCalibration.GetMagCenter();
debug() << "mag center " << mc.x << " " << mc.y << " " << mc.z << " found, enabling yaw correction" << endl;
mORSensorFusion.SetYawCorrectionEnabled( true );
}
}
// update orientation:
Quatf q;
if (mPredictionTime > 0.0f) {
q = mORSensorFusion.GetPredictedOrientation();
} else {
q = mORSensorFusion.GetOrientation();
}
q.Normalize();
//debug() << "Rift orientation: " << q.x << " " << q.y << " " << q.z << " " << q.w << endl;
q = mInverseNeutralRotation * q;
if (mORSensorFusion.IsYawCorrectionEnabled()) {
//debug() << "yaw corrected" << endl;
}
if (mMagneticCalibration.IsCalibrated()) {
//debug() << "calibrated" << endl;
}
Matrix4f orientation( q );
return riftMatrixToGLM( orientation );
}
void SimpleRiftController::setNeutralPosition()
{
mInverseNeutralRotation = mORSensorFusion.GetOrientation();
mInverseNeutralRotation.Normalize();
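// negating the vector part of the normalized quaternion gives its conjugate, which for a
// unit quaternion is the inverse rotation: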
mInverseNeutralRotation.x *= -1.0;
mInverseNeutralRotation.y *= -1.0;
mInverseNeutralRotation.z *= -1.0;
}
void SimpleRiftController::setNeutralYaw()
{
mInverseNeutralRotation = OVR::Quatf();
mORSensorFusion.Reset();
}
void SimpleRiftController::updateCamera()
{
if (!mSuccessfulConnected) return;
mCamera->setHMDRotation( getCurrentRotation() );
}
void SimpleRiftController::updateCameraFoV()
{
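// half the (scaled) screen height and the eye-to-screen distance form a right triangle,
// so the vertical half-angle of the FoV is atan( halfHeight / distance ):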
float percievedHalfRTDist = (mORHMDInfo.VScreenSize / 2) * mDistortionScaleFactor;
float vfov = 2.0f * atan( percievedHalfRTDist/mORHMDInfo.EyeToScreenDistance );
vfov = ACGL::Math::Functions::calcRadToDeg( vfov );
mCamera->setVerticalFieldOfView( vfov );
debug() << "update VFoV: " << vfov << endl;
}
glm::uvec2 SimpleRiftController::getPhysicalScreenResolution()
{
return glm::uvec2( mORHMDInfo.HResolution, mORHMDInfo.VResolution );
}
void SimpleRiftController::setDistortionScaleFactor( float _f ) {
if (mDistortionScaleFactor == _f) return;
ACGL::Utils::debug() << "set distortion scale " << _f << std::endl;
mDistortionScaleFactor = _f;
updateCameraFoV();
}
glm::vec4 SimpleRiftController::getShaderValue( int v )
{
//
// note that this isn't the virtual camera viewport (the size the offscreen rendering is performed in)
// but the physical size of the Rift's screen:
glm::vec2 windowSize = glm::vec2( getPhysicalScreenResolution() );
glm::vec2 viewport = windowSize;
viewport.x /= 2;
glm::vec2 viewportPosL = glm::vec2( 0, 0 );
glm::vec2 viewportPosR = glm::vec2( viewport.x, 0 );
glm::vec2 viewportSize = glm::vec2( viewport.x, viewport.y ); // viewport of one eye
float w = float(viewportSize.x) / float(windowSize.x);
float h = float(viewportSize.y) / float(windowSize.y);
float xl = float(viewportPosL.x) / float(windowSize.x);
float yl = float(viewportPosL.y) / float(windowSize.y);
float xr = float(viewportPosR.x) / float(windowSize.x);
float yr = float(viewportPosR.y) / float(windowSize.y);
// both eyes have the same aspect ratio: half the window size, as the image was rendered side by side
float aspectRatio = (0.5 * windowSize.x) / windowSize.y;
float lensOffset = mORHMDInfo.LensSeparationDistance * 0.5f;
float lensShift = mORHMDInfo.HScreenSize * 0.25f - lensOffset;
float lensViewportShift = 4.0f * lensShift / mORHMDInfo.HScreenSize;
float lensViewportShiftL = lensViewportShift;
float lensViewportShiftR = -lensViewportShift;
glm::vec4 lensCenter;
lensCenter.x = xl + (w + lensViewportShiftL * 0.5f)*0.5f;
lensCenter.y = yl + h*0.5f;
lensCenter.z = xr + (w + lensViewportShiftR * 0.5f)*0.5f;
lensCenter.w = yr + h*0.5f;
glm::vec4 screenCenter;
screenCenter.x = xl + w*0.5f;
screenCenter.y = yl + h*0.5f;
screenCenter.z = xr + w*0.5f;
screenCenter.w = yr + h*0.5f;
glm::vec4 scale( 0.0f ); // z,w are unused (see getScale()), zero them to avoid passing uninitialized values
scale.x = (w/2);
scale.y = (h/2) * aspectRatio;
scale /= mDistortionScaleFactor;
glm::vec4 scaleIn( 0.0f ); // z,w are unused (see getScaleIn())
scaleIn.x = (2/w);
scaleIn.y = (2/h) / aspectRatio;
if (v == 0) return lensCenter;
if (v == 1) return screenCenter;
if (v == 2) return scale;
return scaleIn;
}
glm::vec4 SimpleRiftController::getLensCenter()
{
return getShaderValue(0);
}
glm::vec4 SimpleRiftController::getScreenCenter()
{
return getShaderValue(1);
}
glm::vec4 SimpleRiftController::getScale()
{
return getShaderValue(2);
}
glm::vec4 SimpleRiftController::getScaleIn()
{
return getShaderValue(3);
}
glm::vec4 SimpleRiftController::getHmdWarpParam()
{
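// these coefficients define the radial distortion polynomial evaluated in the warp shader:
// scaledRadius = r * ( K[0] + K[1]*r^2 + K[2]*r^4 + K[3]*r^6 )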
glm::vec4 distortionK;
distortionK.x = mORHMDInfo.DistortionK[0];
distortionK.y = mORHMDInfo.DistortionK[1];
distortionK.z = mORHMDInfo.DistortionK[2];
distortionK.w = mORHMDInfo.DistortionK[3];
return distortionK;
}
glm::vec4 SimpleRiftController::getChromAbParam()
{
glm::vec4 chromaK;
chromaK.x = mORHMDInfo.ChromaAbCorrection[0];
chromaK.y = mORHMDInfo.ChromaAbCorrection[1];
chromaK.z = mORHMDInfo.ChromaAbCorrection[2];
chromaK.w = mORHMDInfo.ChromaAbCorrection[3];
return chromaK;
}
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
//
// optional rendering:
//
///////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
void SimpleRiftController::renderDistorted( OpenGL::ConstSharedTexture2D _sideBySideTexture )
{
if (!mDistortShaderSideBySide) {
// initialize shaders:
mDistortShaderSideBySide = OpenGL::ShaderProgramFileManager::the()->get( OpenGL::ShaderProgramCreator("RiftDistortSideBySide.fsh").andFile("RiftDistort.vsh") );
if (!mDistortShaderSideBySide) {
ACGL::Utils::error() << "could not load distortion shader!" << std::endl;
return;
}
}
mDistortShaderSideBySide->use();
mDistortShaderSideBySide->setTexture("uSamplerColor", _sideBySideTexture, 0 );
renderDistorted( mDistortShaderSideBySide );
}
void SimpleRiftController::renderDistorted( OpenGL::ConstSharedTexture2D _leftTexture, OpenGL::ConstSharedTexture2D _rightTexture )
{
if (!mDistortShaderTwoTextures) {
// initialize shaders:
mDistortShaderTwoTextures = OpenGL::ShaderProgramFileManager::the()->get( OpenGL::ShaderProgramCreator("RiftDistortTwoTexture.fsh").andFile("RiftDistort.vsh") );
if (!mDistortShaderTwoTextures) {
ACGL::Utils::error() << "could not load distortion shader!" << std::endl;
return;
}
}
mDistortShaderTwoTextures->use();
mDistortShaderTwoTextures->setTexture("uSamplerColorLeft", _leftTexture, 0 );
mDistortShaderTwoTextures->setTexture("uSamplerColorRight", _rightTexture, 1 );
renderDistorted( mDistortShaderTwoTextures );
}
void SimpleRiftController::renderDistorted( ACGL::OpenGL::ConstSharedShaderProgram _program )
{
// if the user defined an output size, use that, otherwise default to the Rift's size:
glm::uvec2 windowSize = mOutputViewport;
if (windowSize.x == 0) {
windowSize = getPhysicalScreenResolution();
}
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glViewport( 0, 0, windowSize.x, windowSize.y );
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
// we assume that _program is in use as this should only get called from our own functions
_program->setUniform("uLensCenter", getLensCenter() );
_program->setUniform("uScreenCenter", getScreenCenter() );
_program->setUniform("uScale", getScale() );
_program->setUniform("uScaleIn", getScaleIn() );
_program->setUniform("uHmdWarpParam", getHmdWarpParam() );
_program->setUniform("uChromAbParam", getChromAbParam() );
_program->setUniform("uDistort", (int)mUseDistortion );
_program->setUniform("uCorrectChromaticAberation", (int)mUseChromaticAberation );
// attribute-less rendering:
// just rendering a fullscreen quad
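// (the RiftDistort vertex shader is expected to generate the quad corners itself, e.g. from gl_VertexID)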
OpenGL::VertexArrayObject vao;
vao.bind(); // 'empty' VAO -> no attributes are defined
glDrawArrays( GL_TRIANGLE_STRIP, 0, 4 ); // create 2 triangles with no attributes
}
#endif