Commit 211206bc authored by Philip Trettner

Added new Rift SDK support (ACGL_RIFT_SDK_VERSION must be at least 32)

parent 585949e0
#pragma once
/**
 * IMPORTANT: DON'T EXPECT THIS CLASS TO HAVE A FINAL AND STABLE API!
 *
 * This class needs LibOVR version 0.3.2 or higher to work.
 * The headers of that library have to be in the include search path.
 *
 * In addition, ACGL_USE_OCULUS_RIFT has to be defined.
 */
#include <OVR_CAPI.h>
#include <ACGL/ACGL.hh>
#include <glm/glm.hpp>
#ifdef ACGL_USE_OCULUS_RIFT
#if ACGL_RIFT_SDK_VERSION >= 32
namespace ACGL{
namespace HardwareSupport{
bool initRiftSDK();
void shutdownRiftSDK();
// creates a standard Rift; can be replaced with your own, more specialized code:
ovrHmd createRift(bool _headTrackingIsRequired = false, bool _headTranslationTrackingIsAllowed = true);
void destroyRift(ovrHmd _hmd);
// the SDK reports different optimal sizes per eye; return the component-wise max to make things easier:
glm::uvec2 getOptimalRenderSizePerEye(ovrHmd _hmd);
}
}
#endif // ACGL_RIFT_SDK_VERSION >= 32
#endif // ACGL_USE_OCULUS_RIFT
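A minimal usage sketch of the API above (not part of the commit; it assumes ACGL_USE_OCULUS_RIFT and ACGL_RIFT_SDK_VERSION=32 are defined at build time and that an OpenGL context already exists):

#include <ACGL/HardwareSupport/RiftSdk.hh>
using namespace ACGL::HardwareSupport;

int main()
{
    if (!initRiftSDK()) return 1;             // initializes LibOVR exactly once
    ovrHmd hmd = createRift(false, true);     // tracking optional, translation allowed
    glm::uvec2 eyeSize = getOptimalRenderSizePerEye(hmd); // safe even if hmd is NULL
    // ... create two render targets of eyeSize and render one eye into each ...
    if (hmd) destroyRift(hmd);
    shutdownRiftSDK();
    return 0;
}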
/***********************************************************************
* Copyright 2011-2013 Computer Graphics Group RWTH Aachen University. *
* All rights reserved. *
* Distributed under the terms of the MIT License (see LICENSE.TXT). *
**********************************************************************/
#pragma once
/**
 * The OculusRiftCamera is controlled completely by the Oculus Rift. Some members
 * are compatible with a GenericCamera, but only the getters, as all internal
 * state is defined by the connected Rift.
 *
 * The center of the coordinate system is roughly in the user's head.
 *
 * This camera's view matrix gives the translation/rotation from the center inside
 * the user's body to the requested eye.
 */
#include <ACGL/ACGL.hh>
#include <ACGL/Scene/GenericCamera.hh>
#include <OVR_CAPI.h>
namespace ACGL{
namespace Scene {
class OculusRiftCamera : public GenericCamera {
public:
struct OvrEye
{
ovrEyeType Eye;
ovrFovPort Fov;
ovrSizei TextureSize;
ovrRecti RenderViewport;
};
void connectWithRift(ovrHmd _hmd);
virtual glm::vec3 getPosition() const override;
virtual glm::mat4 getViewMatrix() const override;
virtual glm::mat4 getProjectionMatrix() const override;
virtual glm::uvec2 getViewportSize() const override;
// Updates the pose based on the Rift tracking and sets the correct eye.
// _eyeNumber can be 0 or 1; it is undefined which one is the left and
// which is the right eye. The eye that was set is returned.
// Also calls ovrHmd_BeginEyeRender internally!
// Call stopRenderingEye() afterwards!
GenericCamera::Eye startRenderingEye(int _eyeNumber);
void stopRenderingEye(int _eyeNumber, ovrTexture* eyeTexture);
void updateFromRift();
const OvrEye *getOVREyeDescription() const { return mEyeDescription; }
private:
ovrHmd mHmd;
OvrEye mEyeDescription[2]; // left, right
GenericCamera::Eye mEyeOrdering[2];
ovrPosef mPoseUsedForRendering[2];
int mActiveEye; // to index the two-element arrays
};
ACGL_SMARTPOINTER_TYPEDEFS(OculusRiftCamera)
}
}
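A per-frame usage sketch (not part of the commit): it assumes an ovrHmd created via RiftSdk.hh, SDK distortion rendering already configured with ovrHmd_ConfigureRendering, and an application-provided ovrTexture eyeTextures[2]:

ACGL::Scene::OculusRiftCamera camera;
camera.connectWithRift(hmd);

// each frame:
ovrHmd_BeginFrame(hmd, 0);
for (int i = 0; i < 2; ++i) {
    camera.startRenderingEye(i); // fetches the pose, calls ovrHmd_BeginEyeRender
    // render the scene with camera.getViewMatrix() / camera.getProjectionMatrix()
    // into the render target that eyeTextures[i] describes ...
    camera.stopRenderingEye(i, &eyeTextures[i]); // hands pose + texture to the SDK
}
ovrHmd_EndFrame(hmd);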
#include <ACGL/HardwareSupport/RiftSdk.hh>
#include <ACGL/OpenGL/Creator/ShaderProgramCreator.hh>
#include <iostream>
#include <ACGL/Utils/Log.hh>
#ifdef ACGL_USE_OCULUS_RIFT
#if ACGL_RIFT_SDK_VERSION >= 32
namespace ACGL{
namespace HardwareSupport{
using namespace std;
using namespace ACGL::Utils;
// C API helpers:
static bool ACGL_RiftSDKInitialized = false;
bool initRiftSDK()
{
if (ACGL_RiftSDKInitialized) return true; // don't init twice
ovrBool ok = ovr_Initialize();
if (!ok) {
error() << "could not initialize Oculus Rift library" << endl;
}
else {
ACGL_RiftSDKInitialized = true;
}
return ACGL_RiftSDKInitialized;
}
void shutdownRiftSDK()
{
ovr_Shutdown();
ACGL_RiftSDKInitialized = false; // allow a clean re-initialization later
}
// For more sophisticated use cases, build your own Rift based on the Rift SDK
// instead of using this default one.
//
// _headTrackingIsRequired = if false, a missing Rift is not an error; the call
//       just returns NULL (for developing without an actual device).
// _headTranslationTrackingIsAllowed = if true, the positional tracking of a DK2
//       is supported; if false, even a DK2 will behave like a DK1
ovrHmd createRift(bool _headTrackingIsRequired, bool _headTranslationTrackingIsAllowed)
{
if (!ACGL_RiftSDKInitialized) {
error() << "Rift SDK not initialized correctly - did you call/check initRiftSDK()?" << endl;
}
ovrHmd hmd = ovrHmd_Create(0);
if (!hmd && _headTrackingIsRequired) {
error() << "could not connect to an Oculus Rift HMD" << endl;
return NULL;
}
else if (!hmd && !_headTrackingIsRequired) {
// creating a sensorless debug dummy would be possible but is disabled for now:
//warning() << "could not connect to a real Oculus Rift HMD - generating sensorless dummy" << endl;
//hmd = ovrHmd_CreateDebug(ovrHmd_DK1);
debug() << "could not connect to a real Oculus Rift HMD" << endl;
return NULL;
}
ovrHmdDesc hmdDesc;
ovrHmd_GetDesc(hmd, &hmdDesc);
// debug output:
debug() << "Connected to: " << hmdDesc.ProductName << endl;
// start the tracking:
// what the application supports:
unsigned int supportedCaps = ovrSensorCap_Orientation | ovrSensorCap_YawCorrection | ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest | ovrHmdCap_DynamicPrediction;
if (_headTranslationTrackingIsAllowed) supportedCaps |= ovrSensorCap_Position;
// what the device must deliver as a bare minimum:
unsigned int requiredCaps = 0;
if (_headTrackingIsRequired) requiredCaps |= ovrSensorCap_Orientation;
ovrBool ok = ovrHmd_StartSensor(hmd, supportedCaps, requiredCaps);
if (!ok) {
error() << "could not connect to a Rift tracker - only rendering is supported" << endl;
}
return hmd;
}
void destroyRift(ovrHmd _hmd)
{
ovrHmd_Destroy(_hmd);
}
glm::uvec2 getOptimalRenderSizePerEye(ovrHmd _hmd)
{
if (_hmd == NULL) return glm::uvec2(640, 800); // fallback: one eye of a 1280x800 DK1 panel
ovrHmdDesc hmdDesc;
ovrHmd_GetDesc(_hmd, &hmdDesc);
ovrSizei optimalLeft = ovrHmd_GetFovTextureSize(_hmd, ovrEye_Left, hmdDesc.DefaultEyeFov[0], 1.0f);
ovrSizei optimalRight = ovrHmd_GetFovTextureSize(_hmd, ovrEye_Right, hmdDesc.DefaultEyeFov[1], 1.0f);
debug() << "optimalLeft " << optimalLeft.w << " " << optimalLeft.h << endl;
debug() << "optimalRight " << optimalRight.w << " " << optimalRight.h << endl;
debug() << "hmd: " << hmdDesc.ProductName << endl;
debug() << "hmd WindowsPos: " << hmdDesc.WindowsPos.x << " " << hmdDesc.WindowsPos.y << endl;
return glm::uvec2(glm::max(optimalLeft.w, optimalRight.w), glm::max(optimalLeft.h, optimalRight.h));
}
}
}
#endif // ACGL_RIFT_SDK_VERSION >= 32
#endif // ACGL_USE_OCULUS_RIFT
/***********************************************************************
* Copyright 2011-2013 Computer Graphics Group RWTH Aachen University. *
* All rights reserved. *
* Distributed under the terms of the MIT License (see LICENSE.TXT). *
**********************************************************************/
#include <ACGL/Scene/OculusRiftCamera.hh>
#include <ACGL/Utils/Log.hh>
#include <ACGL/HardwareSupport/RiftSdk.hh>
#include <glm/ext.hpp>
#include <glm/gtc/matrix_transform.hpp>
namespace ACGL{
namespace Scene{
using namespace std;
using namespace ACGL::Utils;
using namespace ACGL::HardwareSupport;
static glm::quat ovr2glm(const ovrQuatf& _quat)
{
glm::quat q;
q.x = _quat.x;
q.y = _quat.y;
q.z = _quat.z;
q.w = _quat.w;
return q;
}
static glm::vec3 ovr2glm(const ovrVector3f& _vec)
{
// This cast is ok as ovrVector3f has the same internal structure
return *(glm::vec3*)&_vec;
}
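// Sanity checks for the raw casts in this file (added as a sketch; they assume
// the default glm configuration where vec3 and mat4 are tightly packed floats):
static_assert(sizeof(glm::vec3) == sizeof(ovrVector3f), "ovr2glm: layout mismatch");
static_assert(sizeof(glm::mat4) == sizeof(ovrMatrix4f), "ovr2glm: layout mismatch");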
static glm::mat4 ovr2glm(const ovrMatrix4f& _matrix)
{
// CAUTION: column-major vs. row-major difference
return glm::transpose(*(glm::mat4*)&_matrix);
/*glm::mat4 m;
for (int i = 0; i < 4; ++i) {
for (int j = 0; j < 4; ++j) {
m[i][j] = _matrix.M[j][i];
}
}
return m;*/
}
void OculusRiftCamera::connectWithRift(ovrHmd _hmd)
{
mHmd = _hmd;
ovrHmdDesc hmdDesc;
ovrHmd_GetDesc(_hmd, &hmdDesc);
mActiveEye = 0;
// both eyes get the same size,
// rendering into two separate textures, NOT side-by-side into one:
glm::uvec2 renderTargetSizeForOneEye = getOptimalRenderSizePerEye(_hmd);
ovrSizei ovrRenderTargetSizeForOneEye;
ovrRenderTargetSizeForOneEye.w = renderTargetSizeForOneEye.x;
ovrRenderTargetSizeForOneEye.h = renderTargetSizeForOneEye.y;
ovrRecti perEyeViewport;
perEyeViewport.Pos = { 0, 0 };
perEyeViewport.Size = ovrRenderTargetSizeForOneEye;
for (int i = 0; i < 2; ++i) {
// for all eyes:
mEyeDescription[i].Eye = hmdDesc.EyeRenderOrder[i];
mEyeDescription[i].Fov = hmdDesc.DefaultEyeFov[i];
mEyeDescription[i].TextureSize = ovrRenderTargetSizeForOneEye;
mEyeDescription[i].RenderViewport = perEyeViewport;
// the eye ordering (left-right or right-left) is defined by the SDK based on the
// hardware!
if (mEyeDescription[i].Eye == ovrEye_Left) {
mEyeOrdering[i] = GenericCamera::Eye::EYE_LEFT;
} else {
mEyeOrdering[i] = GenericCamera::Eye::EYE_RIGHT;
}
}
}
GenericCamera::Eye OculusRiftCamera::startRenderingEye(int _eyeNumber)
{
mPoseUsedForRendering[_eyeNumber] = ovrHmd_BeginEyeRender(mHmd, mEyeDescription[_eyeNumber].Eye);
setEye(mEyeOrdering[_eyeNumber]);
mActiveEye = _eyeNumber;
/*debug() << "Render Eye " << mActiveEye << ": "
<< mPoseUsedForRendering[_eyeNumber].Orientation.w << " "
<< mPoseUsedForRendering[_eyeNumber].Orientation.x << " "
<< mPoseUsedForRendering[_eyeNumber].Orientation.y << " "
<< mPoseUsedForRendering[_eyeNumber].Orientation.z << endl;*/
updateFromRift();
return mEyeOrdering[_eyeNumber];
}
void OculusRiftCamera::stopRenderingEye(int _eyeNumber, ovrTexture* eyeTexture)
{
ovrHmd_EndEyeRender(mHmd, mEyeDescription[_eyeNumber].Eye, mPoseUsedForRendering[_eyeNumber], eyeTexture);
}
void OculusRiftCamera::updateFromRift()
{
// would read the orientation quaternion and translation from the tracker and
// store them; empty for now, as the matrices are constructed on-the-fly from
// the poses fetched in startRenderingEye().
}
glm::mat4 OculusRiftCamera::getProjectionMatrix() const
{
ovrMatrix4f projectionMatrix = ovrMatrix4f_Projection(mEyeDescription[mActiveEye].Fov, getNearClippingPlane(), getFarClippingPlane(), true);
return ovr2glm(projectionMatrix);
}
glm::vec3 OculusRiftCamera::getPosition() const
{
return ovr2glm(mPoseUsedForRendering[mActiveEye].Position);
}
glm::mat4 OculusRiftCamera::getViewMatrix() const
{
return glm::mat4_cast(ovr2glm(mPoseUsedForRendering[mActiveEye].Orientation)) * glm::translate(ovr2glm(mPoseUsedForRendering[mActiveEye].Position));
}
glm::uvec2 OculusRiftCamera::getViewportSize() const
{
return glm::uvec2(
mEyeDescription[mActiveEye].TextureSize.w,
mEyeDescription[mActiveEye].TextureSize.h);
}
}
}