Oculus VR DK2 Support

- Updated to work with 0.5.x SDK
- Uses Oculus Rendering rather than PostFX
- Stereo rendering refactored so more rendering info is grabbed from the DisplayDevice
- Implements an Offscreen Canvas for in-game gui with oculus
- Message dialogs and metrics display can now go to the OffScreen Canvas (if the oculus demo is set up correctly)
This commit is contained in:
James Urquhart 2015-05-06 23:07:48 +01:00
parent b3170bcddf
commit 3a457749ec
56 changed files with 2654 additions and 1426 deletions

View file

@ -1,199 +0,0 @@
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/input/oculusVR/barrelDistortionPostEffect.h"
#include "console/consoleTypes.h"
#include "console/engineAPI.h"
#include "gfx/gfxDevice.h"
#include "platform/input/oculusVR/oculusVRDevice.h"
extern bool gEditingMission;
// Script-facing class documentation consumed by the console doc generator.
ConsoleDocClass( BarrelDistortionPostEffect,
   "@brief A fullscreen shader effect used with the Oculus Rift.\n\n"
   "@section PFXTextureIdentifiers\n\n"
   "@ingroup Rendering\n"
);
// Register the class with the console object system so it can be created
// and configured from script.
IMPLEMENT_CONOBJECT(BarrelDistortionPostEffect);
//-----------------------------------------------------------------------------
// Constructor. All shader constant handles start out NULL; they are resolved
// lazily the first time _setupConstants() runs. Device indices default to the
// first HMD/sensor and the output scale defaults to 1 (no extra scaling).
//-----------------------------------------------------------------------------
BarrelDistortionPostEffect::BarrelDistortionPostEffect()
   : PostEffect(),
     mHmdWarpParamSC(NULL),
     mHmdChromaAbSC(NULL),
     mScaleSC(NULL),
     mScaleInSC(NULL),
     mLensCenterSC(NULL),
     mScreenCenterSC(NULL),
     mHMDIndex(0),
     mSensorIndex(0),
     mScaleOutput(1.0f)
{
}
BarrelDistortionPostEffect::~BarrelDistortionPostEffect()
{
   // Intentionally empty: this subclass only holds shader constant handles
   // and plain-value members, so there is nothing to release here.
}
// Registers the script-accessible fields of this post effect with the
// persistence system, then chains to the parent class's fields.
void BarrelDistortionPostEffect::initPersistFields()
{
   addField( "hmdIndex", TypeS32, Offset( mHMDIndex, BarrelDistortionPostEffect ),
      "Oculus VR HMD index to reference." );
   addField( "sensorIndex", TypeS32, Offset( mSensorIndex, BarrelDistortionPostEffect ),
      "Oculus VR sensor index to reference." );
   addField( "scaleOutput", TypeF32, Offset( mScaleOutput, BarrelDistortionPostEffect ),
      "Used to increase the size of the window into the world at the expense of apparent resolution." );
   Parent::initPersistFields();
}
// SimObject registration hook. This subclass needs no extra registration
// work, so the result of the parent implementation is returned directly.
bool BarrelDistortionPostEffect::onAdd()
{
   return Parent::onAdd();
}
// SimObject unregistration hook. No subclass-specific teardown is needed;
// simply defer to PostEffect.
void BarrelDistortionPostEffect::onRemove()
{
   Parent::onRemove();
}
// Uploads the barrel distortion shader constants for the current frame.
// On the first call (when mShaderConsts is still null) the constant handles
// are resolved from the shader; thereafter only the values are refreshed.
// If an Oculus HMD is available its measured distortion parameters are used,
// otherwise neutral pass-through values are uploaded.
void BarrelDistortionPostEffect::_setupConstants( const SceneRenderState *state )
{
   // Test if setup is required before calling the parent method as the parent method
   // will set up the shader constants buffer for us.
   bool setupRequired = mShaderConsts.isNull();
   Parent::_setupConstants(state);
   // Define the shader constants (one-time handle lookup).
   if(setupRequired)
   {
      mHmdWarpParamSC = mShader->getShaderConstHandle( "$HmdWarpParam" );
      mHmdChromaAbSC = mShader->getShaderConstHandle( "$HmdChromaAbParam" );
      mScaleSC = mShader->getShaderConstHandle( "$Scale" );
      mScaleInSC = mShader->getShaderConstHandle( "$ScaleIn" );
      mLensCenterSC = mShader->getShaderConstHandle( "$LensCenter" );
      mScreenCenterSC = mShader->getShaderConstHandle( "$ScreenCenter" );
   }
   // The target holds both eye views side by side, so a single eye spans half
   // the width; the aspect ratio below is therefore per-eye.
   // NOTE(review): assumes an active render target exists at this point —
   // GFX->getActiveRenderTarget() is dereferenced unchecked.
   const Point2I &resolution = GFX->getActiveRenderTarget()->getSize();
   F32 widthScale = 0.5f;
   F32 heightScale = 1.0f;
   F32 aspectRatio = (resolution.x * 0.5f) / resolution.y;
   // Set up the HMD dependant shader constants
   if(ManagedSingleton<OculusVRDevice>::instanceOrNull() && OCULUSVRDEV->getHMDDevice(mHMDIndex))
   {
      const OculusVRHMDDevice* hmd = OCULUSVRDEV->getHMDDevice(mHMDIndex);
      // Radial distortion polynomial coefficients (k0..k3).
      if(mHmdWarpParamSC->isValid())
      {
         const Point4F& distortion = hmd->getKDistortion();
         mShaderConsts->set( mHmdWarpParamSC, distortion );
      }
      // Per-channel chromatic aberration correction factors.
      if(mHmdChromaAbSC->isValid())
      {
         const Point4F& correction = hmd->getChromaticAbCorrection();
         mShaderConsts->set( mHmdChromaAbSC, correction );
      }
      if(mScaleSC->isValid())
      {
         F32 scaleFactor = hmd->getDistortionScale();
         // mScaleOutput enlarges the apparent view; guard against divide by
         // zero when the field is left at 0 from script.
         if(!mIsZero(mScaleOutput))
         {
            scaleFactor /= mScaleOutput;
         }
         Point2F scale;
         scale.x = widthScale * 0.5f * scaleFactor;
         scale.y = heightScale * 0.5f * scaleFactor * aspectRatio;
         mShaderConsts->set( mScaleSC, scale );
      }
      if(mLensCenterSC->isValid())
      {
         // x/y hold the left/right eye lens centers (offset mirrored per eye),
         // z the shared vertical center.
         F32 xCenterOffset = hmd->getCenterOffset();
         Point3F lensCenter;
         lensCenter.x = (widthScale + xCenterOffset * 0.5f) * 0.5f;
         lensCenter.y = (widthScale - xCenterOffset * 0.5f) * 0.5f;
         lensCenter.z = heightScale * 0.5f;
         mShaderConsts->set( mLensCenterSC, lensCenter );
      }
   }
   else
   {
      // No HMD available: upload neutral values so the shader degrades to an
      // (approximately) undistorted pass-through.
      if(mHmdWarpParamSC->isValid())
      {
         mShaderConsts->set( mHmdWarpParamSC, Point4F(0.0f, 0.0f, 0.0f, 0.0f) );
      }
      if(mHmdChromaAbSC->isValid())
      {
         mShaderConsts->set( mHmdChromaAbSC, Point4F(1.0f, 0.0f, 1.0f, 0.0f) );
      }
      if(mScaleSC->isValid())
      {
         mShaderConsts->set( mScaleSC, Point2F(1.0f, 1.0f) );
      }
      if(mLensCenterSC->isValid())
      {
         // Center of each half-screen eye viewport.
         Point3F lensCenter;
         lensCenter.x = widthScale * 0.5f;
         lensCenter.y = widthScale * 0.5f;
         lensCenter.z = heightScale * 0.5f;
         mShaderConsts->set( mLensCenterSC, lensCenter );
      }
   }
   // Input UV scale is HMD-independent, so it is set in both cases.
   if(mScaleInSC->isValid())
   {
      Point2F scaleIn;
      scaleIn.x = 2.0f / widthScale;
      scaleIn.y = 2.0f / heightScale / aspectRatio;
      mShaderConsts->set( mScaleInSC, scaleIn );
   }
   if(mScreenCenterSC->isValid())
   {
      mShaderConsts->set( mScreenCenterSC, Point2F(widthScale * 0.5f, heightScale * 0.5f) );
   }
}
// Runs the post effect for the frame. The distortion pass is suppressed
// while the mission editor is active so editing happens undistorted.
void BarrelDistortionPostEffect::process(const SceneRenderState *state, GFXTexHandle &inOutTex, const RectI *inTexViewport)
{
   if(!gEditingMission)
      Parent::process(state, inOutTex, inTexViewport);
}

View file

@ -1,69 +0,0 @@
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _BARRELDISTORTIONPOSTEFFECT_H_
#define _BARRELDISTORTIONPOSTEFFECT_H_
#include "postFx/postEffect.h"
/// Fullscreen post effect that applies the Oculus Rift barrel distortion
/// (with optional chromatic aberration correction) to the rendered scene.
/// The effect is driven per-frame by _setupConstants() and skipped while
/// the mission editor is active.
class BarrelDistortionPostEffect : public PostEffect
{
   typedef PostEffect Parent;

protected:
   /// Handles into the distortion shader's constants; resolved lazily on
   /// the first _setupConstants() call.
   GFXShaderConstHandle *mHmdWarpParamSC;
   GFXShaderConstHandle *mHmdChromaAbSC;
   GFXShaderConstHandle *mScaleSC;
   GFXShaderConstHandle *mScaleInSC;
   GFXShaderConstHandle *mLensCenterSC;
   GFXShaderConstHandle *mScreenCenterSC;

   // Oculus VR HMD index to reference
   S32 mHMDIndex;

   // Oculus VR sensor index to reference
   S32 mSensorIndex;

   // Used to increase the size of the window into the world at the
   // expense of apparent resolution.
   F32 mScaleOutput;

protected:
   /// Uploads the HMD-dependent shader constants for the current frame.
   virtual void _setupConstants( const SceneRenderState *state );

public:
   BarrelDistortionPostEffect();
   virtual ~BarrelDistortionPostEffect();

   DECLARE_CONOBJECT(BarrelDistortionPostEffect);

   // SimObject
   virtual bool onAdd();
   virtual void onRemove();
   static void initPersistFields();

   /// Runs the effect; skipped entirely while the editor is active.
   virtual void process( const SceneRenderState *state,
      GFXTexHandle &inOutTex,
      const RectI *inTexViewport = NULL );
};
#endif // _BARRELDISTORTIONPOSTEFFECT_H_

File diff suppressed because it is too large Load diff

View file

@ -32,7 +32,8 @@
#include "core/util/tSingleton.h"
#include "math/mQuat.h"
#include "math/mPoint4.h"
#include "OVR.h"
#include "gfx/gfxDevice.h"
#include "OVR_CAPI_0_5_0.h"
#define DEFAULT_RIFT_UNIT 0
@ -44,13 +45,10 @@ public:
// If no HMD is present simulate it being available
static bool smSimulateHMD;
// Use the chromatic aberration correction version of the barrel
// distortion shader.
static bool smUseChromaticAberrationCorrection;
// Type of rotation events to broadcast
static bool smGenerateAngleAxisRotationEvents;
static bool smGenerateEulerRotationEvents;
static bool smGeneratePositionEvents;
// Broadcast sensor rotation as axis
static bool smGenerateRotationAsAxisEvents;
@ -66,37 +64,24 @@ public:
// should be buffered.
static bool smGenerateWholeFrameEvents;
/// Determines desired pixel density for render target
static F32 smDesiredPixelDensity;
/// Determines whether the window is moved to the oculus display
static bool smWindowDebug;
static F32 smPositionTrackingScale;
protected:
class DeviceListener : public OVR::MessageHandler
{
protected:
OculusVRDevice* mOwner;
public:
DeviceListener(OculusVRDevice* owner) { mOwner = owner; }
virtual ~DeviceListener() { mOwner = NULL; }
virtual void OnMessage(const OVR::Message&);
};
// Our OVR SDK device listener class
DeviceListener* mListener;
// The OVR SDK device manager
OVR::DeviceManager* mDeviceManager;
// Discovered HMD devices
Vector<OculusVRHMDDevice*> mHMDDevices;
// Discovered sensor devices
Vector<OculusVRSensorDevice*> mSensorDevices;
/// Is the device active
bool mActive;
// Should the input texture into the HMD (the render target that the scene has been
// rendered to) be scaled according to the HMD's distortion calculation?
bool mScaleInputTexture;
/// Which HMD is the active one
U32 mActiveDeviceId;
protected:
void cleanUp();
@ -105,14 +90,10 @@ protected:
/// Input Event Manager
void buildCodeTable();
void addHMDDevice(OVR::HMDDevice* hmd);
void addHMDDevice(ovrHmd hmd);
void createSimulatedHMD();
void addSensorDevice(OVR::SensorDevice* sensor);
void createSimulatedSensor();
public:
OculusVRDevice();
~OculusVRDevice();
@ -128,36 +109,50 @@ public:
bool process();
// IDisplayDevice
virtual bool providesYFOV() const;
virtual F32 getYFOV() const;
virtual bool providesEyeOffset() const;
virtual const Point3F& getEyeOffset() const;
virtual bool providesFrameEyePose() const;
virtual void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
virtual bool providesEyeOffsets() const;
virtual void getEyeOffsets(Point3F *dest) const;
virtual bool providesFovPorts() const;
virtual void getFovPorts(FovPort *out) const;
virtual bool providesProjectionOffset() const;
virtual const Point2F& getProjectionOffset() const;
virtual void getStereoViewports(RectI *out) const;
virtual void getStereoTargets(GFXTextureTarget **out) const;
virtual void onStartFrame();
// HMDs
U32 getHMDCount() const { return mHMDDevices.size(); }
const OculusVRHMDDevice* getHMDDevice(U32 index) const;
OculusVRHMDDevice* getHMDDevice(U32 index) const;
F32 getHMDCurrentIPD(U32 index);
void setHMDCurrentIPD(U32 index, F32 ipd);
// Sensors
U32 getSensorCount() const { return mSensorDevices.size(); }
U32 getSensorCount() const { return mHMDDevices.size(); }
const OculusVRSensorDevice* getSensorDevice(U32 index) const;
EulerF getSensorEulerRotation(U32 index);
VectorF getSensorAcceleration(U32 index);
EulerF getSensorAngularVelocity(U32 index);
VectorF getSensorMagnetometer(U32 index);
F32 getSensorPredictionTime(U32 index);
void setSensorPredictionTime(U32 index, F32 dt);
void setAllSensorPredictionTime(F32 dt);
bool getSensorGravityCorrection(U32 index);
void setSensorGravityCorrection(U32 index, bool state);
bool getSensorYawCorrection(U32 index);
void setSensorYawCorrection(U32 index, bool state);
bool getSensorMagnetometerCalibrated(U32 index);
void setOptimalDisplaySize(U32 idx, GuiCanvas *canvas);
void resetAllSensors();
bool isDiplayingWarning();
void dismissWarning();
String dumpMetrics(U32 idx);
void setDrawCanvas(GuiCanvas *canvas);
virtual void setCurrentConnection(GameConnection *connection);
virtual GameConnection* getCurrentConnection();
bool _handleDeviceEvent( GFXDevice::GFXDeviceEventType evt );
public:
// For ManagedSingleton.
static const char* getSingletonName() { return "OculusVRDevice"; }

View file

@ -21,12 +21,45 @@
//-----------------------------------------------------------------------------
#include "platform/input/oculusVR/oculusVRHMDDevice.h"
#include "platform/input/oculusVR/oculusVRDevice.h"
#include "platform/input/oculusVR/oculusVRSensorDevice.h"
#include "postFx/postEffectCommon.h"
#include "gui/core/guiCanvas.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
OculusVRHMDDevice::OculusVRHMDDevice()
#include "gfx/D3D9/gfxD3D9Device.h"
// Use D3D9 for win32
#ifdef TORQUE_OS_WIN
#define OVR_D3D_VERSION 9
#include "OVR_CAPI_D3D.h"
#define OCULUS_USE_D3D
#else
#include "OVR_CAPI_GL.h"
#define OCULUS_USE_GL
#endif
extern GFXTextureObject *gLastStereoTexture;
OculusVRHMDDevice::OculusVRHMDDevice() :
mWindowSize(1280,800)
{
mIsValid = false;
mIsSimulation = false;
mDevice = NULL;
mSupportedDistortionCaps = 0;
mCurrentDistortionCaps = 0;
mCurrentCaps = 0;
mSupportedCaps = 0;
mVsync = true;
mTimewarp = true;
mRenderConfigurationDirty = true;
mCurrentPixelDensity = OculusVRDevice::smDesiredPixelDensity;
mDesiredRenderingMode = GFXDevice::RS_StereoSideBySide;
mRTFormat = GFXFormatR8G8B8A8;
mDrawCanvas = NULL;
mFrameReady = false;
mConnection = NULL;
mSensor = NULL;
mActionCodeIndex = 0;
}
OculusVRHMDDevice::~OculusVRHMDDevice()
@ -36,197 +69,576 @@ OculusVRHMDDevice::~OculusVRHMDDevice()
void OculusVRHMDDevice::cleanUp()
{
onDeviceDestroy();
if (mSensor)
{
delete mSensor;
mSensor = NULL;
}
if(mDevice)
{
mDevice->Release();
ovrHmd_Destroy(mDevice);
mDevice = NULL;
}
mIsValid = false;
}
void OculusVRHMDDevice::set(OVR::HMDDevice* hmd, OVR::HMDInfo& info, bool calculateDistortionScale)
void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
{
cleanUp();
mIsValid = false;
mIsSimulation = false;
mRenderConfigurationDirty = true;
mDevice = hmd;
mSupportedCaps = hmd->HmdCaps;
mCurrentCaps = mSupportedCaps & (ovrHmdCap_DynamicPrediction | ovrHmdCap_LowPersistence | (!mVsync ? ovrHmdCap_NoVSync : 0));
mSupportedDistortionCaps = hmd->DistortionCaps;
mCurrentDistortionCaps = mSupportedDistortionCaps & (ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette | ovrDistortionCap_Overdrive);
mTimewarp = mSupportedDistortionCaps & ovrDistortionCap_TimeWarp;
// DeviceInfo
mProductName = info.ProductName;
mManufacturer = info.Manufacturer;
mVersion = info.Version;
mProductName = hmd->ProductName;
mManufacturer = hmd->Manufacturer;
mVersion = hmd->FirmwareMajor;
mDisplayDeviceName = info.DisplayDeviceName;
mDisplayId = info.DisplayId;
mDisplayDeviceName = hmd->DisplayDeviceName;
mDisplayId = hmd->DisplayId;
mDesktopPosition.x = info.DesktopX;
mDesktopPosition.y = info.DesktopY;
mDesktopPosition.x = hmd->WindowsPos.x;
mDesktopPosition.y = hmd->WindowsPos.y;
mResolution.x = info.HResolution;
mResolution.y = info.VResolution;
mResolution.x = hmd->Resolution.w;
mResolution.y = hmd->Resolution.h;
mScreenSize.x = info.HScreenSize;
mScreenSize.y = info.VScreenSize;
mProfileInterpupillaryDistance = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
mLensSeparation = ovrHmd_GetFloat(hmd, "LensSeparation", 0);
ovrHmd_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2);
mVerticalEyeCenter = info.VScreenCenter;
mEyeToScreen = info.EyeToScreenDistance;
mLensSeparation = info.LensSeparationDistance;
mProfileInterpupillaryDistance = info.InterpupillaryDistance;
mInterpupillaryDistance = mProfileInterpupillaryDistance;
dMemcpy(mCurrentFovPorts, mDevice->DefaultEyeFov, sizeof(mDevice->DefaultEyeFov));
mKDistortion.x = info.DistortionK[0];
mKDistortion.y = info.DistortionK[1];
mKDistortion.z = info.DistortionK[2];
mKDistortion.w = info.DistortionK[3];
mChromaticAbCorrection.x = info.ChromaAbCorrection[0];
mChromaticAbCorrection.y = info.ChromaAbCorrection[1];
mChromaticAbCorrection.z = info.ChromaAbCorrection[2];
mChromaticAbCorrection.w = info.ChromaAbCorrection[3];
// Calculated values
calculateValues(calculateDistortionScale);
mIsValid = true;
}
void OculusVRHMDDevice::createSimulation(SimulationTypes simulationType, bool calculateDistortionScale)
{
if(simulationType == ST_RIFT_PREVIEW)
for (U32 i=0; i<2; i++)
{
createSimulatedPreviewRift(calculateDistortionScale);
mCurrentFovPorts[i].UpTan = mDevice->DefaultEyeFov[i].UpTan;
mCurrentFovPorts[i].DownTan = mDevice->DefaultEyeFov[i].DownTan;
mCurrentFovPorts[i].LeftTan = mDevice->DefaultEyeFov[i].LeftTan;
mCurrentFovPorts[i].RightTan = mDevice->DefaultEyeFov[i].RightTan;
}
}
void OculusVRHMDDevice::createSimulatedPreviewRift(bool calculateDistortionScale)
{
if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop)
{
mWindowSize = Point2I(mDevice->Resolution.w, mDevice->Resolution.h);
}
else
{
mWindowSize = Point2I(1100, 618);
}
mActionCodeIndex = actionCodeIndex;
mIsValid = true;
mIsSimulation = true;
mProductName = "Oculus Rift DK1-SLA1";
mManufacturer = "Oculus VR";
mVersion = 0;
mSensor = new OculusVRSensorDevice();
mSensor->set(mDevice, mActionCodeIndex);
mDisplayDeviceName = "";
mResolution.x = 1280;
mResolution.y = 800;
mScreenSize.x = 0.14975999f;
mScreenSize.y = 0.093599997f;
mVerticalEyeCenter = 0.046799999f;
mEyeToScreen = 0.041000001f;
mLensSeparation = 0.064000003f;
mProfileInterpupillaryDistance = 0.064000003f;
mInterpupillaryDistance = mProfileInterpupillaryDistance;
mKDistortion.x = 1.0000000f;
mKDistortion.y = 0.22000000f;
mKDistortion.z = 0.23999999f;
mKDistortion.w = 0.00000000f;
mChromaticAbCorrection.x = 0.995999f;
mChromaticAbCorrection.y = -0.004f;
mChromaticAbCorrection.z = 1.014f;
mChromaticAbCorrection.w = 0.0f;
calculateValues(calculateDistortionScale);
updateCaps();
}
void OculusVRHMDDevice::setIPD(F32 ipd, bool calculateDistortionScale)
void OculusVRHMDDevice::setIPD(F32 ipd)
{
mInterpupillaryDistance = ipd;
// Recalculate as some values rely on the IPD
calculateValues(calculateDistortionScale);
}
// Computes scale that should be applied to the input render texture
// before distortion to fit the result in the same screen size.
// The 'fitRadius' parameter specifies the distance away from distortion center at
// which the input and output coordinates will match, assuming [-1,1] range.
F32 OculusVRHMDDevice::calcScale(F32 fitRadius)
void OculusVRHMDDevice::setOptimalDisplaySize(GuiCanvas *canvas)
{
F32 s = fitRadius;
if (!mDevice)
return;
// This should match distortion equation used in shader.
F32 ssq = s * s;
F32 scale = s * (mKDistortion.x + mKDistortion.y * ssq + mKDistortion.z * ssq * ssq + mKDistortion.w * ssq * ssq * ssq);
return scale;
}
PlatformWindow *window = canvas->getPlatformWindow();
GFXTarget *target = window->getGFXTarget();
void OculusVRHMDDevice::calculateValues(bool calculateDistortionScale)
{
F32 halfScreenX = mScreenSize.x * 0.5f;
if(halfScreenX > 0)
if (target && target->getSize() != mWindowSize)
{
F32 halfLensSeparation = mLensSeparation * 0.5;
F32 offset = halfLensSeparation / halfScreenX;
mEyeUVOffset.x = offset - 0.5;
mEyeUVOffset.y = 1.0f - offset - 0.5;
GFXVideoMode newMode;
newMode.antialiasLevel = 0;
newMode.bitDepth = 32;
newMode.fullScreen = false;
newMode.refreshRate = 75;
newMode.resolution = mWindowSize;
newMode.wideScreen = false;
window->setVideoMode(newMode);
//AssertFatal(window->getClientExtent().x == mWindowSize[0] && window->getClientExtent().y == mWindowSize[1], "Window didn't resize to correct dimensions");
}
// Need to move window over to the rift side of the desktop
if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop && !OculusVRDevice::smWindowDebug)
{
#ifndef OCULUS_WINDOW_DEBUG
window->setPosition(getDesktopPosition());
#endif
}
}
bool OculusVRHMDDevice::isDisplayingWarning()
{
if (!mIsValid || !mDevice)
return false;
ovrHSWDisplayState displayState;
ovrHmd_GetHSWDisplayState(mDevice, &displayState);
return displayState.Displayed;
}
void OculusVRHMDDevice::dismissWarning()
{
if (!mIsValid || !mDevice)
return;
ovrHmd_DismissHSWDisplay(mDevice);
}
bool OculusVRHMDDevice::setupTargets()
{
ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
mRecomendedEyeTargetSize[0] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Left, eyeFov[0], mCurrentPixelDensity);
mRecomendedEyeTargetSize[1] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Right, eyeFov[1], mCurrentPixelDensity);
// Calculate render target size
if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide)
{
// Setup a single texture, side-by-side viewports
Point2I rtSize(
mRecomendedEyeTargetSize[0].w + mRecomendedEyeTargetSize[1].w,
mRecomendedEyeTargetSize[0].h > mRecomendedEyeTargetSize[1].h ? mRecomendedEyeTargetSize[0].h : mRecomendedEyeTargetSize[1].h
);
GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat();
mRTFormat = targetFormat;
rtSize = generateRenderTarget(mStereoRT, mStereoTexture, mStereoDepthTexture, rtSize);
// Left
mEyeRenderSize[0] = rtSize;
mEyeRT[0] = mStereoRT;
mEyeTexture[0] = mStereoTexture;
mEyeViewport[0] = RectI(Point2I(0,0), Point2I((mRecomendedEyeTargetSize[0].w+1)/2, mRecomendedEyeTargetSize[0].h));
// Right
mEyeRenderSize[1] = rtSize;
mEyeRT[1] = mStereoRT;
mEyeTexture[1] = mStereoTexture;
mEyeViewport[1] = RectI(Point2I((mRecomendedEyeTargetSize[0].w+1)/2,0), Point2I((mRecomendedEyeTargetSize[1].w+1)/2, mRecomendedEyeTargetSize[1].h));
gLastStereoTexture = mEyeTexture[0];
}
else if (mDesiredRenderingMode == GFXDevice::RS_StereoRenderTargets)
{
// Setup two targets
Point2I rtSize;
GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat();
mRTFormat = targetFormat;
// Left
rtSize = generateRenderTarget(mEyeRT[0], mEyeTexture[0], mStereoDepthTexture, Point2I(mRecomendedEyeTargetSize[0].w, mRecomendedEyeTargetSize[0].h));
mEyeViewport[0] = RectI(Point2I(0,0), Point2I((rtSize.x+1)/2, rtSize.y));
// Right
rtSize = generateRenderTarget(mEyeRT[1], mEyeTexture[1], mStereoDepthTexture, Point2I(mRecomendedEyeTargetSize[1].w, mRecomendedEyeTargetSize[1].h));
mEyeViewport[1] = RectI(Point2I(0,0), Point2I((rtSize.x+1)/2, rtSize.y));
mStereoRT = NULL;
mStereoTexture = NULL;
gLastStereoTexture = mEyeTexture[0];
}
else
{
mEyeUVOffset.x = 0.5f;
mEyeUVOffset.y = 0.5f;
// No rendering, abort!
return false;
}
F32 lensOffset = mLensSeparation * 0.5f;
F32 lensShift = mScreenSize.x * 0.25f - lensOffset;
F32 lensViewportShift = 4.0f * lensShift / mScreenSize.x;
mXCenterOffset= lensViewportShift;
return true;
}
// Determine how the input texture should be scaled relative to the back buffer
// so that we fit the distorted view to the backbuffer after calculating the
// distortion. In reference to section 5.6.3 Distortion Scale and FOV in the
// SDK docs.
if(!calculateDistortionScale)
String OculusVRHMDDevice::dumpMetrics()
{
StringBuilder sb;
EulerF rot = mSensor->getEulerRotation();
Point3F pos = mSensor->getPosition();
FovPort eyeFov[2];
this->getFovPorts(eyeFov);
mSensor->getPositionTrackingAvailable();
F32 ipd = this->getIPD();
U32 lastStatus = mSensor->getLastTrackingStatus();
sb.format(" | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s, Disort:%s%s%s",
mActionCodeIndex,
rot.x, rot.y, rot.z,
pos.x, pos.y, pos.z,
eyeFov[0].upTan, eyeFov[0].downTan, eyeFov[0].leftTan, eyeFov[0].rightTan, eyeFov[1].upTan, eyeFov[1].downTan, eyeFov[1].leftTan, eyeFov[1].rightTan,
getIPD(),
lastStatus & ovrStatus_OrientationTracked ? " ORIENT" : "",
lastStatus & ovrStatus_PositionTracked ? " POS" : "",
mCurrentDistortionCaps & ovrDistortionCap_TimeWarp ? " TIMEWARP" : "",
mCurrentDistortionCaps & ovrDistortionCap_Vignette ? " VIGNETTE" : "",
mCurrentDistortionCaps & ovrDistortionCap_Overdrive ? " OVERDRIVE" : "");
return sb.data();
}
void OculusVRHMDDevice::updateRenderInfo()
{
// Check console values first
if (mCurrentPixelDensity != OculusVRDevice::smDesiredPixelDensity)
{
// Do not calculate a distortion scale for the input texture. This means that the input
// texture and the backbuffer will be the same resolution.
mDistortionFit.x = 0.0f;
mDistortionFit.y = 0.0f;
}
else if (mScreenSize.x > 0.140f) // 7"
{
mDistortionFit.x = -1.0f;
mDistortionFit.y = 0.0f;
}
else // 5"
{
mDistortionFit.x = 0.0f;
mDistortionFit.y = 1.0f;
mRenderConfigurationDirty = true;
mCurrentPixelDensity = OculusVRDevice::smDesiredPixelDensity;
}
// Compute distortion scale from DistortionFitX & DistortionFitY.
// Fit value of 0.0 means "no fit".
if (mIsZero(mDistortionFit.x) && mIsZero(mDistortionFit.y))
if (!mIsValid || !mDevice || !mRenderConfigurationDirty)
return;
if (!mDrawCanvas)
return;
PlatformWindow *window = mDrawCanvas->getPlatformWindow();
ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
// Update window size if it's incorrect
Point2I backbufferSize = mDrawCanvas->getBounds().extent;
// Reset
ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
#ifdef OCULUS_USE_D3D
// Generate render target textures
GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
if (d3d9GFX)
{
mDistortionScale = 1.0f;
ovrD3D9Config cfg;
cfg.D3D9.Header.API = ovrRenderAPI_D3D9;
cfg.D3D9.Header.Multisample = 0;
cfg.D3D9.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
cfg.D3D9.pDevice = d3d9GFX->getDevice();
cfg.D3D9.pDevice->GetSwapChain(0, &cfg.D3D9.pSwapChain);
// Finally setup!
if (!setupTargets())
{
onDeviceDestroy();
return;
}
ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
{
Con::errorf("Couldn't configure oculus rendering!");
return;
}
}
#endif
#ifdef OCULUS_USE_GL
// Generate render target textures
GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
if (glGFX)
{
ovrGLConfig cfg;
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.Multisample = 0;
cfg.OGL.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
#ifdef WIN32
cfg.OGL.Window = GetActiveWindow();//window->getPlatformDrawable();
cfg.OGL.DC = wglGetCurrentDC();
#else
cfg.OGL.Disp = NULL;
#endif
// Finally setup!
if (!setupTargets())
{
onDeviceDestroy();
return;
}
ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
{
Con::errorf("Couldn't configure oculus rendering!");
return;
}
}
#endif
mRenderConfigurationDirty = false;
}
Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize)
{
// Texture size that we already have might be big enough.
Point2I newRTSize;
bool newRT = false;
if (!target.getPointer())
{
target = GFX->allocRenderToTextureTarget();
newRTSize = desiredSize;
newRT = true;
}
else
{
Point2I currentSize = target->getSize();
newRTSize = currentSize;
}
// 50% linear growth each time is a nice balance between being too greedy
// for a 2D surface and too slow to prevent fragmentation.
while ( newRTSize.x < desiredSize.x )
{
newRTSize.x += newRTSize.x/2;
}
while ( newRTSize.y < desiredSize.y )
{
newRTSize.y += newRTSize.y/2;
}
// Put some sane limits on it. 4k x 4k is fine for most modern video cards.
// Nobody should be messing around with surfaces smaller than 4k pixels these days.
newRTSize.setMin(Point2I(4096, 4096));
newRTSize.setMax(Point2I(64, 64));
// Stereo RT needs to be the same size as the recommended RT
if ( newRT || texture.getWidthHeight() != newRTSize )
{
texture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
target->attachTexture( GFXTextureTarget::Color0, texture );
Con::printf("generateRenderTarget generated %x", texture.getPointer());
}
if ( depth.getWidthHeight() != newRTSize )
{
depth.set( newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
target->attachTexture( GFXTextureTarget::DepthStencil, depth );
Con::printf("generateRenderTarget generated depth %x", depth.getPointer());
}
return newRTSize;
}
void OculusVRHMDDevice::clearRenderTargets()
{
mStereoRT = NULL;
mEyeRT[0] = NULL;
mEyeRT[1] = NULL;
}
void OculusVRHMDDevice::updateCaps()
{
if (!mIsValid || !mDevice)
return;
U32 oldDistortionCaps = mCurrentDistortionCaps;
// Distortion
if (mTimewarp)
{
mCurrentDistortionCaps |= ovrDistortionCap_TimeWarp;
}
else
{
// Convert fit value to distortion-centered coordinates before fit radius
// calculation.
// NOTE: For now just assume a full view the same size as the HMD supports. It is
// possible that this full view is smaller or larger.
F32 stereoAspect = 0.5f * mResolution.x / mResolution.y;
F32 dx = mDistortionFit.x - mXCenterOffset;
F32 dy = mDistortionFit.y / stereoAspect;
F32 fitRadius = sqrt(dx * dx + dy * dy);
mDistortionScale = calcScale(fitRadius)/fitRadius;
mCurrentDistortionCaps &= ~ovrDistortionCap_TimeWarp;
}
// Calculate the vertical FOV for a single eye
mAspectRatio = F32(mResolution.x * 0.5f) / F32(mResolution.y);
F32 halfScreenDistance = mScreenSize.y * 0.5f * mDistortionScale;
mYFOV = 2.0f * mAtan(halfScreenDistance / mEyeToScreen);
if (oldDistortionCaps != mCurrentDistortionCaps)
{
mRenderConfigurationDirty = true;
}
F32 viewCenter = mScreenSize.x * 0.25f;
F32 eyeProjectionShift = viewCenter - (mInterpupillaryDistance * 0.5f);
mProjectionCenterOffset.set(4.0f * eyeProjectionShift / mScreenSize.x, 0.0f);
mEyeWorldOffset.set(mInterpupillaryDistance * 0.5f, 0.0f, 0.0f);
// Device
if (!mVsync)
{
mCurrentCaps |= ovrHmdCap_NoVSync;
}
else
{
mCurrentCaps &= ~ovrHmdCap_NoVSync;
}
ovrHmd_SetEnabledCaps(mDevice, mCurrentCaps);
}
static bool sInFrame = false; // protects against recursive onStartFrame calls
void OculusVRHMDDevice::onStartFrame()
{
if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || mFrameReady)
return;
sInFrame = true;
#ifndef OCULUS_DEBUG_FRAME
ovrHmd_BeginFrame(mDevice, 0);
#endif
ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset };
ovrHmd_GetEyePoses(mDevice, 0, hmdToEyeViewOffset, mCurrentEyePoses, &mLastTrackingState);
for (U32 i=0; i<2; i++)
{
mCurrentEyePoses[i].Position.x *= OculusVRDevice::smPositionTrackingScale;
mCurrentEyePoses[i].Position.y *= OculusVRDevice::smPositionTrackingScale;
mCurrentEyePoses[i].Position.z *= OculusVRDevice::smPositionTrackingScale;
}
sInFrame = false;
mFrameReady = true;
}
/// Ends an HMD frame: hands the per-eye render-target textures to the OVR
/// SDK, which performs distortion/timewarp and presents to the Rift.
/// Requires a frame started via onStartFrame() (mFrameReady set).
///
/// Fixes vs. previous revision:
///  - GL path: the right-eye texture ID was written into eyeTextures[0],
///    clobbering the left eye and leaving eyeTextures[1].OGL.TexId
///    uninitialized. It now goes into eyeTextures[1].
///  - D3D9 path: the right-eye texture guard tested mEyeRT[0] while
///    fetching mEyeTexture[1]; it now tests the matching mEyeRT[1].
void OculusVRHMDDevice::onEndFrame()
{
   if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady)
      return;

   Point2I eyeSize;
   GFXTarget *windowTarget = mDrawCanvas->getPlatformWindow()->getGFXTarget();

#ifndef OCULUS_DEBUG_FRAME
#ifdef OCULUS_USE_D3D
   GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
   if (d3d9GFX && mEyeRT[0].getPointer())
   {
      // Left
      ovrD3D9Texture eyeTextures[2];
      eyeSize = mEyeTexture[0].getWidthHeight();
      eyeTextures[0].D3D9.Header.API = ovrRenderAPI_D3D9;
      eyeTextures[0].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
      eyeTextures[0].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
      eyeTextures[0].D3D9.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
      eyeTextures[0].D3D9.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
      eyeTextures[0].D3D9.Header.TextureSize.w = eyeSize.x;
      eyeTextures[0].D3D9.Header.TextureSize.h = eyeSize.y;
      eyeTextures[0].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[0].getPointer())->get2DTex() : NULL;

      // Right (guard on the RIGHT eye's RT, not the left's)
      eyeSize = mEyeTexture[1].getWidthHeight();
      eyeTextures[1].D3D9.Header.API = ovrRenderAPI_D3D9;
      eyeTextures[1].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
      eyeTextures[1].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
      eyeTextures[1].D3D9.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
      eyeTextures[1].D3D9.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
      eyeTextures[1].D3D9.Header.TextureSize.w = eyeSize.x;
      eyeTextures[1].D3D9.Header.TextureSize.h = eyeSize.y;
      eyeTextures[1].D3D9.pTexture = mEyeRT[1].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[1].getPointer())->get2DTex() : NULL;

      // Submit!
      GFX->disableShaders();
      GFX->setActiveRenderTarget(windowTarget);
      GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
      ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
   }
#endif
#ifdef OCULUS_USE_GL
   GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
   if (glGFX && mEyeRT[0].getPointer())
   {
      // Left
      ovrGLTexture eyeTextures[2];
      eyeSize = mEyeTexture[0].getWidthHeight();
      eyeTextures[0].OGL.Header.API = ovrRenderAPI_GL;
      eyeTextures[0].OGL.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
      eyeTextures[0].OGL.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
      eyeTextures[0].OGL.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
      eyeTextures[0].OGL.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
      eyeTextures[0].OGL.Header.TextureSize.w = eyeSize.x;
      eyeTextures[0].OGL.Header.TextureSize.h = eyeSize.y;
      eyeTextures[0].OGL.TexId = mEyeRT[0].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[0].getPointer())->getHandle() : 0;

      // Right
      eyeSize = mEyeTexture[1].getWidthHeight();
      eyeTextures[1].OGL.Header.API = ovrRenderAPI_GL;
      eyeTextures[1].OGL.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
      eyeTextures[1].OGL.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
      eyeTextures[1].OGL.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
      eyeTextures[1].OGL.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
      eyeTextures[1].OGL.Header.TextureSize.w = eyeSize.x;
      eyeTextures[1].OGL.Header.TextureSize.h = eyeSize.y;
      // BUGFIX: was written to eyeTextures[0].OGL.TexId, leaving the right
      // eye's texture ID uninitialized and overwriting the left eye's.
      eyeTextures[1].OGL.TexId = mEyeRT[1].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[1].getPointer())->getHandle() : 0;

      // Submit!
      GFX->disableShaders();
      GFX->setActiveRenderTarget(windowTarget);
      GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
      ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
   }
#endif
#endif

   mFrameReady = false;
}
/// Fills outPose with the cached pose for the given eye (0 = left, 1 = right),
/// as captured during onStartFrame(). Orientation is converted to Torque
/// euler angles and position is remapped into Torque's coordinate space.
void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const
{
   // Use the eye transforms cached by onStartFrame() directly.
   const ovrPosef &eyePose = mCurrentEyePoses[eyeId];

   // Orientation: OVR quaternion -> Torque euler angles.
   OVR::Quatf ovrOrient = eyePose.Orientation;
   EulerF torqueRot;
   OculusVRUtil::convertRotation(ovrOrient, torqueRot);
   outPose->orientation = torqueRot;

   // Position: remap OVR axes into Torque's coordinate system.
   const OVR::Vector3f &p = eyePose.Position;
   outPose->position = Point3F(-p.x, p.z, -p.y);
}
void OculusVRHMDDevice::onDeviceDestroy()
{
if (!mIsValid || !mDevice)
return;
if (mStereoRT.getPointer())
{
mStereoRT->zombify();
}
if (mEyeRT[1].getPointer() && mEyeRT[1] != mStereoRT)
{
mEyeRT[0]->zombify();
mEyeRT[1]->zombify();
}
mStereoRT = NULL;
mStereoTexture = NULL;
mStereoDepthTexture = NULL;
mEyeTexture[0] = NULL;
mEyeDepthTexture[0] = NULL;
mEyeTexture[1] = NULL;
mEyeDepthTexture[1] = NULL;
mEyeRT[0] = NULL;
mEyeRT[1] = NULL;
mRenderConfigurationDirty = true;
ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
}

View file

@ -30,7 +30,16 @@
#include "math/mPoint4.h"
#include "platform/input/oculusVR/oculusVRConstants.h"
#include "platform/types.h"
#include "OVR.h"
#include "gfx/gfxTextureHandle.h"
#include "math/mRect.h"
#include "gfx/gfxDevice.h"
#include "OVR_CAPI_0_5_0.h"
class GuiCanvas;
class GameConnection;
struct DisplayPose;
class OculusVRSensorDevice;
class OculusVRHMDDevice
{
@ -42,9 +51,19 @@ public:
protected:
bool mIsValid;
bool mIsSimulation;
bool mVsync;
bool mTimewarp;
OVR::HMDDevice* mDevice;
bool mRenderConfigurationDirty;
bool mFrameReady;
ovrHmd mDevice;
U32 mSupportedDistortionCaps;
U32 mCurrentDistortionCaps;
U32 mSupportedCaps;
U32 mCurrentCaps;
// From OVR::DeviceInfo
String mProductName;
@ -66,13 +85,6 @@ protected:
// Physical screen size in meters
Point2F mScreenSize;
// Physical offset from the top of the screen to the center of the
// eye, in meters. Usually half of the vertical physical screen size
F32 mVerticalEyeCenter;
// Physical distance from the eye to the screen
F32 mEyeToScreen;
// Physical distance between lens centers, in meters
F32 mLensSeparation;
@ -82,50 +94,25 @@ protected:
// Physical distance between the user's eye centers
F32 mInterpupillaryDistance;
// The eye IPD as a Point3F
Point3F mEyeWorldOffset;
// Radial distortion correction coefficients used by the barrel distortion shader
Point4F mKDistortion;
// Chromatic aberration correction coefficients
Point4F mChromaticAbCorrection;
// Calculated values of eye x offset from center in normalized (uv) coordinates
// where each eye is 0..1. Used for the mono to stereo postFX to simulate an
// eye offset of the camera. The x component is the left eye, the y component
// is the right eye.
Point2F mEyeUVOffset;
// Used to adjust where an eye's view is rendered to account for the lenses not
// being in the center of the physical screen half.
F32 mXCenterOffset;
// When calculating the distortion scale to use to increase the size of the input texture
// this determines how we should attempt to fit the distorted view into the backbuffer.
Point2F mDistortionFit;
// Is the factor by which the input texture size is increased to make post-distortion
// result distortion fit the viewport. If the input texture is the same size as the
// backbuffer, then this should be 1.0.
F32 mDistortionScale;
// Aspect ratio for a single eye
F32 mAspectRatio;
// Vertical field of view
F32 mYFOV;
// The amount to offset the projection matrix to account for the eye not being in the
// center of the screen.
Point2F mProjectionCenterOffset;
// Current pose of eyes
ovrPosef mCurrentEyePoses[2];
ovrEyeRenderDesc mEyeRenderDesc[2];
ovrFovPort mCurrentFovPorts[2];
Point2I mWindowSize;
GameConnection *mConnection;
OculusVRSensorDevice *mSensor;
U32 mActionCodeIndex;
protected:
F32 calcScale(F32 fitRadius);
void calculateValues(bool calculateDistortionScale);
void createSimulatedPreviewRift(bool calculateDistortionScale);
void updateRenderInfo();
public:
OculusVRHMDDevice();
@ -134,13 +121,12 @@ public:
void cleanUp();
// Set the HMD properties based on information from the OVR device
void set(OVR::HMDDevice* hmd, OVR::HMDInfo& info, bool calculateDistortionScale);
void set(ovrHmd hmd, U32 actionCodeIndex);
// Set the HMD properties based on a simulation of the given type
void createSimulation(SimulationTypes simulationType, bool calculateDistortionScale);
// Sets optimal display size for canvas
void setOptimalDisplaySize(GuiCanvas *canvas);
bool isValid() const {return mIsValid;}
bool isSimulated() const {return mIsSimulation;}
const char* getProductName() const { return mProductName.c_str(); }
const char* getManufacturer() const { return mManufacturer.c_str(); }
@ -161,13 +147,6 @@ public:
// Physical screen size in meters
const Point2F& getScreenSize() const { return mScreenSize; }
// Physical offset from the top of the screen to the center of the
// eye, in meters. Usually half of the vertical physical screen size
F32 getVerticalEyeCenter() const { return mVerticalEyeCenter; }
// Physical distance from the eye to the screen
F32 getEyeToScreen() const { return mEyeToScreen; }
// Physical distance between lens centers, in meters
F32 getLensSeparation() const { return mLensSeparation; }
@ -178,37 +157,78 @@ public:
F32 getIPD() const { return mInterpupillaryDistance; }
// Set a new physical distance between the user's eye centers
void setIPD(F32 ipd, bool calculateDistortionScale);
// Provides the IPD of one eye as a Point3F
const Point3F& getEyeWorldOffset() const { return mEyeWorldOffset; }
// Radial distortion correction coefficients used by the barrel distortion shader
const Point4F& getKDistortion() const { return mKDistortion; }
// Chromatic aberration correction coefficients used by the barrel distortion shader
const Point4F& getChromaticAbCorrection() const { return mChromaticAbCorrection; }
// Calculated values of eye x offset from center in normalized (uv) coordinates.
const Point2F& getEyeUVOffset() const { return mEyeUVOffset; }
// Used to adjust where an eye's view is rendered to account for the lenses not
// being in the center of the physical screen half.
F32 getCenterOffset() const { return mXCenterOffset; }
// Is the factor by which the input texture size is increased to make post-distortion
// result distortion fit the viewport.
F32 getDistortionScale() const { return mDistortionScale; }
// Aspect ratio for a single eye
F32 getAspectRation() const { return mAspectRatio; }
// Vertical field of view
F32 getYFOV() const { return mYFOV; }
void setIPD(F32 ipd);
// The amount to offset the projection matrix to account for the eye not being in the
// center of the screen.
const Point2F& getProjectionCenterOffset() const { return mProjectionCenterOffset; }
void getStereoViewports(RectI *dest) const { dMemcpy(dest, mEyeViewport, sizeof(mEyeViewport)); }
void getStereoTargets(GFXTextureTarget **dest) const { dest[0] = mEyeRT[0]; dest[1] = mEyeRT[1]; }
void getFovPorts(FovPort *dest) const { dMemcpy(dest, mCurrentFovPorts, sizeof(mCurrentFovPorts)); }
/// Returns eye offsets in torque coordinate space, i.e. z being up, x being left-right, and y being depth (forward).
void getEyeOffsets(Point3F *offsets) const {
offsets[0] = Point3F(-mEyeRenderDesc[0].HmdToEyeViewOffset.x, mEyeRenderDesc[0].HmdToEyeViewOffset.z, -mEyeRenderDesc[0].HmdToEyeViewOffset.y);
offsets[1] = Point3F(-mEyeRenderDesc[1].HmdToEyeViewOffset.x, mEyeRenderDesc[1].HmdToEyeViewOffset.z, -mEyeRenderDesc[1].HmdToEyeViewOffset.y); }
void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
void updateCaps();
void onStartFrame();
void onEndFrame();
void onDeviceDestroy();
Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize);
void clearRenderTargets();
bool isDisplayingWarning();
void dismissWarning();
bool setupTargets();
/// Designates canvas we are drawing to. Also updates render targets
void setDrawCanvas(GuiCanvas *canvas) { if (mDrawCanvas != canvas) { mDrawCanvas = canvas; } updateRenderInfo(); }
virtual void setCurrentConnection(GameConnection *connection) { mConnection = connection; }
virtual GameConnection* getCurrentConnection() { return mConnection; }
String dumpMetrics();
// Stereo RT
GFXTexHandle mStereoTexture;
GFXTexHandle mStereoDepthTexture;
GFXTextureTargetRef mStereoRT;
// Eye RTs (if we are using separate targets)
GFXTextureTargetRef mEyeRT[2];
GFXTexHandle mEyeTexture[2];
GFXTexHandle mEyeDepthTexture[2];
// Current render target size for each eye
Point2I mEyeRenderSize[2];
// Recommended eye target size for each eye
ovrSizei mRecomendedEyeTargetSize[2];
// Desired viewport for each eye
RectI mEyeViewport[2];
F32 mCurrentPixelDensity;
F32 smDesiredPixelDensity;
ovrTrackingState mLastTrackingState;
GFXDevice::GFXDeviceRenderStyles mDesiredRenderingMode;
GFXFormat mRTFormat;
// Canvas we should be drawing
GuiCanvas *mDrawCanvas;
OculusVRSensorDevice *getSensorDevice() { return mSensor; }
};
#endif // _OCULUSVRHMDDEVICE_H_

View file

@ -20,6 +20,7 @@
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/input/oculusVR/oculusVRDevice.h"
#include "platform/input/oculusVR/oculusVRSensorData.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
#include "console/console.h"
@ -32,66 +33,44 @@ OculusVRSensorData::OculusVRSensorData()
void OculusVRSensorData::reset()
{
mDataSet = false;
mStatusFlags = 0;
}
void OculusVRSensorData::setData(OVR::SensorFusion& data, const F32& maxAxisRadius)
void OculusVRSensorData::setData(ovrTrackingState& data, const F32& maxAxisRadius)
{
// Sensor rotation
OVR::Quatf orientation;
if(data.GetPredictionDelta() > 0)
{
orientation = data.GetPredictedOrientation();
}
else
{
orientation = data.GetOrientation();
}
// Sensor rotation & position
OVR::Posef pose = data.HeadPose.ThePose;
OVR::Quatf orientation = pose.Rotation;
OVR::Vector3f position = data.HeadPose.ThePose.Position;
mPosition = Point3F(-position.z, position.x, position.y);
mPosition *= OculusVRDevice::smPositionTrackingScale;
OVR::Matrix4f orientMat(orientation);
OculusVRUtil::convertRotation(orientMat.M, mRot);
mRotQuat.set(mRot);
// Sensor rotation in Euler format
OculusVRUtil::convertRotation(orientation, mRotEuler);
OculusVRUtil::convertRotation(orientation, mRotEuler); // mRotEuler == pitch, roll, yaw FROM yaw, pitch, roll
//mRotEuler = EulerF(0,0,0);
float hmdYaw, hmdPitch, hmdRoll;
orientation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&hmdYaw, &hmdPitch, &hmdRoll);
// Sensor rotation as axis
OculusVRUtil::calculateAxisRotation(mRot, maxAxisRadius, mRotAxis);
// Sensor raw values
OVR::Vector3f accel = data.GetAcceleration();
OVR::Vector3f accel = data.HeadPose.LinearAcceleration;
OculusVRUtil::convertAcceleration(accel, mAcceleration);
OVR::Vector3f angVel = data.GetAngularVelocity();
OVR::Vector3f angVel = data.HeadPose.AngularVelocity;
OculusVRUtil::convertAngularVelocity(angVel, mAngVelocity);
OVR::Vector3f mag;
if(data.HasMagCalibration() && data.IsYawCorrectionEnabled())
{
mag = data.GetCalibratedMagnetometer();
}
else
{
mag = data.GetMagnetometer();
}
OVR::Vector3f mag = data.RawSensorData.Magnetometer;
OculusVRUtil::convertMagnetometer(mag, mMagnetometer);
mDataSet = true;
}
void OculusVRSensorData::simulateData(const F32& maxAxisRadius)
{
// Sensor rotation
mRot.identity();
mRotQuat.identity();
mRotEuler.zero();
// Sensor rotation as axis
OculusVRUtil::calculateAxisRotation(mRot, maxAxisRadius, mRotAxis);
// Sensor raw values
mAcceleration.zero();
mAngVelocity.zero();
mMagnetometer.zero();
mStatusFlags = data.StatusFlags;
mDataSet = true;
}
@ -132,5 +111,10 @@ U32 OculusVRSensorData::compare(OculusVRSensorData* other, bool doRawCompare)
}
}
if (other->mStatusFlags != mStatusFlags)
{
result |= DIFF_STATUS;
}
return result;
}

View file

@ -27,7 +27,7 @@
#include "math/mMatrix.h"
#include "math/mQuat.h"
#include "math/mPoint2.h"
#include "OVR.h"
#include "OVR_CAPI_0_5_0.h"
struct OculusVRSensorData
{
@ -39,6 +39,8 @@ struct OculusVRSensorData
DIFF_ACCEL = (1<<3),
DIFF_ANGVEL = (1<<4),
DIFF_MAG = (1<<5),
DIFF_POS = (1<<6),
DIFF_STATUS = (1<<7),
DIFF_ROTAXIS = (DIFF_ROTAXISX | DIFF_ROTAXISY),
DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
@ -46,6 +48,9 @@ struct OculusVRSensorData
bool mDataSet;
// Position
Point3F mPosition;
// Rotation
MatrixF mRot;
QuatF mRotQuat;
@ -59,16 +64,15 @@ struct OculusVRSensorData
EulerF mAngVelocity;
VectorF mMagnetometer;
U32 mStatusFlags;
OculusVRSensorData();
/// Reset the data
void reset();
/// Set data based on given sensor fusion
void setData(OVR::SensorFusion& data, const F32& maxAxisRadius);
/// Simulate valid data
void simulateData(const F32& maxAxisRadius);
void setData(ovrTrackingState& data, const F32& maxAxisRadius);
/// Compare this data and given and return differences
U32 compare(OculusVRSensorData* other, bool doRawCompare);

View file

@ -24,6 +24,8 @@
#include "platform/input/oculusVR/oculusVRSensorData.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
#include "platform/platformInput.h"
#include"console/simBase.h"
#include "console/engineAPI.h"
U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORROTANG[OculusVRConstants::MaxSensors] = {0};
@ -32,13 +34,15 @@ U32 OculusVRSensorDevice::OVR_SENSORROTAXISY[OculusVRConstants::MaxSensors] = {0
U32 OculusVRSensorDevice::OVR_SENSORACCELERATION[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORANGVEL[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORMAGNETOMETER[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORPOSITION[OculusVRConstants::MaxSensors] = {0};
OculusVRSensorDevice::OculusVRSensorDevice()
{
mIsValid = false;
mIsSimulation = false;
mDevice = NULL;
mCurrentTrackingCaps = 0;
mSupportedTrackingCaps = 0;
mPositionTrackingDisabled = false;
for(U32 i=0; i<2; ++i)
{
mDataBuffer[i] = new OculusVRSensorData();
@ -60,34 +64,33 @@ OculusVRSensorDevice::~OculusVRSensorDevice()
void OculusVRSensorDevice::cleanUp()
{
mSensorFusion.AttachToSensor(NULL);
if(mDevice)
{
mDevice->Release();
mDevice = NULL;
}
mIsValid = false;
ovrHmd_ConfigureTracking(mDevice, 0, 0);
}
void OculusVRSensorDevice::set(OVR::SensorDevice* sensor, OVR::SensorInfo& info, S32 actionCodeIndex)
void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
{
mIsValid = false;
mDevice = sensor;
mSensorFusion.AttachToSensor(sensor);
mYawCorrectionDisabled = !mSensorFusion.IsYawCorrectionEnabled();
mSupportedTrackingCaps = sensor->TrackingCaps;
mCurrentTrackingCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position;
mCurrentTrackingCaps = mSupportedTrackingCaps & mCurrentTrackingCaps;
mYawCorrectionDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_MagYawCorrection);
mPositionTrackingDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_Position);
// DeviceInfo
mProductName = info.ProductName;
mManufacturer = info.Manufacturer;
mVersion = info.Version;
mProductName = sensor->ProductName;
mManufacturer = sensor->Manufacturer;
mVersion = sensor->Type;
// SensorInfo
mVendorId = info.VendorId;
mProductId = info.ProductId;
mSerialNumber = info.SerialNumber;
mVendorId = sensor->VendorId;
mProductId = sensor->ProductId;
mSerialNumber = sensor->SerialNumber;
mActionCodeIndex = actionCodeIndex;
@ -100,43 +103,8 @@ void OculusVRSensorDevice::set(OVR::SensorDevice* sensor, OVR::SensorInfo& info,
{
mIsValid = true;
}
}
void OculusVRSensorDevice::createSimulation(SimulationTypes simulationType, S32 actionCodeIndex)
{
if(simulationType == ST_RIFT_PREVIEW)
{
createSimulatedPreviewRift(actionCodeIndex);
}
}
void OculusVRSensorDevice::createSimulatedPreviewRift(S32 actionCodeIndex)
{
mIsValid = false;
mIsSimulation = true;
mYawCorrectionDisabled = true;
// DeviceInfo
mProductName = "Tracker DK";
mManufacturer = "Oculus VR, Inc.";
mVersion = 0;
// SensorInfo
mVendorId = 10291;
mProductId = 1;
mSerialNumber = "000000000000";
mActionCodeIndex = actionCodeIndex;
if(mActionCodeIndex >= OculusVRConstants::MaxSensors)
{
// Cannot declare more sensors than we are able to handle
mIsValid = false;
}
else
{
mIsValid = true;
}
updateTrackingCaps();
}
void OculusVRSensorDevice::buildCodeTable()
@ -154,6 +122,8 @@ void OculusVRSensorDevice::buildCodeTable()
OVR_SENSORACCELERATION[i] = INPUTMGR->getNextDeviceCode();
OVR_SENSORANGVEL[i] = INPUTMGR->getNextDeviceCode();
OVR_SENSORMAGNETOMETER[i] = INPUTMGR->getNextDeviceCode();
OVR_SENSORPOSITION[i] = INPUTMGR->getNextDeviceCode();
}
// Build out the virtual map
@ -179,27 +149,27 @@ void OculusVRSensorDevice::buildCodeTable()
dSprintf(buffer, 64, "ovr_sensormagnetometer%d", i);
INPUTMGR->addVirtualMap( buffer, SI_POS, OVR_SENSORMAGNETOMETER[i] );
dSprintf(buffer, 64, "ovr_sensorpos%d", i);
INPUTMGR->addVirtualMap( buffer, SI_POS, OVR_SENSORPOSITION[i] );
}
}
//-----------------------------------------------------------------------------
bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, F32 maxAxisRadius, bool generateRawSensor)
bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, bool generatePositionEvents, F32 maxAxisRadius, bool generateRawSensor)
{
if(!mIsValid)
return false;
// Grab current state
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
mLastStatus = ts.StatusFlags;
// Store the current data from the sensor and compare with previous data
U32 diff;
OculusVRSensorData* currentBuffer = (mPrevData == mDataBuffer[0]) ? mDataBuffer[1] : mDataBuffer[0];
if(!mIsSimulation)
{
currentBuffer->setData(mSensorFusion, maxAxisRadius);
}
else
{
currentBuffer->simulateData(maxAxisRadius);
}
currentBuffer->setData(ts, maxAxisRadius);
diff = mPrevData->compare(currentBuffer, generateRawSensor);
// Update the previous data pointer. We do this here in case someone calls our
@ -218,7 +188,7 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
{
// Convert angles to degrees
VectorF angles;
for(U32 i=0; i<3; ++i)
for(U32 i=0; i<3; ++i)
{
angles[i] = mRadToDeg(currentBuffer->mRotEuler[i]);
}
@ -235,6 +205,11 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_AXIS, OVR_SENSORROTAXISY[mActionCodeIndex], SI_MOVE, currentBuffer->mRotAxis.y);
}
if (generatePositionEvents && diff & OculusVRSensorData::DIFF_POS)
{
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_AXIS, OVR_SENSORROTAXISX[mActionCodeIndex], SI_MOVE, currentBuffer->mPosition);
}
// Raw sensor event
if(generateRawSensor && diff & OculusVRSensorData::DIFF_RAW)
{
@ -256,6 +231,14 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORMAGNETOMETER[mActionCodeIndex], SI_MOVE, currentBuffer->mMagnetometer);
}
if (diff & OculusVRSensorData::DIFF_STATUS)
{
if (Con::isFunction("onOculusStatusUpdate"))
{
Con::executef("onOculusStatusUpdate", ts.StatusFlags);
}
}
return true;
}
@ -266,39 +249,7 @@ void OculusVRSensorDevice::reset()
if(!mIsValid)
return;
mSensorFusion.Reset();
}
F32 OculusVRSensorDevice::getPredictionTime() const
{
if(!mIsValid)
return 0.0f;
return mSensorFusion.GetPredictionDelta();
}
void OculusVRSensorDevice::setPredictionTime(F32 dt)
{
if(!mIsValid)
return;
mSensorFusion.SetPrediction(dt);
}
bool OculusVRSensorDevice::getGravityCorrection() const
{
if(!mIsValid)
return false;
return mSensorFusion.IsGravityEnabled();
}
void OculusVRSensorDevice::setGravityCorrection(bool state)
{
if(!mIsValid)
return;
mSensorFusion.SetGravityEnabled(state);
ovrHmd_RecenterPose(mDevice);
}
bool OculusVRSensorDevice::getYawCorrection() const
@ -306,7 +257,7 @@ bool OculusVRSensorDevice::getYawCorrection() const
if(!mIsValid)
return false;
return mSensorFusion.IsYawCorrectionEnabled();
return !(mCurrentTrackingCaps & ovrTrackingCap_MagYawCorrection);
}
void OculusVRSensorDevice::setYawCorrection(bool state)
@ -314,10 +265,30 @@ void OculusVRSensorDevice::setYawCorrection(bool state)
if(!mIsValid)
return;
if(mYawCorrectionDisabled || !mSensorFusion.HasMagCalibration())
if (state == !mYawCorrectionDisabled)
return;
mSensorFusion.SetYawCorrectionEnabled(state);
// Don't allow if not capable
if(state && !(mSupportedTrackingCaps & ovrTrackingCap_MagYawCorrection))
return;
mYawCorrectionDisabled = !state;
updateTrackingCaps();
}
void OculusVRSensorDevice::setPositionTracking(bool state)
{
if(!mIsValid)
return;
if (state == !mPositionTrackingDisabled)
return;
if(state && !(mSupportedTrackingCaps & ovrTrackingCap_Position))
return;
mPositionTrackingDisabled = !state;
updateTrackingCaps();
}
bool OculusVRSensorDevice::getMagnetometerCalibrationAvailable() const
@ -325,7 +296,23 @@ bool OculusVRSensorDevice::getMagnetometerCalibrationAvailable() const
if(!mIsValid)
return false;
return mSensorFusion.HasMagCalibration();
return (mSupportedTrackingCaps & ovrTrackingCap_MagYawCorrection) != 0;
}
bool OculusVRSensorDevice::getOrientationTrackingAvailable() const
{
if(!mIsValid)
return false;
return (mSupportedTrackingCaps & ovrTrackingCap_Orientation) != 0;
}
bool OculusVRSensorDevice::getPositionTrackingAvailable() const
{
if(!mIsValid)
return false;
return (mSupportedTrackingCaps & ovrTrackingCap_Position) != 0;
}
//-----------------------------------------------------------------------------
@ -335,15 +322,8 @@ EulerF OculusVRSensorDevice::getEulerRotation()
if(!mIsValid)
return Point3F::Zero;
OVR::Quatf orientation;
if(mSensorFusion.GetPredictionDelta() > 0)
{
orientation = mSensorFusion.GetPredictedOrientation();
}
else
{
orientation = mSensorFusion.GetOrientation();
}
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
// Sensor rotation in Euler format
EulerF rot;
@ -357,13 +337,12 @@ EulerF OculusVRSensorDevice::getRawEulerRotation()
if(!mIsValid)
return Point3F::Zero;
OVR::Quatf orientation;
orientation = mSensorFusion.GetOrientation();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
// Sensor rotation in Euler format
EulerF rot;
OculusVRUtil::convertRotation(orientation, rot);
return rot;
}
@ -371,9 +350,10 @@ VectorF OculusVRSensorDevice::getAcceleration()
{
if(!mIsValid)
return VectorF::Zero;
OVR::Vector3f a = mSensorFusion.GetAcceleration();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Vector3f a = ts.HeadPose.LinearAcceleration;
// Sensor acceleration in VectorF format
VectorF acceleration;
OculusVRUtil::convertAcceleration(a, acceleration);
@ -385,8 +365,9 @@ EulerF OculusVRSensorDevice::getAngularVelocity()
{
if(!mIsValid)
return EulerF::Zero;
OVR::Vector3f v = mSensorFusion.GetAngularVelocity();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Vector3f v = ts.HeadPose.AngularVelocity;
// Sensor angular velocity in EulerF format
EulerF vel;
@ -395,38 +376,28 @@ EulerF OculusVRSensorDevice::getAngularVelocity()
return vel;
}
VectorF OculusVRSensorDevice::getMagnetometer()
Point3F OculusVRSensorDevice::getPosition()
{
if(!mIsValid)
return VectorF::Zero;
OVR::Vector3f m;
if(mSensorFusion.HasMagCalibration() && mSensorFusion.IsYawCorrectionEnabled())
{
m = mSensorFusion.GetCalibratedMagnetometer();
}
else
{
m = mSensorFusion.GetMagnetometer();
}
return Point3F();
// Sensor magnetometer reading in VectorF format
VectorF mag;
OculusVRUtil::convertMagnetometer(m, mag);
return mag;
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Vector3f v = ts.HeadPose.ThePose.Position;
return Point3F(-v.x, v.z, -v.y);
}
VectorF OculusVRSensorDevice::getRawMagnetometer()
void OculusVRSensorDevice::updateTrackingCaps()
{
if(!mIsValid)
return VectorF::Zero;
if (!mIsValid)
return;
OVR::Vector3f m = mSensorFusion.GetMagnetometer();
// Sensor magnetometer reading in VectorF format
VectorF mag;
OculusVRUtil::convertMagnetometer(m, mag);
// Set based on current vars
mCurrentTrackingCaps = ovrTrackingCap_Orientation;
return mag;
if (!mYawCorrectionDisabled)
mCurrentTrackingCaps |= ovrTrackingCap_MagYawCorrection;
if (!mPositionTrackingDisabled)
mCurrentTrackingCaps |= ovrTrackingCap_Position;
ovrHmd_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0);
}

View file

@ -30,17 +30,12 @@
#include "math/mPoint4.h"
#include "platform/input/oculusVR/oculusVRConstants.h"
#include "platform/types.h"
#include "OVR.h"
#include "OVR_CAPI_0_5_0.h"
struct OculusVRSensorData;
class OculusVRSensorDevice
{
public:
enum SimulationTypes {
ST_RIFT_PREVIEW,
};
public:
// Action codes
static U32 OVR_SENSORROT[OculusVRConstants::MaxSensors]; // SI_ROT
@ -54,15 +49,15 @@ public:
static U32 OVR_SENSORANGVEL[OculusVRConstants::MaxSensors]; // SI_POS but is EulerF
static U32 OVR_SENSORMAGNETOMETER[OculusVRConstants::MaxSensors]; // SI_POS
static U32 OVR_SENSORPOSITION[OculusVRConstants::MaxSensors];
protected:
bool mIsValid;
bool mIsSimulation;
OVR::SensorDevice* mDevice;
OVR::SensorFusion mSensorFusion;
ovrHmd mDevice;
U32 mCurrentTrackingCaps;
U32 mSupportedTrackingCaps;
// From OVR::DeviceInfo
String mProductName;
String mManufacturer;
@ -76,6 +71,12 @@ protected:
// Has yaw correction been disabled by the control panel
bool mYawCorrectionDisabled;
// Has position tracking been disabled
bool mPositionTrackingDisabled;
// Last tracking status
U32 mLastStatus;
// Assigned by the OculusVRDevice
S32 mActionCodeIndex;
@ -86,9 +87,6 @@ protected:
// for the sensor
OculusVRSensorData* mPrevData;
protected:
void createSimulatedPreviewRift(S32 actionCodeIndex);
public:
OculusVRSensorDevice();
virtual ~OculusVRSensorDevice();
@ -98,44 +96,42 @@ public:
void cleanUp();
// Set the sensor properties based on information from the OVR device
void set(OVR::SensorDevice* sensor, OVR::SensorInfo& info, S32 actionCodeIndex);
// Set the sensor properties based on a simulation of the given type
void createSimulation(SimulationTypes simulationType, S32 actionCodeIndex);
void set(ovrHmd sensor, S32 actionCodeIndex);
bool isValid() const {return mIsValid;}
bool isSimulated() {return mIsSimulation;}
bool process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, F32 maxAxisRadius, bool generateRawSensor);
bool process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, bool generatePositionEvents, F32 maxAxisRadius, bool generateRawSensor);
void reset();
// Get the prediction time for the sensor fusion. The time is in seconds.
F32 getPredictionTime() const;
// Set the prediction time for the sensor fusion. The time is in seconds.
void setPredictionTime(F32 dt);
// Is gravity correction enabled for pitch and roll
bool getGravityCorrection() const;
// Set the pitch and roll gravity correction
void setGravityCorrection(bool state);
// Has yaw correction been disabled using the control panel
bool getYawCorrectionUserDisabled() const { return mYawCorrectionDisabled; }
// Is yaw correction enabled
bool getYawCorrection() const;
// Position is valid
bool getHasValidPosition() const { return mLastStatus & ovrStatus_PositionTracked; }
// Set the yaw correction. Note: if magnetometer calibration data is not present,
// or user has disabled yaw correction in the control panel, this method will
// not enable it.
void setYawCorrection(bool state);
// Sets position tracking state
void setPositionTracking(bool state);
// Is magnetometer calibration data available for this sensor
bool getMagnetometerCalibrationAvailable() const;
// Is orientation tracking data available for this sensor
bool getOrientationTrackingAvailable() const;
// Is position tracking data available for this sensor
bool getPositionTrackingAvailable() const;
U32 getLastTrackingStatus() const { return mLastStatus; }
const char* getProductName() { return mProductName.c_str(); }
const char* getManufacturer() { return mManufacturer.c_str(); }
U32 getVersion() { return mVersion; }
@ -155,12 +151,10 @@ public:
// Get the current angular velocity reading, in rad/s
EulerF getAngularVelocity();
// Get the current magnetometer reading (direction and field strength), in Gauss.
// Uses magnetometer calibration if set.
VectorF getMagnetometer();
// Get the current position
Point3F getPosition();
// Get the current raw magnetometer reading (direction and field strength), in Gauss
VectorF getRawMagnetometer();
void updateTrackingCaps();
};
#endif // _OCULUSVRSENSORDEVICE_H_

View file

@ -25,7 +25,12 @@
#include "math/mPoint2.h"
#include "math/mMatrix.h"
#include "OVR.h"
#include "OVR_CAPI_0_5_0.h"
// NOTE: math code in oculus uses "Offset" which is a preprocessor macro
#define TorqueOffset Offset
#undef Offset
#include "Extras/OVR_Math.h"
namespace OculusVRUtil
{
@ -48,4 +53,6 @@ namespace OculusVRUtil
void convertMagnetometer(OVR::Vector3f& inMagnetometer, VectorF& outMagnetometer);
}
#define Offset TorqueOffset
#endif // _OCULUSVRUTIL_H_