Oculus VR DK2 Support

- Updated to work with the 0.5.x Oculus SDK
- Uses the Oculus SDK's distortion rendering rather than the barrel-distortion PostFX shader
- Stereo rendering refactored so more render information (eye transforms, FOV ports, viewports, targets) is pulled from the IDisplayDevice
- Implements an offscreen canvas (GuiOffscreenCanvas) for in-game GUI with Oculus (see the sketch below)
- Message dialogs and the metrics display can now go to the offscreen canvas (if the Oculus demo is set up correctly)
James Urquhart 2015-05-06 23:07:48 +01:00
parent b3170bcddf
commit 3a457749ec
56 changed files with 2654 additions and 1426 deletions
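
A minimal sketch of how the new offscreen-GUI overlay can be wired up engine-side. The function and object names (setupStereoGuiOverlay, VROffscreenCanvas) are illustrative assumptions; GuiOffscreenCanvas and GuiTSCtrl::setStereoGui() are what this commit adds, and in practice the objects are typically created from script.

#include "gui/3d/guiTSControl.h"
#include "gui/core/guiOffscreenCanvas.h"

// Sketch only: render in-game GUI (message dialogs, metrics) into an offscreen
// canvas texture and let the stereo GuiTSCtrl composite it in front of each eye.
void setupStereoGuiOverlay(GuiTSCtrl *playGui)
{
   GuiOffscreenCanvas *vrCanvas = new GuiOffscreenCanvas();
   vrCanvas->registerObject("VROffscreenCanvas");  // onAdd() allocates the render
                                                   // target and registers its
                                                   // named texture target

   // GuiTSCtrl keeps the canvas' NamedTexTarget and, when rendering in
   // RenderStyleStereoSideBySide, draws it as a textured quad per eye.
   playGui->setStereoGui(vrCanvas);
}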


@ -279,6 +279,7 @@ Camera::Camera()
mLastAbsoluteYaw = 0.0f;
mLastAbsolutePitch = 0.0f;
mLastAbsoluteRoll = 0.0f;
// For NewtonFlyMode
mNewtonRotation = false;
@ -379,6 +380,57 @@ void Camera::getCameraTransform(F32* pos, MatrixF* mat)
mat->mul( gCamFXMgr.getTrans() );
}
void Camera::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, MatrixF *outMat)
{
// The camera doesn't support a third person mode,
// so we want to override the default ShapeBase behavior.
ShapeBase * obj = dynamic_cast<ShapeBase*>(static_cast<SimObject*>(mOrbitObject));
if(obj && static_cast<ShapeBaseData*>(obj->getDataBlock())->observeThroughObject)
obj->getEyeCameraTransform(displayDevice, eyeId, outMat);
else
{
Parent::getEyeCameraTransform(displayDevice, eyeId, outMat);
}
}
DisplayPose Camera::calcCameraDeltaPose(GameConnection *con, DisplayPose inPose)
{
// NOTE: this is intended to be similar to updateMove
DisplayPose outPose;
outPose.orientation = EulerF(0,0,0);
outPose.position = inPose.position;
// Pitch
outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);
// Constrain the range of mRot.x
while (outPose.orientation.x < -M_PI_F)
outPose.orientation.x += M_2PI_F;
while (outPose.orientation.x > M_PI_F)
outPose.orientation.x -= M_2PI_F;
// Yaw
outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
// Constrain the range of mRot.z
while (outPose.orientation.z < -M_PI_F)
outPose.orientation.z += M_2PI_F;
while (outPose.orientation.z > M_PI_F)
outPose.orientation.z -= M_2PI_F;
// Bank
if (mDataBlock->cameraCanBank)
{
outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
}
// Constrain the range of mRot.y
while (outPose.orientation.y > M_PI_F)
outPose.orientation.y -= M_2PI_F;
return outPose;
}
//----------------------------------------------------------------------------
F32 Camera::getCameraFov()
@ -547,6 +599,7 @@ void Camera::processTick(const Move* move)
mLastAbsoluteYaw = emove->rotZ[emoveIndex];
mLastAbsolutePitch = emove->rotX[emoveIndex];
mLastAbsoluteRoll = emove->rotY[emoveIndex];
// Bank
mRot.y = emove->rotY[emoveIndex];


@ -113,6 +113,7 @@ class Camera: public ShapeBase
F32 mLastAbsoluteYaw; ///< Stores the last absolute yaw value as passed in by ExtendedMove
F32 mLastAbsolutePitch; ///< Stores the last absolute pitch value as passed in by ExtendedMove
F32 mLastAbsoluteRoll; ///< Stores the last absolute roll value as passed in by ExtendedMove
/// @name NewtonFlyMode
/// @{
@ -235,6 +236,8 @@ class Camera: public ShapeBase
virtual void processTick( const Move* move );
virtual void interpolateTick( F32 delta);
virtual void getCameraTransform( F32* pos,MatrixF* mat );
virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
virtual DisplayPose calcCameraDeltaPose(GameConnection *con, DisplayPose inPose);
virtual void writePacketData( GameConnection* conn, BitStream* stream );
virtual void readPacketData( GameConnection* conn, BitStream* stream );


@ -39,6 +39,9 @@
#include "scene/sceneManager.h"
#define __SCENEMANAGER_H__
#endif
#ifndef _IDISPLAYDEVICE_H_
#include "platform/output/IDisplayDevice.h"
#endif
class NetConnection;
class ProcessList;
@ -418,6 +421,7 @@ public:
// Not implemented here, but should return the Camera to world transformation matrix
virtual void getCameraTransform (F32 *pos, MatrixF *mat ) { *mat = MatrixF::Identity; }
virtual void getEyeCameraTransform ( IDisplayDevice *device, U32 eyeId, MatrixF *mat ) { *mat = MatrixF::Identity; }
/// Returns the water object we are colliding with, it is up to derived
/// classes to actually set this object.


@ -235,6 +235,7 @@ GameConnection::GameConnection()
GameConnection::~GameConnection()
{
setDisplayDevice(NULL);
delete mAuthInfo;
for(U32 i = 0; i < mConnectArgc; i++)
dFree(mConnectArgv[i]);
@ -673,6 +674,30 @@ bool GameConnection::getControlCameraTransform(F32 dt, MatrixF* mat)
return true;
}
bool GameConnection::getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms)
{
GameBase* obj = getCameraObject();
if(!obj)
return false;
GameBase* cObj = obj;
while((cObj = cObj->getControlObject()) != 0)
{
if(cObj->useObjsEyePoint())
obj = cObj;
}
// Perform operation on left & right eyes. For each we need to calculate the world space
// of the rotated eye offset and add that onto the camera world space.
for (U32 i=0; i<2; i++)
{
obj->getEyeCameraTransform(display, i, &transforms[i]);
}
return true;
}
bool GameConnection::getControlCameraDefaultFov(F32 * fov)
{
//find the last control object in the chain (client->player->turret->whatever...)


@ -269,6 +269,10 @@ public:
bool getControlCameraTransform(F32 dt,MatrixF* mat);
bool getControlCameraVelocity(Point3F *vel);
/// Returns the eye transforms for the control object, using supplemental information
/// from the provided IDisplayDevice.
bool getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms);
bool getControlCameraDefaultFov(F32 *fov);
bool getControlCameraFov(F32 *fov);
bool setControlCameraFov(F32 fov);
@ -280,8 +284,8 @@ public:
void setFirstPerson(bool firstPerson);
bool hasDisplayDevice() const { return mDisplayDevice != NULL; }
const IDisplayDevice* getDisplayDevice() const { return mDisplayDevice; }
void setDisplayDevice(IDisplayDevice* display) { mDisplayDevice = display; }
IDisplayDevice* getDisplayDevice() const { return mDisplayDevice; }
void setDisplayDevice(IDisplayDevice* display) { if (mDisplayDevice) mDisplayDevice->setDrawCanvas(NULL); mDisplayDevice = display; }
void clearDisplayDevice() { mDisplayDevice = NULL; }
void setControlSchemeParameters(bool absoluteRotation, bool addYawToAbsRot, bool addPitchToAbsRot);


@ -349,7 +349,6 @@ bool GameProcessCameraQuery(CameraQuery *query)
// Provide some default values
query->projectionOffset = Point2F::Zero;
query->eyeOffset = Point3F::Zero;
F32 cameraFov = 0.0f;
bool fovSet = false;
@ -358,14 +357,14 @@ bool GameProcessCameraQuery(CameraQuery *query)
// is not open
if(!gEditingMission && connection->hasDisplayDevice())
{
const IDisplayDevice* display = connection->getDisplayDevice();
IDisplayDevice* display = connection->getDisplayDevice();
// Note: all eye values are invalid until this is called
display->setDrawCanvas(query->drawCanvas);
// The connection's display device may want to set the FOV
if(display->providesYFOV())
{
cameraFov = mRadToDeg(display->getYFOV());
fovSet = true;
}
display->setCurrentConnection(connection);
// The display may activate AFTER the frame has started, so call onStartFrame again here just in case
display->onStartFrame();
// The connection's display device may want to set the projection offset
if(display->providesProjectionOffset())
@ -374,14 +373,27 @@ bool GameProcessCameraQuery(CameraQuery *query)
}
// The connection's display device may want to set the eye offset
if(display->providesEyeOffset())
if(display->providesEyeOffsets())
{
query->eyeOffset = display->getEyeOffset();
display->getEyeOffsets(query->eyeOffset);
}
// Grab field of view for both eyes
if (display->providesFovPorts())
{
display->getFovPorts(query->fovPort);
fovSet = true;
}
// Grab the latest overriding render view transforms
connection->getControlCameraEyeTransforms(display, query->eyeTransforms);
display->getStereoViewports(query->stereoViewports);
display->getStereoTargets(query->stereoTargets);
}
// Use the connection's FOV settings if required
if(!fovSet && !connection->getControlCameraFov(&cameraFov))
if(!connection->getControlCameraFov(&cameraFov))
{
return false;
}


@ -1650,6 +1650,7 @@ Player::Player()
mLastAbsoluteYaw = 0.0f;
mLastAbsolutePitch = 0.0f;
mLastAbsoluteRoll = 0.0f;
}
Player::~Player()
@ -2608,6 +2609,7 @@ void Player::updateMove(const Move* move)
}
mLastAbsoluteYaw = emove->rotZ[emoveIndex];
mLastAbsolutePitch = emove->rotX[emoveIndex];
mLastAbsoluteRoll = emove->rotY[emoveIndex];
// Head bank
mHead.y = emove->rotY[emoveIndex];
@ -5584,6 +5586,57 @@ void Player::getMuzzleTransform(U32 imageSlot,MatrixF* mat)
*mat = nmat;
}
DisplayPose Player::calcCameraDeltaPose(GameConnection *con, DisplayPose inPose)
{
// NOTE: this is intended to be similar to updateMove
DisplayPose outPose;
outPose.orientation = getRenderTransform().toEuler();
outPose.position = inPose.position;
if (con && con->getControlSchemeAbsoluteRotation())
{
// Pitch
outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);
// Constrain the range of mRot.x
while (outPose.orientation.x < -M_PI_F)
outPose.orientation.x += M_2PI_F;
while (outPose.orientation.x > M_PI_F)
outPose.orientation.x -= M_2PI_F;
// Yaw
// Rotate (heading) head or body?
if ((isMounted() && getMountNode() == 0) || (con && !con->isFirstPerson()))
{
// Rotate head
outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
}
else
{
// Rotate body
outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
}
// Constrain the range of mRot.z
while (outPose.orientation.z < 0.0f)
outPose.orientation.z += M_2PI_F;
while (outPose.orientation.z > M_2PI_F)
outPose.orientation.z -= M_2PI_F;
// Bank
if (mDataBlock->cameraCanBank)
{
outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
}
// Constrain the range of mRot.y
while (outPose.orientation.y > M_PI_F)
outPose.orientation.y -= M_2PI_F;
}
return outPose;
}
void Player::getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat)
{


@ -439,6 +439,7 @@ protected:
F32 mLastAbsoluteYaw; ///< Stores the last absolute yaw value as passed in by ExtendedMove
F32 mLastAbsolutePitch; ///< Stores the last absolute pitch value as passed in by ExtendedMove
F32 mLastAbsoluteRoll; ///< Stores the last absolute roll value as passed in by ExtendedMove
S32 mMountPending; ///< mMountPending suppresses tickDelay countdown so players will sit until
///< their mount, or another animation, comes through (or 13 seconds elapses).
@ -683,6 +684,7 @@ public:
void getEyeBaseTransform(MatrixF* mat, bool includeBank);
void getRenderEyeTransform(MatrixF* mat);
void getRenderEyeBaseTransform(MatrixF* mat, bool includeBank);
virtual DisplayPose calcCameraDeltaPose(GameConnection *con, DisplayPose inPose);
void getCameraParameters(F32 *min, F32 *max, Point3F *offset, MatrixF *rot);
void getMuzzleTransform(U32 imageSlot,MatrixF* mat);
void getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat);


@ -1969,6 +1969,75 @@ void ShapeBase::getCameraTransform(F32* pos,MatrixF* mat)
mat->mul( gCamFXMgr.getTrans() );
}
void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, MatrixF *outMat)
{
MatrixF temp(1);
Point3F eyePos;
Point3F rotEyePos;
DisplayPose inPose;
displayDevice->getFrameEyePose(&inPose, eyeId);
DisplayPose newPose = calcCameraDeltaPose(displayDevice->getCurrentConnection(), inPose);
// OK, basically we just need to add newPose on top of the camera transform
// NOTE: currently we don't support a third-person camera in this mode
MatrixF cameraTransform(1);
F32 fakePos = 0;
getCameraTransform(&fakePos, &cameraTransform);
QuatF baserot = cameraTransform;
QuatF qrot = QuatF(newPose.orientation);
QuatF concatRot;
concatRot.mul(baserot, qrot);
concatRot.setMatrix(&temp);
temp.setPosition(cameraTransform.getPosition() + concatRot.mulP(newPose.position, &rotEyePos));
*outMat = temp;
}
DisplayPose ShapeBase::calcCameraDeltaPose(GameConnection *con, DisplayPose inPose)
{
// NOTE: this is intended to be similar to updateMove
// WARNING: does not take into account any move values
DisplayPose outPose;
outPose.orientation = getRenderTransform().toEuler();
outPose.position = inPose.position;
if (con && con->getControlSchemeAbsoluteRotation())
{
// Pitch
outPose.orientation.x = inPose.orientation.x;
// Constrain the range of mRot.x
while (outPose.orientation.x < -M_PI_F)
outPose.orientation.x += M_2PI_F;
while (outPose.orientation.x > M_PI_F)
outPose.orientation.x -= M_2PI_F;
// Yaw
outPose.orientation.z = inPose.orientation.z;
// Constrain the range of mRot.z
while (outPose.orientation.z < -M_PI_F)
outPose.orientation.z += M_2PI_F;
while (outPose.orientation.z > M_PI_F)
outPose.orientation.z -= M_2PI_F;
// Bank
if (mDataBlock->cameraCanBank)
{
outPose.orientation.y = inPose.orientation.y;
}
// Constrain the range of mRot.y
while (outPose.orientation.y > M_PI_F)
outPose.orientation.y -= M_2PI_F;
}
return outPose;
}
void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot)
{
*min = mDataBlock->cameraMinDist;
@ -1977,7 +2046,6 @@ void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot)
rot->identity();
}
//----------------------------------------------------------------------------
F32 ShapeBase::getDamageFlash() const
{


@ -63,7 +63,6 @@
#include "console/dynamicTypes.h"
#endif
class GFXCubemap;
class TSShapeInstance;
class SceneRenderState;
@ -1583,6 +1582,13 @@ public:
/// @param mat Camera transform (out)
virtual void getCameraTransform(F32* pos,MatrixF* mat);
/// Gets the view transform for a particular eye, taking into account the current absolute
/// orient and position values of the display device.
virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
/// Calculates a delta camera angle and view position based on inPose
virtual DisplayPose calcCameraDeltaPose(GameConnection *con, DisplayPose inPose);
/// Gets the index of a node inside a mounted image given the name
/// @param imageSlot Image slot
/// @param nodeName Node name


@ -695,9 +695,9 @@ GFXShader* GFXD3D9Device::createShader()
return shader;
}
void GFXD3D9Device::disableShaders()
void GFXD3D9Device::disableShaders(bool force)
{
setShader( NULL );
setShader( NULL, force );
setShaderConstBuffer( NULL );
}
@ -706,25 +706,24 @@ void GFXD3D9Device::disableShaders()
// and to make sure redundant shader states are not being
// sent to the card.
//-----------------------------------------------------------------------------
void GFXD3D9Device::setShader( GFXShader *shader )
void GFXD3D9Device::setShader( GFXShader *shader, bool force )
{
GFXD3D9Shader *d3dShader = static_cast<GFXD3D9Shader*>( shader );
IDirect3DPixelShader9 *pixShader = ( d3dShader != NULL ? d3dShader->mPixShader : NULL );
IDirect3DVertexShader9 *vertShader = ( d3dShader ? d3dShader->mVertShader : NULL );
if( pixShader != mLastPixShader )
if( pixShader != mLastPixShader || force )
{
mD3DDevice->SetPixelShader( pixShader );
mLastPixShader = pixShader;
}
if( vertShader != mLastVertShader )
if( vertShader != mLastVertShader || force )
{
mD3DDevice->SetVertexShader( vertShader );
mLastVertShader = vertShader;
}
}
//-----------------------------------------------------------------------------


@ -238,7 +238,7 @@ protected:
// }
virtual GFXShader* createShader();
void disableShaders();
void disableShaders(bool force = false);
/// Device helper function
virtual D3DPRESENT_PARAMETERS setupPresentParams( const GFXVideoMode &mode, const HWND &hwnd ) const = 0;
@ -272,7 +272,7 @@ public:
virtual F32 getPixelShaderVersion() const { return mPixVersion; }
virtual void setPixelShaderVersion( F32 version ){ mPixVersion = version; }
virtual void setShader( GFXShader *shader );
virtual void setShader( GFXShader *shader, bool force = false );
virtual U32 getNumSamplers() const { return mNumSamplers; }
virtual U32 getNumRenderTargets() const { return mNumRenderTargets; }
// }


@ -161,7 +161,6 @@ GFXDevice::GFXDevice()
mAllowRender = true;
mCurrentRenderStyle = RS_Standard;
mCurrentProjectionOffset = Point2F::Zero;
mStereoEyeOffset = Point3F::Zero;
mCanCurrentlyRender = false;
mInitialized = false;
@ -197,6 +196,9 @@ GFXDevice::GFXDevice()
#elif defined TORQUE_OS_PS3
GFXShader::addGlobalMacro( "TORQUE_OS_PS3" );
#endif
mStereoTargets[0] = NULL;
mStereoTargets[1] = NULL;
}
GFXDrawUtil* GFXDevice::getDrawUtil()


@ -213,6 +213,9 @@ public:
/// The device is about to finish rendering a frame
deEndOfFrame,
/// The device has rendered a frame and ended the scene
dePostFrame,
/// The device has started rendering a frame's field (such as for side-by-side rendering)
deStartOfField,
@ -244,7 +247,12 @@ public:
enum GFXDeviceRenderStyles
{
RS_Standard = 0,
RS_StereoSideBySide = (1<<0),
RS_StereoSideBySide = (1<<0), // Render into current Render Target side-by-side
};
enum GFXDeviceLimits
{
NumStereoPorts = 2
};
private:
@ -277,7 +285,19 @@ protected:
Point2F mCurrentProjectionOffset;
/// Eye offset used when using a stereo rendering style
Point3F mStereoEyeOffset;
Point3F mStereoEyeOffset[NumStereoPorts];
MatrixF mStereoEyeTransforms[NumStereoPorts];
MatrixF mInverseStereoEyeTransforms[NumStereoPorts];
/// Fov port settings
FovPort mFovPorts[NumStereoPorts];
/// Destination viewports for stereo rendering
RectI mStereoViewports[NumStereoPorts];
/// Destination targets for stereo rendering
GFXTextureTarget* mStereoTargets[NumStereoPorts];
/// This will allow querying to see if a device is initialized and ready to
/// have operations performed on it.
@ -323,10 +343,50 @@ public:
void setCurrentProjectionOffset(const Point2F& offset) { mCurrentProjectionOffset = offset; }
/// Get the current eye offsets and eye transforms used during stereo rendering
const Point3F& getStereoEyeOffset() { return mStereoEyeOffset; }
const Point3F* getStereoEyeOffsets() { return mStereoEyeOffset; }
const MatrixF* getStereoEyeTransforms() { return mStereoEyeTransforms; }
const MatrixF* getInverseStereoEyeTransforms() { return mInverseStereoEyeTransforms; }
/// Set the current eye offsets and eye transforms used during stereo rendering
void setStereoEyeOffset(const Point3F& offset) { mStereoEyeOffset = offset; }
void setStereoEyeOffsets(Point3F *offsets) { dMemcpy(mStereoEyeOffset, offsets, sizeof(Point3F) * NumStereoPorts); }
void setStereoEyeTransforms(MatrixF *transforms) { dMemcpy(mStereoEyeTransforms, transforms, sizeof(mStereoEyeTransforms)); dMemcpy(mInverseStereoEyeTransforms, transforms, sizeof(mInverseStereoEyeTransforms)); mInverseStereoEyeTransforms[0].inverse(); mInverseStereoEyeTransforms[1].inverse(); }
/// Set the current FOV ports used during stereo rendering. Assumes the array holds NumStereoPorts entries.
void setFovPort(const FovPort *ports) { dMemcpy(mFovPorts, ports, sizeof(mFovPorts)); }
/// Get the current FOV ports used during stereo rendering
const FovPort* getSteroFovPort() { return mFovPorts; }
/// Sets stereo viewports
void setSteroViewports(const RectI *ports) { dMemcpy(mStereoViewports, ports, sizeof(RectI) * NumStereoPorts); }
/// Sets stereo render targets
void setStereoTargets(GFXTextureTarget **targets) { mStereoTargets[0] = targets[0]; mStereoTargets[1] = targets[1]; }
RectI* getStereoViewports() { return mStereoViewports; }
/// Activates a stereo render target, setting the correct viewport to render eye contents.
/// If eyeId is -1, set a viewport encompassing the entire size of the render targets.
void activateStereoTarget(S32 eyeId)
{
if (eyeId == -1)
{
if (mStereoTargets[0])
{
setActiveRenderTarget(mStereoTargets[0], true);
}
}
else
{
if (mStereoTargets[eyeId])
{
setActiveRenderTarget(mStereoTargets[eyeId], false);
}
setViewport(mStereoViewports[eyeId]);
}
}
GFXCardProfiler* getCardProfiler() const { return mCardProfiler; }
@ -722,8 +782,8 @@ public:
/// Returns the number of simultaneous render targets supported by the device.
virtual U32 getNumRenderTargets() const = 0;
virtual void setShader( GFXShader *shader ) {}
virtual void disableShaders() {} // TODO Remove when T3D 4.0
virtual void setShader( GFXShader *shader, bool force = false ) {}
virtual void disableShaders( bool force = false ) {} // TODO Remove when T3D 4.0
/// Set the buffer! (Actual set happens on the next draw call, just like textures, state blocks, etc)
void setShaderConstBuffer(GFXShaderConstBuffer* buffer);


@ -22,6 +22,7 @@
#include "platform/platform.h"
#include "gui/3d/guiTSControl.h"
#include "gui/core/guiOffscreenCanvas.h"
#include "console/engineAPI.h"
#include "scene/sceneManager.h"
@ -34,7 +35,12 @@
#include "scene/reflectionManager.h"
#include "postFx/postEffectManager.h"
#include "gfx/gfxTransformSaver.h"
#include "gfx/gfxDrawUtil.h"
#include "gfx/gfxDebugEvent.h"
GFXTextureObject *gLastStereoTexture = NULL;
#define TS_OVERLAY_SCREEN_WIDTH 0.75
IMPLEMENT_CONOBJECT( GuiTSCtrl );
@ -51,6 +57,7 @@ ConsoleDocClass( GuiTSCtrl,
);
U32 GuiTSCtrl::smFrameCount = 0;
bool GuiTSCtrl::smUseLatestDisplayTransform = true;
Vector<GuiTSCtrl*> GuiTSCtrl::smAwakeTSCtrls;
ImplementEnumType( GuiTSRenderStyles,
@ -60,7 +67,6 @@ ImplementEnumType( GuiTSRenderStyles,
{ GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" },
EndImplementEnumType;
//-----------------------------------------------------------------------------
namespace
@ -153,7 +159,6 @@ GuiTSCtrl::GuiTSCtrl()
mLastCameraQuery.nearPlane = 0.01f;
mLastCameraQuery.projectionOffset = Point2F::Zero;
mLastCameraQuery.eyeOffset = Point3F::Zero;
mLastCameraQuery.ortho = false;
}
@ -192,6 +197,8 @@ void GuiTSCtrl::consoleInit()
{
Con::addVariable("$TSControl::frameCount", TypeS32, &smFrameCount, "The number of frames that have been rendered since this control was created.\n"
"@ingroup Rendering\n");
Con::addVariable("$TSControl::useLatestDisplayTransform", TypeBool, &smUseLatestDisplayTransform, "Use the latest view transform when rendering stereo instead of the one calculated by the last move.\n"
"@ingroup Rendering\n");
}
//-----------------------------------------------------------------------------
@ -206,6 +213,9 @@ bool GuiTSCtrl::onWake()
"GuiTSCtrl::onWake - This control is already in the awake list!" );
smAwakeTSCtrls.push_back( this );
// For VR
mLastCameraQuery.drawCanvas = getRoot();
return true;
}
@ -307,6 +317,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
// Save the current transforms so we can restore
// it for child control rendering below.
GFXTransformSaver saver;
bool renderingToTarget = false;
if(!processCameraQuery(&mLastCameraQuery))
{
@ -317,15 +328,52 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
return;
}
GFXTargetRef origTarget = GFX->getActiveRenderTarget();
// Set up the appropriate render style
U32 prevRenderStyle = GFX->getCurrentRenderStyle();
Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset();
Point3F prevEyeOffset = GFX->getStereoEyeOffset();
Point2I renderSize = getExtent();
if(mRenderStyle == RenderStyleStereoSideBySide)
{
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide);
GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset);
GFX->setStereoEyeOffset(mLastCameraQuery.eyeOffset);
GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
GFX->setFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
MatrixF myTransforms[2];
if (smUseLatestDisplayTransform)
{
// Use the view matrix determined from the display device
myTransforms[0] = mLastCameraQuery.eyeTransforms[0];
myTransforms[1] = mLastCameraQuery.eyeTransforms[1];
}
else
{
// Use the view matrix determined from the control object
myTransforms[0] = mLastCameraQuery.cameraMatrix;
myTransforms[1] = mLastCameraQuery.cameraMatrix;
QuatF qrot = mLastCameraQuery.cameraMatrix;
Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
Point3F rotEyePos;
myTransforms[0].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[0], &rotEyePos));
myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos));
}
GFX->setStereoEyeTransforms(myTransforms);
// Allow render size to originate from the render target
if (mLastCameraQuery.stereoTargets[0])
{
renderSize = mLastCameraQuery.stereoViewports[0].extent;
renderingToTarget = true;
}
}
else
{
@ -357,8 +405,8 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
// set up the camera and viewport stuff:
F32 wwidth;
F32 wheight;
F32 renderWidth = (mRenderStyle == RenderStyleStereoSideBySide) ? F32(getWidth())*0.5f : F32(getWidth());
F32 renderHeight = F32(getHeight());
F32 renderWidth = F32(renderSize.x);
F32 renderHeight = F32(renderSize.y);
F32 aspectRatio = renderWidth / renderHeight;
// Use the FOV to calculate the viewport height scale
@ -380,12 +428,8 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
Frustum frustum;
if(mRenderStyle == RenderStyleStereoSideBySide)
{
F32 left = 0.0f * hscale - wwidth;
F32 right = renderWidth * hscale - wwidth;
F32 top = wheight - vscale * 0.0f;
F32 bottom = wheight - vscale * renderHeight;
frustum.set( mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane );
// NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye.
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
}
else
{
@ -407,15 +451,24 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
RectI tempRect = updateRect;
#ifdef TORQUE_OS_MAC
Point2I screensize = getRoot()->getWindowSize();
tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
#endif
if (!renderingToTarget)
{
#ifdef TORQUE_OS_MAC
Point2I screensize = getRoot()->getWindowSize();
tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
#endif
GFX->setViewport( tempRect );
GFX->setViewport( tempRect );
}
else
{
// Activate stereo RT
GFX->activateStereoTarget(-1);
}
// Clear the zBuffer so GUI doesn't hose object rendering accidentally
GFX->clear( GFXClearZBuffer , ColorI(20,20,20), 1.0f, 0 );
//GFX->clear( GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
GFX->setFrustum( frustum );
if(mLastCameraQuery.ortho)
@ -427,7 +480,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
// We're going to be displaying this render at size of this control in
// pixels - let the scene know so that it can calculate e.g. reflections
// correctly for that final display result.
gClientSceneGraph->setDisplayTargetResolution(getExtent());
gClientSceneGraph->setDisplayTargetResolution(renderSize);
// Set the GFX world matrix to the world-to-camera transform, but don't
// change the cameraMatrix in mLastCameraQuery. This is because
@ -455,20 +508,121 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
renderWorld(updateRect);
DebugDrawer::get()->render();
// Render the canvas overlay if it's available
if (mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer())
{
GFXDEBUGEVENT_SCOPE( StereoGui_Render, ColorI( 255, 0, 0 ) );
MatrixF proj(1);
Frustum originalFrustum = GFX->getFrustum();
GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0);
const FovPort *currentFovPort = GFX->getSteroFovPort();
const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms();
const Point3F *eyeOffset = GFX->getStereoEyeOffsets();
for (U32 i=0; i<2; i++)
{
GFX->activateStereoTarget(i);
Frustum gfxFrustum = originalFrustum;
const F32 frustumDepth = gfxFrustum.getNearDist();
MathUtils::makeFovPortFrustum(&gfxFrustum, true, gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[i], eyeTransforms[i]);
GFX->setFrustum(gfxFrustum);
MatrixF eyeWorldTrans(1);
eyeWorldTrans.setPosition(Point3F(eyeOffset[i].x,eyeOffset[i].y,eyeOffset[i].z));
MatrixF eyeWorld(1);
eyeWorld.mul(eyeWorldTrans);
eyeWorld.inverse();
GFX->setWorldMatrix(eyeWorld);
GFX->setViewMatrix(MatrixF::Identity);
if (!mStereoOverlayVB.getPointer())
{
mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic);
GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4);
F32 texLeft = 0.0f;
F32 texRight = 1.0f;
F32 texTop = 1.0f;
F32 texBottom = 0.0f;
F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight();
F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH;
F32 rectHeight = rectWidth * rectRatio;
F32 screenLeft = -rectWidth * 0.5;
F32 screenRight = rectWidth * 0.5;
F32 screenTop = -rectHeight * 0.5;
F32 screenBottom = rectHeight * 0.5;
const F32 fillConv = 0.0f;
const F32 frustumDepth = gfxFrustum.getNearDist() + 0.012;
verts[0].point.set( screenLeft - fillConv, frustumDepth, screenTop - fillConv );
verts[1].point.set( screenRight - fillConv, frustumDepth, screenTop - fillConv );
verts[2].point.set( screenLeft - fillConv, frustumDepth, screenBottom - fillConv );
verts[3].point.set( screenRight - fillConv, frustumDepth, screenBottom - fillConv );
verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255);
verts[0].texCoord.set( texLeft, texTop );
verts[1].texCoord.set( texRight, texTop );
verts[2].texCoord.set( texLeft, texBottom );
verts[3].texCoord.set( texRight, texBottom );
mStereoOverlayVB.unlock();
}
if (!mStereoGuiSB.getPointer())
{
// DrawBitmapStretchSR
GFXStateBlockDesc bitmapStretchSR;
bitmapStretchSR.setCullMode(GFXCullNone);
bitmapStretchSR.setZReadWrite(false, false);
bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
bitmapStretchSR.samplersDefined = true;
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR);
}
GFX->setVertexBuffer(mStereoOverlayVB);
GFX->setStateBlock(mStereoGuiSB);
GFX->setTexture( 0, texObject );
GFX->setupGenericShaders( GFXDevice::GSModColorTexture );
GFX->drawPrimitive( GFXTriangleStrip, 0, 2 );
}
}
// Restore the previous matrix state before
// we begin rendering the child controls.
saver.restore();
// Restore the render style and any stereo parameters
GFX->setActiveRenderTarget(origTarget);
GFX->setCurrentRenderStyle(prevRenderStyle);
GFX->setCurrentProjectionOffset(prevProjectionOffset);
GFX->setStereoEyeOffset(prevEyeOffset);
if(mRenderStyle == RenderStyleStereoSideBySide && gLastStereoTexture)
{
GFX->setClipRect(updateRect);
GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect);
}
// Allow subclasses to render 2D elements.
GFX->setClipRect(updateRect);
renderGui( offset, updateRect );
renderChildControls(offset, updateRect);
if (shouldRenderChildControls())
{
renderChildControls(offset, updateRect);
}
smFrameCount++;
}
@ -499,6 +653,12 @@ void GuiTSCtrl::drawLineList( const Vector<Point3F> &points, const ColorI color,
drawLine( points[i], points[i+1], color, width );
}
void GuiTSCtrl::setStereoGui(GuiOffscreenCanvas *canvas)
{
mStereoGuiTarget = canvas ? canvas->getTarget() : NULL;
}
//=============================================================================
// Console Methods.
//=============================================================================
@ -547,3 +707,10 @@ DefineEngineMethod( GuiTSCtrl, calculateViewDistance, F32, ( F32 radius ),,
{
return object->calculateViewDistance( radius );
}
DefineEngineMethod( GuiTSCtrl, setStereoGui, void, ( GuiOffscreenCanvas* canvas ),,
"Sets the current stereo texture to an offscreen canvas\n"
"@param canvas The desired canvas." )
{
object->setStereoGui(canvas);
}


@ -30,16 +30,29 @@
#include "math/mMath.h"
#endif
#ifndef _MATTEXTURETARGET_H_
#include "materials/matTextureTarget.h"
#endif
class IDisplayDevice;
class GuiOffscreenCanvas;
struct CameraQuery
{
SimObject* object;
F32 nearPlane;
F32 farPlane;
F32 fov;
FovPort fovPort[2]; // fov for each eye
Point2F projectionOffset;
Point3F eyeOffset;
Point3F eyeOffset[2];
MatrixF eyeTransforms[2];
bool ortho;
MatrixF cameraMatrix;
RectI stereoViewports[2]; // destination viewports
GFXTextureTarget* stereoTargets[2];
GuiCanvas* drawCanvas; // Canvas we are drawing to. Needed for VR
};
/// Abstract base class for 3D viewport GUIs.
@ -50,11 +63,12 @@ class GuiTSCtrl : public GuiContainer
public:
enum RenderStyles {
RenderStyleStandard = 0,
RenderStyleStereoSideBySide = (1<<0),
RenderStyleStereoSideBySide = (1<<0)
};
protected:
static U32 smFrameCount;
static bool smUseLatestDisplayTransform;
F32 mCameraZRot;
F32 mForceFOV;
@ -83,7 +97,11 @@ protected:
/// The last camera query set in onRender.
/// @see getLastCameraQuery
CameraQuery mLastCameraQuery;
CameraQuery mLastCameraQuery;
NamedTexTargetRef mStereoGuiTarget;
GFXVertexBufferHandle<GFXVertexPCT> mStereoOverlayVB;
GFXStateBlockRef mStereoGuiSB;
public:
@ -155,6 +173,10 @@ public:
static const U32& getFrameCount() { return smFrameCount; }
bool shouldRenderChildControls() { return mRenderStyle == RenderStyleStandard; }
void setStereoGui(GuiOffscreenCanvas *canvas);
DECLARE_CONOBJECT(GuiTSCtrl);
DECLARE_CATEGORY( "Gui 3D" );
DECLARE_DESCRIPTION( "Abstract base class for controls that render a 3D viewport." );


@ -36,6 +36,7 @@
#include "gfx/video/videoCapture.h"
#include "lighting/lightManager.h"
#include "core/strings/stringUnit.h"
#include "gui/core/guiOffscreenCanvas.h"
#ifndef TORQUE_TGB_ONLY
#include "scene/sceneObject.h"
@ -126,7 +127,8 @@ GuiCanvas::GuiCanvas(): GuiControl(),
mMouseDownPoint(0.0f,0.0f),
mPlatformWindow(NULL),
mLastRenderMs(0),
mDisplayWindow(true)
mDisplayWindow(true),
mMenuBarCtrl(NULL)
{
setBounds(0, 0, 640, 480);
mAwake = true;
@ -508,6 +510,55 @@ bool GuiCanvas::isCursorShown()
return mPlatformWindow->isCursorVisible();
}
void GuiCanvas::cursorClick(S32 buttonId, bool isDown)
{
InputEventInfo inputEvent;
inputEvent.deviceType = MouseDeviceType;
inputEvent.deviceInst = 0;
inputEvent.objType = SI_BUTTON;
inputEvent.objInst = (InputObjectInstances)(KEY_BUTTON0 + buttonId);
inputEvent.modifier = (InputModifiers)0;
inputEvent.ascii = 0;
inputEvent.action = isDown ? SI_MAKE : SI_BREAK;
inputEvent.fValue = isDown ? 1.0 : 0.0;
processMouseEvent(inputEvent);
}
void GuiCanvas::cursorNudge(F32 x, F32 y)
{
// Generate a base movement-along-an-axis event
InputEventInfo inputEvent;
inputEvent.deviceType = MouseDeviceType;
inputEvent.deviceInst = 0;
inputEvent.objType = SI_AXIS;
inputEvent.modifier = (InputModifiers)0;
inputEvent.ascii = 0;
// Generate delta movement along each axis
Point2F cursDelta(x, y);
// If X axis changed, generate a relative event
if(mFabs(cursDelta.x) > 0.1)
{
inputEvent.objInst = SI_XAXIS;
inputEvent.action = SI_MOVE;
inputEvent.fValue = cursDelta.x;
processMouseEvent(inputEvent);
}
// If Y axis changed, generate a relative event
if(mFabs(cursDelta.y) > 0.1)
{
inputEvent.objInst = SI_YAXIS;
inputEvent.action = SI_MOVE;
inputEvent.fValue = cursDelta.y;
processMouseEvent(inputEvent);
}
processMouseEvent(inputEvent);
}
void GuiCanvas::addAcceleratorKey(GuiControl *ctrl, U32 index, U32 keyCode, U32 modifier)
{
if (keyCode > 0 && ctrl)
@ -708,14 +759,22 @@ bool GuiCanvas::processMouseEvent(InputEventInfo &inputEvent)
//
// 'mCursorPt' is basically an accumulation of errors, and the bugs that have cropped up with
// the GUI clicking where it is not supposed to are probably all down to this.
// Need to query platform for specific things
AssertISV(mPlatformWindow, "GuiCanvas::processMouseEvent - no window present!");
PlatformCursorController *pController = mPlatformWindow->getCursorController();
AssertFatal(pController != NULL, "GuiCanvas::processInputEvent - No Platform Controller Found")
//copy the modifier into the new event
mLastEvent.modifier = inputEvent.modifier;
S32 mouseDoubleClickWidth = 12;
S32 mouseDoubleClickHeight = 12;
U32 mouseDoubleClickTime = 500;
// Query platform for mouse info if its available
PlatformCursorController *pController = mPlatformWindow ? mPlatformWindow->getCursorController() : NULL;
if (pController)
{
mouseDoubleClickWidth = pController->getDoubleClickWidth();
mouseDoubleClickHeight = pController->getDoubleClickHeight();
mouseDoubleClickTime = pController->getDoubleClickTime();
}
//copy the modifier into the new event
mLastEvent.modifier = inputEvent.modifier;
if(inputEvent.objType == SI_AXIS &&
(inputEvent.objInst == SI_XAXIS || inputEvent.objInst == SI_YAXIS))
@ -747,7 +806,7 @@ bool GuiCanvas::processMouseEvent(InputEventInfo &inputEvent)
// moving too much.
Point2F movement = mMouseDownPoint - mCursorPt;
if ((mAbs((S32)movement.x) > pController->getDoubleClickWidth()) || (mAbs((S32)movement.y) > pController->getDoubleClickHeight() ) )
if ((mAbs((S32)movement.x) > mouseDoubleClickWidth) || (mAbs((S32)movement.y) > mouseDoubleClickHeight ) )
{
mLeftMouseLast = false;
mMiddleMouseLast = false;
@ -799,7 +858,7 @@ bool GuiCanvas::processMouseEvent(InputEventInfo &inputEvent)
if (mLeftMouseLast)
{
//if it was within the double click time count the clicks
if (curTime - mLastMouseDownTime <= pController->getDoubleClickTime())
if (curTime - mLastMouseDownTime <= mouseDoubleClickTime)
mLastMouseClickCount++;
else
mLastMouseClickCount = 1;
@ -833,7 +892,7 @@ bool GuiCanvas::processMouseEvent(InputEventInfo &inputEvent)
if (mRightMouseLast)
{
//if it was within the double click time count the clicks
if (curTime - mLastMouseDownTime <= pController->getDoubleClickTime())
if (curTime - mLastMouseDownTime <= mouseDoubleClickTime)
mLastMouseClickCount++;
else
mLastMouseClickCount = 1;
@ -864,7 +923,7 @@ bool GuiCanvas::processMouseEvent(InputEventInfo &inputEvent)
if (mMiddleMouseLast)
{
//if it was within the double click time count the clicks
if (curTime - mLastMouseDownTime <= pController->getDoubleClickTime())
if (curTime - mLastMouseDownTime <= mouseDoubleClickTime)
mLastMouseClickCount++;
else
mLastMouseClickCount = 1;
@ -1768,6 +1827,21 @@ void GuiCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = true */)
PROFILE_END();
// Render all offscreen canvas objects here since we may need them in the render loop
if (GuiOffscreenCanvas::sList.size() != 0)
{
// Reset the entire state since the Oculus render path will have trashed it.
GFX->disableShaders(true);
GFX->updateStates(true);
for (Vector<GuiOffscreenCanvas*>::iterator itr = GuiOffscreenCanvas::sList.begin(); itr != GuiOffscreenCanvas::sList.end(); itr++)
{
(*itr)->renderFrame(false, false);
}
GFX->setActiveRenderTarget(renderTarget);
}
// Can't render if waiting for device to reset.
if ( !beginSceneRes )
{
@ -1907,7 +1981,8 @@ void GuiCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = true */)
PROFILE_START(GFXEndScene);
GFX->endScene();
PROFILE_END();
GFX->getDeviceEventSignal().trigger( GFXDevice::dePostFrame );
swapBuffers();
GuiCanvas::getGuiCanvasFrameSignal().trigger(false);
@ -2761,3 +2836,16 @@ ConsoleMethod( GuiCanvas, hideWindow, void, 2, 2, "" )
WindowManager->setDisplayWindow(false);
object->getPlatformWindow()->setDisplayWindow(false);
}
ConsoleMethod( GuiCanvas, cursorClick, void, 4, 4, "button, isDown" )
{
const S32 buttonId = dAtoi(argv[2]);
const bool isDown = dAtob(argv[3]);
object->cursorClick(buttonId, isDown);
}
ConsoleMethod( GuiCanvas, cursorNudge, void, 4, 4, "x, y" )
{
object->cursorNudge(dAtof(argv[2]), dAtof(argv[3]));
}
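
A hedged sketch of how the new cursor hooks might be used to drive a pointer on the offscreen GUI. The forwardVRPointer() helper and the input mapping are assumptions; only cursorNudge() and cursorClick() come from this commit.

#include "gui/core/guiOffscreenCanvas.h"

// Sketch: forward tracked-controller (or gamepad) input to an offscreen canvas
// cursor. dx/dy and the trigger state come from whatever input source the game uses.
void forwardVRPointer(GuiOffscreenCanvas *vrCanvas, F32 dx, F32 dy,
                      bool triggerDown, bool wasTriggerDown)
{
   if (dx != 0.0f || dy != 0.0f)
      vrCanvas->cursorNudge(dx, dy);           // relative cursor movement

   if (triggerDown != wasTriggerDown)
      vrCanvas->cursorClick(0, triggerDown);   // button 0 maps to KEY_BUTTON0 (left click)
}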


@ -331,6 +331,10 @@ public:
/// Returns true if the cursor is being rendered.
virtual bool isCursorShown();
void cursorClick(S32 buttonId, bool isDown);
void cursorNudge(F32 x, F32 y);
/// @}
///used by the tooltip resource

View file

@ -2380,7 +2380,8 @@ void GuiControl::getCursor(GuiCursor *&cursor, bool &showCursor, const GuiEvent
// so set it back before we change it again.
PlatformWindow *pWindow = static_cast<GuiCanvas*>(getRoot())->getPlatformWindow();
AssertFatal(pWindow != NULL,"GuiControl without owning platform window! This should not be possible.");
if (!pWindow)
return;
PlatformCursorController *pController = pWindow->getCursorController();
AssertFatal(pController != NULL,"PlatformWindow without an owned CursorController!");


@ -0,0 +1,273 @@
#include "gui/core/guiOffscreenCanvas.h"
#include "gfx/gfxDrawUtil.h"
#include "gfx/gfxTextureManager.h"
#include "gfx/gfxAPI.h"
#include "gfx/gfxDebugEvent.h"
#include "console/consoleTypes.h"
#include "console/console.h"
IMPLEMENT_CONOBJECT(GuiOffscreenCanvas);
Vector<GuiOffscreenCanvas*> GuiOffscreenCanvas::sList;
GuiOffscreenCanvas::GuiOffscreenCanvas()
{
mTargetFormat = GFXFormatR8G8B8A8;
mTargetSize = Point2I(256,256);
mTargetName = "offscreenCanvas";
mTargetDirty = true;
mDynamicTarget = false;
}
GuiOffscreenCanvas::~GuiOffscreenCanvas()
{
}
void GuiOffscreenCanvas::initPersistFields()
{
addField( "targetSize", TypePoint2I, Offset( mTargetSize, GuiOffscreenCanvas ),"" );
addField( "targetFormat", TypeGFXFormat, Offset( mTargetFormat, GuiOffscreenCanvas ), "");
addField( "targetName", TypeRealString, Offset( mTargetName, GuiOffscreenCanvas ), "");
addField( "dynamicTarget", TypeBool, Offset( mDynamicTarget, GuiOffscreenCanvas ), "");
Parent::initPersistFields();
}
bool GuiOffscreenCanvas::onAdd()
{
if (GuiControl::onAdd()) // jamesu - skip GuiCanvas::onAdd since it performs GFX setup an offscreen canvas must not do
{
// ensure that we have a cursor
setCursor(dynamic_cast<GuiCursor*>(Sim::findObject("DefaultCursor")));
mRenderFront = true;
sList.push_back(this);
//Con::printf("Registering target %s...", mTargetName.c_str());
mNamedTarget.registerWithName( mTargetName );
_setupTargets();
GFXTextureManager::addEventDelegate( this, &GuiOffscreenCanvas::_onTextureEvent );
return true;
}
return false;
}
void GuiOffscreenCanvas::onRemove()
{
GFXTextureManager::removeEventDelegate( this, &GuiOffscreenCanvas::_onTextureEvent );
_teardownTargets();
U32 idx = sList.find_next(this);
if (idx != (U32)-1)
{
sList.erase(idx);
}
mTarget = NULL;
mTargetTexture = NULL;
Parent::onRemove();
}
void GuiOffscreenCanvas::_setupTargets()
{
_teardownTargets();
if (!mTarget.isValid())
{
mTarget = GFX->allocRenderToTextureTarget();
}
// Update color
if (!mTargetTexture.isValid() || mTargetSize != mTargetTexture.getWidthHeight())
{
mTargetTexture.set( mTargetSize.x, mTargetSize.y, mTargetFormat, &GFXDefaultRenderTargetProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ), 1, 0 );
}
mTarget->attachTexture( GFXTextureTarget::RenderSlot(GFXTextureTarget::Color0), mTargetTexture );
mNamedTarget.setTexture(0, mTargetTexture);
}
void GuiOffscreenCanvas::_teardownTargets()
{
mNamedTarget.release();
mTargetTexture = NULL;
mTargetDirty = true;
}
void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = true */)
{
if (!mTargetDirty)
return;
#ifdef TORQUE_ENABLE_GFXDEBUGEVENTS
char buf[256];
dSprintf(buf, sizeof(buf), "OffscreenCanvas %s", getName() ? getName() : getIdString());
GFXDEBUGEVENT_SCOPE_EX(GuiOffscreenCanvas_renderFrame, ColorI::GREEN, buf);
#endif
PROFILE_START(OffscreenCanvasPreRender);
#ifdef TORQUE_GFX_STATE_DEBUG
GFX->getDebugStateManager()->startFrame();
#endif
if (mTarget->getSize() != mTargetSize)
{
_setupTargets();
mNamedTarget.setViewport( RectI( Point2I::Zero, mTargetSize ) );
}
// Make sure the root control is the size of the canvas.
Point2I size = mTarget->getSize();
if(size.x == 0 || size.y == 0)
{
PROFILE_END();
return;
}
RectI screenRect(0, 0, size.x, size.y);
maintainSizing();
//preRender (recursive) all controls
preRender();
PROFILE_END();
// Are we just doing pre-render?
if(preRenderOnly)
{
return;
}
resetUpdateRegions();
PROFILE_START(OffscreenCanvasRenderControls);
GuiCursor *mouseCursor = NULL;
bool cursorVisible = true;
Point2I cursorPos((S32)mCursorPt.x, (S32)mCursorPt.y);
mouseCursor = mDefaultCursor;
mLastCursorEnabled = cursorVisible;
mLastCursor = mouseCursor;
mLastCursorPt = cursorPos;
// Set active target
GFX->pushActiveRenderTarget();
GFX->setActiveRenderTarget(mTarget);
// Clear the current viewport area
GFX->setViewport( screenRect );
GFX->clear( GFXClearTarget, ColorF(0,0,0,0), 1.0f, 0 );
resetUpdateRegions();
// Make sure we have a clean matrix state
// before we start rendering anything!
GFX->setWorldMatrix( MatrixF::Identity );
GFX->setViewMatrix( MatrixF::Identity );
GFX->setProjectionMatrix( MatrixF::Identity );
RectI contentRect(Point2I(0,0), mTargetSize);
{
// Render active GUI Dialogs
for(iterator i = begin(); i != end(); i++)
{
// Get the control
GuiControl *contentCtrl = static_cast<GuiControl*>(*i);
GFX->setClipRect( contentRect );
GFX->setStateBlock(mDefaultGuiSB);
contentCtrl->onRender(contentCtrl->getPosition(), contentRect);
}
// Fill Blue if no Dialogs
if(this->size() == 0)
GFX->clear( GFXClearTarget, ColorF(0,0,1,1), 1.0f, 0 );
GFX->setClipRect( contentRect );
// Draw cursor
//
if (mCursorEnabled && mouseCursor && mShowCursor)
{
Point2I pos((S32)mCursorPt.x, (S32)mCursorPt.y);
Point2I spot = mouseCursor->getHotSpot();
pos -= spot;
mouseCursor->render(pos);
}
GFX->getDrawUtil()->clearBitmapModulation();
}
mTarget->resolve();
GFX->popActiveRenderTarget();
PROFILE_END();
// Keep track of the last time we rendered.
mLastRenderMs = Platform::getRealMilliseconds();
mTargetDirty = mDynamicTarget;
}
Point2I GuiOffscreenCanvas::getWindowSize()
{
return mTargetSize;
}
Point2I GuiOffscreenCanvas::getCursorPos()
{
return Point2I(mCursorPt.x, mCursorPt.y);
}
void GuiOffscreenCanvas::setCursorPos(const Point2I &pt)
{
mCursorPt.x = F32( pt.x );
mCursorPt.y = F32( pt.y );
}
void GuiOffscreenCanvas::showCursor(bool state)
{
mShowCursor = state;
}
bool GuiOffscreenCanvas::isCursorShown()
{
return mShowCursor;
}
void GuiOffscreenCanvas::_onTextureEvent( GFXTexCallbackCode code )
{
switch(code)
{
case GFXZombify:
_teardownTargets();
break;
case GFXResurrect:
_setupTargets();
break;
}
}
DefineEngineMethod(GuiOffscreenCanvas, resetTarget, void, (), , "")
{
object->_setupTargets();
}
DefineEngineMethod(GuiOffscreenCanvas, markDirty, void, (), , "")
{
object->markDirty();
}
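
A hypothetical usage note: with dynamicTarget left false the canvas re-renders only when something marks it dirty (renderFrame() early-outs otherwise), so GUI code that changes overlay content should call markDirty(). The updateVRScore() helper and the GuiTextCtrl are illustrative, not part of this commit.

#include "gui/core/guiOffscreenCanvas.h"
#include "gui/controls/guiTextCtrl.h"

// Sketch: change a control that lives on the offscreen canvas, then flag the
// canvas so its texture is redrawn on the next frame.
void updateVRScore(GuiOffscreenCanvas *vrCanvas, GuiTextCtrl *scoreText, S32 newScore)
{
   char buf[16];
   dSprintf(buf, sizeof(buf), "%d", newScore);
   scoreText->setText(buf);
   vrCanvas->markDirty();
}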


@ -0,0 +1,63 @@
#ifndef _GUIOFFSCREENCANVAS_H_
#define _GUIOFFSCREENCANVAS_H_
#include "math/mMath.h"
#include "gui/core/guiCanvas.h"
#include "core/util/tVector.h"
#ifndef _MATTEXTURETARGET_H_
#include "materials/matTextureTarget.h"
#endif
class GuiTextureDebug;
class GuiOffscreenCanvas : public GuiCanvas
{
public:
typedef GuiCanvas Parent;
GuiOffscreenCanvas();
~GuiOffscreenCanvas();
bool onAdd();
void onRemove();
void renderFrame(bool preRenderOnly, bool bufferSwap);
Point2I getWindowSize();
Point2I getCursorPos();
void setCursorPos(const Point2I &pt);
void showCursor(bool state);
bool isCursorShown();
void _onTextureEvent( GFXTexCallbackCode code );
void _setupTargets();
void _teardownTargets();
NamedTexTargetRef getTarget() { return &mNamedTarget; }
void markDirty() { mTargetDirty = true; }
static void initPersistFields();
DECLARE_CONOBJECT(GuiOffscreenCanvas);
protected:
GFXTextureTargetRef mTarget;
NamedTexTarget mNamedTarget;
GFXTexHandle mTargetTexture;
GFXFormat mTargetFormat;
Point2I mTargetSize;
String mTargetName;
bool mTargetDirty;
bool mDynamicTarget;
public:
static Vector<GuiOffscreenCanvas*> sList;
};
#endif


@ -21,6 +21,7 @@
//-----------------------------------------------------------------------------
#include "platform/platform.h"
#include "math/util/frustum.h"
#include "math/mathUtils.h"
#include "math/mMath.h"
@ -1409,6 +1410,29 @@ void makeProjection( MatrixF *outMatrix,
//-----------------------------------------------------------------------------
void makeFovPortFrustum(
Frustum *outFrustum,
bool isOrtho,
F32 nearDist,
F32 farDist,
const FovPort &inPort,
const MatrixF &transform)
{
F32 leftSize = nearDist * inPort.leftTan;
F32 rightSize = nearDist * inPort.rightTan;
F32 upSize = nearDist * inPort.upTan;
F32 downSize = nearDist * inPort.downTan;
F32 left = -leftSize;
F32 right = rightSize;
F32 top = upSize;
F32 bottom = -downSize;
outFrustum->set(isOrtho, left, right, top, bottom, nearDist, farDist, transform);
}
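
For reference, a hypothetical helper (not part of this commit) showing how a plain symmetric vertical FOV maps onto the FovPort tangents this function consumes; the asymmetric per-eye ports used in practice come from the display device.

// Assumed illustration: tangents of the four half-angles for a symmetric frustum.
FovPort makeSymmetricFovPort(F32 fovYRadians, F32 aspectRatio)
{
   FovPort port;
   port.upTan    = mTan(fovYRadians * 0.5f);
   port.downTan  = port.upTan;
   port.leftTan  = port.upTan * aspectRatio;
   port.rightTan = port.leftTan;
   return port;
}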
//-----------------------------------------------------------------------------
/// This is the special rotation matrix applied to
/// projection matricies for GFX.
///

View file

@ -39,6 +39,10 @@
#include "core/util/tVector.h"
#endif
#ifndef _MATHUTIL_FRUSTUM_H_
#include "math/util/frustum.h"
#endif
class Box3F;
class RectI;
@ -326,6 +330,13 @@ namespace MathUtils
F32 aspectRatio,
F32 nearPlane );
void makeFovPortFrustum( Frustum *outFrustum,
bool isOrtho,
F32 nearDist,
F32 farDist,
const FovPort &inPort,
const MatrixF &transform = MatrixF(1) );
/// Build a GFX projection matrix from the frustum parameters
/// including the optional rotation required by GFX.
void makeProjection( MatrixF *outMatrix,


@ -28,6 +28,8 @@
#include "math/mSphere.h"
#include "platform/profiler.h"
static const MatrixF sGFXProjRotMatrix( EulerF( (M_PI_F / 2.0f), 0.0f, 0.0f ) );
//TODO: For OBB/frustum intersections and ortho frustums, we can resort to a much quicker AABB/OBB test
@ -174,7 +176,7 @@ void Frustum::set( const MatrixF &projMat, bool normalize )
mPlanes[ i ].normalize();
}
/* // Create the corner points via plane intersections.
/*// Create the corner points via plane intersections.
mPlanes[ PlaneNear ].intersect( mPlanes[ PlaneTop ], mPlanes[ PlaneLeft ], &mPoints[ NearTopLeft ] );
mPlanes[ PlaneNear ].intersect( mPlanes[ PlaneTop ], mPlanes[ PlaneRight ], &mPoints[ NearTopRight ] );
mPlanes[ PlaneNear ].intersect( mPlanes[ PlaneBottom ], mPlanes[ PlaneLeft ], &mPoints[ NearBottomLeft ] );


@ -53,6 +53,14 @@
class OrientedBox3F;
/// Advanced FOV specification for Oculus (tangents of the four frustum half-angles)
struct FovPort
{
float upTan;
float downTan;
float leftTan;
float rightTan;
};
/// Polyhedron data for use by frustums. Uses fixed-size vectors
/// and a static vector for the edge list as that never changes


@ -1,199 +0,0 @@
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/input/oculusVR/barrelDistortionPostEffect.h"
#include "console/consoleTypes.h"
#include "console/engineAPI.h"
#include "gfx/gfxDevice.h"
#include "platform/input/oculusVR/oculusVRDevice.h"
extern bool gEditingMission;
ConsoleDocClass( BarrelDistortionPostEffect,
"@brief A fullscreen shader effect used with the Oculus Rift.\n\n"
"@section PFXTextureIdentifiers\n\n"
"@ingroup Rendering\n"
);
IMPLEMENT_CONOBJECT(BarrelDistortionPostEffect);
BarrelDistortionPostEffect::BarrelDistortionPostEffect()
: PostEffect(),
mHmdWarpParamSC(NULL),
mHmdChromaAbSC(NULL),
mScaleSC(NULL),
mScaleInSC(NULL),
mLensCenterSC(NULL),
mScreenCenterSC(NULL)
{
mHMDIndex = 0;
mSensorIndex = 0;
mScaleOutput = 1.0f;
}
BarrelDistortionPostEffect::~BarrelDistortionPostEffect()
{
}
void BarrelDistortionPostEffect::initPersistFields()
{
addField( "hmdIndex", TypeS32, Offset( mHMDIndex, BarrelDistortionPostEffect ),
"Oculus VR HMD index to reference." );
addField( "sensorIndex", TypeS32, Offset( mSensorIndex, BarrelDistortionPostEffect ),
"Oculus VR sensor index to reference." );
addField( "scaleOutput", TypeF32, Offset( mScaleOutput, BarrelDistortionPostEffect ),
"Used to increase the size of the window into the world at the expense of apparent resolution." );
Parent::initPersistFields();
}
bool BarrelDistortionPostEffect::onAdd()
{
if( !Parent::onAdd() )
return false;
return true;
}
void BarrelDistortionPostEffect::onRemove()
{
Parent::onRemove();
}
void BarrelDistortionPostEffect::_setupConstants( const SceneRenderState *state )
{
// Test if setup is required before calling the parent method as the parent method
// will set up the shader constants buffer for us.
bool setupRequired = mShaderConsts.isNull();
Parent::_setupConstants(state);
// Define the shader constants
if(setupRequired)
{
mHmdWarpParamSC = mShader->getShaderConstHandle( "$HmdWarpParam" );
mHmdChromaAbSC = mShader->getShaderConstHandle( "$HmdChromaAbParam" );
mScaleSC = mShader->getShaderConstHandle( "$Scale" );
mScaleInSC = mShader->getShaderConstHandle( "$ScaleIn" );
mLensCenterSC = mShader->getShaderConstHandle( "$LensCenter" );
mScreenCenterSC = mShader->getShaderConstHandle( "$ScreenCenter" );
}
const Point2I &resolution = GFX->getActiveRenderTarget()->getSize();
F32 widthScale = 0.5f;
F32 heightScale = 1.0f;
F32 aspectRatio = (resolution.x * 0.5f) / resolution.y;
// Set up the HMD dependant shader constants
if(ManagedSingleton<OculusVRDevice>::instanceOrNull() && OCULUSVRDEV->getHMDDevice(mHMDIndex))
{
const OculusVRHMDDevice* hmd = OCULUSVRDEV->getHMDDevice(mHMDIndex);
if(mHmdWarpParamSC->isValid())
{
const Point4F& distortion = hmd->getKDistortion();
mShaderConsts->set( mHmdWarpParamSC, distortion );
}
if(mHmdChromaAbSC->isValid())
{
const Point4F& correction = hmd->getChromaticAbCorrection();
mShaderConsts->set( mHmdChromaAbSC, correction );
}
if(mScaleSC->isValid())
{
F32 scaleFactor = hmd->getDistortionScale();
if(!mIsZero(mScaleOutput))
{
scaleFactor /= mScaleOutput;
}
Point2F scale;
scale.x = widthScale * 0.5f * scaleFactor;
scale.y = heightScale * 0.5f * scaleFactor * aspectRatio;
mShaderConsts->set( mScaleSC, scale );
}
if(mLensCenterSC->isValid())
{
F32 xCenterOffset = hmd->getCenterOffset();
Point3F lensCenter;
lensCenter.x = (widthScale + xCenterOffset * 0.5f) * 0.5f;
lensCenter.y = (widthScale - xCenterOffset * 0.5f) * 0.5f;
lensCenter.z = heightScale * 0.5f;
mShaderConsts->set( mLensCenterSC, lensCenter );
}
}
else
{
if(mHmdWarpParamSC->isValid())
{
mShaderConsts->set( mHmdWarpParamSC, Point4F(0.0f, 0.0f, 0.0f, 0.0f) );
}
if(mHmdChromaAbSC->isValid())
{
mShaderConsts->set( mHmdChromaAbSC, Point4F(1.0f, 0.0f, 1.0f, 0.0f) );
}
if(mScaleSC->isValid())
{
mShaderConsts->set( mScaleSC, Point2F(1.0f, 1.0f) );
}
if(mLensCenterSC->isValid())
{
Point3F lensCenter;
lensCenter.x = widthScale * 0.5f;
lensCenter.y = widthScale * 0.5f;
lensCenter.z = heightScale * 0.5f;
mShaderConsts->set( mLensCenterSC, lensCenter );
}
}
if(mScaleInSC->isValid())
{
Point2F scaleIn;
scaleIn.x = 2.0f / widthScale;
scaleIn.y = 2.0f / heightScale / aspectRatio;
mShaderConsts->set( mScaleInSC, scaleIn );
}
if(mScreenCenterSC->isValid())
{
mShaderConsts->set( mScreenCenterSC, Point2F(widthScale * 0.5f, heightScale * 0.5f) );
}
}
void BarrelDistortionPostEffect::process(const SceneRenderState *state, GFXTexHandle &inOutTex, const RectI *inTexViewport)
{
// Don't draw the post effect if the editor is active
if(gEditingMission)
return;
Parent::process(state, inOutTex, inTexViewport);
}


@ -1,69 +0,0 @@
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#ifndef _BARRELDISTORTIONPOSTEFFECT_H_
#define _BARRELDISTORTIONPOSTEFFECT_H_
#include "postFx/postEffect.h"
class BarrelDistortionPostEffect : public PostEffect
{
typedef PostEffect Parent;
protected:
GFXShaderConstHandle *mHmdWarpParamSC;
GFXShaderConstHandle *mHmdChromaAbSC;
GFXShaderConstHandle *mScaleSC;
GFXShaderConstHandle *mScaleInSC;
GFXShaderConstHandle *mLensCenterSC;
GFXShaderConstHandle *mScreenCenterSC;
// Oculus VR HMD index to reference
S32 mHMDIndex;
// Oculus VR sensor index to reference
S32 mSensorIndex;
// Used to increase the size of the window into the world at the
// expense of apparent resolution.
F32 mScaleOutput;
protected:
virtual void _setupConstants( const SceneRenderState *state );
public:
BarrelDistortionPostEffect();
virtual ~BarrelDistortionPostEffect();
DECLARE_CONOBJECT(BarrelDistortionPostEffect);
// SimObject
virtual bool onAdd();
virtual void onRemove();
static void initPersistFields();
virtual void process( const SceneRenderState *state,
GFXTexHandle &inOutTex,
const RectI *inTexViewport = NULL );
};
#endif // _BARRELDISTORTIONPOSTEFFECT_H_

File diff suppressed because it is too large

View file

@ -32,7 +32,8 @@
#include "core/util/tSingleton.h"
#include "math/mQuat.h"
#include "math/mPoint4.h"
#include "OVR.h"
#include "gfx/gfxDevice.h"
#include "OVR_CAPI_0_5_0.h"
#define DEFAULT_RIFT_UNIT 0
@ -44,13 +45,10 @@ public:
// If no HMD is present, simulate one being available
static bool smSimulateHMD;
// Use the chromatic aberration correction version of the barrel
// distortion shader.
static bool smUseChromaticAberrationCorrection;
// Type of rotation events to broadcast
static bool smGenerateAngleAxisRotationEvents;
static bool smGenerateEulerRotationEvents;
static bool smGeneratePositionEvents;
// Broadcast sensor rotation as axis
static bool smGenerateRotationAsAxisEvents;
@ -66,37 +64,24 @@ public:
// should be buffered.
static bool smGenerateWholeFrameEvents;
/// Determines desired pixel density for render target
static F32 smDesiredPixelDensity;
/// Determines whether the window is moved to the Oculus display
static bool smWindowDebug;
static F32 smPositionTrackingScale;
protected:
class DeviceListener : public OVR::MessageHandler
{
protected:
OculusVRDevice* mOwner;
public:
DeviceListener(OculusVRDevice* owner) { mOwner = owner; }
virtual ~DeviceListener() { mOwner = NULL; }
virtual void OnMessage(const OVR::Message&);
};
// Our OVR SDK device listener class
DeviceListener* mListener;
// The OVR SDK device manager
OVR::DeviceManager* mDeviceManager;
// Discovered HMD devices
Vector<OculusVRHMDDevice*> mHMDDevices;
// Discovered sensor devices
Vector<OculusVRSensorDevice*> mSensorDevices;
/// Is the device active
bool mActive;
// Should the input texture into the HMD (the render target that the scene has been
// rendered to) be scaled according to the HMD's distortion calculation?
bool mScaleInputTexture;
/// Which HMD is the active one
U32 mActiveDeviceId;
protected:
void cleanUp();
@ -105,14 +90,10 @@ protected:
/// Input Event Manager
void buildCodeTable();
void addHMDDevice(OVR::HMDDevice* hmd);
void addHMDDevice(ovrHmd hmd);
void createSimulatedHMD();
void addSensorDevice(OVR::SensorDevice* sensor);
void createSimulatedSensor();
public:
OculusVRDevice();
~OculusVRDevice();
@ -128,36 +109,50 @@ public:
bool process();
// IDisplayDevice
virtual bool providesYFOV() const;
virtual F32 getYFOV() const;
virtual bool providesEyeOffset() const;
virtual const Point3F& getEyeOffset() const;
virtual bool providesFrameEyePose() const;
virtual void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
virtual bool providesEyeOffsets() const;
virtual void getEyeOffsets(Point3F *dest) const;
virtual bool providesFovPorts() const;
virtual void getFovPorts(FovPort *out) const;
virtual bool providesProjectionOffset() const;
virtual const Point2F& getProjectionOffset() const;
virtual void getStereoViewports(RectI *out) const;
virtual void getStereoTargets(GFXTextureTarget **out) const;
virtual void onStartFrame();
// HMDs
U32 getHMDCount() const { return mHMDDevices.size(); }
const OculusVRHMDDevice* getHMDDevice(U32 index) const;
OculusVRHMDDevice* getHMDDevice(U32 index) const;
F32 getHMDCurrentIPD(U32 index);
void setHMDCurrentIPD(U32 index, F32 ipd);
// Sensors
U32 getSensorCount() const { return mSensorDevices.size(); }
U32 getSensorCount() const { return mHMDDevices.size(); }
const OculusVRSensorDevice* getSensorDevice(U32 index) const;
EulerF getSensorEulerRotation(U32 index);
VectorF getSensorAcceleration(U32 index);
EulerF getSensorAngularVelocity(U32 index);
VectorF getSensorMagnetometer(U32 index);
F32 getSensorPredictionTime(U32 index);
void setSensorPredictionTime(U32 index, F32 dt);
void setAllSensorPredictionTime(F32 dt);
bool getSensorGravityCorrection(U32 index);
void setSensorGravityCorrection(U32 index, bool state);
bool getSensorYawCorrection(U32 index);
void setSensorYawCorrection(U32 index, bool state);
bool getSensorMagnetometerCalibrated(U32 index);
void setOptimalDisplaySize(U32 idx, GuiCanvas *canvas);
void resetAllSensors();
bool isDiplayingWarning();
void dismissWarning();
String dumpMetrics(U32 idx);
void setDrawCanvas(GuiCanvas *canvas);
virtual void setCurrentConnection(GameConnection *connection);
virtual GameConnection* getCurrentConnection();
bool _handleDeviceEvent( GFXDevice::GFXDeviceEventType evt );
public:
// For ManagedSingleton.
static const char* getSingletonName() { return "OculusVRDevice"; }

View file

@ -21,12 +21,45 @@
//-----------------------------------------------------------------------------
#include "platform/input/oculusVR/oculusVRHMDDevice.h"
#include "platform/input/oculusVR/oculusVRDevice.h"
#include "platform/input/oculusVR/oculusVRSensorDevice.h"
#include "postFx/postEffectCommon.h"
#include "gui/core/guiCanvas.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
OculusVRHMDDevice::OculusVRHMDDevice()
#include "gfx/D3D9/gfxD3D9Device.h"
// Use D3D9 for win32
#ifdef TORQUE_OS_WIN
#define OVR_D3D_VERSION 9
#include "OVR_CAPI_D3D.h"
#define OCULUS_USE_D3D
#else
#include "OVR_CAPI_GL.h"
#define OCULUS_USE_GL
#endif
extern GFXTextureObject *gLastStereoTexture;
OculusVRHMDDevice::OculusVRHMDDevice() :
mWindowSize(1280,800)
{
mIsValid = false;
mIsSimulation = false;
mDevice = NULL;
mSupportedDistortionCaps = 0;
mCurrentDistortionCaps = 0;
mCurrentCaps = 0;
mSupportedCaps = 0;
mVsync = true;
mTimewarp = true;
mRenderConfigurationDirty = true;
mCurrentPixelDensity = OculusVRDevice::smDesiredPixelDensity;
mDesiredRenderingMode = GFXDevice::RS_StereoSideBySide;
mRTFormat = GFXFormatR8G8B8A8;
mDrawCanvas = NULL;
mFrameReady = false;
mConnection = NULL;
mSensor = NULL;
mActionCodeIndex = 0;
}
OculusVRHMDDevice::~OculusVRHMDDevice()
@ -36,197 +69,576 @@ OculusVRHMDDevice::~OculusVRHMDDevice()
void OculusVRHMDDevice::cleanUp()
{
onDeviceDestroy();
if (mSensor)
{
delete mSensor;
mSensor = NULL;
}
if(mDevice)
{
mDevice->Release();
ovrHmd_Destroy(mDevice);
mDevice = NULL;
}
mIsValid = false;
}
void OculusVRHMDDevice::set(OVR::HMDDevice* hmd, OVR::HMDInfo& info, bool calculateDistortionScale)
void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
{
cleanUp();
mIsValid = false;
mIsSimulation = false;
mRenderConfigurationDirty = true;
mDevice = hmd;
mSupportedCaps = hmd->HmdCaps;
mCurrentCaps = mSupportedCaps & (ovrHmdCap_DynamicPrediction | ovrHmdCap_LowPersistence | (!mVsync ? ovrHmdCap_NoVSync : 0));
mSupportedDistortionCaps = hmd->DistortionCaps;
mCurrentDistortionCaps = mSupportedDistortionCaps & (ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette | ovrDistortionCap_Overdrive);
mTimewarp = mSupportedDistortionCaps & ovrDistortionCap_TimeWarp;
// DeviceInfo
mProductName = info.ProductName;
mManufacturer = info.Manufacturer;
mVersion = info.Version;
mProductName = hmd->ProductName;
mManufacturer = hmd->Manufacturer;
mVersion = hmd->FirmwareMajor;
mDisplayDeviceName = info.DisplayDeviceName;
mDisplayId = info.DisplayId;
mDisplayDeviceName = hmd->DisplayDeviceName;
mDisplayId = hmd->DisplayId;
mDesktopPosition.x = info.DesktopX;
mDesktopPosition.y = info.DesktopY;
mDesktopPosition.x = hmd->WindowsPos.x;
mDesktopPosition.y = hmd->WindowsPos.y;
mResolution.x = info.HResolution;
mResolution.y = info.VResolution;
mResolution.x = hmd->Resolution.w;
mResolution.y = hmd->Resolution.h;
mScreenSize.x = info.HScreenSize;
mScreenSize.y = info.VScreenSize;
mProfileInterpupillaryDistance = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
mLensSeparation = ovrHmd_GetFloat(hmd, "LensSeparation", 0);
ovrHmd_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2);
mVerticalEyeCenter = info.VScreenCenter;
mEyeToScreen = info.EyeToScreenDistance;
mLensSeparation = info.LensSeparationDistance;
mProfileInterpupillaryDistance = info.InterpupillaryDistance;
mInterpupillaryDistance = mProfileInterpupillaryDistance;
dMemcpy(mCurrentFovPorts, mDevice->DefaultEyeFov, sizeof(mDevice->DefaultEyeFov));
mKDistortion.x = info.DistortionK[0];
mKDistortion.y = info.DistortionK[1];
mKDistortion.z = info.DistortionK[2];
mKDistortion.w = info.DistortionK[3];
mChromaticAbCorrection.x = info.ChromaAbCorrection[0];
mChromaticAbCorrection.y = info.ChromaAbCorrection[1];
mChromaticAbCorrection.z = info.ChromaAbCorrection[2];
mChromaticAbCorrection.w = info.ChromaAbCorrection[3];
// Calculated values
calculateValues(calculateDistortionScale);
mIsValid = true;
}
void OculusVRHMDDevice::createSimulation(SimulationTypes simulationType, bool calculateDistortionScale)
{
if(simulationType == ST_RIFT_PREVIEW)
for (U32 i=0; i<2; i++)
{
createSimulatedPreviewRift(calculateDistortionScale);
mCurrentFovPorts[i].UpTan = mDevice->DefaultEyeFov[i].UpTan;
mCurrentFovPorts[i].DownTan = mDevice->DefaultEyeFov[i].DownTan;
mCurrentFovPorts[i].LeftTan = mDevice->DefaultEyeFov[i].LeftTan;
mCurrentFovPorts[i].RightTan = mDevice->DefaultEyeFov[i].RightTan;
}
}
void OculusVRHMDDevice::createSimulatedPreviewRift(bool calculateDistortionScale)
{
if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop)
{
mWindowSize = Point2I(mDevice->Resolution.w, mDevice->Resolution.h);
}
else
{
mWindowSize = Point2I(1100, 618);
}
mActionCodeIndex = actionCodeIndex;
mIsValid = true;
mIsSimulation = true;
mProductName = "Oculus Rift DK1-SLA1";
mManufacturer = "Oculus VR";
mVersion = 0;
mSensor = new OculusVRSensorDevice();
mSensor->set(mDevice, mActionCodeIndex);
mDisplayDeviceName = "";
mResolution.x = 1280;
mResolution.y = 800;
mScreenSize.x = 0.14975999f;
mScreenSize.y = 0.093599997f;
mVerticalEyeCenter = 0.046799999f;
mEyeToScreen = 0.041000001f;
mLensSeparation = 0.064000003f;
mProfileInterpupillaryDistance = 0.064000003f;
mInterpupillaryDistance = mProfileInterpupillaryDistance;
mKDistortion.x = 1.0000000f;
mKDistortion.y = 0.22000000f;
mKDistortion.z = 0.23999999f;
mKDistortion.w = 0.00000000f;
mChromaticAbCorrection.x = 0.995999f;
mChromaticAbCorrection.y = -0.004f;
mChromaticAbCorrection.z = 1.014f;
mChromaticAbCorrection.w = 0.0f;
calculateValues(calculateDistortionScale);
updateCaps();
}
void OculusVRHMDDevice::setIPD(F32 ipd, bool calculateDistortionScale)
void OculusVRHMDDevice::setIPD(F32 ipd)
{
mInterpupillaryDistance = ipd;
// Recalculate as some values rely on the IPD
calculateValues(calculateDistortionScale);
}
// Computes scale that should be applied to the input render texture
// before distortion to fit the result in the same screen size.
// The 'fitRadius' parameter specifies the distance away from distortion center at
// which the input and output coordinates will match, assuming [-1,1] range.
F32 OculusVRHMDDevice::calcScale(F32 fitRadius)
void OculusVRHMDDevice::setOptimalDisplaySize(GuiCanvas *canvas)
{
F32 s = fitRadius;
if (!mDevice)
return;
// This should match distortion equation used in shader.
F32 ssq = s * s;
F32 scale = s * (mKDistortion.x + mKDistortion.y * ssq + mKDistortion.z * ssq * ssq + mKDistortion.w * ssq * ssq * ssq);
return scale;
}
PlatformWindow *window = canvas->getPlatformWindow();
GFXTarget *target = window->getGFXTarget();
void OculusVRHMDDevice::calculateValues(bool calculateDistortionScale)
{
F32 halfScreenX = mScreenSize.x * 0.5f;
if(halfScreenX > 0)
if (target && target->getSize() != mWindowSize)
{
F32 halfLensSeparation = mLensSeparation * 0.5;
F32 offset = halfLensSeparation / halfScreenX;
mEyeUVOffset.x = offset - 0.5;
mEyeUVOffset.y = 1.0f - offset - 0.5;
GFXVideoMode newMode;
newMode.antialiasLevel = 0;
newMode.bitDepth = 32;
newMode.fullScreen = false;
newMode.refreshRate = 75;
newMode.resolution = mWindowSize;
newMode.wideScreen = false;
window->setVideoMode(newMode);
//AssertFatal(window->getClientExtent().x == mWindowSize[0] && window->getClientExtent().y == mWindowSize[1], "Window didn't resize to correct dimensions");
}
// Need to move window over to the rift side of the desktop
if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop && !OculusVRDevice::smWindowDebug)
{
#ifndef OCULUS_WINDOW_DEBUG
window->setPosition(getDesktopPosition());
#endif
}
}
bool OculusVRHMDDevice::isDisplayingWarning()
{
if (!mIsValid || !mDevice)
return false;
ovrHSWDisplayState displayState;
ovrHmd_GetHSWDisplayState(mDevice, &displayState);
return displayState.Displayed;
}
void OculusVRHMDDevice::dismissWarning()
{
if (!mIsValid || !mDevice)
return;
ovrHmd_DismissHSWDisplay(mDevice);
}
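A small usage sketch, assuming 'hmd' points at a valid OculusVRHMDDevice and the call site is a game-side input handler that clears the SDK health-and-safety warning on the first key press:
void sketchOnAnyInput(OculusVRHMDDevice *hmd)
{
   // Dismiss the health-and-safety overlay once the player provides input
   if (hmd && hmd->isDisplayingWarning())
      hmd->dismissWarning();
}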
bool OculusVRHMDDevice::setupTargets()
{
ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
mRecomendedEyeTargetSize[0] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Left, eyeFov[0], mCurrentPixelDensity);
mRecomendedEyeTargetSize[1] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Right, eyeFov[1], mCurrentPixelDensity);
// Calculate render target size
if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide)
{
// Setup a single texture, side-by-side viewports
Point2I rtSize(
mRecomendedEyeTargetSize[0].w + mRecomendedEyeTargetSize[1].w,
mRecomendedEyeTargetSize[0].h > mRecomendedEyeTargetSize[1].h ? mRecomendedEyeTargetSize[0].h : mRecomendedEyeTargetSize[1].h
);
GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat();
mRTFormat = targetFormat;
rtSize = generateRenderTarget(mStereoRT, mStereoTexture, mStereoDepthTexture, rtSize);
// Left
mEyeRenderSize[0] = rtSize;
mEyeRT[0] = mStereoRT;
mEyeTexture[0] = mStereoTexture;
mEyeViewport[0] = RectI(Point2I(0,0), Point2I((mRecomendedEyeTargetSize[0].w+1)/2, mRecomendedEyeTargetSize[0].h));
// Right
mEyeRenderSize[1] = rtSize;
mEyeRT[1] = mStereoRT;
mEyeTexture[1] = mStereoTexture;
mEyeViewport[1] = RectI(Point2I((mRecomendedEyeTargetSize[0].w+1)/2,0), Point2I((mRecomendedEyeTargetSize[1].w+1)/2, mRecomendedEyeTargetSize[1].h));
gLastStereoTexture = mEyeTexture[0];
}
else if (mDesiredRenderingMode == GFXDevice::RS_StereoRenderTargets)
{
// Setup two targets
Point2I rtSize;
GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat();
mRTFormat = targetFormat;
// Left
rtSize = generateRenderTarget(mEyeRT[0], mEyeTexture[0], mStereoDepthTexture, Point2I(mRecomendedEyeTargetSize[0].w, mRecomendedEyeTargetSize[0].h));
mEyeViewport[0] = RectI(Point2I(0,0), Point2I((rtSize.x+1)/2, rtSize.y));
// Right
rtSize = generateRenderTarget(mEyeRT[1], mEyeTexture[1], mStereoDepthTexture, Point2I(mRecomendedEyeTargetSize[1].w, mRecomendedEyeTargetSize[1].h));
mEyeViewport[1] = RectI(Point2I(0,0), Point2I((rtSize.x+1)/2, rtSize.y));
mStereoRT = NULL;
mStereoTexture = NULL;
gLastStereoTexture = mEyeTexture[0];
}
else
{
mEyeUVOffset.x = 0.5f;
mEyeUVOffset.y = 0.5f;
// No rendering, abort!
return false;
}
F32 lensOffset = mLensSeparation * 0.5f;
F32 lensShift = mScreenSize.x * 0.25f - lensOffset;
F32 lensViewportShift = 4.0f * lensShift / mScreenSize.x;
mXCenterOffset= lensViewportShift;
return true;
}
// Determine how the input texture should be scaled relative to the back buffer
// so that we fit the distorted view to the backbuffer after calculating the
// distortion. In reference to section 5.6.3 Distortion Scale and FOV in the
// SDK docs.
if(!calculateDistortionScale)
String OculusVRHMDDevice::dumpMetrics()
{
StringBuilder sb;
EulerF rot = mSensor->getEulerRotation();
Point3F pos = mSensor->getPosition();
FovPort eyeFov[2];
this->getFovPorts(eyeFov);
F32 ipd = this->getIPD();
U32 lastStatus = mSensor->getLastTrackingStatus();
sb.format(" | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s, Distort:%s%s%s",
mActionCodeIndex,
rot.x, rot.y, rot.z,
pos.x, pos.y, pos.z,
eyeFov[0].upTan, eyeFov[0].downTan, eyeFov[0].leftTan, eyeFov[0].rightTan, eyeFov[1].upTan, eyeFov[1].downTan, eyeFov[1].leftTan, eyeFov[1].rightTan,
ipd,
lastStatus & ovrStatus_OrientationTracked ? " ORIENT" : "",
lastStatus & ovrStatus_PositionTracked ? " POS" : "",
mCurrentDistortionCaps & ovrDistortionCap_TimeWarp ? " TIMEWARP" : "",
mCurrentDistortionCaps & ovrDistortionCap_Vignette ? " VIGNETTE" : "",
mCurrentDistortionCaps & ovrDistortionCap_Overdrive ? " OVERDRIVE" : "");
return sb.data();
}
void OculusVRHMDDevice::updateRenderInfo()
{
// Check console values first
if (mCurrentPixelDensity != OculusVRDevice::smDesiredPixelDensity)
{
// Do not calculate a distortion scale for the input texture. This means that the input
// texture and the backbuffer will be the same resolution.
mDistortionFit.x = 0.0f;
mDistortionFit.y = 0.0f;
}
else if (mScreenSize.x > 0.140f) // 7"
{
mDistortionFit.x = -1.0f;
mDistortionFit.y = 0.0f;
}
else // 5"
{
mDistortionFit.x = 0.0f;
mDistortionFit.y = 1.0f;
mRenderConfigurationDirty = true;
mCurrentPixelDensity = OculusVRDevice::smDesiredPixelDensity;
}
// Compute distortion scale from DistortionFitX & DistortionFitY.
// Fit value of 0.0 means "no fit".
if (mIsZero(mDistortionFit.x) && mIsZero(mDistortionFit.y))
if (!mIsValid || !mDevice || !mRenderConfigurationDirty)
return;
if (!mDrawCanvas)
return;
PlatformWindow *window = mDrawCanvas->getPlatformWindow();
ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
// Update window size if it's incorrect
Point2I backbufferSize = mDrawCanvas->getBounds().extent;
// Reset
ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
#ifdef OCULUS_USE_D3D
// Generate render target textures
GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
if (d3d9GFX)
{
mDistortionScale = 1.0f;
ovrD3D9Config cfg;
cfg.D3D9.Header.API = ovrRenderAPI_D3D9;
cfg.D3D9.Header.Multisample = 0;
cfg.D3D9.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
cfg.D3D9.pDevice = d3d9GFX->getDevice();
cfg.D3D9.pDevice->GetSwapChain(0, &cfg.D3D9.pSwapChain);
// Finally setup!
if (!setupTargets())
{
onDeviceDestroy();
return;
}
ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
{
Con::errorf("Couldn't configure oculus rendering!");
return;
}
}
#endif
#ifdef OCULUS_USE_GL
// Generate render target textures
GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
if (glGFX)
{
ovrGLConfig cfg;
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.Multisample = 0;
cfg.OGL.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
#ifdef WIN32
cfg.OGL.Window = GetActiveWindow();//window->getPlatformDrawable();
cfg.OGL.DC = wglGetCurrentDC();
#else
cfg.OGL.Disp = NULL;
#endif
// Finally setup!
if (!setupTargets())
{
onDeviceDestroy();
return;
}
ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
{
Con::errorf("Couldn't configure oculus rendering!");
return;
}
}
#endif
mRenderConfigurationDirty = false;
}
Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize)
{
// Texture size that we already have might be big enough.
Point2I newRTSize;
bool newRT = false;
if (!target.getPointer())
{
target = GFX->allocRenderToTextureTarget();
newRTSize = desiredSize;
newRT = true;
}
else
{
Point2I currentSize = target->getSize();
newRTSize = currentSize;
}
// 50% linear growth each time is a nice balance between being too greedy
// for a 2D surface and too slow to prevent fragmentation.
while ( newRTSize.x < desiredSize.x )
{
newRTSize.x += newRTSize.x/2;
}
while ( newRTSize.y < desiredSize.y )
{
newRTSize.y += newRTSize.y/2;
}
// Put some sane limits on it. 4k x 4k is fine for most modern video cards.
// Nobody should be messing around with surfaces smaller than 4k pixels these days.
newRTSize.setMin(Point2I(4096, 4096));
newRTSize.setMax(Point2I(64, 64));
// Stereo RT needs to be the same size as the recommended RT
if ( newRT || texture.getWidthHeight() != newRTSize )
{
texture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
target->attachTexture( GFXTextureTarget::Color0, texture );
Con::printf("generateRenderTarget generated %x", texture.getPointer());
}
if ( depth.getWidthHeight() != newRTSize )
{
depth.set( newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
target->attachTexture( GFXTextureTarget::DepthStencil, depth );
Con::printf("generateRenderTarget generated depth %x", depth.getPointer());
}
return newRTSize;
}
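A hedged usage sketch from inside OculusVRHMDDevice, with a hypothetical 1182x1461 per-eye request: a fresh target is allocated at the requested size, a reused target instead grows in 50% steps until it fits, and in both cases the result is clamped to the 64x64 .. 4096x4096 range, with the color and depth attachments reallocated only when the size actually changes.
GFXTextureTargetRef rt;   // empty, so the first call allocates at the requested size
GFXTexHandle color, depth;
Point2I actual = generateRenderTarget(rt, color, depth, Point2I(1182, 1461));
// 'actual' may be larger than requested on later calls; the eye viewport must fit inside it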
void OculusVRHMDDevice::clearRenderTargets()
{
mStereoRT = NULL;
mEyeRT[0] = NULL;
mEyeRT[1] = NULL;
}
void OculusVRHMDDevice::updateCaps()
{
if (!mIsValid || !mDevice)
return;
U32 oldDistortionCaps = mCurrentDistortionCaps;
// Distortion
if (mTimewarp)
{
mCurrentDistortionCaps |= ovrDistortionCap_TimeWarp;
}
else
{
// Convert fit value to distortion-centered coordinates before fit radius
// calculation.
// NOTE: For now just assume a full view the same size as the HMD supports. It is
// possible that this full view is smaller or larger.
F32 stereoAspect = 0.5f * mResolution.x / mResolution.y;
F32 dx = mDistortionFit.x - mXCenterOffset;
F32 dy = mDistortionFit.y / stereoAspect;
F32 fitRadius = sqrt(dx * dx + dy * dy);
mDistortionScale = calcScale(fitRadius)/fitRadius;
mCurrentDistortionCaps &= ~ovrDistortionCap_TimeWarp;
}
// Calculate the vertical FOV for a single eye
mAspectRatio = F32(mResolution.x * 0.5f) / F32(mResolution.y);
F32 halfScreenDistance = mScreenSize.y * 0.5f * mDistortionScale;
mYFOV = 2.0f * mAtan(halfScreenDistance / mEyeToScreen);
if (oldDistortionCaps != mCurrentDistortionCaps)
{
mRenderConfigurationDirty = true;
}
F32 viewCenter = mScreenSize.x * 0.25f;
F32 eyeProjectionShift = viewCenter - (mInterpupillaryDistance * 0.5f);
mProjectionCenterOffset.set(4.0f * eyeProjectionShift / mScreenSize.x, 0.0f);
mEyeWorldOffset.set(mInterpupillaryDistance * 0.5f, 0.0f, 0.0f);
// Device
if (!mVsync)
{
mCurrentCaps |= ovrHmdCap_NoVSync;
}
else
{
mCurrentCaps &= ~ovrHmdCap_NoVSync;
}
ovrHmd_SetEnabledCaps(mDevice, mCurrentCaps);
}
static bool sInFrame = false; // protects against recursive onStartFrame calls
void OculusVRHMDDevice::onStartFrame()
{
if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || mFrameReady)
return;
sInFrame = true;
#ifndef OCULUS_DEBUG_FRAME
ovrHmd_BeginFrame(mDevice, 0);
#endif
ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset };
ovrHmd_GetEyePoses(mDevice, 0, hmdToEyeViewOffset, mCurrentEyePoses, &mLastTrackingState);
for (U32 i=0; i<2; i++)
{
mCurrentEyePoses[i].Position.x *= OculusVRDevice::smPositionTrackingScale;
mCurrentEyePoses[i].Position.y *= OculusVRDevice::smPositionTrackingScale;
mCurrentEyePoses[i].Position.z *= OculusVRDevice::smPositionTrackingScale;
}
sInFrame = false;
mFrameReady = true;
}
void OculusVRHMDDevice::onEndFrame()
{
if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady)
return;
Point2I eyeSize;
GFXTarget *windowTarget = mDrawCanvas->getPlatformWindow()->getGFXTarget();
#ifndef OCULUS_DEBUG_FRAME
#ifdef OCULUS_USE_D3D
GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
if (d3d9GFX && mEyeRT[0].getPointer())
{
// Left
ovrD3D9Texture eyeTextures[2];
eyeSize = mEyeTexture[0].getWidthHeight();
eyeTextures[0].D3D9.Header.API = ovrRenderAPI_D3D9;
eyeTextures[0].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
eyeTextures[0].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
eyeTextures[0].D3D9.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
eyeTextures[0].D3D9.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
eyeTextures[0].D3D9.Header.TextureSize.w = eyeSize.x;
eyeTextures[0].D3D9.Header.TextureSize.h = eyeSize.y;
eyeTextures[0].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[0].getPointer())->get2DTex() : NULL;
// Right
eyeSize = mEyeTexture[1].getWidthHeight();
eyeTextures[1].D3D9.Header.API = ovrRenderAPI_D3D9;
eyeTextures[1].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
eyeTextures[1].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
eyeTextures[1].D3D9.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
eyeTextures[1].D3D9.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
eyeTextures[1].D3D9.Header.TextureSize.w = eyeSize.x;
eyeTextures[1].D3D9.Header.TextureSize.h = eyeSize.y;
eyeTextures[1].D3D9.pTexture = mEyeRT[1].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[1].getPointer())->get2DTex() : NULL;
// Submit!
GFX->disableShaders();
GFX->setActiveRenderTarget(windowTarget);
GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
}
#endif
#ifdef OCULUS_USE_GL
GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
if (glGFX && mEyeRT[0].getPointer())
{
// Left
ovrGLTexture eyeTextures[2];
eyeSize = mEyeTexture[0].getWidthHeight();
eyeTextures[0].OGL.Header.API = ovrRenderAPI_GL;
eyeTextures[0].OGL.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
eyeTextures[0].OGL.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
eyeTextures[0].OGL.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
eyeTextures[0].OGL.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
eyeTextures[0].OGL.Header.TextureSize.w = eyeSize.x;
eyeTextures[0].OGL.Header.TextureSize.h = eyeSize.y;
eyeTextures[0].OGL.TexId = mEyeRT[0].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[0].getPointer())->getHandle() : 0;
// Right
eyeSize = mEyeTexture[1].getWidthHeight();
eyeTextures[1].OGL.Header.API = ovrRenderAPI_GL;
eyeTextures[1].OGL.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
eyeTextures[1].OGL.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
eyeTextures[1].OGL.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
eyeTextures[1].OGL.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
eyeTextures[1].OGL.Header.TextureSize.w = eyeSize.x;
eyeTextures[1].OGL.Header.TextureSize.h = eyeSize.y;
eyeTextures[1].OGL.TexId = mEyeRT[1].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[1].getPointer())->getHandle() : 0;
// Submit!
GFX->disableShaders();
GFX->setActiveRenderTarget(windowTarget);
GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
}
#endif
#endif
mFrameReady = false;
}
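A hedged sketch of how a caller might drive one stereo frame through these two methods; 'renderEyeView' is a hypothetical callback that draws the scene for one eye into its render target and viewport.
void sketchRenderOneFrame(OculusVRHMDDevice *hmd, void (*renderEyeView)(U32 eyeId))
{
   hmd->onStartFrame();        // ovrHmd_BeginFrame plus ovrHmd_GetEyePoses
   for (U32 eye = 0; eye < 2; eye++)
      renderEyeView(eye);      // draw using getFrameEyePose(&pose, eye) for the view transform
   hmd->onEndFrame();          // ovrHmd_EndFrame submits both eye textures for distortion
}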
void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const
{
// Directly set the rotation and position from the eye transforms
ovrPosef pose = mCurrentEyePoses[eyeId];
OVR::Quatf orientation = pose.Orientation;
const OVR::Vector3f position = pose.Position;
EulerF rotEuler;
OculusVRUtil::convertRotation(orientation, rotEuler);
outPose->orientation = rotEuler;
outPose->position = Point3F(-position.x, position.z, -position.y);
}
void OculusVRHMDDevice::onDeviceDestroy()
{
if (!mIsValid || !mDevice)
return;
if (mStereoRT.getPointer())
{
mStereoRT->zombify();
}
if (mEyeRT[1].getPointer() && mEyeRT[1] != mStereoRT)
{
mEyeRT[0]->zombify();
mEyeRT[1]->zombify();
}
mStereoRT = NULL;
mStereoTexture = NULL;
mStereoDepthTexture = NULL;
mEyeTexture[0] = NULL;
mEyeDepthTexture[0] = NULL;
mEyeTexture[1] = NULL;
mEyeDepthTexture[1] = NULL;
mEyeRT[0] = NULL;
mEyeRT[1] = NULL;
mRenderConfigurationDirty = true;
ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
}

View file

@ -30,7 +30,16 @@
#include "math/mPoint4.h"
#include "platform/input/oculusVR/oculusVRConstants.h"
#include "platform/types.h"
#include "OVR.h"
#include "gfx/gfxTextureHandle.h"
#include "math/mRect.h"
#include "gfx/gfxDevice.h"
#include "OVR_CAPI_0_5_0.h"
class GuiCanvas;
class GameConnection;
struct DisplayPose;
class OculusVRSensorDevice;
class OculusVRHMDDevice
{
@ -42,9 +51,19 @@ public:
protected:
bool mIsValid;
bool mIsSimulation;
bool mVsync;
bool mTimewarp;
OVR::HMDDevice* mDevice;
bool mRenderConfigurationDirty;
bool mFrameReady;
ovrHmd mDevice;
U32 mSupportedDistortionCaps;
U32 mCurrentDistortionCaps;
U32 mSupportedCaps;
U32 mCurrentCaps;
// From OVR::DeviceInfo
String mProductName;
@ -66,13 +85,6 @@ protected:
// Physical screen size in meters
Point2F mScreenSize;
// Physical offset from the top of the screen to the center of the
// eye, in meters. Usually half of the vertical physical screen size
F32 mVerticalEyeCenter;
// Physical distance from the eye to the screen
F32 mEyeToScreen;
// Physical distance between lens centers, in meters
F32 mLensSeparation;
@ -82,50 +94,25 @@ protected:
// Physical distance between the user's eye centers
F32 mInterpupillaryDistance;
// The eye IPD as a Point3F
Point3F mEyeWorldOffset;
// Radial distortion correction coefficients used by the barrel distortion shader
Point4F mKDistortion;
// Chromatic aberration correction coefficients
Point4F mChromaticAbCorrection;
// Calculated values of eye x offset from center in normalized (uv) coordinates
// where each eye is 0..1. Used for the mono to stereo postFX to simulate an
// eye offset of the camera. The x component is the left eye, the y component
// is the right eye.
Point2F mEyeUVOffset;
// Used to adjust where an eye's view is rendered to account for the lenses not
// being in the center of the physical screen half.
F32 mXCenterOffset;
// When calculating the distortion scale to use to increase the size of the input texture
// this determines how we should attempt to fit the distorted view into the backbuffer.
Point2F mDistortionFit;
// Is the factor by which the input texture size is increased to make post-distortion
// result distortion fit the viewport. If the input texture is the same size as the
// backbuffer, then this should be 1.0.
F32 mDistortionScale;
// Aspect ratio for a single eye
F32 mAspectRatio;
// Vertical field of view
F32 mYFOV;
// The amount to offset the projection matrix to account for the eye not being in the
// center of the screen.
Point2F mProjectionCenterOffset;
// Current pose of eyes
ovrPosef mCurrentEyePoses[2];
ovrEyeRenderDesc mEyeRenderDesc[2];
ovrFovPort mCurrentFovPorts[2];
Point2I mWindowSize;
GameConnection *mConnection;
OculusVRSensorDevice *mSensor;
U32 mActionCodeIndex;
protected:
F32 calcScale(F32 fitRadius);
void calculateValues(bool calculateDistortionScale);
void createSimulatedPreviewRift(bool calculateDistortionScale);
void updateRenderInfo();
public:
OculusVRHMDDevice();
@ -134,13 +121,12 @@ public:
void cleanUp();
// Set the HMD properties based on information from the OVR device
void set(OVR::HMDDevice* hmd, OVR::HMDInfo& info, bool calculateDistortionScale);
void set(ovrHmd hmd, U32 actionCodeIndex);
// Set the HMD properties based on a simulation of the given type
void createSimulation(SimulationTypes simulationType, bool calculateDistortionScale);
// Sets optimal display size for canvas
void setOptimalDisplaySize(GuiCanvas *canvas);
bool isValid() const {return mIsValid;}
bool isSimulated() const {return mIsSimulation;}
const char* getProductName() const { return mProductName.c_str(); }
const char* getManufacturer() const { return mManufacturer.c_str(); }
@ -161,13 +147,6 @@ public:
// Physical screen size in meters
const Point2F& getScreenSize() const { return mScreenSize; }
// Physical offset from the top of the screen to the center of the
// eye, in meters. Usually half of the vertical physical screen size
F32 getVerticalEyeCenter() const { return mVerticalEyeCenter; }
// Physical distance from the eye to the screen
F32 getEyeToScreen() const { return mEyeToScreen; }
// Physical distance between lens centers, in meters
F32 getLensSeparation() const { return mLensSeparation; }
@ -178,37 +157,78 @@ public:
F32 getIPD() const { return mInterpupillaryDistance; }
// Set a new physical distance between the user's eye centers
void setIPD(F32 ipd, bool calculateDistortionScale);
// Provides the IPD of one eye as a Point3F
const Point3F& getEyeWorldOffset() const { return mEyeWorldOffset; }
// Radial distortion correction coefficients used by the barrel distortion shader
const Point4F& getKDistortion() const { return mKDistortion; }
// Chromatic aberration correction coefficients used by the barrel distortion shader
const Point4F& getChromaticAbCorrection() const { return mChromaticAbCorrection; }
// Calculated values of eye x offset from center in normalized (uv) coordinates.
const Point2F& getEyeUVOffset() const { return mEyeUVOffset; }
// Used to adjust where an eye's view is rendered to account for the lenses not
// being in the center of the physical screen half.
F32 getCenterOffset() const { return mXCenterOffset; }
// Is the factor by which the input texture size is increased to make post-distortion
// result distortion fit the viewport.
F32 getDistortionScale() const { return mDistortionScale; }
// Aspect ration for a single eye
F32 getAspectRation() const { return mAspectRatio; }
// Vertical field of view
F32 getYFOV() const { return mYFOV; }
void setIPD(F32 ipd);
// The amount to offset the projection matrix to account for the eye not being in the
// center of the screen.
const Point2F& getProjectionCenterOffset() const { return mProjectionCenterOffset; }
void getStereoViewports(RectI *dest) const { dMemcpy(dest, mEyeViewport, sizeof(mEyeViewport)); }
void getStereoTargets(GFXTextureTarget **dest) const { dest[0] = mEyeRT[0]; dest[1] = mEyeRT[1]; }
void getFovPorts(FovPort *dest) const { dMemcpy(dest, mCurrentFovPorts, sizeof(mCurrentFovPorts)); }
/// Returns eye offsets in torque coordinate space, i.e. z being up, x being left-right, and y being depth (forward).
void getEyeOffsets(Point3F *offsets) const {
offsets[0] = Point3F(-mEyeRenderDesc[0].HmdToEyeViewOffset.x, mEyeRenderDesc[0].HmdToEyeViewOffset.z, -mEyeRenderDesc[0].HmdToEyeViewOffset.y);
offsets[1] = Point3F(-mEyeRenderDesc[1].HmdToEyeViewOffset.x, mEyeRenderDesc[1].HmdToEyeViewOffset.z, -mEyeRenderDesc[1].HmdToEyeViewOffset.y); }
void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
void updateCaps();
void onStartFrame();
void onEndFrame();
void onDeviceDestroy();
Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize);
void clearRenderTargets();
bool isDisplayingWarning();
void dismissWarning();
bool setupTargets();
/// Designates canvas we are drawing to. Also updates render targets
void setDrawCanvas(GuiCanvas *canvas) { if (mDrawCanvas != canvas) { mDrawCanvas = canvas; } updateRenderInfo(); }
virtual void setCurrentConnection(GameConnection *connection) { mConnection = connection; }
virtual GameConnection* getCurrentConnection() { return mConnection; }
String dumpMetrics();
// Stereo RT
GFXTexHandle mStereoTexture;
GFXTexHandle mStereoDepthTexture;
GFXTextureTargetRef mStereoRT;
// Eye RTs (if we are using separate targets)
GFXTextureTargetRef mEyeRT[2];
GFXTexHandle mEyeTexture[2];
GFXTexHandle mEyeDepthTexture[2];
// Current render target size for each eye
Point2I mEyeRenderSize[2];
// Recommended eye target size for each eye
ovrSizei mRecomendedEyeTargetSize[2];
// Desired viewport for each eye
RectI mEyeViewport[2];
F32 mCurrentPixelDensity;
F32 smDesiredPixelDensity;
ovrTrackingState mLastTrackingState;
GFXDevice::GFXDeviceRenderStyles mDesiredRenderingMode;
GFXFormat mRTFormat;
// Canvas we should be drawing to
GuiCanvas *mDrawCanvas;
OculusVRSensorDevice *getSensorDevice() { return mSensor; }
};
#endif // _OCULUSVRHMDDEVICE_H_

View file

@ -20,6 +20,7 @@
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
#include "platform/input/oculusVR/oculusVRDevice.h"
#include "platform/input/oculusVR/oculusVRSensorData.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
#include "console/console.h"
@ -32,66 +33,44 @@ OculusVRSensorData::OculusVRSensorData()
void OculusVRSensorData::reset()
{
mDataSet = false;
mStatusFlags = 0;
}
void OculusVRSensorData::setData(OVR::SensorFusion& data, const F32& maxAxisRadius)
void OculusVRSensorData::setData(ovrTrackingState& data, const F32& maxAxisRadius)
{
// Sensor rotation
OVR::Quatf orientation;
if(data.GetPredictionDelta() > 0)
{
orientation = data.GetPredictedOrientation();
}
else
{
orientation = data.GetOrientation();
}
// Sensor rotation & position
OVR::Posef pose = data.HeadPose.ThePose;
OVR::Quatf orientation = pose.Rotation;
OVR::Vector3f position = data.HeadPose.ThePose.Position;
mPosition = Point3F(-position.z, position.x, position.y);
mPosition *= OculusVRDevice::smPositionTrackingScale;
OVR::Matrix4f orientMat(orientation);
OculusVRUtil::convertRotation(orientMat.M, mRot);
mRotQuat.set(mRot);
// Sensor rotation in Euler format
OculusVRUtil::convertRotation(orientation, mRotEuler);
OculusVRUtil::convertRotation(orientation, mRotEuler); // mRotEuler is (pitch, roll, yaw) converted from OVR's (yaw, pitch, roll)
// Sensor rotation as axis
OculusVRUtil::calculateAxisRotation(mRot, maxAxisRadius, mRotAxis);
// Sensor raw values
OVR::Vector3f accel = data.GetAcceleration();
OVR::Vector3f accel = data.HeadPose.LinearAcceleration;
OculusVRUtil::convertAcceleration(accel, mAcceleration);
OVR::Vector3f angVel = data.GetAngularVelocity();
OVR::Vector3f angVel = data.HeadPose.AngularVelocity;
OculusVRUtil::convertAngularVelocity(angVel, mAngVelocity);
OVR::Vector3f mag;
if(data.HasMagCalibration() && data.IsYawCorrectionEnabled())
{
mag = data.GetCalibratedMagnetometer();
}
else
{
mag = data.GetMagnetometer();
}
OVR::Vector3f mag = data.RawSensorData.Magnetometer;
OculusVRUtil::convertMagnetometer(mag, mMagnetometer);
mDataSet = true;
}
void OculusVRSensorData::simulateData(const F32& maxAxisRadius)
{
// Sensor rotation
mRot.identity();
mRotQuat.identity();
mRotEuler.zero();
// Sensor rotation as axis
OculusVRUtil::calculateAxisRotation(mRot, maxAxisRadius, mRotAxis);
// Sensor raw values
mAcceleration.zero();
mAngVelocity.zero();
mMagnetometer.zero();
mStatusFlags = data.StatusFlags;
mDataSet = true;
}
@ -132,5 +111,10 @@ U32 OculusVRSensorData::compare(OculusVRSensorData* other, bool doRawCompare)
}
}
if (other->mStatusFlags != mStatusFlags)
{
result |= DIFF_STATUS;
}
return result;
}

View file

@ -27,7 +27,7 @@
#include "math/mMatrix.h"
#include "math/mQuat.h"
#include "math/mPoint2.h"
#include "OVR.h"
#include "OVR_CAPI_0_5_0.h"
struct OculusVRSensorData
{
@ -39,6 +39,8 @@ struct OculusVRSensorData
DIFF_ACCEL = (1<<3),
DIFF_ANGVEL = (1<<4),
DIFF_MAG = (1<<5),
DIFF_POS = (1<<6),
DIFF_STATUS = (1<<7),
DIFF_ROTAXIS = (DIFF_ROTAXISX | DIFF_ROTAXISY),
DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
@ -46,6 +48,9 @@ struct OculusVRSensorData
bool mDataSet;
// Position
Point3F mPosition;
// Rotation
MatrixF mRot;
QuatF mRotQuat;
@ -59,16 +64,15 @@ struct OculusVRSensorData
EulerF mAngVelocity;
VectorF mMagnetometer;
U32 mStatusFlags;
OculusVRSensorData();
/// Reset the data
void reset();
/// Set data based on given sensor fusion
void setData(OVR::SensorFusion& data, const F32& maxAxisRadius);
/// Simulate valid data
void simulateData(const F32& maxAxisRadius);
void setData(ovrTrackingState& data, const F32& maxAxisRadius);
/// Compare this data and given and return differences
U32 compare(OculusVRSensorData* other, bool doRawCompare);

View file

@ -24,6 +24,8 @@
#include "platform/input/oculusVR/oculusVRSensorData.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
#include "platform/platformInput.h"
#include"console/simBase.h"
#include "console/engineAPI.h"
U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORROTANG[OculusVRConstants::MaxSensors] = {0};
@ -32,13 +34,15 @@ U32 OculusVRSensorDevice::OVR_SENSORROTAXISY[OculusVRConstants::MaxSensors] = {0
U32 OculusVRSensorDevice::OVR_SENSORACCELERATION[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORANGVEL[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORMAGNETOMETER[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORPOSITION[OculusVRConstants::MaxSensors] = {0};
OculusVRSensorDevice::OculusVRSensorDevice()
{
mIsValid = false;
mIsSimulation = false;
mDevice = NULL;
mCurrentTrackingCaps = 0;
mSupportedTrackingCaps = 0;
mPositionTrackingDisabled = false;
for(U32 i=0; i<2; ++i)
{
mDataBuffer[i] = new OculusVRSensorData();
@ -60,34 +64,33 @@ OculusVRSensorDevice::~OculusVRSensorDevice()
void OculusVRSensorDevice::cleanUp()
{
mSensorFusion.AttachToSensor(NULL);
if(mDevice)
{
mDevice->Release();
mDevice = NULL;
}
mIsValid = false;
ovrHmd_ConfigureTracking(mDevice, 0, 0);
}
void OculusVRSensorDevice::set(OVR::SensorDevice* sensor, OVR::SensorInfo& info, S32 actionCodeIndex)
void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
{
mIsValid = false;
mDevice = sensor;
mSensorFusion.AttachToSensor(sensor);
mYawCorrectionDisabled = !mSensorFusion.IsYawCorrectionEnabled();
mSupportedTrackingCaps = sensor->TrackingCaps;
mCurrentTrackingCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position;
mCurrentTrackingCaps = mSupportedTrackingCaps & mCurrentTrackingCaps;
mYawCorrectionDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_MagYawCorrection);
mPositionTrackingDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_Position);
// DeviceInfo
mProductName = info.ProductName;
mManufacturer = info.Manufacturer;
mVersion = info.Version;
mProductName = sensor->ProductName;
mManufacturer = sensor->Manufacturer;
mVersion = sensor->Type;
// SensorInfo
mVendorId = info.VendorId;
mProductId = info.ProductId;
mSerialNumber = info.SerialNumber;
mVendorId = sensor->VendorId;
mProductId = sensor->ProductId;
mSerialNumber = sensor->SerialNumber;
mActionCodeIndex = actionCodeIndex;
@ -100,43 +103,8 @@ void OculusVRSensorDevice::set(OVR::SensorDevice* sensor, OVR::SensorInfo& info,
{
mIsValid = true;
}
}
void OculusVRSensorDevice::createSimulation(SimulationTypes simulationType, S32 actionCodeIndex)
{
if(simulationType == ST_RIFT_PREVIEW)
{
createSimulatedPreviewRift(actionCodeIndex);
}
}
void OculusVRSensorDevice::createSimulatedPreviewRift(S32 actionCodeIndex)
{
mIsValid = false;
mIsSimulation = true;
mYawCorrectionDisabled = true;
// DeviceInfo
mProductName = "Tracker DK";
mManufacturer = "Oculus VR, Inc.";
mVersion = 0;
// SensorInfo
mVendorId = 10291;
mProductId = 1;
mSerialNumber = "000000000000";
mActionCodeIndex = actionCodeIndex;
if(mActionCodeIndex >= OculusVRConstants::MaxSensors)
{
// Cannot declare more sensors than we are able to handle
mIsValid = false;
}
else
{
mIsValid = true;
}
updateTrackingCaps();
}
void OculusVRSensorDevice::buildCodeTable()
@ -154,6 +122,8 @@ void OculusVRSensorDevice::buildCodeTable()
OVR_SENSORACCELERATION[i] = INPUTMGR->getNextDeviceCode();
OVR_SENSORANGVEL[i] = INPUTMGR->getNextDeviceCode();
OVR_SENSORMAGNETOMETER[i] = INPUTMGR->getNextDeviceCode();
OVR_SENSORPOSITION[i] = INPUTMGR->getNextDeviceCode();
}
// Build out the virtual map
@ -179,27 +149,27 @@ void OculusVRSensorDevice::buildCodeTable()
dSprintf(buffer, 64, "ovr_sensormagnetometer%d", i);
INPUTMGR->addVirtualMap( buffer, SI_POS, OVR_SENSORMAGNETOMETER[i] );
dSprintf(buffer, 64, "ovr_sensorpos%d", i);
INPUTMGR->addVirtualMap( buffer, SI_POS, OVR_SENSORPOSITION[i] );
}
}
//-----------------------------------------------------------------------------
bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, F32 maxAxisRadius, bool generateRawSensor)
bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, bool generatePositionEvents, F32 maxAxisRadius, bool generateRawSensor)
{
if(!mIsValid)
return false;
// Grab current state
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
mLastStatus = ts.StatusFlags;
// Store the current data from the sensor and compare with previous data
U32 diff;
OculusVRSensorData* currentBuffer = (mPrevData == mDataBuffer[0]) ? mDataBuffer[1] : mDataBuffer[0];
if(!mIsSimulation)
{
currentBuffer->setData(mSensorFusion, maxAxisRadius);
}
else
{
currentBuffer->simulateData(maxAxisRadius);
}
currentBuffer->setData(ts, maxAxisRadius);
diff = mPrevData->compare(currentBuffer, generateRawSensor);
// Update the previous data pointer. We do this here in case someone calls our
@ -218,7 +188,7 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
{
// Convert angles to degrees
VectorF angles;
for(U32 i=0; i<3; ++i)
for(U32 i=0; i<3; ++i)
{
angles[i] = mRadToDeg(currentBuffer->mRotEuler[i]);
}
@ -235,6 +205,11 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_AXIS, OVR_SENSORROTAXISY[mActionCodeIndex], SI_MOVE, currentBuffer->mRotAxis.y);
}
if (generatePositionEvents && diff & OculusVRSensorData::DIFF_POS)
{
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORPOSITION[mActionCodeIndex], SI_MOVE, currentBuffer->mPosition);
}
// Raw sensor event
if(generateRawSensor && diff & OculusVRSensorData::DIFF_RAW)
{
@ -256,6 +231,14 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_POS, OVR_SENSORMAGNETOMETER[mActionCodeIndex], SI_MOVE, currentBuffer->mMagnetometer);
}
if (diff & OculusVRSensorData::DIFF_STATUS)
{
if (Con::isFunction("onOculusStatusUpdate"))
{
Con::executef("onOculusStatusUpdate", ts.StatusFlags);
}
}
return true;
}
@ -266,39 +249,7 @@ void OculusVRSensorDevice::reset()
if(!mIsValid)
return;
mSensorFusion.Reset();
}
F32 OculusVRSensorDevice::getPredictionTime() const
{
if(!mIsValid)
return 0.0f;
return mSensorFusion.GetPredictionDelta();
}
void OculusVRSensorDevice::setPredictionTime(F32 dt)
{
if(!mIsValid)
return;
mSensorFusion.SetPrediction(dt);
}
bool OculusVRSensorDevice::getGravityCorrection() const
{
if(!mIsValid)
return false;
return mSensorFusion.IsGravityEnabled();
}
void OculusVRSensorDevice::setGravityCorrection(bool state)
{
if(!mIsValid)
return;
mSensorFusion.SetGravityEnabled(state);
ovrHmd_RecenterPose(mDevice);
}
bool OculusVRSensorDevice::getYawCorrection() const
@ -306,7 +257,7 @@ bool OculusVRSensorDevice::getYawCorrection() const
if(!mIsValid)
return false;
return mSensorFusion.IsYawCorrectionEnabled();
return (mCurrentTrackingCaps & ovrTrackingCap_MagYawCorrection) != 0;
}
void OculusVRSensorDevice::setYawCorrection(bool state)
@ -314,10 +265,30 @@ void OculusVRSensorDevice::setYawCorrection(bool state)
if(!mIsValid)
return;
if(mYawCorrectionDisabled || !mSensorFusion.HasMagCalibration())
if (state == !mYawCorrectionDisabled)
return;
mSensorFusion.SetYawCorrectionEnabled(state);
// Don't allow if not capable
if(state && !(mSupportedTrackingCaps & ovrTrackingCap_MagYawCorrection))
return;
mYawCorrectionDisabled = !state;
updateTrackingCaps();
}
void OculusVRSensorDevice::setPositionTracking(bool state)
{
if(!mIsValid)
return;
if (state == !mPositionTrackingDisabled)
return;
if(state && !(mSupportedTrackingCaps & ovrTrackingCap_Position))
return;
mPositionTrackingDisabled = !state;
updateTrackingCaps();
}
bool OculusVRSensorDevice::getMagnetometerCalibrationAvailable() const
@ -325,7 +296,23 @@ bool OculusVRSensorDevice::getMagnetometerCalibrationAvailable() const
if(!mIsValid)
return false;
return mSensorFusion.HasMagCalibration();
return (mSupportedTrackingCaps & ovrTrackingCap_MagYawCorrection) != 0;
}
bool OculusVRSensorDevice::getOrientationTrackingAvailable() const
{
if(!mIsValid)
return false;
return (mSupportedTrackingCaps & ovrTrackingCap_Orientation) != 0;
}
bool OculusVRSensorDevice::getPositionTrackingAvailable() const
{
if(!mIsValid)
return false;
return (mSupportedTrackingCaps & ovrTrackingCap_Position) != 0;
}
//-----------------------------------------------------------------------------
@ -335,15 +322,8 @@ EulerF OculusVRSensorDevice::getEulerRotation()
if(!mIsValid)
return Point3F::Zero;
OVR::Quatf orientation;
if(mSensorFusion.GetPredictionDelta() > 0)
{
orientation = mSensorFusion.GetPredictedOrientation();
}
else
{
orientation = mSensorFusion.GetOrientation();
}
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
// Sensor rotation in Euler format
EulerF rot;
@ -357,13 +337,12 @@ EulerF OculusVRSensorDevice::getRawEulerRotation()
if(!mIsValid)
return Point3F::Zero;
OVR::Quatf orientation;
orientation = mSensorFusion.GetOrientation();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
// Sensor rotation in Euler format
EulerF rot;
OculusVRUtil::convertRotation(orientation, rot);
return rot;
}
@ -371,9 +350,10 @@ VectorF OculusVRSensorDevice::getAcceleration()
{
if(!mIsValid)
return VectorF::Zero;
OVR::Vector3f a = mSensorFusion.GetAcceleration();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Vector3f a = ts.HeadPose.LinearAcceleration;
// Sensor acceleration in VectorF format
VectorF acceleration;
OculusVRUtil::convertAcceleration(a, acceleration);
@ -385,8 +365,9 @@ EulerF OculusVRSensorDevice::getAngularVelocity()
{
if(!mIsValid)
return EulerF::Zero;
OVR::Vector3f v = mSensorFusion.GetAngularVelocity();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Vector3f v = ts.HeadPose.AngularVelocity;
// Sensor angular velocity in EulerF format
EulerF vel;
@ -395,38 +376,28 @@ EulerF OculusVRSensorDevice::getAngularVelocity()
return vel;
}
VectorF OculusVRSensorDevice::getMagnetometer()
Point3F OculusVRSensorDevice::getPosition()
{
if(!mIsValid)
return VectorF::Zero;
OVR::Vector3f m;
if(mSensorFusion.HasMagCalibration() && mSensorFusion.IsYawCorrectionEnabled())
{
m = mSensorFusion.GetCalibratedMagnetometer();
}
else
{
m = mSensorFusion.GetMagnetometer();
}
return Point3F();
// Sensor magnetometer reading in VectorF format
VectorF mag;
OculusVRUtil::convertMagnetometer(m, mag);
return mag;
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
OVR::Vector3f v = ts.HeadPose.ThePose.Position;
return Point3F(-v.x, v.z, -v.y);
}
VectorF OculusVRSensorDevice::getRawMagnetometer()
void OculusVRSensorDevice::updateTrackingCaps()
{
if(!mIsValid)
return VectorF::Zero;
if (!mIsValid)
return;
OVR::Vector3f m = mSensorFusion.GetMagnetometer();
// Sensor magnetometer reading in VectorF format
VectorF mag;
OculusVRUtil::convertMagnetometer(m, mag);
// Set based on current vars
mCurrentTrackingCaps = ovrTrackingCap_Orientation;
return mag;
if (!mYawCorrectionDisabled)
mCurrentTrackingCaps |= ovrTrackingCap_MagYawCorrection;
if (!mPositionTrackingDisabled)
mCurrentTrackingCaps |= ovrTrackingCap_Position;
ovrHmd_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0);
}
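A brief usage sketch, assuming 'sensor' points at a valid OculusVRSensorDevice: toggling either flag rebuilds the capability mask and re-runs ovrHmd_ConfigureTracking.
void sketchToggleTracking(OculusVRSensorDevice *sensor)
{
   sensor->setPositionTracking(false); // drops ovrTrackingCap_Position, keeps orientation tracking
   sensor->setYawCorrection(true);     // ignored unless the HMD reports ovrTrackingCap_MagYawCorrection
}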

View file

@ -30,17 +30,12 @@
#include "math/mPoint4.h"
#include "platform/input/oculusVR/oculusVRConstants.h"
#include "platform/types.h"
#include "OVR.h"
#include "OVR_CAPI_0_5_0.h"
struct OculusVRSensorData;
class OculusVRSensorDevice
{
public:
enum SimulationTypes {
ST_RIFT_PREVIEW,
};
public:
// Action codes
static U32 OVR_SENSORROT[OculusVRConstants::MaxSensors]; // SI_ROT
@ -54,15 +49,15 @@ public:
static U32 OVR_SENSORANGVEL[OculusVRConstants::MaxSensors]; // SI_POS but is EulerF
static U32 OVR_SENSORMAGNETOMETER[OculusVRConstants::MaxSensors]; // SI_POS
static U32 OVR_SENSORPOSITION[OculusVRConstants::MaxSensors];
protected:
bool mIsValid;
bool mIsSimulation;
OVR::SensorDevice* mDevice;
OVR::SensorFusion mSensorFusion;
ovrHmd mDevice;
U32 mCurrentTrackingCaps;
U32 mSupportedTrackingCaps;
// From OVR::DeviceInfo
String mProductName;
String mManufacturer;
@ -76,6 +71,12 @@ protected:
// Has yaw correction been disabled by the control panel
bool mYawCorrectionDisabled;
// Has position tracking been disabled
bool mPositionTrackingDisabled;
// Last tracking status
U32 mLastStatus;
// Assigned by the OculusVRDevice
S32 mActionCodeIndex;
@ -86,9 +87,6 @@ protected:
// for the sensor
OculusVRSensorData* mPrevData;
protected:
void createSimulatedPreviewRift(S32 actionCodeIndex);
public:
OculusVRSensorDevice();
virtual ~OculusVRSensorDevice();
@ -98,44 +96,42 @@ public:
void cleanUp();
// Set the sensor properties based on information from the OVR device
void set(OVR::SensorDevice* sensor, OVR::SensorInfo& info, S32 actionCodeIndex);
// Set the sensor properties based on a simulation of the given type
void createSimulation(SimulationTypes simulationType, S32 actionCodeIndex);
void set(ovrHmd sensor, S32 actionCodeIndex);
bool isValid() const {return mIsValid;}
bool isSimulated() {return mIsSimulation;}
bool process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, F32 maxAxisRadius, bool generateRawSensor);
bool process(U32 deviceType, bool generateRotAsAngAxis, bool generateRotAsEuler, bool generateRotationAsAxisEvents, bool generatePositionEvents, F32 maxAxisRadius, bool generateRawSensor);
void reset();
// Get the prediction time for the sensor fusion. The time is in seconds.
F32 getPredictionTime() const;
// Set the prediction time for the sensor fusion. The time is in seconds.
void setPredictionTime(F32 dt);
// Is gravity correction enabled for pitch and roll
bool getGravityCorrection() const;
// Set the pitch and roll gravity correction
void setGravityCorrection(bool state);
// Has yaw correction been disabled using the control panel
bool getYawCorrectionUserDisabled() const { return mYawCorrectionDisabled; }
// Is yaw correction enabled
bool getYawCorrection() const;
// Position is valid
bool getHasValidPosition() const { return mLastStatus & ovrStatus_PositionTracked; }
// Set the yaw correction. Note: if magnetometer calibration data is not present,
// or user has disabled yaw correction in the control panel, this method will
// not enable it.
void setYawCorrection(bool state);
// Sets position tracking state
void setPositionTracking(bool state);
// Is magnetometer calibration data available for this sensor
bool getMagnetometerCalibrationAvailable() const;
// Is orientation tracking data available for this sensor
bool getOrientationTrackingAvailable() const;
// Is position tracking data available for this sensor
bool getPositionTrackingAvailable() const;
U32 getLastTrackingStatus() const { return mLastStatus; }
const char* getProductName() { return mProductName.c_str(); }
const char* getManufacturer() { return mManufacturer.c_str(); }
U32 getVersion() { return mVersion; }
@ -155,12 +151,10 @@ public:
// Get the current angular velocity reading, in rad/s
EulerF getAngularVelocity();
// Get the current magnetometer reading (direction and field strength), in Gauss.
// Uses magnetometer calibration if set.
VectorF getMagnetometer();
// Get the current position
Point3F getPosition();
// Get the current raw magnetometer reading (direction and field strength), in Gauss
VectorF getRawMagnetometer();
void updateTrackingCaps();
};
#endif // _OCULUSVRSENSORDEVICE_H_

View file

@ -25,7 +25,12 @@
#include "math/mPoint2.h"
#include "math/mMatrix.h"
#include "OVR.h"
#include "OVR_CAPI_0_5_0.h"
// NOTE: the Oculus math headers use "Offset" as an identifier, but Torque defines Offset as a preprocessor macro, so stash it and restore it around the include
#define TorqueOffset Offset
#undef Offset
#include "Extras/OVR_Math.h"
namespace OculusVRUtil
{
@ -48,4 +53,6 @@ namespace OculusVRUtil
void convertMagnetometer(OVR::Vector3f& inMagnetometer, VectorF& outMagnetometer);
}
#define Offset TorqueOffset
#endif // _OCULUSVRUTIL_H_
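Where the compiler supports it, the same guard can be written with push_macro/pop_macro, which restores the original function-like macro exactly; this is a sketch, and MSVC, GCC and Clang all accept these pragmas.
#pragma push_macro("Offset")
#undef Offset
#include "Extras/OVR_Math.h"
#pragma pop_macro("Offset")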

View file

@ -25,20 +25,44 @@
#include "console/consoleTypes.h"
class GameConnection;
class GuiCanvas;
// Defines a custom display device that requires particular rendering settings
// in order for a scene to display correctly.
/// Defines the basic display pose common to most display devices
typedef struct DisplayPose
{
EulerF orientation;  ///< Direction device is facing
Point3F position;    ///< Relative position of device in view space
} IDevicePose;
class IDisplayDevice
{
public:
virtual bool providesYFOV() const = 0;
virtual F32 getYFOV() const = 0;
virtual bool providesFrameEyePose() const = 0;
virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const = 0;
virtual bool providesEyeOffset() const = 0;
virtual const Point3F& getEyeOffset() const = 0;
virtual bool providesEyeOffsets() const = 0;
/// Returns the per-eye offsets, not taking any position tracking info into account
virtual void getEyeOffsets(Point3F *dest) const = 0;
virtual bool providesFovPorts() const = 0;
virtual void getFovPorts(FovPort *out) const = 0;
virtual bool providesProjectionOffset() const = 0;
virtual const Point2F& getProjectionOffset() const = 0;
virtual void getStereoViewports(RectI *out) const = 0;
virtual void getStereoTargets(GFXTextureTarget **out) const = 0;
virtual void setDrawCanvas(GuiCanvas *canvas) = 0;
virtual void setCurrentConnection(GameConnection *connection) = 0;
virtual GameConnection* getCurrentConnection() = 0;
virtual void onStartFrame() = 0;
};
#endif // _IDISPLAYDEVICE_H_
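To show how a stereo render path is expected to consume this interface, here is a rough sketch. It is not code from this commit; the two-entry arrays (index 0 = left eye, 1 = right eye) and the function name are assumptions based on the accessors above.
// Hypothetical consumer of IDisplayDevice (sketch only).
void gatherStereoParameters(IDisplayDevice* display)
{
   if (!display)
      return;

   Point3F     eyeOffsets[2];
   FovPort     fovPorts[2];
   RectI       viewports[2];
   IDevicePose poses[2];

   if (display->providesEyeOffsets())
      display->getEyeOffsets(eyeOffsets);
   if (display->providesFovPorts())
      display->getFovPorts(fovPorts);
   display->getStereoViewports(viewports);

   if (display->providesFrameEyePose())
   {
      display->getFrameEyePose(&poses[0], 0);
      display->getFrameEyePose(&poses[1], 1);
   }

   // ...build per-eye frusta and view matrices from the data gathered above
}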

View file

@ -144,6 +144,20 @@ GFX_ImplementTextureProfile( PostFxTextureProfile,
GFXTextureProfile::Static | GFXTextureProfile::PreserveSize | GFXTextureProfile::NoMipmap,
GFXTextureProfile::NONE );
GFX_ImplementTextureProfile( VRTextureProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::PreserveSize |
GFXTextureProfile::RenderTarget |
GFXTextureProfile::NoMipmap,
GFXTextureProfile::NONE );
GFX_ImplementTextureProfile( VRDepthProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::PreserveSize |
GFXTextureProfile::RenderTarget |
GFXTextureProfile::NoMipmap |
GFXTextureProfile::ZTarget,
GFXTextureProfile::NONE );
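For reference, a profile like the two above is normally handed to the texture manager when allocating an eye render target. The sketch below is illustrative only: the handle names, sizes, formats, and descriptions are assumptions, not code from this commit.
// Hypothetical allocation of per-eye targets using the VR profiles (sketch only).
GFXTexHandle gVRColorTarget;
GFXTexHandle gVRDepthTarget;

void allocateEyeTargets(U32 width, U32 height)
{
   gVRColorTarget.set(width, height, GFXFormatR8G8B8A8, &VRTextureProfile,
                      "VR eye color target");
   gVRDepthTarget.set(width, height, GFXFormatD24S8, &VRDepthProfile,
                      "VR eye depth target");
}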
void PostEffect::EffectConst::set( const String &newVal )
{

View file

@ -101,6 +101,10 @@ struct PFXFrameState
///
GFX_DeclareTextureProfile( PostFxTextureProfile );
GFX_DeclareTextureProfile( VRTextureProfile );
GFX_DeclareTextureProfile( VRDepthProfile );
///
GFXDeclareVertexFormat( PFXVertex )
{

View file

@ -606,63 +606,58 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
RectI originalVP = GFX->getViewport();
Point2F projOffset = GFX->getCurrentProjectionOffset();
Point3F eyeOffset = GFX->getStereoEyeOffset();
const FovPort *currentFovPort = GFX->getSteroFovPort();
MatrixF inverseEyeTransforms[2];
// Calculate world transforms for eyes
inverseEyeTransforms[0] = params.query->eyeTransforms[0];
inverseEyeTransforms[1] = params.query->eyeTransforms[1];
inverseEyeTransforms[0].inverse();
inverseEyeTransforms[1].inverse();
Frustum originalFrustum = GFX->getFrustum();
// Render left half of display
RectI leftVP = originalVP;
leftVP.extent.x *= 0.5;
GFX->setViewport(leftVP);
GFX->activateStereoTarget(0);
GFX->setWorldMatrix(params.query->eyeTransforms[0]);
MatrixF leftWorldTrans(true);
leftWorldTrans.setPosition(Point3F(eyeOffset.x, eyeOffset.y, eyeOffset.z));
MatrixF leftWorld(params.query->cameraMatrix);
leftWorld.mulL(leftWorldTrans);
Frustum gfxFrustum = GFX->getFrustum();
gfxFrustum.setProjectionOffset(Point2F(projOffset.x, projOffset.y));
Frustum gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], inverseEyeTransforms[0]);
GFX->setFrustum(gfxFrustum);
setGFXMatrices( leftWorld );
setGFXMatrices( params.query->eyeTransforms[0] );
SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
SceneRenderState renderStateLeft( gClientSceneGraph, SPT_Reflect, cameraStateLeft );
renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
renderStateLeft.setSceneRenderField(0);
renderStateLeft.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
renderStateLeft.setDiffuseCameraTransform( params.query->cameraMatrix );
renderStateLeft.setDiffuseCameraTransform( params.query->eyeTransforms[0] );
renderStateLeft.disableAdvancedLightingBins(true);
gClientSceneGraph->renderSceneNoLights( &renderStateLeft, objTypeFlag );
// Render right half of display
RectI rightVP = originalVP;
rightVP.extent.x *= 0.5;
rightVP.point.x += rightVP.extent.x;
GFX->setViewport(rightVP);
GFX->activateStereoTarget(1);
GFX->setWorldMatrix(params.query->eyeTransforms[1]);
MatrixF rightWorldTrans(true);
rightWorldTrans.setPosition(Point3F(-eyeOffset.x, eyeOffset.y, eyeOffset.z));
MatrixF rightWorld(params.query->cameraMatrix);
rightWorld.mulL(rightWorldTrans);
gfxFrustum = GFX->getFrustum();
gfxFrustum.setProjectionOffset(Point2F(-projOffset.x, projOffset.y));
gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], inverseEyeTransforms[1]);
GFX->setFrustum(gfxFrustum);
setGFXMatrices( rightWorld );
setGFXMatrices( params.query->eyeTransforms[1] );
SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
SceneRenderState renderStateRight( gClientSceneGraph, SPT_Reflect, cameraStateRight );
renderStateRight.setSceneRenderStyle(SRS_SideBySide);
renderStateRight.setSceneRenderField(1);
renderStateRight.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
renderStateRight.setDiffuseCameraTransform( params.query->cameraMatrix );
renderStateRight.setDiffuseCameraTransform( params.query->eyeTransforms[1] );
renderStateRight.disableAdvancedLightingBins(true);
gClientSceneGraph->renderSceneNoLights( &renderStateRight, objTypeFlag );
// Restore previous values
gfxFrustum.clearProjectionOffset();
GFX->setFrustum(gfxFrustum);
GFX->setViewport(originalVP);
}
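The per-eye frusta above replace the old projection-offset math. As a standalone illustration of the same pattern (not part of this commit; the helper name is hypothetical), building one eye's frustum looks like:
// Hypothetical helper mirroring the per-eye frustum setup used above (sketch only).
Frustum buildEyeFrustum(const Frustum& base, const FovPort& fov,
                        const MatrixF& eyeWorldTransform)
{
   Frustum eyeFrustum = base;
   MathUtils::makeFovPortFrustum(&eyeFrustum, eyeFrustum.isOrtho(),
                                 eyeFrustum.getNearDist(), eyeFrustum.getFarDist(),
                                 fov, eyeWorldTransform);
   return eyeFrustum;
}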

View file

@ -36,6 +36,7 @@
#include "console/engineAPI.h"
#include "sim/netConnection.h"
#include "T3D/gameBase/gameConnection.h"
#include "math/mathUtils.h"
// For player object bounds workaround.
#include "T3D/player.h"
@ -236,26 +237,21 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
// Store previous values
RectI originalVP = GFX->getViewport();
MatrixF originalWorld = GFX->getWorldMatrix();
Frustum originalFrustum = GFX->getFrustum();
Point2F projOffset = GFX->getCurrentProjectionOffset();
Point3F eyeOffset = GFX->getStereoEyeOffset();
// Indicate that we're about to start a field
GFX->beginField();
const FovPort *currentFovPort = GFX->getSteroFovPort();
const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms();
// Render left half of display
RectI leftVP = originalVP;
leftVP.extent.x *= 0.5;
GFX->setViewport(leftVP);
GFX->activateStereoTarget(0);
GFX->beginField();
MatrixF leftWorldTrans(true);
leftWorldTrans.setPosition(Point3F(eyeOffset.x, eyeOffset.y, eyeOffset.z));
MatrixF leftWorld(originalWorld);
leftWorld.mulL(leftWorldTrans);
GFX->setWorldMatrix(leftWorld);
GFX->setWorldMatrix(worldEyeTransforms[0]);
Frustum gfxFrustum = GFX->getFrustum();
gfxFrustum.setProjectionOffset(Point2F(projOffset.x, projOffset.y));
Frustum gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], eyeTransforms[0]);
GFX->setFrustum(gfxFrustum);
SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
@ -266,25 +262,16 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone );
// Indicate that we've just finished a field
//GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(255,0,0), 1.0f, 0);
GFX->endField();
// Indicate that we're about to start a field
GFX->beginField();
// Render right half of display
RectI rightVP = originalVP;
rightVP.extent.x *= 0.5;
rightVP.point.x += rightVP.extent.x;
GFX->setViewport(rightVP);
GFX->activateStereoTarget(1);
GFX->beginField();
GFX->setWorldMatrix(worldEyeTransforms[1]);
MatrixF rightWorldTrans(true);
rightWorldTrans.setPosition(Point3F(-eyeOffset.x, eyeOffset.y, eyeOffset.z));
MatrixF rightWorld(originalWorld);
rightWorld.mulL(rightWorldTrans);
GFX->setWorldMatrix(rightWorld);
gfxFrustum = GFX->getFrustum();
gfxFrustum.setProjectionOffset(Point2F(-projOffset.x, projOffset.y));
gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], eyeTransforms[1]);
GFX->setFrustum(gfxFrustum);
SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
@ -295,12 +282,12 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone );
// Indicate that we've just finished a field
//GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(0,255,0), 1.0f, 0);
GFX->endField();
// Restore previous values
GFX->setWorldMatrix(originalWorld);
gfxFrustum.clearProjectionOffset();
GFX->setFrustum(gfxFrustum);
GFX->setFrustum(originalFrustum);
GFX->setViewport(originalVP);
}
else

View file

@ -62,11 +62,12 @@ function GuiCanvas::checkCursor(%this)
if ((%control.noCursor $= "") || !%control.noCursor)
{
showCursor();
return;
return true;
}
}
// If we get here, every control requested a hidden cursor, so we oblige.
hideCursor();
return false;
}
//---------------------------------------------------------------------------------------------

View file

@ -243,9 +243,9 @@ function metrics( %expr )
if( %metricsExpr !$= "" )
{
Canvas.pushDialog( FrameOverlayGui, 1000 );
$GameCanvas.pushDialog( FrameOverlayGui, 1000 );
TextOverlayControl.setValue( %metricsExpr );
}
else
Canvas.popDialog(FrameOverlayGui);
$GameCanvas.popDialog(FrameOverlayGui);
}

View file

@ -24,12 +24,103 @@
if(!isFunction(isOculusVRDeviceActive))
return;
function setupOculusActionMaps()
{
if (isObject(OculusWarningMap))
return;
new ActionMap(OculusWarningMap);
new ActionMap(OculusCanvasMap);
OculusWarningMap.bind(keyboard, space, dismissOculusVRWarnings);
OculusCanvasMap.bind( mouse, xaxis, oculusYaw );
OculusCanvasMap.bind( mouse, yaxis, oculusPitch );
OculusCanvasMap.bind( mouse, button0, oculusClick );
}
function oculusYaw(%val)
{
OculusCanvas.cursorNudge(%val * 0.10, 0);
}
function oculusPitch(%val)
{
OculusCanvas.cursorNudge(0, %val * 0.10);
}
function oculusClick(%active)
{
OculusCanvas.cursorClick(0, %active);
}
function GuiOffscreenCanvas::checkCursor(%this)
{
%count = %this.getCount();
for(%i = 0; %i < %count; %i++)
{
%control = %this.getObject(%i);
if ((%control.noCursor $= "") || !%control.noCursor)
{
%this.cursorOn();
return true;
}
}
// If we get here, every control requested a hidden cursor, so we oblige.
%this.cursorOff();
return false;
}
function GuiOffscreenCanvas::pushDialog(%this, %ctrl, %layer, %center)
{
Parent::pushDialog(%this, %ctrl, %layer, %center);
%cursorVisible = %this.checkCursor();
if (%cursorVisible)
{
echo("OffscreenCanvas visible");
OculusCanvasMap.pop();
OculusCanvasMap.push();
}
else
{
echo("OffscreenCanvas not visible");
OculusCanvasMap.pop();
}
}
function GuiOffscreenCanvas::popDialog(%this, %ctrl)
{
Parent::popDialog(%this, %ctrl);
%cursorVisible = %this.checkCursor();
if (%cursorVisible)
{
echo("OffscreenCanvas visible");
OculusCanvasMap.pop();
OculusCanvasMap.push();
}
else
{
echo("OffscreenCanvas not visible");
OculusCanvasMap.pop();
}
}
//-----------------------------------------------------------------------------
function oculusSensorMetricsCallback()
{
return " | OVR Sensor 0 |" @
" rot: " @ getOVRSensorEulerRotation(0);
return ovrDumpMetrics(0);
}
//-----------------------------------------------------------------------------
function onOculusStatusUpdate(%status)
{
$LastOculusTrackingState = %status;
}
//-----------------------------------------------------------------------------
@ -60,23 +151,34 @@ function enableOculusVRDisplay(%gameConnection, %trueStereoRendering)
{
setOVRHMDAsGameConnectionDisplayDevice(%gameConnection);
PlayGui.renderStyle = "stereo side by side";
if(%trueStereoRendering)
setOptimalOVRCanvasSize(Canvas);
if (!isObject(OculusCanvas))
{
if($pref::OculusVR::UseChromaticAberrationCorrection)
{
OVRBarrelDistortionChromaPostFX.isEnabled = true;
}
else
{
OVRBarrelDistortionPostFX.isEnabled = true;
}
new GuiOffscreenCanvas(OculusCanvas) {
targetSize = "512 512";
targetName = "oculusCanvas";
dynamicTarget = true;
};
}
else
if (!isObject(OculusVROverlay))
{
OVRBarrelDistortionMonoPostFX.isEnabled = true;
exec("./oculusVROverlay.gui");
}
OculusCanvas.setContent(OculusVROverlay);
OculusCanvas.setCursor(DefaultCursor);
PlayGui.setStereoGui(OculusCanvas);
OculusCanvas.setCursorPos("128 128");
OculusCanvas.cursorOff();
$GameCanvas = OculusCanvas;
%ext = Canvas.getExtent();
$OculusMouseScaleX = 512.0 / 1920.0;
$OculusMouseScaleY = 512.0 / 1060.0;
//$gfx::wireframe = true;
// Reset all sensors
ovrResetAllSensors();
}
@ -85,11 +187,15 @@ function enableOculusVRDisplay(%gameConnection, %trueStereoRendering)
// and barrel distortion for the Rift.
function disableOculusVRDisplay(%gameConnection)
{
%gameConnection.clearDisplayDevice();
OculusCanvas.popDialog();
OculusWarningMap.pop();
$GameCanvas = Canvas;
if (isObject(%gameConnection))
{
%gameConnection.clearDisplayDevice();
}
PlayGui.renderStyle = "standard";
OVRBarrelDistortionPostFX.isEnabled = false;
OVRBarrelDistortionChromaPostFX.isEnabled = false;
OVRBarrelDistortionMonoPostFX.isEnabled = false;
}
// Helper function to set the standard Rift control scheme. You could place
@ -97,7 +203,7 @@ function disableOculusVRDisplay(%gameConnection)
// you call enableOculusVRDisplay().
function setStandardOculusVRControlScheme(%gameConnection)
{
if(isOVRHMDSimulated(0))
if($OculusVR::SimulateInput)
{
// We are simulating a HMD so allow the mouse and gamepad to control
// both yaw and pitch.
@ -131,3 +237,12 @@ function resetOculusVRSensors()
{
ovrResetAllSensors();
}
function dismissOculusVRWarnings(%value)
{
//if (%value)
//{
ovrDismissWarnings();
OculusWarningMap.pop();
//}
}

View file

@ -0,0 +1,19 @@
//--- OBJECT WRITE BEGIN ---
%guiContent = singleton GuiControl(OculusVROverlay) {
canSaveDynamicFields = "0";
Enabled = "1";
isContainer = "1";
Profile = "GuiContentProfile";
HorizSizing = "width";
VertSizing = "height";
Position = "0 0";
Extent = "512 512";
MinExtent = "8 8";
canSave = "1";
Visible = "1";
tooltipprofile = "GuiToolTipProfile";
hovertime = "1000";
useVariable = "0";
tile = "0";
};
//--- OBJECT WRITE END ---

View file

@ -20,6 +20,7 @@
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
$GameCanvas = 0;
// Cleanup Dialog created by 'core'
if( isObject( MessagePopupDlg ) )
@ -76,7 +77,7 @@ new SFXProfile(messageBoxBeep)
//---------------------------------------------------------------------------------------------
function messageCallback(%dlg, %callback)
{
Canvas.popDialog(%dlg);
$GameCanvas.popDialog(%dlg);
eval(%callback);
}
@ -89,7 +90,7 @@ function IOCallback(%dlg, %callback)
%callback = strreplace(%callback, "#", %text);
eval(%callback);
Canvas.popDialog(%dlg);
$GameCanvas.popDialog(%dlg);
}
//---------------------------------------------------------------------------------------------
@ -134,7 +135,7 @@ function MBSetText(%text, %frame, %msg)
function MessageBoxOK(%title, %message, %callback)
{
MBOKFrame.text = %title;
Canvas.pushDialog(MessageBoxOKDlg);
$GameCanvas.pushDialog(MessageBoxOKDlg);
MBSetText(MBOKText, MBOKFrame, %message);
MessageBoxOKDlg.callback = %callback;
}
@ -147,7 +148,7 @@ function MessageBoxOKDlg::onSleep( %this )
function MessageBoxOKCancel(%title, %message, %callback, %cancelCallback)
{
MBOKCancelFrame.text = %title;
Canvas.pushDialog(MessageBoxOKCancelDlg);
$GameCanvas.pushDialog(MessageBoxOKCancelDlg);
MBSetText(MBOKCancelText, MBOKCancelFrame, %message);
MessageBoxOKCancelDlg.callback = %callback;
MessageBoxOKCancelDlg.cancelCallback = %cancelCallback;
@ -169,7 +170,7 @@ function MessageBoxOKCancelDetails(%title, %message, %details, %callback, %cance
MBOKCancelDetailsFrame.setText( %title );
Canvas.pushDialog(MessageBoxOKCancelDetailsDlg);
$GameCanvas.pushDialog(MessageBoxOKCancelDetailsDlg);
MBSetText(MBOKCancelDetailsText, MBOKCancelDetailsFrame, %message);
MBOKCancelDetailsInfoText.setText(%details);
@ -233,7 +234,7 @@ function MessageBoxYesNo(%title, %message, %yesCallback, %noCallback)
{
MBYesNoFrame.text = %title;
MessageBoxYesNoDlg.profile = "GuiOverlayProfile";
Canvas.pushDialog(MessageBoxYesNoDlg);
$GameCanvas.pushDialog(MessageBoxYesNoDlg);
MBSetText(MBYesNoText, MBYesNoFrame, %message);
MessageBoxYesNoDlg.yesCallBack = %yesCallback;
MessageBoxYesNoDlg.noCallback = %noCallBack;
@ -243,7 +244,7 @@ function MessageBoxYesNoCancel(%title, %message, %yesCallback, %noCallback, %can
{
MBYesNoCancelFrame.text = %title;
MessageBoxYesNoDlg.profile = "GuiOverlayProfile";
Canvas.pushDialog(MessageBoxYesNoCancelDlg);
$GameCanvas.pushDialog(MessageBoxYesNoCancelDlg);
MBSetText(MBYesNoCancelText, MBYesNoCancelFrame, %message);
MessageBoxYesNoCancelDlg.yesCallBack = %yesCallback;
MessageBoxYesNoCancelDlg.noCallback = %noCallBack;
@ -264,7 +265,7 @@ function MessagePopup(%title, %message, %delay)
{
// Currently two lines max.
MessagePopFrame.setText(%title);
Canvas.pushDialog(MessagePopupDlg);
$GameCanvas.pushDialog(MessagePopupDlg);
MBSetText(MessagePopText, MessagePopFrame, %message);
if (%delay !$= "")
schedule(%delay, 0, CloseMessagePopup);
@ -279,7 +280,7 @@ function MessagePopup(%title, %message, %delay)
function IODropdown(%title, %message, %simgroup, %callback, %cancelCallback)
{
IODropdownFrame.text = %title;
Canvas.pushDialog(IODropdownDlg);
$GameCanvas.pushDialog(IODropdownDlg);
MBSetText(IODropdownText, IODropdownFrame, %message);
if(isObject(%simgroup))
@ -305,7 +306,7 @@ function IODropdownDlg::onSleep( %this )
function CloseMessagePopup()
{
Canvas.popDialog(MessagePopupDlg);
$GameCanvas.popDialog(MessagePopupDlg);
}
//---------------------------------------------------------------------------------------------

View file

@ -43,6 +43,8 @@ function createCanvas(%windowTitle)
displayWindow = $platform !$= "windows";
};
$GameCanvas = %foo;
// Set the window title
if (isObject(Canvas))
Canvas.setWindowTitle(getEngineName() @ " - " @ $appName);

View file

@ -42,6 +42,8 @@ function createCanvas(%windowTitle)
{
displayWindow = $platform !$= "windows";
};
$GameCanvas = %foo;
// Set the window title
if (isObject(Canvas))

View file

@ -62,11 +62,12 @@ function GuiCanvas::checkCursor(%this)
if ((%control.noCursor $= "") || !%control.noCursor)
{
showCursor();
return;
return true;
}
}
// If we get here, every control requested a hidden cursor, so we oblige.
hideCursor();
return false;
}
//---------------------------------------------------------------------------------------------

View file

@ -243,9 +243,9 @@ function metrics( %expr )
if( %metricsExpr !$= "" )
{
Canvas.pushDialog( FrameOverlayGui, 1000 );
$GameCanvas.pushDialog( FrameOverlayGui, 1000 );
TextOverlayControl.setValue( %metricsExpr );
}
else
Canvas.popDialog(FrameOverlayGui);
$GameCanvas.popDialog(FrameOverlayGui);
}

View file

@ -24,12 +24,103 @@
if(!isFunction(isOculusVRDeviceActive))
return;
function setupOculusActionMaps()
{
if (isObject(OculusWarningMap))
return;
new ActionMap(OculusWarningMap);
new ActionMap(OculusCanvasMap);
OculusWarningMap.bind(keyboard, space, dismissOculusVRWarnings);
OculusCanvasMap.bind( mouse, xaxis, oculusYaw );
OculusCanvasMap.bind( mouse, yaxis, oculusPitch );
OculusCanvasMap.bind( mouse, button0, oculusClick );
}
function oculusYaw(%val)
{
OculusCanvas.cursorNudge(%val * 0.10, 0);
}
function oculusPitch(%val)
{
OculusCanvas.cursorNudge(0, %val * 0.10);
}
function oculusClick(%active)
{
OculusCanvas.cursorClick(0, %active);
}
function GuiOffscreenCanvas::checkCursor(%this)
{
%count = %this.getCount();
for(%i = 0; %i < %count; %i++)
{
%control = %this.getObject(%i);
if ((%control.noCursor $= "") || !%control.noCursor)
{
%this.cursorOn();
return true;
}
}
// If we get here, every control requested a hidden cursor, so we oblige.
%this.cursorOff();
return false;
}
function GuiOffscreenCanvas::pushDialog(%this, %ctrl, %layer, %center)
{
Parent::pushDialog(%this, %ctrl, %layer, %center);
%cursorVisible = %this.checkCursor();
if (%cursorVisible)
{
echo("OffscreenCanvas visible");
OculusCanvasMap.pop();
OculusCanvasMap.push();
}
else
{
echo("OffscreenCanvas not visible");
OculusCanvasMap.pop();
}
}
function GuiOffscreenCanvas::popDialog(%this, %ctrl)
{
Parent::popDialog(%this, %ctrl);
%cursorVisible = %this.checkCursor();
if (%cursorVisible)
{
echo("OffscreenCanvas visible");
OculusCanvasMap.pop();
OculusCanvasMap.push();
}
else
{
echo("OffscreenCanvas not visible");
OculusCanvasMap.pop();
}
}
//-----------------------------------------------------------------------------
function oculusSensorMetricsCallback()
{
return " | OVR Sensor 0 |" @
" rot: " @ getOVRSensorEulerRotation(0);
return ovrDumpMetrics(0);
}
//-----------------------------------------------------------------------------
function onOculusStatusUpdate(%status)
{
$LastOculusTrackingState = %status;
}
//-----------------------------------------------------------------------------
@ -60,23 +151,34 @@ function enableOculusVRDisplay(%gameConnection, %trueStereoRendering)
{
setOVRHMDAsGameConnectionDisplayDevice(%gameConnection);
PlayGui.renderStyle = "stereo side by side";
if(%trueStereoRendering)
setOptimalOVRCanvasSize(Canvas);
if (!isObject(OculusCanvas))
{
if($pref::OculusVR::UseChromaticAberrationCorrection)
{
OVRBarrelDistortionChromaPostFX.isEnabled = true;
}
else
{
OVRBarrelDistortionPostFX.isEnabled = true;
}
new GuiOffscreenCanvas(OculusCanvas) {
targetSize = "512 512";
targetName = "oculusCanvas";
dynamicTarget = true;
};
}
else
if (!isObject(OculusVROverlay))
{
OVRBarrelDistortionMonoPostFX.isEnabled = true;
exec("./oculusVROverlay.gui");
}
OculusCanvas.setContent(OculusVROverlay);
OculusCanvas.setCursor(DefaultCursor);
PlayGui.setStereoGui(OculusCanvas);
OculusCanvas.setCursorPos("128 128");
OculusCanvas.cursorOff();
$GameCanvas = OculusCanvas;
%ext = Canvas.getExtent();
$OculusMouseScaleX = 512.0 / 1920.0;
$OculusMouseScaleY = 512.0 / 1060.0;
//$gfx::wireframe = true;
// Reset all sensors
ovrResetAllSensors();
}
@ -85,11 +187,15 @@ function enableOculusVRDisplay(%gameConnection, %trueStereoRendering)
// and barrel distortion for the Rift.
function disableOculusVRDisplay(%gameConnection)
{
%gameConnection.clearDisplayDevice();
OculusCanvas.popDialog();
OculusWarningMap.pop();
$GameCanvas = Canvas;
if (isObject(%gameConnection))
{
%gameConnection.clearDisplayDevice();
}
PlayGui.renderStyle = "standard";
OVRBarrelDistortionPostFX.isEnabled = false;
OVRBarrelDistortionChromaPostFX.isEnabled = false;
OVRBarrelDistortionMonoPostFX.isEnabled = false;
}
// Helper function to set the standard Rift control scheme. You could place
@ -97,7 +203,7 @@ function disableOculusVRDisplay(%gameConnection)
// you call enableOculusVRDisplay().
function setStandardOculusVRControlScheme(%gameConnection)
{
if(isOVRHMDSimulated(0))
if($OculusVR::SimulateInput)
{
// We are simulating a HMD so allow the mouse and gamepad to control
// both yaw and pitch.
@ -131,3 +237,12 @@ function resetOculusVRSensors()
{
ovrResetAllSensors();
}
function dismissOculusVRWarnings(%value)
{
//if (%value)
//{
ovrDismissWarnings();
OculusWarningMap.pop();
//}
}

View file

@ -0,0 +1,19 @@
//--- OBJECT WRITE BEGIN ---
%guiContent = singleton GuiControl(OculusVROverlay) {
canSaveDynamicFields = "0";
Enabled = "1";
isContainer = "1";
Profile = "GuiContentProfile";
HorizSizing = "width";
VertSizing = "height";
Position = "0 0";
Extent = "512 512";
MinExtent = "8 8";
canSave = "1";
Visible = "1";
tooltipprofile = "GuiToolTipProfile";
hovertime = "1000";
useVariable = "0";
tile = "0";
};
//--- OBJECT WRITE END ---

View file

@ -20,6 +20,7 @@
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
$GameCanvas = 0;
// Cleanup Dialog created by 'core'
if( isObject( MessagePopupDlg ) )
@ -76,7 +77,7 @@ new SFXProfile(messageBoxBeep)
//---------------------------------------------------------------------------------------------
function messageCallback(%dlg, %callback)
{
Canvas.popDialog(%dlg);
$GameCanvas.popDialog(%dlg);
eval(%callback);
}
@ -89,7 +90,7 @@ function IOCallback(%dlg, %callback)
%callback = strreplace(%callback, "#", %text);
eval(%callback);
Canvas.popDialog(%dlg);
$GameCanvas.popDialog(%dlg);
}
//---------------------------------------------------------------------------------------------
@ -134,7 +135,7 @@ function MBSetText(%text, %frame, %msg)
function MessageBoxOK(%title, %message, %callback)
{
MBOKFrame.text = %title;
Canvas.pushDialog(MessageBoxOKDlg);
$GameCanvas.pushDialog(MessageBoxOKDlg);
MBSetText(MBOKText, MBOKFrame, %message);
MessageBoxOKDlg.callback = %callback;
}
@ -147,7 +148,7 @@ function MessageBoxOKDlg::onSleep( %this )
function MessageBoxOKCancel(%title, %message, %callback, %cancelCallback)
{
MBOKCancelFrame.text = %title;
Canvas.pushDialog(MessageBoxOKCancelDlg);
$GameCanvas.pushDialog(MessageBoxOKCancelDlg);
MBSetText(MBOKCancelText, MBOKCancelFrame, %message);
MessageBoxOKCancelDlg.callback = %callback;
MessageBoxOKCancelDlg.cancelCallback = %cancelCallback;
@ -169,7 +170,7 @@ function MessageBoxOKCancelDetails(%title, %message, %details, %callback, %cance
MBOKCancelDetailsFrame.setText( %title );
Canvas.pushDialog(MessageBoxOKCancelDetailsDlg);
$GameCanvas.pushDialog(MessageBoxOKCancelDetailsDlg);
MBSetText(MBOKCancelDetailsText, MBOKCancelDetailsFrame, %message);
MBOKCancelDetailsInfoText.setText(%details);
@ -233,7 +234,7 @@ function MessageBoxYesNo(%title, %message, %yesCallback, %noCallback)
{
MBYesNoFrame.text = %title;
MessageBoxYesNoDlg.profile = "GuiOverlayProfile";
Canvas.pushDialog(MessageBoxYesNoDlg);
$GameCanvas.pushDialog(MessageBoxYesNoDlg);
MBSetText(MBYesNoText, MBYesNoFrame, %message);
MessageBoxYesNoDlg.yesCallBack = %yesCallback;
MessageBoxYesNoDlg.noCallback = %noCallBack;
@ -243,7 +244,7 @@ function MessageBoxYesNoCancel(%title, %message, %yesCallback, %noCallback, %can
{
MBYesNoCancelFrame.text = %title;
MessageBoxYesNoDlg.profile = "GuiOverlayProfile";
Canvas.pushDialog(MessageBoxYesNoCancelDlg);
$GameCanvas.pushDialog(MessageBoxYesNoCancelDlg);
MBSetText(MBYesNoCancelText, MBYesNoCancelFrame, %message);
MessageBoxYesNoCancelDlg.yesCallBack = %yesCallback;
MessageBoxYesNoCancelDlg.noCallback = %noCallBack;
@ -264,7 +265,7 @@ function MessagePopup(%title, %message, %delay)
{
// Currently two lines max.
MessagePopFrame.setText(%title);
Canvas.pushDialog(MessagePopupDlg);
$GameCanvas.pushDialog(MessagePopupDlg);
MBSetText(MessagePopText, MessagePopFrame, %message);
if (%delay !$= "")
schedule(%delay, 0, CloseMessagePopup);
@ -279,7 +280,7 @@ function MessagePopup(%title, %message, %delay)
function IODropdown(%title, %message, %simgroup, %callback, %cancelCallback)
{
IODropdownFrame.text = %title;
Canvas.pushDialog(IODropdownDlg);
$GameCanvas.pushDialog(IODropdownDlg);
MBSetText(IODropdownText, IODropdownFrame, %message);
if(isObject(%simgroup))
@ -305,7 +306,7 @@ function IODropdownDlg::onSleep( %this )
function CloseMessagePopup()
{
Canvas.popDialog(MessagePopupDlg);
$GameCanvas.popDialog(MessagePopupDlg);
}
//---------------------------------------------------------------------------------------------

View file

@ -42,6 +42,8 @@ function createCanvas(%windowTitle)
{
displayWindow = $platform !$= "windows";
};
$GameCanvas = %foo;
// Set the window title
if (isObject(Canvas))

View file

@ -42,6 +42,8 @@ function createCanvas(%windowTitle)
{
displayWindow = $platform !$= "windows";
};
$GameCanvas = %foo;
// Set the window title
if (isObject(Canvas))

View file

@ -71,8 +71,8 @@ beginModule( 'oculusVR' );
addIncludePath( $OCULUSVR_SDK_PATH . "/LibOVR/Src" );
// Libs
addProjectLibDir( $OCULUSVR_SDK_PATH . "/LibOVR/Lib/Win32" );
addProjectLibInput( "libovr.lib", "libovrd.lib" );
addProjectLibDir( $OCULUSVR_SDK_PATH . "/LibOVR/Lib/Windows/Win32/Release/VS2010" );
addProjectLibInput( "libovr.lib", "libovr.lib" );
}
endModule();