Mirror of https://github.com/TorqueGameEngines/Torque3D.git (synced 2026-01-19 20:24:49 +00:00)

Commit ba91478fad: Baseline working openvr code
Parent: e239d106f5
@@ -393,44 +393,6 @@ void Camera::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, Mat
   }
}

DisplayPose Camera::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
{
   // NOTE: this is intended to be similar to updateMove
   DisplayPose outPose;
   outPose.orientation = EulerF(0,0,0);
   outPose.position = inPose.position;

   // Pitch
   outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);

   // Constrain the range of mRot.x
   while (outPose.orientation.x < -M_PI_F)
      outPose.orientation.x += M_2PI_F;
   while (outPose.orientation.x > M_PI_F)
      outPose.orientation.x -= M_2PI_F;

   // Yaw
   outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);

   // Constrain the range of mRot.z
   while (outPose.orientation.z < -M_PI_F)
      outPose.orientation.z += M_2PI_F;
   while (outPose.orientation.z > M_PI_F)
      outPose.orientation.z -= M_2PI_F;

   // Bank
   if (mDataBlock->cameraCanBank)
   {
      outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
   }

   // Constrain the range of mRot.y
   while (outPose.orientation.y > M_PI_F)
      outPose.orientation.y -= M_2PI_F;

   return outPose;
}

//----------------------------------------------------------------------------

F32 Camera::getCameraFov()
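The while-loop wrapping in the removed function above keeps each delta angle in [-M_PI_F, M_PI_F]. A minimal standalone sketch of that normalization step (illustrative helper, not part of the engine source):

// Wrap an angle in radians into the range [-PI, PI].
inline F32 wrapAngle(F32 angle)
{
   while (angle < -M_PI_F)
      angle += M_2PI_F;
   while (angle > M_PI_F)
      angle -= M_2PI_F;
   return angle;
}
// e.g. outPose.orientation.x = wrapAngle(inPose.orientation.x - mLastAbsolutePitch);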
@@ -237,7 +237,6 @@ class Camera: public ShapeBase
   virtual void interpolateTick( F32 delta);
   virtual void getCameraTransform( F32* pos,MatrixF* mat );
   virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
   virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);

   virtual void writePacketData( GameConnection* conn, BitStream* stream );
   virtual void readPacketData( GameConnection* conn, BitStream* stream );
@@ -1,6 +1,7 @@
#include "T3D/gameBase/extended/extendedMove.h"
#include "core/stream/bitStream.h"
#include "math/mathIO.h"
#include "math/mAngAxis.h"
#include "core/module.h"
#include "console/consoleTypes.h"
#include "core/strings/stringFunctions.h"
@@ -268,7 +269,7 @@ void ExtendedMove::clamp()
      crotX[i] = CLAMPROT(rotX[i]);
      crotY[i] = CLAMPROT(rotY[i]);
      crotZ[i] = CLAMPROT(rotZ[i]);
      crotW[i] = CLAMPROT(rotW[i]);
      crotW[i] = CLAMPROT(rotW[i] / M_2PI_F);
   }
}

@@ -293,7 +294,7 @@ void ExtendedMove::unclamp()
      rotX[i] = UNCLAMPROT(crotX[i]);
      rotY[i] = UNCLAMPROT(crotY[i]);
      rotZ[i] = UNCLAMPROT(crotZ[i]);
      rotW[i] = UNCLAMPROT(crotW[i]);
      rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F;
   }
}
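The clamp change above follows from the rotation now travelling as angle-axis rather than a quaternion: rotX/rotY/rotZ stay unit-length axis components, but rotW is an angle in radians that can reach M_2PI_F, so it is scaled down before CLAMPROT and scaled back up after UNCLAMPROT. A rough sketch of the idea, assuming the clamp macros expect values in roughly the same normalized range as the axis components (hypothetical helpers, not the engine's macros):

// Round trip for the angle component only.
F32 packAngle(F32 angleRadians) { return angleRadians / M_2PI_F; } // roughly [0, 1]
F32 unpackAngle(F32 packed)     { return packed * M_2PI_F; }       // back to radians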
@@ -75,11 +75,11 @@ bool ExtendedMoveList::getNextExtMove( ExtendedMove &curMove )
      else
      {
         //Rotation is passed in as an Angle Axis in degrees. We need to convert this into a Quat.
         QuatF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i]));
         curMove.rotX[i] = q.x;
         curMove.rotY[i] = q.y;
         curMove.rotZ[i] = q.z;
         curMove.rotW[i] = q.w;
         AngAxisF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i]));
         curMove.rotX[i] = q.axis.x;
         curMove.rotY[i] = q.axis.y;
         curMove.rotZ[i] = q.axis.z;
         curMove.rotW[i] = q.angle;
      }
   }
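Both halves of the hunk above start from the same axis-plus-angle input; the old path converted it into a quaternion before filling the move, the new path forwards the axis and angle directly. For reference, the quaternion the old path produced is the standard axis-angle form (plain math sketch, illustrative names only):

// q = (axis * sin(angle/2), cos(angle/2)) for a unit axis and an angle in radians.
void angleAxisToQuat(const Point3F &axis, F32 angle, F32 &x, F32 &y, F32 &z, F32 &w)
{
   const F32 s = mSin(angle * 0.5f);
   x = axis.x * s;
   y = axis.y * s;
   z = axis.z * s;
   w = mCos(angle * 0.5f);
}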
@@ -355,6 +355,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
      query->eyeOffset[1] = Point3F::Zero;
      query->hasFovPort = false;
      query->hasStereoTargets = false;
      query->displayDevice = NULL;

      F32 cameraFov = 0.0f;
      bool fovSet = false;

@@ -364,6 +365,9 @@ bool GameProcessCameraQuery(CameraQuery *query)
      if(!gEditingMission && connection->hasDisplayDevice())
      {
         IDisplayDevice* display = connection->getDisplayDevice();

         query->displayDevice = display;

         // Note: all eye values are invalid until this is called
         display->setDrawCanvas(query->drawCanvas);
@@ -56,6 +56,7 @@
#include "T3D/decal/decalManager.h"
#include "T3D/decal/decalData.h"
#include "materials/baseMatInstance.h"
#include "math/mathUtils.h"

#ifdef TORQUE_EXTENDED_MOVE
#include "T3D/gameBase/extended/extendedMove.h"

@@ -2489,6 +2490,8 @@ void Player::allowAllPoses()
   mAllowSwimming = true;
}

AngAxisF gPlayerMoveRot;

void Player::updateMove(const Move* move)
{
   delta.move = *move;

@@ -2531,6 +2534,7 @@ void Player::updateMove(const Move* move)
   delta.headVec = mHead;

   bool doStandardMove = true;
   bool absoluteDelta = false;
   GameConnection* con = getControllingClient();

#ifdef TORQUE_EXTENDED_MOVE
@@ -2618,6 +2622,27 @@ void Player::updateMove(const Move* move)
            while (mHead.y > M_PI_F)
               mHead.y -= M_2PI_F;
         }
         else
         {
            // Orient the player so we are looking towards the required position, ignoring any banking
            AngAxisF moveRot(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
            MatrixF trans(1);
            moveRot.setMatrix(&trans);

            Point3F vecForward(0, 1, 0);
            Point3F orient;
            EulerF rot;
            trans.mulV(vecForward);

            F32 yawAng;
            F32 pitchAng;
            MathUtils::getAnglesFromVector(vecForward, yawAng, pitchAng);
            mRot.z = yawAng;
            mHead = EulerF(0);
            mHead.x = -pitchAng;

            absoluteDelta = true;
         }
      }
#endif
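The block above reduces a full device rotation to yaw and pitch by rotating the forward axis (0, 1, 0) through the move's angle-axis rotation and reading the resulting direction back as angles; any roll component is intentionally dropped. One common way to recover heading and elevation from such a direction vector, as a plain-math sketch (illustrative only, not necessarily the exact convention of MathUtils::getAnglesFromVector):

// Heading about +Z and elevation from the XY plane, for a forward vector fwd.
void anglesFromForward(const Point3F &fwd, F32 &yaw, F32 &pitch)
{
   yaw   = mAtan2(fwd.x, fwd.y);
   pitch = mAtan2(fwd.z, mSqrt(fwd.x*fwd.x + fwd.y*fwd.y));
}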
@@ -2666,6 +2691,13 @@ void Player::updateMove(const Move* move)

   delta.head = mHead;
   delta.headVec -= mHead;

   if (absoluteDelta)
   {
      delta.headVec = Point3F(0, 0, 0);
      delta.rotVec = Point3F(0, 0, 0);
   }

   for(U32 i=0; i<3; ++i)
   {
      if (delta.headVec[i] > M_PI_F)
@@ -5589,58 +5621,6 @@ void Player::getMuzzleTransform(U32 imageSlot,MatrixF* mat)
   *mat = nmat;
}

DisplayPose Player::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
{
   // NOTE: this is intended to be similar to updateMove
   DisplayPose outPose;
   outPose.orientation = getRenderTransform().toEuler();
   outPose.position = inPose.position;

   if (con && con->getControlSchemeAbsoluteRotation())
   {
      // Pitch
      outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);

      // Constrain the range of mRot.x
      while (outPose.orientation.x < -M_PI_F)
         outPose.orientation.x += M_2PI_F;
      while (outPose.orientation.x > M_PI_F)
         outPose.orientation.x -= M_2PI_F;

      // Yaw

      // Rotate (heading) head or body?
      if ((isMounted() && getMountNode() == 0) || (con && !con->isFirstPerson()))
      {
         // Rotate head
         outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
      }
      else
      {
         // Rotate body
         outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
      }

      // Constrain the range of mRot.z
      while (outPose.orientation.z < 0.0f)
         outPose.orientation.z += M_2PI_F;
      while (outPose.orientation.z > M_2PI_F)
         outPose.orientation.z -= M_2PI_F;

      // Bank
      if (mDataBlock->cameraCanBank)
      {
         outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
      }

      // Constrain the range of mRot.y
      while (outPose.orientation.y > M_PI_F)
         outPose.orientation.y -= M_2PI_F;
   }

   return outPose;
}

void Player::getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat)
{
   disableHeadZCalc();
@@ -686,7 +686,6 @@ public:
   void getEyeBaseTransform(MatrixF* mat, bool includeBank);
   void getRenderEyeTransform(MatrixF* mat);
   void getRenderEyeBaseTransform(MatrixF* mat, bool includeBank);
   virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
   void getCameraParameters(F32 *min, F32 *max, Point3F *offset, MatrixF *rot);
   void getMuzzleTransform(U32 imageSlot,MatrixF* mat);
   void getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat);
@@ -1992,9 +1992,8 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId,
   Point3F eyePos;
   Point3F rotEyePos;

   DisplayPose inPose;
   displayDevice->getFrameEyePose(&inPose, eyeId);
   DisplayPose newPose = calcCameraDeltaPose(displayDevice->getCurrentConnection(), inPose);
   DisplayPose newPose;
   displayDevice->getFrameEyePose(&newPose, eyeId);

   // Ok, basically we just need to add on newPose to the camera transform
   // NOTE: currently we dont support third-person camera in this mode
@@ -2004,57 +2003,15 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId,

   QuatF baserot = cameraTransform;
   QuatF qrot = QuatF(newPose.orientation);
   QuatF concatRot;
   concatRot.mul(baserot, qrot);
   concatRot.setMatrix(&temp);
   temp.setPosition(cameraTransform.getPosition() + concatRot.mulP(newPose.position, &rotEyePos));
   //QuatF concatRot;
   //concatRot.mul(baserot, qrot);
   qrot.setMatrix(&temp);

   temp.setPosition(cameraTransform.getPosition() + qrot.mulP(newPose.position, &rotEyePos));

   *outMat = temp;
}

DisplayPose ShapeBase::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
{
   // NOTE: this is intended to be similar to updateMove
   // WARNING: does not take into account any move values

   DisplayPose outPose;
   outPose.orientation = getRenderTransform().toEuler();
   outPose.position = inPose.position;

   if (con && con->getControlSchemeAbsoluteRotation())
   {
      // Pitch
      outPose.orientation.x = inPose.orientation.x;

      // Constrain the range of mRot.x
      while (outPose.orientation.x < -M_PI_F)
         outPose.orientation.x += M_2PI_F;
      while (outPose.orientation.x > M_PI_F)
         outPose.orientation.x -= M_2PI_F;

      // Yaw
      outPose.orientation.z = inPose.orientation.z;

      // Constrain the range of mRot.z
      while (outPose.orientation.z < -M_PI_F)
         outPose.orientation.z += M_2PI_F;
      while (outPose.orientation.z > M_PI_F)
         outPose.orientation.z -= M_2PI_F;

      // Bank
      if (mDataBlock->cameraCanBank)
      {
         outPose.orientation.y = inPose.orientation.y;
      }

      // Constrain the range of mRot.y
      while (outPose.orientation.y > M_PI_F)
         outPose.orientation.y -= M_2PI_F;
   }

   return outPose;
}

void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot)
{
   *min = mDataBlock->cameraMinDist;
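Net effect of the two ShapeBase hunks above: the eye transform no longer concatenates the HMD orientation onto the control object's camera rotation through calcCameraDeltaPose; the device pose from getFrameEyePose is applied on its own, and only the eye position offset is rotated into place. A compact restatement of the new composition (same math as the code above, pulled out into a helper purely for clarity; the helper name is illustrative):

// outMat = rotation(deviceRot), translation = cameraPos + deviceRot * devicePos
MatrixF composeEyeTransform(const MatrixF &cameraTransform, const QuatF &deviceRot, const Point3F &devicePos)
{
   MatrixF temp(1);
   Point3F rotEyePos;
   deviceRot.setMatrix(&temp);
   temp.setPosition(cameraTransform.getPosition() + deviceRot.mulP(devicePos, &rotEyePos));
   return temp;
}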
@@ -1588,9 +1588,6 @@ public:
   /// orient and position values of the display device.
   virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );

   /// Calculates a delta camera angle and view position based on inPose
   virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);

   /// Gets the index of a node inside a mounted image given the name
   /// @param imageSlot Image slot
   /// @param nodeName Node name
@@ -219,6 +219,12 @@ public:
   /// The device has started rendering a frame's field (such as for side-by-side rendering)
   deStartOfField,

   /// left stereo frame has been rendered
   deLeftStereoFrameRendered,

   /// right stereo frame has been rendered
   deRightStereoFrameRendered,

   /// The device is about to finish rendering a frame's field
   deEndOfField,
};
@@ -248,6 +254,7 @@ public:
   {
      RS_Standard = 0,
      RS_StereoSideBySide = (1<<0), // Render into current Render Target side-by-side
      RS_StereoSeparate = (1<<1)    // Render in two separate passes (then combined by vr compositor)
   };

   enum GFXDeviceLimits
@@ -38,8 +38,8 @@
#include "gfx/gfxDrawUtil.h"
#include "gfx/gfxDebugEvent.h"
#include "core/stream/fileStream.h"

GFXTextureObject *gLastStereoTexture = NULL;
#include "platform/output/IDisplayDevice.h"
#include "T3D/gameBase/extended/extendedMove.h"

#define TS_OVERLAY_SCREEN_WIDTH 0.75
@@ -66,6 +66,7 @@ ImplementEnumType( GuiTSRenderStyles,
   "@ingroup Gui3D" )
   { GuiTSCtrl::RenderStyleStandard, "standard" },
   { GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" },
   { GuiTSCtrl::RenderStyleStereoSeparate, "stereo separate" },
EndImplementEnumType;

//-----------------------------------------------------------------------------
@ -353,32 +354,111 @@ static FovPort CalculateFovPortForCanvas(const RectI viewport, const CameraQuery
|
|||
return fovPort;
|
||||
}
|
||||
|
||||
void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum)
|
||||
{
|
||||
GFXTransformSaver saver;
|
||||
Point2I renderSize = viewport.extent;
|
||||
|
||||
if (mReflectPriority > 0)
|
||||
{
|
||||
// Get the total reflection priority.
|
||||
F32 totalPriority = 0;
|
||||
for (U32 i = 0; i < smAwakeTSCtrls.size(); i++)
|
||||
if (smAwakeTSCtrls[i]->isVisible())
|
||||
totalPriority += smAwakeTSCtrls[i]->mReflectPriority;
|
||||
|
||||
REFLECTMGR->update(mReflectPriority / totalPriority,
|
||||
getExtent(),
|
||||
mLastCameraQuery);
|
||||
}
|
||||
|
||||
if (mForceFOV != 0)
|
||||
mLastCameraQuery.fov = mDegToRad(mForceFOV);
|
||||
|
||||
if (mCameraZRot)
|
||||
{
|
||||
MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot)));
|
||||
mLastCameraQuery.cameraMatrix.mul(rotMat);
|
||||
}
|
||||
|
||||
GFX->setViewport(viewport);
|
||||
|
||||
// Clear the zBuffer so GUI doesn't hose object rendering accidentally
|
||||
GFX->clear(GFXClearZBuffer, ColorI(20, 20, 20), 1.0f, 0);
|
||||
|
||||
GFX->setFrustum(frustum);
|
||||
mSaveProjection = GFX->getProjectionMatrix();
|
||||
|
||||
if (mLastCameraQuery.ortho)
|
||||
{
|
||||
mOrthoWidth = frustum.getWidth();
|
||||
mOrthoHeight = frustum.getHeight();
|
||||
}
|
||||
|
||||
// We're going to be displaying this render at size of this control in
|
||||
// pixels - let the scene know so that it can calculate e.g. reflections
|
||||
// correctly for that final display result.
|
||||
gClientSceneGraph->setDisplayTargetResolution(renderSize);
|
||||
|
||||
// Set the GFX world matrix to the world-to-camera transform, but don't
|
||||
// change the cameraMatrix in mLastCameraQuery. This is because
|
||||
// mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world
|
||||
// transform. In-place invert would save a copy but mess up any GUIs that
|
||||
// depend on that value.
|
||||
MatrixF worldToCamera = mLastCameraQuery.cameraMatrix;
|
||||
worldToCamera.inverse();
|
||||
GFX->setWorldMatrix(worldToCamera);
|
||||
|
||||
mSaveProjection = GFX->getProjectionMatrix();
|
||||
mSaveModelview = GFX->getWorldMatrix();
|
||||
mSaveViewport = viewport;
|
||||
mSaveWorldToScreenScale = GFX->getWorldToScreenScale();
|
||||
mSaveFrustum = GFX->getFrustum();
|
||||
mSaveFrustum.setTransform(mLastCameraQuery.cameraMatrix);
|
||||
|
||||
// Set the default non-clip projection as some
|
||||
// objects depend on this even in non-reflect cases.
|
||||
gClientSceneGraph->setNonClipProjection(mSaveProjection);
|
||||
|
||||
// Give the post effect manager the worldToCamera, and cameraToScreen matrices
|
||||
PFXMGR->setFrameMatrices(mSaveModelview, mSaveProjection);
|
||||
|
||||
renderWorld(viewport);
|
||||
DebugDrawer::get()->render();
|
||||
|
||||
// Restore the previous matrix state before
|
||||
// we begin rendering the child controls.
|
||||
saver.restore();
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
|
||||
{
|
||||
// Save the current transforms so we can restore
|
||||
// Save the current transforms so we can restore
|
||||
// it for child control rendering below.
|
||||
GFXTransformSaver saver;
|
||||
bool renderingToTarget = false;
|
||||
|
||||
if(!processCameraQuery(&mLastCameraQuery))
|
||||
if (!processCameraQuery(&mLastCameraQuery))
|
||||
{
|
||||
// We have no camera, but render the GUI children
|
||||
// anyway. This makes editing GuiTSCtrl derived
|
||||
// controls easier in the GuiEditor.
|
||||
renderChildControls( offset, updateRect );
|
||||
renderChildControls(offset, updateRect);
|
||||
return;
|
||||
}
|
||||
|
||||
GFXTargetRef origTarget = GFX->getActiveRenderTarget();
|
||||
U32 origStyle = GFX->getCurrentRenderStyle();
|
||||
|
||||
// Set up the appropriate render style
|
||||
U32 prevRenderStyle = GFX->getCurrentRenderStyle();
|
||||
Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset();
|
||||
Point2I renderSize = getExtent();
|
||||
Frustum frustum;
|
||||
|
||||
if(mRenderStyle == RenderStyleStereoSideBySide)
|
||||
if (mRenderStyle == RenderStyleStereoSideBySide)
|
||||
{
|
||||
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide);
|
||||
GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset);
|
||||
|
|
@ -399,13 +479,13 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
|
|||
mLastCameraQuery.fovPort[0] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[0], mLastCameraQuery);
|
||||
mLastCameraQuery.fovPort[1] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[1], mLastCameraQuery);
|
||||
}
|
||||
|
||||
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
|
||||
|
||||
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
|
||||
GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
|
||||
GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
|
||||
|
||||
MatrixF myTransforms[2];
|
||||
Frustum frustum;
|
||||
|
||||
if (smUseLatestDisplayTransform)
|
||||
{
|
||||
|
|
@ -435,52 +515,109 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
|
|||
renderSize = mLastCameraQuery.stereoViewports[0].extent;
|
||||
renderingToTarget = true;
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
GFX->setCurrentRenderStyle(GFXDevice::RS_Standard);
|
||||
}
|
||||
|
||||
if ( mReflectPriority > 0 )
|
||||
{
|
||||
// Get the total reflection priority.
|
||||
F32 totalPriority = 0;
|
||||
for ( U32 i=0; i < smAwakeTSCtrls.size(); i++ )
|
||||
if ( smAwakeTSCtrls[i]->isVisible() )
|
||||
totalPriority += smAwakeTSCtrls[i]->mReflectPriority;
|
||||
|
||||
REFLECTMGR->update( mReflectPriority / totalPriority,
|
||||
getExtent(),
|
||||
mLastCameraQuery );
|
||||
}
|
||||
|
||||
if(mForceFOV != 0)
|
||||
mLastCameraQuery.fov = mDegToRad(mForceFOV);
|
||||
|
||||
if(mCameraZRot)
|
||||
{
|
||||
MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot)));
|
||||
mLastCameraQuery.cameraMatrix.mul(rotMat);
|
||||
}
|
||||
|
||||
Frustum frustum;
|
||||
if(mRenderStyle == RenderStyleStereoSideBySide)
|
||||
{
|
||||
// NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye.
|
||||
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
|
||||
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
|
||||
|
||||
GFX->activateStereoTarget(-1);
|
||||
_internalRender(RectI(updateRect.point, updateRect.extent), frustum);
|
||||
|
||||
// Render preview
|
||||
if (mLastCameraQuery.displayDevice)
|
||||
{
|
||||
GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture();
|
||||
if (!previewTexture.isNull())
|
||||
{
|
||||
GFX->setActiveRenderTarget(origTarget);
|
||||
GFX->setCurrentRenderStyle(origStyle);
|
||||
GFX->setClipRect(updateRect);
|
||||
renderDisplayPreview(updateRect, previewTexture);
|
||||
}
|
||||
}
|
||||
}
|
||||
else if (mRenderStyle == RenderStyleStereoSeparate && mLastCameraQuery.stereoTargets[0])
|
||||
{
|
||||
// In this case we render the scene twice to different render targets, then
|
||||
// render the final composite view
|
||||
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSeparate);
|
||||
GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
|
||||
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
|
||||
GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
|
||||
GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
|
||||
|
||||
MatrixF myTransforms[2];
|
||||
|
||||
if (smUseLatestDisplayTransform)
|
||||
{
|
||||
// Use the view matrix determined from the display device
|
||||
myTransforms[0] = mLastCameraQuery.eyeTransforms[0];
|
||||
myTransforms[1] = mLastCameraQuery.eyeTransforms[1];
|
||||
}
|
||||
else
|
||||
{
|
||||
// Use the view matrix determined from the control object
|
||||
myTransforms[0] = mLastCameraQuery.cameraMatrix;
|
||||
myTransforms[1] = mLastCameraQuery.cameraMatrix;
|
||||
|
||||
QuatF qrot = mLastCameraQuery.cameraMatrix;
|
||||
Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
|
||||
Point3F rotEyePos;
|
||||
|
||||
myTransforms[0].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[0], &rotEyePos));
|
||||
myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos));
|
||||
}
|
||||
|
||||
MatrixF origMatrix = mLastCameraQuery.cameraMatrix;
|
||||
|
||||
// Left
|
||||
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
|
||||
mLastCameraQuery.cameraMatrix = myTransforms[0];
|
||||
frustum.update();
|
||||
GFX->activateStereoTarget(0);
|
||||
_internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum);
|
||||
GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered);
|
||||
|
||||
// Right
|
||||
GFX->activateStereoTarget(1);
|
||||
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]);
|
||||
mLastCameraQuery.cameraMatrix = myTransforms[1];
|
||||
frustum.update();
|
||||
_internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), frustum);
|
||||
GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered);
|
||||
|
||||
mLastCameraQuery.cameraMatrix = origMatrix;
|
||||
|
||||
// Render preview
|
||||
if (mLastCameraQuery.displayDevice)
|
||||
{
|
||||
GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture();
|
||||
if (!previewTexture.isNull())
|
||||
{
|
||||
GFX->setActiveRenderTarget(origTarget);
|
||||
GFX->setCurrentRenderStyle(origStyle);
|
||||
GFX->setClipRect(updateRect);
|
||||
renderDisplayPreview(updateRect, previewTexture);
|
||||
}
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
#ifdef TORQUE_OS_MAC
|
||||
Point2I screensize = getRoot()->getWindowSize();
|
||||
tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
|
||||
#endif
|
||||
GFX->setCurrentRenderStyle(GFXDevice::RS_Standard);
|
||||
|
||||
// set up the camera and viewport stuff:
|
||||
F32 wwidth;
|
||||
F32 wheight;
|
||||
F32 renderWidth = F32(renderSize.x);
|
||||
F32 renderHeight = F32(renderSize.y);
|
||||
F32 aspectRatio = renderWidth / renderHeight;
|
||||
|
||||
|
||||
// Use the FOV to calculate the viewport height scale
|
||||
// then generate the width scale from the aspect ratio.
|
||||
if(!mLastCameraQuery.ortho)
|
||||
if (!mLastCameraQuery.ortho)
|
||||
{
|
||||
wheight = mLastCameraQuery.nearPlane * mTan(mLastCameraQuery.fov / 2.0f);
|
||||
wwidth = aspectRatio * wheight;
|
||||
|
|
@ -499,251 +636,33 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
|
|||
F32 top = wheight - vscale * (updateRect.point.y - offset.y);
|
||||
F32 bottom = wheight - vscale * (updateRect.point.y + updateRect.extent.y - offset.y);
|
||||
|
||||
frustum.set( mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane );
|
||||
}
|
||||
frustum.set(mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane);
|
||||
|
||||
// Manipulate the frustum for tiled screenshots
|
||||
const bool screenShotMode = gScreenShot && gScreenShot->isPending();
|
||||
if ( screenShotMode )
|
||||
{
|
||||
gScreenShot->tileFrustum( frustum );
|
||||
GFX->setViewMatrix(MatrixF::Identity);
|
||||
}
|
||||
|
||||
RectI tempRect = updateRect;
|
||||
|
||||
if (!renderingToTarget)
|
||||
{
|
||||
#ifdef TORQUE_OS_MAC
|
||||
// Manipulate the frustum for tiled screenshots
|
||||
const bool screenShotMode = gScreenShot && gScreenShot->isPending();
|
||||
if (screenShotMode)
|
||||
{
|
||||
gScreenShot->tileFrustum(frustum);
|
||||
GFX->setViewMatrix(MatrixF::Identity);
|
||||
}
|
||||
|
||||
RectI tempRect = updateRect;
|
||||
|
||||
#ifdef TORQUE_OS_MAC
|
||||
Point2I screensize = getRoot()->getWindowSize();
|
||||
tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
|
||||
#endif
|
||||
#endif
|
||||
|
||||
GFX->setViewport( tempRect );
|
||||
}
|
||||
else
|
||||
{
|
||||
// Activate stereo RT
|
||||
GFX->activateStereoTarget(-1);
|
||||
_internalRender(tempRect, frustum);
|
||||
}
|
||||
|
||||
// Clear the zBuffer so GUI doesn't hose object rendering accidentally
|
||||
GFX->clear( GFXClearZBuffer , ColorI(20,20,20), 1.0f, 0 );
|
||||
//GFX->clear( GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
|
||||
|
||||
GFX->setFrustum( frustum );
|
||||
if(mLastCameraQuery.ortho)
|
||||
{
|
||||
mOrthoWidth = frustum.getWidth();
|
||||
mOrthoHeight = frustum.getHeight();
|
||||
}
|
||||
|
||||
// We're going to be displaying this render at size of this control in
|
||||
// pixels - let the scene know so that it can calculate e.g. reflections
|
||||
// correctly for that final display result.
|
||||
gClientSceneGraph->setDisplayTargetResolution(renderSize);
|
||||
|
||||
// Set the GFX world matrix to the world-to-camera transform, but don't
|
||||
// change the cameraMatrix in mLastCameraQuery. This is because
|
||||
// mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world
|
||||
// transform. In-place invert would save a copy but mess up any GUIs that
|
||||
// depend on that value.
|
||||
MatrixF worldToCamera = mLastCameraQuery.cameraMatrix;
|
||||
worldToCamera.inverse();
|
||||
GFX->setWorldMatrix( worldToCamera );
|
||||
|
||||
mSaveProjection = GFX->getProjectionMatrix();
|
||||
mSaveModelview = GFX->getWorldMatrix();
|
||||
mSaveViewport = updateRect;
|
||||
mSaveWorldToScreenScale = GFX->getWorldToScreenScale();
|
||||
mSaveFrustum = GFX->getFrustum();
|
||||
mSaveFrustum.setTransform( mLastCameraQuery.cameraMatrix );
|
||||
|
||||
// Set the default non-clip projection as some
|
||||
// objects depend on this even in non-reflect cases.
|
||||
gClientSceneGraph->setNonClipProjection( mSaveProjection );
|
||||
|
||||
// Give the post effect manager the worldToCamera, and cameraToScreen matrices
|
||||
PFXMGR->setFrameMatrices( mSaveModelview, mSaveProjection );
|
||||
|
||||
renderWorld(updateRect);
|
||||
DebugDrawer::get()->render();
|
||||
|
||||
// Render the canvas overlay if its available
|
||||
if (false && mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer())
|
||||
{
|
||||
GFXDEBUGEVENT_SCOPE( StereoGui_Render, ColorI( 255, 0, 0 ) );
|
||||
MatrixF proj(1);
|
||||
|
||||
Frustum originalFrustum = GFX->getFrustum();
|
||||
GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0);
|
||||
const FovPort *currentFovPort = GFX->getStereoFovPort();
|
||||
const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
|
||||
const Point3F *eyeOffset = GFX->getStereoEyeOffsets();
|
||||
Frustum gfxFrustum = originalFrustum;
|
||||
|
||||
for (U32 i=0; i<2; i++)
|
||||
{
|
||||
GFX->activateStereoTarget(i);
|
||||
MathUtils::makeFovPortFrustum(&gfxFrustum, true, gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[i], eyeTransforms[i]);
|
||||
GFX->setFrustum(gfxFrustum);
|
||||
|
||||
MatrixF eyeWorldTrans(1);
|
||||
eyeWorldTrans.setPosition(Point3F(eyeOffset[i].x,eyeOffset[i].y,eyeOffset[i].z));
|
||||
MatrixF eyeWorld(1);
|
||||
eyeWorld.mul(eyeWorldTrans);
|
||||
eyeWorld.inverse();
|
||||
|
||||
GFX->setWorldMatrix(eyeWorld);
|
||||
GFX->setViewMatrix(MatrixF::Identity);
|
||||
|
||||
if (!mStereoOverlayVB.getPointer())
|
||||
{
|
||||
mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic);
|
||||
GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4);
|
||||
|
||||
F32 texLeft = 0.0f;
|
||||
F32 texRight = 1.0f;
|
||||
F32 texTop = 1.0f;
|
||||
F32 texBottom = 0.0f;
|
||||
|
||||
F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight();
|
||||
F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH;
|
||||
F32 rectHeight = rectWidth * rectRatio;
|
||||
|
||||
F32 screenLeft = -rectWidth * 0.5;
|
||||
F32 screenRight = rectWidth * 0.5;
|
||||
F32 screenTop = -rectHeight * 0.5;
|
||||
F32 screenBottom = rectHeight * 0.5;
|
||||
|
||||
const F32 fillConv = 0.0f;
|
||||
const F32 frustumDepthAdjusted = gfxFrustum.getNearDist() + 0.012;
|
||||
verts[0].point.set( screenLeft - fillConv, frustumDepthAdjusted, screenTop - fillConv );
|
||||
verts[1].point.set( screenRight - fillConv, frustumDepthAdjusted, screenTop - fillConv );
|
||||
verts[2].point.set( screenLeft - fillConv, frustumDepthAdjusted, screenBottom - fillConv );
|
||||
verts[3].point.set( screenRight - fillConv, frustumDepthAdjusted, screenBottom - fillConv );
|
||||
|
||||
verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255);
|
||||
|
||||
verts[0].texCoord.set( texLeft, texTop );
|
||||
verts[1].texCoord.set( texRight, texTop );
|
||||
verts[2].texCoord.set( texLeft, texBottom );
|
||||
verts[3].texCoord.set( texRight, texBottom );
|
||||
|
||||
mStereoOverlayVB.unlock();
|
||||
}
|
||||
|
||||
if (!mStereoGuiSB.getPointer())
|
||||
{
|
||||
// DrawBitmapStretchSR
|
||||
GFXStateBlockDesc bitmapStretchSR;
|
||||
bitmapStretchSR.setCullMode(GFXCullNone);
|
||||
bitmapStretchSR.setZReadWrite(false, false);
|
||||
bitmapStretchSR.setBlend(false , GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
|
||||
bitmapStretchSR.samplersDefined = true;
|
||||
|
||||
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
|
||||
bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
|
||||
bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
|
||||
bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
|
||||
|
||||
mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR);
|
||||
}
|
||||
|
||||
GFX->setVertexBuffer(mStereoOverlayVB);
|
||||
GFX->setStateBlock(mStereoGuiSB);
|
||||
GFX->setTexture( 0, texObject );
|
||||
GFX->setupGenericShaders( GFXDevice::GSModColorTexture );
|
||||
GFX->drawPrimitive( GFXTriangleStrip, 0, 2 );
|
||||
}
|
||||
}
|
||||
|
||||
// Restore the previous matrix state before
|
||||
// we begin rendering the child controls.
|
||||
saver.restore();
|
||||
|
||||
// Restore the render style and any stereo parameters
|
||||
GFX->setActiveRenderTarget(origTarget);
|
||||
GFX->setCurrentRenderStyle(prevRenderStyle);
|
||||
GFX->setCurrentProjectionOffset(prevProjectionOffset);
|
||||
|
||||
GFX->updateStates(true);
|
||||
|
||||
if(mRenderStyle == RenderStyleStereoSideBySide && gLastStereoTexture)
|
||||
{
|
||||
GFX->setWorldMatrix(MatrixF(1));
|
||||
GFX->setViewMatrix(MatrixF::Identity);
|
||||
GFX->setClipRect(updateRect);
|
||||
|
||||
GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0,0), Point2I(1024, 768)), ColorI::BLACK);
|
||||
GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED);
|
||||
|
||||
if (!mStereoOverlayVB.getPointer())
|
||||
{
|
||||
mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic);
|
||||
GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4);
|
||||
|
||||
F32 texLeft = 0.0f;
|
||||
F32 texRight = 1.0f;
|
||||
F32 texTop = 1.0f;
|
||||
F32 texBottom = 0.0f;
|
||||
|
||||
F32 rectWidth = 1024.0;
|
||||
F32 rectHeight = 768.0;
|
||||
|
||||
F32 screenLeft = 0;
|
||||
F32 screenRight = rectWidth;
|
||||
F32 screenTop = 0;
|
||||
F32 screenBottom = rectHeight;
|
||||
|
||||
const F32 fillConv = 0.0f;
|
||||
const F32 frustumDepthAdjusted = 0.0f;
|
||||
verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f);
|
||||
verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f);
|
||||
verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f);
|
||||
verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f);
|
||||
|
||||
verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255);
|
||||
|
||||
verts[0].texCoord.set(texLeft, texTop);
|
||||
verts[1].texCoord.set(texRight, texTop);
|
||||
verts[2].texCoord.set(texLeft, texBottom);
|
||||
verts[3].texCoord.set(texRight, texBottom);
|
||||
|
||||
mStereoOverlayVB.unlock();
|
||||
}
|
||||
|
||||
if (!mStereoGuiSB.getPointer())
|
||||
{
|
||||
// DrawBitmapStretchSR
|
||||
GFXStateBlockDesc bitmapStretchSR;
|
||||
bitmapStretchSR.setCullMode(GFXCullNone);
|
||||
bitmapStretchSR.setZReadWrite(false, false);
|
||||
bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
|
||||
bitmapStretchSR.samplersDefined = true;
|
||||
|
||||
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
|
||||
bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
|
||||
bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
|
||||
bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
|
||||
|
||||
mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR);
|
||||
}
|
||||
//static GFXTexHandle texHandle("art/gui/splash", &GFXDefaultPersistentProfile, avar("%s() - mTextureNormal (line %d)", __FUNCTION__, __LINE__));
|
||||
GFX->setVertexBuffer(mStereoOverlayVB);
|
||||
GFX->setStateBlock(mStereoGuiSB);
|
||||
GFX->setTexture(0, gLastStereoTexture);// texHandle);// gLastStereoTexture);
|
||||
GFX->setupGenericShaders(GFXDevice::GSModColorTexture);
|
||||
GFX->drawPrimitive(GFXTriangleStrip, 0, 2);
|
||||
|
||||
|
||||
|
||||
//GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect);
|
||||
}
|
||||
// TODO: Some render to sort of overlay system?
|
||||
|
||||
// Allow subclasses to render 2D elements.
|
||||
GFX->setActiveRenderTarget(origTarget);
|
||||
GFX->setCurrentRenderStyle(origStyle);
|
||||
GFX->setClipRect(updateRect);
|
||||
renderGui( offset, updateRect );
|
||||
renderGui(offset, updateRect);
|
||||
|
||||
if (shouldRenderChildControls())
|
||||
{
|
||||
|
|
@ -779,12 +698,84 @@ void GuiTSCtrl::drawLineList( const Vector<Point3F> &points, const ColorI color,
|
|||
drawLine( points[i], points[i+1], color, width );
|
||||
}
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
void GuiTSCtrl::setStereoGui(GuiOffscreenCanvas *canvas)
|
||||
{
|
||||
mStereoGuiTarget = canvas ? canvas->getTarget() : NULL;
|
||||
}
|
||||
|
||||
|
||||
//-----------------------------------------------------------------------------
|
||||
|
||||
void GuiTSCtrl::renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture)
|
||||
{
|
||||
GFX->setWorldMatrix(MatrixF(1));
|
||||
GFX->setViewMatrix(MatrixF::Identity);
|
||||
GFX->setClipRect(updateRect);
|
||||
|
||||
GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::BLACK);
|
||||
GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED);
|
||||
|
||||
if (!mStereoPreviewVB.getPointer())
|
||||
{
|
||||
mStereoPreviewVB.set(GFX, 4, GFXBufferTypeStatic);
|
||||
GFXVertexPCT *verts = mStereoPreviewVB.lock(0, 4);
|
||||
|
||||
F32 texLeft = 0.0f;
|
||||
F32 texRight = 1.0f;
|
||||
F32 texTop = 0.0f;
|
||||
F32 texBottom = 1.0f;
|
||||
|
||||
F32 rectWidth = updateRect.extent.x;
|
||||
F32 rectHeight = updateRect.extent.y;
|
||||
|
||||
F32 screenLeft = 0;
|
||||
F32 screenRight = rectWidth;
|
||||
F32 screenTop = 0;
|
||||
F32 screenBottom = rectHeight;
|
||||
|
||||
const F32 fillConv = 0.0f;
|
||||
const F32 frustumDepthAdjusted = 0.0f;
|
||||
verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f);
|
||||
verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f);
|
||||
verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f);
|
||||
verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f);
|
||||
|
||||
verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255, 255, 255, 255);
|
||||
|
||||
verts[0].texCoord.set(texLeft, texTop);
|
||||
verts[1].texCoord.set(texRight, texTop);
|
||||
verts[2].texCoord.set(texLeft, texBottom);
|
||||
verts[3].texCoord.set(texRight, texBottom);
|
||||
|
||||
mStereoPreviewVB.unlock();
|
||||
}
|
||||
|
||||
if (!mStereoPreviewSB.getPointer())
|
||||
{
|
||||
// DrawBitmapStretchSR
|
||||
GFXStateBlockDesc bitmapStretchSR;
|
||||
bitmapStretchSR.setCullMode(GFXCullNone);
|
||||
bitmapStretchSR.setZReadWrite(false, false);
|
||||
bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
|
||||
bitmapStretchSR.samplersDefined = true;
|
||||
|
||||
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
|
||||
bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
|
||||
bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
|
||||
bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
|
||||
|
||||
mStereoPreviewSB = GFX->createStateBlock(bitmapStretchSR);
|
||||
}
|
||||
|
||||
GFX->setVertexBuffer(mStereoPreviewVB);
|
||||
GFX->setStateBlock(mStereoPreviewSB);
|
||||
GFX->setTexture(0, previewTexture);
|
||||
GFX->setupGenericShaders(GFXDevice::GSModColorTexture);
|
||||
GFX->drawPrimitive(GFXTriangleStrip, 0, 2);
|
||||
}
|
||||
|
||||
//=============================================================================
|
||||
// Console Methods.
|
||||
//=============================================================================
|
||||
|
|
|
|||
|
|
@@ -55,6 +55,8 @@ struct CameraQuery
   RectI stereoViewports[2]; // destination viewports
   GFXTextureTarget* stereoTargets[2];
   GuiCanvas* drawCanvas; // Canvas we are drawing to. Needed for VR

   IDisplayDevice* displayDevice;
};

/// Abstract base class for 3D viewport GUIs.
@@ -65,7 +67,8 @@ class GuiTSCtrl : public GuiContainer
public:
   enum RenderStyles {
      RenderStyleStandard = 0,
      RenderStyleStereoSideBySide = (1<<0)
      RenderStyleStereoSideBySide = (1<<0),
      RenderStyleStereoSeparate = (1<<1),
   };

protected:
@@ -104,12 +107,16 @@ protected:
   NamedTexTargetRef mStereoGuiTarget;
   GFXVertexBufferHandle<GFXVertexPCT> mStereoOverlayVB;
   GFXStateBlockRef mStereoGuiSB;

   GFXVertexBufferHandle<GFXVertexPCT> mStereoPreviewVB;
   GFXStateBlockRef mStereoPreviewSB;

public:

   GuiTSCtrl();

   void onPreRender();
   void _internalRender(RectI viewport, Frustum &frustum);
   void onRender(Point2I offset, const RectI &updateRect);
   virtual bool processCameraQuery(CameraQuery *query);
@@ -178,6 +185,7 @@ public:
   bool shouldRenderChildControls() { return mRenderStyle == RenderStyleStandard; }

   void setStereoGui(GuiOffscreenCanvas *canvas);
   void renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture);

   DECLARE_CONOBJECT(GuiTSCtrl);
   DECLARE_CATEGORY( "Gui 3D" );
@@ -27,6 +27,7 @@
#include "core/stringTable.h"
#include "platform/platformInput.h"
#include "math/mQuat.h"
#include "math/mAngAxis.h"

MODULE_BEGIN( InputEventManager )
@@ -546,3 +547,21 @@ void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEve

   newEvent.postToSignal(Input::smInputEvent);
}

void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& aValue)
{
   InputEventInfo newEvent;

   newEvent.deviceType = deviceType;
   newEvent.deviceInst = deviceInst;
   newEvent.objType = objType;
   newEvent.objInst = objInst;
   newEvent.action = action;
   newEvent.fValue = aValue.axis.x;
   newEvent.fValue2 = aValue.axis.y;
   newEvent.fValue3 = aValue.axis.z;
   newEvent.fValue4 = aValue.angle;

   newEvent.postToSignal(Input::smInputEvent);
}
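Listeners receive the rotation from the new overload as four plain floats: fValue through fValue3 carry the axis and fValue4 carries the angle, mirroring how the existing QuatF overload packs its components. A minimal sketch of reassembling it on the receiving side (assuming an InputEventInfo-style event with those fields; illustrative only):

// Rebuild the angle-axis rotation that was packed by the overload above.
AngAxisF unpackRotation(const InputEventInfo &evt)
{
   return AngAxisF(Point3F(evt.fValue, evt.fValue2, evt.fValue3), evt.fValue4);
}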
@@ -504,6 +504,9 @@ public:
   /// Build an input event based on a QuatF
   void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, QuatF& qValue);

   /// Build an input event based on a AngAxisF
   void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& qValue);

protected:
   U32 mNextDeviceTypeCode;
   U32 mNextDeviceCode;
@@ -62,7 +62,7 @@ MODULE_END;
// OculusVRDevice
//-----------------------------------------------------------------------------

bool OculusVRDevice::smEnableDevice = true;
bool OculusVRDevice::smEnableDevice = false;

bool OculusVRDevice::smSimulateHMD = true;
@@ -318,17 +318,6 @@ void OculusVRDevice::getEyeOffsets(Point3F *dest) const
   hmd->getEyeOffsets(dest);
}

bool OculusVRDevice::providesFovPorts() const
{
   if(!mHMDDevices.size())
      return false;

   const OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId);
   if(!hmd)
      return Point3F::Zero;

   return true;
}

void OculusVRDevice::getFovPorts(FovPort *out) const
{
@@ -562,6 +551,20 @@ GameConnection* OculusVRDevice::getCurrentConnection()

//-----------------------------------------------------------------------------

GFXTexHandle OculusVRDevice::getPreviewTexture()
{
   if (!mHMDDevices.size())
      return NULL;

   OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId);
   if (!hmd)
      return NULL;

   return hmd->getPreviewTexture();
}

//-----------------------------------------------------------------------------

DefineEngineFunction(isOculusVRDeviceActive, bool, (),,
   "@brief Used to determine if the Oculus VR input device is active\n\n"
@@ -115,8 +115,8 @@ public:
   virtual bool providesFrameEyePose() const;
   virtual void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
   virtual bool providesEyeOffsets() const;
   virtual bool providesFovPorts() const { return true; }
   virtual void getEyeOffsets(Point3F *dest) const;
   virtual bool providesFovPorts() const;
   virtual void getFovPorts(FovPort *out) const;
   virtual bool providesProjectionOffset() const;
   virtual const Point2F& getProjectionOffset() const;
@@ -154,6 +154,8 @@ public:
   virtual void setCurrentConnection(GameConnection *connection);
   virtual GameConnection* getCurrentConnection();

   GFXTexHandle getPreviewTexture();

   bool _handleDeviceEvent( GFXDevice::GFXDeviceEventType evt );

public:
@@ -43,7 +43,6 @@
#include "OVR_CAPI_GL.h"
#define OCULUS_USE_GL
#endif
extern GFXTextureObject *gLastStereoTexture;

struct OculusTexture
{
@@ -317,6 +316,14 @@ void OculusVRHMDDevice::dismissWarning()
   //ovr_DismissHSWDisplay(mDevice);
}

GFXTexHandle OculusVRHMDDevice::getPreviewTexture()
{
   if (!mIsValid || !mDevice)
      return NULL;

   return mDebugMirrorTextureHandle;
}

bool OculusVRHMDDevice::setupTargets()
{
   // Create eye render buffers
@@ -381,9 +388,6 @@ bool OculusVRHMDDevice::setupTargets()
   mEyeRT[1] = mStereoRT;
   mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h));

   gLastStereoTexture = NULL;

   GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);

   D3D11_TEXTURE2D_DESC dsDesc;
@@ -453,7 +457,6 @@ bool OculusVRHMDDevice::setupTargets()
      }

      mDebugMirrorTextureHandle = object;
      gLastStereoTexture = mDebugMirrorTextureHandle;
   }
   else
   {
@@ -673,10 +676,11 @@ void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const
   OVR::Quatf orientation = pose.Orientation;
   const OVR::Vector3f position = pose.Position;

   EulerF rotEuler;
   OculusVRUtil::convertRotation(orientation, rotEuler);
   MatrixF torqueMat(1);
   OVR::Matrix4f mat(orientation);
   OculusVRUtil::convertRotation(mat.M, torqueMat);

   outPose->orientation = rotEuler;
   outPose->orientation = QuatF(torqueMat);
   outPose->position = Point3F(-position.x, position.z, -position.y);
}
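Instead of decomposing the HMD orientation into Euler angles, the pose now carries the full rotation: the OVR quaternion is expanded to a matrix, remapped into Torque's coordinate system by OculusVRUtil::convertRotation, and stored back as a QuatF. Code that previously treated the pose orientation as Euler angles consumes it as a rotation, as in the ShapeBase hunk earlier in this commit (sketch restating that usage):

// In getEyeCameraTransform: apply the device orientation directly.
QuatF qrot = QuatF(newPose.orientation);
qrot.setMatrix(&temp);
temp.setPosition(cameraTransform.getPosition() + qrot.mulP(newPose.position, &rotEyePos));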
@@ -185,6 +185,8 @@ public:
   virtual void setCurrentConnection(GameConnection *connection) { mConnection = connection; }
   virtual GameConnection* getCurrentConnection() { return mConnection; }

   GFXTexHandle getPreviewTexture();

   String dumpMetrics();

   // Stereo RT
@@ -26,6 +26,7 @@
#include "platform/platformInput.h"
#include "console/simBase.h"
#include "console/engineAPI.h"
#include "math/mAngAxis.h"
#include "OVR_CAPI_0_8_0.h"

U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0};
@@ -184,7 +185,8 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
   {
      if(generateRotAsAngAxis)
      {
         INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, currentBuffer->mRotQuat);
         AngAxisF axisAA(currentBuffer->mRotQuat);
         INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, axisAA);
      }

      if(generateRotAsEuler)
@@ -44,10 +44,7 @@ void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation)
void convertRotation(OVR::Quatf& inRotation, EulerF& outRotation)
{
   F32 yaw, pitch, roll;
   inRotation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &pitch, &roll);
   outRotation.x = -pitch;
   outRotation.y = roll;
   outRotation.z = -yaw;
   inRotation.GetEulerAngles<OVR::Axis_X, OVR::Axis_Z, OVR::Axis_Y, OVR::Rotate_CW, OVR::Handed_R>(&outRotation.x, &outRotation.y, &outRotation.z);
}

void calculateAxisRotation(const MatrixF& inRotation, const F32& maxAxisRadius, Point2F& outRotation)
Engine/source/platform/input/openVR/openVRProvider.cpp (new file, 886 lines)

@@ -0,0 +1,886 @@
#include "platform/input/openVR/openVRProvider.h"
|
||||
#include "platform/platformInput.h"
|
||||
#include "core/module.h"
|
||||
#include "console/engineAPI.h"
|
||||
#include "T3D/gameBase/gameConnection.h"
|
||||
#include "gui/core/guiCanvas.h"
|
||||
#include "postFx/postEffectCommon.h"
|
||||
|
||||
#include "gfx/D3D11/gfxD3D11Device.h"
|
||||
#include "gfx/D3D11/gfxD3D11TextureObject.h"
|
||||
#include "gfx/D3D11/gfxD3D11EnumTranslate.h"
|
||||
#include "gfx/gfxStringEnumTranslate.h"
|
||||
|
||||
/*
|
||||
#include "gfx/gl/gfxGLDevice.h"
|
||||
#include "gfx/gl/gfxGLTextureObject.h"
|
||||
#include "gfx/gl/gfxGLEnumTranslate.h"
|
||||
*/
|
||||
|
||||
#include "platform/input/oculusVR/oculusVRUtil.h"
|
||||
|
||||
|
||||
U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
|
||||
U32 OpenVRProvider::OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount];
|
||||
U32 OpenVRProvider::OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount];
|
||||
|
||||
U32 OpenVRProvider::OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
U32 OpenVRProvider::OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount] = { 0 };
|
||||
|
||||
static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL)
|
||||
{
|
||||
uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError);
|
||||
if (unRequiredBufferLen == 0)
|
||||
return "";
|
||||
|
||||
char *pchBuffer = new char[unRequiredBufferLen];
|
||||
unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError);
|
||||
String sResult = pchBuffer;
|
||||
delete[] pchBuffer;
|
||||
return sResult;
|
||||
}
|
||||
|
||||
static MatrixF ConvertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat)
|
||||
{
|
||||
MatrixF outMat(1);
|
||||
|
||||
outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0));
|
||||
outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0));
|
||||
outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0));
|
||||
outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos
|
||||
|
||||
return outMat;
|
||||
}
|
||||
|
||||
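vr::HmdMatrix34_t is a 3x4 affine transform with the rotation in the left 3x3 block and the translation in the last column; the helper above copies element m[r][c] into column c of the MatrixF and puts m[*][3] into the position column. A quick hedged restatement of that layout (illustrative helper, assuming the usual row-times-point convention for the raw OpenVR matrix):

// For any point p: result[r] = m[r][0]*p.x + m[r][1]*p.y + m[r][2]*p.z + m[r][3]
Point3F applyHmdMatrix(const vr::HmdMatrix34_t &m, const Point3F &p)
{
   return Point3F(m.m[0][0]*p.x + m.m[0][1]*p.y + m.m[0][2]*p.z + m.m[0][3],
                  m.m[1][0]*p.x + m.m[1][1]*p.y + m.m[1][2]*p.z + m.m[1][3],
                  m.m[2][0]*p.x + m.m[2][1]*p.y + m.m[2][2]*p.z + m.m[2][3]);
}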
MODULE_BEGIN(OpenVRProvider)
|
||||
|
||||
MODULE_INIT_AFTER(InputEventManager)
|
||||
MODULE_SHUTDOWN_BEFORE(InputEventManager)
|
||||
|
||||
MODULE_INIT
|
||||
{
|
||||
OpenVRProvider::staticInit();
|
||||
ManagedSingleton< OpenVRProvider >::createSingleton();
|
||||
}
|
||||
|
||||
MODULE_SHUTDOWN
|
||||
{
|
||||
ManagedSingleton< OpenVRProvider >::deleteSingleton();
|
||||
}
|
||||
|
||||
MODULE_END;
|
||||
|
||||
|
||||
bool OpenVRRenderState::setupRenderTargets(U32 mode)
|
||||
{
|
||||
if (!mHMD)
|
||||
return false;
|
||||
|
||||
U32 sizeX, sizeY;
|
||||
Point2I newRTSize;
|
||||
mHMD->GetRecommendedRenderTargetSize(&sizeX, &sizeY);
|
||||
|
||||
mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
||||
mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY));
|
||||
|
||||
newRTSize.x = sizeX;
|
||||
newRTSize.y = sizeY;
|
||||
|
||||
GFXTexHandle stereoTexture;
|
||||
stereoTexture.set(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color");
|
||||
mStereoRenderTextures[0] = mStereoRenderTextures[1] = stereoTexture;
|
||||
|
||||
GFXTexHandle stereoDepthTexture;
|
||||
stereoDepthTexture.set(newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, "OpenVR Depth");
|
||||
mStereoDepthTextures[0] = mStereoDepthTextures[1] = stereoDepthTexture;
|
||||
|
||||
mStereoRT = GFX->allocRenderToTextureTarget();
|
||||
mStereoRT->attachTexture(GFXTextureTarget::Color0, stereoTexture);
|
||||
   mStereoRT->attachTexture(GFXTextureTarget::DepthStencil, stereoDepthTexture);

   mEyeRT[0] = mEyeRT[1] = mStereoRT;

   return true;
}

void OpenVRRenderState::setupDistortion()
{
   if (!mHMD)
      return;

   U16 m_iLensGridSegmentCountH = 43;
   U16 m_iLensGridSegmentCountV = 43;

   float w = (float)(1.0 / float(m_iLensGridSegmentCountH - 1));
   float h = (float)(1.0 / float(m_iLensGridSegmentCountV - 1));

   float u, v = 0;

   Vector<GFXVertexPTTT> vVerts(0);
   GFXVertexPTTT *vert;

   vVerts.reserve((m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2);

   mDistortionVerts.set(GFX, (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2, GFXBufferTypeStatic);

   vert = mDistortionVerts.lock();

   //left eye distortion verts
   float Xoffset = -1;
   for (int y = 0; y < m_iLensGridSegmentCountV; y++)
   {
      for (int x = 0; x < m_iLensGridSegmentCountH; x++)
      {
         u = x*w; v = 1 - y*h;
         vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f);

         vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Left, u, v);

         vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]); // r
         vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]); // g
         vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]); // b

         vert++;
      }
   }

   //right eye distortion verts
   Xoffset = 0;
   for (int y = 0; y < m_iLensGridSegmentCountV; y++)
   {
      for (int x = 0; x < m_iLensGridSegmentCountH; x++)
      {
         u = x*w; v = 1 - y*h;
         vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f);

         vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Right, u, v);

         vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]);
         vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]);
         vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]);

         vert++;
      }
   }

   mDistortionVerts.unlock();

   mDistortionInds.set(GFX, m_iLensGridSegmentCountV * m_iLensGridSegmentCountH * 6 * 2, 0, GFXBufferTypeStatic);

   GFXPrimitive *prim;
   U16 *index;

   mDistortionInds.lock(&index, &prim);
   U16 a, b, c, d;

   U16 offset = 0;
   for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++)
   {
      for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++)
      {
         a = m_iLensGridSegmentCountH*y + x + offset;
         b = m_iLensGridSegmentCountH*y + x + 1 + offset;
         c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset;
         d = (y + 1)*m_iLensGridSegmentCountH + x + offset;
         *index++ = a;
         *index++ = b;
         *index++ = c;

         *index++ = a;
         *index++ = c;
         *index++ = d;
      }
   }

   offset = (m_iLensGridSegmentCountH)*(m_iLensGridSegmentCountV);
   for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++)
   {
      for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++)
      {
         a = m_iLensGridSegmentCountH*y + x + offset;
         b = m_iLensGridSegmentCountH*y + x + 1 + offset;
         c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset;
         d = (y + 1)*m_iLensGridSegmentCountH + x + offset;
         *index++ = a;
         *index++ = b;
         *index++ = c;

         *index++ = a;
         *index++ = c;
         *index++ = d;
      }
   }

   mDistortionInds.unlock();
}
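// Editor's note (worked numbers, derived from the constants above): with a 43x43 grid
// per eye the vertex buffer holds 2 * 43 * 43 = 3698 vertices, while the two index
// loops emit 2 * 42 * 42 * 6 = 21168 indices (3528 quads split into triangles).
// The index buffer is allocated for 43 * 43 * 6 * 2 = 22188 indices, so the tail of
// that allocation is simply left unwritten.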

void OpenVRRenderState::renderDistortion(U32 eye)
{
   // Renders the distortion mesh for an eye. This is only needed for backend APIs
   // where the image handed to the compositor must already be predistorted.
   /*
   glDisable(GL_DEPTH_TEST);
   glViewport( 0, 0, m_nWindowWidth, m_nWindowHeight );

   glBindVertexArray( m_unLensVAO );
   glUseProgram( m_unLensProgramID );

   //render left lens (first half of index array )
   glBindTexture(GL_TEXTURE_2D, leftEyeDesc.m_nResolveTextureId );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR );
   glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, 0 );

   //render right lens (second half of index array )
   glBindTexture(GL_TEXTURE_2D, rightEyeDesc.m_nResolveTextureId );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR );
   glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR );
   glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, (const void *)(m_uiIndexSize) );

   glBindVertexArray( 0 );
   glUseProgram( 0 );
   */
}
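// Editor's note: a minimal Torque-side sketch of how renderDistortion() could draw the
// buffers built in setupDistortion(), assuming a suitable shader/state block is already
// bound and the eye's resolved texture is set on the first sampler. Illustrative only,
// not part of the commit.
/*
   const U32 vertsPerEye   = 43 * 43;
   const U32 trisPerEye    = 42 * 42 * 2;      // two triangles per grid quad
   const U32 indicesPerEye = trisPerEye * 3;

   GFX->setVertexBuffer(mDistortionVerts);
   GFX->setPrimitiveBuffer(mDistortionInds);

   // Draw only the requested eye's half of the mesh; indices are absolute, so the
   // vertex range spans both halves of the vertex buffer.
   GFX->drawIndexedPrimitive(GFXTriangleList, 0, 0, vertsPerEye * 2, eye * indicesPerEye, trisPerEye);
*/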

void OpenVRRenderState::renderPreview()
{

}

void OpenVRRenderState::reset(vr::IVRSystem* hmd)
{
   mHMD = hmd;

   mStereoRT = NULL;
   mEyeRT[0] = mEyeRT[1] = NULL;

   mStereoRenderTextures[0] = mStereoRenderTextures[1] = NULL;
   mStereoDepthTextures[0] = mStereoDepthTextures[1] = NULL;

   mDistortionVerts = NULL;
   mDistortionInds = NULL;

   if (!mHMD)
      return;

   vr::HmdMatrix34_t mat = mHMD->GetEyeToHeadTransform(vr::Eye_Left);
   mEyePose[0] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat);
   mEyePose[0].inverse();

   mat = mHMD->GetEyeToHeadTransform(vr::Eye_Right);
   mEyePose[1] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat);
   mEyePose[1].inverse();

   mHMD->GetProjectionRaw(vr::Eye_Left, &mEyeFov[0].leftTan, &mEyeFov[0].rightTan, &mEyeFov[0].upTan, &mEyeFov[0].downTan);
   mHMD->GetProjectionRaw(vr::Eye_Right, &mEyeFov[1].leftTan, &mEyeFov[1].rightTan, &mEyeFov[1].upTan, &mEyeFov[1].downTan);

   mEyeFov[0].upTan = -mEyeFov[0].upTan;
   mEyeFov[0].leftTan = -mEyeFov[0].leftTan;
   mEyeFov[1].upTan = -mEyeFov[1].upTan;
   mEyeFov[1].leftTan = -mEyeFov[1].leftTan;
}
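// Editor's note: GetProjectionRaw() effectively returns signed tangents of the frustum
// half-angles; the sign flips above put the two negative components into the
// positive-tangent form the FovPort fields are used as. A minimal sketch (illustrative,
// not part of the commit) of turning one eye's FovPort into near-plane extents for an
// off-center projection, assuming a chosen near distance:
/*
   const F32 nearDist = 0.1f;
   F32 left   = -mEyeFov[0].leftTan  * nearDist;
   F32 right  =  mEyeFov[0].rightTan * nearDist;
   F32 top    =  mEyeFov[0].upTan    * nearDist;
   F32 bottom = -mEyeFov[0].downTan  * nearDist;
*/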

OpenVRProvider::OpenVRProvider() :
   mHMD(NULL),
   mRenderModels(NULL),
   mDrawCanvas(NULL),
   mGameConnection(NULL)
{
   dStrcpy(mName, "openvr");
   mDeviceType = INPUTMGR->getNextDeviceType();
   buildInputCodeTable();
   GFXDevice::getDeviceEventSignal().notify(this, &OpenVRProvider::_handleDeviceEvent);
   INPUTMGR->registerDevice(this);
}

OpenVRProvider::~OpenVRProvider()
{

}

void OpenVRProvider::staticInit()
{
   // TODO: Add console vars
}

bool OpenVRProvider::enable()
{
   disable();

   // Load the OpenVR runtime
   vr::EVRInitError eError = vr::VRInitError_None;
   mHMD = vr::VR_Init(&eError, vr::VRApplication_Scene);

   dMemset(mDeviceClassChar, '\0', sizeof(mDeviceClassChar));

   if (eError != vr::VRInitError_None)
   {
      mHMD = NULL;
      Con::errorf("Unable to init VR runtime: %s", vr::VR_GetVRInitErrorAsEnglishDescription(eError));
      return false;
   }

   mRenderModels = (vr::IVRRenderModels *)vr::VR_GetGenericInterface(vr::IVRRenderModels_Version, &eError);
   if (!mRenderModels)
   {
      mHMD = NULL;
      vr::VR_Shutdown();

      Con::errorf("Unable to get render model interface: %s", vr::VR_GetVRInitErrorAsEnglishDescription(eError));
      return false;
   }

   mDriver = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String);
   mDisplay = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String);

   mHMDRenderState.reset(mHMD);
   mHMD->ResetSeatedZeroPose();
   dMemset(mPreviousInputTrackedDevicePose, '\0', sizeof(mPreviousInputTrackedDevicePose));

   mEnabled = true;

   return true;
}

bool OpenVRProvider::disable()
{
   if (mHMD)
   {
      mHMD = NULL;
      mRenderModels = NULL;
      mHMDRenderState.reset(NULL);
      vr::VR_Shutdown();
   }

   mEnabled = false;

   return true;
}

void OpenVRProvider::buildInputCodeTable()
{
   // Obtain all of the device codes
   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i)
   {
      OVR_SENSORROT[i] = INPUTMGR->getNextDeviceCode();
      OVR_SENSORROTANG[i] = INPUTMGR->getNextDeviceCode();

      OVR_SENSORVELOCITY[i] = INPUTMGR->getNextDeviceCode();
      OVR_SENSORANGVEL[i] = INPUTMGR->getNextDeviceCode();
      OVR_SENSORMAGNETOMETER[i] = INPUTMGR->getNextDeviceCode();

      OVR_SENSORPOSITION[i] = INPUTMGR->getNextDeviceCode();

      OVR_BUTTONPRESSED[i] = INPUTMGR->getNextDeviceCode();
      OVR_BUTTONTOUCHED[i] = INPUTMGR->getNextDeviceCode();

      OVR_AXISNONE[i] = INPUTMGR->getNextDeviceCode();
      OVR_AXISTRACKPAD[i] = INPUTMGR->getNextDeviceCode();
      OVR_AXISJOYSTICK[i] = INPUTMGR->getNextDeviceCode();
      OVR_AXISTRIGGER[i] = INPUTMGR->getNextDeviceCode();
   }

   // Build out the virtual map
   char buffer[64];
   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i)
   {
      dSprintf(buffer, 64, "opvr_sensorrot%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_ROT, OVR_SENSORROT[i]);

      dSprintf(buffer, 64, "opvr_sensorrotang%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORROTANG[i]);

      dSprintf(buffer, 64, "opvr_sensorvelocity%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORVELOCITY[i]);

      dSprintf(buffer, 64, "opvr_sensorangvel%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORANGVEL[i]);

      dSprintf(buffer, 64, "opvr_sensormagnetometer%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORMAGNETOMETER[i]);

      dSprintf(buffer, 64, "opvr_sensorpos%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORPOSITION[i]);

      dSprintf(buffer, 64, "opvr_buttonpressed%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_BUTTONPRESSED[i]);
      dSprintf(buffer, 64, "opvr_buttontouched%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_BUTTONTOUCHED[i]);

      dSprintf(buffer, 64, "opvr_axis_none%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISNONE[i]);
      dSprintf(buffer, 64, "opvr_axis_trackpad%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISTRACKPAD[i]);
      dSprintf(buffer, 64, "opvr_axis_joystick%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISJOYSTICK[i]);
      dSprintf(buffer, 64, "opvr_axis_trigger%d", i);
      INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_AXISTRIGGER[i]);
   }
}

bool OpenVRProvider::process()
{
   if (!mHMD)
      return true;

   // Process SteamVR events
   vr::VREvent_t event;
   while (mHMD->PollNextEvent(&event, sizeof(event)))
   {
      processVREvent(event);
   }

   // Process SteamVR controller state
   for (vr::TrackedDeviceIndex_t unDevice = 0; unDevice < vr::k_unMaxTrackedDeviceCount; unDevice++)
   {
      vr::VRControllerState_t state;
      if (mHMD->GetControllerState(unDevice, &state))
      {
         // TODO
      }
   }

   // Update input poses
   updateTrackedPoses();
   submitInputChanges();

   return true;
}
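// Editor's note: a minimal sketch of what the controller-state TODO inside process()
// could grow into, diffing the packed button bits against the previous packet and
// forwarding changes through the same input path the pose data uses. It would live
// inside the device loop where `state` and `unDevice` are in scope; the previous-state
// array and the integer buildInputEvent() overload are assumptions for illustration,
// not part of the commit.
/*
   static vr::VRControllerState_t sPreviousState[vr::k_unMaxTrackedDeviceCount];

   if (state.ulButtonPressed != sPreviousState[unDevice].ulButtonPressed)
   {
      INPUTMGR->buildInputEvent(mDeviceType, 0, SI_INT, OVR_BUTTONPRESSED[unDevice],
                                SI_MOVE, (S32)(state.ulButtonPressed & 0xFFFFFFFF));
   }

   sPreviousState[unDevice] = state;
*/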

bool OpenVRProvider::providesFrameEyePose() const
{
   return mHMD != NULL;
}

inline Point3F OpenVRVecToTorqueVec(vr::HmdVector3_t vec)
{
   return Point3F(-vec.v[0], vec.v[2], -vec.v[1]);
}
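// Editor's note: this swizzle maps OpenVR's axes onto Torque's; for example the
// OpenVR vector (1, 2, 3) becomes the Torque vector (-1, 3, -2).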

void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos)
{
   // Directly set the rotation and position from the eye transforms
   MatrixF torqueMat(1);

   F32 inRotMat[4][4];
   Point4F col0; mat.getColumn(0, &col0);
   Point4F col1; mat.getColumn(1, &col1);
   Point4F col2; mat.getColumn(2, &col2);
   Point4F col3; mat.getColumn(3, &col3);
   inRotMat[0][0] = col0.x;
   inRotMat[0][1] = col0.y;
   inRotMat[0][2] = col0.z;
   inRotMat[0][3] = col0.w;
   inRotMat[1][0] = col1.x;
   inRotMat[1][1] = col1.y;
   inRotMat[1][2] = col1.z;
   inRotMat[1][3] = col1.w;
   inRotMat[2][0] = col2.x;
   inRotMat[2][1] = col2.y;
   inRotMat[2][2] = col2.z;
   inRotMat[2][3] = col2.w;
   inRotMat[3][0] = col3.x;
   inRotMat[3][1] = col3.y;
   inRotMat[3][2] = col3.z;
   inRotMat[3][3] = col3.w;

   OculusVRUtil::convertRotation(inRotMat, torqueMat);

   Point3F pos = torqueMat.getPosition();
   outRot = QuatF(torqueMat);
   outPos = Point3F(-pos.x, pos.z, -pos.y);
}

void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const
{
   AssertFatal(eye < 2, "Out of bounds eye");

   MatrixF mat = mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eye];

   OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
   pose->velocity = Point3F(0);
   pose->angularVelocity = Point3F(0);
}

bool OpenVRProvider::providesEyeOffsets() const
{
   return mHMD != NULL;
}

/// Returns eye offset not taking into account any position tracking info
void OpenVRProvider::getEyeOffsets(Point3F *dest) const
{
   dest[0] = mHMDRenderState.mEyePose[0].getPosition();
   dest[1] = mHMDRenderState.mEyePose[1].getPosition();
}

bool OpenVRProvider::providesFovPorts() const
{
   return mHMD != NULL;
}

void OpenVRProvider::getFovPorts(FovPort *out) const
{
   dMemcpy(out, mHMDRenderState.mEyeFov, sizeof(mHMDRenderState.mEyeFov));
}

bool OpenVRProvider::providesProjectionOffset() const
{
   return mHMD != NULL;
}

const Point2F& OpenVRProvider::getProjectionOffset() const
{
   // Return a persistent zero offset rather than a reference to a temporary.
   static const Point2F sNoOffset(0, 0);
   return sNoOffset;
}

void OpenVRProvider::getStereoViewports(RectI *out) const
{
   out[0] = mHMDRenderState.mEyeViewport[0];
   out[1] = mHMDRenderState.mEyeViewport[1];
}

void OpenVRProvider::getStereoTargets(GFXTextureTarget **out) const
{
   out[0] = mHMDRenderState.mEyeRT[0];
   out[1] = mHMDRenderState.mEyeRT[1];
}

void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas)
{
   vr::EVRInitError peError = vr::VRInitError_None;

   if (!vr::VRCompositor())
   {
      Con::errorf("Compositor initialization failed. See log file for details");
      return;
   }

   if (mDrawCanvas != canvas || mHMDRenderState.mHMD == NULL)
   {
      mHMDRenderState.setupRenderTargets(0);
   }
   mDrawCanvas = canvas;
}

void OpenVRProvider::setCurrentConnection(GameConnection *connection)
{
   mGameConnection = connection;
}

GameConnection* OpenVRProvider::getCurrentConnection()
{
   return mGameConnection;
}

GFXTexHandle OpenVRProvider::getPreviewTexture()
{
   return mHMDRenderState.mStereoRenderTextures[0]; // TODO: render distortion preview
}

void OpenVRProvider::onStartFrame()
{
   if (!mHMD)
      return;
}

void OpenVRProvider::onEndFrame()
{
   if (!mHMD)
      return;
}

void OpenVRProvider::onEyeRendered(U32 index)
{
   if (!mHMD)
      return;

   if (GFX->getAdapterType() == Direct3D11)
   {
      vr::Texture_t eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma };
      vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);
   }
   else if (GFX->getAdapterType() == OpenGL)
   {
      /*
      vr::Texture_t eyeTexture = { (void*)static_cast<GFXGLTextureObject*>(mHMDRenderState.mStereoRenderTextures[index].getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma };
      vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);
      */
   }
}

bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt)
{
   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
   {
      return true;
   }

   switch (evt)
   {
   case GFXDevice::deStartOfFrame:
      // Start of frame
      onStartFrame();
      break;

   case GFXDevice::dePostFrame:
      // End of frame
      onEndFrame();
      break;

   case GFXDevice::deDestroy:
      // Need to reinit rendering
      break;

   case GFXDevice::deLeftStereoFrameRendered:
      onEyeRendered(0);
      break;

   case GFXDevice::deRightStereoFrameRendered:
      onEyeRendered(1);
      break;

   default:
      break;
   }

   return true;
}

void OpenVRProvider::processVREvent(const vr::VREvent_t & event)
{
   switch (event.eventType)
   {
   case vr::VREvent_TrackedDeviceActivated:
   {
      // Setup render model
   }
   break;
   case vr::VREvent_TrackedDeviceDeactivated:
   {
      // Deactivated
   }
   break;
   case vr::VREvent_TrackedDeviceUpdated:
   {
      // Updated
   }
   break;
   }
}

void OpenVRProvider::updateTrackedPoses()
{
   if (!mHMD)
      return;

   vr::VRCompositor()->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0);

   mValidPoseCount = 0;

   for (int nDevice = 0; nDevice < vr::k_unMaxTrackedDeviceCount; ++nDevice)
   {
      IDevicePose &inPose = mCurrentDevicePose[nDevice];
      if (mTrackedDevicePose[nDevice].bPoseIsValid)
      {
         mValidPoseCount++;
         MatrixF mat = ConvertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking);
         mat.inverse();

         if (nDevice == vr::k_unTrackedDeviceIndex_Hmd)
         {
            mHMDRenderState.mHMDPose = mat;
         }

         vr::TrackedDevicePose_t &outPose = mTrackedDevicePose[nDevice];
         OpenVRTransformToRotPos(mat, inPose.orientation, inPose.position);

         inPose.state = outPose.eTrackingResult;
         inPose.valid = outPose.bPoseIsValid;
         inPose.connected = outPose.bDeviceIsConnected;

         inPose.velocity = OpenVRVecToTorqueVec(outPose.vVelocity);
         inPose.angularVelocity = OpenVRVecToTorqueVec(outPose.vAngularVelocity);
      }
      else
      {
         inPose.valid = false;
      }
   }
}

void OpenVRProvider::submitInputChanges()
{
   // Diff current frame with previous frame
   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
   {
      IDevicePose curPose = mCurrentDevicePose[i];
      IDevicePose prevPose = mPreviousInputTrackedDevicePose[i];

      if (!curPose.valid || !curPose.connected)
         continue;

      if (curPose.orientation != prevPose.orientation)
      {
         AngAxisF axisAA(curPose.orientation);
         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[i], SI_MOVE, axisAA);
      }

      if (curPose.position != prevPose.position)
      {
         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[i], SI_MOVE, curPose.position);
      }

      if (curPose.velocity != prevPose.velocity)
      {
         // Linear velocity is passed through unchanged
         VectorF velocity;
         velocity.x = curPose.velocity.x;
         velocity.y = curPose.velocity.y;
         velocity.z = curPose.velocity.z;

         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[i], SI_MOVE, velocity);
      }

      if (curPose.angularVelocity != prevPose.angularVelocity)
      {
         // Convert angular velocity to degrees
         VectorF angles;
         angles[0] = mRadToDeg(curPose.angularVelocity.x);
         angles[1] = mRadToDeg(curPose.angularVelocity.y);
         angles[2] = mRadToDeg(curPose.angularVelocity.z);

         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[i], SI_MOVE, angles);
      }
      /*
      if (curPose.connected != prevPose.connected)
      {
         if (Con::isFunction("onOVRConnectionChanged"))
         {
            Con::executef("onOVRConnectionChanged", curPose.connected);
         }
      }*/

      if (curPose.state != prevPose.state)
      {
         if (Con::isFunction("onOVRStateChanged"))
         {
            Con::executef("onOVRStateChanged", curPose.state);
         }
      }
   }

   dMemcpy(mPreviousInputTrackedDevicePose, mCurrentDevicePose, sizeof(mPreviousInputTrackedDevicePose));
}

void OpenVRProvider::resetSensors()
{
   if (mHMD)
   {
      mHMD->ResetSeatedZeroPose();
   }
}

DefineEngineFunction(isOpenVRDeviceActive, bool, (), ,
   "@brief Used to determine if the OpenVR input device is active\n\n"

   "The OpenVR device is considered active when the library has been "
   "initialized and either a real or simulated HMD is present.\n\n"

   "@return True if the OpenVR input device is active.\n"

   "@ingroup Game")
{
   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
   {
      return false;
   }

   return OPENVRDEV->getActive();
}


DefineEngineFunction(OpenVRSetEnabled, bool, (bool value), ,
   "@brief Enables or disables the OpenVR device\n\n"

   "@param value True to initialize and enable the device, false to shut it down.\n\n"

   "@return True if the device was successfully enabled or disabled.\n"

   "@ingroup Game")
{
   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
   {
      return false;
   }

   return value ? ManagedSingleton<OpenVRProvider>::instance()->enable() : ManagedSingleton<OpenVRProvider>::instance()->disable();
}


DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), ,
   "@brief Sets the first HMD to be a GameConnection's display device\n\n"
   "@param conn The GameConnection to set.\n"
   "@return True if the GameConnection display device was set.\n"
   "@ingroup Game")
{
   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
   {
      Con::errorf("setOpenVRHMDAsGameConnectionDisplayDevice(): No OpenVR device present.");
      return false;
   }

   if (!conn)
   {
      Con::errorf("setOpenVRHMDAsGameConnectionDisplayDevice(): Invalid GameConnection.");
      return false;
   }

   conn->setDisplayDevice(ManagedSingleton<OpenVRProvider>::instance());
   return true;
}

DefineEngineFunction(OpenVRResetSensors, void, (), ,
   "@brief Resets all OpenVR sensors.\n\n"
   "This resets all sensor orientations such that their 'normal' rotation "
   "is defined when this function is called. This defines an HMD's forward "
   "and up direction, for example.\n\n"
   "@ingroup Game")
{
   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
   {
      return;
   }

   ManagedSingleton<OpenVRProvider>::instance()->resetSensors();
}

172 Engine/source/platform/input/openVR/openVRProvider.h Normal file

@ -0,0 +1,172 @@
#ifndef _OPENVRDEVICE_H_
#define _OPENVRDEVICE_H_

#include "math/mQuat.h"
#include "math/mPoint4.h"
#include "math/util/frustum.h"
#include "core/util/tSingleton.h"

#include "gfx/gfxDevice.h"
#include "gfx/gfxVertexBuffer.h"
#include "gfx/gfxPrimitiveBuffer.h"
#include "gfx/gfxTarget.h"

#include "platform/input/IInputDevice.h"
#include "platform/input/event.h"
#include "platform/output/IDisplayDevice.h"

#include <openvr.h>

class OpenVRHMDDevice;

struct OpenVRRenderState
{
   vr::IVRSystem *mHMD;

   FovPort mEyeFov[2];
   MatrixF mEyePose[2];
   MatrixF mHMDPose;

   RectI mEyeViewport[2];
   GFXTextureTargetRef mStereoRT;
   GFXTextureTargetRef mEyeRT[2];

   GFXTexHandle mStereoRenderTextures[2];
   GFXTexHandle mStereoDepthTextures[2];

   GFXVertexBufferHandle<GFXVertexPTTT> mDistortionVerts;
   GFXPrimitiveBufferHandle mDistortionInds;

   bool setupRenderTargets(U32 mode);
   void setupDistortion();

   void renderDistortion(U32 eye);

   void renderPreview();

   void reset(vr::IVRSystem* hmd);
};

class OpenVRProvider : public IDisplayDevice, public IInputDevice
{
public:

   enum DataDifferences {
      DIFF_NONE = 0,
      DIFF_ROT = (1 << 0),
      DIFF_ROTAXISX = (1 << 1),
      DIFF_ROTAXISY = (1 << 2),
      DIFF_ACCEL = (1 << 3),
      DIFF_ANGVEL = (1 << 4),
      DIFF_MAG = (1 << 5),
      DIFF_POS = (1 << 6),
      DIFF_STATUS = (1 << 7),

      DIFF_ROTAXIS = (DIFF_ROTAXISX | DIFF_ROTAXISY),
      DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
   };

   OpenVRProvider();
   ~OpenVRProvider();

   static void staticInit();

   bool enable();
   bool disable();

   bool getActive() { return mHMD != NULL; }

   /// @name Input handling
   /// {
   void buildInputCodeTable();
   virtual bool process();
   /// }

   /// @name Display handling
   /// {
   virtual bool providesFrameEyePose() const;
   virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const;

   virtual bool providesEyeOffsets() const;
   /// Returns eye offset not taking into account any position tracking info
   virtual void getEyeOffsets(Point3F *dest) const;

   virtual bool providesFovPorts() const;
   virtual void getFovPorts(FovPort *out) const;

   virtual bool providesProjectionOffset() const;
   virtual const Point2F& getProjectionOffset() const;

   virtual void getStereoViewports(RectI *out) const;
   virtual void getStereoTargets(GFXTextureTarget **out) const;

   virtual void setDrawCanvas(GuiCanvas *canvas);

   virtual void setCurrentConnection(GameConnection *connection);
   virtual GameConnection* getCurrentConnection();

   virtual GFXTexHandle getPreviewTexture();

   virtual void onStartFrame();
   virtual void onEndFrame();

   virtual void onEyeRendered(U32 index);

   bool _handleDeviceEvent(GFXDevice::GFXDeviceEventType evt);
   /// }

   /// @name OpenVR handling
   /// {
   void processVREvent(const vr::VREvent_t & event);

   void updateTrackedPoses();
   void submitInputChanges();

   void resetSensors();
   /// }

   /// @name OpenVR state
   /// {
   vr::IVRSystem *mHMD;
   vr::IVRRenderModels *mRenderModels;
   String mDriver;
   String mDisplay;
   vr::TrackedDevicePose_t mTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
   IDevicePose mCurrentDevicePose[vr::k_unMaxTrackedDeviceCount];
   IDevicePose mPreviousInputTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
   U32 mValidPoseCount;

   char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount];

   OpenVRRenderState mHMDRenderState;
   /// }

   GuiCanvas* mDrawCanvas;
   GameConnection* mGameConnection;

   static U32 OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount];

   static U32 OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount];

   static U32 OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount];


public:
   // For ManagedSingleton.
   static const char* getSingletonName() { return "OpenVRProvider"; }
};

/// Returns the OpenVRProvider singleton.
#define OPENVRDEV ManagedSingleton<OpenVRProvider>::instance()

#endif // _OPENVRDEVICE_H_

@ -34,8 +34,16 @@ class GuiCanvas;

/// Defines the basic display pose common to most display devices
typedef struct DisplayPose
{
   EulerF orientation; /// Direction device is facing
   QuatF orientation;  /// Direction device is facing
   Point3F position;   /// Relative position of device in view space

   Point3F velocity;
   Point3F angularVelocity;

   U32 state; /// Generic state

   bool valid;     /// Pose set
   bool connected; /// Device connected
} IDevicePose;

class IDisplayDevice

@ -63,6 +71,9 @@ public:
   virtual GameConnection* getCurrentConnection() = 0;

   virtual void onStartFrame() = 0;

   /// Returns a texture handle representing a preview of the composited VR view
   virtual GFXTexHandle getPreviewTexture() = 0;
};

#endif // _IDISPLAYDEVICE_H_