Merge pull request #1688 from jamesu/dx11_openvr_pr

Basic OpenVR Support code
This commit is contained in:
Areloch 2016-09-14 00:24:20 -05:00 committed by GitHub
commit 7e4095d610
74 changed files with 5987 additions and 1384 deletions

View file

@ -393,44 +393,6 @@ void Camera::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, Mat
}
}
// Computes the per-frame camera delta between the display device's reported
// pose and the camera's last known absolute orientation, wrapping each axis
// into a canonical range. Position is passed through unchanged.
// NOTE(review): assumes mLastAbsolutePitch/mLastAbsoluteYaw/mLastAbsoluteRoll
// were updated elsewhere (presumably in updateMove) — confirm against caller.
DisplayPose Camera::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
{
// NOTE: this is intended to be similar to updateMove
DisplayPose outPose;
// Orientation starts at zero so axes not written below (e.g. bank when
// cameraCanBank is false) remain 0.
outPose.orientation = EulerF(0,0,0);
outPose.position = inPose.position;
// Pitch
outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);
// Constrain the range of mRot.x
// Wrap pitch into [-PI, PI].
while (outPose.orientation.x < -M_PI_F)
outPose.orientation.x += M_2PI_F;
while (outPose.orientation.x > M_PI_F)
outPose.orientation.x -= M_2PI_F;
// Yaw
outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
// Constrain the range of mRot.z
// Wrap yaw into [-PI, PI].
while (outPose.orientation.z < -M_PI_F)
outPose.orientation.z += M_2PI_F;
while (outPose.orientation.z > M_PI_F)
outPose.orientation.z -= M_2PI_F;
// Bank
// Bank is only applied when the datablock allows the camera to roll.
if (mDataBlock->cameraCanBank)
{
outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
}
// Constrain the range of mRot.y
// NOTE(review): only the upper bound is wrapped here (no matching
// < -M_PI_F loop as pitch/yaw have) — confirm this asymmetry is intended.
while (outPose.orientation.y > M_PI_F)
outPose.orientation.y -= M_2PI_F;
return outPose;
}
//----------------------------------------------------------------------------
F32 Camera::getCameraFov()

View file

@ -237,7 +237,6 @@ class Camera: public ShapeBase
virtual void interpolateTick( F32 delta);
virtual void getCameraTransform( F32* pos,MatrixF* mat );
virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
virtual void writePacketData( GameConnection* conn, BitStream* stream );
virtual void readPacketData( GameConnection* conn, BitStream* stream );

View file

@ -1,6 +1,7 @@
#include "T3D/gameBase/extended/extendedMove.h"
#include "core/stream/bitStream.h"
#include "math/mathIO.h"
#include "math/mAngAxis.h"
#include "core/module.h"
#include "console/consoleTypes.h"
#include "core/strings/stringFunctions.h"
@ -15,15 +16,17 @@ MODULE_BEGIN( ExtendedMoveManager )
MODULE_END;
S32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, };
S32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, };
S32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, };
bool ExtendedMoveManager::mRotIsEuler[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mRotAX[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mRotAY[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mRotAZ[ExtendedMove::MaxPositionsRotations] = { 0, };
F32 ExtendedMoveManager::mRotAA[ExtendedMove::MaxPositionsRotations] = { 1, };
F32 ExtendedMoveManager::mPosScale = 2.0f;
void ExtendedMoveManager::init()
{
for(U32 i = 0; i < ExtendedMove::MaxPositionsRotations; ++i)
@ -31,19 +34,19 @@ void ExtendedMoveManager::init()
char varName[256];
dSprintf(varName, sizeof(varName), "mvPosX%d", i);
Con::addVariable(varName, TypeS32, &mPosX[i],
Con::addVariable(varName, TypeF32, &mPosX[i],
"X position of controller in millimeters. Only 13 bits are networked.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvPosY%d", i);
Con::addVariable(varName, TypeS32, &mPosY[i],
Con::addVariable(varName, TypeF32, &mPosY[i],
"Y position of controller in millimeters. Only 13 bits are networked.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvPosZ%d", i);
Con::addVariable(varName, TypeS32, &mPosZ[i],
Con::addVariable(varName, TypeF32, &mPosZ[i],
"Z position of controller in millimeters. Only 13 bits are networked.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvRotIsEuler%d", i);
Con::addVariable(varName, TypeBool, &mRotIsEuler[i],
@ -52,33 +55,39 @@ void ExtendedMoveManager::init()
"(a vector and angle). When true, the given rotation is a three component "
"Euler angle. When using Euler angles, the $mvRotA component of the ExtendedMove "
"is ignored for this set of rotations.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvRotX%d", i);
Con::addVariable(varName, TypeF32, &mRotAX[i],
"X rotation vector component of controller.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvRotY%d", i);
Con::addVariable(varName, TypeF32, &mRotAY[i],
"Y rotation vector component of controller.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvRotZ%d", i);
Con::addVariable(varName, TypeF32, &mRotAZ[i],
"Z rotation vector component of controller.\n"
"@ingroup Game");
"@ingroup Game");
dSprintf(varName, sizeof(varName), "mvRotA%d", i);
Con::addVariable(varName, TypeF32, &mRotAA[i],
"Angle rotation (in degrees) component of controller.\n"
"@ingroup Game");
"@ingroup Game");
}
Con::addVariable("mvPosScale", TypeF32, &mPosScale,
"@brief Indicates the scale to be given to mvPos values.\n\n"
""
"@ingroup Game");
}
const ExtendedMove NullExtendedMove;
#define CLAMPPOS(x) (x<0 ? -((-x) & (1<<(MaxPositionBits-1))-1) : (x & (1<<(MaxPositionBits-1))-1))
#define CLAMPPOS(x) ((S32)(((x + 1) * .5) * ((1 << MaxPositionBits) - 1)) & ((1<<MaxPositionBits)-1))
#define UNCLAMPPOS(x) ((F32)(x * 2 / F32((1 << MaxPositionBits) - 1) - 1.0f))
#define CLAMPROT(f) ((S32)(((f + 1) * .5) * ((1 << MaxRotationBits) - 1)) & ((1<<MaxRotationBits)-1))
#define UNCLAMPROT(x) ((F32)(x * 2 / F32((1 << MaxRotationBits) - 1) - 1.0f))
@ -94,6 +103,10 @@ ExtendedMove::ExtendedMove() : Move()
rotZ[i] = 0;
rotW[i] = 1;
cposX[i] = 0;
cposY[i] = 0;
cposZ[i] = 0;
EulerBasedRotation[i] = false;
}
}
@ -133,20 +146,20 @@ void ExtendedMove::pack(BitStream *stream, const Move * basemove)
{
// Position
if(stream->writeFlag(posX[i] != extBaseMove->posX[i]))
stream->writeSignedInt(posX[i], MaxPositionBits);
stream->writeInt(cposX[i], MaxPositionBits);
if(stream->writeFlag(posY[i] != extBaseMove->posY[i]))
stream->writeSignedInt(posY[i], MaxPositionBits);
stream->writeInt(cposY[i], MaxPositionBits);
if(stream->writeFlag(posZ[i] != extBaseMove->posZ[i]))
stream->writeSignedInt(posZ[i], MaxPositionBits);
stream->writeInt(cposZ[i], MaxPositionBits);
// Rotation
stream->writeFlag(EulerBasedRotation[i]);
if(stream->writeFlag(rotX[i] != extBaseMove->rotX[i]))
stream->writeInt(crotX[i], MaxRotationBits);
stream->writeInt(crotX[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
if(stream->writeFlag(rotY[i] != extBaseMove->rotY[i]))
stream->writeInt(crotY[i], MaxRotationBits);
stream->writeInt(crotY[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
if(stream->writeFlag(rotZ[i] != extBaseMove->rotZ[i]))
stream->writeInt(crotZ[i], MaxRotationBits);
stream->writeInt(crotZ[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
if(!EulerBasedRotation[i])
{
if(stream->writeFlag(rotW[i] != extBaseMove->rotW[i]))
@ -175,18 +188,27 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
for(U32 i=0; i<MaxPositionsRotations; ++i)
{
// Position
if(stream->readFlag())
posX[i] = stream->readSignedInt(MaxPositionBits);
if (stream->readFlag())
{
cposX[i] = stream->readInt(MaxPositionBits);
posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
}
else
posX[i] = extBaseMove->posX[i];
if(stream->readFlag())
posY[i] = stream->readSignedInt(MaxPositionBits);
if (stream->readFlag())
{
cposY[i] = stream->readInt(MaxPositionBits);
posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
}
else
posY[i] = extBaseMove->posY[i];
if(stream->readFlag())
posZ[i] = stream->readSignedInt(MaxPositionBits);
if (stream->readFlag())
{
cposZ[i] = stream->readInt(MaxPositionBits);
posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
}
else
posZ[i] = extBaseMove->posZ[i];
@ -197,8 +219,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
scale = M_2PI_F;
if(stream->readFlag())
{
crotX[i] = stream->readInt(MaxRotationBits);
rotX[i] = UNCLAMPROT(crotX[i]) * scale;
crotX[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
rotX[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotX[i]) * scale) : UNCLAMPPOS(crotX[i]);
}
else
{
@ -207,8 +229,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
if(stream->readFlag())
{
crotY[i] = stream->readInt(MaxRotationBits);
rotY[i] = UNCLAMPROT(crotY[i]) * scale;
crotY[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
rotY[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotY[i]) * scale) : UNCLAMPPOS(crotY[i]);
}
else
{
@ -217,8 +239,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
if(stream->readFlag())
{
crotZ[i] = stream->readInt(MaxRotationBits);
rotZ[i] = UNCLAMPROT(crotZ[i]) * scale;
crotZ[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
rotZ[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotZ[i]) * scale) : UNCLAMPPOS(crotZ[i]);
}
else
{
@ -230,7 +252,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
if(stream->readFlag())
{
crotW[i] = stream->readInt(MaxRotationBits);
rotW[i] = UNCLAMPROT(crotW[i]);
rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F;
}
else
{
@ -252,9 +274,9 @@ void ExtendedMove::clamp()
for(U32 i=0; i<MaxPositionsRotations; ++i)
{
// Positions
posX[i] = CLAMPPOS(posX[i]);
posY[i] = CLAMPPOS(posY[i]);
posZ[i] = CLAMPPOS(posZ[i]);
cposX[i] = CLAMPPOS(posX[i] / ExtendedMoveManager::mPosScale);
cposY[i] = CLAMPPOS(posY[i] / ExtendedMoveManager::mPosScale);
cposZ[i] = CLAMPPOS(posZ[i] / ExtendedMoveManager::mPosScale);
// Rotations
if(EulerBasedRotation[i])
@ -265,11 +287,29 @@ void ExtendedMove::clamp()
}
else
{
crotX[i] = CLAMPROT(rotX[i]);
crotY[i] = CLAMPROT(rotY[i]);
crotZ[i] = CLAMPROT(rotZ[i]);
crotW[i] = CLAMPROT(rotW[i]);
crotX[i] = CLAMPPOS(rotX[i]);
crotY[i] = CLAMPPOS(rotY[i]);
crotZ[i] = CLAMPPOS(rotZ[i]);
crotW[i] = CLAMPROT(rotW[i] / M_2PI_F);
}
#ifdef DEBUG_CONTROLLER_MOVE
if (i == 1)
{
F32 x, y, z, a;
x = UNCLAMPPOS(crotX[i]);
y = UNCLAMPPOS(crotY[i]);
z = UNCLAMPPOS(crotZ[i]);
a = UNCLAMPROT(crotW[i]) * M_2PI_F;
Con::printf("INPUT POS == %f,%f,%f", ExtendedMoveManager::mPosX[i], ExtendedMoveManager::mPosY[i], ExtendedMoveManager::mPosZ[i]);
Con::printf("rot %f,%f,%f,%f clamped to %f,%f,%f,%f", rotX[i], rotY[i], rotZ[i], rotW[i], x,y,z,a);
x = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
y = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
z = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
Con::printf("pos %f,%f,%f clamped to %f,%f,%f", posX[i], posY[i], posZ[i], x, y, z);
}
#endif
}
// Perform the standard Move clamp
@ -281,6 +321,10 @@ void ExtendedMove::unclamp()
// Unclamp the values the same as for net traffic so the client matches the server
for(U32 i=0; i<MaxPositionsRotations; ++i)
{
posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale;
posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale;
posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale;
// Rotations
if(EulerBasedRotation[i])
{
@ -290,10 +334,10 @@ void ExtendedMove::unclamp()
}
else
{
rotX[i] = UNCLAMPROT(crotX[i]);
rotY[i] = UNCLAMPROT(crotY[i]);
rotZ[i] = UNCLAMPROT(crotZ[i]);
rotW[i] = UNCLAMPROT(crotW[i]);
rotX[i] = UNCLAMPPOS(crotX[i]);
rotY[i] = UNCLAMPPOS(crotY[i]);
rotZ[i] = UNCLAMPPOS(crotZ[i]);
rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F;
}
}

View file

@ -11,12 +11,14 @@ struct ExtendedMove : public Move
enum Constants {
MaxPositionsRotations = 3,
MaxPositionBits = 13,
MaxPositionBits = 16,
MaxRotationBits = 16,
};
// Position is in millimeters
S32 posX[MaxPositionsRotations], posY[MaxPositionsRotations], posZ[MaxPositionsRotations];
F32 posX[MaxPositionsRotations], posY[MaxPositionsRotations], posZ[MaxPositionsRotations];
S32 cposX[MaxPositionsRotations], cposY[MaxPositionsRotations], cposZ[MaxPositionsRotations];
bool EulerBasedRotation[MaxPositionsRotations];
@ -39,15 +41,17 @@ extern const ExtendedMove NullExtendedMove;
class ExtendedMoveManager
{
public:
static S32 mPosX[ExtendedMove::MaxPositionsRotations];
static S32 mPosY[ExtendedMove::MaxPositionsRotations];
static S32 mPosZ[ExtendedMove::MaxPositionsRotations];
static F32 mPosX[ExtendedMove::MaxPositionsRotations];
static F32 mPosY[ExtendedMove::MaxPositionsRotations];
static F32 mPosZ[ExtendedMove::MaxPositionsRotations];
static bool mRotIsEuler[ExtendedMove::MaxPositionsRotations];
static F32 mRotAX[ExtendedMove::MaxPositionsRotations];
static F32 mRotAY[ExtendedMove::MaxPositionsRotations];
static F32 mRotAZ[ExtendedMove::MaxPositionsRotations];
static F32 mRotAA[ExtendedMove::MaxPositionsRotations];
static F32 mPosScale;
static void init();
};

View file

@ -75,11 +75,11 @@ bool ExtendedMoveList::getNextExtMove( ExtendedMove &curMove )
else
{
//Rotation is passed in as an Angle Axis in degrees. We need to convert this into a Quat.
QuatF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i]));
curMove.rotX[i] = q.x;
curMove.rotY[i] = q.y;
curMove.rotZ[i] = q.z;
curMove.rotW[i] = q.w;
AngAxisF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i]));
curMove.rotX[i] = q.axis.x;
curMove.rotY[i] = q.axis.y;
curMove.rotZ[i] = q.axis.z;
curMove.rotW[i] = q.angle;
}
}

View file

@ -469,8 +469,8 @@ bool GameConnection::readConnectRequest(BitStream *stream, const char **errorStr
for(U32 i = 0; i < mConnectArgc+3; i++)
{
connectArgv[i].value = &connectArgvValue[i];
connectArgvValue[i].init();
connectArgv[i].value = &connectArgvValue[i];
connectArgvValue[i].init();
}
for(U32 i = 0; i < mConnectArgc; i++)
@ -681,6 +681,24 @@ bool GameConnection::getControlCameraTransform(F32 dt, MatrixF* mat)
return true;
}
// Returns the head (center-eye) transform of the connection's control object,
// letting the display device supply supplemental pose data.
// Walks the control-object chain so that a mounted/controlled object that
// wants to provide its own eye point (useObjsEyePoint) is used instead of
// the base camera object.
// Returns false when there is no camera object; true otherwise, with
// *transform filled in.
bool GameConnection::getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform)
{
GameBase* obj = getCameraObject();
if (!obj)
return false;
// Follow the chain of control objects, preferring the innermost one that
// opts in to supplying the eye point.
GameBase* cObj = obj;
while ((cObj = cObj->getControlObject()) != 0)
{
if (cObj->useObjsEyePoint())
obj = cObj;
}
// eyeId is declared U32 elsewhere, so -1 wraps to 0xFFFFFFFF — presumably a
// sentinel meaning "head pose, not a specific eye". TODO confirm against
// getEyeCameraTransform's handling of the sentinel.
obj->getEyeCameraTransform(display, -1, transform);
return true;
}
bool GameConnection::getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms)
{
GameBase* obj = getCameraObject();
@ -896,8 +914,8 @@ void GameConnection::onRemove()
// clientgroup and what not (this is so that we can disconnect from a local server
// without needing to destroy and recreate the server before we can connect to it
// again).
// Safe-delete as we don't know whether the server connection is currently being
// worked on.
// Safe-delete as we don't know whether the server connection is currently being
// worked on.
getRemoteConnection()->safeDeleteObject();
setRemoteConnectionObject(NULL);
}

View file

@ -267,6 +267,10 @@ public:
bool getControlCameraTransform(F32 dt,MatrixF* mat);
bool getControlCameraVelocity(Point3F *vel);
/// Returns the head transform for the control object, using supplemental information
/// from the provided IDisplayDevice
bool getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform);
/// Returns the eye transforms for the control object, using supplemental information
/// from the provided IDisplayDevice.
bool getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms);

View file

@ -348,13 +348,13 @@ bool GameProcessCameraQuery(CameraQuery *query)
query->farPlane = gClientSceneGraph->getVisibleDistance() * CameraAndFOV::sVisDistanceScale;
// Provide some default values
query->projectionOffset = Point2F::Zero;
query->stereoTargets[0] = 0;
query->stereoTargets[1] = 0;
query->eyeOffset[0] = Point3F::Zero;
query->eyeOffset[1] = Point3F::Zero;
query->hasFovPort = false;
query->hasStereoTargets = false;
query->displayDevice = NULL;
F32 cameraFov = 0.0f;
bool fovSet = false;
@ -364,6 +364,9 @@ bool GameProcessCameraQuery(CameraQuery *query)
if(!gEditingMission && connection->hasDisplayDevice())
{
IDisplayDevice* display = connection->getDisplayDevice();
query->displayDevice = display;
// Note: all eye values are invalid until this is called
display->setDrawCanvas(query->drawCanvas);
@ -372,12 +375,6 @@ bool GameProcessCameraQuery(CameraQuery *query)
// Display may activate AFTER so we need to call this again just in case
display->onStartFrame();
// The connection's display device may want to set the projection offset
if(display->providesProjectionOffset())
{
query->projectionOffset = display->getProjectionOffset();
}
// The connection's display device may want to set the eye offset
if(display->providesEyeOffsets())
{
@ -394,6 +391,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
// Grab the latest overriding render view transforms
connection->getControlCameraEyeTransforms(display, query->eyeTransforms);
connection->getControlCameraHeadTransform(display, &query->headMatrix);
display->getStereoViewports(query->stereoViewports);
display->getStereoTargets(query->stereoTargets);
@ -403,6 +401,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
{
query->eyeTransforms[0] = query->cameraMatrix;
query->eyeTransforms[1] = query->cameraMatrix;
query->headMatrix = query->cameraMatrix;
}
// Use the connection's FOV settings if requried

View file

@ -33,6 +33,7 @@
#include "gfx/gfxOcclusionQuery.h"
#include "gfx/gfxDrawUtil.h"
#include "gfx/gfxTextureManager.h"
#include "gfx/sim/debugDraw.h"
#include "renderInstance/renderPassManager.h"
#include "T3D/gameBase/gameConnection.h"
#include "T3D/gameBase/processList.h"
@ -275,12 +276,10 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt
// is on scren at all... if not then return
// the last result.
const Point3F &lightPos = flareState->lightMat.getPosition();
const RectI &viewport = GFX->getViewport();
MatrixF projMatrix;
state->getCameraFrustum().getProjectionMatrix(&projMatrix);
if( state->isReflectPass() )
projMatrix = state->getSceneManager()->getNonClipProjection();
bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), projMatrix );
const RectI &viewport = RectI(Point2I(0, 0), GFX->getViewport().extent);
MatrixF camProjMatrix = state->getSceneManager()->getNonClipProjection();
bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), camProjMatrix );
// It is onscreen, so raycast as a simple occlusion test.
const LightInfo *lightInfo = flareState->lightInfo;
@ -297,7 +296,7 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt
// Always treat light as onscreen if using HOQ
// it will be faded out if offscreen anyway.
onScreen = true;
needsRaycast = false;
needsRaycast = false;
// Test the hardware queries for rendered pixels.
U32 pixels = 0, fullPixels = 0;
@ -400,63 +399,75 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt
return lightVisible;
}
void LightFlareData::prepRender( SceneRenderState *state, LightFlareState *flareState )
void LightFlareData::prepRender(SceneRenderState *state, LightFlareState *flareState)
{
PROFILE_SCOPE( LightFlareData_prepRender );
PROFILE_SCOPE(LightFlareData_prepRender);
const LightInfo *lightInfo = flareState->lightInfo;
if ( mIsZero( flareState->fullBrightness ) ||
mIsZero( lightInfo->getBrightness() ) )
return;
if (mIsZero(flareState->fullBrightness) ||
mIsZero(lightInfo->getBrightness()))
return;
// Figure out the element count to render.
U32 elementCount = mElementCount;
const bool isReflectPass = state->isReflectPass();
if ( isReflectPass )
if (isReflectPass)
{
// Then we don't render anything this pass.
if ( !mRenderReflectPass )
if (!mRenderReflectPass)
return;
// Find the zero distance elements which make
// up the corona of the light flare.
elementCount = 0.0f;
for ( U32 i=0; i < mElementCount; i++ )
if ( mIsZero( mElementDist[i] ) )
elementCount++;
for (U32 i = 0; i < mElementCount; i++)
if (mIsZero(mElementDist[i]))
elementCount++;
}
// Better have something to render.
if ( elementCount == 0 )
if (elementCount == 0)
return;
U32 visDelta = U32_MAX;
F32 occlusionFade = 1.0f;
Point3F lightPosSS;
bool lightVisible = _testVisibility( state, flareState, &visDelta, &occlusionFade, &lightPosSS );
bool lightVisible = _testVisibility(state, flareState, &visDelta, &occlusionFade, &lightPosSS);
//DebugDrawer::get()->drawBox(flareState->lightMat.getPosition() + Point3F(-0.5, -0.5, -0.5) * 4, flareState->lightMat.getPosition() + Point3F(0.5, 0.5, 0.5) * 4, ColorI::BLUE);
// We can only skip rendering if the light is not
// visible, and it has elapsed the fade out time.
if ( mIsZero( occlusionFade ) ||
!lightVisible && visDelta > FadeOutTime )
if (mIsZero(occlusionFade) ||
!lightVisible && visDelta > FadeOutTime)
return;
const RectI &viewport = GFX->getViewport();
Point3F oneOverViewportExtent( 1.0f / (F32)viewport.extent.x, 1.0f / (F32)viewport.extent.y, 0.0f );
Point3F oneOverViewportExtent(1.0f / (F32)viewport.extent.x, 1.0f / (F32)viewport.extent.y, 0.0f);
// Really convert it to screen space.
lightPosSS.x -= viewport.point.x;
lightPosSS.y -= viewport.point.y;
lightPosSS *= oneOverViewportExtent;
lightPosSS = ( lightPosSS * 2.0f ) - Point3F::One;
lightPosSS = (lightPosSS * 2.0f) - Point3F::One;
lightPosSS.y = -lightPosSS.y;
lightPosSS.z = 0.0f;
// Determine the center of the current projection so we can converge there
Point3F centerProj(0);
{
MatrixF camProjMatrix = state->getSceneManager()->getNonClipProjection();
Point3F outCenterPos;
RectI centerViewport = RectI(Point2I(0, 0), viewport.extent);
MathUtils::mProjectWorldToScreen(Point3F(0,state->getSceneManager()->getNearClip(),0), &outCenterPos, centerViewport, MatrixF::Identity, camProjMatrix);
centerProj = outCenterPos;
centerProj *= oneOverViewportExtent;
centerProj = (centerProj * 2.0f) - Point3F::One;
centerProj.y = -centerProj.y;
centerProj.z = 0.0f;
}
// Take any projection offset into account so that the point where the flare's
// elements converge is at the 'eye' point rather than the center of the viewport.
const Point2F& projOffset = state->getCameraFrustum().getProjectionOffset();
Point3F flareVec( -lightPosSS + Point3F(projOffset.x, projOffset.y, 0.0f) );
Point3F flareVec( centerProj - lightPosSS );
const F32 flareLength = flareVec.len();
if ( flareLength > 0.0f )
flareVec *= 1.0f / flareLength;

View file

@ -56,11 +56,18 @@
#include "T3D/decal/decalManager.h"
#include "T3D/decal/decalData.h"
#include "materials/baseMatInstance.h"
#include "math/mathUtils.h"
#include "gfx/sim/debugDraw.h"
#ifdef TORQUE_EXTENDED_MOVE
#include "T3D/gameBase/extended/extendedMove.h"
#endif
#ifdef TORQUE_OPENVR
#include "platform/input/openVR/openVRProvider.h"
#include "platform/input/openVR/openVRTrackedObject.h"
#endif
// Amount of time if takes to transition to a new action sequence.
static F32 sAnimationTransitionTime = 0.25f;
static bool sUseAnimationTransitions = true;
@ -1776,7 +1783,7 @@ void Player::onRemove()
mWorkingQueryBox.minExtents.set(-1e9f, -1e9f, -1e9f);
mWorkingQueryBox.maxExtents.set(-1e9f, -1e9f, -1e9f);
SAFE_DELETE( mPhysicsRep );
SAFE_DELETE( mPhysicsRep );
Parent::onRemove();
}
@ -2489,10 +2496,25 @@ void Player::allowAllPoses()
mAllowSwimming = true;
}
AngAxisF gPlayerMoveRot;
void Player::updateMove(const Move* move)
{
delta.move = *move;
#ifdef TORQUE_OPENVR
if (mControllers[0])
{
mControllers[0]->processTick(move);
}
if (mControllers[1])
{
mControllers[1]->processTick(move);
}
#endif
// Is waterCoverage high enough to be 'swimming'?
{
bool swimming = mWaterCoverage > 0.65f && canSwim();
@ -2531,6 +2553,7 @@ void Player::updateMove(const Move* move)
delta.headVec = mHead;
bool doStandardMove = true;
bool absoluteDelta = false;
GameConnection* con = getControllingClient();
#ifdef TORQUE_EXTENDED_MOVE
@ -2618,6 +2641,38 @@ void Player::updateMove(const Move* move)
while (mHead.y > M_PI_F)
mHead.y -= M_2PI_F;
}
else
{
// Orient the player so we are looking towards the required position, ignoring any banking
AngAxisF moveRot(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);
MatrixF trans(1);
moveRot.setMatrix(&trans);
trans.inverse();
Point3F vecForward(0, 10, 0);
Point3F viewAngle;
Point3F orient;
EulerF rot;
trans.mulV(vecForward);
viewAngle = vecForward;
vecForward.z = 0; // flatten
vecForward.normalizeSafe();
F32 yawAng;
F32 pitchAng;
MathUtils::getAnglesFromVector(vecForward, yawAng, pitchAng);
mRot = EulerF(0);
mRot.z = yawAng;
mHead = EulerF(0);
while (mRot.z < 0.0f)
mRot.z += M_2PI_F;
while (mRot.z > M_2PI_F)
mRot.z -= M_2PI_F;
absoluteDelta = true;
}
}
#endif
@ -2666,6 +2721,13 @@ void Player::updateMove(const Move* move)
delta.head = mHead;
delta.headVec -= mHead;
if (absoluteDelta)
{
delta.headVec = Point3F(0, 0, 0);
delta.rotVec = Point3F(0, 0, 0);
}
for(U32 i=0; i<3; ++i)
{
if (delta.headVec[i] > M_PI_F)
@ -3275,9 +3337,9 @@ bool Player::canCrouch()
if ( mDataBlock->actionList[PlayerData::CrouchRootAnim].sequence == -1 )
return false;
// We are already in this pose, so don't test it again...
if ( mPose == CrouchPose )
return true;
// We are already in this pose, so don't test it again...
if ( mPose == CrouchPose )
return true;
// Do standard Torque physics test here!
if ( !mPhysicsRep )
@ -3327,8 +3389,8 @@ bool Player::canStand()
return false;
// We are already in this pose, so don't test it again...
if ( mPose == StandPose )
return true;
if ( mPose == StandPose )
return true;
// Do standard Torque physics test here!
if ( !mPhysicsRep )
@ -3391,9 +3453,9 @@ bool Player::canProne()
if ( !mPhysicsRep )
return true;
// We are already in this pose, so don't test it again...
if ( mPose == PronePose )
return true;
// We are already in this pose, so don't test it again...
if ( mPose == PronePose )
return true;
return mPhysicsRep->testSpacials( getPosition(), mDataBlock->proneBoxSize );
}
@ -3590,7 +3652,7 @@ MatrixF * Player::Death::fallToGround(F32 dt, const Point3F& loc, F32 curZ, F32
normal.normalize();
mat.set(EulerF (0.0f, 0.0f, curZ));
mat.mulV(upY, & ahead);
mCross(ahead, normal, &sideVec);
mCross(ahead, normal, &sideVec);
sideVec.normalize();
mCross(normal, sideVec, &ahead);
@ -5589,58 +5651,6 @@ void Player::getMuzzleTransform(U32 imageSlot,MatrixF* mat)
*mat = nmat;
}
// Computes the delta between the display device's reported pose and the
// player's last absolute orientation, but only when the connection uses the
// absolute-rotation control scheme; otherwise the render transform's Euler
// angles are returned unchanged. Position is passed through unchanged.
DisplayPose Player::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
{
// NOTE: this is intended to be similar to updateMove
DisplayPose outPose;
// Default to the current render orientation so the non-absolute path
// returns the player's present facing.
outPose.orientation = getRenderTransform().toEuler();
outPose.position = inPose.position;
if (con && con->getControlSchemeAbsoluteRotation())
{
// Pitch
outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch);
// Constrain the range of mRot.x
// Wrap pitch into [-PI, PI].
while (outPose.orientation.x < -M_PI_F)
outPose.orientation.x += M_2PI_F;
while (outPose.orientation.x > M_PI_F)
outPose.orientation.x -= M_2PI_F;
// Yaw
// Rotate (heading) head or body?
if ((isMounted() && getMountNode() == 0) || (con && !con->isFirstPerson()))
{
// Rotate head
outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
}
else
{
// Rotate body
// NOTE(review): this branch is byte-identical to the "rotate head"
// branch above — either the distinction was never implemented or the
// branches can be collapsed; confirm intent before simplifying.
outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw);
}
// Constrain the range of mRot.z
// Yaw is wrapped into [0, 2*PI] here, unlike pitch's [-PI, PI].
while (outPose.orientation.z < 0.0f)
outPose.orientation.z += M_2PI_F;
while (outPose.orientation.z > M_2PI_F)
outPose.orientation.z -= M_2PI_F;
// Bank
// Bank only applies when the datablock allows camera roll.
if (mDataBlock->cameraCanBank)
{
outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll);
}
// Constrain the range of mRot.y
// NOTE(review): only the upper bound is wrapped — confirm asymmetry is intended.
while (outPose.orientation.y > M_PI_F)
outPose.orientation.y -= M_2PI_F;
}
return outPose;
}
void Player::getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat)
{
disableHeadZCalc();
@ -5836,7 +5846,7 @@ F32 Player::getSpeed() const
void Player::setVelocity(const VectorF& vel)
{
AssertFatal( !mIsNaN( vel ), "Player::setVelocity() - The velocity is NaN!" );
AssertFatal( !mIsNaN( vel ), "Player::setVelocity() - The velocity is NaN!" );
mVelocity = vel;
setMaskBits(MoveMask);
@ -5844,7 +5854,7 @@ void Player::setVelocity(const VectorF& vel)
void Player::applyImpulse(const Point3F&,const VectorF& vec)
{
AssertFatal( !mIsNaN( vec ), "Player::applyImpulse() - The vector is NaN!" );
AssertFatal( !mIsNaN( vec ), "Player::applyImpulse() - The vector is NaN!" );
// Players ignore angular velocity
VectorF vel;
@ -6192,7 +6202,7 @@ U32 Player::packUpdate(NetConnection *con, U32 mask, BitStream *stream)
stream->writeFlag(mSwimming);
stream->writeFlag(mJetting);
stream->writeInt(mPose, NumPoseBits);
stream->writeInt(mState,NumStateBits);
if (stream->writeFlag(mState == RecoverState))
stream->writeInt(mRecoverTicks,PlayerData::RecoverDelayBits);
@ -6293,7 +6303,7 @@ void Player::unpackUpdate(NetConnection *con, BitStream *stream)
mSwimming = stream->readFlag();
mJetting = stream->readFlag();
mPose = (Pose)(stream->readInt(NumPoseBits));
ActionState actionState = (ActionState)stream->readInt(NumStateBits);
if (stream->readFlag()) {
mRecoverTicks = stream->readInt(PlayerData::RecoverDelayBits);
@ -7160,3 +7170,38 @@ void Player::renderConvex( ObjectRenderInst *ri, SceneRenderState *state, BaseMa
mConvex.renderWorkingList();
GFX->leaveDebugEvent();
}
#ifdef TORQUE_OPENVR
// Assigns up to two OpenVR tracked controllers to this player. Missing
// entries are set to NULL; any entries beyond the first two are ignored.
// mControllers are SimObjectPtr, so they null out automatically if the
// tracked objects are deleted.
void Player::setControllers(Vector<OpenVRTrackedObject*> controllerList)
{
mControllers[0] = controllerList.size() > 0 ? controllerList[0] : NULL;
mControllers[1] = controllerList.size() > 1 ? controllerList[1] : NULL;
}
// Script-side binding: %player.setVRControllers(%left, %right).
// Looks up the two arguments as OpenVRTrackedObject instances and installs
// them as the player's controllers; an unresolvable name yields a NULL slot
// rather than an error. (argv[2]/argv[3] are the first two script arguments —
// presumably argv[0] is the method name and argv[1] the object id, per the
// Torque console convention; confirm against ConsoleMethod docs.)
ConsoleMethod(Player, setVRControllers, void, 4, 4, "")
{
OpenVRTrackedObject *controllerL, *controllerR;
Vector<OpenVRTrackedObject*> list;
// Left controller — NULL slot if the object can't be found.
if (Sim::findObject(argv[2], controllerL))
{
list.push_back(controllerL);
}
else
{
list.push_back(NULL);
}
// Right controller — NULL slot if the object can't be found.
if (Sim::findObject(argv[3], controllerR))
{
list.push_back(controllerR);
}
else
{
list.push_back(NULL);
}
object->setControllers(list);
}
#endif

View file

@ -39,6 +39,7 @@ class DecalData;
class SplashData;
class PhysicsPlayer;
class Player;
class OpenVRTrackedObject;
//----------------------------------------------------------------------------
@ -518,6 +519,8 @@ protected:
Point3F mLastPos; ///< Holds the last position for physics updates
Point3F mLastWaterPos; ///< Same as mLastPos, but for water
SimObjectPtr<OpenVRTrackedObject> mControllers[2];
struct ContactInfo
{
bool contacted, jump, run;
@ -577,12 +580,17 @@ protected:
PhysicsPlayer* getPhysicsRep() const { return mPhysicsRep; }
#ifdef TORQUE_OPENVR
void setControllers(Vector<OpenVRTrackedObject*> controllerList);
#endif
protected:
virtual void reSkin();
void setState(ActionState state, U32 ticks=0);
void updateState();
// Jetting
bool mJetting;
@ -686,7 +694,6 @@ public:
void getEyeBaseTransform(MatrixF* mat, bool includeBank);
void getRenderEyeTransform(MatrixF* mat);
void getRenderEyeBaseTransform(MatrixF* mat, bool includeBank);
virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
void getCameraParameters(F32 *min, F32 *max, Point3F *offset, MatrixF *rot);
void getMuzzleTransform(U32 imageSlot,MatrixF* mat);
void getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat);

View file

@ -1992,67 +1992,21 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId,
Point3F eyePos;
Point3F rotEyePos;
DisplayPose inPose;
displayDevice->getFrameEyePose(&inPose, eyeId);
DisplayPose newPose = calcCameraDeltaPose(displayDevice->getCurrentConnection(), inPose);
DisplayPose newPose;
displayDevice->getFrameEyePose(&newPose, eyeId);
// Ok, basically we just need to add on newPose to the camera transform
// NOTE: currently we dont support third-person camera in this mode
MatrixF cameraTransform(1);
F32 fakePos = 0;
//cameraTransform = getRenderTransform(); // use this for controllers TODO
getCameraTransform(&fakePos, &cameraTransform);
QuatF baserot = cameraTransform;
QuatF qrot = QuatF(newPose.orientation);
QuatF concatRot;
concatRot.mul(baserot, qrot);
concatRot.setMatrix(&temp);
temp.setPosition(cameraTransform.getPosition() + concatRot.mulP(newPose.position, &rotEyePos));
temp = MatrixF(1);
newPose.orientation.setMatrix(&temp);
temp.setPosition(newPose.position);
*outMat = temp;
}
DisplayPose ShapeBase::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose)
{
// NOTE: this is intended to be similar to updateMove
// WARNING: does not take into account any move values
DisplayPose outPose;
outPose.orientation = getRenderTransform().toEuler();
outPose.position = inPose.position;
if (con && con->getControlSchemeAbsoluteRotation())
{
// Pitch
outPose.orientation.x = inPose.orientation.x;
// Constrain the range of mRot.x
while (outPose.orientation.x < -M_PI_F)
outPose.orientation.x += M_2PI_F;
while (outPose.orientation.x > M_PI_F)
outPose.orientation.x -= M_2PI_F;
// Yaw
outPose.orientation.z = inPose.orientation.z;
// Constrain the range of mRot.z
while (outPose.orientation.z < -M_PI_F)
outPose.orientation.z += M_2PI_F;
while (outPose.orientation.z > M_PI_F)
outPose.orientation.z -= M_2PI_F;
// Bank
if (mDataBlock->cameraCanBank)
{
outPose.orientation.y = inPose.orientation.y;
}
// Constrain the range of mRot.y
while (outPose.orientation.y > M_PI_F)
outPose.orientation.y -= M_2PI_F;
}
return outPose;
*outMat = cameraTransform * temp;
}
void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot)

View file

@ -1588,9 +1588,6 @@ public:
/// orient and position values of the display device.
virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat );
/// Calculates a delta camera angle and view position based on inPose
virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose);
/// Gets the index of a node inside a mounted image given the name
/// @param imageSlot Image slot
/// @param nodeName Node name

View file

@ -667,11 +667,11 @@ void ScatterSky::prepRenderImage( SceneRenderState *state )
mFlareState.scale = mFlareScale;
mFlareState.lightInfo = mLight;
Point3F lightPos = state->getCameraPosition() - state->getFarPlane() * mLight->getDirection() * 0.9f;
Point3F lightPos = state->getDiffuseCameraPosition() - state->getFarPlane() * mLight->getDirection() * 0.9f;
mFlareState.lightMat.identity();
mFlareState.lightMat.setPosition( lightPos );
F32 dist = ( lightPos - state->getCameraPosition( ) ).len( );
F32 dist = ( lightPos - state->getDiffuseCameraPosition( ) ).len( );
F32 coronaScale = 0.5f;
F32 screenRadius = GFX->getViewport( ).extent.y * coronaScale * 0.5f;
mFlareState.worldRadius = screenRadius * dist / state->getWorldToScreenScale( ).y;

View file

@ -119,74 +119,77 @@ void GFXD3D11Device::enumerateAdapters(Vector<GFXAdapter*> &adapterList)
for(U32 adapterIndex = 0; DXGIFactory->EnumAdapters1(adapterIndex, &EnumAdapter) != DXGI_ERROR_NOT_FOUND; ++adapterIndex)
{
GFXAdapter *toAdd = new GFXAdapter;
toAdd->mType = Direct3D11;
toAdd->mIndex = adapterIndex;
toAdd->mCreateDeviceInstanceDelegate = mCreateDeviceInstance;
GFXAdapter *toAdd = new GFXAdapter;
toAdd->mType = Direct3D11;
toAdd->mIndex = adapterIndex;
toAdd->mCreateDeviceInstanceDelegate = mCreateDeviceInstance;
toAdd->mShaderModel = 5.0f;
DXGI_ADAPTER_DESC1 desc;
EnumAdapter->GetDesc1(&desc);
toAdd->mShaderModel = 5.0f;
DXGI_ADAPTER_DESC1 desc;
EnumAdapter->GetDesc1(&desc);
size_t size=wcslen(desc.Description);
char *str = new char[size+1];
// LUID identifies adapter for oculus rift
dMemcpy(&toAdd->mLUID, &desc.AdapterLuid, sizeof(toAdd->mLUID));
wcstombs(str, desc.Description,size);
str[size]='\0';
String Description=str;
size_t size=wcslen(desc.Description);
char *str = new char[size+1];
wcstombs(str, desc.Description,size);
str[size]='\0';
String Description=str;
SAFE_DELETE_ARRAY(str);
dStrncpy(toAdd->mName, Description.c_str(), GFXAdapter::MaxAdapterNameLen);
dStrncat(toAdd->mName, " (D3D11)", GFXAdapter::MaxAdapterNameLen);
dStrncpy(toAdd->mName, Description.c_str(), GFXAdapter::MaxAdapterNameLen);
dStrncat(toAdd->mName, " (D3D11)", GFXAdapter::MaxAdapterNameLen);
IDXGIOutput* pOutput = NULL;
HRESULT hr;
IDXGIOutput* pOutput = NULL;
HRESULT hr;
hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput);
hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput);
if(hr == DXGI_ERROR_NOT_FOUND)
{
if(hr == DXGI_ERROR_NOT_FOUND)
{
SAFE_RELEASE(EnumAdapter);
break;
}
break;
}
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> EnumOutputs call failure");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> EnumOutputs call failure");
UINT numModes = 0;
DXGI_MODE_DESC* displayModes = NULL;
DXGI_FORMAT format = DXGI_FORMAT_B8G8R8A8_UNORM;
UINT numModes = 0;
DXGI_MODE_DESC* displayModes = NULL;
DXGI_FORMAT format = DXGI_FORMAT_B8G8R8A8_UNORM;
// Get the number of elements
hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL);
// Get the number of elements
hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure");
displayModes = new DXGI_MODE_DESC[numModes];
displayModes = new DXGI_MODE_DESC[numModes];
// Get the list
hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes);
// Get the list
hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure");
for(U32 numMode = 0; numMode < numModes; ++numMode)
{
GFXVideoMode vmAdd;
for(U32 numMode = 0; numMode < numModes; ++numMode)
{
GFXVideoMode vmAdd;
vmAdd.fullScreen = true;
vmAdd.bitDepth = 32;
vmAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator;
vmAdd.resolution.x = displayModes[numMode].Width;
vmAdd.resolution.y = displayModes[numMode].Height;
toAdd->mAvailableModes.push_back(vmAdd);
}
vmAdd.fullScreen = true;
vmAdd.bitDepth = 32;
vmAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator;
vmAdd.resolution.x = displayModes[numMode].Width;
vmAdd.resolution.y = displayModes[numMode].Height;
toAdd->mAvailableModes.push_back(vmAdd);
}
delete[] displayModes;
delete[] displayModes;
SAFE_RELEASE(pOutput);
SAFE_RELEASE(EnumAdapter);
adapterList.push_back(toAdd);
adapterList.push_back(toAdd);
}
SAFE_RELEASE(DXGIFactory);
@ -207,50 +210,50 @@ void GFXD3D11Device::enumerateVideoModes()
for(U32 adapterIndex = 0; DXGIFactory->EnumAdapters1(adapterIndex, &EnumAdapter) != DXGI_ERROR_NOT_FOUND; ++adapterIndex)
{
IDXGIOutput* pOutput = NULL;
IDXGIOutput* pOutput = NULL;
hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput);
hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput);
if(hr == DXGI_ERROR_NOT_FOUND)
{
if(hr == DXGI_ERROR_NOT_FOUND)
{
SAFE_RELEASE(EnumAdapter);
break;
}
break;
}
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> EnumOutputs call failure");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> EnumOutputs call failure");
UINT numModes = 0;
DXGI_MODE_DESC* displayModes = NULL;
DXGI_FORMAT format = GFXD3D11TextureFormat[GFXFormatR8G8B8A8];
UINT numModes = 0;
DXGI_MODE_DESC* displayModes = NULL;
DXGI_FORMAT format = GFXD3D11TextureFormat[GFXFormatR8G8B8A8];
// Get the number of elements
hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL);
// Get the number of elements
hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure");
displayModes = new DXGI_MODE_DESC[numModes];
displayModes = new DXGI_MODE_DESC[numModes];
// Get the list
hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes);
// Get the list
hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure");
for(U32 numMode = 0; numMode < numModes; ++numMode)
{
GFXVideoMode toAdd;
for(U32 numMode = 0; numMode < numModes; ++numMode)
{
GFXVideoMode toAdd;
toAdd.fullScreen = false;
toAdd.bitDepth = 32;
toAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator;
toAdd.resolution.x = displayModes[numMode].Width;
toAdd.resolution.y = displayModes[numMode].Height;
mVideoModes.push_back(toAdd);
}
toAdd.fullScreen = false;
toAdd.bitDepth = 32;
toAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator;
toAdd.resolution.x = displayModes[numMode].Width;
toAdd.resolution.y = displayModes[numMode].Height;
mVideoModes.push_back(toAdd);
}
delete[] displayModes;
delete[] displayModes;
SAFE_RELEASE(pOutput);
SAFE_RELEASE(EnumAdapter);
}
@ -260,7 +263,7 @@ void GFXD3D11Device::enumerateVideoModes()
IDXGISwapChain* GFXD3D11Device::getSwapChain()
{
return mSwapChain;
return mSwapChain;
}
void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window)
@ -282,19 +285,19 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window)
// create a device, device context and swap chain using the information in the d3dpp struct
HRESULT hres = D3D11CreateDeviceAndSwapChain(NULL,
driverType,
NULL,
createDeviceFlags,
NULL,
0,
D3D11_SDK_VERSION,
&d3dpp,
&mSwapChain,
&mD3DDevice,
&deviceFeature,
&mD3DDeviceContext);
NULL,
createDeviceFlags,
NULL,
0,
D3D11_SDK_VERSION,
&d3dpp,
&mSwapChain,
&mD3DDevice,
&deviceFeature,
&mD3DDeviceContext);
if(FAILED(hres))
{
if(FAILED(hres))
{
#ifdef TORQUE_DEBUG
//try again without debug device layer enabled
createDeviceFlags &= ~D3D11_CREATE_DEVICE_DEBUG;
@ -312,9 +315,9 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window)
Con::warnf("GFXD3D11Device::init - Debug layers not detected!");
mDebugLayers = false;
#else
AssertFatal(false, "GFXD3D11Device::init - D3D11CreateDeviceAndSwapChain failed!");
AssertFatal(false, "GFXD3D11Device::init - D3D11CreateDeviceAndSwapChain failed!");
#endif
}
}
//set the fullscreen state here if we need to
if(mode.fullScreen)
@ -326,79 +329,79 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window)
}
}
mTextureManager = new GFXD3D11TextureManager();
mTextureManager = new GFXD3D11TextureManager();
// Now reacquire all the resources we trashed earlier
reacquireDefaultPoolResources();
// Now reacquire all the resources we trashed earlier
reacquireDefaultPoolResources();
//TODO implement feature levels?
if (deviceFeature >= D3D_FEATURE_LEVEL_11_0)
mPixVersion = 5.0f;
else
AssertFatal(false, "GFXD3D11Device::init - We don't support anything below feature level 11.");
if (deviceFeature >= D3D_FEATURE_LEVEL_11_0)
mPixVersion = 5.0f;
else
AssertFatal(false, "GFXD3D11Device::init - We don't support anything below feature level 11.");
D3D11_QUERY_DESC queryDesc;
D3D11_QUERY_DESC queryDesc;
queryDesc.Query = D3D11_QUERY_OCCLUSION;
queryDesc.MiscFlags = 0;
ID3D11Query *testQuery = NULL;
ID3D11Query *testQuery = NULL;
// detect occlusion query support
if (SUCCEEDED(mD3DDevice->CreateQuery(&queryDesc, &testQuery))) mOcclusionQuerySupported = true;
// detect occlusion query support
if (SUCCEEDED(mD3DDevice->CreateQuery(&queryDesc, &testQuery))) mOcclusionQuerySupported = true;
SAFE_RELEASE(testQuery);
Con::printf("Hardware occlusion query detected: %s", mOcclusionQuerySupported ? "Yes" : "No");
Con::printf("Hardware occlusion query detected: %s", mOcclusionQuerySupported ? "Yes" : "No");
mCardProfiler = new GFXD3D11CardProfiler();
mCardProfiler->init();
mCardProfiler = new GFXD3D11CardProfiler();
mCardProfiler->init();
D3D11_TEXTURE2D_DESC desc;
desc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
desc.CPUAccessFlags = 0;
desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.Width = mode.resolution.x;
desc.Height = mode.resolution.y;
desc.SampleDesc.Count =1;
desc.SampleDesc.Quality =0;
desc.MiscFlags = 0;
D3D11_TEXTURE2D_DESC desc;
desc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
desc.CPUAccessFlags = 0;
desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.Width = mode.resolution.x;
desc.Height = mode.resolution.y;
desc.SampleDesc.Count =1;
desc.SampleDesc.Quality =0;
desc.MiscFlags = 0;
HRESULT hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil);
if(FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::init - couldn't create device's depth-stencil surface.");
}
HRESULT hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil);
if(FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::init - couldn't create device's depth-stencil surface.");
}
D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc;
depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
depthDesc.Flags =0 ;
depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthDesc.Texture2D.MipSlice = 0;
D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc;
depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
depthDesc.Flags =0 ;
depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthDesc.Texture2D.MipSlice = 0;
hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView);
hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView);
if(FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::init - couldn't create depth stencil view");
}
if(FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::init - couldn't create depth stencil view");
}
hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::init - coudln't retrieve backbuffer ref");
hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::init - coudln't retrieve backbuffer ref");
//create back buffer view
D3D11_RENDER_TARGET_VIEW_DESC RTDesc;
//create back buffer view
D3D11_RENDER_TARGET_VIEW_DESC RTDesc;
RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
RTDesc.Texture2D.MipSlice = 0;
RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
RTDesc.Texture2D.MipSlice = 0;
RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView);
hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView);
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::init - couldn't create back buffer target view");
if(FAILED(hr))
AssertFatal(false, "GFXD3D11Device::init - couldn't create back buffer target view");
#ifdef TORQUE_DEBUG
String backBufferName = "MainBackBuffer";
@ -416,8 +419,8 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window)
gScreenShot = new ScreenShotD3D11;
mInitialized = true;
deviceInited();
mInitialized = true;
deviceInited();
}
// Supress any debug layer messages we don't want to see
@ -486,28 +489,28 @@ GFXTextureTarget* GFXD3D11Device::allocRenderToTextureTarget()
void GFXD3D11Device::reset(DXGI_SWAP_CHAIN_DESC &d3dpp)
{
if (!mD3DDevice)
return;
if (!mD3DDevice)
return;
mInitialized = false;
mInitialized = false;
// Clean up some commonly dangling state. This helps prevents issues with
// items that are destroyed by the texture manager callbacks and recreated
// later, but still left bound.
setVertexBuffer(NULL);
setPrimitiveBuffer(NULL);
for (S32 i = 0; i<getNumSamplers(); i++)
setTexture(i, NULL);
// Clean up some commonly dangling state. This helps prevents issues with
// items that are destroyed by the texture manager callbacks and recreated
// later, but still left bound.
setVertexBuffer(NULL);
setPrimitiveBuffer(NULL);
for (S32 i = 0; i<getNumSamplers(); i++)
setTexture(i, NULL);
mD3DDeviceContext->ClearState();
mD3DDeviceContext->ClearState();
DXGI_MODE_DESC displayModes;
displayModes.Format = d3dpp.BufferDesc.Format;
displayModes.Height = d3dpp.BufferDesc.Height;
displayModes.Width = d3dpp.BufferDesc.Width;
displayModes.RefreshRate = d3dpp.BufferDesc.RefreshRate;
displayModes.Scaling = d3dpp.BufferDesc.Scaling;
displayModes.ScanlineOrdering = d3dpp.BufferDesc.ScanlineOrdering;
DXGI_MODE_DESC displayModes;
displayModes.Format = d3dpp.BufferDesc.Format;
displayModes.Height = d3dpp.BufferDesc.Height;
displayModes.Width = d3dpp.BufferDesc.Width;
displayModes.RefreshRate = d3dpp.BufferDesc.RefreshRate;
displayModes.Scaling = d3dpp.BufferDesc.Scaling;
displayModes.ScanlineOrdering = d3dpp.BufferDesc.ScanlineOrdering;
HRESULT hr;
if (!d3dpp.Windowed)
@ -520,79 +523,79 @@ void GFXD3D11Device::reset(DXGI_SWAP_CHAIN_DESC &d3dpp)
}
}
// First release all the stuff we allocated from D3DPOOL_DEFAULT
releaseDefaultPoolResources();
// First release all the stuff we allocated from D3DPOOL_DEFAULT
releaseDefaultPoolResources();
//release the backbuffer, depthstencil, and their views
SAFE_RELEASE(mDeviceBackBufferView);
SAFE_RELEASE(mDeviceBackbuffer);
SAFE_RELEASE(mDeviceDepthStencilView);
SAFE_RELEASE(mDeviceDepthStencil);
//release the backbuffer, depthstencil, and their views
SAFE_RELEASE(mDeviceBackBufferView);
SAFE_RELEASE(mDeviceBackbuffer);
SAFE_RELEASE(mDeviceDepthStencilView);
SAFE_RELEASE(mDeviceDepthStencil);
hr = mSwapChain->ResizeBuffers(d3dpp.BufferCount, d3dpp.BufferDesc.Width, d3dpp.BufferDesc.Height, d3dpp.BufferDesc.Format, d3dpp.Windowed ? 0 : DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH);
if (FAILED(hr))
{
AssertFatal(false, "D3D11Device::reset - failed to resize back buffer!");
}
if (FAILED(hr))
{
AssertFatal(false, "D3D11Device::reset - failed to resize back buffer!");
}
//recreate backbuffer view. depth stencil view and texture
D3D11_TEXTURE2D_DESC desc;
desc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
desc.CPUAccessFlags = 0;
desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.Width = d3dpp.BufferDesc.Width;
desc.Height = d3dpp.BufferDesc.Height;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.MiscFlags = 0;
//recreate backbuffer view. depth stencil view and texture
D3D11_TEXTURE2D_DESC desc;
desc.BindFlags = D3D11_BIND_DEPTH_STENCIL;
desc.CPUAccessFlags = 0;
desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
desc.MipLevels = 1;
desc.ArraySize = 1;
desc.Usage = D3D11_USAGE_DEFAULT;
desc.Width = d3dpp.BufferDesc.Width;
desc.Height = d3dpp.BufferDesc.Height;
desc.SampleDesc.Count = 1;
desc.SampleDesc.Quality = 0;
desc.MiscFlags = 0;
hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil);
if (FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::reset - couldn't create device's depth-stencil surface.");
}
hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil);
if (FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::reset - couldn't create device's depth-stencil surface.");
}
D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc;
depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
depthDesc.Flags = 0;
depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthDesc.Texture2D.MipSlice = 0;
D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc;
depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8];
depthDesc.Flags = 0;
depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D;
depthDesc.Texture2D.MipSlice = 0;
hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView);
hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView);
if (FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::reset - couldn't create depth stencil view");
}
if (FAILED(hr))
{
AssertFatal(false, "GFXD3D11Device::reset - couldn't create depth stencil view");
}
hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer);
if (FAILED(hr))
AssertFatal(false, "GFXD3D11Device::reset - coudln't retrieve backbuffer ref");
hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer);
if (FAILED(hr))
AssertFatal(false, "GFXD3D11Device::reset - coudln't retrieve backbuffer ref");
//create back buffer view
D3D11_RENDER_TARGET_VIEW_DESC RTDesc;
//create back buffer view
D3D11_RENDER_TARGET_VIEW_DESC RTDesc;
RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
RTDesc.Texture2D.MipSlice = 0;
RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM;
RTDesc.Texture2D.MipSlice = 0;
RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView);
hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView);
if (FAILED(hr))
AssertFatal(false, "GFXD3D11Device::reset - couldn't create back buffer target view");
if (FAILED(hr))
AssertFatal(false, "GFXD3D11Device::reset - couldn't create back buffer target view");
mD3DDeviceContext->OMSetRenderTargets(1, &mDeviceBackBufferView, mDeviceDepthStencilView);
hr = mSwapChain->SetFullscreenState(!d3dpp.Windowed, NULL);
hr = mSwapChain->SetFullscreenState(!d3dpp.Windowed, NULL);
if (FAILED(hr))
{
if (FAILED(hr))
{
AssertFatal(false, "D3D11Device::reset - failed to change screen states!");
}
}
//Microsoft recommend this, see DXGI documentation
if (!d3dpp.Windowed)
@ -607,13 +610,13 @@ void GFXD3D11Device::reset(DXGI_SWAP_CHAIN_DESC &d3dpp)
}
}
mInitialized = true;
mInitialized = true;
// Now re aquire all the resources we trashed earlier
reacquireDefaultPoolResources();
// Now re aquire all the resources we trashed earlier
reacquireDefaultPoolResources();
// Mark everything dirty and flush to card, for sanity.
updateStates(true);
// Mark everything dirty and flush to card, for sanity.
updateStates(true);
}
class GFXPCD3D11RegisterDevice
@ -896,20 +899,20 @@ void GFXD3D11Device::_updateRenderTargets()
mRTDirty = false;
}
if (mViewportDirty)
{
D3D11_VIEWPORT viewport;
if (mViewportDirty)
{
D3D11_VIEWPORT viewport;
viewport.TopLeftX = mViewport.point.x;
viewport.TopLeftY = mViewport.point.y;
viewport.Width = mViewport.extent.x;
viewport.Height = mViewport.extent.y;
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
viewport.TopLeftX = mViewport.point.x;
viewport.TopLeftY = mViewport.point.y;
viewport.Width = mViewport.extent.x;
viewport.Height = mViewport.extent.y;
viewport.MinDepth = 0.0f;
viewport.MaxDepth = 1.0f;
mD3DDeviceContext->RSSetViewports(1, &viewport);
mD3DDeviceContext->RSSetViewports(1, &viewport);
mViewportDirty = false;
mViewportDirty = false;
}
}
@ -967,35 +970,35 @@ void GFXD3D11Device::releaseDefaultPoolResources()
void GFXD3D11Device::reacquireDefaultPoolResources()
{
// Now do the dynamic index buffers
if( mDynamicPB == NULL )
mDynamicPB = new GFXD3D11PrimitiveBuffer(this, 0, 0, GFXBufferTypeDynamic);
// Now do the dynamic index buffers
if( mDynamicPB == NULL )
mDynamicPB = new GFXD3D11PrimitiveBuffer(this, 0, 0, GFXBufferTypeDynamic);
D3D11_BUFFER_DESC desc;
desc.ByteWidth = sizeof(U16) * MAX_DYNAMIC_INDICES;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.BindFlags = D3D11_BIND_INDEX_BUFFER;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
D3D11_BUFFER_DESC desc;
desc.ByteWidth = sizeof(U16) * MAX_DYNAMIC_INDICES;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.BindFlags = D3D11_BIND_INDEX_BUFFER;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &mDynamicPB->ib);
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &mDynamicPB->ib);
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate dynamic IB");
}
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate dynamic IB");
}
// Walk the resource list and zombify everything.
GFXResource *walk = mResourceListHead;
while(walk)
{
walk->resurrect();
walk = walk->getNextResource();
}
// Walk the resource list and zombify everything.
GFXResource *walk = mResourceListHead;
while(walk)
{
walk->resurrect();
walk = walk->getNextResource();
}
if(mTextureManager)
mTextureManager->resurrect();
if(mTextureManager)
mTextureManager->resurrect();
}
GFXD3D11VertexBuffer* GFXD3D11Device::findVBPool( const GFXVertexFormat *vertexFormat, U32 vertsNeeded )
@ -1011,40 +1014,40 @@ GFXD3D11VertexBuffer* GFXD3D11Device::findVBPool( const GFXVertexFormat *vertexF
GFXD3D11VertexBuffer * GFXD3D11Device::createVBPool( const GFXVertexFormat *vertexFormat, U32 vertSize )
{
PROFILE_SCOPE( GFXD3D11Device_createVBPool );
PROFILE_SCOPE( GFXD3D11Device_createVBPool );
// this is a bit funky, but it will avoid problems with (lack of) copy constructors
// with a push_back() situation
mVolatileVBList.increment();
StrongRefPtr<GFXD3D11VertexBuffer> newBuff;
mVolatileVBList.last() = new GFXD3D11VertexBuffer();
newBuff = mVolatileVBList.last();
// this is a bit funky, but it will avoid problems with (lack of) copy constructors
// with a push_back() situation
mVolatileVBList.increment();
StrongRefPtr<GFXD3D11VertexBuffer> newBuff;
mVolatileVBList.last() = new GFXD3D11VertexBuffer();
newBuff = mVolatileVBList.last();
newBuff->mNumVerts = 0;
newBuff->mBufferType = GFXBufferTypeVolatile;
newBuff->mVertexFormat.copy( *vertexFormat );
newBuff->mVertexSize = vertSize;
newBuff->mDevice = this;
newBuff->mNumVerts = 0;
newBuff->mBufferType = GFXBufferTypeVolatile;
newBuff->mVertexFormat.copy( *vertexFormat );
newBuff->mVertexSize = vertSize;
newBuff->mDevice = this;
// Requesting it will allocate it.
vertexFormat->getDecl();
// Requesting it will allocate it.
vertexFormat->getDecl();
D3D11_BUFFER_DESC desc;
desc.ByteWidth = vertSize * MAX_DYNAMIC_VERTS;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
D3D11_BUFFER_DESC desc;
desc.ByteWidth = vertSize * MAX_DYNAMIC_VERTS;
desc.Usage = D3D11_USAGE_DYNAMIC;
desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &newBuff->vb);
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &newBuff->vb);
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate dynamic VB");
}
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate dynamic VB");
}
return newBuff;
return newBuff;
}
//-----------------------------------------------------------------------------
@ -1100,30 +1103,30 @@ void GFXD3D11Device::setClipRect( const RectI &inRect )
void GFXD3D11Device::setVertexStream( U32 stream, GFXVertexBuffer *buffer )
{
GFXD3D11VertexBuffer *d3dBuffer = static_cast<GFXD3D11VertexBuffer*>( buffer );
GFXD3D11VertexBuffer *d3dBuffer = static_cast<GFXD3D11VertexBuffer*>( buffer );
if ( stream == 0 )
{
// Set the volatile buffer which is used to
// offset the start index when doing draw calls.
if ( d3dBuffer && d3dBuffer->mVolatileStart > 0 )
mVolatileVB = d3dBuffer;
else
mVolatileVB = NULL;
}
if ( stream == 0 )
{
// Set the volatile buffer which is used to
// offset the start index when doing draw calls.
if ( d3dBuffer && d3dBuffer->mVolatileStart > 0 )
mVolatileVB = d3dBuffer;
else
mVolatileVB = NULL;
}
// NOTE: We do not use the stream offset here for stream 0
// as that feature is *supposedly* not as well supported as
// using the start index in drawPrimitive.
//
// If we can verify that this is not the case then we should
// start using this method exclusively for all streams.
// NOTE: We do not use the stream offset here for stream 0
// as that feature is *supposedly* not as well supported as
// using the start index in drawPrimitive.
//
// If we can verify that this is not the case then we should
// start using this method exclusively for all streams.
U32 strides[1] = { d3dBuffer ? d3dBuffer->mVertexSize : 0 };
U32 offset = d3dBuffer && stream != 0 ? d3dBuffer->mVolatileStart * d3dBuffer->mVertexSize : 0;
ID3D11Buffer* buff = d3dBuffer ? d3dBuffer->vb : NULL;
U32 strides[1] = { d3dBuffer ? d3dBuffer->mVertexSize : 0 };
U32 offset = d3dBuffer && stream != 0 ? d3dBuffer->mVolatileStart * d3dBuffer->mVertexSize : 0;
ID3D11Buffer* buff = d3dBuffer ? d3dBuffer->vb : NULL;
getDeviceContext()->IASetVertexBuffers(stream, 1, &buff, strides, &offset);
getDeviceContext()->IASetVertexBuffers(stream, 1, &buff, strides, &offset);
}
void GFXD3D11Device::setVertexStreamFrequency( U32 stream, U32 frequency )
@ -1176,7 +1179,7 @@ void GFXD3D11Device::drawPrimitive( GFXPrimitiveType primType, U32 vertexStart,
setShaderConstBufferInternal(mCurrentShaderConstBuffer);
if ( mVolatileVB )
vertexStart += mVolatileVB->mVolatileStart;
vertexStart += mVolatileVB->mVolatileStart;
mD3DDeviceContext->IASetPrimitiveTopology(GFXD3D11PrimType[primType]);
@ -1240,23 +1243,23 @@ void GFXD3D11Device::setShader(GFXShader *shader, bool force)
{
if(shader)
{
GFXD3D11Shader *d3dShader = static_cast<GFXD3D11Shader*>(shader);
GFXD3D11Shader *d3dShader = static_cast<GFXD3D11Shader*>(shader);
if (d3dShader->mPixShader != mLastPixShader || force)
{
mD3DDeviceContext->PSSetShader( d3dShader->mPixShader, NULL, 0);
mLastPixShader = d3dShader->mPixShader;
}
{
mD3DDeviceContext->PSSetShader( d3dShader->mPixShader, NULL, 0);
mLastPixShader = d3dShader->mPixShader;
}
if (d3dShader->mVertShader != mLastVertShader || force)
{
mD3DDeviceContext->VSSetShader( d3dShader->mVertShader, NULL, 0);
mLastVertShader = d3dShader->mVertShader;
}
{
mD3DDeviceContext->VSSetShader( d3dShader->mVertShader, NULL, 0);
mLastVertShader = d3dShader->mVertShader;
}
}
else
{
setupGenericShaders();
setupGenericShaders();
}
}
@ -1283,7 +1286,7 @@ GFXPrimitiveBuffer * GFXD3D11Device::allocPrimitiveBuffer(U32 numIndices, U32 nu
case GFXBufferTypeDynamic:
case GFXBufferTypeVolatile:
usage = D3D11_USAGE_DYNAMIC;
usage = D3D11_USAGE_DYNAMIC;
break;
}
@ -1301,24 +1304,24 @@ GFXPrimitiveBuffer * GFXD3D11Device::allocPrimitiveBuffer(U32 numIndices, U32 nu
}
else
{
// Otherwise, get it as a seperate buffer...
D3D11_BUFFER_DESC desc;
desc.ByteWidth = sizeof(U16) * numIndices;
desc.Usage = usage;
if(bufferType == GFXBufferTypeDynamic)
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a primitive buffer.
else
desc.CPUAccessFlags = 0;
desc.BindFlags = D3D11_BIND_INDEX_BUFFER;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
// Otherwise, get it as a seperate buffer...
D3D11_BUFFER_DESC desc;
desc.ByteWidth = sizeof(U16) * numIndices;
desc.Usage = usage;
if(bufferType == GFXBufferTypeDynamic)
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a primitive buffer.
else
desc.CPUAccessFlags = 0;
desc.BindFlags = D3D11_BIND_INDEX_BUFFER;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->ib);
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->ib);
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate an index buffer.");
}
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate an index buffer.");
}
}
if (data)
@ -1362,7 +1365,7 @@ GFXVertexBuffer * GFXD3D11Device::allocVertexBuffer(U32 numVerts, const GFXVerte
case GFXBufferTypeDynamic:
case GFXBufferTypeVolatile:
usage = D3D11_USAGE_DYNAMIC;
usage = D3D11_USAGE_DYNAMIC;
break;
}
@ -1377,27 +1380,27 @@ GFXVertexBuffer * GFXD3D11Device::allocVertexBuffer(U32 numVerts, const GFXVerte
}
else
{
// Requesting it will allocate it.
vertexFormat->getDecl(); //-ALEX disabled to postpone until after shader is actually set...
// Requesting it will allocate it.
vertexFormat->getDecl(); //-ALEX disabled to postpone until after shader is actually set...
// Get a new buffer...
D3D11_BUFFER_DESC desc;
desc.ByteWidth = vertSize * numVerts;
desc.Usage = usage;
desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
if(bufferType == GFXBufferTypeDynamic)
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a vertex buffer.
else
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
// Get a new buffer...
D3D11_BUFFER_DESC desc;
desc.ByteWidth = vertSize * numVerts;
desc.Usage = usage;
desc.BindFlags = D3D11_BIND_VERTEX_BUFFER;
if(bufferType == GFXBufferTypeDynamic)
desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a vertex buffer.
else
desc.CPUAccessFlags = 0;
desc.MiscFlags = 0;
desc.StructureByteStride = 0;
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->vb);
HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->vb);
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate VB");
}
if(FAILED(hr))
{
AssertFatal(false, "Failed to allocate VB");
}
}
res->mNumVerts = numVerts;
@ -1595,7 +1598,6 @@ GFXVertexDecl* GFXD3D11Device::allocVertexDecl( const GFXVertexFormat *vertexFor
S32 elemIndex = 0;
for (S32 i = 0; i < elemCount; i++, elemIndex++)
{
const GFXVertexElement &element = vertexFormat->getElement(elemIndex);
stream = element.getStreamIndex();
@ -1687,9 +1689,9 @@ void GFXD3D11Device::setTextureInternal( U32 textureUnit, const GFXTextureObject
{
if( texture == NULL )
{
ID3D11ShaderResourceView *pView = NULL;
mD3DDeviceContext->PSSetShaderResources(textureUnit, 1, &pView);
return;
ID3D11ShaderResourceView *pView = NULL;
mD3DDeviceContext->PSSetShaderResources(textureUnit, 1, &pView);
return;
}
GFXD3D11TextureObject *tex = (GFXD3D11TextureObject*)(texture);
@ -1701,23 +1703,23 @@ GFXFence *GFXD3D11Device::createFence()
// Figure out what fence type we should be making if we don't know
if( mCreateFenceType == -1 )
{
D3D11_QUERY_DESC desc;
desc.MiscFlags = 0;
desc.Query = D3D11_QUERY_EVENT;
D3D11_QUERY_DESC desc;
desc.MiscFlags = 0;
desc.Query = D3D11_QUERY_EVENT;
ID3D11Query *testQuery = NULL;
ID3D11Query *testQuery = NULL;
HRESULT hRes = mD3DDevice->CreateQuery(&desc, &testQuery);
HRESULT hRes = mD3DDevice->CreateQuery(&desc, &testQuery);
if(FAILED(hRes))
{
mCreateFenceType = true;
}
if(FAILED(hRes))
{
mCreateFenceType = true;
}
else
{
mCreateFenceType = false;
}
else
{
mCreateFenceType = false;
}
SAFE_RELEASE(testQuery);
}

View file

@ -42,6 +42,8 @@
class PlatformWindow;
class GFXD3D11ShaderConstBuffer;
class OculusVRHMDDevice;
class D3D11OculusTexture;
//------------------------------------------------------------------------------
@ -53,6 +55,8 @@ class GFXD3D11Device : public GFXDevice
friend class GFXD3D11TextureObject;
friend class GFXD3D11TextureTarget;
friend class GFXD3D11WindowTarget;
friend class OculusVRHMDDevice;
friend class D3D11OculusTexture;
virtual GFXFormat selectSupportedFormat(GFXTextureProfile *profile,
const Vector<GFXFormat> &formats, bool texture, bool mustblend, bool mustfilter);

View file

@ -73,6 +73,7 @@ void GFXD3D11EnumTranslate::init()
GFXD3D11TextureFormat[GFXFormatD24FS8] = DXGI_FORMAT_UNKNOWN;
GFXD3D11TextureFormat[GFXFormatD16] = DXGI_FORMAT_D16_UNORM;
GFXD3D11TextureFormat[GFXFormatR8G8B8A8_SRGB] = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;
GFXD3D11TextureFormat[GFXFormatR8G8B8A8_LINEAR_FORCE] = DXGI_FORMAT_R8G8B8A8_UNORM;
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------
GFXD3D11TextureFilter[GFXTextureFilterNone] = D3D11_FILTER_MIN_MAG_MIP_POINT;

View file

@ -97,9 +97,9 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
if( tex == GFXTextureTarget::sDefaultDepthStencil )
{
mTargets[slot] = D3D11->mDeviceDepthStencil;
mTargetViews[slot] = D3D11->mDeviceDepthStencilView;
mTargets[slot]->AddRef();
mTargetViews[slot]->AddRef();
mTargetViews[slot] = D3D11->mDeviceDepthStencilView;
mTargets[slot]->AddRef();
mTargetViews[slot]->AddRef();
}
else
{
@ -110,14 +110,14 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
// Grab the surface level.
if( slot == DepthStencil )
{
{
mTargets[slot] = d3dto->getSurface();
if ( mTargets[slot] )
mTargets[slot]->AddRef();
mTargetViews[slot] = d3dto->getDSView();
if( mTargetViews[slot])
mTargetViews[slot]->AddRef();
mTargetViews[slot] = d3dto->getDSView();
if( mTargetViews[slot])
mTargetViews[slot]->AddRef();
}
else
@ -126,12 +126,12 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
// if the surface that it needs to render to is different than the mip level
// in the actual texture. This will happen with MSAA.
if( d3dto->getSurface() == NULL )
{
{
mTargets[slot] = d3dto->get2DTex();
mTargets[slot]->AddRef();
mTargetViews[slot] = d3dto->getRTView();
mTargetViews[slot]->AddRef();
mTargets[slot] = d3dto->get2DTex();
mTargets[slot]->AddRef();
mTargetViews[slot] = d3dto->getRTView();
mTargetViews[slot]->AddRef();
}
else
{
@ -163,6 +163,13 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te
mTargetSize = Point2I(sd.Width, sd.Height);
S32 format = sd.Format;
if (format == DXGI_FORMAT_R8G8B8A8_TYPELESS || format == DXGI_FORMAT_B8G8R8A8_TYPELESS)
{
mTargetFormat = GFXFormatR8G8B8A8;
return;
}
GFXREVERSE_LOOKUP( GFXD3D11TextureFormat, GFXFormat, format );
mTargetFormat = (GFXFormat)format;
}
@ -276,7 +283,7 @@ void GFXD3D11TextureTarget::resolve()
if (mResolveTargets[i])
{
D3D11_TEXTURE2D_DESC desc;
mTargets[i]->GetDesc(&desc);
mTargets[i]->GetDesc(&desc);
D3D11DEVICECONTEXT->CopySubresourceRegion(mResolveTargets[i]->get2DTex(), 0, 0, 0, 0, mTargets[i], 0, NULL);
}
}
@ -400,10 +407,10 @@ void GFXD3D11WindowTarget::activate()
void GFXD3D11WindowTarget::resolveTo(GFXTextureObject *tex)
{
GFXDEBUGEVENT_SCOPE(GFXPCD3D11WindowTarget_resolveTo, ColorI::RED);
GFXDEBUGEVENT_SCOPE(GFXPCD3D11WindowTarget_resolveTo, ColorI::RED);
D3D11_TEXTURE2D_DESC desc;
ID3D11Texture2D* surf = ((GFXD3D11TextureObject*)(tex))->get2DTex();
surf->GetDesc(&desc);
D3D11DEVICECONTEXT->ResolveSubresource(surf, 0, D3D11->mDeviceBackbuffer, 0, desc.Format);
D3D11_TEXTURE2D_DESC desc;
ID3D11Texture2D* surf = ((GFXD3D11TextureObject*)(tex))->get2DTex();
surf->GetDesc(&desc);
D3D11DEVICECONTEXT->ResolveSubresource(surf, 0, D3D11->mDeviceBackbuffer, 0, desc.Format);
}

View file

@ -180,8 +180,8 @@ bool GFXD3D11TextureObject::copyToBmp(GBitmap* bmp)
// check format limitations
// at the moment we only support RGBA for the source (other 4 byte formats should
// be easy to add though)
AssertFatal(mFormat == GFXFormatR8G8B8A8, "copyToBmp: invalid format");
if (mFormat != GFXFormatR8G8B8A8)
AssertFatal(mFormat == GFXFormatR8G8B8A8 || mFormat == GFXFormatR8G8B8A8_LINEAR_FORCE, "copyToBmp: invalid format");
if (mFormat != GFXFormatR8G8B8A8 && mFormat != GFXFormatR8G8B8A8_LINEAR_FORCE)
return false;
PROFILE_START(GFXD3D11TextureObject_copyToBmp);
@ -197,7 +197,7 @@ bool GFXD3D11TextureObject::copyToBmp(GBitmap* bmp)
const U32 sourceBytesPerPixel = 4;
U32 destBytesPerPixel = 0;
if(bmp->getFormat() == GFXFormatR8G8B8A8)
if (bmp->getFormat() == GFXFormatR8G8B8A8 || bmp->getFormat() == GFXFormatR8G8B8A8_LINEAR_FORCE)
destBytesPerPixel = 4;
else if(bmp->getFormat() == GFXFormatR8G8B8)
destBytesPerPixel = 3;

View file

@ -115,6 +115,8 @@ void GFXD3D9EnumTranslate::init()
GFXD3D9TextureFormat[GFXFormatD24FS8] = D3DFMT_D24FS8;
GFXD3D9TextureFormat[GFXFormatD16] = D3DFMT_D16;
GFXD3D9TextureFormat[GFXFormatR8G8B8A8_SRGB] = D3DFMT_UNKNOWN;
GFXD3D9TextureFormat[GFXFormatR8G8B8A8_LINEAR_FORCE] = D3DFMT_A8R8G8B8;
VALIDATE_LOOKUPTABLE( GFXD3D9TextureFormat, GFXFormat);
//------------------------------------------------------------------------------
//------------------------------------------------------------------------------

View file

@ -293,6 +293,7 @@ void GBitmap::allocateBitmap(const U32 in_width, const U32 in_height, const bool
break;
case GFXFormatR8G8B8: mBytesPerPixel = 3;
break;
case GFXFormatR8G8B8A8_LINEAR_FORCE:
case GFXFormatR8G8B8X8:
case GFXFormatR8G8B8A8: mBytesPerPixel = 4;
break;

View file

@ -328,13 +328,14 @@ static bool _writePNG(GBitmap *bitmap, Stream &stream, U32 compressionLevel, U32
format == GFXFormatR8G8B8A8 ||
format == GFXFormatR8G8B8X8 ||
format == GFXFormatA8 ||
format == GFXFormatR5G6B5, "_writePNG: ONLY RGB bitmap writing supported at this time.");
format == GFXFormatR5G6B5 ||
format == GFXFormatR8G8B8A8_LINEAR_FORCE, "_writePNG: ONLY RGB bitmap writing supported at this time.");
if ( format != GFXFormatR8G8B8 &&
format != GFXFormatR8G8B8A8 &&
format != GFXFormatR8G8B8X8 &&
format != GFXFormatA8 &&
format != GFXFormatR5G6B5 )
format != GFXFormatR5G6B5 && format != GFXFormatR8G8B8A8_LINEAR_FORCE)
return false;
png_structp png_ptr = png_create_write_struct_2(PNG_LIBPNG_VER_STRING,
@ -381,7 +382,7 @@ static bool _writePNG(GBitmap *bitmap, Stream &stream, U32 compressionLevel, U32
NULL, // compression type
NULL); // filter type
}
else if (format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8)
else if (format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8 || format == GFXFormatR8G8B8A8_LINEAR_FORCE)
{
png_set_IHDR(png_ptr, info_ptr,
width, height, // the width & height

View file

@ -35,6 +35,12 @@
#include "core/util/delegate.h"
#endif
struct GFXAdapterLUID
{
unsigned long LowPart;
long HighPart;
};
struct GFXAdapter
{
public:
@ -58,6 +64,9 @@ public:
/// Supported shader model. 0.f means none supported.
F32 mShaderModel;
/// LUID for windows oculus support
GFXAdapterLUID mLUID;
const char * getName() const { return mName; }
const char * getOutputName() const { return mOutputName; }
GFXAdapterType mType;
@ -72,6 +81,7 @@ public:
mOutputName[0] = 0;
mShaderModel = 0.f;
mIndex = 0;
dMemset(&mLUID, '\0', sizeof(mLUID));
}
~GFXAdapter()

View file

@ -160,7 +160,8 @@ GFXDevice::GFXDevice()
// misc
mAllowRender = true;
mCurrentRenderStyle = RS_Standard;
mCurrentProjectionOffset = Point2F::Zero;
mCurrentStereoTarget = -1;
mStereoHeadTransform = MatrixF(1);
mCanCurrentlyRender = false;
mInitialized = false;

View file

@ -219,6 +219,12 @@ public:
/// The device has started rendering a frame's field (such as for side-by-side rendering)
deStartOfField,
/// left stereo frame has been rendered
deLeftStereoFrameRendered,
/// right stereo frame has been rendered
deRightStereoFrameRendered,
/// The device is about to finish rendering a frame's field
deEndOfField,
};
@ -248,6 +254,7 @@ public:
{
RS_Standard = 0,
RS_StereoSideBySide = (1<<0), // Render into current Render Target side-by-side
RS_StereoSeparate = (1<<1) // Render in two separate passes (then combined by vr compositor)
};
enum GFXDeviceLimits
@ -281,13 +288,19 @@ protected:
/// The style of rendering that is to be performed, based on GFXDeviceRenderStyles
U32 mCurrentRenderStyle;
/// The current projection offset. May be used during side-by-side rendering, for example.
Point2F mCurrentProjectionOffset;
/// Current stereo target being rendered to
S32 mCurrentStereoTarget;
/// Eye offset used when using a stereo rendering style
Point3F mStereoEyeOffset[NumStereoPorts];
/// Center matrix for head
MatrixF mStereoHeadTransform;
/// Left and right matrix for eyes
MatrixF mStereoEyeTransforms[NumStereoPorts];
/// Inverse of mStereoEyeTransforms
MatrixF mInverseStereoEyeTransforms[NumStereoPorts];
/// Fov port settings
@ -338,21 +351,25 @@ public:
/// Retrieve the current rendering style based on GFXDeviceRenderStyles
U32 getCurrentRenderStyle() const { return mCurrentRenderStyle; }
/// Retrieve the current stereo target being rendered to
S32 getCurrentStereoTarget() const { return mCurrentStereoTarget; }
/// Set the current rendering style, based on GFXDeviceRenderStyles
void setCurrentRenderStyle(U32 style) { mCurrentRenderStyle = style; }
/// Set the current projection offset used during stereo rendering
const Point2F& getCurrentProjectionOffset() { return mCurrentProjectionOffset; }
/// Get the current projection offset used during stereo rendering
void setCurrentProjectionOffset(const Point2F& offset) { mCurrentProjectionOffset = offset; }
/// Set the current stereo target being rendered to (in case we're doing anything with postfx)
void setCurrentStereoTarget(const F32 targetId) { mCurrentStereoTarget = targetId; }
/// Get the current eye offset used during stereo rendering
const Point3F* getStereoEyeOffsets() { return mStereoEyeOffset; }
const MatrixF& getStereoHeadTransform() { return mStereoHeadTransform; }
const MatrixF* getStereoEyeTransforms() { return mStereoEyeTransforms; }
const MatrixF* getInverseStereoEyeTransforms() { return mInverseStereoEyeTransforms; }
/// Sets the head matrix for stereo rendering
void setStereoHeadTransform(const MatrixF &mat) { mStereoHeadTransform = mat; }
/// Set the current eye offset used during stereo rendering
void setStereoEyeOffsets(Point3F *offsets) { dMemcpy(mStereoEyeOffset, offsets, sizeof(Point3F) * NumStereoPorts); }
@ -391,6 +408,8 @@ public:
}
setViewport(mStereoViewports[eyeId]);
}
mCurrentStereoTarget = eyeId;
}
GFXCardProfiler* getCardProfiler() const { return mCardProfiler; }
@ -462,7 +481,7 @@ public:
/// Returns the first format from the list which meets all
/// the criteria of the texture profile and query options.
virtual GFXFormat selectSupportedFormat(GFXTextureProfile *profile,
const Vector<GFXFormat> &formats, bool texture, bool mustblend, bool mustfilter) = 0;
const Vector<GFXFormat> &formats, bool texture, bool mustblend, bool mustfilter) = 0;
/// @}

View file

@ -61,6 +61,7 @@ void GFXDrawUtil::_setupStateBlocks()
bitmapStretchSR.setZReadWrite(false);
bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
bitmapStretchSR.samplersDefined = true;
bitmapStretchSR.setColorWrites(true, true, true, false); // NOTE: comment this out if alpha write is needed
// Linear: Create wrap SB
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getWrapLinear();

View file

@ -192,6 +192,12 @@ enum GFXFormat
GFXFormatD24S8,
GFXFormatD24FS8,
// sRGB formats
GFXFormatR8G8B8A8_SRGB,
// Guaranteed RGBA8 (for apis which really dont like bgr)
GFXFormatR8G8B8A8_LINEAR_FORCE,
// 64 bit texture formats...
GFXFormatR16G16B16A16,// first in group...
GFXFormatR16G16B16A16F,
@ -206,9 +212,6 @@ enum GFXFormat
GFXFormatDXT4,
GFXFormatDXT5,
// sRGB formats
GFXFormatR8G8B8A8_SRGB,
GFXFormat_COUNT,
GFXFormat_8BIT = GFXFormatA8,

View file

@ -50,6 +50,8 @@ FontRenderBatcher::FontRenderBatcher() : mStorage(8096)
// result in the text always being black. This may not be the case in OpenGL
// so it may have to change. -bramage
f.samplers[0].textureColorOp = GFXTOPAdd;
f.setColorWrites(true, true, true, false); // NOTE: comment this out if alpha write is needed
mFontSB = GFX->createStateBlock(f);
}
}

View file

@ -198,6 +198,22 @@ GFXAdapter* GFXInit::getAdapterOfType( GFXAdapterType type, const char* outputDe
return NULL;
}
GFXAdapter* GFXInit::getAdapterOfType(GFXAdapterType type, S32 outputDeviceIndex)
{
for (U32 i = 0; i < smAdapters.size(); i++)
{
if (smAdapters[i]->mType == type)
{
if (smAdapters[i]->mIndex == outputDeviceIndex)
{
return smAdapters[i];
}
}
}
return NULL;
}
GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevice)
{
GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDevice);
@ -219,6 +235,27 @@ GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevic
return adapter;
}
GFXAdapter* GFXInit::chooseAdapter(GFXAdapterType type, S32 outputDeviceIndex)
{
GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDeviceIndex);
if (!adapter && type != OpenGL)
{
Con::errorf("The requested renderer, %s, doesn't seem to be available."
" Trying the default, OpenGL.", getAdapterNameFromType(type));
adapter = GFXInit::getAdapterOfType(OpenGL, outputDeviceIndex);
}
if (!adapter)
{
Con::errorf("The OpenGL renderer doesn't seem to be available. Trying the GFXNulDevice.");
adapter = GFXInit::getAdapterOfType(NullDevice, 0);
}
AssertFatal(adapter, "There is no rendering device available whatsoever.");
return adapter;
}
const char* GFXInit::getAdapterNameFromType(GFXAdapterType type)
{
// must match GFXAdapterType order
@ -256,8 +293,23 @@ GFXAdapter *GFXInit::getBestAdapterChoice()
// Get the user's preference for device...
const String renderer = Con::getVariable("$pref::Video::displayDevice");
const String outputDevice = Con::getVariable("$pref::Video::displayOutputDevice");
GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());
GFXAdapter *adapter = chooseAdapter(adapterType, outputDevice.c_str());
const String adapterDevice = Con::getVariable("$Video::forceDisplayAdapter");
GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());;
GFXAdapter *adapter = NULL;
if (adapterDevice.isEmpty())
{
adapter = chooseAdapter(adapterType, outputDevice.c_str());
}
else
{
S32 adapterIdx = dAtoi(adapterDevice.c_str());
if (adapterIdx == -1)
adapter = chooseAdapter(adapterType, outputDevice.c_str());
else
adapter = chooseAdapter(adapterType, adapterIdx);
}
// Did they have one? Return it.
if(adapter)

View file

@ -74,10 +74,16 @@ public:
/// This method never returns NULL.
static GFXAdapter *chooseAdapter( GFXAdapterType type, const char* outputDevice);
/// Override which chooses an adapter based on an index instead
static GFXAdapter *chooseAdapter( GFXAdapterType type, S32 outputDeviceIndex );
/// Gets the first adapter of the requested type (and on the requested output device)
/// from the list of enumerated adapters. Should only call this after a call to
/// enumerateAdapters.
static GFXAdapter *getAdapterOfType( GFXAdapterType type, const char* outputDevice );
/// Override which gets an adapter based on an index instead
static GFXAdapter *getAdapterOfType( GFXAdapterType type, S32 outputDeviceIndex );
/// Converts a GFXAdapterType to a string name. Useful for writing out prefs
static const char *getAdapterNameFromType( GFXAdapterType type );

View file

@ -100,7 +100,10 @@ public:
/// of a target texture after presentation or deactivated.
///
/// This is mainly a depth buffer optimization.
NoDiscard = BIT(10)
NoDiscard = BIT(10),
/// Texture is managed by another process, thus should not be modified
NoModify = BIT(11)
};
@ -164,6 +167,7 @@ public:
inline bool noMip() const { return testFlag(NoMipmap); }
inline bool isPooled() const { return testFlag(Pooled); }
inline bool canDiscard() const { return !testFlag(NoDiscard); }
inline bool canModify() const { return !testFlag(NoModify); }
private:
/// These constants control the packing for the profile; if you add flags, types, or

View file

@ -139,7 +139,78 @@ void DebugDrawer::setupStateBlocks()
mRenderAlpha = GFX->createStateBlock(d);
}
void DebugDrawer::render()
void DebugDrawer::drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color)
{
Point3F point0(a.x, a.y, a.z);
Point3F point1(a.x, b.y, a.z);
Point3F point2(b.x, b.y, a.z);
Point3F point3(b.x, a.y, a.z);
Point3F point4(a.x, a.y, b.z);
Point3F point5(a.x, b.y, b.z);
Point3F point6(b.x, b.y, b.z);
Point3F point7(b.x, a.y, b.z);
// Draw one plane
drawLine(point0, point1, color);
drawLine(point1, point2, color);
drawLine(point2, point3, color);
drawLine(point3, point0, color);
// Draw the other plane
drawLine(point4, point5, color);
drawLine(point5, point6, color);
drawLine(point6, point7, color);
drawLine(point7, point4, color);
// Draw the connecting corners
drawLine(point0, point4, color);
drawLine(point1, point5, color);
drawLine(point2, point6, color);
drawLine(point3, point7, color);
}
void DebugDrawer::drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform)
{
Point3F point0(a.x, a.y, a.z);
Point3F point1(a.x, b.y, a.z);
Point3F point2(b.x, b.y, a.z);
Point3F point3(b.x, a.y, a.z);
Point3F point4(a.x, a.y, b.z);
Point3F point5(a.x, b.y, b.z);
Point3F point6(b.x, b.y, b.z);
Point3F point7(b.x, a.y, b.z);
transform.mulP(point0);
transform.mulP(point1);
transform.mulP(point2);
transform.mulP(point3);
transform.mulP(point4);
transform.mulP(point5);
transform.mulP(point6);
transform.mulP(point7);
// Draw one plane
drawLine(point0, point1, color);
drawLine(point1, point2, color);
drawLine(point2, point3, color);
drawLine(point3, point0, color);
// Draw the other plane
drawLine(point4, point5, color);
drawLine(point5, point6, color);
drawLine(point6, point7, color);
drawLine(point7, point4, color);
// Draw the connecting corners
drawLine(point0, point4, color);
drawLine(point1, point5, color);
drawLine(point2, point6, color);
drawLine(point3, point7, color);
}
void DebugDrawer::render(bool clear)
{
#ifdef ENABLE_DEBUGDRAW
if(!isDrawing)
@ -264,7 +335,7 @@ void DebugDrawer::render()
shouldToggleFreeze = false;
}
if(p->dieTime <= curTime && !isFrozen && p->dieTime != U32_MAX)
if(clear && p->dieTime <= curTime && !isFrozen && p->dieTime != U32_MAX)
{
*walk = p->next;
mPrimChunker.free(p);

View file

@ -105,7 +105,9 @@ public:
static void init();
/// Called globally to render debug draw state. Also does state updates.
void render();
void render(bool clear=true);
bool willDraw() { return isDrawing && mHead; }
void toggleFreeze() { shouldToggleFreeze = true; };
void toggleDrawing()
@ -120,8 +122,11 @@ public:
///
/// @{
void drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f, 1.0f, 1.0f));
void drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform);
void drawBox(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
void drawTri(const Point3F &a, const Point3F &b, const Point3F &c, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
void drawText(const Point3F& pos, const String& text, const ColorF &color = ColorF(1.0f,1.0f,1.0f));
void drawCapsule(const Point3F &a, const F32 &radius, const F32 &height, const ColorF &color = ColorF(1.0f, 1.0f, 1.0f));
@ -176,7 +181,7 @@ private:
DirectionLine,
OutlinedText,
Capsule,
} type; ///< Type of the primitive. The meanings of a,b,c are determined by this.
} type; ///< Type of the primitive. The meanings of a,b,c are determined by this.
SimTime dieTime; ///< Time at which we should remove this from the list.
bool useZ; ///< If true, do z-checks for this primitive.

View file

@ -37,8 +37,9 @@
#include "gfx/gfxTransformSaver.h"
#include "gfx/gfxDrawUtil.h"
#include "gfx/gfxDebugEvent.h"
GFXTextureObject *gLastStereoTexture = NULL;
#include "core/stream/fileStream.h"
#include "platform/output/IDisplayDevice.h"
#include "T3D/gameBase/extended/extendedMove.h"
#define TS_OVERLAY_SCREEN_WIDTH 0.75
@ -63,8 +64,9 @@ Vector<GuiTSCtrl*> GuiTSCtrl::smAwakeTSCtrls;
ImplementEnumType( GuiTSRenderStyles,
"Style of rendering for a GuiTSCtrl.\n\n"
"@ingroup Gui3D" )
{ GuiTSCtrl::RenderStyleStandard, "standard" },
{ GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" },
{ GuiTSCtrl::RenderStyleStandard, "standard" },
{ GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" },
{ GuiTSCtrl::RenderStyleStereoSeparate, "stereo separate" },
EndImplementEnumType;
//-----------------------------------------------------------------------------
@ -158,7 +160,6 @@ GuiTSCtrl::GuiTSCtrl()
mLastCameraQuery.farPlane = 10.0f;
mLastCameraQuery.nearPlane = 0.01f;
mLastCameraQuery.projectionOffset = Point2F::Zero;
mLastCameraQuery.hasFovPort = false;
mLastCameraQuery.hasStereoTargets = false;
@ -198,9 +199,9 @@ void GuiTSCtrl::initPersistFields()
void GuiTSCtrl::consoleInit()
{
Con::addVariable("$TSControl::frameCount", TypeS32, &smFrameCount, "The number of frames that have been rendered since this control was created.\n"
"@ingroup Rendering\n");
"@ingroup Rendering\n");
Con::addVariable("$TSControl::useLatestDisplayTransform", TypeBool, &smUseLatestDisplayTransform, "Use the latest view transform when rendering stereo instead of the one calculated by the last move.\n"
"@ingroup Rendering\n");
"@ingroup Rendering\n");
}
//-----------------------------------------------------------------------------
@ -352,36 +353,175 @@ static FovPort CalculateFovPortForCanvas(const RectI viewport, const CameraQuery
return fovPort;
}
void GuiTSCtrl::_internalRender(RectI guiViewport, RectI renderViewport, Frustum &frustum)
{
GFXTransformSaver saver;
Point2I renderSize = renderViewport.extent;
GFXTarget *origTarget = GFX->getActiveRenderTarget();
S32 origStereoTarget = GFX->getCurrentStereoTarget();
if (mForceFOV != 0)
mLastCameraQuery.fov = mDegToRad(mForceFOV);
if (mCameraZRot)
{
MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot)));
mLastCameraQuery.cameraMatrix.mul(rotMat);
}
if (mReflectPriority > 0)
{
// Get the total reflection priority.
F32 totalPriority = 0;
for (U32 i = 0; i < smAwakeTSCtrls.size(); i++)
if (smAwakeTSCtrls[i]->isVisible())
totalPriority += smAwakeTSCtrls[i]->mReflectPriority;
REFLECTMGR->update(mReflectPriority / totalPriority,
renderSize,
mLastCameraQuery);
}
GFX->setActiveRenderTarget(origTarget);
GFX->setCurrentStereoTarget(origStereoTarget);
GFX->setViewport(renderViewport);
// Clear the zBuffer so GUI doesn't hose object rendering accidentally
GFX->clear(GFXClearZBuffer, ColorI(20, 20, 20), 1.0f, 0);
GFX->setFrustum(frustum);
mSaveProjection = GFX->getProjectionMatrix();
if (mLastCameraQuery.ortho)
{
mOrthoWidth = frustum.getWidth();
mOrthoHeight = frustum.getHeight();
}
// We're going to be displaying this render at size of this control in
// pixels - let the scene know so that it can calculate e.g. reflections
// correctly for that final display result.
gClientSceneGraph->setDisplayTargetResolution(renderSize);
// Set the GFX world matrix to the world-to-camera transform, but don't
// change the cameraMatrix in mLastCameraQuery. This is because
// mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world
// transform. In-place invert would save a copy but mess up any GUIs that
// depend on that value.
MatrixF worldToCamera = mLastCameraQuery.cameraMatrix;
worldToCamera.inverse();
GFX->setWorldMatrix(worldToCamera);
mSaveProjection = GFX->getProjectionMatrix();
mSaveModelview = GFX->getWorldMatrix();
mSaveViewport = guiViewport;
mSaveWorldToScreenScale = GFX->getWorldToScreenScale();
mSaveFrustum = GFX->getFrustum();
mSaveFrustum.setTransform(mLastCameraQuery.cameraMatrix);
// Set the default non-clip projection as some
// objects depend on this even in non-reflect cases.
gClientSceneGraph->setNonClipProjection(mSaveProjection);
// Give the post effect manager the worldToCamera, and cameraToScreen matrices
PFXMGR->setFrameMatrices(mSaveModelview, mSaveProjection);
renderWorld(guiViewport);
DebugDrawer* debugDraw = DebugDrawer::get();
if (mRenderStyle == RenderStyleStereoSideBySide && debugDraw->willDraw())
{
// For SBS we need to render over each viewport
Frustum frustum;
GFX->setViewport(mLastCameraQuery.stereoViewports[0]);
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
GFX->setFrustum(frustum);
debugDraw->render(false);
GFX->setViewport(mLastCameraQuery.stereoViewports[1]);
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]);
GFX->setFrustum(frustum);
debugDraw->render();
}
else
{
debugDraw->render();
}
saver.restore();
}
//-----------------------------------------------------------------------------
void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
{
// Save the current transforms so we can restore
// Save the current transforms so we can restore
// it for child control rendering below.
GFXTransformSaver saver;
bool renderingToTarget = false;
if(!processCameraQuery(&mLastCameraQuery))
mLastCameraQuery.displayDevice = NULL;
if (!processCameraQuery(&mLastCameraQuery))
{
// We have no camera, but render the GUI children
// anyway. This makes editing GuiTSCtrl derived
// controls easier in the GuiEditor.
renderChildControls( offset, updateRect );
renderChildControls(offset, updateRect);
return;
}
// jamesu - currently a little bit of a hack. Ideally we need to ditch the viewports in the query data and just rely on the display device
if (mLastCameraQuery.displayDevice)
{
if (mRenderStyle == RenderStyleStereoSideBySide)
{
mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_StereoSideBySide);
}
else if (mRenderStyle == RenderStyleStereoSeparate)
{
mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_StereoSeparate);
}
else
{
mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_Standard);
}
// The connection's display device may want to set the eye offset
if (mLastCameraQuery.displayDevice->providesEyeOffsets())
{
mLastCameraQuery.displayDevice->getEyeOffsets(mLastCameraQuery.eyeOffset);
}
// Grab field of view for both eyes
if (mLastCameraQuery.displayDevice->providesFovPorts())
{
mLastCameraQuery.displayDevice->getFovPorts(mLastCameraQuery.fovPort);
mLastCameraQuery.hasFovPort = true;
}
mLastCameraQuery.displayDevice->getStereoViewports(mLastCameraQuery.stereoViewports);
mLastCameraQuery.displayDevice->getStereoTargets(mLastCameraQuery.stereoTargets);
mLastCameraQuery.hasStereoTargets = mLastCameraQuery.stereoTargets[0];
}
GFXTargetRef origTarget = GFX->getActiveRenderTarget();
U32 origStyle = GFX->getCurrentRenderStyle();
// Set up the appropriate render style
U32 prevRenderStyle = GFX->getCurrentRenderStyle();
Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset();
Point2I renderSize = getExtent();
Frustum frustum;
if(mRenderStyle == RenderStyleStereoSideBySide)
mLastCameraQuery.currentEye = -1;
if (mRenderStyle == RenderStyleStereoSideBySide)
{
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide);
GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset);
GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix);
if (!mLastCameraQuery.hasStereoTargets)
{
@ -398,9 +538,74 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
mLastCameraQuery.fovPort[0] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[0], mLastCameraQuery);
mLastCameraQuery.fovPort[1] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[1], mLastCameraQuery);
}
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
MatrixF myTransforms[2];
Frustum frustum;
if (smUseLatestDisplayTransform)
{
// Use the view matrix determined from the display device
myTransforms[0] = mLastCameraQuery.eyeTransforms[0];
myTransforms[1] = mLastCameraQuery.eyeTransforms[1];
}
else
{
// Use the view matrix determined from the control object
myTransforms[0] = mLastCameraQuery.cameraMatrix;
myTransforms[1] = mLastCameraQuery.cameraMatrix;
mLastCameraQuery.headMatrix = mLastCameraQuery.cameraMatrix; // override head
QuatF qrot = mLastCameraQuery.cameraMatrix;
Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
Point3F rotEyePos;
myTransforms[0].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[0], &rotEyePos));
myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos));
}
GFX->setStereoEyeTransforms(myTransforms);
// Allow render size to originate from the render target
if (mLastCameraQuery.stereoTargets[0])
{
renderSize = mLastCameraQuery.stereoTargets[0]->getSize();
renderingToTarget = true;
}
// NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye.
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
GFX->activateStereoTarget(-1);
_internalRender(RectI(updateRect.point, updateRect.extent), RectI(Point2I(0,0), renderSize), frustum);
// Notify device we've rendered the right, thus the last stereo frame.
GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered);
// Render preview
if (mLastCameraQuery.displayDevice)
{
GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture();
if (!previewTexture.isNull())
{
GFX->setActiveRenderTarget(origTarget);
GFX->setCurrentRenderStyle(origStyle);
GFX->setClipRect(updateRect);
renderDisplayPreview(updateRect, previewTexture);
}
}
}
else if (mRenderStyle == RenderStyleStereoSeparate && mLastCameraQuery.displayDevice)
{
// In this case we render the scene twice to different render targets, then
// render the final composite view
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSeparate);
GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix);
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
GFX->setStereoTargets(mLastCameraQuery.stereoTargets);
@ -426,60 +631,62 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos));
}
GFX->setStereoEyeTransforms(myTransforms);
MatrixF origMatrix = mLastCameraQuery.cameraMatrix;
// Allow render size to originate from the render target
if (mLastCameraQuery.stereoTargets[0])
// Left
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
mLastCameraQuery.cameraMatrix = myTransforms[0];
frustum.update();
GFX->activateStereoTarget(0);
mLastCameraQuery.currentEye = 0;
GFX->beginField();
_internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum);
GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered);
GFX->endField();
// Right
GFX->activateStereoTarget(1);
mLastCameraQuery.currentEye = 1;
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]);
mLastCameraQuery.cameraMatrix = myTransforms[1];
frustum.update();
GFX->beginField();
_internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum);
GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered);
GFX->endField();
mLastCameraQuery.cameraMatrix = origMatrix;
// Render preview
if (mLastCameraQuery.displayDevice)
{
renderSize = mLastCameraQuery.stereoViewports[0].extent;
renderingToTarget = true;
GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture();
if (!previewTexture.isNull())
{
GFX->setActiveRenderTarget(origTarget);
GFX->setCurrentRenderStyle(origStyle);
GFX->setClipRect(updateRect);
renderDisplayPreview(updateRect, previewTexture);
}
}
}
else
{
GFX->setCurrentRenderStyle(GFXDevice::RS_Standard);
}
#ifdef TORQUE_OS_MAC
Point2I screensize = getRoot()->getWindowSize();
tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
#endif
if ( mReflectPriority > 0 )
{
// Get the total reflection priority.
F32 totalPriority = 0;
for ( U32 i=0; i < smAwakeTSCtrls.size(); i++ )
if ( smAwakeTSCtrls[i]->isVisible() )
totalPriority += smAwakeTSCtrls[i]->mReflectPriority;
REFLECTMGR->update( mReflectPriority / totalPriority,
getExtent(),
mLastCameraQuery );
}
if(mForceFOV != 0)
mLastCameraQuery.fov = mDegToRad(mForceFOV);
if(mCameraZRot)
{
MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot)));
mLastCameraQuery.cameraMatrix.mul(rotMat);
}
Frustum frustum;
if(mRenderStyle == RenderStyleStereoSideBySide)
{
// NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye.
MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]);
}
else
{
// set up the camera and viewport stuff:
F32 wwidth;
F32 wheight;
F32 renderWidth = F32(renderSize.x);
F32 renderHeight = F32(renderSize.y);
F32 aspectRatio = renderWidth / renderHeight;
// Use the FOV to calculate the viewport height scale
// then generate the width scale from the aspect ratio.
if(!mLastCameraQuery.ortho)
if (!mLastCameraQuery.ortho)
{
wheight = mLastCameraQuery.nearPlane * mTan(mLastCameraQuery.fov / 2.0f);
wwidth = aspectRatio * wheight;
@ -498,184 +705,33 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
F32 top = wheight - vscale * (updateRect.point.y - offset.y);
F32 bottom = wheight - vscale * (updateRect.point.y + updateRect.extent.y - offset.y);
frustum.set( mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane );
}
frustum.set(mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane);
// Manipulate the frustum for tiled screenshots
const bool screenShotMode = gScreenShot && gScreenShot->isPending();
if ( screenShotMode )
{
gScreenShot->tileFrustum( frustum );
GFX->setViewMatrix(MatrixF::Identity);
}
RectI tempRect = updateRect;
if (!renderingToTarget)
{
#ifdef TORQUE_OS_MAC
// Manipulate the frustum for tiled screenshots
const bool screenShotMode = gScreenShot && gScreenShot->isPending();
if (screenShotMode)
{
gScreenShot->tileFrustum(frustum);
GFX->setViewMatrix(MatrixF::Identity);
}
RectI tempRect = updateRect;
#ifdef TORQUE_OS_MAC
Point2I screensize = getRoot()->getWindowSize();
tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y);
#endif
#endif
GFX->setViewport( tempRect );
}
else
{
// Activate stereo RT
GFX->activateStereoTarget(-1);
_internalRender(tempRect, tempRect, frustum);
}
// Clear the zBuffer so GUI doesn't hose object rendering accidentally
GFX->clear( GFXClearZBuffer , ColorI(20,20,20), 1.0f, 0 );
//GFX->clear( GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
GFX->setFrustum( frustum );
if(mLastCameraQuery.ortho)
{
mOrthoWidth = frustum.getWidth();
mOrthoHeight = frustum.getHeight();
}
// We're going to be displaying this render at size of this control in
// pixels - let the scene know so that it can calculate e.g. reflections
// correctly for that final display result.
gClientSceneGraph->setDisplayTargetResolution(renderSize);
// Set the GFX world matrix to the world-to-camera transform, but don't
// change the cameraMatrix in mLastCameraQuery. This is because
// mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world
// transform. In-place invert would save a copy but mess up any GUIs that
// depend on that value.
MatrixF worldToCamera = mLastCameraQuery.cameraMatrix;
worldToCamera.inverse();
GFX->setWorldMatrix( worldToCamera );
mSaveProjection = GFX->getProjectionMatrix();
mSaveModelview = GFX->getWorldMatrix();
mSaveViewport = updateRect;
mSaveWorldToScreenScale = GFX->getWorldToScreenScale();
mSaveFrustum = GFX->getFrustum();
mSaveFrustum.setTransform( mLastCameraQuery.cameraMatrix );
// Set the default non-clip projection as some
// objects depend on this even in non-reflect cases.
gClientSceneGraph->setNonClipProjection( mSaveProjection );
// Give the post effect manager the worldToCamera, and cameraToScreen matrices
PFXMGR->setFrameMatrices( mSaveModelview, mSaveProjection );
renderWorld(updateRect);
DebugDrawer::get()->render();
// Render the canvas overlay if its available
if (mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer())
{
GFXDEBUGEVENT_SCOPE( StereoGui_Render, ColorI( 255, 0, 0 ) );
MatrixF proj(1);
Frustum originalFrustum = GFX->getFrustum();
GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0);
const FovPort *currentFovPort = GFX->getStereoFovPort();
const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
const Point3F *eyeOffset = GFX->getStereoEyeOffsets();
Frustum gfxFrustum = originalFrustum;
for (U32 i=0; i<2; i++)
{
GFX->activateStereoTarget(i);
MathUtils::makeFovPortFrustum(&gfxFrustum, true, gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[i], eyeTransforms[i]);
GFX->setFrustum(gfxFrustum);
MatrixF eyeWorldTrans(1);
eyeWorldTrans.setPosition(Point3F(eyeOffset[i].x,eyeOffset[i].y,eyeOffset[i].z));
MatrixF eyeWorld(1);
eyeWorld.mul(eyeWorldTrans);
eyeWorld.inverse();
GFX->setWorldMatrix(eyeWorld);
GFX->setViewMatrix(MatrixF::Identity);
if (!mStereoOverlayVB.getPointer())
{
mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic);
GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4);
F32 texLeft = 0.0f;
F32 texRight = 1.0f;
F32 texTop = 1.0f;
F32 texBottom = 0.0f;
F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight();
F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH;
F32 rectHeight = rectWidth * rectRatio;
F32 screenLeft = -rectWidth * 0.5;
F32 screenRight = rectWidth * 0.5;
F32 screenTop = -rectHeight * 0.5;
F32 screenBottom = rectHeight * 0.5;
const F32 fillConv = 0.0f;
const F32 frustumDepthAdjusted = gfxFrustum.getNearDist() + 0.012;
verts[0].point.set( screenLeft - fillConv, frustumDepthAdjusted, screenTop - fillConv );
verts[1].point.set( screenRight - fillConv, frustumDepthAdjusted, screenTop - fillConv );
verts[2].point.set( screenLeft - fillConv, frustumDepthAdjusted, screenBottom - fillConv );
verts[3].point.set( screenRight - fillConv, frustumDepthAdjusted, screenBottom - fillConv );
verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255);
verts[0].texCoord.set( texLeft, texTop );
verts[1].texCoord.set( texRight, texTop );
verts[2].texCoord.set( texLeft, texBottom );
verts[3].texCoord.set( texRight, texBottom );
mStereoOverlayVB.unlock();
}
if (!mStereoGuiSB.getPointer())
{
// DrawBitmapStretchSR
GFXStateBlockDesc bitmapStretchSR;
bitmapStretchSR.setCullMode(GFXCullNone);
bitmapStretchSR.setZReadWrite(false, false);
bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
bitmapStretchSR.samplersDefined = true;
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR);
}
GFX->setVertexBuffer(mStereoOverlayVB);
GFX->setStateBlock(mStereoGuiSB);
GFX->setTexture( 0, texObject );
GFX->setupGenericShaders( GFXDevice::GSModColorTexture );
GFX->drawPrimitive( GFXTriangleStrip, 0, 2 );
}
}
// Restore the previous matrix state before
// we begin rendering the child controls.
saver.restore();
// Restore the render style and any stereo parameters
GFX->setActiveRenderTarget(origTarget);
GFX->setCurrentRenderStyle(prevRenderStyle);
GFX->setCurrentProjectionOffset(prevProjectionOffset);
if(mRenderStyle == RenderStyleStereoSideBySide && gLastStereoTexture)
{
GFX->setClipRect(updateRect);
GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect);
}
// TODO: Some render to sort of overlay system?
// Allow subclasses to render 2D elements.
GFX->setActiveRenderTarget(origTarget);
GFX->setCurrentRenderStyle(origStyle);
GFX->setClipRect(updateRect);
renderGui( offset, updateRect );
renderGui(offset, updateRect);
if (shouldRenderChildControls())
{
@ -711,10 +767,83 @@ void GuiTSCtrl::drawLineList( const Vector<Point3F> &points, const ColorI color,
drawLine( points[i], points[i+1], color, width );
}
//-----------------------------------------------------------------------------
// Sets (or clears, when canvas is NULL) the offscreen canvas whose texture is
// composited as a GUI overlay on top of the stereo render.
void GuiTSCtrl::setStereoGui(GuiOffscreenCanvas *canvas)
{
// Cache both the render target (used to fetch the overlay texture at draw
// time) and the canvas itself; a NULL canvas disables the overlay.
mStereoGuiTarget = canvas ? canvas->getTarget() : NULL;
mStereoCanvas = canvas;
}
//-----------------------------------------------------------------------------
void GuiTSCtrl::renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture)
{
GFX->setWorldMatrix(MatrixF(1));
GFX->setViewMatrix(MatrixF::Identity);
GFX->setClipRect(updateRect);
GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::BLACK);
GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED);
if (!mStereoPreviewVB.getPointer())
{
mStereoPreviewVB.set(GFX, 4, GFXBufferTypeStatic);
GFXVertexPCT *verts = mStereoPreviewVB.lock(0, 4);
F32 texLeft = 0.0f;
F32 texRight = 1.0f;
F32 texTop = 0.0f;
F32 texBottom = 1.0f;
F32 rectWidth = updateRect.extent.x;
F32 rectHeight = updateRect.extent.y;
F32 screenLeft = 0;
F32 screenRight = rectWidth;
F32 screenTop = 0;
F32 screenBottom = rectHeight;
const F32 fillConv = 0.0f;
const F32 frustumDepthAdjusted = 0.0f;
verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f);
verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f);
verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f);
verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f);
verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255, 255, 255, 255);
verts[0].texCoord.set(texLeft, texTop);
verts[1].texCoord.set(texRight, texTop);
verts[2].texCoord.set(texLeft, texBottom);
verts[3].texCoord.set(texRight, texBottom);
mStereoPreviewVB.unlock();
}
if (!mStereoPreviewSB.getPointer())
{
// DrawBitmapStretchSR
GFXStateBlockDesc bitmapStretchSR;
bitmapStretchSR.setCullMode(GFXCullNone);
bitmapStretchSR.setZReadWrite(false, false);
bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha);
bitmapStretchSR.samplersDefined = true;
bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear();
bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint;
bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint;
bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint;
mStereoPreviewSB = GFX->createStateBlock(bitmapStretchSR);
}
GFX->setVertexBuffer(mStereoPreviewVB);
GFX->setStateBlock(mStereoPreviewSB);
GFX->setTexture(0, previewTexture);
GFX->setupGenericShaders(GFXDevice::GSModColorTexture);
GFX->drawPrimitive(GFXTriangleStrip, 0, 2);
}
//=============================================================================

View file

@ -35,6 +35,10 @@
#include "materials/matTextureTarget.h"
#endif
#ifndef _GUIOFFSCREENCANVAS_H_
#include "gui/core/guiOffscreenCanvas.h"
#endif
class IDisplayDevice;
class GuiOffscreenCanvas;
@ -45,16 +49,19 @@ struct CameraQuery
F32 farPlane;
F32 fov;
FovPort fovPort[2]; // fov for each eye
Point2F projectionOffset;
Point3F eyeOffset[2];
MatrixF eyeTransforms[2];
bool ortho;
bool hasFovPort;
bool hasStereoTargets;
MatrixF cameraMatrix;
MatrixF headMatrix; // center matrix (for HMDs)
S32 currentEye;
RectI stereoViewports[2]; // destination viewports
GFXTextureTarget* stereoTargets[2];
GuiCanvas* drawCanvas; // Canvas we are drawing to. Needed for VR
IDisplayDevice* displayDevice;
};
/// Abstract base class for 3D viewport GUIs.
@ -65,7 +72,8 @@ class GuiTSCtrl : public GuiContainer
public:
enum RenderStyles {
RenderStyleStandard = 0,
RenderStyleStereoSideBySide = (1<<0)
RenderStyleStereoSideBySide = (1<<0),
RenderStyleStereoSeparate = (1<<1),
};
protected:
@ -104,12 +112,18 @@ protected:
NamedTexTargetRef mStereoGuiTarget;
GFXVertexBufferHandle<GFXVertexPCT> mStereoOverlayVB;
GFXStateBlockRef mStereoGuiSB;
GFXVertexBufferHandle<GFXVertexPCT> mStereoPreviewVB;
GFXStateBlockRef mStereoPreviewSB;
SimObjectPtr<GuiOffscreenCanvas> mStereoCanvas;
public:
GuiTSCtrl();
void onPreRender();
void _internalRender(RectI guiViewport, RectI renderViewport, Frustum &frustum);
void onRender(Point2I offset, const RectI &updateRect);
virtual bool processCameraQuery(CameraQuery *query);
@ -178,6 +192,7 @@ public:
bool shouldRenderChildControls() { return mRenderStyle == RenderStyleStandard; }
void setStereoGui(GuiOffscreenCanvas *canvas);
void renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture);
DECLARE_CONOBJECT(GuiTSCtrl);
DECLARE_CATEGORY( "Gui 3D" );

View file

@ -124,6 +124,7 @@ public:
void invalidText(bool playSound = true);
void validText();
bool isValidText();
inline bool isPasswordText() { return mPasswordText; }
bool isAllTextSelected();
void selectAllText();

View file

@ -286,6 +286,8 @@ class GuiControl : public SimGroup
const char * getConsoleCommand(); ///< Returns the name of the function bound to this GuiControl
LangTable *getGUILangTable(void);
const UTF8 *getGUIString(S32 id);
inline String& getTooltip() { return mTooltip; } ///< Returns the tooltip
/// @}

View file

@ -176,7 +176,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
GFX->setWorldMatrix( MatrixF::Identity );
GFX->setViewMatrix( MatrixF::Identity );
GFX->setProjectionMatrix( MatrixF::Identity );
RectI contentRect(Point2I(0,0), mTargetSize);
{
// Render active GUI Dialogs
@ -193,7 +193,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
// Fill Blue if no Dialogs
if(this->size() == 0)
GFX->clear( GFXClearTarget, ColorF(0,0,1,1), 1.0f, 0 );
GFX->clear( GFXClearTarget, ColorF(0,0,0,1), 1.0f, 0 );
GFX->setClipRect( contentRect );
@ -210,7 +210,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
GFX->getDrawUtil()->clearBitmapModulation();
}
mTarget->resolve();
GFX->popActiveRenderTarget();
@ -219,6 +219,13 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
// Keep track of the last time we rendered.
mLastRenderMs = Platform::getRealMilliseconds();
mTargetDirty = mDynamicTarget;
onFrameRendered();
}
// Hook called at the end of renderFrame() once the offscreen target has been
// resolved. The default implementation intentionally does nothing; subclasses
// can override it to consume the freshly rendered texture.
void GuiOffscreenCanvas::onFrameRendered()
{
}
Point2I GuiOffscreenCanvas::getWindowSize()

View file

@ -23,6 +23,7 @@ public:
void onRemove();
void renderFrame(bool preRenderOnly, bool bufferSwap);
virtual void onFrameRendered();
Point2I getWindowSize();

View file

@ -1162,6 +1162,7 @@ bool EditTSCtrl::processCameraQuery(CameraQuery * query)
query->cameraMatrix = camRot;
query->cameraMatrix.setPosition(camPos);
query->headMatrix = query->cameraMatrix;
query->fov = mOrthoFOV;
}

View file

@ -453,30 +453,7 @@ void AdvancedLightBinManager::_setupPerFrameParameters( const SceneRenderState *
// Perform a camera offset. We need to manually perform this offset on the sun (or vector) light's
// polygon, which is at the far plane.
const Point2F& projOffset = frustum.getProjectionOffset();
Point3F cameraOffsetPos = cameraPos;
if(!projOffset.isZero())
{
// First we need to calculate the offset at the near plane. The projOffset
// given above can be thought of a percent as it ranges from 0..1 (or 0..-1).
F32 nearOffset = frustum.getNearRight() * projOffset.x;
// Now given the near plane distance from the camera we can solve the right
// triangle and calcuate the SIN theta for the offset at the near plane.
// SIN theta = x/y
F32 sinTheta = nearOffset / frustum.getNearDist();
// Finally, we can calcuate the offset at the far plane, which is where our sun (or vector)
// light's polygon is drawn.
F32 farOffset = frustum.getFarDist() * sinTheta;
// We can now apply this far plane offset to the far plane itself, which then compensates
// for the project offset.
MatrixF camTrans = frustum.getTransform();
VectorF offset = camTrans.getRightVector();
offset *= farOffset;
cameraOffsetPos += offset;
}
// Now build the quad for drawing full-screen vector light
// passes.... this is a volatile VB and updates every frame.

View file

@ -214,8 +214,26 @@ void Frustum::setNearFarDist( F32 nearDist, F32 farDist )
return;
// Recalculate the frustum.
MatrixF xfm( mTransform );
set( mIsOrtho, getFov(), getAspectRatio(), nearDist, farDist, xfm );
MatrixF xfm( mTransform );
const F32 CENTER_EPSILON = 0.001;
F32 centerX = mNearLeft + (mNearRight - mNearLeft) * 0.5;
F32 centerY = mNearBottom + (mNearTop - mNearBottom) * 0.5;
if ((centerX > CENTER_EPSILON || centerX < -CENTER_EPSILON) || (centerY > CENTER_EPSILON || centerY < -CENTER_EPSILON) )
{
// Off-center projection, so re-calc use the new distances
FovPort expectedFovPort;
expectedFovPort.leftTan = -(mNearLeft / mNearDist);
expectedFovPort.rightTan = (mNearRight / mNearDist);
expectedFovPort.upTan = (mNearTop / mNearDist);
expectedFovPort.downTan = -(mNearBottom / mNearDist);
MathUtils::makeFovPortFrustum(this, mIsOrtho, nearDist, farDist, expectedFovPort);
}
else
{
// Projection is not off-center, use the normal code
set(mIsOrtho, getFov(), getAspectRatio(), nearDist, farDist, xfm);
}
}
//-----------------------------------------------------------------------------

View file

@ -27,6 +27,7 @@
#include "core/stringTable.h"
#include "platform/platformInput.h"
#include "math/mQuat.h"
#include "math/mAngAxis.h"
MODULE_BEGIN( InputEventManager )
@ -546,3 +547,21 @@ void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEve
newEvent.postToSignal(Input::smInputEvent);
}
// Builds and dispatches an input event carrying an axis-angle rotation.
// The rotation axis is packed into fValue..fValue3 and the angle into
// fValue4, mirroring the packing used by the QuatF overload.
void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& aValue)
{
   InputEventInfo evt;

   // Identify the source device and the control that changed.
   evt.deviceType = deviceType;
   evt.deviceInst = deviceInst;
   evt.objType = objType;
   evt.objInst = objInst;
   evt.action = action;

   // Pack the axis-angle value into the four float channels.
   evt.fValue = aValue.axis.x;
   evt.fValue2 = aValue.axis.y;
   evt.fValue3 = aValue.axis.z;
   evt.fValue4 = aValue.angle;

   // Broadcast to all listeners on the global input signal.
   evt.postToSignal(Input::smInputEvent);
}

View file

@ -504,6 +504,9 @@ public:
/// Build an input event based on a QuatF
void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, QuatF& qValue);
/// Build an input event based on a AngAxisF
void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& qValue);
protected:
U32 mNextDeviceTypeCode;
U32 mNextDeviceCode;

View file

@ -62,7 +62,7 @@ MODULE_END;
// OculusVRDevice
//-----------------------------------------------------------------------------
bool OculusVRDevice::smEnableDevice = true;
bool OculusVRDevice::smEnableDevice = false;
bool OculusVRDevice::smSimulateHMD = true;
@ -156,26 +156,27 @@ void OculusVRDevice::buildCodeTable()
OculusVRSensorDevice::buildCodeTable();
}
void OculusVRDevice::addHMDDevice(ovrHmd hmd)
void OculusVRDevice::addHMDDevice(ovrHmd hmd, ovrGraphicsLuid luid)
{
if(!hmd)
return;
OculusVRHMDDevice* hmdd = new OculusVRHMDDevice();
hmdd->set(hmd,mHMDDevices.size());
hmdd->set(hmd, luid, mHMDDevices.size());
mHMDDevices.push_back(hmdd);
Con::printf(" HMD found: %s by %s [v%d]", hmd->ProductName, hmd->Manufacturer, hmd->Type);
ovrHmdDesc desc = ovr_GetHmdDesc(hmd);
Con::printf(" HMD found: %s by %s [v%d]", desc.ProductName, desc.Manufacturer, desc.Type);
}
void OculusVRDevice::createSimulatedHMD()
{
{/* TOFIX
OculusVRHMDDevice* hmdd = new OculusVRHMDDevice();
ovrHmd hmd = ovrHmd_CreateDebug(ovrHmd_DK2);
ovrHmd hmd = ovr_CreateDebug(ovrHmd_DK2);
hmdd->set(hmd,mHMDDevices.size());
mHMDDevices.push_back(hmdd);
Con::printf(" HMD simulated: %s by %s [v%d]", hmdd->getProductName(), hmdd->getManufacturer(), hmdd->getVersion());
Con::printf(" HMD simulated: %s by %s [v%d]", hmdd->getProductName(), hmdd->getManufacturer(), hmdd->getVersion()); */
}
bool OculusVRDevice::enable()
@ -185,16 +186,17 @@ bool OculusVRDevice::enable()
Con::printf("Oculus VR Device Init:");
if(sOcculusEnabled && ovr_Initialize())
if(sOcculusEnabled && OVR_SUCCESS(ovr_Initialize(0)))
{
mEnabled = true;
// Enumerate HMDs and pick the first one
ovrHmd hmd = ovrHmd_Create(0);
if(hmd)
ovrHmd hmd;
ovrGraphicsLuid luid;
if(OVR_SUCCESS(ovr_Create(&hmd, &luid)))
{
// Add the HMD to our list
addHMDDevice(hmd);
addHMDDevice(hmd, luid);
setActive(true);
}
@ -316,17 +318,6 @@ void OculusVRDevice::getEyeOffsets(Point3F *dest) const
hmd->getEyeOffsets(dest);
}
bool OculusVRDevice::providesFovPorts() const
{
if(!mHMDDevices.size())
return false;
const OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId);
if(!hmd)
return Point3F::Zero;
return true;
}
void OculusVRDevice::getFovPorts(FovPort *out) const
{
@ -560,6 +551,20 @@ GameConnection* OculusVRDevice::getCurrentConnection()
//-----------------------------------------------------------------------------
// Returns the preview (mirror) texture of the active HMD, or NULL when no
// HMD is enumerated or the active device id does not resolve to one.
GFXTexHandle OculusVRDevice::getPreviewTexture()
{
   if (mHMDDevices.size())
   {
      OculusVRHMDDevice* activeHMD = getHMDDevice(mActiveDeviceId);
      if (activeHMD)
         return activeHMD->getPreviewTexture();
   }

   return NULL;
}
//-----------------------------------------------------------------------------
DefineEngineFunction(isOculusVRDeviceActive, bool, (),,
"@brief Used to determine if the Oculus VR input device is active\n\n"
@ -700,7 +705,7 @@ DefineEngineFunction(getOVRHMDVersion, S32, (S32 index),,
return hmd->getVersion();
}
DefineEngineFunction(getOVRHMDDisplayDeviceName, const char*, (S32 index),,
DefineEngineFunction(getOVRHMDDisplayDeviceType, const char*, (S32 index),,
"@brief Windows display device name used in EnumDisplaySettings/CreateDC.\n\n"
"@param index The HMD index.\n"
"@return The name of the HMD display device, if any.\n"
@ -717,7 +722,7 @@ DefineEngineFunction(getOVRHMDDisplayDeviceName, const char*, (S32 index),,
return "";
}
return hmd->getDisplayDeviceName();
return hmd->getDisplayDeviceType();
}
DefineEngineFunction(getOVRHMDDisplayDeviceId, S32, (S32 index),,
@ -740,26 +745,6 @@ DefineEngineFunction(getOVRHMDDisplayDeviceId, S32, (S32 index),,
return hmd->getDisplayDeviceId();
}
DefineEngineFunction(getOVRHMDDisplayDesktopPos, Point2I, (S32 index),,
"@brief Desktop coordinate position of the screen (can be negative; may not be present on all platforms).\n\n"
"@param index The HMD index.\n"
"@return Position of the screen.\n"
"@ingroup Game")
{
if(!ManagedSingleton<OculusVRDevice>::instanceOrNull())
{
return Point2I::Zero;
}
const OculusVRHMDDevice* hmd = OCULUSVRDEV->getHMDDevice(index);
if(!hmd)
{
return Point2I::Zero;
}
return hmd->getDesktopPosition();
}
DefineEngineFunction(getOVRHMDResolution, Point2I, (S32 index),,
"@brief Provides the OVR HMD screen resolution.\n\n"
"@param index The HMD index.\n"

View file

@ -33,7 +33,7 @@
#include "math/mQuat.h"
#include "math/mPoint4.h"
#include "gfx/gfxDevice.h"
#include "OVR_CAPI_0_5_0.h"
#include "OVR_CAPI_0_8_0.h"
#define DEFAULT_RIFT_UNIT 0
@ -83,6 +83,9 @@ protected:
/// Which HMD is the active one
U32 mActiveDeviceId;
/// Device id we need to use to hook up with oculus
ovrGraphicsLuid mLuid;
protected:
void cleanUp();
@ -90,7 +93,7 @@ protected:
/// Input Event Manager
void buildCodeTable();
void addHMDDevice(ovrHmd hmd);
void addHMDDevice(ovrHmd hmd, ovrGraphicsLuid luid);
void createSimulatedHMD();
@ -112,8 +115,8 @@ public:
virtual bool providesFrameEyePose() const;
virtual void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const;
virtual bool providesEyeOffsets() const;
virtual bool providesFovPorts() const { return true; }
virtual void getEyeOffsets(Point3F *dest) const;
virtual bool providesFovPorts() const;
virtual void getFovPorts(FovPort *out) const;
virtual bool providesProjectionOffset() const;
virtual const Point2F& getProjectionOffset() const;
@ -151,6 +154,8 @@ public:
virtual void setCurrentConnection(GameConnection *connection);
virtual GameConnection* getCurrentConnection();
GFXTexHandle getPreviewTexture();
bool _handleDeviceEvent( GFXDevice::GFXDeviceEventType evt );
public:

View file

@ -26,11 +26,17 @@
#include "postFx/postEffectCommon.h"
#include "gui/core/guiCanvas.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
#include "core/stream/fileStream.h"
#include "gfx/D3D9/gfxD3D9Device.h"
// Use D3D9 for win32
#include "gfx/D3D11/gfxD3D11Device.h"
#include "gfx/D3D11/gfxD3D11EnumTranslate.h"
#include "gfx/gfxStringEnumTranslate.h"
#undef D3D11
// Use D3D11 for win32
#ifdef TORQUE_OS_WIN
#define OVR_D3D_VERSION 9
#define OVR_D3D_VERSION 11
#include "OVR_CAPI_D3D.h"
#define OCULUS_USE_D3D
#else
@ -38,15 +44,125 @@
#define OCULUS_USE_GL
#endif
extern GFXTextureObject *gLastStereoTexture;
struct OculusTexture
{
virtual void AdvanceToNextTexture() = 0;
OculusVRHMDDevice::OculusVRHMDDevice() :
mWindowSize(1280,800)
virtual ~OculusTexture() {
}
};
//------------------------------------------------------------
// ovrSwapTextureSet wrapper class that also maintains the render target views
// needed for D3D11 rendering.
// ovrSwapTextureSet wrapper for D3D11: owns the LibOVR swap texture set and
// wraps each of its textures in a GFXD3D11TextureObject (with render target
// view) so the engine can render into them.
struct D3D11OculusTexture : public OculusTexture
{
// HMD session the swap texture set was created for; needed for destruction.
ovrHmd hmd;
// LibOVR-owned swap texture set (created in Init, destroyed in dtor).
ovrSwapTextureSet * TextureSet;
static const int TextureCount = 2;
// Engine-side wrappers around the swap set's textures, one per slot.
GFXTexHandle TexRtv[TextureCount];
// Device the textures belong to. NOTE(review): stored but not otherwise
// used in this struct -- confirm it is still needed.
GFXDevice *Owner;
D3D11OculusTexture(GFXDevice* owner) :
hmd(nullptr),
TextureSet(nullptr),
Owner(owner)
{
TexRtv[0] = TexRtv[1] = nullptr;
}
// Creates the swap texture set at the given size and wraps each texture.
// Returns false if LibOVR fails to create the set.
bool Init(ovrHmd _hmd, int sizeW, int sizeH)
{
hmd = _hmd;
// Describe the render-target textures LibOVR should allocate.
D3D11_TEXTURE2D_DESC dsDesc;
dsDesc.Width = sizeW;
dsDesc.Height = sizeH;
dsDesc.MipLevels = 1;
dsDesc.ArraySize = 1;
dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
dsDesc.SampleDesc.Count = 1; // No multi-sampling allowed
dsDesc.SampleDesc.Quality = 0;
dsDesc.Usage = D3D11_USAGE_DEFAULT;
dsDesc.CPUAccessFlags = 0;
dsDesc.MiscFlags = 0;
dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET;
GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);
// Typeless creation lets us view the texture with a non-sRGB RTV below.
ovrResult result = ovr_CreateSwapTextureSetD3D11(hmd, device->mD3DDevice, &dsDesc, ovrSwapTextureSetD3D11_Typeless, &TextureSet);
if (!OVR_SUCCESS(result))
return false;
AssertFatal(TextureSet->TextureCount == TextureCount, "TextureCount mismatch.");
// Wrap each LibOVR texture in an engine texture object with its own RTV.
for (int i = 0; i < TextureCount; ++i)
{
ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i];
D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM;
rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile);
object->registerResourceWithDevice(GFX);
// Adopt LibOVR's views/texture directly into the wrapper object.
*(object->getSRViewPtr()) = tex->D3D11.pSRView;
*(object->get2DTexPtr()) = tex->D3D11.pTexture;
device->mD3DDevice->CreateRenderTargetView(tex->D3D11.pTexture, &rtvd, object->getRTViewPtr());
// Add refs for texture release later on
if (object->getSRView()) object->getSRView()->AddRef();
//object->getRTView()->AddRef();
if (object->get2DTex()) object->get2DTex()->AddRef();
object->isManaged = true;
// Get the actual size of the texture...
D3D11_TEXTURE2D_DESC probeDesc;
ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC));
object->get2DTex()->GetDesc(&probeDesc);
object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0);
object->mBitmapSize = object->mTextureSize;
// Map the DXGI format back to an engine format for the wrapper.
int fmt = probeDesc.Format;
if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS)
{
object->mFormat = GFXFormatR8G8B8A8; // usual case
}
else
{
// TODO: improve this. this can be very bad.
GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt);
object->mFormat = (GFXFormat)fmt;
}
TexRtv[i] = object;
}
return true;
}
// Releases the engine wrappers, then the LibOVR swap texture set.
~D3D11OculusTexture()
{
for (int i = 0; i < TextureCount; ++i)
{
SAFE_DELETE(TexRtv[i]);
}
if (TextureSet)
{
ovr_DestroySwapTextureSet(hmd, TextureSet);
}
}
// Cycles to the next texture in the swap set (call once per submitted frame).
void AdvanceToNextTexture()
{
TextureSet->CurrentIndex = (TextureSet->CurrentIndex + 1) % TextureSet->TextureCount;
}
};
OculusVRHMDDevice::OculusVRHMDDevice()
{
mIsValid = false;
mDevice = NULL;
mSupportedDistortionCaps = 0;
mCurrentDistortionCaps = 0;
mCurrentCaps = 0;
mSupportedCaps = 0;
mVsync = true;
@ -60,6 +176,7 @@ mWindowSize(1280,800)
mConnection = NULL;
mSensor = NULL;
mActionCodeIndex = 0;
mTextureSwapSet = NULL;
}
OculusVRHMDDevice::~OculusVRHMDDevice()
@ -79,14 +196,14 @@ void OculusVRHMDDevice::cleanUp()
if(mDevice)
{
ovrHmd_Destroy(mDevice);
ovr_Destroy(mDevice);
mDevice = NULL;
}
mIsValid = false;
}
void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
void OculusVRHMDDevice::set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeIndex)
{
cleanUp();
@ -95,50 +212,42 @@ void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
mDevice = hmd;
mSupportedCaps = hmd->HmdCaps;
mCurrentCaps = mSupportedCaps & (ovrHmdCap_DynamicPrediction | ovrHmdCap_LowPersistence | (!mVsync ? ovrHmdCap_NoVSync : 0));
ovrHmdDesc desc = ovr_GetHmdDesc(hmd);
int caps = ovr_GetTrackingCaps(hmd);
mSupportedDistortionCaps = hmd->DistortionCaps;
mCurrentDistortionCaps = mSupportedDistortionCaps & (ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette | ovrDistortionCap_Overdrive);
mTimewarp = mSupportedDistortionCaps & ovrDistortionCap_TimeWarp;
mSupportedCaps = desc.AvailableHmdCaps;
mCurrentCaps = mSupportedCaps;
mTimewarp = true;
// DeviceInfo
mProductName = hmd->ProductName;
mManufacturer = hmd->Manufacturer;
mVersion = hmd->FirmwareMajor;
mProductName = desc.ProductName;
mManufacturer = desc.Manufacturer;
mVersion = desc.FirmwareMajor;
mDisplayDeviceName = hmd->DisplayDeviceName;
mDisplayId = hmd->DisplayId;
//
Vector<GFXAdapter*> adapterList;
GFXD3D11Device::enumerateAdapters(adapterList);
mDesktopPosition.x = hmd->WindowsPos.x;
mDesktopPosition.y = hmd->WindowsPos.y;
dMemcpy(&mLuid, &luid, sizeof(mLuid));
mDisplayId = -1;
mResolution.x = hmd->Resolution.w;
mResolution.y = hmd->Resolution.h;
mProfileInterpupillaryDistance = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
mLensSeparation = ovrHmd_GetFloat(hmd, "LensSeparation", 0);
ovrHmd_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2);
dMemcpy(mCurrentFovPorts, mDevice->DefaultEyeFov, sizeof(mDevice->DefaultEyeFov));
for (U32 i=0; i<2; i++)
for (U32 i = 0, sz = adapterList.size(); i < sz; i++)
{
mCurrentFovPorts[i].UpTan = mDevice->DefaultEyeFov[i].UpTan;
mCurrentFovPorts[i].DownTan = mDevice->DefaultEyeFov[i].DownTan;
mCurrentFovPorts[i].LeftTan = mDevice->DefaultEyeFov[i].LeftTan;
mCurrentFovPorts[i].RightTan = mDevice->DefaultEyeFov[i].RightTan;
GFXAdapter* adapter = adapterList[i];
if (dMemcmp(&adapter->mLUID, &mLuid, sizeof(mLuid)) == 0)
{
mDisplayId = adapter->mIndex;
mDisplayDeviceType = "D3D11"; // TOFIX this
}
}
if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop)
{
mWindowSize = Point2I(mDevice->Resolution.w, mDevice->Resolution.h);
}
else
{
mWindowSize = Point2I(1100, 618);
}
mResolution.x = desc.Resolution.w;
mResolution.y = desc.Resolution.h;
mProfileInterpupillaryDistance = ovr_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD);
mLensSeparation = ovr_GetFloat(hmd, "LensSeparation", 0);
ovr_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2);
mActionCodeIndex = actionCodeIndex;
@ -147,6 +256,8 @@ void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex)
mSensor = new OculusVRSensorDevice();
mSensor->set(mDevice, mActionCodeIndex);
mDebugMirrorTexture = NULL;
updateCaps();
}
@ -163,25 +274,26 @@ void OculusVRHMDDevice::setOptimalDisplaySize(GuiCanvas *canvas)
PlatformWindow *window = canvas->getPlatformWindow();
GFXTarget *target = window->getGFXTarget();
if (target && target->getSize() != mWindowSize)
Point2I requiredSize(0, 0);
ovrHmdDesc desc = ovr_GetHmdDesc(mDevice);
ovrSizei leftSize = ovr_GetFovTextureSize(mDevice, ovrEye_Left, desc.DefaultEyeFov[0], mCurrentPixelDensity);
ovrSizei rightSize = ovr_GetFovTextureSize(mDevice, ovrEye_Right, desc.DefaultEyeFov[1], mCurrentPixelDensity);
requiredSize.x = leftSize.w + rightSize.h;
requiredSize.y = mMax(leftSize.h, rightSize.h);
if (target && target->getSize() != requiredSize)
{
GFXVideoMode newMode;
newMode.antialiasLevel = 0;
newMode.bitDepth = 32;
newMode.fullScreen = false;
newMode.refreshRate = 75;
newMode.resolution = mWindowSize;
newMode.resolution = requiredSize;
newMode.wideScreen = false;
window->setVideoMode(newMode);
//AssertFatal(window->getClientExtent().x == mWindowSize[0] && window->getClientExtent().y == mWindowSize[1], "Window didn't resize to correct dimensions");
}
// Need to move window over to the rift side of the desktop
if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop && !OculusVRDevice::smWindowDebug)
{
#ifndef OCULUS_WINDOW_DEBUG
window->setPosition(getDesktopPosition());
#endif
//AssertFatal(window->getClientExtent().x == requiredSize.x && window->getClientExtent().y == requiredSize.y, "Window didn't resize to correct dimensions");
}
}
@ -190,53 +302,161 @@ bool OculusVRHMDDevice::isDisplayingWarning()
if (!mIsValid || !mDevice)
return false;
return false;/*
ovrHSWDisplayState displayState;
ovrHmd_GetHSWDisplayState(mDevice, &displayState);
return displayState.Displayed;
return displayState.Displayed;*/
}
void OculusVRHMDDevice::dismissWarning()
{
if (!mIsValid || !mDevice)
return;
ovrHmd_DismissHSWDisplay(mDevice);
//ovr_DismissHSWDisplay(mDevice);
}
GFXTexHandle OculusVRHMDDevice::getPreviewTexture()
{
if (!mIsValid || !mDevice)
return NULL;
return mDebugMirrorTextureHandle;
}
bool OculusVRHMDDevice::setupTargets()
{
ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
// Create eye render buffers
ID3D11RenderTargetView * eyeRenderTexRtv[2];
ovrLayerEyeFov ld = { { ovrLayerType_EyeFov } };
mRenderLayer = ld;
mRecomendedEyeTargetSize[0] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Left, eyeFov[0], mCurrentPixelDensity);
mRecomendedEyeTargetSize[1] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Right, eyeFov[1], mCurrentPixelDensity);
GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);
ovrHmdDesc desc = ovr_GetHmdDesc(mDevice);
for (int i = 0; i < 2; i++)
{
mRenderLayer.Fov[i] = desc.DefaultEyeFov[i];
mRenderLayer.Viewport[i].Size = ovr_GetFovTextureSize(mDevice, (ovrEyeType)i, mRenderLayer.Fov[i], mCurrentPixelDensity);
mEyeRenderDesc[i] = ovr_GetRenderDesc(mDevice, (ovrEyeType_)(ovrEye_Left+i), mRenderLayer.Fov[i]);
}
ovrSizei recommendedEyeTargetSize[2];
recommendedEyeTargetSize[0] = mRenderLayer.Viewport[0].Size;
recommendedEyeTargetSize[1] = mRenderLayer.Viewport[1].Size;
if (mTextureSwapSet)
{
delete mTextureSwapSet;
mTextureSwapSet = NULL;
}
// Calculate render target size
if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide)
{
// Setup a single texture, side-by-side viewports
Point2I rtSize(
mRecomendedEyeTargetSize[0].w + mRecomendedEyeTargetSize[1].w,
mRecomendedEyeTargetSize[0].h > mRecomendedEyeTargetSize[1].h ? mRecomendedEyeTargetSize[0].h : mRecomendedEyeTargetSize[1].h
recommendedEyeTargetSize[0].w + recommendedEyeTargetSize[1].w,
recommendedEyeTargetSize[0].h > recommendedEyeTargetSize[1].h ? recommendedEyeTargetSize[0].h : recommendedEyeTargetSize[1].h
);
GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat();
mRTFormat = targetFormat;
rtSize = generateRenderTarget(mStereoRT, mStereoTexture, mStereoDepthTexture, rtSize);
rtSize = generateRenderTarget(mStereoRT, mStereoDepthTexture, rtSize);
// Generate the swap texture we need to store the final image
D3D11OculusTexture* tex = new D3D11OculusTexture(GFX);
if (tex->Init(mDevice, rtSize.x, rtSize.y))
{
mTextureSwapSet = tex;
}
mRenderLayer.ColorTexture[0] = tex->TextureSet;
mRenderLayer.ColorTexture[1] = tex->TextureSet;
mRenderLayer.Viewport[0].Pos.x = 0;
mRenderLayer.Viewport[0].Pos.y = 0;
mRenderLayer.Viewport[1].Pos.x = (rtSize.x + 1) / 2;
mRenderLayer.Viewport[1].Pos.y = 0;
// Left
mEyeRenderSize[0] = rtSize;
mEyeRT[0] = mStereoRT;
mEyeTexture[0] = mStereoTexture;
mEyeViewport[0] = RectI(Point2I(0,0), Point2I((mRecomendedEyeTargetSize[0].w+1)/2, mRecomendedEyeTargetSize[0].h));
mEyeViewport[0] = RectI(Point2I(mRenderLayer.Viewport[0].Pos.x, mRenderLayer.Viewport[0].Pos.y), Point2I(mRenderLayer.Viewport[0].Size.w, mRenderLayer.Viewport[0].Size.h));
// Right
mEyeRenderSize[1] = rtSize;
mEyeRT[1] = mStereoRT;
mEyeTexture[1] = mStereoTexture;
mEyeViewport[1] = RectI(Point2I((mRecomendedEyeTargetSize[0].w+1)/2,0), Point2I((mRecomendedEyeTargetSize[1].w+1)/2, mRecomendedEyeTargetSize[1].h));
mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h));
gLastStereoTexture = mEyeTexture[0];
GFXD3D11Device* device = static_cast<GFXD3D11Device*>(GFX);
D3D11_TEXTURE2D_DESC dsDesc;
dsDesc.Width = rtSize.x;
dsDesc.Height = rtSize.y;
dsDesc.MipLevels = 1;
dsDesc.ArraySize = 1;
dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB;
dsDesc.SampleDesc.Count = 1;
dsDesc.SampleDesc.Quality = 0;
dsDesc.Usage = D3D11_USAGE_DEFAULT;
dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE;
dsDesc.CPUAccessFlags = 0;
dsDesc.MiscFlags = 0;
// Create typeless when we are rendering as non-sRGB since we will override the texture format in the RTV
bool reinterpretSrgbAsLinear = true;
unsigned compositorTextureFlags = 0;
if (reinterpretSrgbAsLinear)
compositorTextureFlags |= ovrSwapTextureSetD3D11_Typeless;
ovrResult result = ovr_CreateMirrorTextureD3D11(mDevice, device->mD3DDevice, &dsDesc, compositorTextureFlags, &mDebugMirrorTexture);
if (result == ovrError_DisplayLost || !mDebugMirrorTexture)
{
AssertFatal(false, "Something went wrong");
return NULL;
}
// Create texture handle so we can render it in-game
ovrD3D11Texture* mirror_tex = (ovrD3D11Texture*)mDebugMirrorTexture;
D3D11_RENDER_TARGET_VIEW_DESC rtvd = {};
rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM;
rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D;
GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile);
object->registerResourceWithDevice(GFX);
*(object->getSRViewPtr()) = mirror_tex->D3D11.pSRView;
*(object->get2DTexPtr()) = mirror_tex->D3D11.pTexture;
device->mD3DDevice->CreateRenderTargetView(mirror_tex->D3D11.pTexture, &rtvd, object->getRTViewPtr());
// Add refs for texture release later on
if (object->getSRView()) object->getSRView()->AddRef();
//object->getRTView()->AddRef();
if (object->get2DTex()) object->get2DTex()->AddRef();
object->isManaged = true;
// Get the actual size of the texture...
D3D11_TEXTURE2D_DESC probeDesc;
ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC));
object->get2DTex()->GetDesc(&probeDesc);
object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0);
object->mBitmapSize = object->mTextureSize;
int fmt = probeDesc.Format;
if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS)
{
object->mFormat = GFXFormatR8G8B8A8; // usual case
}
else
{
// TODO: improve this. this can be very bad.
GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt);
object->mFormat = (GFXFormat)fmt;
}
mDebugMirrorTextureHandle = object;
}
else
{
@ -261,17 +481,14 @@ String OculusVRHMDDevice::dumpMetrics()
F32 ipd = this->getIPD();
U32 lastStatus = mSensor->getLastTrackingStatus();
sb.format(" | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s, Disort:%s%s%s",
sb.format(" | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s",
mActionCodeIndex,
rot.x, rot.y, rot.z,
pos.x, pos.y, pos.z,
eyeFov[0].upTan, eyeFov[0].downTan, eyeFov[0].leftTan, eyeFov[0].rightTan, eyeFov[1].upTan, eyeFov[1].downTan, eyeFov[1].leftTan, eyeFov[1].rightTan,
getIPD(),
lastStatus & ovrStatus_OrientationTracked ? " ORIENT" : "",
lastStatus & ovrStatus_PositionTracked ? " POS" : "",
mCurrentDistortionCaps & ovrDistortionCap_TimeWarp ? " TIMEWARP" : "",
mCurrentDistortionCaps & ovrDistortionCap_Vignette ? " VIGNETTE" : "",
mCurrentDistortionCaps & ovrDistortionCap_Overdrive ? " OVERDRIVE" : "");
lastStatus & ovrStatus_PositionTracked ? " POS" : "");
return sb.data();
}
@ -292,82 +509,23 @@ void OculusVRHMDDevice::updateRenderInfo()
return;
PlatformWindow *window = mDrawCanvas->getPlatformWindow();
ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]};
ovrHmdDesc desc = ovr_GetHmdDesc(mDevice);
// Update window size if it's incorrect
Point2I backbufferSize = mDrawCanvas->getBounds().extent;
// Reset
ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
#ifdef OCULUS_USE_D3D
// Generate render target textures
GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
if (d3d9GFX)
// Finally setup!
if (!setupTargets())
{
ovrD3D9Config cfg;
cfg.D3D9.Header.API = ovrRenderAPI_D3D9;
cfg.D3D9.Header.Multisample = 0;
cfg.D3D9.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
cfg.D3D9.pDevice = d3d9GFX->getDevice();
cfg.D3D9.pDevice->GetSwapChain(0, &cfg.D3D9.pSwapChain);
// Finally setup!
if (!setupTargets())
{
onDeviceDestroy();
return;
}
ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
{
Con::errorf("Couldn't configure oculus rendering!");
return;
}
onDeviceDestroy();
return;
}
#endif
#ifdef OCULUS_USE_GL
// Generate render target textures
GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
if (glGFX)
{
ovrGLConfig cfg;
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.Multisample = 0;
cfg.OGL.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y);
#ifdef WIN32
cfg.OGL.Window = GetActiveWindow();//window->getPlatformDrawable();
cfg.OGL.DC = wglGetCurrentDC();
#else
cfg.OGL.Disp = NULL;
#endif
// Finally setup!
if (!setupTargets())
{
onDeviceDestroy();
return;
}
ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL);
if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc ))
{
Con::errorf("Couldn't configure oculus rendering!");
return;
}
}
#endif
mRenderConfigurationDirty = false;
}
Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize)
Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &depth, Point2I desiredSize)
{
// Texture size that we already have might be big enough.
Point2I newRTSize;
@ -402,12 +560,12 @@ Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFX
newRTSize.setMax(Point2I(64, 64));
// Stereo RT needs to be the same size as the recommended RT
if ( newRT || texture.getWidthHeight() != newRTSize )
/*if ( newRT || mDebugStereoTexture.getWidthHeight() != newRTSize )
{
texture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
target->attachTexture( GFXTextureTarget::Color0, texture );
Con::printf("generateRenderTarget generated %x", texture.getPointer());
}
mDebugStereoTexture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) );
target->attachTexture( GFXTextureTarget::Color0, mDebugStereoTexture);
Con::printf("generateRenderTarget generated %x", mDebugStereoTexture.getPointer());
}*/
if ( depth.getWidthHeight() != newRTSize )
{
@ -424,6 +582,13 @@ void OculusVRHMDDevice::clearRenderTargets()
mStereoRT = NULL;
mEyeRT[0] = NULL;
mEyeRT[1] = NULL;
if (mDebugMirrorTexture)
{
ovr_DestroyMirrorTexture(mDevice, mDebugMirrorTexture);
mDebugMirrorTexture = NULL;
mDebugMirrorTextureHandle = NULL;
}
}
void OculusVRHMDDevice::updateCaps()
@ -431,34 +596,7 @@ void OculusVRHMDDevice::updateCaps()
if (!mIsValid || !mDevice)
return;
U32 oldDistortionCaps = mCurrentDistortionCaps;
// Distortion
if (mTimewarp)
{
mCurrentDistortionCaps |= ovrDistortionCap_TimeWarp;
}
else
{
mCurrentDistortionCaps &= ~ovrDistortionCap_TimeWarp;
}
if (oldDistortionCaps != mCurrentDistortionCaps)
{
mRenderConfigurationDirty = true;
}
// Device
if (!mVsync)
{
mCurrentCaps |= ovrHmdCap_NoVSync;
}
else
{
mCurrentCaps &= ~ovrHmdCap_NoVSync;
}
ovrHmd_SetEnabledCaps(mDevice, mCurrentCaps);
ovr_SetEnabledCaps(mDevice, mCurrentCaps);
}
static bool sInFrame = false; // protects against recursive onStartFrame calls
@ -469,108 +607,64 @@ void OculusVRHMDDevice::onStartFrame()
return;
sInFrame = true;
#ifndef OCULUS_DEBUG_FRAME
ovrHmd_BeginFrame(mDevice, 0);
#endif
ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset };
ovrHmd_GetEyePoses(mDevice, 0, hmdToEyeViewOffset, mCurrentEyePoses, &mLastTrackingState);
ovrTrackingState hmdState = ovr_GetTrackingState(mDevice, 0, ovrTrue);
ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, mRenderLayer.RenderPose);
for (U32 i=0; i<2; i++)
{
mCurrentEyePoses[i].Position.x *= OculusVRDevice::smPositionTrackingScale;
mCurrentEyePoses[i].Position.y *= OculusVRDevice::smPositionTrackingScale;
mCurrentEyePoses[i].Position.z *= OculusVRDevice::smPositionTrackingScale;
mRenderLayer.RenderPose[i].Position.x *= OculusVRDevice::smPositionTrackingScale;
mRenderLayer.RenderPose[i].Position.y *= OculusVRDevice::smPositionTrackingScale;
mRenderLayer.RenderPose[i].Position.z *= OculusVRDevice::smPositionTrackingScale;
}
mRenderLayer.SensorSampleTime = ovr_GetTimeInSeconds();
// Set current dest texture on stereo render target
D3D11OculusTexture* texSwap = (D3D11OculusTexture*)mTextureSwapSet;
mStereoRT->attachTexture(GFXTextureTarget::Color0, texSwap->TexRtv[texSwap->TextureSet->CurrentIndex]);
sInFrame = false;
mFrameReady = true;
}
void OculusVRHMDDevice::onEndFrame()
{
if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady)
if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady || !mTextureSwapSet)
return;
Point2I eyeSize;
GFXTarget *windowTarget = mDrawCanvas->getPlatformWindow()->getGFXTarget();
#ifndef OCULUS_DEBUG_FRAME
#ifdef OCULUS_USE_D3D
GFXD3D9Device *d3d9GFX = dynamic_cast<GFXD3D9Device*>(GFX);
if (d3d9GFX && mEyeRT[0].getPointer())
GFXD3D11Device *d3d11GFX = dynamic_cast<GFXD3D11Device*>(GFX);
ovrViewScaleDesc viewScaleDesc;
ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset };
viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
viewScaleDesc.HmdToEyeViewOffset[0] = hmdToEyeViewOffset[0];
viewScaleDesc.HmdToEyeViewOffset[1] = hmdToEyeViewOffset[1];
ovrLayerDirect ld = { { ovrLayerType_Direct } };
mDebugRenderLayer = ld;
mDebugRenderLayer.ColorTexture[0] = mRenderLayer.ColorTexture[0];
mDebugRenderLayer.ColorTexture[1] = mRenderLayer.ColorTexture[1];
mDebugRenderLayer.Viewport[0] = mRenderLayer.Viewport[0];
mDebugRenderLayer.Viewport[1] = mRenderLayer.Viewport[1];
// TODO: use ovrViewScaleDesc
ovrLayerHeader* layers = &mRenderLayer.Header;
ovrResult result = ovr_SubmitFrame(mDevice, 0, &viewScaleDesc, &layers, 1);
mTextureSwapSet->AdvanceToNextTexture();
if (OVR_SUCCESS(result))
{
// Left
ovrD3D9Texture eyeTextures[2];
eyeSize = mEyeTexture[0].getWidthHeight();
eyeTextures[0].D3D9.Header.API = ovrRenderAPI_D3D9;
eyeTextures[0].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
eyeTextures[0].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
eyeTextures[0].D3D9.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
eyeTextures[0].D3D9.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
eyeTextures[0].D3D9.Header.TextureSize.w = eyeSize.x;
eyeTextures[0].D3D9.Header.TextureSize.h = eyeSize.y;
eyeTextures[0].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[0].getPointer())->get2DTex() : NULL;
// Right
eyeSize = mEyeTexture[1].getWidthHeight();
eyeTextures[1].D3D9.Header.API = ovrRenderAPI_D3D9;
eyeTextures[1].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
eyeTextures[1].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
eyeTextures[1].D3D9.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
eyeTextures[1].D3D9.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
eyeTextures[1].D3D9.Header.TextureSize.w = eyeSize.x;
eyeTextures[1].D3D9.Header.TextureSize.h = eyeSize.y;
eyeTextures[1].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast<GFXD3D9TextureObject*>(mEyeTexture[1].getPointer())->get2DTex() : NULL;
// Submit!
GFX->disableShaders();
GFX->setActiveRenderTarget(windowTarget);
GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
int woo = 1;
}
#endif
#ifdef OCULUS_USE_GL
GFXGLDevice *glGFX = dynamic_cast<GFXGLDevice*>(GFX);
if (glGFX && mEyeRT[0].getPointer())
{
// Left
ovrGLTexture eyeTextures[2];
eyeSize = mEyeTexture[0].getWidthHeight();
eyeTextures[0].OGL.Header.API = ovrRenderAPI_GL;
eyeTextures[0].OGL.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x;
eyeTextures[0].OGL.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y;
eyeTextures[0].OGL.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x;
eyeTextures[0].OGL.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y;
eyeTextures[0].OGL.Header.TextureSize.w = eyeSize.x;
eyeTextures[0].OGL.Header.TextureSize.h = eyeSize.y;
eyeTextures[0].OGL.TexId = mEyeRT[0].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[0].getPointer())->getHandle() : 0;
// Right
eyeSize = mEyeTexture[1].getWidthHeight();
eyeTextures[1].OGL.Header.API = ovrRenderAPI_GL;
eyeTextures[1].OGL.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x;
eyeTextures[1].OGL.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y;
eyeTextures[1].OGL.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x;
eyeTextures[1].OGL.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y;
eyeTextures[1].OGL.Header.TextureSize.w = eyeSize.x;
eyeTextures[1].OGL.Header.TextureSize.h = eyeSize.y;
eyeTextures[0].OGL.TexId = mEyeRT[1].getPointer() ? static_cast<GFXGLTextureObject*>(mEyeTexture[1].getPointer())->getHandle() : 0;
// Submit!
GFX->disableShaders();
GFX->setActiveRenderTarget(windowTarget);
GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0);
ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0]));
}
#endif
#endif
// TODO: render preview in display?
mFrameReady = false;
}
@ -578,14 +672,15 @@ void OculusVRHMDDevice::onEndFrame()
void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const
{
// Directly set the rotation and position from the eye transforms
ovrPosef pose = mCurrentEyePoses[eyeId];
ovrPosef pose = mRenderLayer.RenderPose[eyeId];
OVR::Quatf orientation = pose.Orientation;
const OVR::Vector3f position = pose.Position;
EulerF rotEuler;
OculusVRUtil::convertRotation(orientation, rotEuler);
MatrixF torqueMat(1);
OVR::Matrix4f mat(orientation);
OculusVRUtil::convertRotation(mat.M, torqueMat);
outPose->orientation = rotEuler;
outPose->orientation = QuatF(torqueMat);
outPose->position = Point3F(-position.x, position.z, -position.y);
}
@ -605,18 +700,17 @@ void OculusVRHMDDevice::onDeviceDestroy()
mEyeRT[1]->zombify();
}
if (mTextureSwapSet)
{
delete mTextureSwapSet;
mTextureSwapSet = NULL;
}
mStereoRT = NULL;
mStereoTexture = NULL;
mStereoDepthTexture = NULL;
mEyeTexture[0] = NULL;
mEyeDepthTexture[0] = NULL;
mEyeTexture[1] = NULL;
mEyeDepthTexture[1] = NULL;
mEyeRT[0] = NULL;
mEyeRT[1] = NULL;
mRenderConfigurationDirty = true;
ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL);
}

View file

@ -34,12 +34,14 @@
#include "math/mRect.h"
#include "gfx/gfxDevice.h"
#include "OVR_CAPI_0_5_0.h"
#include "OVR_CAPI.h"
class GuiCanvas;
class GameConnection;
struct DisplayPose;
class OculusVRSensorDevice;
struct OculusTexture;
class OculusVRHMDDevice
{
@ -59,9 +61,6 @@ protected:
ovrHmd mDevice;
U32 mSupportedDistortionCaps;
U32 mCurrentDistortionCaps;
U32 mSupportedCaps;
U32 mCurrentCaps;
@ -70,15 +69,12 @@ protected:
String mManufacturer;
U32 mVersion;
// Windows display device name used in EnumDisplaySettings/CreateDC
String mDisplayDeviceName;
// Device type (D3D11, etc)
String mDisplayDeviceType;
// MacOS display ID
// Adapter index
S32 mDisplayId;
// Desktop coordinate position of the screen (can be negative; may not be present on all platforms)
Point2I mDesktopPosition;
// Whole screen resolution
Point2I mResolution;
@ -99,18 +95,15 @@ protected:
Point2F mProjectionCenterOffset;
// Current pose of eyes
ovrPosef mCurrentEyePoses[2];
ovrEyeRenderDesc mEyeRenderDesc[2];
ovrFovPort mCurrentFovPorts[2];
Point2I mWindowSize;
GameConnection *mConnection;
OculusVRSensorDevice *mSensor;
U32 mActionCodeIndex;
ovrGraphicsLuid mLuid;
protected:
void updateRenderInfo();
@ -121,7 +114,7 @@ public:
void cleanUp();
// Set the HMD properties based on information from the OVR device
void set(ovrHmd hmd, U32 actionCodeIndex);
void set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeIndex);
// Sets optimal display size for canvas
void setOptimalDisplaySize(GuiCanvas *canvas);
@ -133,14 +126,11 @@ public:
U32 getVersion() const { return mVersion; }
// Windows display device name used in EnumDisplaySettings/CreateDC
const char* getDisplayDeviceName() const { return mDisplayDeviceName.c_str(); }
const char* getDisplayDeviceType () const { return mDisplayDeviceType.c_str(); }
// MacOS display ID
S32 getDisplayDeviceId() const { return mDisplayId; }
// Desktop coordinate position of the screen (can be negative; may not be present on all platforms)
const Point2I& getDesktopPosition() const { return mDesktopPosition; }
// Whole screen resolution
const Point2I& getResolution() const { return mResolution; }
@ -166,7 +156,7 @@ public:
void getStereoViewports(RectI *dest) const { dMemcpy(dest, mEyeViewport, sizeof(mEyeViewport)); }
void getStereoTargets(GFXTextureTarget **dest) const { dest[0] = mEyeRT[0]; dest[1] = mEyeRT[1]; }
void getFovPorts(FovPort *dest) const { dMemcpy(dest, mCurrentFovPorts, sizeof(mCurrentFovPorts)); }
void getFovPorts(FovPort *dest) const { dMemcpy(dest, &mRenderLayer.Fov[0], sizeof(mRenderLayer.Fov)); }
/// Returns eye offsets in torque coordinate space, i.e. z being up, x being left-right, and y being depth (forward).
void getEyeOffsets(Point3F *offsets) const {
@ -181,7 +171,7 @@ public:
void onEndFrame();
void onDeviceDestroy();
Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize);
Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &depth, Point2I desiredSize);
void clearRenderTargets();
bool isDisplayingWarning();
@ -195,23 +185,17 @@ public:
virtual void setCurrentConnection(GameConnection *connection) { mConnection = connection; }
virtual GameConnection* getCurrentConnection() { return mConnection; }
GFXTexHandle getPreviewTexture();
String dumpMetrics();
// Stereo RT
GFXTexHandle mStereoTexture;
GFXTexHandle mDebugStereoTexture;
GFXTexHandle mStereoDepthTexture;
GFXTextureTargetRef mStereoRT;
// Eye RTs (if we are using separate targets)
GFXTextureTargetRef mEyeRT[2];
GFXTexHandle mEyeTexture[2];
GFXTexHandle mEyeDepthTexture[2];
// Current render target size for each eye
Point2I mEyeRenderSize[2];
// Recommended eye target size for each eye
ovrSizei mRecomendedEyeTargetSize[2];
// Desired viewport for each eye
RectI mEyeViewport[2];
@ -220,6 +204,12 @@ public:
F32 smDesiredPixelDensity;
ovrTrackingState mLastTrackingState;
OculusTexture* mTextureSwapSet;
ovrLayerEyeFov mRenderLayer;
ovrLayerDirect mDebugRenderLayer;
ovrViewScaleDesc mScaleDesc;
ovrTexture* mDebugMirrorTexture;
GFXTexHandle mDebugMirrorTextureHandle;
GFXDevice::GFXDeviceRenderStyles mDesiredRenderingMode;

View file

@ -27,7 +27,7 @@
#include "math/mMatrix.h"
#include "math/mQuat.h"
#include "math/mPoint2.h"
#include "OVR_CAPI_0_5_0.h"
#include "OVR_CAPI_0_8_0.h"
struct OculusVRSensorData
{

View file

@ -24,8 +24,10 @@
#include "platform/input/oculusVR/oculusVRSensorData.h"
#include "platform/input/oculusVR/oculusVRUtil.h"
#include "platform/platformInput.h"
#include"console/simBase.h"
#include "console/simBase.h"
#include "console/engineAPI.h"
#include "math/mAngAxis.h"
#include "OVR_CAPI_0_8_0.h"
U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0};
U32 OculusVRSensorDevice::OVR_SENSORROTANG[OculusVRConstants::MaxSensors] = {0};
@ -66,7 +68,7 @@ void OculusVRSensorDevice::cleanUp()
{
mIsValid = false;
ovrHmd_ConfigureTracking(mDevice, 0, 0);
ovr_ConfigureTracking(mDevice, 0, 0);
}
void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
@ -74,7 +76,7 @@ void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
mIsValid = false;
mDevice = sensor;
mSupportedTrackingCaps = sensor->TrackingCaps;
mSupportedTrackingCaps = ovr_GetTrackingCaps(sensor);
mCurrentTrackingCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position;
mCurrentTrackingCaps = mSupportedTrackingCaps & mCurrentTrackingCaps;
@ -82,15 +84,17 @@ void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex)
mPositionTrackingDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_Position);
ovrHmdDesc desc = ovr_GetHmdDesc(sensor);
// DeviceInfo
mProductName = sensor->ProductName;
mManufacturer = sensor->Manufacturer;
mVersion = sensor->Type;
mProductName = desc.ProductName;
mManufacturer = desc.Manufacturer;
mVersion = desc.Type;
// SensorInfo
mVendorId = sensor->VendorId;
mProductId = sensor->ProductId;
mSerialNumber = sensor->SerialNumber;
mVendorId = desc.VendorId;
mProductId = desc.ProductId;
mSerialNumber = desc.SerialNumber;
mActionCodeIndex = actionCodeIndex;
@ -163,7 +167,7 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
return false;
// Grab current state
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
mLastStatus = ts.StatusFlags;
// Store the current data from the sensor and compare with previous data
@ -181,7 +185,8 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo
{
if(generateRotAsAngAxis)
{
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, currentBuffer->mRotQuat);
AngAxisF axisAA(currentBuffer->mRotQuat);
INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, axisAA);
}
if(generateRotAsEuler)
@ -249,7 +254,7 @@ void OculusVRSensorDevice::reset()
if(!mIsValid)
return;
ovrHmd_RecenterPose(mDevice);
ovr_RecenterPose(mDevice);
}
bool OculusVRSensorDevice::getYawCorrection() const
@ -322,7 +327,7 @@ EulerF OculusVRSensorDevice::getEulerRotation()
if(!mIsValid)
return Point3F::Zero;
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
// Sensor rotation in Euler format
@ -337,7 +342,7 @@ EulerF OculusVRSensorDevice::getRawEulerRotation()
if(!mIsValid)
return Point3F::Zero;
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation;
// Sensor rotation in Euler format
@ -351,7 +356,7 @@ VectorF OculusVRSensorDevice::getAcceleration()
if(!mIsValid)
return VectorF::Zero;
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
OVR::Vector3f a = ts.HeadPose.LinearAcceleration;
// Sensor acceleration in VectorF format
@ -366,7 +371,7 @@ EulerF OculusVRSensorDevice::getAngularVelocity()
if(!mIsValid)
return EulerF::Zero;
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
OVR::Vector3f v = ts.HeadPose.AngularVelocity;
// Sensor angular velocity in EulerF format
@ -381,7 +386,7 @@ Point3F OculusVRSensorDevice::getPosition()
if(!mIsValid)
return Point3F();
ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds());
ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue);
OVR::Vector3f v = ts.HeadPose.ThePose.Position;
return Point3F(-v.x, v.z, -v.y);
}
@ -399,5 +404,5 @@ void OculusVRSensorDevice::updateTrackingCaps()
if (!mPositionTrackingDisabled)
mCurrentTrackingCaps |= ovrTrackingCap_Position;
ovrHmd_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0);
ovr_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0);
}

View file

@ -30,7 +30,7 @@
#include "math/mPoint4.h"
#include "platform/input/oculusVR/oculusVRConstants.h"
#include "platform/types.h"
#include "OVR_CAPI_0_5_0.h"
#include "OVR_CAPI.h"
struct OculusVRSensorData;

View file

@ -44,10 +44,7 @@ void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation)
void convertRotation(OVR::Quatf& inRotation, EulerF& outRotation)
{
F32 yaw, pitch, roll;
inRotation.GetEulerAngles<OVR::Axis_Y, OVR::Axis_X, OVR::Axis_Z>(&yaw, &pitch, &roll);
outRotation.x = -pitch;
outRotation.y = roll;
outRotation.z = -yaw;
inRotation.GetEulerAngles<OVR::Axis_X, OVR::Axis_Z, OVR::Axis_Y, OVR::Rotate_CW, OVR::Handed_R>(&outRotation.x, &outRotation.y, &outRotation.z);
}
void calculateAxisRotation(const MatrixF& inRotation, const F32& maxAxisRadius, Point2F& outRotation)

View file

@ -25,7 +25,7 @@
#include "math/mPoint2.h"
#include "math/mMatrix.h"
#include "OVR_CAPI_0_5_0.h"
#include "OVR_CAPI_0_8_0.h"
// NOTE: math code in oculus uses "Offset" which is a preprocessor macro
#define TorqueOffset Offset

View file

@ -0,0 +1,546 @@
#include "platform/input/openVR/openVRProvider.h"
#include "platform/input/openVR/openVROverlay.h"
#include "gfx/D3D11/gfxD3D11Device.h"
#include "gfx/D3D11/gfxD3D11TextureObject.h"
#include "gfx/D3D11/gfxD3D11EnumTranslate.h"
#ifdef TORQUE_OPENGL
#include "gfx/gl/gfxGLDevice.h"
#include "gfx/gl/gfxGLTextureObject.h"
#include "gfx/gl/gfxGLEnumTranslate.h"
#endif
#include "postFx/postEffectCommon.h"
#include "gui/controls/guiTextEditCtrl.h"
// Script-visible enum exposing OpenVROverlay::OverlayType values by name.
ImplementEnumType(OpenVROverlayType,
   "Desired overlay type for OpenVROverlay. .\n\n"
   "@ingroup OpenVR")
{ OpenVROverlay::OVERLAYTYPE_OVERLAY, "Overlay" },
{ OpenVROverlay::OVERLAYTYPE_DASHBOARD, "Dashboard" },
EndImplementEnumType;

IMPLEMENT_CONOBJECT(OpenVROverlay);
OpenVROverlay::OpenVROverlay()
{
   // Defaults: a 1.5m wide, fully opaque, seated-space overlay with an
   // identity placement transform.
   mOverlayWidth = 1.5f;
   mOverlayFlags = 0;
   mOverlayColor = ColorF(1, 1, 1, 1);
   mTransform = MatrixF(1);
   mTrackingOrigin = vr::TrackingUniverseSeated;
   mTargetFormat = GFXFormatR8G8B8A8_LINEAR_FORCE; // needed for openvr!
   mMouseScale = Point2F(1, 1);
   mManualMouseHandling = true;
}
OpenVROverlay::~OpenVROverlay()
{
   // Native overlay handles are released in onRemove(), not here.
}
static bool setProtectedOverlayTypeDirty(void *obj, const char *array, const char *data)
{
OpenVROverlay *object = static_cast<OpenVROverlay*>(obj);
object->mOverlayTypeDirty = true;
return true;
}
static bool setProtectedOverlayDirty(void *obj, const char *array, const char *data)
{
OpenVROverlay *object = static_cast<OpenVROverlay*>(obj);
object->mOverlayDirty = true;
return true;
}
void OpenVROverlay::initPersistFields()
{
   // Overlay parameters are protected fields: script writes route through the
   // setters above, which flag the overlay dirty for the next update.
   addProtectedField("overlayType", TypeOpenVROverlayType, Offset(mOverlayType, OpenVROverlay), &setProtectedOverlayTypeDirty, &defaultProtectedGetFn,
      "Type of overlay.");
   addProtectedField("overlayFlags", TypeS32, Offset(mOverlayFlags, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Flags for overlay.");
   addProtectedField("overlayWidth", TypeF32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Width of overlay.");
   addProtectedField("overlayColor", TypeColorF, Offset(mOverlayColor, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Backing color of overlay.");
   addProtectedField("transformType", TypeOpenVROverlayTransformType, Offset(mOverlayTransformType, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Transform type of overlay.");
   addProtectedField("transformPosition", TypeMatrixPosition, Offset(mTransform, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Position of overlay.");
   addProtectedField("transformRotation", TypeMatrixRotation, Offset(mTransform, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Rotation of overlay.");
   // BUGFIX: the next two descriptions were copy-pasted from
   // "transformRotation" ("Rotation of overlay."); they now describe the
   // fields they document.
   addProtectedField("transformDeviceIndex", TypeS32, Offset(mTransformDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Index of the tracked device the overlay transform is relative to.");
   addProtectedField("transformDeviceComponent", TypeString, Offset(mTransformDeviceComponent, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Name of the tracked device component the overlay transform follows.");
   addProtectedField("inputMethod", TypeOpenVROverlayInputMethod, Offset(mInputMethod, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Type of input method.");
   addProtectedField("mouseScale", TypePoint2F, Offset(mMouseScale, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Scale of mouse input.");
   addProtectedField("trackingOrigin", TypeOpenVRTrackingUniverseOrigin, Offset(mTrackingOrigin, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Tracking origin.");
   addProtectedField("controllerDevice", TypeS32, Offset(mControllerDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
      "Index of controller to attach overlay to.");

   addField("manualMouseHandling", TypeBool, Offset(mManualMouseHandling, OpenVROverlay), "Forces openvr to create mouse events for overlay");

   Parent::initPersistFields();
}
bool OpenVROverlay::onAdd()
{
   // Register the offscreen canvas first; bail out on failure.
   if (!Parent::onAdd())
      return false;

   // Force a full overlay (re)build on the next update.
   mOverlayTypeDirty = true;
   mOverlayDirty = true;

   // Let the provider know about us so it can pump our events.
   if (OPENVR)
      OPENVR->registerOverlay(this);

   return true;
}
void OpenVROverlay::onRemove()
{
   // Tear down the native overlay handle(s) and deregister from the provider.
   // NOTE(review): Parent::onRemove() is not called here — confirm whether
   // GuiOffscreenCanvas cleanup is performed elsewhere.
   if (mOverlayHandle)
   {
      vr::VROverlay()->DestroyOverlay(mOverlayHandle);
      mOverlayHandle = NULL;
   }
   if (mThumbOverlayHandle)
   {
      vr::VROverlay()->DestroyOverlay(mThumbOverlayHandle);
      mThumbOverlayHandle = NULL;
   }

   // instanceOrNull() guards against the provider already being destroyed
   // during engine shutdown.
   if (ManagedSingleton<OpenVRProvider>::instanceOrNull())
   {
      OPENVR->unregisterOverlay(this);
   }
}
void OpenVROverlay::resetOverlay()
{
   // Destroys and re-creates the native overlay handle(s) to match the
   // current overlay type (dashboard vs. plain in-world overlay).
   vr::IVROverlay *overlay = vr::VROverlay();
   if (!overlay)
      return;

   if (mOverlayHandle)
   {
      overlay->DestroyOverlay(mOverlayHandle);
      mOverlayHandle = NULL;
   }
   if (mThumbOverlayHandle)
   {
      overlay->DestroyOverlay(mThumbOverlayHandle);
      mThumbOverlayHandle = NULL;
   }

   if (mOverlayType == OpenVROverlay::OVERLAYTYPE_DASHBOARD)
   {
      // Dashboard overlays also get a secondary "thumb" overlay handle.
      overlay->CreateDashboardOverlay(mInternalName, mInternalName, &mOverlayHandle, &mThumbOverlayHandle);
   }
   else
   {
      overlay->CreateOverlay(mInternalName, mInternalName, &mOverlayHandle);
   }

   // Properties must be re-applied to the fresh handle.
   mOverlayDirty = true;
   mOverlayTypeDirty = false;

   // Pre-render start frame so we have a texture available
   if (!mTarget)
   {
      renderFrame(false, false);
   }
}
void OpenVROverlay::updateOverlay()
{
   // Pushes all dirty overlay properties (transform, alpha, mouse scale,
   // input method, width, flags) to the OpenVR overlay interface.
   if (mOverlayTypeDirty)
      resetOverlay();

   // Update params
   vr::IVROverlay *overlay = vr::VROverlay();
   if (!overlay || !mOverlayHandle)
      return;

   if (!mOverlayDirty)
      return;

   MatrixF vrMat(1);
   vr::HmdMatrix34_t ovrMat;
   vr::HmdVector2_t ovrMouseScale;
   ovrMouseScale.v[0] = mMouseScale.x;
   ovrMouseScale.v[1] = mMouseScale.y;

   // Convert the torque-space transform into OpenVR's affine matrix form.
   OpenVRUtil::convertTransformToOVR(mTransform, vrMat);
   OpenVRUtil::convertMatrixFPlainToSteamVRAffineMatrix(vrMat, ovrMat);

   // NOTE(review): this round-trip back to torque space is never read
   // (finalReverseMat is unused) — looks like a leftover debugging aid.
   MatrixF reverseMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(ovrMat);
   MatrixF finalReverseMat(1);
   OpenVRUtil::convertTransformFromOVR(reverseMat, finalReverseMat);

   switch (mOverlayTransformType)
   {
   case vr::VROverlayTransform_Absolute:
      overlay->SetOverlayTransformAbsolute(mOverlayHandle, mTrackingOrigin, &ovrMat);
      break;
   case vr::VROverlayTransform_TrackedDeviceRelative:
      overlay->SetOverlayTransformTrackedDeviceRelative(mOverlayHandle, mTransformDeviceIndex, &ovrMat);
      break;
   case vr::VROverlayTransform_TrackedComponent:
      overlay->SetOverlayTransformTrackedDeviceComponent(mOverlayHandle, mTransformDeviceIndex, mTransformDeviceComponent.c_str());
      break;
      // NOTE: system not handled here - doesn't seem possible to create these
   default:
      break;
   }

   // overlay->SetOverlayColor(mOverlayHandle, mOverlayColor.red, mOverlayColor.green, mOverlayColor.blue);
   overlay->SetOverlayAlpha(mOverlayHandle, mOverlayColor.alpha);
   overlay->SetOverlayMouseScale(mOverlayHandle, &ovrMouseScale);
   overlay->SetOverlayInputMethod(mOverlayHandle, mInputMethod);
   overlay->SetOverlayWidthInMeters(mOverlayHandle, mOverlayWidth);

   // Mirror each bit of mOverlayFlags onto the corresponding OpenVR flag.
   // NOTE: if flags in openvr change, double check this
   for (U32 i = vr::VROverlayFlags_None; i <= vr::VROverlayFlags_ShowTouchPadScrollWheel; i++)
   {
      overlay->SetOverlayFlag(mOverlayHandle, (vr::VROverlayFlags)i, mOverlayFlags & (1 << i));
   }

   mOverlayDirty = false;
}
void OpenVROverlay::showOverlay()
{
updateOverlay();
if (mOverlayHandle == NULL)
return;
if (mOverlayType != OVERLAYTYPE_DASHBOARD)
{
vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle);
if (err != vr::VROverlayError_None)
{
Con::errorf("VR Overlay error!");
}
}
if (!mStagingTexture)
{
renderFrame(false, false);
}
}
void OpenVROverlay::hideOverlay()
{
   if (mOverlayHandle == NULL)
      return;

   // Dashboard overlays are hidden by SteamVR itself; nothing to do for them.
   if (mOverlayType == OVERLAYTYPE_DASHBOARD)
      return;

   vr::VROverlay()->HideOverlay(mOverlayHandle);
}
bool OpenVROverlay::isOverlayVisible()
{
   // Without a live overlay handle we cannot be visible.
   return (mOverlayHandle != NULL) && vr::VROverlay()->IsOverlayVisible(mOverlayHandle);
}
bool OpenVROverlay::isOverlayHoverTarget()
{
   // Hover state only makes sense for an overlay that actually exists.
   return (mOverlayHandle != NULL) && vr::VROverlay()->IsHoverTargetOverlay(mOverlayHandle);
}
bool OpenVROverlay::isGamepadFocussed()
{
if (mOverlayHandle == NULL)
return false;
return vr::VROverlay()->GetGamepadFocusOverlay() == mOverlayHandle;
}
bool OpenVROverlay::isActiveDashboardOverlay()
{
   // Stub: always reports false until a real implementation exists.
   return false; // TODO WHERE DID I GET THIS FROM
}
MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const Point2F &pos)
{
   if (mOverlayHandle == NULL)
      return MatrixF::Identity;

   // Pack the overlay-space coordinate into OpenVR's vector type.
   vr::HmdVector2_t coord;
   coord.v[0] = pos.x;
   coord.v[1] = pos.y;

   vr::HmdMatrix34_t ovrXfm;
   if (vr::VROverlay()->GetTransformForOverlayCoordinates(mOverlayHandle, mTrackingOrigin, coord, &ovrXfm) != vr::VROverlayError_None)
      return MatrixF::Identity;

   // Convert OpenVR's affine matrix back into torque space for the caller.
   MatrixF vrSpaceMat(1);
   vrSpaceMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(ovrXfm);
   MatrixF torqueMat;
   OpenVRUtil::convertTransformFromOVR(vrSpaceMat, torqueMat);
   return torqueMat;
}
bool OpenVROverlay::castRay(const Point3F &origin, const Point3F &direction, RayInfo *info)
{
if (mOverlayHandle == NULL)
return false;
vr::VROverlayIntersectionParams_t params;
vr::VROverlayIntersectionResults_t result;
Point3F ovrOrigin = OpenVRUtil::convertPointToOVR(origin);
Point3F ovrDirection = OpenVRUtil::convertPointToOVR(direction);
params.eOrigin = mTrackingOrigin;
params.vSource.v[0] = ovrOrigin.x;
params.vSource.v[1] = ovrOrigin.y;
params.vSource.v[2] = ovrOrigin.z;
params.vDirection.v[0] = ovrDirection.x;
params.vDirection.v[1] = ovrDirection.y;
params.vDirection.v[2] = ovrDirection.z;
bool rayHit = vr::VROverlay()->ComputeOverlayIntersection(mOverlayHandle, &params, &result);
if (rayHit && info)
{
info->t = result.fDistance;
info->point = OpenVRUtil::convertPointFromOVR(result.vPoint); // TODO: need to transform this FROM vr-space
info->normal = OpenVRUtil::convertPointFromOVR(result.vNormal);
info->texCoord = Point2F(result.vUVs.v[0], result.vUVs.v[1]);
info->object = NULL;
info->userData = this;
}
return rayHit;
}
void OpenVROverlay::moveGamepadFocusToNeighbour()
{
   // Stub: gamepad focus navigation between overlays is not implemented yet.
}
void OpenVROverlay::handleOpenVREvents()
{
   // Pumps this overlay's OpenVR event queue, converting VR mouse and
   // keyboard events into torque input events for the canvas.
   if (mManualMouseHandling)
   {
      // tell OpenVR to make some events for us
      // NOTE(review): the loop bound k_unControllerStateAxisCount looks like a
      // copy-paste slip — tracked-device indices normally range over
      // k_unMaxTrackedDeviceCount. Confirm which constant was intended.
      for (vr::TrackedDeviceIndex_t unDeviceId = 1; unDeviceId < vr::k_unControllerStateAxisCount; unDeviceId++)
      {
         if (vr::VROverlay()->HandleControllerOverlayInteractionAsMouse(mOverlayHandle, unDeviceId))
         {
            break;
         }
      }
   }

   vr::VREvent_t vrEvent;
   while (vr::VROverlay()->PollNextOverlayEvent(mOverlayHandle, &vrEvent, sizeof(vrEvent)))
   {
      // Template input event; fields are overridden per event type below.
      InputEventInfo eventInfo;
      eventInfo.deviceType = MouseDeviceType;
      eventInfo.deviceInst = 0;
      eventInfo.objType = SI_AXIS;
      eventInfo.modifier = (InputModifiers)0;
      eventInfo.ascii = 0;

      //Con::printf("Overlay event %i", vrEvent.eventType);

      switch (vrEvent.eventType)
      {
      case vr::VREvent_MouseMove:
      {
         //Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y);
         // OpenVR reports normalized overlay coordinates; scale to canvas
         // pixels and flip Y to match GUI space.
         eventInfo.objType = SI_AXIS;
         eventInfo.objInst = SI_XAXIS;
         eventInfo.action = SI_MAKE;
         eventInfo.fValue = getExtent().x * vrEvent.data.mouse.x;
         processMouseEvent(eventInfo);

         eventInfo.objType = SI_AXIS;
         eventInfo.objInst = SI_YAXIS;
         eventInfo.action = SI_MAKE;
         eventInfo.fValue = getExtent().y * (1.0 - vrEvent.data.mouse.y);
         processMouseEvent(eventInfo);
      }
      break;
      case vr::VREvent_MouseButtonDown:
      {
         eventInfo.objType = SI_BUTTON;
         eventInfo.objInst = (InputObjectInstances)OpenVRUtil::convertOpenVRButtonToTorqueButton(vrEvent.data.mouse.button);
         eventInfo.action = SI_MAKE;
         eventInfo.fValue = 1.0f;
         processMouseEvent(eventInfo);
      }
      break;
      case vr::VREvent_MouseButtonUp:
      {
         eventInfo.objType = SI_BUTTON;
         eventInfo.objInst = (InputObjectInstances)OpenVRUtil::convertOpenVRButtonToTorqueButton(vrEvent.data.mouse.button);
         eventInfo.action = SI_BREAK;
         eventInfo.fValue = 0.0f;
         processMouseEvent(eventInfo);
      }
      break;
      case vr::VREvent_OverlayShown:
      {
         // Repaint when the overlay becomes visible.
         markDirty();
      }
      break;
      case vr::VREvent_Quit:
         // NOTE(review): asserting here aborts debug builds on a normal
         // SteamVR shutdown request — consider handling this gracefully.
         AssertFatal(false, "WTF is going on here");
         break;
      case vr::VREvent_KeyboardCharInput:
      case vr::VREvent_KeyboardDone:
         // uUserValue carries the GuiControl* passed to ShowKeyboardForOverlay.
         updateTextControl((GuiControl*)vrEvent.data.keyboard.uUserValue);
         break;
      }
   }

   if (mThumbOverlayHandle != vr::k_ulOverlayHandleInvalid)
   {
      // The dashboard thumb overlay only needs repaint notifications.
      while (vr::VROverlay()->PollNextOverlayEvent(mThumbOverlayHandle, &vrEvent, sizeof(vrEvent)))
      {
         switch (vrEvent.eventType)
         {
         case vr::VREvent_OverlayShown:
         {
            markDirty();
         }
         break;
         }
      }
   }
}
void OpenVROverlay::updateTextControl(GuiControl* ctrl)
{
   if (!ctrl)
      return;

   // Only text controls can receive the VR keyboard result.
   GuiTextCtrl* asText = dynamic_cast<GuiTextCtrl*>(ctrl);
   if (!asText)
      return;

   // Pull whatever the user typed on the system keyboard into the control.
   char buffer[GuiTextCtrl::MAX_STRING_LENGTH];
   vr::VROverlay()->GetKeyboardText(buffer, GuiTextCtrl::MAX_STRING_LENGTH);
   asText->setText(buffer);
}
void OpenVROverlay::onFrameRendered()
{
   // Called after the offscreen canvas renders a frame: resolves the render
   // target into a staging texture and submits it to the OpenVR compositor.
   vr::IVROverlay *overlay = vr::VROverlay();
   if (!overlay || !mOverlayHandle)
      return;

   updateOverlay();

   // (Re)allocate the staging texture if the target size changed.
   // CLEANUP: removed an unused local (`Point2I sz = ...getWidthHeight()`).
   Point2I desiredSize = mTarget->getSize();
   if (mStagingTexture.isNull() || mStagingTexture.getWidthHeight() != desiredSize)
   {
      mStagingTexture.set(desiredSize.x, desiredSize.y, mTargetFormat, &VRTextureProfile, "OpenVROverlay staging texture");
   }

   mTarget->resolveTo(mStagingTexture);

   // Wrap the native texture handle for the active graphics API.
   vr::Texture_t tex;
   if (GFX->getAdapterType() == Direct3D11)
   {
      tex = { (void*)static_cast<GFXD3D11TextureObject*>(mStagingTexture.getPointer())->getResource(), vr::API_DirectX, vr::ColorSpace_Auto };
   }
#ifdef TORQUE_OPENGL
   else if (GFX->getAdapterType() == OpenGL)
   {
      tex = { (void*)static_cast<GFXGLTextureObject*>(mStagingTexture.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Auto };
   }
#endif
   else
   {
      // Unsupported renderer: nothing we can submit to OpenVR.
      return;
   }

   //mStagingTexture->dumpToDisk("PNG", "D:\\test.png");

   vr::EVROverlayError err = overlay->SetOverlayTexture(mOverlayHandle, &tex);
   if (err != vr::VROverlayError_None)
   {
      Con::errorf("VR: Error setting overlay texture.");
   }

   //Con::printf("Overlay visible ? %s", vr::VROverlay()->IsOverlayVisible(mOverlayHandle) ? "YES" : "NO");
}
void OpenVROverlay::enableKeyboardTranslation()
{
   // Pops up the OpenVR system keyboard for the currently focussed text-edit
   // control, seeded with the control's current text.
   vr::IVROverlay *overlay = vr::VROverlay();
   if (!overlay || !mOverlayHandle)
      return;

   GuiTextEditCtrl* ctrl = dynamic_cast<GuiTextEditCtrl*>(getFirstResponder());
   if (ctrl)
   {
      // Use password entry mode when the control masks its text.
      vr::EGamepadTextInputMode inputMode = ctrl->isPasswordText() ? vr::k_EGamepadTextInputModePassword : vr::k_EGamepadTextInputModeNormal;
      char text[GuiTextCtrl::MAX_STRING_LENGTH + 1];
      ctrl->getText(text);
      // The control pointer rides along as user data so keyboard events can
      // be routed back to it (see updateTextControl / handleOpenVREvents).
      overlay->ShowKeyboardForOverlay(mOverlayHandle, inputMode, vr::k_EGamepadTextInputLineModeSingleLine, ctrl->getTooltip().c_str(), GuiTextCtrl::MAX_STRING_LENGTH, text, false, (uint64_t)ctrl);
   }
}
void OpenVROverlay::disableKeyboardTranslation()
{
vr::IVROverlay *overlay = vr::VROverlay();
if (!overlay || !mOverlayHandle)
return;
overlay->HideKeyboard();
}
void OpenVROverlay::setNativeAcceleratorsEnabled(bool enabled)
{
   // Intentional no-op for VR overlays.
}
// Script binding: %overlay.showOverlay(); makes the overlay visible in VR.
DefineEngineMethod(OpenVROverlay, showOverlay, void, (), , "")
{
   object->showOverlay();
}

// Script binding: %overlay.hideOverlay(); hides a non-dashboard overlay.
DefineEngineMethod(OpenVROverlay, hideOverlay, void, (), , "")
{
   object->hideOverlay();
}

View file

@ -0,0 +1,105 @@
#ifndef _OPENVROVERLAY_H_
#define _OPENVROVERLAY_H_
#ifndef _GUIOFFSCREENCANVAS_H_
#include "gui/core/guiOffscreenCanvas.h"
#endif
#ifndef _OPENVRDEVICE_H_
#include "platform/input/openVR/openVRProvider.h"
#endif
#ifndef _COLLISION_H_
#include "collision/collision.h"
#endif
typedef vr::VROverlayInputMethod OpenVROverlayInputMethod;
typedef vr::VROverlayTransformType OpenVROverlayTransformType;
typedef vr::EGamepadTextInputMode OpenVRGamepadTextInputMode;
typedef vr::EGamepadTextInputLineMode OpenVRGamepadTextInputLineMode;
typedef vr::ETrackingResult OpenVRTrackingResult;
typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin;
typedef vr::EOverlayDirection OpenVROverlayDirection;
typedef vr::EVRState OpenVRState;
/// Offscreen GUI canvas presented as an OpenVR overlay — either an in-world
/// quad or a SteamVR dashboard panel. Handles native overlay creation,
/// property sync, texture submission and VR mouse/keyboard input routing.
class OpenVROverlay : public GuiOffscreenCanvas
{
public:
   typedef GuiOffscreenCanvas Parent;

   enum OverlayType
   {
      OVERLAYTYPE_OVERLAY,   ///< Plain in-world overlay
      OVERLAYTYPE_DASHBOARD, ///< Dashboard panel (also gets a thumbnail overlay)
   };

   vr::VROverlayHandle_t mOverlayHandle;      ///< Native overlay handle
   vr::VROverlayHandle_t mThumbOverlayHandle; ///< Dashboard thumbnail handle

   // Desired OpenVR state
   U32 mOverlayFlags;                                ///< Bitmask mirrored onto vr::VROverlayFlags
   F32 mOverlayWidth;                                ///< Overlay width in meters
   vr::VROverlayTransformType mOverlayTransformType; ///< How mTransform is interpreted
   MatrixF mTransform;                               ///< Overlay placement transform
   vr::TrackedDeviceIndex_t mTransformDeviceIndex;   ///< Device for device-relative transforms
   String mTransformDeviceComponent;                 ///< Component name for component transforms
   vr::VROverlayInputMethod mInputMethod;            ///< Input method reported to OpenVR
   Point2F mMouseScale;                              ///< Scale applied to mouse coordinates
   vr::ETrackingUniverseOrigin mTrackingOrigin;      ///< Seated vs. standing tracking space
   vr::TrackedDeviceIndex_t mControllerDeviceIndex;  ///< Controller the overlay attaches to

   GFXTexHandle mStagingTexture; ///< Texture used by openvr

   ColorF mOverlayColor; ///< Overlay color; alpha is pushed to SetOverlayAlpha

   bool mOverlayTypeDirty; ///< Overlay type is dirty
   bool mOverlayDirty; ///< Overlay properties are dirty

   bool mManualMouseHandling; ///< Ask OpenVR to synthesize mouse events for us

   OverlayType mOverlayType; ///< Plain overlay or dashboard panel

   //
   OpenVROverlay();
   virtual ~OpenVROverlay();

   static void initPersistFields();

   DECLARE_CONOBJECT(OpenVROverlay);

   bool onAdd();
   void onRemove();

   void resetOverlay();
   void updateOverlay();

   void showOverlay();
   void hideOverlay();

   bool isOverlayVisible();
   bool isOverlayHoverTarget();

   bool isGamepadFocussed();
   bool isActiveDashboardOverlay();

   MatrixF getTransformForOverlayCoordinates(const Point2F &pos);
   bool castRay(const Point3F &origin, const Point3F &direction, RayInfo *info);

   void moveGamepadFocusToNeighbour();

   void handleOpenVREvents();
   void updateTextControl(GuiControl* ctrl);
   void onFrameRendered();

   virtual void enableKeyboardTranslation();
   virtual void disableKeyboardTranslation();
   virtual void setNativeAcceleratorsEnabled(bool enabled);
};
typedef OpenVROverlay::OverlayType OpenVROverlayType;
DefineEnumType(OpenVROverlayType);
#endif

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,388 @@
#ifndef _OPENVRDEVICE_H_
#define _OPENVRDEVICE_H_
#include "math/mQuat.h"
#include "math/mPoint4.h"
#include "math/util/frustum.h"
#include "core/util/tSingleton.h"
#include "gfx/gfxDevice.h"
#include "gfx/gfxVertexBuffer.h"
#include "gfx/gfxPrimitiveBuffer.h"
#include "gfx/gfxTarget.h"
#include "platform/input/IInputDevice.h"
#include "platform/input/event.h"
#include "platform/output/IDisplayDevice.h"
#include <openvr.h>
class OpenVRHMDDevice;
class OpenVROverlay;
class BaseMatInstance;
class SceneRenderState;
struct MeshRenderInst;
class Namespace;
class NamedTexTarget;
typedef vr::VROverlayInputMethod OpenVROverlayInputMethod;
typedef vr::VROverlayTransformType OpenVROverlayTransformType;
typedef vr::EGamepadTextInputMode OpenVRGamepadTextInputMode;
typedef vr::EGamepadTextInputLineMode OpenVRGamepadTextInputLineMode;
typedef vr::ETrackingResult OpenVRTrackingResult;
typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin;
typedef vr::EOverlayDirection OpenVROverlayDirection;
typedef vr::EVRState OpenVRState;
typedef vr::TrackedDeviceClass OpenVRTrackedDeviceClass;
DefineEnumType(OpenVROverlayInputMethod);
DefineEnumType(OpenVROverlayTransformType);
DefineEnumType(OpenVRGamepadTextInputMode);
DefineEnumType(OpenVRGamepadTextInputLineMode);
DefineEnumType(OpenVRTrackingResult);
DefineEnumType(OpenVRTrackingUniverseOrigin);
DefineEnumType(OpenVROverlayDirection);
DefineEnumType(OpenVRState);
DefineEnumType(OpenVRTrackedDeviceClass);
/// Helpers for converting between torque's coordinate conventions and
/// OpenVR's (see the component swizzles in the inline functions below).
namespace OpenVRUtil
{
   /// Convert a matrix in OVR space to torque space
   void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation);

   /// Convert a matrix in torque space to OVR space
   void convertTransformToOVR(const MatrixF& inRotation, MatrixF& outRotation);

   /// Converts vr::HmdMatrix34_t to a MatrixF
   MatrixF convertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat);

   /// Converts a MatrixF to a vr::HmdMatrix34_t
   void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat);

   /// Maps an OpenVR controller button id onto a torque input button code.
   U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton);

   /// Converts a point to OVR coords
   inline Point3F convertPointToOVR(const Point3F &point)
   {
      return Point3F(-point.x, -point.z, point.y);
   }

   /// Converts a point from OVR coords
   inline Point3F convertPointFromOVR(const Point3F &point)
   {
      return Point3F(-point.x, point.z, -point.y);
   }

   // Converts a point from OVR coords, from an input float array
   inline Point3F convertPointFromOVR(const vr::HmdVector3_t& v)
   {
      return Point3F(-v.v[0], v.v[2], -v.v[1]);
   }
};
/// Fixed-size ring of textures used to rotate VR output targets so one frame
/// can be consumed while the next is written.
template<int TEXSIZE> class VRTextureSet
{
public:
   static const int TextureCount = TEXSIZE;

   GFXTexHandle mTextures[TEXSIZE]; ///< Backing textures
   U32 mIndex;                      ///< Index of the current texture

   VRTextureSet() : mIndex(0)
   {
   }

   /// Allocates all textures with the given size, format and profile.
   void init(U32 width, U32 height, GFXFormat fmt, GFXTextureProfile *profile, const String &desc)
   {
      for (U32 i = 0; i < TextureCount; i++)
      {
         mTextures[i].set(width, height, fmt, profile, desc);
      }
   }

   /// Releases every texture handle in the set.
   void clear()
   {
      for (U32 i = 0; i < TextureCount; i++)
      {
         mTextures[i] = NULL;
      }
   }

   /// Steps to the next texture in the ring (wraps around).
   void advance()
   {
      mIndex = (mIndex + 1) % TextureCount;
   }

   /// Returns the current texture.
   GFXTexHandle& getTextureHandle()
   {
      return mTextures[mIndex];
   }
};
/// Simple class to handle rendering native OpenVR model data
class OpenVRRenderModel
{
public:
   typedef GFXVertexPNT VertexType;

   GFXVertexBufferHandle<VertexType> mVertexBuffer; ///< GPU vertex data
   GFXPrimitiveBufferHandle mPrimitiveBuffer;       ///< GPU index data
   BaseMatInstance* mMaterialInstance; ///< Material to use for rendering. NOTE:
   Box3F mLocalBox;                    ///< Model-space bounds

   OpenVRRenderModel() : mMaterialInstance(NULL)
   {
   }

   ~OpenVRRenderModel()
   {
      // Owns its material instance; the buffer handles clean up themselves.
      SAFE_DELETE(mMaterialInstance);
   }

   /// Returns the local bounding box transformed by mat.
   Box3F getWorldBox(MatrixF &mat)
   {
      Box3F ret = mLocalBox;
      mat.mul(ret);
      return ret;
   }

   /// Builds GPU buffers from a native OpenVR render model;
   /// materialName selects the material to render with.
   bool init(const vr::RenderModel_t & vrModel, StringTableEntry materialName);
   void draw(SceneRenderState *state, MeshRenderInst* renderInstance);
};
/// Stereo rendering state for the HMD: per-eye FOV/pose/viewports plus the
/// render and output textures handed to the compositor.
struct OpenVRRenderState
{
   vr::IVRSystem *mHMD; ///< Active HMD interface (not owned)

   FovPort mEyeFov[2];  ///< Per-eye field of view
   MatrixF mEyePose[2]; ///< Per-eye pose
   MatrixF mHMDPose;    ///< Head pose

   RectI mEyeViewport[2]; ///< Per-eye viewport within the stereo target

   GFXTextureTargetRef mStereoRT;

   GFXTexHandle mStereoRenderTexture;
   GFXTexHandle mStereoDepthTexture;

   VRTextureSet<4> mOutputEyeTextures; ///< Rotating output textures submitted to OpenVR

   GFXDevice::GFXDeviceRenderStyles mRenderMode;

   bool setupRenderTargets(GFXDevice::GFXDeviceRenderStyles mode);

   void renderPreview();

   void reset(vr::IVRSystem* hmd);
   void updateHMDProjection();
};
/// Display and input provider implementing OpenVR (SteamVR) support: HMD
/// stereo output, tracked-device input, overlay registration and native
/// render-model loading.
class OpenVRProvider : public IDisplayDevice, public IInputDevice
{
public:

   /// Bitmask describing which parts of a device's tracked state differ
   /// between updates.
   enum DataDifferences {
      DIFF_NONE = 0,
      DIFF_ROT = (1 << 0),
      DIFF_ROTAXISX = (1 << 1),
      DIFF_ROTAXISY = (1 << 2),
      DIFF_ACCEL = (1 << 3),
      DIFF_ANGVEL = (1 << 4),
      DIFF_MAG = (1 << 5),
      DIFF_POS = (1 << 6),
      DIFF_STATUS = (1 << 7),

      DIFF_ROTAXIS = (DIFF_ROTAXISX | DIFF_ROTAXISY),
      DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
   };

   /// Load record for a native OpenVR render model.
   struct LoadedRenderModel
   {
      StringTableEntry name;
      vr::RenderModel_t *vrModel;
      OpenVRRenderModel *model;
      vr::EVRRenderModelError modelError;
      S32 textureId;
      bool loadedTexture;
   };

   /// Load record for a native OpenVR render model texture.
   struct LoadedRenderTexture
   {
      U32 vrTextureId;
      vr::RenderModel_TextureMap_t *vrTexture;
      GFXTextureObject *texture;
      NamedTexTarget *targetTexture;
      vr::EVRRenderModelError textureError;
   };

   OpenVRProvider();
   ~OpenVRProvider();

   /// Signal fired for every OpenVR event processed by the provider.
   typedef Signal <void(const vr::VREvent_t &evt)> VREventSignal;
   VREventSignal& getVREventSignal() { return mVREventSignal; }

   static void staticInit();

   bool enable();
   bool disable();

   bool getActive() { return mHMD != NULL; }

   inline vr::IVRRenderModels* getRenderModels() { return mRenderModels; }

   /// @name Input handling
   /// {
   void buildInputCodeTable();
   virtual bool process();
   /// }

   /// @name Display handling
   /// {
   virtual bool providesFrameEyePose() const;
   virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const;
   virtual bool providesEyeOffsets() const;
   /// Returns eye offset not taking into account any position tracking info
   virtual void getEyeOffsets(Point3F *dest) const;
   virtual bool providesFovPorts() const;
   virtual void getFovPorts(FovPort *out) const;
   virtual void getStereoViewports(RectI *out) const;
   virtual void getStereoTargets(GFXTextureTarget **out) const;
   virtual void setDrawCanvas(GuiCanvas *canvas);
   virtual void setDrawMode(GFXDevice::GFXDeviceRenderStyles style);
   virtual void setCurrentConnection(GameConnection *connection);
   virtual GameConnection* getCurrentConnection();
   virtual GFXTexHandle getPreviewTexture();
   virtual void onStartFrame();
   virtual void onEndFrame();
   virtual void onEyeRendered(U32 index);
   virtual void setRoomTracking(bool room);
   bool _handleDeviceEvent(GFXDevice::GFXDeviceEventType evt);
   S32 getDisplayDeviceId() const;
   /// }

   /// @name OpenVR handling
   /// {
   void processVREvent(const vr::VREvent_t & event);
   void updateTrackedPoses();
   void submitInputChanges();
   void resetSensors();
   void mapDeviceToEvent(U32 deviceIdx, S32 eventIdx);
   void resetEventMap();
   IDevicePose getTrackedDevicePose(U32 idx);
   /// }

   /// @name Overlay registration
   /// {
   void registerOverlay(OpenVROverlay* overlay);
   void unregisterOverlay(OpenVROverlay* overlay);
   /// }

   /// @name Model loading
   /// {
   const S32 preloadRenderModel(StringTableEntry name);
   const S32 preloadRenderModelTexture(U32 index);
   bool getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed);
   bool getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed);
   bool getRenderModelTextureName(S32 idx, String &outName);
   void resetRenderModels();
   /// }

   /// @name Console API
   /// {
   OpenVROverlay *getGamepadFocusOverlay();
   void setOverlayNeighbour(vr::EOverlayDirection dir, OpenVROverlay *overlay);

   bool isDashboardVisible();
   void showDashboard(const char *overlayToShow);

   vr::TrackedDeviceIndex_t getPrimaryDashboardDevice();

   void setKeyboardTransformAbsolute(const MatrixF &xfm);
   void setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect);

   void getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList);
   StringTableEntry getControllerModel(U32 idx);
   /// }

   /// @name OpenVR state
   /// {
   vr::IVRSystem *mHMD;
   vr::IVRRenderModels *mRenderModels;
   String mDriver;
   String mDisplay;
   vr::TrackedDevicePose_t mTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
   IDevicePose mCurrentDevicePose[vr::k_unMaxTrackedDeviceCount];
   IDevicePose mPreviousInputTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
   U32 mValidPoseCount;

   vr::VRControllerState_t mCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
   vr::VRControllerState_t mPreviousCurrentControllerState[vr::k_unMaxTrackedDeviceCount];

   char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount];

   OpenVRRenderState mHMDRenderState;
   GFXAdapterLUID mLUID;

   vr::ETrackingUniverseOrigin mTrackingSpace;

   Vector<OpenVROverlay*> mOverlays;

   VREventSignal mVREventSignal;
   Namespace *mOpenVRNS;

   Vector<LoadedRenderModel> mLoadedModels;
   Vector<LoadedRenderTexture> mLoadedTextures;
   Map<StringTableEntry, S32> mLoadedModelLookup;
   Map<U32, S32> mLoadedTextureLookup;

   Map<U32, S32> mDeviceEventMap;
   /// }

   GuiCanvas* mDrawCanvas;
   GameConnection* mGameConnection;

   // Input event code tables, one entry per tracked device slot.
   static U32 OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount];

   static U32 OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount];

   static U32 OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount];
   static U32 OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount];

   /// @name HMD Rotation offset
   /// {
   static EulerF smHMDRotOffset;
   static F32 smHMDmvYaw;
   static F32 smHMDmvPitch;

   static bool smRotateYawWithMoveActions;
   /// }

public:
   // For ManagedSingleton.
   static const char* getSingletonName() { return "OpenVRProvider"; }
};
/// Returns the OpenVRProvider singleton.
#define OPENVR ManagedSingleton<OpenVRProvider>::instance()
#endif // _OPENVRDEVICE_H_

View file

@ -0,0 +1,981 @@
#include "platform/platform.h"
#include "platform/input/openVR/openVRTrackedObject.h"
#include "platform/input/openVR/openVRProvider.h"
#include "math/mathIO.h"
#include "scene/sceneRenderState.h"
#include "console/consoleTypes.h"
#include "core/stream/bitStream.h"
#include "core/resourceManager.h"
#include "materials/materialManager.h"
#include "materials/baseMatInstance.h"
#include "renderInstance/renderPassManager.h"
#include "lighting/lightQuery.h"
#include "console/engineAPI.h"
#include "gfx/gfxTextureManager.h"
#include "gfx/sim/debugDraw.h"
#include "gfx/gfxTransformSaver.h"
#include "environment/skyBox.h"
#include "collision/boxConvex.h"
#include "collision/concretePolyList.h"
#include "T3D/physics/physicsPlugin.h"
#include "T3D/physics/physicsCollision.h"
#include "T3D/physics/physicsBody.h"
#ifdef TORQUE_EXTENDED_MOVE
#include "T3D/gameBase/extended/extendedMove.h"
#endif
// Debug toggles: render controllers at the move-adjusted vs. raw tracked position.
bool OpenVRTrackedObject::smDebugControllerMovePosition = true;
bool OpenVRTrackedObject::smDebugControllerPosition = false;

// Object types controllers test against when colliding with the world.
static const U32 sCollisionMoveMask = (PlayerObjectType |
   StaticShapeObjectType | VehicleObjectType);

U32 OpenVRTrackedObject::sServerCollisionMask = sCollisionMoveMask; // ItemObjectType
U32 OpenVRTrackedObject::sClientCollisionMask = sCollisionMoveMask;
//-----------------------------------------------------------------------------
IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData);
OpenVRTrackedObjectData::OpenVRTrackedObjectData() :
   mShapeFile(NULL)
{
   // Default collision box (meters).
   // NOTE(review): dimensions assumed to approximate a controller grip —
   // confirm against the actual controller model.
   mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02);
   mCollisionBoxMax = Point3F(0.02, 0.05, 0.02);
}
OpenVRTrackedObjectData::~OpenVRTrackedObjectData()
{
   // mShape is a resource handle and releases itself.
}
bool OpenVRTrackedObjectData::onAdd()
{
   // No registration work beyond the parent datablock's.
   return Parent::onAdd();
}
bool OpenVRTrackedObjectData::preload(bool server, String &errorStr)
{
   // Loads client-side render resources for the datablock. Returns false
   // (with errorStr set) if a required resource fails to load.
   // BUGFIX: the original version fell off the end of a bool-returning
   // function (undefined behavior); every path now returns explicitly.
   if (!Parent::preload(server, errorStr))
      return false;

   if (!server)
   {
      // Only the client needs the controller shape for rendering.
      mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL;
      if (mShapeFile && mShapeFile[0] && !mShape)
      {
         errorStr = String::ToString("OpenVRTrackedObjectData::preload: unable to load shape: %s", mShapeFile);
         return false;
      }
   }

   return true;
}
void OpenVRTrackedObjectData::initPersistFields()
{
   addGroup("Render Components");
   addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model.");
   addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min");
   // BUGFIX: description was copy-pasted as "Box min" for the max corner.
   addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box max");
   endGroup("Render Components");

   Parent::initPersistFields();
}
void OpenVRTrackedObjectData::packData(BitStream* stream)
{
   // Send datablock fields to the client.
   // NOTE(review): mCollisionBoxMin/Max are not transmitted — confirm the
   // client is meant to rely on its own defaults.
   Parent::packData(stream);
   stream->writeString(mShapeFile);
}
void OpenVRTrackedObjectData::unpackData(BitStream* stream)
{
   // Read datablock fields on the client; order must mirror packData().
   Parent::unpackData(stream);
   mShapeFile = stream->readSTString();
}
//-----------------------------------------------------------------------------
IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject);
ConsoleDocClass(OpenVRTrackedObject,
"@brief Renders and handles interactions OpenVR controllers and tracked objects.\n\n"
"This class implements basic rendering and interactions with OpenVR controllers.\n\n"
"The object should be controlled by a player object. Controllers will be rendered at\n"
"the correct position regardless of the current transform of the object.\n"
"@ingroup OpenVR\n");
//-----------------------------------------------------------------------------
// Object setup and teardown
//-----------------------------------------------------------------------------
OpenVRTrackedObject::OpenVRTrackedObject() :
   mDataBlock(NULL),
   mShapeInstance(NULL),
   mBasicModel(NULL),
   mDeviceIndex(-1),       // -1 == no tracked device assigned yet
   mMappedMoveIndex(-1),
   mIgnoreParentRotation(true),
   mConvexList(new Convex()),
   mPhysicsRep(NULL)
{
   // Flag this object so that it will always
   // be sent across the network to clients
   mNetFlags.set(Ghostable | ScopeAlways);

   // Set it as a "static" object that casts shadows
   mTypeMask |= StaticObjectType | StaticShapeObjectType;

   // No pose until the provider reports a connected device.
   mPose.connected = false;
}
/// Destructor. Releases render models and the owned convex list.
/// mShapeInstance is deleted in updateRenderData() when replaced; its final
/// cleanup is presumably handled elsewhere - TODO confirm no leak on destroy.
OpenVRTrackedObject::~OpenVRTrackedObject()
{
   clearRenderData();
   delete mConvexList;
}
void OpenVRTrackedObject::updateRenderData()
{
clearRenderData();
if (!mDataBlock)
return;
// Are we using a model?
if (mDataBlock->mShape)
{
if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape)
{
delete mShapeInstance;
mShapeInstance = NULL;
}
if (!mShapeInstance)
{
mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject());
}
}
else
{
setupRenderDataFromModel(isClientObject());
}
}
/// Loads the native OpenVR render model(s) for this device.
///
/// Loads the basic whole-controller model, and optionally one model per
/// controller component (buttons, trigger, etc.) so each can be posed
/// independently.
///
/// @param loadComponentModels When true, also enumerate and load per-component
///                            render models via IVRRenderModels.
///
/// NOTE(review): the while-loops below busy-wait on the async OpenVR model
/// load until it completes or fails - this blocks the calling thread.
void OpenVRTrackedObject::setupRenderDataFromModel(bool loadComponentModels)
{
   clearRenderData();

   if (!OPENVR || !OPENVR->isEnabled())
      return;

   vr::IVRRenderModels *models = OPENVR->getRenderModels();
   if (!models)
      return;

   // Load the basic (single-mesh) model if we don't already have a shape.
   if (!mShapeInstance && mModelName && mModelName[0] != '\0')
   {
      bool failed = false;
      S32 idx = OPENVR->preloadRenderModel(mModelName);
      // Spin until the async load resolves (success or failure).
      while (!OPENVR->getRenderModel(idx, &mBasicModel, failed))
      {
         if (failed)
            break;
      }
   }

   if (loadComponentModels)
   {
      mRenderComponents.setSize(models->GetComponentCount(mModelName));

      for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
      {
         RenderModelSlot &slot = mRenderComponents[i];
         char buffer[1024];

         // Defaults: unmapped, unnamed, no model.
         slot.mappedNodeIdx = -1;
         slot.componentName = NULL;
         slot.nativeModel = NULL;

         U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer));
         if (result == 0)
            continue;

#ifdef DEBUG_CONTROLLER_MODELS
         Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer);
#endif

         slot.componentName = StringTable->insert(buffer, true);

         // Some components (e.g. attachment points) have no render model.
         result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer));
         if (result == 0)
         {
#ifdef DEBUG_CONTROLLER_MODELS
            Con::printf("Controller[%s] component %i NO MODEL", mModelName, i);
#endif
            continue;
         }

#ifdef DEBUG_CONTROLLER_MODELS
         Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName);
#endif

         bool failed = false;
         S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true));
         // Spin until this component's model is resolved.
         while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed))
         {
            if (failed)
               break;
         }
      }
   }
}
/// Drops all native render-model references (per-component slots and the
/// basic fallback model). The models themselves are owned by the OpenVR
/// provider, so no deletion happens here.
void OpenVRTrackedObject::clearRenderData()
{
   mRenderComponents.clear();
   mBasicModel = NULL;
}
//-----------------------------------------------------------------------------
// Object Editing
//-----------------------------------------------------------------------------
/// Exposes object fields and debug console variables.
void OpenVRTrackedObject::initPersistFields()
{
   // SceneObject already handles exposing the transform
   Parent::initPersistFields();

   // BUG FIX: "deviceIndex" was registered three times (copy-paste onto the
   // ends of the following two lines), and ignoreParentRotation carried the
   // wrong doc string.
   addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
   addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track");
   addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "If true, only the mount's position (not rotation) is used as the tracking origin");

   // Register the debug console variables exactly once.
   static bool conInit = false;
   if (!conInit)
   {
      Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition);
      Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition);
      conInit = true;
   }
}
/// Called after editor changes are applied; pushes the new state to ghosts.
void OpenVRTrackedObject::inspectPostApply()
{
   Parent::inspectPostApply();

   // Flag the network mask to send the updates
   // to the client object
   setMaskBits(UpdateMask);
}
/// Registers the object with the scene and establishes its bounding volume.
/// @return False if the parent onAdd fails.
bool OpenVRTrackedObject::onAdd()
{
   if (!Parent::onAdd())
      return false;

   // Set up a 1x1x1 bounding box as a placeholder before adding to the scene.
   mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f),
      Point3F(0.5f, 0.5f, 0.5f));
   resetWorldBox();

   // Add this object to the scene
   addToScene();

   if (mDataBlock)
   {
      // Shrink/grow to the datablock-specified collision extents.
      mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
      mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
      resetWorldBox();
   }
   else
   {
      // Without a datablock we can't know our extents - never get culled.
      setGlobalBounds();
   }

   return true;
}
/// Unregisters from the scene and releases render/physics resources.
void OpenVRTrackedObject::onRemove()
{
   // Remove this object from the scene
   removeFromScene();

   clearRenderData();
   SAFE_DELETE(mPhysicsRep);

   Parent::onRemove();
}
/// Rebuilds the kinematic trigger body representing this controller in the
/// active physics plugin (if any). Safe to call repeatedly; the previous
/// body is destroyed first.
void OpenVRTrackedObject::_updatePhysics()
{
   SAFE_DELETE(mPhysicsRep);

   if (!PHYSICSMGR)
      return;

   MatrixF offset(true);
   PhysicsCollision *colShape = PHYSICSMGR->createCollision();

   // BUG FIX: the NULL check previously happened only after addBox() had
   // already dereferenced colShape.
   if (!colShape)
      return;

   // Single box matching our object-space bounds, scaled to world size.
   colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset);

   PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? "server" : "client");
   mPhysicsRep = PHYSICSMGR->createBody();
   // Trigger + kinematic: we move it ourselves and only want overlap events.
   mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world);
   mPhysicsRep->setTransform(getTransform());
}
/// Binds a new datablock: refreshes bounds, render data and physics rep.
/// @param dptr   Incoming datablock; must be an OpenVRTrackedObjectData.
/// @param reload True when the datablock is being reloaded in place.
/// @return False if the datablock is the wrong type or the parent rejects it.
bool OpenVRTrackedObject::onNewDataBlock(GameBaseData *dptr, bool reload)
{
   mDataBlock = dynamic_cast<OpenVRTrackedObjectData*>(dptr);
   if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload))
      return false;

   // Setup the models
   clearRenderData();

   // Adopt the datablock's collision extents (replaces any global bounds
   // set in onAdd when no datablock was present).
   mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
   mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
   mGlobalBounds = false;
   resetWorldBox();

   _updatePhysics();

   scriptOnNewDataBlock();
   return true;
}
/// Designates the scene object this controller is interacting with.
/// @param object  Interaction target (may be NULL to clear).
/// @param holding True to engage held/pickup behavior for the target.
void OpenVRTrackedObject::setInteractObject(SceneObject* object, bool holding)
{
   mHoldInteractedObject = holding;
   mInteractObject = object;
}
/// Applies a new transform and schedules a network update for ghosts.
void OpenVRTrackedObject::setTransform(const MatrixF & mat)
{
   // SceneObject performs all of the actual matrix bookkeeping.
   Parent::setTransform(mat);

   // Dirty the network mask so the new transform reaches client ghosts.
   setMaskBits(UpdateMask);
}
/// Sets the native OpenVR render-model name. Server-authoritative: clients
/// receive the name through packUpdate, so calls on client objects are ignored.
void OpenVRTrackedObject::setModelName(String &modelName)
{
   if (isServerObject())
   {
      mModelName = StringTable->insert(modelName.c_str(), true);
      setMaskBits(UpdateMask);
   }
}
/// Serializes ghost state to a client.
/// Writes transform, scale, device/move indices and the model name whenever
/// UpdateMask is dirty. Must stay in lockstep with unpackUpdate().
U32 OpenVRTrackedObject::packUpdate(NetConnection *conn, U32 mask, BitStream *stream)
{
   // Allow the Parent to get a crack at writing its info
   U32 retMask = Parent::packUpdate(conn, mask, stream);

   // Write our transform information
   if (stream->writeFlag(mask & UpdateMask))
   {
      mathWrite(*stream, getTransform());
      mathWrite(*stream, getScale());
      // Indices are small; sent as 16-bit to save bandwidth.
      stream->write((S16)mDeviceIndex);
      stream->write((S16)mMappedMoveIndex);
      stream->writeString(mModelName);
   }

   return retMask;
}
/// Deserializes ghost state on the client; mirror of packUpdate().
/// Rebuilds render data since the model name may have changed.
void OpenVRTrackedObject::unpackUpdate(NetConnection *conn, BitStream *stream)
{
   // Let the Parent read any info it sent
   Parent::unpackUpdate(conn, stream);

   if (stream->readFlag()) // UpdateMask
   {
      mathRead(*stream, &mObjToWorld);
      mathRead(*stream, &mObjScale);

      setTransform(mObjToWorld);

      // Indices travel as S16 on the wire (see packUpdate).
      S16 readDeviceIndex;
      S16 readMoveIndex;
      stream->read(&readDeviceIndex);
      stream->read(&readMoveIndex);

      mDeviceIndex = readDeviceIndex;
      mMappedMoveIndex = readMoveIndex;
      mModelName = stream->readSTString();

      updateRenderData();
   }
}
/// Writes control-object state; no extra state beyond the parent's.
void OpenVRTrackedObject::writePacketData(GameConnection *conn, BitStream *stream)
{
   Parent::writePacketData(conn, stream);
}
/// Reads control-object state; mirror of writePacketData().
void OpenVRTrackedObject::readPacketData(GameConnection *conn, BitStream *stream)
{
   Parent::readPacketData(conn, stream);
}
/// Builds a matrix from the device's latest OpenVR pose (tracking space).
MatrixF OpenVRTrackedObject::getTrackedTransform()
{
   IDevicePose devicePose = OPENVR->getTrackedDevicePose(mDeviceIndex);

   MatrixF result(1);
   devicePose.orientation.setMatrix(&result);
   result.setPosition(devicePose.position);
   return result;
}
/// Builds a matrix from the most recently stored pose (mPose) rather than
/// querying OpenVR again.
MatrixF OpenVRTrackedObject::getLastTrackedTransform()
{
   MatrixF result(1);
   mPose.orientation.setMatrix(&result);
   result.setPosition(mPose.position);
   return result;
}
/// Returns the transform that maps the tracking origin into the world: the
/// mount transform when mounted (optionally position-only), identity otherwise.
MatrixF OpenVRTrackedObject::getBaseTrackingTransform()
{
   // Unmounted objects track relative to the world origin.
   if (!isMounted())
      return MatrixF(1);

   MatrixF mountMat;
   mMount.object->getMountTransform(mMount.node, mMount.xfm, &mountMat);

   if (mIgnoreParentRotation)
   {
      // Keep only the translation component of the mount transform.
      Point3F mountPos = mountMat.getPosition();
      mountMat = MatrixF(1);
      mountMat.setPosition(mountPos);
   }

   //mountMat.inverse();
   return mountMat;
}
/// Submits render instances for this controller each frame.
///
/// Three rendering paths, in priority order:
///   1. mShapeInstance - a Torque TSShape from the datablock.
///   2. mRenderComponents - per-component native OpenVR models, posed from
///      the live controller state.
///   3. mBasicModel - a single native OpenVR model.
/// Also draws debug visualizations when the $OpenVRTrackedObject::debug*
/// console variables are set.
void OpenVRTrackedObject::prepRenderImage(SceneRenderState *state)
{
   RenderPassManager *renderPass = state->getRenderPass();

   // debug rendering for now
   if (mDeviceIndex < 0)
      return;

   // Current pose
   IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
   IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0);

   // Nothing to draw if neither the live device nor the replicated pose is connected.
   if (!pose.connected && !mPose.connected)
      return;

   // Transform from tracking space into the world (mount transform).
   MatrixF offsetMat = getBaseTrackingTransform();
   //offsetMat.inverse();
   Point3F pos = offsetMat.getPosition();
   //Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z);

   const F32 CONTROLLER_SCALE = 0.1;

   // --- Debug: draw the raw OpenVR poses (HMD + controller) ---
   if (smDebugControllerPosition)
   {
      // Green = valid pose, red = stale/invalid.
      ColorI drawColor = ColorI::GREEN;
      if (!pose.valid)
      {
         drawColor = ColorI::RED;
      }

      // Draw Camera
      /*
      DisplayPose cameraPose;
      OPENVR->getFrameEyePose(&cameraPose, -1);

      Point3F cameraCenter(0);
      MatrixF cameraMat(1);
      cameraPose.orientation.setMatrix(&cameraMat);
      cameraMat.setPosition(cameraPose.position);
      cameraMat.mulP(cameraCenter);

      //DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN);
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box
      */

      // Draw Tracked HMD Pos
      Point3F hmdCenter(0, 0, 0);
      MatrixF hmdMat(1);
      hmdPose.orientation.setMatrix(&hmdMat);
      hmdMat.setPosition(hmdPose.position);
      hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
      hmdMat = offsetMat * hmdMat;
      hmdMat.mulP(hmdCenter);

      DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED);
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box

      // Draw Controller
      MatrixF mat(1);
      pose.orientation.setMatrix(&mat);
      mat.setPosition(pose.position);
      mat.inverse(); // same as HMD
      mat = offsetMat * mat;

      // Axis markers along the controller's local Y.
      Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
      Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
      Point3F middle(0, 0, 0);
      Point3F center(0, 0, 0);
      mat.mulP(center);

      //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);
      mat.mulP(middleStart);
      mat.mulP(middle);
      mat.mulP(middleEnd);

      char buffer[256];
      dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
      DebugDrawer::get()->drawText(middle, buffer);

      DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
      DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box
      DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
   }

   // --- Debug: draw the pose as replicated through the move system ---
   if (isClientObject() && smDebugControllerMovePosition)
   {
      MatrixF transform = getRenderTransform();
      transform.scale(mObjScale);
      DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform);

      // jamesu - grab server object pose for debugging
      OpenVRTrackedObject* tracked = static_cast<OpenVRTrackedObject*>(getServerObject());
      if (tracked)
      {
         mPose = tracked->mPose;
      }

      ColorI drawColor = ColorI::GREEN;
      if (!pose.valid)
      {
         drawColor = ColorI::RED;
      }

      // Draw Controller
      MatrixF mat(1);
      mPose.orientation.setMatrix(&mat);
      mat.setPosition(mPose.position);
      mat.inverse(); // same as HMD
      mat = offsetMat * mat;

      Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0);
      Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0);
      Point3F middle(0, 0, 0);
      Point3F center(0, 0, 0);
      mat.mulP(center);

      //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE);
      mat.mulP(middleStart);
      mat.mulP(middle);
      mat.mulP(middleEnd);

      char buffer[256];
      dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z);
      DebugDrawer::get()->drawText(middle, buffer);

      DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back
      DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward
      DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box
      DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE);
   }

   // Controller matrix base: trackedMat maps world -> tracked space,
   // invTrackedMat maps tracked space -> world.
   MatrixF trackedMat = getTrackedTransform();
   MatrixF invTrackedMat(1);

   invTrackedMat = trackedMat;
   invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
   invTrackedMat = getBaseTrackingTransform() * invTrackedMat;

   trackedMat = invTrackedMat;
   trackedMat.inverse();

   // Render the controllers, using either the render model or the shape

   if (mShapeInstance)
   {
      // Calculate the distance of this object from the camera
      Point3F cameraOffset = invTrackedMat.getPosition();
      cameraOffset -= state->getDiffuseCameraPosition();
      F32 dist = cameraOffset.len();
      if (dist < 0.01f)
         dist = 0.01f;

      // Set up the LOD for the shape
      F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z));

      mShapeInstance->setDetailFromDistance(state, dist * invScale);

      // Make sure we have a valid level of detail
      if (mShapeInstance->getCurrentDetail() < 0)
         return;

      // GFXTransformSaver is a handy helper class that restores
      // the current GFX matrices to their original values when
      // it goes out of scope at the end of the function
      GFXTransformSaver saver;

      // Set up our TS render state
      TSRenderState rdata;
      rdata.setSceneState(state);
      rdata.setFadeOverride(1.0f);

      // We might have some forward lit materials
      // so pass down a query to gather lights.
      LightQuery query;
      query.init(getWorldSphere());
      rdata.setLightQuery(&query);

      // Set the world matrix to the objects render transform
      MatrixF mat = trackedMat;
      mat.scale(mObjScale);
      GFX->setWorldMatrix(mat);

      // TODO: move the nodes about for components

      mShapeInstance->animate();
      mShapeInstance->render(rdata);
   }
   else if (mRenderComponents.size() > 0)
   {
      vr::IVRRenderModels *models = OPENVR->getRenderModels();
      if (!models)
         return;

      // Live controller state is needed to pose the individual components.
      vr::IVRSystem* vrs = vr::VRSystem();
      if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState))
      {
         return;
      }

      for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++)
      {
         RenderModelSlot slot = mRenderComponents[i];
         vr::RenderModel_ControllerMode_State_t modeState;
         vr::RenderModel_ComponentState_t componentState;

         modeState.bScrollWheelVisible = false;

         // Ask OpenVR for this component's current local transform.
         if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState))
         {
            MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();

            // Set our RenderInst as a standard mesh render
            ri->type = RenderPassManager::RIT_Mesh;

            // Calculate our sorting point
            if (state && slot.nativeModel)
            {
               // Calculate our sort point manually.
               const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat);
               ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
            }
            else
            {
               ri->sortDistSq = 0.0f;
            }

            MatrixF newTransform = trackedMat;
            MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel);
            MatrixF offComponentMat(1);
            OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat);

            newTransform = offComponentMat * newTransform;
            newTransform.inverse();

            //DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE);

            if (!slot.nativeModel)
               continue;
            // NOTE(review): component 0 is skipped here - presumably the
            // whole-controller "base" component; confirm intent.
            if (i < 1)
               continue;

            // Set up our transforms
            ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
            ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
            ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);

            // If our material needs lights then fill the RIs
            // light vector with the best lights.
            if (true)
            {
               LightQuery query;
               Point3F center(0, 0, 0);
               invTrackedMat.mulP(center);
               query.init(SphereF(center, 10.0f));
               query.getLights(ri->lights, 8);
            }

            // Draw model
            slot.nativeModel->draw(state, ri);
            state->getRenderPass()->addInst(ri);
         }
      }
   }
   else if (mBasicModel)
   {
      MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();

      // Set our RenderInst as a standard mesh render
      ri->type = RenderPassManager::RIT_Mesh;

      // Calculate our sorting point
      if (state)
      {
         // Calculate our sort point manually.
         const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat);
         ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition());
      }
      else
      {
         ri->sortDistSq = 0.0f;
      }

      MatrixF newTransform = invTrackedMat;

      // Set up our transforms
      ri->objectToWorld = renderPass->allocUniqueXform(newTransform);
      ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
      ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection);

      // If our material needs lights then fill the RIs
      // light vector with the best lights.
      if (true)
      {
         LightQuery query;
         Point3F center(0, 0, 0);
         invTrackedMat.mulP(center);
         query.init(SphereF(center, 10.0f));
         query.getLights(ri->lights, 8);
      }

      // Draw model
      mBasicModel->draw(state, ri);
      state->getRenderPass()->addInst(ri);
   }
}
/// Returns the type mask used for collision queries, which differs between
/// the server and client simulations.
U32 OpenVRTrackedObject::getCollisionMask()
{
   return isServerObject() ? sServerCollisionMask : sClientCollisionMask;
}
void OpenVRTrackedObject::updateWorkingCollisionSet()
{
const U32 mask = getCollisionMask();
Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale());
F32 len = (50) * TickSec;
F32 l = (len * 1.1) + 0.1; // fudge factor
convexBox.minExtents -= Point3F(l, l, l);
convexBox.maxExtents += Point3F(l, l, l);
disableCollision();
mConvexList->updateWorkingList(convexBox, mask);
enableCollision();
}
/// Applies the controller pose carried in an ExtendedMove to this object's
/// transform (and physics rep). Requires TORQUE_EXTENDED_MOVE; otherwise a
/// no-op.
/// @param move Incoming move; must actually be an ExtendedMove.
void OpenVRTrackedObject::updateMove(const Move *move)
{
   // Set transform based on move
#ifdef TORQUE_EXTENDED_MOVE
   const ExtendedMove* emove = dynamic_cast<const ExtendedMove*>(move);
   if (!emove)
      return;

   // Clamp the configured slot into the valid extended-move range.
   U32 emoveIndex = mMappedMoveIndex;
   if (emoveIndex >= ExtendedMove::MaxPositionsRotations)
      emoveIndex = 0;

   //IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex);
   //Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex);

   // Only quaternion (angle-axis) rotations are consumed here; euler-based
   // moves leave the previous pose untouched.
   if (!emove->EulerBasedRotation[emoveIndex])
   {
      AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]);

      // Update our pose based on the move info
      mPose.orientation = inRot;
      mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]);
      mPose.valid = true;
      mPose.connected = true;
   }

   // Set transform based on move pose
   MatrixF trackedMat(1);
   MatrixF invTrackedMat(1);

   mPose.orientation.setMatrix(&trackedMat);
   trackedMat.setPosition(mPose.position);

   invTrackedMat = trackedMat;
   invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos)
   invTrackedMat = getBaseTrackingTransform() * invTrackedMat;

   trackedMat = invTrackedMat;
   trackedMat.inverse();

   // Bypass our own setTransform override so UpdateMask isn't dirtied
   // every tick for pose-driven movement.
   SceneObject::setTransform(invTrackedMat);
   if (mPhysicsRep)
      mPhysicsRep->setTransform(invTrackedMat);
#endif
}
void OpenVRTrackedObject::processTick(const Move *move)
{
// Perform collision checks
if (isServerObject())
{
updateMove(move);
if (!mPhysicsRep)
{
updateWorkingCollisionSet();
}
}
Parent::processTick(move);
}
/// Client-side interpolation between ticks; no extra work beyond the parent.
void OpenVRTrackedObject::interpolateTick(F32 delta)
{
   // Set latest transform
   Parent::interpolateTick(delta);
}
/// Per-frame client time advance; no extra work beyond the parent.
void OpenVRTrackedObject::advanceTime(F32 dt)
{
   Parent::advanceTime(dt);
}
/// Ray test against this object's object-space bounding box using a
/// per-axis slab test. Only hits when the tracked pose is connected and
/// valid.
///
/// @param start Ray start, in object space (SceneObject has already
///              transformed it) - presumably; TODO confirm against callers.
/// @param end   Ray end.
/// @param info  Filled with hit point, normal, t and material on success.
/// @return True if the ray intersects the box.
bool OpenVRTrackedObject::castRay(const Point3F &start, const Point3F &end, RayInfo* info)
{
   if (!mPose.connected || !mPose.valid)
      return false;

   // Collide against bounding box.
   // fst/fet track the entry/exit parameters accumulated across all 3 axes.
   F32 st, et, fst = 0.0f, fet = 1.0f;
   F32 *bmin = &mObjBox.minExtents.x;
   F32 *bmax = &mObjBox.maxExtents.x;
   F32 const *si = &start.x;
   F32 const *ei = &end.x;

   // Walk x, y, z via pointer increments over the Point3F members.
   for (S32 i = 0; i < 3; i++) {
      if (*si < *ei) {
         // Ray travels in +axis direction on this component.
         if (*si > *bmax || *ei < *bmin)
            return false;
         F32 di = *ei - *si;
         st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f;
         et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f;
      }
      else {
         // Ray travels in -axis direction on this component.
         if (*ei > *bmax || *si < *bmin)
            return false;
         F32 di = *ei - *si;
         st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f;
         et = (*ei < *bmin) ? (*bmin - *si) / di : 1.0f;
      }
      if (st > fst) fst = st;
      if (et < fet) fet = et;
      // Intervals no longer overlap -> the ray misses the box.
      if (fet < fst)
         return false;
      bmin++; bmax++;
      si++; ei++;
   }

   // NOTE(review): the reported normal is the reversed ray direction rotated
   // into world space, not the true face normal.
   info->normal = start - end;
   info->normal.normalizeSafe();
   getTransform().mulV(info->normal);

   info->t = fst;
   info->object = this;
   info->point.interpolate(start, end, fst);
   info->material = 0;
   return true;
}
/// Contributes a box convex covering our bounds to another object's working
/// list, reusing an existing convex for this object when one is present.
/// @param box    Query region in world space.
/// @param convex The requesting convex whose working list we append to.
void OpenVRTrackedObject::buildConvex(const Box3F& box, Convex* convex)
{
   // These should really come out of a pool
   mConvexList->collectGarbage();

   // Bring the query box into object space before overlap testing.
   Box3F realBox = box;
   mWorldToObj.mul(realBox);
   realBox.minExtents.convolveInverse(mObjScale);
   realBox.maxExtents.convolveInverse(mObjScale);

   if (realBox.isOverlapped(getObjBox()) == false)
      return;

   // Just return a box convex for the entire shape...
   // First see if the requester already holds a convex of ours.
   Convex* cc = 0;
   CollisionWorkingList& wl = convex->getWorkingList();
   for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) {
      if (itr->mConvex->getType() == BoxConvexType &&
         itr->mConvex->getObject() == this) {
         cc = itr->mConvex;
         break;
      }
   }
   if (cc)
      return;

   // Create a new convex.
   BoxConvex* cp = new BoxConvex;
   mConvexList->registerObject(cp);
   convex->addToWorkingList(cp);
   cp->init(this);

   // Box convex is described by its center and half-extents.
   mObjBox.getCenter(&cp->mCenter);
   cp->mSize.x = mObjBox.len_x() / 2.0f;
   cp->mSize.y = mObjBox.len_y() / 2.0f;
   cp->mSize.z = mObjBox.len_z() / 2.0f;
}
/// Overlap test against another scene object - not yet implemented, so no
/// overlap is ever reported.
bool OpenVRTrackedObject::testObject(SceneObject* enter)
{
   return false; // TODO
}
// Script binding: forwards to OpenVRTrackedObject::setModelName (server-authoritative).
DefineEngineMethod(OpenVRTrackedObject, setModelName, void, (String modelName),, "Set model name. Typically you should do this from the client to update the server representation.")
{
   object->setModelName(modelName);
}

View file

@ -0,0 +1,155 @@
#ifndef _OPENVR_TRACKED_OBJECT_H_
#define _OPENVR_TRACKED_OBJECT_H_
#ifndef _GAMEBASE_H_
#include "T3D/gameBase/gameBase.h"
#endif
#ifndef _GFXVERTEXBUFFER_H_
#include "gfx/gfxVertexBuffer.h"
#endif
#ifndef _GFXPRIMITIVEBUFFER_H_
#include "gfx/gfxPrimitiveBuffer.h"
#endif
#ifndef _TSSHAPEINSTANCE_H_
#include "ts/tsShapeInstance.h"
#endif
#include "collision/earlyOutPolyList.h"
#include <openvr.h>
class BaseMatInstance;
class OpenVRRenderModel;
class PhysicsBody;
/// Datablock for OpenVRTrackedObject: render shape and collision extents.
class OpenVRTrackedObjectData : public GameBaseData {
public:
   typedef GameBaseData Parent;

   StringTableEntry mShapeFile; ///< Optional shape filename (overrides the native render model)
   Resource<TSShape> mShape; ///< Torque model

   Point3F mCollisionBoxMin; ///< Object-space collision box minimum extents
   Point3F mCollisionBoxMax; ///< Object-space collision box maximum extents

public:

   OpenVRTrackedObjectData();
   ~OpenVRTrackedObjectData();

   DECLARE_CONOBJECT(OpenVRTrackedObjectData);

   bool onAdd();
   bool preload(bool server, String &errorStr);

   static void initPersistFields();

   virtual void packData(BitStream* stream);
   virtual void unpackData(BitStream* stream);
};
/// Implements a GameObject which tracks an OpenVR controller
/// Implements a GameObject which tracks an OpenVR controller
class OpenVRTrackedObject : public GameBase
{
   typedef GameBase Parent;

   /// Network dirty bits.
   enum MaskBits
   {
      UpdateMask = Parent::NextFreeMask << 0, ///< Transform / indices / model name changed
      NextFreeMask = Parent::NextFreeMask << 1
   };

   /// One native OpenVR render model per controller component.
   struct RenderModelSlot
   {
      StringTableEntry componentName; ///< Component name
      S16 mappedNodeIdx; ///< Mapped node idx in mShape
      OpenVRRenderModel *nativeModel; ///< Native model
   };

   OpenVRTrackedObjectData *mDataBlock; ///< Bound datablock (may be NULL)

   /// @name Rendering
   /// {
   TSShapeInstance *mShapeInstance; ///< Shape used to render controller (uses native model otherwise)
   StringTableEntry mModelName; ///< OpenVR render-model name for this device
   OpenVRRenderModel *mBasicModel; ///< Basic model
   Vector<RenderModelSlot> mRenderComponents; ///< Per-component models (preferred over mBasicModel)
   /// }

   S32 mDeviceIndex; ///< Controller idx in openvr (for direct updating)
   S32 mMappedMoveIndex; ///< Movemanager move index for rotation

   vr::VRControllerState_t mCurrentControllerState; ///< Latest polled controller input state
   vr::VRControllerState_t mPreviousControllerState; ///< Previous input state (for edge detection)

   IDevicePose mPose; ///< Current openvr pose data, or reconstructed data from the client

   Convex* mConvexList; ///< Owned convex working list (deleted in dtor)
   EarlyOutPolyList mClippedList;
   PhysicsBody *mPhysicsRep; ///< Kinematic trigger body (NULL without a physics plugin)

   SimObjectPtr<SceneObject> mCollisionObject; ///< Object we're currently colliding with
   SimObjectPtr<SceneObject> mInteractObject; ///< Object we've designated as important to interact with

   bool mHoldInteractedObject; ///< Performs pickup logic with mInteractObject
   bool mIgnoreParentRotation; ///< Ignores the rotation of the parent object

   static bool smDebugControllerPosition; ///< Shows latest controller position in DebugDrawer
   static bool smDebugControllerMovePosition; ///< Shows move position in DebugDrawer

   static U32 sServerCollisionMask; ///< Collision mask used on the server sim
   static U32 sClientCollisionMask; ///< Collision mask used on the client sim

public:
   OpenVRTrackedObject();
   virtual ~OpenVRTrackedObject();

   /// @name Render model management
   /// {
   void updateRenderData();
   void setupRenderDataFromModel(bool loadComponentModels);
   void clearRenderData();
   /// }

   DECLARE_CONOBJECT(OpenVRTrackedObject);

   static void initPersistFields();

   virtual void inspectPostApply();

   bool onAdd();
   void onRemove();

   void _updatePhysics();
   bool onNewDataBlock(GameBaseData *dptr, bool reload);

   void setInteractObject(SceneObject* object, bool holding);

   void setTransform(const MatrixF &mat);
   void setModelName(String &modelName);

   /// @name Networking
   /// {
   U32 packUpdate(NetConnection *conn, U32 mask, BitStream *stream);
   void unpackUpdate(NetConnection *conn, BitStream *stream);

   void writePacketData(GameConnection *conn, BitStream *stream);
   void readPacketData(GameConnection *conn, BitStream *stream);
   /// }

   void prepRenderImage(SceneRenderState *state);

   /// @name Tracking transforms
   /// {
   MatrixF getTrackedTransform();
   MatrixF getLastTrackedTransform();
   MatrixF getBaseTrackingTransform();
   /// }

   U32 getCollisionMask();
   void updateWorkingCollisionSet();

   // Time management
   void updateMove(const Move *move);
   void processTick(const Move *move);
   void interpolateTick(F32 delta);
   void advanceTime(F32 dt);

   // Collision
   bool castRay(const Point3F &start, const Point3F &end, RayInfo* info);
   void buildConvex(const Box3F& box, Convex* convex);
   bool testObject(SceneObject* enter);
};
#endif // _OPENVR_TRACKED_OBJECT_H_

View file

@ -34,15 +34,30 @@ class GuiCanvas;
/// Defines the basic display pose common to most display devices
typedef struct DisplayPose
{
EulerF orientation; /// Direction device is facing
QuatF orientation; /// Direction device is facing
Point3F position; /// Relative position of device in view space
Point3F velocity;
Point3F angularVelocity;
#ifdef DEBUG_DISPLAY_POSE
MatrixF actualMatrix;
MatrixF originalMatrix;
#endif
U32 state; /// Generic state
bool valid; /// Pose set
bool connected; /// Device connected
} IDevicePose;
class IDisplayDevice
{
public:
virtual bool providesFrameEyePose() const = 0;
virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const = 0;
/// Get a display pose for the specified eye, or the HMD if eyeId is -1.
virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const = 0;
virtual bool providesEyeOffsets() const = 0;
/// Returns eye offset not taking into account any position tracking info
@ -51,18 +66,19 @@ public:
virtual bool providesFovPorts() const = 0;
virtual void getFovPorts(FovPort *out) const = 0;
virtual bool providesProjectionOffset() const = 0;
virtual const Point2F& getProjectionOffset() const = 0;
virtual void getStereoViewports(RectI *out) const = 0;
virtual void getStereoTargets(GFXTextureTarget **out) const = 0;
virtual void setDrawCanvas(GuiCanvas *canvas) = 0;
virtual void setDrawMode(GFXDevice::GFXDeviceRenderStyles style) = 0;
virtual void setCurrentConnection(GameConnection *connection) = 0;
virtual GameConnection* getCurrentConnection() = 0;
virtual void onStartFrame() = 0;
/// Returns a texture handle representing a preview of the composited VR view
virtual GFXTexHandle getPreviewTexture() = 0;
};
#endif // _IDISPLAYDEVICE_H_

View file

@ -154,7 +154,6 @@ GFX_ImplementTextureProfile( VRTextureProfile,
GFX_ImplementTextureProfile( VRDepthProfile,
GFXTextureProfile::DiffuseMap,
GFXTextureProfile::PreserveSize |
GFXTextureProfile::RenderTarget |
GFXTextureProfile::NoMipmap |
GFXTextureProfile::ZTarget,
GFXTextureProfile::NONE );

View file

@ -127,6 +127,7 @@ public:
const PFXFrameState &getFrameState() const { return mFrameState[mFrameStateSwitch]; }
const PFXFrameState &getLastFrameState() const { return mFrameState[!mFrameStateSwitch]; }
void setFrameState(const PFXFrameState& newState) { mFrameState[mFrameStateSwitch] = newState; }
void setFrameMatrices( const MatrixF &worldToCamera, const MatrixF &cameraToScreen );
// For ManagedSingleton.

View file

@ -28,6 +28,7 @@
#include "console/consoleTypes.h"
#include "core/tAlgorithm.h"
#include "math/mMathFn.h"
#include "math/mathUtils.h"
#include "T3D/gameBase/gameConnection.h"
#include "ts/tsShapeInstance.h"
#include "gui/3d/guiTSControl.h"
@ -94,9 +95,9 @@ ReflectionManager::ReflectionManager()
void ReflectionManager::initConsole()
{
Con::addVariable( "$pref::Reflect::refractTexScale", TypeF32, &ReflectionManager::smRefractTexScale, "RefractTex has dimensions equal to the active render target scaled in both x and y by this float.\n"
"@ingroup Rendering");
"@ingroup Rendering");
Con::addVariable( "$pref::Reflect::frameLimitMS", TypeS32, &ReflectionManager::smFrameReflectionMS, "ReflectionManager tries not to spend more than this amount of time updating reflections per frame.\n"
"@ingroup Rendering");
"@ingroup Rendering");
}
ReflectionManager::~ReflectionManager()
@ -134,12 +135,49 @@ void ReflectionManager::update( F32 timeSlice,
// Setup a culler for testing the
// visibility of reflectors.
Frustum culler;
culler.set( false,
query.fov,
(F32)resolution.x / (F32)resolution.y,
query.nearPlane,
query.farPlane,
query.cameraMatrix );
// jamesu - normally we just need a frustum which covers the current ports, however for SBS mode
// we need something which covers both viewports.
S32 stereoTarget = GFX->getCurrentStereoTarget();
if (stereoTarget != -1)
{
// In this case we're rendering in stereo using a specific eye
MathUtils::makeFovPortFrustum(&culler, false, query.nearPlane, query.farPlane, query.fovPort[stereoTarget], query.headMatrix);
}
else if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide)
{
// Calculate an ideal culling size here, we'll just assume double fov based on the first fovport based on
// the head position.
FovPort port = query.fovPort[0];
F32 leftSize = query.nearPlane * port.leftTan;
F32 rightSize = query.nearPlane * port.rightTan;
F32 upSize = query.nearPlane * port.upTan;
F32 downSize = query.nearPlane * port.downTan;
F32 left = -leftSize;
F32 right = rightSize;
F32 top = upSize;
F32 bottom = -downSize;
F32 fovInRadians = mAtan2((top - bottom) / 2.0f, query.nearPlane) * 3.0f;
culler.set(false,
fovInRadians,
(F32)(query.stereoViewports[0].extent.x + query.stereoViewports[1].extent.x) / (F32)query.stereoViewports[0].extent.y,
query.nearPlane,
query.farPlane,
query.headMatrix);
}
else
{
// Normal culling
culler.set(false,
query.fov,
(F32)resolution.x / (F32)resolution.y,
query.nearPlane,
query.farPlane,
query.cameraMatrix);
}
// Manipulate the frustum for tiled screenshots
const bool screenShotMode = gScreenShot && gScreenShot->isPending();
@ -159,6 +197,7 @@ void ReflectionManager::update( F32 timeSlice,
refparams.viewportExtent = resolution;
refparams.culler = culler;
refparams.startOfUpdateMs = startOfUpdateMs;
refparams.eyeId = stereoTarget;
// Update the reflection score.
ReflectorList::iterator reflectorIter = mReflectors.begin();
@ -173,6 +212,7 @@ void ReflectionManager::update( F32 timeSlice,
mTimer->getElapsedMs();
mTimer->reset();
U32 numUpdated = 0;
U32 currentTarget = stereoTarget >= 0 ? stereoTarget : 0;
reflectorIter = mReflectors.begin();
for ( ; reflectorIter != mReflectors.end(); reflectorIter++ )
{
@ -182,7 +222,12 @@ void ReflectionManager::update( F32 timeSlice,
break;
(*reflectorIter)->updateReflection( refparams );
(*reflectorIter)->lastUpdateMs = startOfUpdateMs;
if (stereoTarget != 0) // only update MS if we're not rendering the left eye in separate mode
{
(*reflectorIter)->lastUpdateMs = startOfUpdateMs;
}
numUpdated++;
// If we run out of update time then stop.

View file

@ -39,6 +39,7 @@
#include "math/mathUtils.h"
#include "math/util/frustum.h"
#include "gfx/screenshot.h"
#include "postFx/postEffectManager.h"
extern ColorI gCanvasClearColor;
@ -418,7 +419,7 @@ void CubeReflector::updateFace( const ReflectParams &params, U32 faceidx )
);
reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix );
reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix );
// render scene
LIGHTMGR->registerGlobalLights( &reflectRenderState.getCullingFrustum(), false );
@ -532,31 +533,48 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
texDim = getMin( texDim, params.viewportExtent.x );
texDim = getMin( texDim, params.viewportExtent.y );
bool texResize = ( texDim != mLastTexSize );
mLastTexSize = texDim;
S32 currentTarget = params.eyeId >= 0 ? params.eyeId : 0;
const Point2I texSize( texDim, texDim );
const Point2I texSize = Point2I(texDim, texDim);
bool texResize = (texSize != mLastTexSize);
mLastTexSize = texSize;
if ( texResize ||
reflectTex.isNull() ||
innerReflectTex[currentTarget].isNull() ||
innerReflectTex[currentTarget]->getSize() != texSize ||
reflectTex->getFormat() != REFLECTMGR->getReflectFormat() )
{
reflectTex = REFLECTMGR->allocRenderTarget( texSize );
depthBuff = LightShadowMap::_getDepthTarget( texSize.x, texSize.y );
innerReflectTex[currentTarget] = REFLECTMGR->allocRenderTarget( texSize );
}
if ( texResize || depthBuff.isNull() )
{
depthBuff = LightShadowMap::_getDepthTarget(texSize.x, texSize.y);
}
reflectTex = innerReflectTex[currentTarget];
// store current matrices
GFXTransformSaver saver;
Point2I viewport(params.viewportExtent);
if(GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide)
{
viewport.x *= 0.5f;
}
F32 aspectRatio = F32( viewport.x ) / F32( viewport.y );
Frustum frustum;
frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane);
S32 stereoTarget = GFX->getCurrentStereoTarget();
if (stereoTarget != -1)
{
MathUtils::makeFovPortFrustum(&frustum, false, params.query->nearPlane, params.query->farPlane, params.query->fovPort[stereoTarget]);
}
else
{
Point2I viewport(params.viewportExtent);
if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide)
{
viewport.x *= 0.5f;
}
F32 aspectRatio = F32(viewport.x) / F32(viewport.y);
frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane);
}
// Manipulate the frustum for tiled screenshots
const bool screenShotMode = gScreenShot && gScreenShot->isPending();
@ -578,10 +596,10 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
if(reflectTarget.isNull())
reflectTarget = GFX->allocRenderToTextureTarget();
reflectTarget->attachTexture( GFXTextureTarget::Color0, reflectTex );
reflectTarget->attachTexture( GFXTextureTarget::Color0, innerReflectTex[currentTarget] );
reflectTarget->attachTexture( GFXTextureTarget::DepthStencil, depthBuff );
GFX->pushActiveRenderTarget();
GFX->setActiveRenderTarget( reflectTarget );
GFX->setActiveRenderTarget( reflectTarget );
U32 objTypeFlag = -1;
SceneCameraState reflectCameraState = SceneCameraState::fromGFX();
@ -603,10 +621,21 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
{
// Store previous values
RectI originalVP = GFX->getViewport();
MatrixF origNonClipProjection = gClientSceneGraph->getNonClipProjection();
PFXFrameState origPFXState = PFXMGR->getFrameState();
Point2F projOffset = GFX->getCurrentProjectionOffset();
const FovPort *currentFovPort = GFX->getStereoFovPort();
MatrixF inverseEyeTransforms[2];
const FovPort *currentFovPort = params.query->fovPort;
MatrixF inverseEyeTransforms[2];
Frustum gfxFrustum;
// Calculate viewport based on texture size
RectI stereoViewports[2];
stereoViewports[0] = params.query->stereoViewports[0];
stereoViewports[1] = params.query->stereoViewports[1];
stereoViewports[0].extent.x = stereoViewports[1].extent.x = texSize.x / 2;
stereoViewports[0].extent.y = stereoViewports[1].extent.y = texSize.y;
stereoViewports[0].point.x = 0;
stereoViewports[1].point.x = stereoViewports[0].extent.x;
// Calculate world transforms for eyes
inverseEyeTransforms[0] = params.query->eyeTransforms[0];
@ -614,50 +643,64 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
inverseEyeTransforms[0].inverse();
inverseEyeTransforms[1].inverse();
Frustum originalFrustum = GFX->getFrustum();
//
// Render left half of display
GFX->activateStereoTarget(0);
GFX->setWorldMatrix(params.query->eyeTransforms[0]);
//
Frustum gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], inverseEyeTransforms[0]);
GFX->setViewport(stereoViewports[0]);
GFX->setCurrentStereoTarget(0);
MathUtils::makeFovPortFrustum(&gfxFrustum, params.query->ortho, params.query->nearPlane, params.query->farPlane, params.query->fovPort[0]);
gfxFrustum.update();
GFX->setFrustum(gfxFrustum);
setGFXMatrices( params.query->eyeTransforms[0] );
SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
SceneRenderState renderStateLeft( gClientSceneGraph, SPT_Reflect, cameraStateLeft );
SceneRenderState renderStateLeft
(
gClientSceneGraph,
SPT_Reflect,
SceneCameraState::fromGFX()
);
renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
renderStateLeft.setSceneRenderField(0);
renderStateLeft.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
renderStateLeft.setDiffuseCameraTransform( params.query->eyeTransforms[0] );
renderStateLeft.setDiffuseCameraTransform(params.query->headMatrix);
//renderStateLeft.disableAdvancedLightingBins(true);
gClientSceneGraph->renderSceneNoLights( &renderStateLeft, objTypeFlag );
//
// Render right half of display
GFX->activateStereoTarget(1);
GFX->setWorldMatrix(params.query->eyeTransforms[1]);
//
gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], inverseEyeTransforms[1]);
GFX->setViewport(stereoViewports[1]);
GFX->setCurrentStereoTarget(1);
MathUtils::makeFovPortFrustum(&gfxFrustum, params.query->ortho, params.query->nearPlane, params.query->farPlane, params.query->fovPort[1]);
gfxFrustum.update();
GFX->setFrustum(gfxFrustum);
setGFXMatrices( params.query->eyeTransforms[1] );
SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
SceneRenderState renderStateRight( gClientSceneGraph, SPT_Reflect, cameraStateRight );
SceneRenderState renderStateRight
(
gClientSceneGraph,
SPT_Reflect,
SceneCameraState::fromGFX()
);
renderStateRight.setSceneRenderStyle(SRS_SideBySide);
renderStateRight.setSceneRenderField(1);
renderStateRight.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
renderStateRight.setDiffuseCameraTransform( params.query->eyeTransforms[1] );
renderStateRight.disableAdvancedLightingBins(true);
renderStateRight.setDiffuseCameraTransform( params.query->headMatrix );
//renderStateRight.disableAdvancedLightingBins(true);
gClientSceneGraph->renderSceneNoLights( &renderStateRight, objTypeFlag );
// Restore previous values
GFX->setFrustum(gfxFrustum);
GFX->setFrustum(frustum);
GFX->setViewport(originalVP);
gClientSceneGraph->setNonClipProjection(origNonClipProjection);
PFXMGR->setFrameState(origPFXState);
GFX->setCurrentStereoTarget(-1);
}
else
{
@ -669,7 +712,7 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
);
reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix );
reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix );
gClientSceneGraph->renderSceneNoLights( &reflectRenderState, objTypeFlag );
}
@ -680,6 +723,14 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
reflectTarget->resolve();
GFX->popActiveRenderTarget();
#ifdef DEBUG_REFLECT_TEX
static U32 reflectStage = 0;
char buf[128]; dSprintf(buf, 128, "F:\\REFLECT-OUT%i.PNG", reflectStage);
//reflectTex->dumpToDisk("PNG", buf);
reflectStage++;
if (reflectStage > 1) reflectStage = 0;
#endif
// Restore detail adjust amount.
TSShapeInstance::smDetailAdjust = detailAdjustBackup;
@ -793,7 +844,7 @@ MatrixF PlaneReflector::getFrustumClipProj( MatrixF &modelview )
// as (sgn(clipPlane.x), sgn(clipPlane.y), 1, 1) and
// transform it into camera space by multiplying it
// by the inverse of the projection matrix
Vector4F q;
Vector4F q;
q.x = sgn(clipPlane.x) / proj(0,0);
q.y = sgn(clipPlane.y) / proj(1,1);
q.z = -1.0F;

View file

@ -53,6 +53,7 @@ struct ReflectParams
Point2I viewportExtent;
Frustum culler;
U32 startOfUpdateMs;
S8 eyeId;
};
@ -191,7 +192,7 @@ public:
{
refplane.set( Point3F(0,0,0), Point3F(0,0,1) );
objectSpace = false;
mLastTexSize = 0;
mLastTexSize = Point2I(0,0);
}
virtual ~PlaneReflector() {}
@ -213,7 +214,7 @@ public:
protected:
U32 mLastTexSize;
Point2I mLastTexSize;
// The camera position at the last update.
Point3F mLastPos;
@ -224,7 +225,9 @@ protected:
public:
GFXTextureTargetRef reflectTarget;
GFXTexHandle reflectTex;
GFXTexHandle innerReflectTex[2]; /// < Textures we actually render to
GFXTexHandle reflectTex; ///< Last texture we rendered to
GFXTexHandle depthBuff;
PlaneF refplane;
bool objectSpace;

View file

@ -32,6 +32,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu
: mViewport( viewport ),
mFrustum( frustum ),
mWorldViewMatrix( worldView ),
mHeadWorldViewMatrix( worldView ),
mProjectionMatrix( projection )
{
mViewDirection = frustum.getTransform().getForwardVector();
@ -39,7 +40,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu
//-----------------------------------------------------------------------------
SceneCameraState SceneCameraState::fromGFX()
// Capture the current camera state from the GFX device, using the device's
// full current viewport (delegates to fromGFXWithViewport).
SceneCameraState SceneCameraState::fromGFX( )
{
return fromGFXWithViewport( GFX->getViewport() );
}
@ -56,10 +57,20 @@ SceneCameraState SceneCameraState::fromGFXWithViewport( const RectI& viewport )
Frustum frustum = GFX->getFrustum();
frustum.setTransform( camera );
return SceneCameraState(
SceneCameraState ret = SceneCameraState(
viewport,
frustum,
world,
GFX->getProjectionMatrix()
);
// If rendering to stereo, make sure we get the head matrix
S32 stereoTarget = GFX->getCurrentStereoTarget();
if (stereoTarget != -1)
{
ret.mHeadWorldViewMatrix = GFX->getStereoHeadTransform();
ret.mHeadWorldViewMatrix.inverse();
}
return ret;
}

View file

@ -51,6 +51,9 @@ class SceneCameraState
/// The inverse of the frustum's transform stored here for caching.
MatrixF mWorldViewMatrix;
/// Actual head position (will be - eye pos)
MatrixF mHeadWorldViewMatrix;
/// The projection matrix.
MatrixF mProjectionMatrix;
@ -88,6 +91,9 @@ class SceneCameraState
/// Return the world-space view vector.
const Point3F& getViewDirection() const { return mViewDirection; }
/// Returns the world->view transform for the head (used to calculate various display metrics)
const MatrixF& getHeadWorldViewMatrix() const { return mHeadWorldViewMatrix; }
/// Return the view->world transform. This is a shortcut for getFrustum().getTransform().
const MatrixF& getViewWorldMatrix() const { return mFrustum.getTransform(); }

View file

@ -41,6 +41,8 @@
// For player object bounds workaround.
#include "T3D/player.h"
#include "postFx/postEffectManager.h"
extern bool gEditingMission;
@ -239,7 +241,10 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
MatrixF originalWorld = GFX->getWorldMatrix();
Frustum originalFrustum = GFX->getFrustum();
Point2F projOffset = GFX->getCurrentProjectionOffset();
// Save PFX & SceneManager projections
MatrixF origNonClipProjection = renderState->getSceneManager()->getNonClipProjection();
PFXFrameState origPFXState = PFXMGR->getFrameState();
const FovPort *currentFovPort = GFX->getStereoFovPort();
const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms();
@ -251,15 +256,16 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
GFX->setWorldMatrix(worldEyeTransforms[0]);
Frustum gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], eyeTransforms[0]);
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0]);
GFX->setFrustum(gfxFrustum);
SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
SceneRenderState renderStateLeft( this, renderState->getScenePassType(), cameraStateLeft );
renderStateLeft.getSceneManager()->setNonClipProjection(GFX->getProjectionMatrix());
renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
renderStateLeft.setSceneRenderField(0);
PFXMGR->setFrameMatrices(GFX->getWorldMatrix(), GFX->getProjectionMatrix());
renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone );
renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone ); // left
// Indicate that we've just finished a field
//GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(255,0,0), 1.0f, 0);
@ -271,21 +277,25 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
GFX->setWorldMatrix(worldEyeTransforms[1]);
gfxFrustum = originalFrustum;
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], eyeTransforms[1]);
MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1]);
GFX->setFrustum(gfxFrustum);
SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
SceneRenderState renderStateRight( this, renderState->getScenePassType(), cameraStateRight );
renderStateRight.getSceneManager()->setNonClipProjection(GFX->getProjectionMatrix());
renderStateRight.setSceneRenderStyle(SRS_SideBySide);
renderStateRight.setSceneRenderField(1);
PFXMGR->setFrameMatrices(GFX->getWorldMatrix(), GFX->getProjectionMatrix());
renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone );
renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); // right
// Indicate that we've just finished a field
//GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(0,255,0), 1.0f, 0);
GFX->endField();
// Restore previous values
renderState->getSceneManager()->setNonClipProjection(origNonClipProjection);
PFXMGR->setFrameState(origPFXState);
GFX->setWorldMatrix(originalWorld);
GFX->setFrustum(originalFrustum);
GFX->setViewport(originalVP);

View file

@ -48,11 +48,11 @@ SceneRenderState::SceneRenderState( SceneManager* sceneManager,
mDisableAdvancedLightingBins( false ),
mRenderArea( view.getFrustum().getBounds() ),
mAmbientLightColor( sceneManager->getAmbientLightColor() ),
mSceneRenderStyle( SRS_Standard ),
mRenderField( 0 )
mSceneRenderStyle( SRS_Standard )
{
// Setup the default parameters for the screen metrics methods.
mDiffuseCameraTransform = view.getViewWorldMatrix();
mDiffuseCameraTransform = view.getHeadWorldViewMatrix();
mDiffuseCameraTransform.inverse();
// The vector eye is the camera vector with its
// length normalized to 1 / zFar.

View file

@ -72,9 +72,6 @@ class SceneRenderState
/// The render style being performed
SceneRenderStyle mSceneRenderStyle;
/// When doing stereo rendering, the current field that is being rendered
S32 mRenderField;
/// The render pass which we are setting up with this scene state.
RenderPassManager* mRenderPass;
@ -237,12 +234,6 @@ class SceneRenderState
/// Set the rendering style used for the scene
void setSceneRenderStyle(SceneRenderStyle style) { mSceneRenderStyle = style; }
/// Get the stereo field being rendered
S32 getSceneRenderField() const { return mRenderField; }
/// Set the stereo field being rendered
void setSceneRenderField(S32 field) { mRenderField = field; }
/// @}
/// @name Transforms, projections, and viewports.

View file

@ -1450,9 +1450,8 @@ bool ActionMap::processAction(const InputEventInfo* pEvent)
}
else
{
// Handle rotation (QuatF)
QuatF quat(pEvent->fValue, pEvent->fValue2, pEvent->fValue3, pEvent->fValue4);
AngAxisF aa(quat);
// Handle rotation (AngAxisF)
AngAxisF aa(Point3F(pEvent->fValue, pEvent->fValue2, pEvent->fValue3), pEvent->fValue4);
aa.axis.normalize();
argv[1] = Con::getFloatArg( aa.axis.x );
argv[2] = Con::getFloatArg( aa.axis.y );

View file

@ -82,7 +82,7 @@ WindowInputGenerator::~WindowInputGenerator()
//-----------------------------------------------------------------------------
void WindowInputGenerator::generateInputEvent( InputEventInfo &inputEvent )
{
if (!mInputController || !mFocused)
if (!mInputController)// || !mFocused)
return;
if (inputEvent.action == SI_MAKE && inputEvent.deviceType == KeyboardDeviceType)
@ -331,7 +331,7 @@ void WindowInputGenerator::handleKeyboard( WindowId did, U32 modifier, U32 actio
void WindowInputGenerator::handleInputEvent( U32 deviceInst, F32 fValue, F32 fValue2, F32 fValue3, F32 fValue4, S32 iValue, U16 deviceType, U16 objType, U16 ascii, U16 objInst, U8 action, U8 modifier )
{
// Skip it if we don't have focus.
if(!mInputController || !mFocused)
if(!mInputController)// || !mFocused)
return;
// Convert to an InputEventInfo and pass it around for processing.

View file

@ -752,3 +752,21 @@ vehicleMap.bind( gamepad, btn_b, brake );
vehicleMap.bind( gamepad, btn_x, movebackward );
// bind exiting the vehicle to a button
vehicleMap.bindCmd(gamepad, btn_y,"getout();","");
// ----------------------------------------------------------------------------
// Oculus Rift
// ----------------------------------------------------------------------------
// Callback for Oculus sensor euler-rotation events: forwards the headset's
// pitch/roll/yaw into the move manager's absolute rotation globals so the
// control object tracks head orientation.
function OVRSensorRotEuler(%pitch, %roll, %yaw)
{
   // Feed each axis into the corresponding $mvRot channel 0 global.
   $mvRotX0 = %pitch;
   $mvRotY0 = %roll;
   $mvRotZ0 = %yaw;
   //echo("Sensor euler: " @ %pitch SPC %roll SPC %yaw);
}
// Tell the move manager that rotation channel 0 carries euler angles
// (set by OVRSensorRotEuler above) rather than an angle-axis value.
$mvRotIsEuler0 = true;
// Request euler-rotation events from the Oculus device instead of
// angle-axis events, so the handler below receives pitch/roll/yaw.
$OculusVR::GenerateAngleAxisRotationEvents = false;
$OculusVR::GenerateEulerRotationEvents = true;
// Route the sensor's rotation-angle events to the euler handler.
moveMap.bind( oculusvr, ovr_sensorrotang0, OVRSensorRotEuler );

View file

@ -0,0 +1,32 @@
# module openvr
# Optional OpenVR integration: gated behind TORQUE_OPENVR so the SDK path
# is only requested (and the sources only built) when the user opts in.
option(TORQUE_OPENVR "Enable openvr module" OFF)
mark_as_advanced(TORQUE_OPENVR)
if(TORQUE_OPENVR)
# Surface the SDK path as a user-editable cache variable when the module
# is enabled; FORCE re-promotes it if it was previously hidden below.
if(TORQUE_OPENVR_SDK_PATH STREQUAL "")
set(TORQUE_OPENVR_SDK_PATH "" CACHE PATH "openvr library path" FORCE)
endif()
else() # hide variable
# Demote to INTERNAL so the path doesn't clutter the GUI while disabled.
set(TORQUE_OPENVR_SDK_PATH "" CACHE INTERNAL "" FORCE)
endif()
if(TORQUE_OPENVR)
# Source
addPathRec( "${srcDir}/platform/input/openvr" )
# Includes
# Assumes the stock OpenVR SDK layout: headers live in <sdk>/headers.
addInclude( "${TORQUE_OPENVR_SDK_PATH}/headers" )
# Libs
# Only Windows link settings are provided here; lib dir depends on arch.
if( WIN32 )
if( TORQUE_CPU_X64 )
link_directories( "${TORQUE_OPENVR_SDK_PATH}/lib/win64" )
else()
link_directories( "${TORQUE_OPENVR_SDK_PATH}/lib/win32" )
endif()
addLib( "openvr_api" )
endif()
# Compile-time switch that enables the OpenVR code paths in the engine.
addDef(TORQUE_OPENVR)
endif()