Remove projection offset, add the hmd head matrix. Also tidy up a few things.

This commit is contained in:
James Urquhart 2016-05-18 00:18:02 +01:00
parent e7bafe3c7b
commit f91aa639d6
20 changed files with 126 additions and 113 deletions

View file

@ -96,9 +96,9 @@ ExtendedMove::ExtendedMove() : Move()
rotZ[i] = 0;
rotW[i] = 1;
cposX[i] = 0;
cposY[i] = 0;
cposZ[i] = 0;
cposX[i] = 0;
cposY[i] = 0;
cposZ[i] = 0;
EulerBasedRotation[i] = false;
}
@ -139,11 +139,11 @@ void ExtendedMove::pack(BitStream *stream, const Move * basemove)
{
// Position
if(stream->writeFlag(posX[i] != extBaseMove->posX[i]))
stream->writeSignedInt(cposX[i], MaxPositionBits);
stream->writeInt(cposX[i], MaxPositionBits);
if(stream->writeFlag(posY[i] != extBaseMove->posY[i]))
stream->writeSignedInt(cposY[i], MaxPositionBits);
stream->writeInt(cposY[i], MaxPositionBits);
if(stream->writeFlag(posZ[i] != extBaseMove->posZ[i]))
stream->writeSignedInt(cposZ[i], MaxPositionBits);
stream->writeInt(cposZ[i], MaxPositionBits);
// Rotation
stream->writeFlag(EulerBasedRotation[i]);
@ -183,7 +183,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
// Position
if (stream->readFlag())
{
posX[i] = stream->readSignedInt(MaxPositionBits);
posX[i] = stream->readInt(MaxPositionBits);
cposX[i] = UNCLAMPPOS(posX[i]);
}
else
@ -191,7 +191,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
if (stream->readFlag())
{
cposY[i] = stream->readSignedInt(MaxPositionBits);
cposY[i] = stream->readInt(MaxPositionBits);
posY[i] = UNCLAMPPOS(cposY[i]);
}
else
@ -199,7 +199,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
if (stream->readFlag())
{
cposZ[i] = stream->readSignedInt(MaxPositionBits);
cposZ[i] = stream->readInt(MaxPositionBits);
posZ[i] = UNCLAMPPOS(cposZ[i]);
}
else
@ -267,9 +267,9 @@ void ExtendedMove::clamp()
for(U32 i=0; i<MaxPositionsRotations; ++i)
{
// Positions
posX[i] = CLAMPPOS(posX[i]);
posY[i] = CLAMPPOS(posY[i]);
posZ[i] = CLAMPPOS(posZ[i]);
cposX[i] = CLAMPPOS(posX[i]);
cposY[i] = CLAMPPOS(posY[i]);
cposZ[i] = CLAMPPOS(posZ[i]);
// Rotations
if(EulerBasedRotation[i])
@ -285,6 +285,16 @@ void ExtendedMove::clamp()
crotZ[i] = CLAMPPOS(rotZ[i]);
crotW[i] = CLAMPROT(rotW[i] / M_2PI_F);
}
/*if (i == 0)
{
F32 x, y, z, a;
x = UNCLAMPPOS(crotX[i]);
y = UNCLAMPPOS(crotY[i]);
z = UNCLAMPPOS(crotZ[i]);
a = UNCLAMPROT(crotW[i]) * M_2PI_F;
//Con::printf("rot %f,%f,%f,%f clamped to %f,%f,%f,%f", rotX[i], rotY[i], rotZ[i], rotW[i], x,y,z,a);
}*/
}
// Perform the standard Move clamp
@ -296,9 +306,9 @@ void ExtendedMove::unclamp()
// Unclamp the values the same as for net traffic so the client matches the server
for(U32 i=0; i<MaxPositionsRotations; ++i)
{
posX[i] = UNCLAMPPOS(posX[i]);
posY[i] = UNCLAMPPOS(posY[i]);
posZ[i] = UNCLAMPPOS(posZ[i]);
posX[i] = UNCLAMPPOS(cposX[i]);
posY[i] = UNCLAMPPOS(cposY[i]);
posZ[i] = UNCLAMPPOS(cposZ[i]);
// Rotations
if(EulerBasedRotation[i])

View file

@ -681,6 +681,24 @@ bool GameConnection::getControlCameraTransform(F32 dt, MatrixF* mat)
return true;
}
/// Returns the head (HMD center) transform for the control object's camera,
/// using supplemental pose information from the provided IDisplayDevice.
/// Counterpart of getControlCameraEyeTransforms(), but requests the head
/// pose (eye index -1) rather than the per-eye transforms.
/// @param display   Display device supplying the pose data.
/// @param transform [out] Receives the head transform.
/// @return false when no camera object is available, true otherwise.
bool GameConnection::getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform)
{
   GameBase* obj = getCameraObject();
   if (!obj)
      return false;

   // Walk the control-object chain, preferring the innermost object
   // that wants its own eye point used for the camera.
   for (GameBase* ctrl = obj->getControlObject(); ctrl != 0; ctrl = ctrl->getControlObject())
   {
      if (ctrl->useObjsEyePoint())
         obj = ctrl;
   }

   // Eye index -1 requests the head ("center") pose from the display device.
   obj->getEyeCameraTransform(display, -1, transform);
   return true;
}
bool GameConnection::getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms)
{
GameBase* obj = getCameraObject();

View file

@ -267,6 +267,10 @@ public:
bool getControlCameraTransform(F32 dt,MatrixF* mat);
bool getControlCameraVelocity(Point3F *vel);
/// Returns the head transform for the control object, using supplemental information
/// from the provided IDisplayDevice
bool getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform);
/// Returns the eye transforms for the control object, using supplemental information
/// from the provided IDisplayDevice.
bool getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms);

View file

@ -348,7 +348,6 @@ bool GameProcessCameraQuery(CameraQuery *query)
query->farPlane = gClientSceneGraph->getVisibleDistance() * CameraAndFOV::sVisDistanceScale;
// Provide some default values
query->projectionOffset = Point2F::Zero;
query->stereoTargets[0] = 0;
query->stereoTargets[1] = 0;
query->eyeOffset[0] = Point3F::Zero;
@ -376,12 +375,6 @@ bool GameProcessCameraQuery(CameraQuery *query)
// Display may activate AFTER so we need to call this again just in case
display->onStartFrame();
// The connection's display device may want to set the projection offset
if(display->providesProjectionOffset())
{
query->projectionOffset = display->getProjectionOffset();
}
// The connection's display device may want to set the eye offset
if(display->providesEyeOffsets())
{
@ -398,6 +391,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
// Grab the latest overriding render view transforms
connection->getControlCameraEyeTransforms(display, query->eyeTransforms);
connection->getControlCameraHeadTransform(display, &query->headMatrix);
display->getStereoViewports(query->stereoViewports);
display->getStereoTargets(query->stereoTargets);
@ -407,6 +401,7 @@ bool GameProcessCameraQuery(CameraQuery *query)
{
query->eyeTransforms[0] = query->cameraMatrix;
query->eyeTransforms[1] = query->cameraMatrix;
query->headMatrix = query->cameraMatrix;
}
// Use the connection's FOV settings if required

View file

@ -160,7 +160,8 @@ GFXDevice::GFXDevice()
// misc
mAllowRender = true;
mCurrentRenderStyle = RS_Standard;
mCurrentProjectionOffset = Point2F::Zero;
mCurrentStereoTarget = -1;
mStereoHeadTransform = MatrixF(1);
mCanCurrentlyRender = false;
mInitialized = false;

View file

@ -288,13 +288,19 @@ protected:
/// The style of rendering that is to be performed, based on GFXDeviceRenderStyles
U32 mCurrentRenderStyle;
/// The current projection offset. May be used during side-by-side rendering, for example.
Point2F mCurrentProjectionOffset;
/// Current stereo target being rendered to
S32 mCurrentStereoTarget;
/// Eye offset used when using a stereo rendering style
Point3F mStereoEyeOffset[NumStereoPorts];
/// Center matrix for head
MatrixF mStereoHeadTransform;
/// Left and right matrix for eyes
MatrixF mStereoEyeTransforms[NumStereoPorts];
/// Inverse of mStereoEyeTransforms
MatrixF mInverseStereoEyeTransforms[NumStereoPorts];
/// Fov port settings
@ -345,21 +351,25 @@ public:
/// Retrieve the current rendering style based on GFXDeviceRenderStyles
U32 getCurrentRenderStyle() const { return mCurrentRenderStyle; }
/// Retrieve the current stereo target being rendered to
S32 getCurrentStereoTarget() const { return mCurrentStereoTarget; }
/// Set the current rendering style, based on GFXDeviceRenderStyles
void setCurrentRenderStyle(U32 style) { mCurrentRenderStyle = style; }
/// Set the current projection offset used during stereo rendering
const Point2F& getCurrentProjectionOffset() { return mCurrentProjectionOffset; }
/// Get the current projection offset used during stereo rendering
void setCurrentProjectionOffset(const Point2F& offset) { mCurrentProjectionOffset = offset; }
/// Set the current stereo target being rendered to (in case we're doing anything with postfx)
void setCurrentStereoTarget(const F32 targetId) { mCurrentStereoTarget = targetId; }
/// Get the current eye offset used during stereo rendering
const Point3F* getStereoEyeOffsets() { return mStereoEyeOffset; }
const MatrixF& getStereoHeadTransform() { return mStereoHeadTransform; }
const MatrixF* getStereoEyeTransforms() { return mStereoEyeTransforms; }
const MatrixF* getInverseStereoEyeTransforms() { return mInverseStereoEyeTransforms; }
/// Sets the head matrix for stereo rendering
void setStereoHeadTransform(const MatrixF &mat) { mStereoHeadTransform = mat; }
/// Set the current eye offset used during stereo rendering
void setStereoEyeOffsets(Point3F *offsets) { dMemcpy(mStereoEyeOffset, offsets, sizeof(Point3F) * NumStereoPorts); }
@ -398,6 +408,8 @@ public:
}
setViewport(mStereoViewports[eyeId]);
}
mCurrentStereoTarget = eyeId;
}
GFXCardProfiler* getCardProfiler() const { return mCardProfiler; }

View file

@ -160,7 +160,6 @@ GuiTSCtrl::GuiTSCtrl()
mLastCameraQuery.farPlane = 10.0f;
mLastCameraQuery.nearPlane = 0.01f;
mLastCameraQuery.projectionOffset = Point2F::Zero;
mLastCameraQuery.hasFovPort = false;
mLastCameraQuery.hasStereoTargets = false;
@ -556,12 +555,6 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_Standard);
}
// The connection's display device may want to set the projection offset
if (mLastCameraQuery.displayDevice->providesProjectionOffset())
{
mLastCameraQuery.projectionOffset = mLastCameraQuery.displayDevice->getProjectionOffset();
}
// The connection's display device may want to set the eye offset
if (mLastCameraQuery.displayDevice->providesEyeOffsets())
{
@ -586,7 +579,6 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
// Set up the appropriate render style
U32 prevRenderStyle = GFX->getCurrentRenderStyle();
Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset();
Point2I renderSize = getExtent();
Frustum frustum;
@ -595,8 +587,8 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
if (mRenderStyle == RenderStyleStereoSideBySide)
{
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide);
GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset);
GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix);
if (!mLastCameraQuery.hasStereoTargets)
{
@ -626,12 +618,14 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
// Use the view matrix determined from the display device
myTransforms[0] = mLastCameraQuery.eyeTransforms[0];
myTransforms[1] = mLastCameraQuery.eyeTransforms[1];
myTransforms[2] = mLastCameraQuery.cameraMatrix;
}
else
{
// Use the view matrix determined from the control object
myTransforms[0] = mLastCameraQuery.cameraMatrix;
myTransforms[1] = mLastCameraQuery.cameraMatrix;
myTransforms[2] = mLastCameraQuery.cameraMatrix;
QuatF qrot = mLastCameraQuery.cameraMatrix;
Point3F pos = mLastCameraQuery.cameraMatrix.getPosition();
@ -678,6 +672,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect)
// render the final composite view
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSeparate);
GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset);
GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix);
GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes
GFX->setSteroViewports(mLastCameraQuery.stereoViewports);
GFX->setStereoTargets(mLastCameraQuery.stereoTargets);

View file

@ -49,13 +49,13 @@ struct CameraQuery
F32 farPlane;
F32 fov;
FovPort fovPort[2]; // fov for each eye
Point2F projectionOffset;
Point3F eyeOffset[2];
MatrixF eyeTransforms[2];
bool ortho;
bool hasFovPort;
bool hasStereoTargets;
MatrixF cameraMatrix;
MatrixF headMatrix; // center matrix (for HMDs)
S32 currentEye;
RectI stereoViewports[2]; // destination viewports
GFXTextureTarget* stereoTargets[2];

View file

@ -193,7 +193,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr
// Fill Blue if no Dialogs
if(this->size() == 0)
GFX->clear( GFXClearTarget, ColorF(0,0,1,1), 1.0f, 0 );
GFX->clear( GFXClearTarget, ColorF(0,0,0,1), 1.0f, 0 );
GFX->setClipRect( contentRect );

View file

@ -1162,6 +1162,7 @@ bool EditTSCtrl::processCameraQuery(CameraQuery * query)
query->cameraMatrix = camRot;
query->cameraMatrix.setPosition(camPos);
query->headMatrix = query->cameraMatrix;
query->fov = mOrthoFOV;
}

View file

@ -453,30 +453,7 @@ void AdvancedLightBinManager::_setupPerFrameParameters( const SceneRenderState *
// Perform a camera offset. We need to manually perform this offset on the sun (or vector) light's
// polygon, which is at the far plane.
const Point2F& projOffset = frustum.getProjectionOffset();
Point3F cameraOffsetPos = cameraPos;
if(!projOffset.isZero())
{
// First we need to calculate the offset at the near plane. The projOffset
// given above can be thought of a percent as it ranges from 0..1 (or 0..-1).
F32 nearOffset = frustum.getNearRight() * projOffset.x;
// Now given the near plane distance from the camera we can solve the right
// triangle and calculate the SIN theta for the offset at the near plane.
// SIN theta = x/y
F32 sinTheta = nearOffset / frustum.getNearDist();
// Finally, we can calculate the offset at the far plane, which is where our sun (or vector)
// light's polygon is drawn.
F32 farOffset = frustum.getFarDist() * sinTheta;
// We can now apply this far plane offset to the far plane itself, which then compensates
// for the projection offset.
MatrixF camTrans = frustum.getTransform();
VectorF offset = camTrans.getRightVector();
offset *= farOffset;
cameraOffsetPos += offset;
}
// Now build the quad for drawing full-screen vector light
// passes.... this is a volatile VB and updates every frame.

View file

@ -597,15 +597,27 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos)
outPos = pos;// Point3F(-pos.x, pos.z, -pos.y);
}
void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const
void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const
{
AssertFatal(eye >= 0 && eye < 2, "Out of bounds eye");
AssertFatal(eyeId >= -1 && eyeId < 2, "Out of bounds eye");
MatrixF mat = mHMDRenderState.mEyePose[eye] * mHMDRenderState.mHMDPose; // same order as in the openvr example
if (eyeId == -1)
{
// NOTE: this is codename for "head"
MatrixF mat = mHMDRenderState.mHMDPose; // same order as in the openvr example
OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
pose->velocity = Point3F(0);
pose->angularVelocity = Point3F(0);
OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
pose->velocity = Point3F(0);
pose->angularVelocity = Point3F(0);
}
else
{
MatrixF mat = mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose; // same order as in the openvr example
OpenVRTransformToRotPos(mat, pose->orientation, pose->position);
pose->velocity = Point3F(0);
pose->angularVelocity = Point3F(0);
}
}
bool OpenVRProvider::providesEyeOffsets() const
@ -633,16 +645,6 @@ void OpenVRProvider::getFovPorts(FovPort *out) const
dMemcpy(out, mHMDRenderState.mEyeFov, sizeof(mHMDRenderState.mEyeFov));
}
bool OpenVRProvider::providesProjectionOffset() const
{
return mHMD != NULL;
}
const Point2F& OpenVRProvider::getProjectionOffset() const
{
return Point2F(0, 0);
}
void OpenVRProvider::getStereoViewports(RectI *out) const
{
out[0] = mHMDRenderState.mEyeViewport[0];

View file

@ -157,7 +157,7 @@ public:
/// @name Display handling
/// {
virtual bool providesFrameEyePose() const;
virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const;
virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const;
virtual bool providesEyeOffsets() const;
/// Returns eye offset not taking into account any position tracking info
@ -166,9 +166,6 @@ public:
virtual bool providesFovPorts() const;
virtual void getFovPorts(FovPort *out) const;
virtual bool providesProjectionOffset() const;
virtual const Point2F& getProjectionOffset() const;
virtual void getStereoViewports(RectI *out) const;
virtual void getStereoTargets(GFXTextureTarget **out) const;

View file

@ -50,7 +50,9 @@ class IDisplayDevice
{
public:
virtual bool providesFrameEyePose() const = 0;
virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const = 0;
/// Get a display pose for the specified eye, or the HMD if eyeId is -1.
virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const = 0;
virtual bool providesEyeOffsets() const = 0;
/// Returns eye offset not taking into account any position tracking info
@ -59,9 +61,6 @@ public:
virtual bool providesFovPorts() const = 0;
virtual void getFovPorts(FovPort *out) const = 0;
virtual bool providesProjectionOffset() const = 0;
virtual const Point2F& getProjectionOffset() const = 0;
virtual void getStereoViewports(RectI *out) const = 0;
virtual void getStereoTargets(GFXTextureTarget **out) const = 0;

View file

@ -418,7 +418,7 @@ void CubeReflector::updateFace( const ReflectParams &params, U32 faceidx )
);
reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix );
reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix );
// render scene
LIGHTMGR->registerGlobalLights( &reflectRenderState.getCullingFrustum(), false );
@ -581,7 +581,7 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
reflectTarget->attachTexture( GFXTextureTarget::Color0, reflectTex );
reflectTarget->attachTexture( GFXTextureTarget::DepthStencil, depthBuff );
GFX->pushActiveRenderTarget();
GFX->setActiveRenderTarget( reflectTarget );
GFX->setActiveRenderTarget( reflectTarget );
U32 objTypeFlag = -1;
SceneCameraState reflectCameraState = SceneCameraState::fromGFX();
@ -604,7 +604,6 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
// Store previous values
RectI originalVP = GFX->getViewport();
Point2F projOffset = GFX->getCurrentProjectionOffset();
const FovPort *currentFovPort = GFX->getStereoFovPort();
MatrixF inverseEyeTransforms[2];
@ -629,9 +628,8 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
SceneRenderState renderStateLeft( gClientSceneGraph, SPT_Reflect, cameraStateLeft );
renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
renderStateLeft.setSceneRenderField(0);
renderStateLeft.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
renderStateLeft.setDiffuseCameraTransform( params.query->eyeTransforms[0] );
renderStateLeft.setDiffuseCameraTransform( params.query->headMatrix );
gClientSceneGraph->renderSceneNoLights( &renderStateLeft, objTypeFlag );
@ -648,9 +646,8 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
SceneRenderState renderStateRight( gClientSceneGraph, SPT_Reflect, cameraStateRight );
renderStateRight.setSceneRenderStyle(SRS_SideBySide);
renderStateRight.setSceneRenderField(1);
renderStateRight.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
renderStateRight.setDiffuseCameraTransform( params.query->eyeTransforms[1] );
renderStateRight.setDiffuseCameraTransform( params.query->headMatrix );
renderStateRight.disableAdvancedLightingBins(true);
gClientSceneGraph->renderSceneNoLights( &renderStateRight, objTypeFlag );
@ -669,7 +666,7 @@ void PlaneReflector::updateReflection( const ReflectParams &params )
);
reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial );
reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix );
reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix );
gClientSceneGraph->renderSceneNoLights( &reflectRenderState, objTypeFlag );
}

View file

@ -32,6 +32,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu
: mViewport( viewport ),
mFrustum( frustum ),
mWorldViewMatrix( worldView ),
mHeadWorldViewMatrix( worldView ),
mProjectionMatrix( projection )
{
mViewDirection = frustum.getTransform().getForwardVector();
@ -39,7 +40,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
/// Capture the current camera state from the GFX device using its full viewport.
SceneCameraState SceneCameraState::fromGFX()
{
   return fromGFXWithViewport( GFX->getViewport() );
}
@ -56,10 +57,20 @@ SceneCameraState SceneCameraState::fromGFXWithViewport( const RectI& viewport )
Frustum frustum = GFX->getFrustum();
frustum.setTransform( camera );
return SceneCameraState(
SceneCameraState ret = SceneCameraState(
viewport,
frustum,
world,
GFX->getProjectionMatrix()
);
// If rendering to stereo, make sure we get the head matrix
S32 stereoTarget = GFX->getCurrentStereoTarget();
if (stereoTarget != -1)
{
ret.mHeadWorldViewMatrix = GFX->getStereoHeadTransform();
ret.mHeadWorldViewMatrix.inverse();
}
return ret;
}

View file

@ -51,6 +51,9 @@ class SceneCameraState
/// The inverse of the frustum's transform stored here for caching.
MatrixF mWorldViewMatrix;
/// Actual head position (will be - eye pos)
MatrixF mHeadWorldViewMatrix;
/// The projection matrix.
MatrixF mProjectionMatrix;
@ -88,6 +91,9 @@ class SceneCameraState
/// Return the world-space view vector.
const Point3F& getViewDirection() const { return mViewDirection; }
/// Returns the world->view transform for the head (used to calculate various display metrics)
const MatrixF& getHeadWorldViewMatrix() const { return mHeadWorldViewMatrix; }
/// Return the view->world transform. This is a shortcut for getFrustum().getTransform().
const MatrixF& getViewWorldMatrix() const { return mFrustum.getTransform(); }

View file

@ -239,7 +239,6 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
MatrixF originalWorld = GFX->getWorldMatrix();
Frustum originalFrustum = GFX->getFrustum();
Point2F projOffset = GFX->getCurrentProjectionOffset();
const FovPort *currentFovPort = GFX->getStereoFovPort();
const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms();
const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms();
@ -257,7 +256,6 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
SceneCameraState cameraStateLeft = SceneCameraState::fromGFX();
SceneRenderState renderStateLeft( this, renderState->getScenePassType(), cameraStateLeft );
renderStateLeft.setSceneRenderStyle(SRS_SideBySide);
renderStateLeft.setSceneRenderField(0);
renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone ); // left
@ -277,7 +275,6 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S
SceneCameraState cameraStateRight = SceneCameraState::fromGFX();
SceneRenderState renderStateRight( this, renderState->getScenePassType(), cameraStateRight );
renderStateRight.setSceneRenderStyle(SRS_SideBySide);
renderStateRight.setSceneRenderField(1);
renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); // right

View file

@ -48,11 +48,11 @@ SceneRenderState::SceneRenderState( SceneManager* sceneManager,
mDisableAdvancedLightingBins( false ),
mRenderArea( view.getFrustum().getBounds() ),
mAmbientLightColor( sceneManager->getAmbientLightColor() ),
mSceneRenderStyle( SRS_Standard ),
mRenderField( 0 )
mSceneRenderStyle( SRS_Standard )
{
// Setup the default parameters for the screen metrics methods.
mDiffuseCameraTransform = view.getViewWorldMatrix();
mDiffuseCameraTransform = view.getHeadWorldViewMatrix();
mDiffuseCameraTransform.inverse();
// The vector eye is the camera vector with its
// length normalized to 1 / zFar.

View file

@ -72,9 +72,6 @@ class SceneRenderState
/// The render style being performed
SceneRenderStyle mSceneRenderStyle;
/// When doing stereo rendering, the current field that is being rendered
S32 mRenderField;
/// The render pass which we are setting up with this scene state.
RenderPassManager* mRenderPass;
@ -237,12 +234,6 @@ class SceneRenderState
/// Set the rendering style used for the scene
void setSceneRenderStyle(SceneRenderStyle style) { mSceneRenderStyle = style; }
/// Get the stereo field being rendered
S32 getSceneRenderField() const { return mRenderField; }
/// Set the stereo field being rendered
void setSceneRenderField(S32 field) { mRenderField = field; }
/// @}
/// @name Transforms, projections, and viewports.