From 64b751a7c86501e9ae95b05507e36c1ffd107abb Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Mon, 21 Mar 2016 15:10:14 +0000 Subject: [PATCH 01/33] Basic port of code --- Engine/source/gfx/D3D11/gfxD3D11Device.cpp | 3 + Engine/source/gfx/D3D11/gfxD3D11Device.h | 4 + Engine/source/gfx/D3D11/gfxD3D11Target.cpp | 7 + .../gfx/D3D11/gfxD3D11TextureObject.cpp | 6 + Engine/source/gfx/gfxAdapter.h | 10 + Engine/source/gfx/gfxInit.cpp | 52 +- Engine/source/gfx/gfxInit.h | 6 + Engine/source/gfx/gfxTextureProfile.h | 6 +- Engine/source/gui/3d/guiTSControl.cpp | 74 +- .../input/oculusVR/oculusVRDevice.cpp | 46 +- .../platform/input/oculusVR/oculusVRDevice.h | 7 +- .../input/oculusVR/oculusVRHMDDevice.cpp | 654 ++++++++++-------- .../input/oculusVR/oculusVRHMDDevice.h | 50 +- .../input/oculusVR/oculusVRSensorData.h | 2 +- .../input/oculusVR/oculusVRSensorDevice.cpp | 37 +- .../input/oculusVR/oculusVRSensorDevice.h | 2 +- .../platform/input/oculusVR/oculusVRUtil.h | 2 +- Engine/source/postFx/postEffect.cpp | 1 - .../Full/game/scripts/client/default.bind.cs | 18 + 19 files changed, 613 insertions(+), 374 deletions(-) diff --git a/Engine/source/gfx/D3D11/gfxD3D11Device.cpp b/Engine/source/gfx/D3D11/gfxD3D11Device.cpp index 3f7cd44f8..49a3835af 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11Device.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11Device.cpp @@ -128,6 +128,9 @@ void GFXD3D11Device::enumerateAdapters(Vector &adapterList) DXGI_ADAPTER_DESC1 desc; EnumAdapter->GetDesc1(&desc); + // LUID identifies adapter for oculus rift + dMemcpy(&toAdd->mLUID, &desc.AdapterLuid, sizeof(toAdd->mLUID)); + size_t size=wcslen(desc.Description); char *str = new char[size+1]; diff --git a/Engine/source/gfx/D3D11/gfxD3D11Device.h b/Engine/source/gfx/D3D11/gfxD3D11Device.h index 97418d373..8640c8b68 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11Device.h +++ b/Engine/source/gfx/D3D11/gfxD3D11Device.h @@ -42,6 +42,8 @@ class PlatformWindow; class GFXD3D11ShaderConstBuffer; +class OculusVRHMDDevice; +class D3D11OculusTexture; //------------------------------------------------------------------------------ @@ -53,6 +55,8 @@ class GFXD3D11Device : public GFXDevice friend class GFXD3D11TextureObject; friend class GFXD3D11TextureTarget; friend class GFXD3D11WindowTarget; + friend class OculusVRHMDDevice; + friend class D3D11OculusTexture; virtual GFXFormat selectSupportedFormat(GFXTextureProfile *profile, const Vector &formats, bool texture, bool mustblend, bool mustfilter); diff --git a/Engine/source/gfx/D3D11/gfxD3D11Target.cpp b/Engine/source/gfx/D3D11/gfxD3D11Target.cpp index a74b3e54d..9c21fa4d3 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11Target.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11Target.cpp @@ -163,6 +163,13 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te mTargetSize = Point2I(sd.Width, sd.Height); S32 format = sd.Format; + + if (format == DXGI_FORMAT_R8G8B8A8_TYPELESS || format == DXGI_FORMAT_B8G8R8A8_TYPELESS) + { + mTargetFormat = GFXFormatR8G8B8A8; + return; + } + GFXREVERSE_LOOKUP( GFXD3D11TextureFormat, GFXFormat, format ); mTargetFormat = (GFXFormat)format; } diff --git a/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp b/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp index 8f15cf550..1c97597cc 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp @@ -67,6 +67,12 @@ GFXLockedRect *GFXD3D11TextureObject::lock(U32 mipLevel /*= 0*/, RectI *inRect / D3D11_MAPPED_SUBRESOURCE mapInfo; + /*if 
(!mProfile->canModify()) + { + AssertFatal(false, "Tried to modify external texture"); + return NULL; + }*/ + if( mProfile->isRenderTarget() ) { //AssertFatal( 0, "GFXD3D11TextureObject::lock - Need to handle mapping render targets" ); diff --git a/Engine/source/gfx/gfxAdapter.h b/Engine/source/gfx/gfxAdapter.h index eccf1d7dc..221cc4ef3 100644 --- a/Engine/source/gfx/gfxAdapter.h +++ b/Engine/source/gfx/gfxAdapter.h @@ -35,6 +35,12 @@ #include "core/util/delegate.h" #endif +struct GFXAdapterLUID +{ + unsigned long LowPart; + long HighPart; +}; + struct GFXAdapter { public: @@ -58,6 +64,9 @@ public: /// Supported shader model. 0.f means none supported. F32 mShaderModel; + /// LUID for windows oculus support + GFXAdapterLUID mLUID; + const char * getName() const { return mName; } const char * getOutputName() const { return mOutputName; } GFXAdapterType mType; @@ -72,6 +81,7 @@ public: mOutputName[0] = 0; mShaderModel = 0.f; mIndex = 0; + dMemset(&mLUID, '\0', sizeof(mLUID)); } ~GFXAdapter() diff --git a/Engine/source/gfx/gfxInit.cpp b/Engine/source/gfx/gfxInit.cpp index 09d503d10..bb5e560ac 100644 --- a/Engine/source/gfx/gfxInit.cpp +++ b/Engine/source/gfx/gfxInit.cpp @@ -198,6 +198,22 @@ GFXAdapter* GFXInit::getAdapterOfType( GFXAdapterType type, const char* outputDe return NULL; } +GFXAdapter* GFXInit::getAdapterOfType(GFXAdapterType type, S32 outputDeviceIndex) +{ + for (U32 i = 0; i < smAdapters.size(); i++) + { + if (smAdapters[i]->mType == type) + { + if (smAdapters[i]->mIndex == outputDeviceIndex) + { + return smAdapters[i]; + } + } + } + + return NULL; +} + GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevice) { GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDevice); @@ -219,6 +235,27 @@ GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevic return adapter; } +GFXAdapter* GFXInit::chooseAdapter(GFXAdapterType type, S32 outputDeviceIndex) +{ + GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDeviceIndex); + + if (!adapter && type != OpenGL) + { + Con::errorf("The requested renderer, %s, doesn't seem to be available." + " Trying the default, OpenGL.", getAdapterNameFromType(type)); + adapter = GFXInit::getAdapterOfType(OpenGL, outputDeviceIndex); + } + + if (!adapter) + { + Con::errorf("The OpenGL renderer doesn't seem to be available. Trying the GFXNulDevice."); + adapter = GFXInit::getAdapterOfType(NullDevice, 0); + } + + AssertFatal(adapter, "There is no rendering device available whatsoever."); + return adapter; +} + const char* GFXInit::getAdapterNameFromType(GFXAdapterType type) { // must match GFXAdapterType order @@ -256,8 +293,19 @@ GFXAdapter *GFXInit::getBestAdapterChoice() // Get the user's preference for device... const String renderer = Con::getVariable("$pref::Video::displayDevice"); const String outputDevice = Con::getVariable("$pref::Video::displayOutputDevice"); - GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str()); - GFXAdapter *adapter = chooseAdapter(adapterType, outputDevice.c_str()); + const String adapterDevice = Con::getVariable("$Video::forceDisplayAdapter"); + + GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());; + GFXAdapter *adapter; + + if (adapterDevice.isEmpty()) + { + adapter = chooseAdapter(adapterType, outputDevice.c_str()); + } + else + { + adapter = chooseAdapter(adapterType, dAtoi(adapterDevice.c_str())); + } // Did they have one? Return it. 
if(adapter) diff --git a/Engine/source/gfx/gfxInit.h b/Engine/source/gfx/gfxInit.h index f2be9dbf7..73cdbba02 100644 --- a/Engine/source/gfx/gfxInit.h +++ b/Engine/source/gfx/gfxInit.h @@ -74,10 +74,16 @@ public: /// This method never returns NULL. static GFXAdapter *chooseAdapter( GFXAdapterType type, const char* outputDevice); + /// Override which chooses an adapter based on an index instead + static GFXAdapter *chooseAdapter( GFXAdapterType type, S32 outputDeviceIndex ); + /// Gets the first adapter of the requested type (and on the requested output device) /// from the list of enumerated adapters. Should only call this after a call to /// enumerateAdapters. static GFXAdapter *getAdapterOfType( GFXAdapterType type, const char* outputDevice ); + + /// Override which gets an adapter based on an index instead + static GFXAdapter *getAdapterOfType( GFXAdapterType type, S32 outputDeviceIndex ); /// Converts a GFXAdapterType to a string name. Useful for writing out prefs static const char *getAdapterNameFromType( GFXAdapterType type ); diff --git a/Engine/source/gfx/gfxTextureProfile.h b/Engine/source/gfx/gfxTextureProfile.h index 95bc17944..d4840cd26 100644 --- a/Engine/source/gfx/gfxTextureProfile.h +++ b/Engine/source/gfx/gfxTextureProfile.h @@ -100,7 +100,10 @@ public: /// of a target texture after presentation or deactivated. /// /// This is mainly a depth buffer optimization. - NoDiscard = BIT(10) + NoDiscard = BIT(10), + + /// Texture is managed by another process, thus should not be modified + NoModify = BIT(11) }; @@ -164,6 +167,7 @@ public: inline bool noMip() const { return testFlag(NoMipmap); } inline bool isPooled() const { return testFlag(Pooled); } inline bool canDiscard() const { return !testFlag(NoDiscard); } + inline bool canModify() const { return !testFlag(NoModify); } private: /// These constants control the packing for the profile; if you add flags, types, or diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 02d93690f..d7222dcc2 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -37,6 +37,7 @@ #include "gfx/gfxTransformSaver.h" #include "gfx/gfxDrawUtil.h" #include "gfx/gfxDebugEvent.h" +#include "core/stream/fileStream.h" GFXTextureObject *gLastStereoTexture = NULL; @@ -569,7 +570,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) DebugDrawer::get()->render(); // Render the canvas overlay if its available - if (mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer()) + if (false && mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer()) { GFXDEBUGEVENT_SCOPE( StereoGui_Render, ColorI( 255, 0, 0 ) ); MatrixF proj(1); @@ -638,7 +639,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) GFXStateBlockDesc bitmapStretchSR; bitmapStretchSR.setCullMode(GFXCullNone); bitmapStretchSR.setZReadWrite(false, false); - bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); + bitmapStretchSR.setBlend(false , GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); bitmapStretchSR.samplersDefined = true; bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); @@ -666,11 +667,78 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) GFX->setCurrentRenderStyle(prevRenderStyle); GFX->setCurrentProjectionOffset(prevProjectionOffset); + GFX->updateStates(true); if(mRenderStyle == RenderStyleStereoSideBySide && gLastStereoTexture) { + GFX->setWorldMatrix(MatrixF(1)); + 
GFX->setViewMatrix(MatrixF::Identity); GFX->setClipRect(updateRect); - GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect); + + GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0,0), Point2I(1024, 768)), ColorI::BLACK); + GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED); + + if (!mStereoOverlayVB.getPointer()) + { + mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic); + GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4); + + F32 texLeft = 0.0f; + F32 texRight = 1.0f; + F32 texTop = 1.0f; + F32 texBottom = 0.0f; + + F32 rectWidth = 1024.0; + F32 rectHeight = 768.0; + + F32 screenLeft = 0; + F32 screenRight = rectWidth; + F32 screenTop = 0; + F32 screenBottom = rectHeight; + + const F32 fillConv = 0.0f; + const F32 frustumDepthAdjusted = 0.0f; + verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f); + verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f); + verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f); + verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f); + + verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255); + + verts[0].texCoord.set(texLeft, texTop); + verts[1].texCoord.set(texRight, texTop); + verts[2].texCoord.set(texLeft, texBottom); + verts[3].texCoord.set(texRight, texBottom); + + mStereoOverlayVB.unlock(); + } + + if (!mStereoGuiSB.getPointer()) + { + // DrawBitmapStretchSR + GFXStateBlockDesc bitmapStretchSR; + bitmapStretchSR.setCullMode(GFXCullNone); + bitmapStretchSR.setZReadWrite(false, false); + bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); + bitmapStretchSR.samplersDefined = true; + + bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); + bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint; + bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint; + bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint; + + mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR); + } + //static GFXTexHandle texHandle("art/gui/splash", &GFXDefaultPersistentProfile, avar("%s() - mTextureNormal (line %d)", __FUNCTION__, __LINE__)); + GFX->setVertexBuffer(mStereoOverlayVB); + GFX->setStateBlock(mStereoGuiSB); + GFX->setTexture(0, gLastStereoTexture);// texHandle);// gLastStereoTexture); + GFX->setupGenericShaders(GFXDevice::GSModColorTexture); + GFX->drawPrimitive(GFXTriangleStrip, 0, 2); + + + + //GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect); } // Allow subclasses to render 2D elements. 
diff --git a/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp index 4eabfd1ed..50f153b12 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp @@ -156,26 +156,27 @@ void OculusVRDevice::buildCodeTable() OculusVRSensorDevice::buildCodeTable(); } -void OculusVRDevice::addHMDDevice(ovrHmd hmd) +void OculusVRDevice::addHMDDevice(ovrHmd hmd, ovrGraphicsLuid luid) { if(!hmd) return; OculusVRHMDDevice* hmdd = new OculusVRHMDDevice(); - hmdd->set(hmd,mHMDDevices.size()); + hmdd->set(hmd, luid, mHMDDevices.size()); mHMDDevices.push_back(hmdd); - Con::printf(" HMD found: %s by %s [v%d]", hmd->ProductName, hmd->Manufacturer, hmd->Type); + ovrHmdDesc desc = ovr_GetHmdDesc(hmd); + Con::printf(" HMD found: %s by %s [v%d]", desc.ProductName, desc.Manufacturer, desc.Type); } void OculusVRDevice::createSimulatedHMD() -{ +{/* TOFIX OculusVRHMDDevice* hmdd = new OculusVRHMDDevice(); - ovrHmd hmd = ovrHmd_CreateDebug(ovrHmd_DK2); + ovrHmd hmd = ovr_CreateDebug(ovrHmd_DK2); hmdd->set(hmd,mHMDDevices.size()); mHMDDevices.push_back(hmdd); - Con::printf(" HMD simulated: %s by %s [v%d]", hmdd->getProductName(), hmdd->getManufacturer(), hmdd->getVersion()); + Con::printf(" HMD simulated: %s by %s [v%d]", hmdd->getProductName(), hmdd->getManufacturer(), hmdd->getVersion()); */ } bool OculusVRDevice::enable() @@ -185,16 +186,17 @@ bool OculusVRDevice::enable() Con::printf("Oculus VR Device Init:"); - if(sOcculusEnabled && ovr_Initialize()) + if(sOcculusEnabled && OVR_SUCCESS(ovr_Initialize(0))) { mEnabled = true; // Enumerate HMDs and pick the first one - ovrHmd hmd = ovrHmd_Create(0); - if(hmd) + ovrHmd hmd; + ovrGraphicsLuid luid; + if(OVR_SUCCESS(ovr_Create(&hmd, &luid))) { // Add the HMD to our list - addHMDDevice(hmd); + addHMDDevice(hmd, luid); setActive(true); } @@ -700,7 +702,7 @@ DefineEngineFunction(getOVRHMDVersion, S32, (S32 index),, return hmd->getVersion(); } -DefineEngineFunction(getOVRHMDDisplayDeviceName, const char*, (S32 index),, +DefineEngineFunction(getOVRHMDDisplayDeviceType, const char*, (S32 index),, "@brief Windows display device name used in EnumDisplaySettings/CreateDC.\n\n" "@param index The HMD index.\n" "@return The name of the HMD display device, if any.\n" @@ -717,7 +719,7 @@ DefineEngineFunction(getOVRHMDDisplayDeviceName, const char*, (S32 index),, return ""; } - return hmd->getDisplayDeviceName(); + return hmd->getDisplayDeviceType(); } DefineEngineFunction(getOVRHMDDisplayDeviceId, S32, (S32 index),, @@ -740,26 +742,6 @@ DefineEngineFunction(getOVRHMDDisplayDeviceId, S32, (S32 index),, return hmd->getDisplayDeviceId(); } -DefineEngineFunction(getOVRHMDDisplayDesktopPos, Point2I, (S32 index),, - "@brief Desktop coordinate position of the screen (can be negative; may not be present on all platforms).\n\n" - "@param index The HMD index.\n" - "@return Position of the screen.\n" - "@ingroup Game") -{ - if(!ManagedSingleton::instanceOrNull()) - { - return Point2I::Zero; - } - - const OculusVRHMDDevice* hmd = OCULUSVRDEV->getHMDDevice(index); - if(!hmd) - { - return Point2I::Zero; - } - - return hmd->getDesktopPosition(); -} - DefineEngineFunction(getOVRHMDResolution, Point2I, (S32 index),, "@brief Provides the OVR HMD screen resolution.\n\n" "@param index The HMD index.\n" diff --git a/Engine/source/platform/input/oculusVR/oculusVRDevice.h b/Engine/source/platform/input/oculusVR/oculusVRDevice.h index 695b435f1..c1ee642a2 100644 --- 
a/Engine/source/platform/input/oculusVR/oculusVRDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRDevice.h @@ -33,7 +33,7 @@ #include "math/mQuat.h" #include "math/mPoint4.h" #include "gfx/gfxDevice.h" -#include "OVR_CAPI_0_5_0.h" +#include "OVR_CAPI_0_8_0.h" #define DEFAULT_RIFT_UNIT 0 @@ -83,6 +83,9 @@ protected: /// Which HMD is the active one U32 mActiveDeviceId; + /// Device id we need to use to hook up with oculus + ovrGraphicsLuid mLuid; + protected: void cleanUp(); @@ -90,7 +93,7 @@ protected: /// Input Event Manager void buildCodeTable(); - void addHMDDevice(ovrHmd hmd); + void addHMDDevice(ovrHmd hmd, ovrGraphicsLuid luid); void createSimulatedHMD(); diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp index d9a8ca074..2d1bef8f9 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp @@ -26,27 +26,144 @@ #include "postFx/postEffectCommon.h" #include "gui/core/guiCanvas.h" #include "platform/input/oculusVR/oculusVRUtil.h" +#include "core/stream/fileStream.h" -#include "gfx/D3D9/gfxD3D9Device.h" -// Use D3D9 for win32 + +#include "gfx/D3D11/gfxD3D11Device.h" +#include "gfx/D3D11/gfxD3D11EnumTranslate.h" +#include "gfx/gfxStringEnumTranslate.h" +#undef D3D11 + +// Use D3D11 for win32 #ifdef TORQUE_OS_WIN -#define OVR_D3D_VERSION 9 +#define OVR_D3D_VERSION 11 #include "OVR_CAPI_D3D.h" #define OCULUS_USE_D3D #else #include "OVR_CAPI_GL.h" #define OCULUS_USE_GL #endif - extern GFXTextureObject *gLastStereoTexture; -OculusVRHMDDevice::OculusVRHMDDevice() : -mWindowSize(1280,800) +struct OculusTexture +{ + virtual void AdvanceToNextTexture() = 0; + + virtual ~OculusTexture() { + } +}; + +//------------------------------------------------------------ +// ovrSwapTextureSet wrapper class that also maintains the render target views +// needed for D3D11 rendering. 
+struct D3D11OculusTexture : public OculusTexture +{ + ovrHmd hmd; + ovrSwapTextureSet * TextureSet; + static const int TextureCount = 2; + GFXTexHandle TexRtv[TextureCount]; + GFXDevice *Owner; + + D3D11OculusTexture(GFXDevice* owner) : + hmd(nullptr), + TextureSet(nullptr), + Owner(owner) + { + TexRtv[0] = TexRtv[1] = nullptr; + } + + bool Init(ovrHmd _hmd, int sizeW, int sizeH) + { + hmd = _hmd; + + D3D11_TEXTURE2D_DESC dsDesc; + dsDesc.Width = sizeW; + dsDesc.Height = sizeH; + dsDesc.MipLevels = 1; + dsDesc.ArraySize = 1; + dsDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; + dsDesc.SampleDesc.Count = 1; // No multi-sampling allowed + dsDesc.SampleDesc.Quality = 0; + dsDesc.Usage = D3D11_USAGE_DEFAULT; + dsDesc.CPUAccessFlags = 0; + dsDesc.MiscFlags = 0; + dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; + + + GFXD3D11Device* device = static_cast(GFX); + ovrResult result = ovr_CreateSwapTextureSetD3D11(hmd, device->mD3DDevice, &dsDesc, ovrSwapTextureSetD3D11_Typeless, &TextureSet); + if (!OVR_SUCCESS(result)) + return false; + + AssertFatal(TextureSet->TextureCount == TextureCount, "TextureCount mismatch."); + + for (int i = 0; i < TextureCount; ++i) + { + ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i]; + D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; + rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM; + rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; + + GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); + object->registerResourceWithDevice(GFX); + *(object->getSRViewPtr()) = tex->D3D11.pSRView; + *(object->get2DTexPtr()) = tex->D3D11.pTexture; + device->mD3DDevice->CreateRenderTargetView(tex->D3D11.pTexture, &rtvd, object->getRTViewPtr()); + + // Add refs for texture release later on + if (object->getSRView()) object->getSRView()->AddRef(); + //object->getRTView()->AddRef(); + if (object->get2DTex()) object->get2DTex()->AddRef(); + object->isManaged = true; + + // Get the actual size of the texture... + D3D11_TEXTURE2D_DESC probeDesc; + ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC)); + object->get2DTex()->GetDesc(&probeDesc); + + object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0); + object->mBitmapSize = object->mTextureSize; + int fmt = probeDesc.Format; + + if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS) + { + object->mFormat = GFXFormatR8G8B8A8; // usual case + } + else + { + // TODO: improve this. this can be very bad. 
+ GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt); + object->mFormat = (GFXFormat)fmt; + } + TexRtv[i] = object; + } + + return true; + } + + ~D3D11OculusTexture() + { + for (int i = 0; i < TextureCount; ++i) + { + SAFE_DELETE(TexRtv[i]); + } + if (TextureSet) + { + ovr_DestroySwapTextureSet(hmd, TextureSet); + } + } + + void AdvanceToNextTexture() + { + TextureSet->CurrentIndex = (TextureSet->CurrentIndex + 1) % TextureSet->TextureCount; + } +}; + + +OculusVRHMDDevice::OculusVRHMDDevice() { mIsValid = false; mDevice = NULL; - mSupportedDistortionCaps = 0; - mCurrentDistortionCaps = 0; mCurrentCaps = 0; mSupportedCaps = 0; mVsync = true; @@ -60,6 +177,7 @@ mWindowSize(1280,800) mConnection = NULL; mSensor = NULL; mActionCodeIndex = 0; + mTextureSwapSet = NULL; } OculusVRHMDDevice::~OculusVRHMDDevice() @@ -79,14 +197,14 @@ void OculusVRHMDDevice::cleanUp() if(mDevice) { - ovrHmd_Destroy(mDevice); + ovr_Destroy(mDevice); mDevice = NULL; } mIsValid = false; } -void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex) +void OculusVRHMDDevice::set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeIndex) { cleanUp(); @@ -95,50 +213,42 @@ void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex) mDevice = hmd; - mSupportedCaps = hmd->HmdCaps; - mCurrentCaps = mSupportedCaps & (ovrHmdCap_DynamicPrediction | ovrHmdCap_LowPersistence | (!mVsync ? ovrHmdCap_NoVSync : 0)); + ovrHmdDesc desc = ovr_GetHmdDesc(hmd); + int caps = ovr_GetTrackingCaps(hmd); - mSupportedDistortionCaps = hmd->DistortionCaps; - mCurrentDistortionCaps = mSupportedDistortionCaps & (ovrDistortionCap_TimeWarp | ovrDistortionCap_Vignette | ovrDistortionCap_Overdrive); + mSupportedCaps = desc.AvailableHmdCaps; + mCurrentCaps = mSupportedCaps; - mTimewarp = mSupportedDistortionCaps & ovrDistortionCap_TimeWarp; + mTimewarp = true; // DeviceInfo - mProductName = hmd->ProductName; - mManufacturer = hmd->Manufacturer; - mVersion = hmd->FirmwareMajor; + mProductName = desc.ProductName; + mManufacturer = desc.Manufacturer; + mVersion = desc.FirmwareMajor; - mDisplayDeviceName = hmd->DisplayDeviceName; - mDisplayId = hmd->DisplayId; + // + Vector adapterList; + GFXD3D11Device::enumerateAdapters(adapterList); - mDesktopPosition.x = hmd->WindowsPos.x; - mDesktopPosition.y = hmd->WindowsPos.y; + dMemcpy(&mLuid, &luid, sizeof(mLuid)); + mDisplayId = -1; - mResolution.x = hmd->Resolution.w; - mResolution.y = hmd->Resolution.h; + for (U32 i = 0, sz = adapterList.size(); i < sz; i++) + { + GFXAdapter* adapter = adapterList[i]; + if (dMemcmp(&adapter->mLUID, &mLuid, sizeof(mLuid)) == 0) + { + mDisplayId = adapter->mIndex; + mDisplayDeviceType = "D3D11"; // TOFIX this + } + } - mProfileInterpupillaryDistance = ovrHmd_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD); - mLensSeparation = ovrHmd_GetFloat(hmd, "LensSeparation", 0); - ovrHmd_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2); + mResolution.x = desc.Resolution.w; + mResolution.y = desc.Resolution.h; - dMemcpy(mCurrentFovPorts, mDevice->DefaultEyeFov, sizeof(mDevice->DefaultEyeFov)); - - for (U32 i=0; i<2; i++) - { - mCurrentFovPorts[i].UpTan = mDevice->DefaultEyeFov[i].UpTan; - mCurrentFovPorts[i].DownTan = mDevice->DefaultEyeFov[i].DownTan; - mCurrentFovPorts[i].LeftTan = mDevice->DefaultEyeFov[i].LeftTan; - mCurrentFovPorts[i].RightTan = mDevice->DefaultEyeFov[i].RightTan; - } - - if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop) - { - mWindowSize = Point2I(mDevice->Resolution.w, mDevice->Resolution.h); - } - else - { - mWindowSize = Point2I(1100, 618); - } + 
mProfileInterpupillaryDistance = ovr_GetFloat(hmd, OVR_KEY_IPD, OVR_DEFAULT_IPD); + mLensSeparation = ovr_GetFloat(hmd, "LensSeparation", 0); + ovr_GetFloatArray(hmd, "ScreenSize", &mScreenSize.x, 2); mActionCodeIndex = actionCodeIndex; @@ -147,6 +257,8 @@ void OculusVRHMDDevice::set(ovrHmd hmd, U32 actionCodeIndex) mSensor = new OculusVRSensorDevice(); mSensor->set(mDevice, mActionCodeIndex); + mDebugMirrorTexture = NULL; + updateCaps(); } @@ -163,25 +275,26 @@ void OculusVRHMDDevice::setOptimalDisplaySize(GuiCanvas *canvas) PlatformWindow *window = canvas->getPlatformWindow(); GFXTarget *target = window->getGFXTarget(); - if (target && target->getSize() != mWindowSize) + Point2I requiredSize(0, 0); + + ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); + ovrSizei leftSize = ovr_GetFovTextureSize(mDevice, ovrEye_Left, desc.DefaultEyeFov[0], mCurrentPixelDensity); + ovrSizei rightSize = ovr_GetFovTextureSize(mDevice, ovrEye_Right, desc.DefaultEyeFov[1], mCurrentPixelDensity); + + requiredSize.x = leftSize.w + rightSize.h; + requiredSize.y = mMax(leftSize.h, rightSize.h); + + if (target && target->getSize() != requiredSize) { GFXVideoMode newMode; newMode.antialiasLevel = 0; newMode.bitDepth = 32; newMode.fullScreen = false; newMode.refreshRate = 75; - newMode.resolution = mWindowSize; + newMode.resolution = requiredSize; newMode.wideScreen = false; window->setVideoMode(newMode); - //AssertFatal(window->getClientExtent().x == mWindowSize[0] && window->getClientExtent().y == mWindowSize[1], "Window didn't resize to correct dimensions"); - } - - // Need to move window over to the rift side of the desktop - if (mDevice->HmdCaps & ovrHmdCap_ExtendDesktop && !OculusVRDevice::smWindowDebug) - { -#ifndef OCULUS_WINDOW_DEBUG - window->setPosition(getDesktopPosition()); -#endif + //AssertFatal(window->getClientExtent().x == requiredSize.x && window->getClientExtent().y == requiredSize.y, "Window didn't resize to correct dimensions"); } } @@ -190,61 +303,165 @@ bool OculusVRHMDDevice::isDisplayingWarning() if (!mIsValid || !mDevice) return false; + return false;/* ovrHSWDisplayState displayState; ovrHmd_GetHSWDisplayState(mDevice, &displayState); - return displayState.Displayed; + return displayState.Displayed;*/ } void OculusVRHMDDevice::dismissWarning() { if (!mIsValid || !mDevice) return; - ovrHmd_DismissHSWDisplay(mDevice); + //ovr_DismissHSWDisplay(mDevice); } bool OculusVRHMDDevice::setupTargets() { - ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]}; + // Create eye render buffers + ID3D11RenderTargetView * eyeRenderTexRtv[2]; + ovrLayerEyeFov ld = { { ovrLayerType_EyeFov } }; + mRenderLayer = ld; - mRecomendedEyeTargetSize[0] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Left, eyeFov[0], mCurrentPixelDensity); - mRecomendedEyeTargetSize[1] = ovrHmd_GetFovTextureSize(mDevice, ovrEye_Right, eyeFov[1], mCurrentPixelDensity); + GFXD3D11Device* device = static_cast(GFX); - // Calculate render target size - if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide) - { - // Setup a single texture, side-by-side viewports - Point2I rtSize( - mRecomendedEyeTargetSize[0].w + mRecomendedEyeTargetSize[1].w, - mRecomendedEyeTargetSize[0].h > mRecomendedEyeTargetSize[1].h ? 
mRecomendedEyeTargetSize[0].h : mRecomendedEyeTargetSize[1].h - ); + ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); + for (int i = 0; i < 2; i++) + { + mRenderLayer.Fov[i] = desc.DefaultEyeFov[i]; + mRenderLayer.Viewport[i].Size = ovr_GetFovTextureSize(mDevice, (ovrEyeType)i, mRenderLayer.Fov[i], mCurrentPixelDensity); + mEyeRenderDesc[i] = ovr_GetRenderDesc(mDevice, (ovrEyeType_)(ovrEye_Left+i), mRenderLayer.Fov[i]); + } - GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat(); - mRTFormat = targetFormat; + ovrSizei recommendedEyeTargetSize[2]; + recommendedEyeTargetSize[0] = mRenderLayer.Viewport[0].Size; + recommendedEyeTargetSize[1] = mRenderLayer.Viewport[1].Size; - rtSize = generateRenderTarget(mStereoRT, mStereoTexture, mStereoDepthTexture, rtSize); - - // Left - mEyeRenderSize[0] = rtSize; - mEyeRT[0] = mStereoRT; - mEyeTexture[0] = mStereoTexture; - mEyeViewport[0] = RectI(Point2I(0,0), Point2I((mRecomendedEyeTargetSize[0].w+1)/2, mRecomendedEyeTargetSize[0].h)); + if (mTextureSwapSet) + { + delete mTextureSwapSet; + mTextureSwapSet = NULL; + } - // Right - mEyeRenderSize[1] = rtSize; - mEyeRT[1] = mStereoRT; - mEyeTexture[1] = mStereoTexture; - mEyeViewport[1] = RectI(Point2I((mRecomendedEyeTargetSize[0].w+1)/2,0), Point2I((mRecomendedEyeTargetSize[1].w+1)/2, mRecomendedEyeTargetSize[1].h)); + // Calculate render target size + if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide) + { + // Setup a single texture, side-by-side viewports + Point2I rtSize( + recommendedEyeTargetSize[0].w + recommendedEyeTargetSize[1].w, + recommendedEyeTargetSize[0].h > recommendedEyeTargetSize[1].h ? recommendedEyeTargetSize[0].h : recommendedEyeTargetSize[1].h + ); - gLastStereoTexture = mEyeTexture[0]; - } - else - { - // No rendering, abort! 
- return false; - } + GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat(); + mRTFormat = targetFormat; - return true; + rtSize = generateRenderTarget(mStereoRT, mStereoDepthTexture, rtSize); + + // Generate the swap texture we need to store the final image + D3D11OculusTexture* tex = new D3D11OculusTexture(GFX); + if (tex->Init(mDevice, rtSize.x, rtSize.y)) + { + mTextureSwapSet = tex; + } + + mRenderLayer.ColorTexture[0] = tex->TextureSet; + mRenderLayer.ColorTexture[1] = tex->TextureSet; + + mRenderLayer.Viewport[0].Pos.x = 0; + mRenderLayer.Viewport[0].Pos.y = 0; + mRenderLayer.Viewport[1].Pos.x = (rtSize.x + 1) / 2; + mRenderLayer.Viewport[1].Pos.y = 0; + + // Left + mEyeRT[0] = mStereoRT; + mEyeViewport[0] = RectI(Point2I(mRenderLayer.Viewport[0].Pos.x, mRenderLayer.Viewport[0].Pos.y), Point2I(mRenderLayer.Viewport[0].Size.w, mRenderLayer.Viewport[0].Size.h)); + + // Right + mEyeRT[1] = mStereoRT; + mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h)); + + gLastStereoTexture = NULL; + + + GFXD3D11Device* device = static_cast(GFX); + + D3D11_TEXTURE2D_DESC dsDesc; + dsDesc.Width = rtSize.x; + dsDesc.Height = rtSize.y; + dsDesc.MipLevels = 1; + dsDesc.ArraySize = 1; + dsDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; + dsDesc.SampleDesc.Count = 1; + dsDesc.SampleDesc.Quality = 0; + dsDesc.Usage = D3D11_USAGE_DEFAULT; + dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + dsDesc.CPUAccessFlags = 0; + dsDesc.MiscFlags = 0; + + // Create typeless when we are rendering as non-sRGB since we will override the texture format in the RTV + bool reinterpretSrgbAsLinear = true; + unsigned compositorTextureFlags = 0; + if (reinterpretSrgbAsLinear) + compositorTextureFlags |= ovrSwapTextureSetD3D11_Typeless; + + ovrResult result = ovr_CreateMirrorTextureD3D11(mDevice, device->mD3DDevice, &dsDesc, compositorTextureFlags, &mDebugMirrorTexture); + + if (result == ovrError_DisplayLost || !mDebugMirrorTexture) + { + AssertFatal(false, "Something went wrong"); + return NULL; + } + + // Create texture handle so we can render it in-game + ovrD3D11Texture* mirror_tex = (ovrD3D11Texture*)mDebugMirrorTexture; + D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; + rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM; + rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; + + GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); + object->registerResourceWithDevice(GFX); + *(object->getSRViewPtr()) = mirror_tex->D3D11.pSRView; + *(object->get2DTexPtr()) = mirror_tex->D3D11.pTexture; + device->mD3DDevice->CreateRenderTargetView(mirror_tex->D3D11.pTexture, &rtvd, object->getRTViewPtr()); + + + // Add refs for texture release later on + if (object->getSRView()) object->getSRView()->AddRef(); + //object->getRTView()->AddRef(); + if (object->get2DTex()) object->get2DTex()->AddRef(); + object->isManaged = true; + + // Get the actual size of the texture... + D3D11_TEXTURE2D_DESC probeDesc; + ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC)); + object->get2DTex()->GetDesc(&probeDesc); + + object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0); + object->mBitmapSize = object->mTextureSize; + int fmt = probeDesc.Format; + + if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS) + { + object->mFormat = GFXFormatR8G8B8A8; // usual case + } + else + { + // TODO: improve this. this can be very bad. 
+ GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt); + object->mFormat = (GFXFormat)fmt; + } + + mDebugMirrorTextureHandle = object; + gLastStereoTexture = mDebugMirrorTextureHandle; + } + else + { + // No rendering, abort! + return false; + } + + return true; } String OculusVRHMDDevice::dumpMetrics() @@ -261,17 +478,14 @@ String OculusVRHMDDevice::dumpMetrics() F32 ipd = this->getIPD(); U32 lastStatus = mSensor->getLastTrackingStatus(); - sb.format(" | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s, Disort:%s%s%s", + sb.format(" | OVR Sensor %i | rot: %f %f %f, pos: %f %f %f, FOV (%f %f %f %f, %f %f %f %f), IPD %f, Track:%s%s", mActionCodeIndex, rot.x, rot.y, rot.z, pos.x, pos.y, pos.z, eyeFov[0].upTan, eyeFov[0].downTan, eyeFov[0].leftTan, eyeFov[0].rightTan, eyeFov[1].upTan, eyeFov[1].downTan, eyeFov[1].leftTan, eyeFov[1].rightTan, getIPD(), lastStatus & ovrStatus_OrientationTracked ? " ORIENT" : "", - lastStatus & ovrStatus_PositionTracked ? " POS" : "", - mCurrentDistortionCaps & ovrDistortionCap_TimeWarp ? " TIMEWARP" : "", - mCurrentDistortionCaps & ovrDistortionCap_Vignette ? " VIGNETTE" : "", - mCurrentDistortionCaps & ovrDistortionCap_Overdrive ? " OVERDRIVE" : ""); + lastStatus & ovrStatus_PositionTracked ? " POS" : ""); return sb.data(); } @@ -292,82 +506,23 @@ void OculusVRHMDDevice::updateRenderInfo() return; PlatformWindow *window = mDrawCanvas->getPlatformWindow(); - ovrFovPort eyeFov[2] = {mDevice->DefaultEyeFov[0], mDevice->DefaultEyeFov[1]}; + + ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); // Update window size if it's incorrect Point2I backbufferSize = mDrawCanvas->getBounds().extent; - // Reset - ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL); - -#ifdef OCULUS_USE_D3D - // Generate render target textures - GFXD3D9Device *d3d9GFX = dynamic_cast(GFX); - if (d3d9GFX) - { - ovrD3D9Config cfg; - cfg.D3D9.Header.API = ovrRenderAPI_D3D9; - cfg.D3D9.Header.Multisample = 0; - cfg.D3D9.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y); - cfg.D3D9.pDevice = d3d9GFX->getDevice(); - cfg.D3D9.pDevice->GetSwapChain(0, &cfg.D3D9.pSwapChain); - - // Finally setup! - if (!setupTargets()) - { - onDeviceDestroy(); - return; - } - - ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL); - - if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc )) - { - Con::errorf("Couldn't configure oculus rendering!"); - return; - } - } -#endif - -#ifdef OCULUS_USE_GL - // Generate render target textures - GFXGLDevice *glGFX = dynamic_cast(GFX); - if (glGFX) - { - ovrGLConfig cfg; - cfg.OGL.Header.API = ovrRenderAPI_OpenGL; - cfg.OGL.Header.Multisample = 0; - cfg.OGL.Header.BackBufferSize = OVR::Sizei(backbufferSize.x, backbufferSize.y); - -#ifdef WIN32 - cfg.OGL.Window = GetActiveWindow();//window->getPlatformDrawable(); - cfg.OGL.DC = wglGetCurrentDC(); -#else - cfg.OGL.Disp = NULL; -#endif - - // Finally setup! - if (!setupTargets()) - { - onDeviceDestroy(); - return; - } - - ovrHmd_AttachToWindow(mDevice, window->getPlatformDrawable(), NULL, NULL); - - if (!ovrHmd_ConfigureRendering( mDevice, &cfg.Config, mCurrentDistortionCaps, eyeFov, mEyeRenderDesc )) - { - Con::errorf("Couldn't configure oculus rendering!"); - return; - } - } -#endif - + // Finally setup! 
+ if (!setupTargets()) + { + onDeviceDestroy(); + return; + } mRenderConfigurationDirty = false; } -Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize) +Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &depth, Point2I desiredSize) { // Texture size that we already have might be big enough. Point2I newRTSize; @@ -402,12 +557,12 @@ Point2I OculusVRHMDDevice::generateRenderTarget(GFXTextureTargetRef &target, GFX newRTSize.setMax(Point2I(64, 64)); // Stereo RT needs to be the same size as the recommended RT - if ( newRT || texture.getWidthHeight() != newRTSize ) + /*if ( newRT || mDebugStereoTexture.getWidthHeight() != newRTSize ) { - texture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) ); - target->attachTexture( GFXTextureTarget::Color0, texture ); - Con::printf("generateRenderTarget generated %x", texture.getPointer()); - } + mDebugStereoTexture.set( newRTSize.x, newRTSize.y, mRTFormat, &VRTextureProfile, avar( "%s() - (line %d)", __FUNCTION__, __LINE__ ) ); + target->attachTexture( GFXTextureTarget::Color0, mDebugStereoTexture); + Con::printf("generateRenderTarget generated %x", mDebugStereoTexture.getPointer()); + }*/ if ( depth.getWidthHeight() != newRTSize ) { @@ -424,6 +579,13 @@ void OculusVRHMDDevice::clearRenderTargets() mStereoRT = NULL; mEyeRT[0] = NULL; mEyeRT[1] = NULL; + + if (mDebugMirrorTexture) + { + ovr_DestroyMirrorTexture(mDevice, mDebugMirrorTexture); + mDebugMirrorTexture = NULL; + mDebugMirrorTextureHandle = NULL; + } } void OculusVRHMDDevice::updateCaps() @@ -431,34 +593,7 @@ void OculusVRHMDDevice::updateCaps() if (!mIsValid || !mDevice) return; - U32 oldDistortionCaps = mCurrentDistortionCaps; - - // Distortion - if (mTimewarp) - { - mCurrentDistortionCaps |= ovrDistortionCap_TimeWarp; - } - else - { - mCurrentDistortionCaps &= ~ovrDistortionCap_TimeWarp; - } - - if (oldDistortionCaps != mCurrentDistortionCaps) - { - mRenderConfigurationDirty = true; - } - - // Device - if (!mVsync) - { - mCurrentCaps |= ovrHmdCap_NoVSync; - } - else - { - mCurrentCaps &= ~ovrHmdCap_NoVSync; - } - - ovrHmd_SetEnabledCaps(mDevice, mCurrentCaps); + ovr_SetEnabledCaps(mDevice, mCurrentCaps); } static bool sInFrame = false; // protects against recursive onStartFrame calls @@ -469,108 +604,64 @@ void OculusVRHMDDevice::onStartFrame() return; sInFrame = true; - -#ifndef OCULUS_DEBUG_FRAME - ovrHmd_BeginFrame(mDevice, 0); -#endif ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset }; - ovrHmd_GetEyePoses(mDevice, 0, hmdToEyeViewOffset, mCurrentEyePoses, &mLastTrackingState); + ovrTrackingState hmdState = ovr_GetTrackingState(mDevice, 0, ovrTrue); + ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, mRenderLayer.RenderPose); for (U32 i=0; i<2; i++) { - mCurrentEyePoses[i].Position.x *= OculusVRDevice::smPositionTrackingScale; - mCurrentEyePoses[i].Position.y *= OculusVRDevice::smPositionTrackingScale; - mCurrentEyePoses[i].Position.z *= OculusVRDevice::smPositionTrackingScale; + mRenderLayer.RenderPose[i].Position.x *= OculusVRDevice::smPositionTrackingScale; + mRenderLayer.RenderPose[i].Position.y *= OculusVRDevice::smPositionTrackingScale; + mRenderLayer.RenderPose[i].Position.z *= OculusVRDevice::smPositionTrackingScale; } + mRenderLayer.SensorSampleTime = ovr_GetTimeInSeconds(); + + // Set current dest texture on stereo 
render target + D3D11OculusTexture* texSwap = (D3D11OculusTexture*)mTextureSwapSet; + mStereoRT->attachTexture(GFXTextureTarget::Color0, texSwap->TexRtv[texSwap->TextureSet->CurrentIndex]); + sInFrame = false; mFrameReady = true; } void OculusVRHMDDevice::onEndFrame() { - if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady) + if (!mIsValid || !mDevice || !mDrawCanvas || sInFrame || !mFrameReady || !mTextureSwapSet) return; Point2I eyeSize; GFXTarget *windowTarget = mDrawCanvas->getPlatformWindow()->getGFXTarget(); -#ifndef OCULUS_DEBUG_FRAME - -#ifdef OCULUS_USE_D3D - GFXD3D9Device *d3d9GFX = dynamic_cast(GFX); - if (d3d9GFX && mEyeRT[0].getPointer()) - { - // Left - ovrD3D9Texture eyeTextures[2]; - eyeSize = mEyeTexture[0].getWidthHeight(); - eyeTextures[0].D3D9.Header.API = ovrRenderAPI_D3D9; - eyeTextures[0].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x; - eyeTextures[0].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y; - eyeTextures[0].D3D9.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x; - eyeTextures[0].D3D9.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y; - eyeTextures[0].D3D9.Header.TextureSize.w = eyeSize.x; - eyeTextures[0].D3D9.Header.TextureSize.h = eyeSize.y; - eyeTextures[0].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast(mEyeTexture[0].getPointer())->get2DTex() : NULL; + GFXD3D11Device *d3d11GFX = dynamic_cast(GFX); - // Right - eyeSize = mEyeTexture[1].getWidthHeight(); - eyeTextures[1].D3D9.Header.API = ovrRenderAPI_D3D9; - eyeTextures[1].D3D9.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x; - eyeTextures[1].D3D9.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y; - eyeTextures[1].D3D9.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x; - eyeTextures[1].D3D9.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y; - eyeTextures[1].D3D9.Header.TextureSize.w = eyeSize.x; - eyeTextures[1].D3D9.Header.TextureSize.h = eyeSize.y; - eyeTextures[1].D3D9.pTexture = mEyeRT[0].getPointer() ? static_cast(mEyeTexture[1].getPointer())->get2DTex() : NULL; + ovrViewScaleDesc viewScaleDesc; + ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset }; + viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f; + viewScaleDesc.HmdToEyeViewOffset[0] = hmdToEyeViewOffset[0]; + viewScaleDesc.HmdToEyeViewOffset[1] = hmdToEyeViewOffset[1]; - // Submit! - GFX->disableShaders(); - GFX->setActiveRenderTarget(windowTarget); - GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0); - ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0])); - } -#endif + ovrLayerDirect ld = { { ovrLayerType_Direct } }; + mDebugRenderLayer = ld; -#ifdef OCULUS_USE_GL - GFXGLDevice *glGFX = dynamic_cast(GFX); - if (glGFX && mEyeRT[0].getPointer()) - { - // Left - ovrGLTexture eyeTextures[2]; - eyeSize = mEyeTexture[0].getWidthHeight(); - eyeTextures[0].OGL.Header.API = ovrRenderAPI_GL; - eyeTextures[0].OGL.Header.RenderViewport.Pos.x = mEyeViewport[0].point.x; - eyeTextures[0].OGL.Header.RenderViewport.Pos.y = mEyeViewport[0].point.y; - eyeTextures[0].OGL.Header.RenderViewport.Size.w = mEyeViewport[0].extent.x; - eyeTextures[0].OGL.Header.RenderViewport.Size.h = mEyeViewport[0].extent.y; - eyeTextures[0].OGL.Header.TextureSize.w = eyeSize.x; - eyeTextures[0].OGL.Header.TextureSize.h = eyeSize.y; - eyeTextures[0].OGL.TexId = mEyeRT[0].getPointer() ? 
static_cast(mEyeTexture[0].getPointer())->getHandle() : 0; + mDebugRenderLayer.ColorTexture[0] = mRenderLayer.ColorTexture[0]; + mDebugRenderLayer.ColorTexture[1] = mRenderLayer.ColorTexture[1]; + mDebugRenderLayer.Viewport[0] = mRenderLayer.Viewport[0]; + mDebugRenderLayer.Viewport[1] = mRenderLayer.Viewport[1]; - // Right - eyeSize = mEyeTexture[1].getWidthHeight(); - eyeTextures[1].OGL.Header.API = ovrRenderAPI_GL; - eyeTextures[1].OGL.Header.RenderViewport.Pos.x = mEyeViewport[1].point.x; - eyeTextures[1].OGL.Header.RenderViewport.Pos.y = mEyeViewport[1].point.y; - eyeTextures[1].OGL.Header.RenderViewport.Size.w = mEyeViewport[1].extent.x; - eyeTextures[1].OGL.Header.RenderViewport.Size.h = mEyeViewport[1].extent.y; - eyeTextures[1].OGL.Header.TextureSize.w = eyeSize.x; - eyeTextures[1].OGL.Header.TextureSize.h = eyeSize.y; - eyeTextures[0].OGL.TexId = mEyeRT[1].getPointer() ? static_cast(mEyeTexture[1].getPointer())->getHandle() : 0; + // TODO: use ovrViewScaleDesc + ovrLayerHeader* layers = &mRenderLayer.Header; + ovrResult result = ovr_SubmitFrame(mDevice, 0, &viewScaleDesc, &layers, 1); + mTextureSwapSet->AdvanceToNextTexture(); - // Submit! - GFX->disableShaders(); + if (OVR_SUCCESS(result)) + { + int woo = 1; + } - GFX->setActiveRenderTarget(windowTarget); - GFX->clear(GFXClearZBuffer | GFXClearStencil | GFXClearTarget, ColorI(255,0,0), 1.0f, 0); - ovrHmd_EndFrame(mDevice, mCurrentEyePoses, (ovrTexture*)(&eyeTextures[0])); - } -#endif - -#endif + // TODO: render preview in display? mFrameReady = false; } @@ -578,7 +669,7 @@ void OculusVRHMDDevice::onEndFrame() void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const { // Directly set the rotation and position from the eye transforms - ovrPosef pose = mCurrentEyePoses[eyeId]; + ovrPosef pose = mRenderLayer.RenderPose[eyeId]; OVR::Quatf orientation = pose.Orientation; const OVR::Vector3f position = pose.Position; @@ -605,18 +696,17 @@ void OculusVRHMDDevice::onDeviceDestroy() mEyeRT[1]->zombify(); } + if (mTextureSwapSet) + { + delete mTextureSwapSet; + mTextureSwapSet = NULL; + } + mStereoRT = NULL; - mStereoTexture = NULL; mStereoDepthTexture = NULL; - mEyeTexture[0] = NULL; - mEyeDepthTexture[0] = NULL; - mEyeTexture[1] = NULL; - mEyeDepthTexture[1] = NULL; mEyeRT[0] = NULL; mEyeRT[1] = NULL; mRenderConfigurationDirty = true; - - ovrHmd_ConfigureRendering(mDevice, NULL, 0, NULL, NULL); } diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h index 996a0ca14..13a5533b2 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h @@ -34,12 +34,14 @@ #include "math/mRect.h" #include "gfx/gfxDevice.h" -#include "OVR_CAPI_0_5_0.h" +#include "OVR_CAPI.h" class GuiCanvas; class GameConnection; struct DisplayPose; class OculusVRSensorDevice; +struct OculusTexture; + class OculusVRHMDDevice { @@ -59,9 +61,6 @@ protected: ovrHmd mDevice; - U32 mSupportedDistortionCaps; - U32 mCurrentDistortionCaps; - U32 mSupportedCaps; U32 mCurrentCaps; @@ -70,15 +69,12 @@ protected: String mManufacturer; U32 mVersion; - // Windows display device name used in EnumDisplaySettings/CreateDC - String mDisplayDeviceName; + // Device type (D3D11, etc) + String mDisplayDeviceType; - // MacOS display ID + // Adapter index S32 mDisplayId; - // Desktop coordinate position of the screen (can be negative; may not be present on all platforms) - Point2I mDesktopPosition; - // Whole screen resolution 
Point2I mResolution; @@ -99,18 +95,15 @@ protected: Point2F mProjectionCenterOffset; // Current pose of eyes - ovrPosef mCurrentEyePoses[2]; ovrEyeRenderDesc mEyeRenderDesc[2]; - ovrFovPort mCurrentFovPorts[2]; - - Point2I mWindowSize; - GameConnection *mConnection; OculusVRSensorDevice *mSensor; U32 mActionCodeIndex; + ovrGraphicsLuid mLuid; + protected: void updateRenderInfo(); @@ -121,7 +114,7 @@ public: void cleanUp(); // Set the HMD properties based on information from the OVR device - void set(ovrHmd hmd, U32 actionCodeIndex); + void set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeIndex); // Sets optimal display size for canvas void setOptimalDisplaySize(GuiCanvas *canvas); @@ -133,14 +126,11 @@ public: U32 getVersion() const { return mVersion; } // Windows display device name used in EnumDisplaySettings/CreateDC - const char* getDisplayDeviceName() const { return mDisplayDeviceName.c_str(); } + const char* getDisplayDeviceType () const { return mDisplayDeviceType.c_str(); } // MacOS display ID S32 getDisplayDeviceId() const { return mDisplayId; } - // Desktop coordinate position of the screen (can be negative; may not be present on all platforms) - const Point2I& getDesktopPosition() const { return mDesktopPosition; } - // Whole screen resolution const Point2I& getResolution() const { return mResolution; } @@ -166,7 +156,7 @@ public: void getStereoViewports(RectI *dest) const { dMemcpy(dest, mEyeViewport, sizeof(mEyeViewport)); } void getStereoTargets(GFXTextureTarget **dest) const { dest[0] = mEyeRT[0]; dest[1] = mEyeRT[1]; } - void getFovPorts(FovPort *dest) const { dMemcpy(dest, mCurrentFovPorts, sizeof(mCurrentFovPorts)); } + void getFovPorts(FovPort *dest) const { dMemcpy(dest, &mRenderLayer.Fov[0], sizeof(mRenderLayer.Fov)); } /// Returns eye offsets in torque coordinate space, i.e. z being up, x being left-right, and y being depth (forward). 
void getEyeOffsets(Point3F *offsets) const { @@ -181,7 +171,7 @@ public: void onEndFrame(); void onDeviceDestroy(); - Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &texture, GFXTexHandle &depth, Point2I desiredSize); + Point2I generateRenderTarget(GFXTextureTargetRef &target, GFXTexHandle &depth, Point2I desiredSize); void clearRenderTargets(); bool isDisplayingWarning(); @@ -198,20 +188,12 @@ public: String dumpMetrics(); // Stereo RT - GFXTexHandle mStereoTexture; + GFXTexHandle mDebugStereoTexture; GFXTexHandle mStereoDepthTexture; GFXTextureTargetRef mStereoRT; // Eye RTs (if we are using separate targets) GFXTextureTargetRef mEyeRT[2]; - GFXTexHandle mEyeTexture[2]; - GFXTexHandle mEyeDepthTexture[2]; - - // Current render target size for each eye - Point2I mEyeRenderSize[2]; - - // Recommended eye target size for each eye - ovrSizei mRecomendedEyeTargetSize[2]; // Desired viewport for each eye RectI mEyeViewport[2]; @@ -220,6 +202,12 @@ public: F32 smDesiredPixelDensity; ovrTrackingState mLastTrackingState; + OculusTexture* mTextureSwapSet; + ovrLayerEyeFov mRenderLayer; + ovrLayerDirect mDebugRenderLayer; + ovrViewScaleDesc mScaleDesc; + ovrTexture* mDebugMirrorTexture; + GFXTexHandle mDebugMirrorTextureHandle; GFXDevice::GFXDeviceRenderStyles mDesiredRenderingMode; diff --git a/Engine/source/platform/input/oculusVR/oculusVRSensorData.h b/Engine/source/platform/input/oculusVR/oculusVRSensorData.h index 36436ab45..739751465 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRSensorData.h +++ b/Engine/source/platform/input/oculusVR/oculusVRSensorData.h @@ -27,7 +27,7 @@ #include "math/mMatrix.h" #include "math/mQuat.h" #include "math/mPoint2.h" -#include "OVR_CAPI_0_5_0.h" +#include "OVR_CAPI_0_8_0.h" struct OculusVRSensorData { diff --git a/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp index 47ad51770..cfdaa5be1 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp @@ -24,8 +24,9 @@ #include "platform/input/oculusVR/oculusVRSensorData.h" #include "platform/input/oculusVR/oculusVRUtil.h" #include "platform/platformInput.h" -#include"console/simBase.h" +#include "console/simBase.h" #include "console/engineAPI.h" +#include "OVR_CAPI_0_8_0.h" U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0}; U32 OculusVRSensorDevice::OVR_SENSORROTANG[OculusVRConstants::MaxSensors] = {0}; @@ -66,7 +67,7 @@ void OculusVRSensorDevice::cleanUp() { mIsValid = false; - ovrHmd_ConfigureTracking(mDevice, 0, 0); + ovr_ConfigureTracking(mDevice, 0, 0); } void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex) @@ -74,7 +75,7 @@ void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex) mIsValid = false; mDevice = sensor; - mSupportedTrackingCaps = sensor->TrackingCaps; + mSupportedTrackingCaps = ovr_GetTrackingCaps(sensor); mCurrentTrackingCaps = ovrTrackingCap_Orientation | ovrTrackingCap_MagYawCorrection | ovrTrackingCap_Position; mCurrentTrackingCaps = mSupportedTrackingCaps & mCurrentTrackingCaps; @@ -82,15 +83,17 @@ void OculusVRSensorDevice::set(ovrHmd sensor, S32 actionCodeIndex) mPositionTrackingDisabled = !(mCurrentTrackingCaps & ovrTrackingCap_Position); + ovrHmdDesc desc = ovr_GetHmdDesc(sensor); + // DeviceInfo - mProductName = sensor->ProductName; - mManufacturer = sensor->Manufacturer; - mVersion = sensor->Type; + mProductName = desc.ProductName; + 
mManufacturer = desc.Manufacturer; + mVersion = desc.Type; // SensorInfo - mVendorId = sensor->VendorId; - mProductId = sensor->ProductId; - mSerialNumber = sensor->SerialNumber; + mVendorId = desc.VendorId; + mProductId = desc.ProductId; + mSerialNumber = desc.SerialNumber; mActionCodeIndex = actionCodeIndex; @@ -163,7 +166,7 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo return false; // Grab current state - ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds()); + ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue); mLastStatus = ts.StatusFlags; // Store the current data from the sensor and compare with previous data @@ -249,7 +252,7 @@ void OculusVRSensorDevice::reset() if(!mIsValid) return; - ovrHmd_RecenterPose(mDevice); + ovr_RecenterPose(mDevice); } bool OculusVRSensorDevice::getYawCorrection() const @@ -322,7 +325,7 @@ EulerF OculusVRSensorDevice::getEulerRotation() if(!mIsValid) return Point3F::Zero; - ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds()); + ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue); OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation; // Sensor rotation in Euler format @@ -337,7 +340,7 @@ EulerF OculusVRSensorDevice::getRawEulerRotation() if(!mIsValid) return Point3F::Zero; - ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds()); + ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue); OVR::Quatf orientation = ts.HeadPose.ThePose.Orientation; // Sensor rotation in Euler format @@ -351,7 +354,7 @@ VectorF OculusVRSensorDevice::getAcceleration() if(!mIsValid) return VectorF::Zero; - ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds()); + ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue); OVR::Vector3f a = ts.HeadPose.LinearAcceleration; // Sensor acceleration in VectorF format @@ -366,7 +369,7 @@ EulerF OculusVRSensorDevice::getAngularVelocity() if(!mIsValid) return EulerF::Zero; - ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds()); + ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue); OVR::Vector3f v = ts.HeadPose.AngularVelocity; // Sensor angular velocity in EulerF format @@ -381,7 +384,7 @@ Point3F OculusVRSensorDevice::getPosition() if(!mIsValid) return Point3F(); - ovrTrackingState ts = ovrHmd_GetTrackingState(mDevice, ovr_GetTimeInSeconds()); + ovrTrackingState ts = ovr_GetTrackingState(mDevice, ovr_GetTimeInSeconds(), ovrTrue); OVR::Vector3f v = ts.HeadPose.ThePose.Position; return Point3F(-v.x, v.z, -v.y); } @@ -399,5 +402,5 @@ void OculusVRSensorDevice::updateTrackingCaps() if (!mPositionTrackingDisabled) mCurrentTrackingCaps |= ovrTrackingCap_Position; - ovrHmd_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0); + ovr_ConfigureTracking(mDevice, mCurrentTrackingCaps, 0); } diff --git a/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.h b/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.h index 418a9e28f..4c3cb6ccd 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.h @@ -30,7 +30,7 @@ #include "math/mPoint4.h" #include "platform/input/oculusVR/oculusVRConstants.h" #include "platform/types.h" -#include "OVR_CAPI_0_5_0.h" +#include "OVR_CAPI.h" struct OculusVRSensorData; diff --git 
a/Engine/source/platform/input/oculusVR/oculusVRUtil.h b/Engine/source/platform/input/oculusVR/oculusVRUtil.h index 389af9da5..8056bb3c8 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRUtil.h +++ b/Engine/source/platform/input/oculusVR/oculusVRUtil.h @@ -25,7 +25,7 @@ #include "math/mPoint2.h" #include "math/mMatrix.h" -#include "OVR_CAPI_0_5_0.h" +#include "OVR_CAPI_0_8_0.h" // NOTE: math code in oculus uses "Offset" which is a preprocessor macro #define TorqueOffset Offset diff --git a/Engine/source/postFx/postEffect.cpp b/Engine/source/postFx/postEffect.cpp index c388e0abe..7e4a6fed8 100644 --- a/Engine/source/postFx/postEffect.cpp +++ b/Engine/source/postFx/postEffect.cpp @@ -154,7 +154,6 @@ GFX_ImplementTextureProfile( VRTextureProfile, GFX_ImplementTextureProfile( VRDepthProfile, GFXTextureProfile::DiffuseMap, GFXTextureProfile::PreserveSize | - GFXTextureProfile::RenderTarget | GFXTextureProfile::NoMipmap | GFXTextureProfile::ZTarget, GFXTextureProfile::NONE ); diff --git a/Templates/Full/game/scripts/client/default.bind.cs b/Templates/Full/game/scripts/client/default.bind.cs index 9dcbca96b..1af881a81 100644 --- a/Templates/Full/game/scripts/client/default.bind.cs +++ b/Templates/Full/game/scripts/client/default.bind.cs @@ -752,3 +752,21 @@ vehicleMap.bind( gamepad, btn_b, brake ); vehicleMap.bind( gamepad, btn_x, movebackward ); // bind exiting the vehicle to a button vehicleMap.bindCmd(gamepad, btn_y,"getout();",""); + + +// ---------------------------------------------------------------------------- +// Oculus Rift +// ---------------------------------------------------------------------------- + +function OVRSensorRotEuler(%pitch, %roll, %yaw) +{ + //echo("Sensor euler: " @ %pitch SPC %roll SPC %yaw); + $mvRotZ0 = %yaw; + $mvRotX0 = %pitch; + $mvRotY0 = %roll; +} + +$mvRotIsEuler0 = true; +$OculusVR::GenerateAngleAxisRotationEvents = false; +$OculusVR::GenerateEulerRotationEvents = true; +moveMap.bind( oculusvr, ovr_sensorrotang0, OVRSensorRotEuler ); From 3dc21007658b1ca788c03021c62622c12f32f4b6 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Mon, 21 Mar 2016 15:49:33 +0000 Subject: [PATCH 02/33] Ignore alpha when rendering debug texture --- Engine/source/gui/3d/guiTSControl.cpp | 2 +- Engine/source/scene/sceneManager.cpp | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index d7222dcc2..9383ff605 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -719,7 +719,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) GFXStateBlockDesc bitmapStretchSR; bitmapStretchSR.setCullMode(GFXCullNone); bitmapStretchSR.setZReadWrite(false, false); - bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); + bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); bitmapStretchSR.samplersDefined = true; bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); diff --git a/Engine/source/scene/sceneManager.cpp b/Engine/source/scene/sceneManager.cpp index b68de9def..8cf74c3b7 100644 --- a/Engine/source/scene/sceneManager.cpp +++ b/Engine/source/scene/sceneManager.cpp @@ -259,7 +259,7 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S renderStateLeft.setSceneRenderStyle(SRS_SideBySide); renderStateLeft.setSceneRenderField(0); - renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone ); + renderSceneNoLights( &renderStateLeft, 
objectMask, baseObject, baseZone ); // left // Indicate that we've just finished a field //GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(255,0,0), 1.0f, 0); @@ -279,7 +279,7 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S renderStateRight.setSceneRenderStyle(SRS_SideBySide); renderStateRight.setSceneRenderField(1); - renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); + renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); // right // Indicate that we've just finished a field //GFX->clear(GFXClearTarget | GFXClearZBuffer | GFXClearStencil, ColorI(0,255,0), 1.0f, 0); From e239d106f52942f6811d42ade8a5890cc9017ba4 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Wed, 23 Mar 2016 15:18:14 +0000 Subject: [PATCH 03/33] Use correct bgra format --- .../platform/input/oculusVR/oculusVRHMDDevice.cpp | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp index 2d1bef8f9..c2bd152a0 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp @@ -81,7 +81,7 @@ struct D3D11OculusTexture : public OculusTexture dsDesc.Height = sizeH; dsDesc.MipLevels = 1; dsDesc.ArraySize = 1; - dsDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; + dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; dsDesc.SampleDesc.Count = 1; // No multi-sampling allowed dsDesc.SampleDesc.Quality = 0; dsDesc.Usage = D3D11_USAGE_DEFAULT; @@ -101,7 +101,7 @@ struct D3D11OculusTexture : public OculusTexture { ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i]; D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; - rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM; + rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM; rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); @@ -125,7 +125,7 @@ struct D3D11OculusTexture : public OculusTexture object->mBitmapSize = object->mTextureSize; int fmt = probeDesc.Format; - if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS) + if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS) { object->mFormat = GFXFormatR8G8B8A8; // usual case } @@ -391,7 +391,7 @@ bool OculusVRHMDDevice::setupTargets() dsDesc.Height = rtSize.y; dsDesc.MipLevels = 1; dsDesc.ArraySize = 1; - dsDesc.Format = DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; + dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; dsDesc.SampleDesc.Count = 1; dsDesc.SampleDesc.Quality = 0; dsDesc.Usage = D3D11_USAGE_DEFAULT; @@ -416,7 +416,7 @@ bool OculusVRHMDDevice::setupTargets() // Create texture handle so we can render it in-game ovrD3D11Texture* mirror_tex = (ovrD3D11Texture*)mDebugMirrorTexture; D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; - rtvd.Format = DXGI_FORMAT_R8G8B8A8_UNORM; + rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM; rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); @@ -441,7 +441,7 @@ bool OculusVRHMDDevice::setupTargets() object->mBitmapSize = object->mTextureSize; int fmt = probeDesc.Format; - if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS) + if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS) { object->mFormat = 
GFXFormatR8G8B8A8; // usual case } @@ -652,7 +652,7 @@ void OculusVRHMDDevice::onEndFrame() mDebugRenderLayer.Viewport[1] = mRenderLayer.Viewport[1]; // TODO: use ovrViewScaleDesc - ovrLayerHeader* layers = &mRenderLayer.Header; + ovrLayerHeader* layers = &mRenderLayer.Header; ovrResult result = ovr_SubmitFrame(mDevice, 0, &viewScaleDesc, &layers, 1); mTextureSwapSet->AdvanceToNextTexture(); From ba91478fade7aefe181fca2ffd77c8817b378982 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 17 Apr 2016 22:19:42 +0100 Subject: [PATCH 04/33] Baseline working openvr code --- Engine/source/T3D/camera.cpp | 38 - Engine/source/T3D/camera.h | 1 - .../T3D/gameBase/extended/extendedMove.cpp | 5 +- .../gameBase/extended/extendedMoveList.cpp | 10 +- Engine/source/T3D/gameFunctions.cpp | 4 + Engine/source/T3D/player.cpp | 84 +- Engine/source/T3D/player.h | 1 - Engine/source/T3D/shapeBase.cpp | 57 +- Engine/source/T3D/shapeBase.h | 3 - Engine/source/gfx/gfxDevice.h | 7 + Engine/source/gui/3d/guiTSControl.cpp | 545 ++++++----- Engine/source/gui/3d/guiTSControl.h | 10 +- Engine/source/platform/input/event.cpp | 19 + Engine/source/platform/input/event.h | 3 + .../input/oculusVR/oculusVRDevice.cpp | 27 +- .../platform/input/oculusVR/oculusVRDevice.h | 4 +- .../input/oculusVR/oculusVRHMDDevice.cpp | 20 +- .../input/oculusVR/oculusVRHMDDevice.h | 2 + .../input/oculusVR/oculusVRSensorDevice.cpp | 4 +- .../platform/input/oculusVR/oculusVRUtil.cpp | 5 +- .../platform/input/openVR/openVRProvider.cpp | 886 ++++++++++++++++++ .../platform/input/openVR/openVRProvider.h | 172 ++++ .../source/platform/output/IDisplayDevice.h | 13 +- 23 files changed, 1463 insertions(+), 457 deletions(-) create mode 100644 Engine/source/platform/input/openVR/openVRProvider.cpp create mode 100644 Engine/source/platform/input/openVR/openVRProvider.h diff --git a/Engine/source/T3D/camera.cpp b/Engine/source/T3D/camera.cpp index 59002d9bf..11914542a 100644 --- a/Engine/source/T3D/camera.cpp +++ b/Engine/source/T3D/camera.cpp @@ -393,44 +393,6 @@ void Camera::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, Mat } } -DisplayPose Camera::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose) -{ - // NOTE: this is intended to be similar to updateMove - DisplayPose outPose; - outPose.orientation = EulerF(0,0,0); - outPose.position = inPose.position; - - // Pitch - outPose.orientation.x = (inPose.orientation.x - mLastAbsolutePitch); - - // Constrain the range of mRot.x - while (outPose.orientation.x < -M_PI_F) - outPose.orientation.x += M_2PI_F; - while (outPose.orientation.x > M_PI_F) - outPose.orientation.x -= M_2PI_F; - - // Yaw - outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw); - - // Constrain the range of mRot.z - while (outPose.orientation.z < -M_PI_F) - outPose.orientation.z += M_2PI_F; - while (outPose.orientation.z > M_PI_F) - outPose.orientation.z -= M_2PI_F; - - // Bank - if (mDataBlock->cameraCanBank) - { - outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll); - } - - // Constrain the range of mRot.y - while (outPose.orientation.y > M_PI_F) - outPose.orientation.y -= M_2PI_F; - - return outPose; -} - //---------------------------------------------------------------------------- F32 Camera::getCameraFov() diff --git a/Engine/source/T3D/camera.h b/Engine/source/T3D/camera.h index 6e835d982..5e760e61f 100644 --- a/Engine/source/T3D/camera.h +++ b/Engine/source/T3D/camera.h @@ -237,7 +237,6 @@ class Camera: public ShapeBase virtual void interpolateTick( F32 delta); virtual 
void getCameraTransform( F32* pos,MatrixF* mat ); virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat ); - virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose); virtual void writePacketData( GameConnection* conn, BitStream* stream ); virtual void readPacketData( GameConnection* conn, BitStream* stream ); diff --git a/Engine/source/T3D/gameBase/extended/extendedMove.cpp b/Engine/source/T3D/gameBase/extended/extendedMove.cpp index 9040fce75..7b260adb5 100644 --- a/Engine/source/T3D/gameBase/extended/extendedMove.cpp +++ b/Engine/source/T3D/gameBase/extended/extendedMove.cpp @@ -1,6 +1,7 @@ #include "T3D/gameBase/extended/extendedMove.h" #include "core/stream/bitStream.h" #include "math/mathIO.h" +#include "math/mAngAxis.h" #include "core/module.h" #include "console/consoleTypes.h" #include "core/strings/stringFunctions.h" @@ -268,7 +269,7 @@ void ExtendedMove::clamp() crotX[i] = CLAMPROT(rotX[i]); crotY[i] = CLAMPROT(rotY[i]); crotZ[i] = CLAMPROT(rotZ[i]); - crotW[i] = CLAMPROT(rotW[i]); + crotW[i] = CLAMPROT(rotW[i] / M_2PI_F); } } @@ -293,7 +294,7 @@ void ExtendedMove::unclamp() rotX[i] = UNCLAMPROT(crotX[i]); rotY[i] = UNCLAMPROT(crotY[i]); rotZ[i] = UNCLAMPROT(crotZ[i]); - rotW[i] = UNCLAMPROT(crotW[i]); + rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F; } } diff --git a/Engine/source/T3D/gameBase/extended/extendedMoveList.cpp b/Engine/source/T3D/gameBase/extended/extendedMoveList.cpp index 14292ef86..155aa0bc4 100644 --- a/Engine/source/T3D/gameBase/extended/extendedMoveList.cpp +++ b/Engine/source/T3D/gameBase/extended/extendedMoveList.cpp @@ -75,11 +75,11 @@ bool ExtendedMoveList::getNextExtMove( ExtendedMove &curMove ) else { //Rotation is passed in as an Angle Axis in degrees. We need to convert this into a Quat. 
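// Sketch (editor's illustration, not part of the patch): the hunk below stops converting the
// angle-axis input into a quaternion and instead packs the raw axis/angle into the move --
// axis in rotX/rotY/rotZ, angle in radians in rotW -- which is why clamp()/unclamp() above
// rescale rotW by M_2PI_F before the network clamp. A receiver that still needs a quaternion
// can rebuild one from those four floats; extMoveToQuat is a hypothetical helper, not an
// engine function.
static QuatF extMoveToQuat(const ExtendedMove &m, U32 i)
{
   const Point3F axis(m.rotX[i], m.rotY[i], m.rotZ[i]);   // rotation axis (assumed normalized)
   const F32 half = m.rotW[i] * 0.5f;                     // half-angle, rotW is in radians
   const F32 s = mSin(half);
   return QuatF(axis.x * s, axis.y * s, axis.z * s, mCos(half));
}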
- QuatF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i])); - curMove.rotX[i] = q.x; - curMove.rotY[i] = q.y; - curMove.rotZ[i] = q.z; - curMove.rotW[i] = q.w; + AngAxisF q(Point3F(ExtendedMoveManager::mRotAX[i], ExtendedMoveManager::mRotAY[i], ExtendedMoveManager::mRotAZ[i]), mDegToRad(ExtendedMoveManager::mRotAA[i])); + curMove.rotX[i] = q.axis.x; + curMove.rotY[i] = q.axis.y; + curMove.rotZ[i] = q.axis.z; + curMove.rotW[i] = q.angle; } } diff --git a/Engine/source/T3D/gameFunctions.cpp b/Engine/source/T3D/gameFunctions.cpp index 3c71b57b5..ceb6945a8 100644 --- a/Engine/source/T3D/gameFunctions.cpp +++ b/Engine/source/T3D/gameFunctions.cpp @@ -355,6 +355,7 @@ bool GameProcessCameraQuery(CameraQuery *query) query->eyeOffset[1] = Point3F::Zero; query->hasFovPort = false; query->hasStereoTargets = false; + query->displayDevice = NULL; F32 cameraFov = 0.0f; bool fovSet = false; @@ -364,6 +365,9 @@ bool GameProcessCameraQuery(CameraQuery *query) if(!gEditingMission && connection->hasDisplayDevice()) { IDisplayDevice* display = connection->getDisplayDevice(); + + query->displayDevice = display; + // Note: all eye values are invalid until this is called display->setDrawCanvas(query->drawCanvas); diff --git a/Engine/source/T3D/player.cpp b/Engine/source/T3D/player.cpp index 4393073a6..811352aa4 100644 --- a/Engine/source/T3D/player.cpp +++ b/Engine/source/T3D/player.cpp @@ -56,6 +56,7 @@ #include "T3D/decal/decalManager.h" #include "T3D/decal/decalData.h" #include "materials/baseMatInstance.h" +#include "math/mathUtils.h" #ifdef TORQUE_EXTENDED_MOVE #include "T3D/gameBase/extended/extendedMove.h" @@ -2489,6 +2490,8 @@ void Player::allowAllPoses() mAllowSwimming = true; } +AngAxisF gPlayerMoveRot; + void Player::updateMove(const Move* move) { delta.move = *move; @@ -2531,6 +2534,7 @@ void Player::updateMove(const Move* move) delta.headVec = mHead; bool doStandardMove = true; + bool absoluteDelta = false; GameConnection* con = getControllingClient(); #ifdef TORQUE_EXTENDED_MOVE @@ -2618,6 +2622,27 @@ void Player::updateMove(const Move* move) while (mHead.y > M_PI_F) mHead.y -= M_2PI_F; } + else + { + // Orient the player so we are looking towards the required position, ignoring any banking + AngAxisF moveRot(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]); + MatrixF trans(1); + moveRot.setMatrix(&trans); + + Point3F vecForward(0, 1, 0); + Point3F orient; + EulerF rot; + trans.mulV(vecForward); + + F32 yawAng; + F32 pitchAng; + MathUtils::getAnglesFromVector(vecForward, yawAng, pitchAng); + mRot.z = yawAng; + mHead = EulerF(0); + mHead.x = -pitchAng; + + absoluteDelta = true; + } } #endif @@ -2666,6 +2691,13 @@ void Player::updateMove(const Move* move) delta.head = mHead; delta.headVec -= mHead; + + if (absoluteDelta) + { + delta.headVec = Point3F(0, 0, 0); + delta.rotVec = Point3F(0, 0, 0); + } + for(U32 i=0; i<3; ++i) { if (delta.headVec[i] > M_PI_F) @@ -5589,58 +5621,6 @@ void Player::getMuzzleTransform(U32 imageSlot,MatrixF* mat) *mat = nmat; } -DisplayPose Player::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose) -{ - // NOTE: this is intended to be similar to updateMove - DisplayPose outPose; - outPose.orientation = getRenderTransform().toEuler(); - outPose.position = inPose.position; - - if (con && con->getControlSchemeAbsoluteRotation()) - { - // Pitch - outPose.orientation.x = (inPose.orientation.x - 
mLastAbsolutePitch); - - // Constrain the range of mRot.x - while (outPose.orientation.x < -M_PI_F) - outPose.orientation.x += M_2PI_F; - while (outPose.orientation.x > M_PI_F) - outPose.orientation.x -= M_2PI_F; - - // Yaw - - // Rotate (heading) head or body? - if ((isMounted() && getMountNode() == 0) || (con && !con->isFirstPerson())) - { - // Rotate head - outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw); - } - else - { - // Rotate body - outPose.orientation.z = (inPose.orientation.z - mLastAbsoluteYaw); - } - - // Constrain the range of mRot.z - while (outPose.orientation.z < 0.0f) - outPose.orientation.z += M_2PI_F; - while (outPose.orientation.z > M_2PI_F) - outPose.orientation.z -= M_2PI_F; - - // Bank - if (mDataBlock->cameraCanBank) - { - outPose.orientation.y = (inPose.orientation.y - mLastAbsoluteRoll); - } - - // Constrain the range of mRot.y - while (outPose.orientation.y > M_PI_F) - outPose.orientation.y -= M_2PI_F; - } - - return outPose; -} - void Player::getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat) { disableHeadZCalc(); diff --git a/Engine/source/T3D/player.h b/Engine/source/T3D/player.h index 4ffd6c95d..a05b6de99 100644 --- a/Engine/source/T3D/player.h +++ b/Engine/source/T3D/player.h @@ -686,7 +686,6 @@ public: void getEyeBaseTransform(MatrixF* mat, bool includeBank); void getRenderEyeTransform(MatrixF* mat); void getRenderEyeBaseTransform(MatrixF* mat, bool includeBank); - virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose); void getCameraParameters(F32 *min, F32 *max, Point3F *offset, MatrixF *rot); void getMuzzleTransform(U32 imageSlot,MatrixF* mat); void getRenderMuzzleTransform(U32 imageSlot,MatrixF* mat); diff --git a/Engine/source/T3D/shapeBase.cpp b/Engine/source/T3D/shapeBase.cpp index a2d1cd00d..e5a6dc6fb 100644 --- a/Engine/source/T3D/shapeBase.cpp +++ b/Engine/source/T3D/shapeBase.cpp @@ -1992,9 +1992,8 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, Point3F eyePos; Point3F rotEyePos; - DisplayPose inPose; - displayDevice->getFrameEyePose(&inPose, eyeId); - DisplayPose newPose = calcCameraDeltaPose(displayDevice->getCurrentConnection(), inPose); + DisplayPose newPose; + displayDevice->getFrameEyePose(&newPose, eyeId); // Ok, basically we just need to add on newPose to the camera transform // NOTE: currently we dont support third-person camera in this mode @@ -2004,57 +2003,15 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, QuatF baserot = cameraTransform; QuatF qrot = QuatF(newPose.orientation); - QuatF concatRot; - concatRot.mul(baserot, qrot); - concatRot.setMatrix(&temp); - temp.setPosition(cameraTransform.getPosition() + concatRot.mulP(newPose.position, &rotEyePos)); + //QuatF concatRot; + //concatRot.mul(baserot, qrot); + qrot.setMatrix(&temp); + temp.setPosition(cameraTransform.getPosition() + qrot.mulP(newPose.position, &rotEyePos)); + *outMat = temp; } -DisplayPose ShapeBase::calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose) -{ - // NOTE: this is intended to be similar to updateMove - // WARNING: does not take into account any move values - - DisplayPose outPose; - outPose.orientation = getRenderTransform().toEuler(); - outPose.position = inPose.position; - - if (con && con->getControlSchemeAbsoluteRotation()) - { - // Pitch - outPose.orientation.x = inPose.orientation.x; - - // Constrain the range of mRot.x - while (outPose.orientation.x < -M_PI_F) - outPose.orientation.x += M_2PI_F; - while 
(outPose.orientation.x > M_PI_F) - outPose.orientation.x -= M_2PI_F; - - // Yaw - outPose.orientation.z = inPose.orientation.z; - - // Constrain the range of mRot.z - while (outPose.orientation.z < -M_PI_F) - outPose.orientation.z += M_2PI_F; - while (outPose.orientation.z > M_PI_F) - outPose.orientation.z -= M_2PI_F; - - // Bank - if (mDataBlock->cameraCanBank) - { - outPose.orientation.y = inPose.orientation.y; - } - - // Constrain the range of mRot.y - while (outPose.orientation.y > M_PI_F) - outPose.orientation.y -= M_2PI_F; - } - - return outPose; -} - void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot) { *min = mDataBlock->cameraMinDist; diff --git a/Engine/source/T3D/shapeBase.h b/Engine/source/T3D/shapeBase.h index 5a7ff5eb1..6ee026292 100644 --- a/Engine/source/T3D/shapeBase.h +++ b/Engine/source/T3D/shapeBase.h @@ -1588,9 +1588,6 @@ public: /// orient and position values of the display device. virtual void getEyeCameraTransform( IDisplayDevice *display, U32 eyeId, MatrixF *outMat ); - /// Calculates a delta camera angle and view position based on inPose - virtual DisplayPose calcCameraDeltaPose(GameConnection *con, const DisplayPose& inPose); - /// Gets the index of a node inside a mounted image given the name /// @param imageSlot Image slot /// @param nodeName Node name diff --git a/Engine/source/gfx/gfxDevice.h b/Engine/source/gfx/gfxDevice.h index aa7ba0edf..5ae7567d1 100644 --- a/Engine/source/gfx/gfxDevice.h +++ b/Engine/source/gfx/gfxDevice.h @@ -219,6 +219,12 @@ public: /// The device has started rendering a frame's field (such as for side-by-side rendering) deStartOfField, + /// left stereo frame has been rendered + deLeftStereoFrameRendered, + + /// right stereo frame has been rendered + deRightStereoFrameRendered, + /// The device is about to finish rendering a frame's field deEndOfField, }; @@ -248,6 +254,7 @@ public: { RS_Standard = 0, RS_StereoSideBySide = (1<<0), // Render into current Render Target side-by-side + RS_StereoSeparate = (1<<1) // Render in two separate passes (then combined by vr compositor) }; enum GFXDeviceLimits diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 9383ff605..1b8b92f77 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -38,8 +38,8 @@ #include "gfx/gfxDrawUtil.h" #include "gfx/gfxDebugEvent.h" #include "core/stream/fileStream.h" - -GFXTextureObject *gLastStereoTexture = NULL; +#include "platform/output/IDisplayDevice.h" +#include "T3D/gameBase/extended/extendedMove.h" #define TS_OVERLAY_SCREEN_WIDTH 0.75 @@ -66,6 +66,7 @@ ImplementEnumType( GuiTSRenderStyles, "@ingroup Gui3D" ) { GuiTSCtrl::RenderStyleStandard, "standard" }, { GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" }, + { GuiTSCtrl::RenderStyleStereoSeparate, "stereo separate" }, EndImplementEnumType; //----------------------------------------------------------------------------- @@ -353,32 +354,111 @@ static FovPort CalculateFovPortForCanvas(const RectI viewport, const CameraQuery return fovPort; } +void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) +{ + GFXTransformSaver saver; + Point2I renderSize = viewport.extent; + + if (mReflectPriority > 0) + { + // Get the total reflection priority. 
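      // Each awake, visible GuiTSCtrl contributes its mReflectPriority to the total computed
      // below; this control then asks REFLECTMGR to spend its proportional share of the
      // per-frame reflection update budget (mReflectPriority / totalPriority) on this view.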
+ F32 totalPriority = 0; + for (U32 i = 0; i < smAwakeTSCtrls.size(); i++) + if (smAwakeTSCtrls[i]->isVisible()) + totalPriority += smAwakeTSCtrls[i]->mReflectPriority; + + REFLECTMGR->update(mReflectPriority / totalPriority, + getExtent(), + mLastCameraQuery); + } + + if (mForceFOV != 0) + mLastCameraQuery.fov = mDegToRad(mForceFOV); + + if (mCameraZRot) + { + MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot))); + mLastCameraQuery.cameraMatrix.mul(rotMat); + } + + GFX->setViewport(viewport); + + // Clear the zBuffer so GUI doesn't hose object rendering accidentally + GFX->clear(GFXClearZBuffer, ColorI(20, 20, 20), 1.0f, 0); + + GFX->setFrustum(frustum); + mSaveProjection = GFX->getProjectionMatrix(); + + if (mLastCameraQuery.ortho) + { + mOrthoWidth = frustum.getWidth(); + mOrthoHeight = frustum.getHeight(); + } + + // We're going to be displaying this render at size of this control in + // pixels - let the scene know so that it can calculate e.g. reflections + // correctly for that final display result. + gClientSceneGraph->setDisplayTargetResolution(renderSize); + + // Set the GFX world matrix to the world-to-camera transform, but don't + // change the cameraMatrix in mLastCameraQuery. This is because + // mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world + // transform. In-place invert would save a copy but mess up any GUIs that + // depend on that value. + MatrixF worldToCamera = mLastCameraQuery.cameraMatrix; + worldToCamera.inverse(); + GFX->setWorldMatrix(worldToCamera); + + mSaveProjection = GFX->getProjectionMatrix(); + mSaveModelview = GFX->getWorldMatrix(); + mSaveViewport = viewport; + mSaveWorldToScreenScale = GFX->getWorldToScreenScale(); + mSaveFrustum = GFX->getFrustum(); + mSaveFrustum.setTransform(mLastCameraQuery.cameraMatrix); + + // Set the default non-clip projection as some + // objects depend on this even in non-reflect cases. + gClientSceneGraph->setNonClipProjection(mSaveProjection); + + // Give the post effect manager the worldToCamera, and cameraToScreen matrices + PFXMGR->setFrameMatrices(mSaveModelview, mSaveProjection); + + renderWorld(viewport); + DebugDrawer::get()->render(); + + // Restore the previous matrix state before + // we begin rendering the child controls. + saver.restore(); +} + //----------------------------------------------------------------------------- void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) { - // Save the current transforms so we can restore + // Save the current transforms so we can restore // it for child control rendering below. GFXTransformSaver saver; bool renderingToTarget = false; - if(!processCameraQuery(&mLastCameraQuery)) + if (!processCameraQuery(&mLastCameraQuery)) { // We have no camera, but render the GUI children // anyway. This makes editing GuiTSCtrl derived // controls easier in the GuiEditor. 
- renderChildControls( offset, updateRect ); + renderChildControls(offset, updateRect); return; } GFXTargetRef origTarget = GFX->getActiveRenderTarget(); + U32 origStyle = GFX->getCurrentRenderStyle(); // Set up the appropriate render style U32 prevRenderStyle = GFX->getCurrentRenderStyle(); Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset(); Point2I renderSize = getExtent(); + Frustum frustum; - if(mRenderStyle == RenderStyleStereoSideBySide) + if (mRenderStyle == RenderStyleStereoSideBySide) { GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide); GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset); @@ -399,13 +479,13 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) mLastCameraQuery.fovPort[0] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[0], mLastCameraQuery); mLastCameraQuery.fovPort[1] = CalculateFovPortForCanvas(mLastCameraQuery.stereoViewports[1], mLastCameraQuery); } - - GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes + GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes GFX->setSteroViewports(mLastCameraQuery.stereoViewports); GFX->setStereoTargets(mLastCameraQuery.stereoTargets); MatrixF myTransforms[2]; + Frustum frustum; if (smUseLatestDisplayTransform) { @@ -435,52 +515,109 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) renderSize = mLastCameraQuery.stereoViewports[0].extent; renderingToTarget = true; } - } - else - { - GFX->setCurrentRenderStyle(GFXDevice::RS_Standard); - } - if ( mReflectPriority > 0 ) - { - // Get the total reflection priority. - F32 totalPriority = 0; - for ( U32 i=0; i < smAwakeTSCtrls.size(); i++ ) - if ( smAwakeTSCtrls[i]->isVisible() ) - totalPriority += smAwakeTSCtrls[i]->mReflectPriority; - - REFLECTMGR->update( mReflectPriority / totalPriority, - getExtent(), - mLastCameraQuery ); - } - - if(mForceFOV != 0) - mLastCameraQuery.fov = mDegToRad(mForceFOV); - - if(mCameraZRot) - { - MatrixF rotMat(EulerF(0, 0, mDegToRad(mCameraZRot))); - mLastCameraQuery.cameraMatrix.mul(rotMat); - } - - Frustum frustum; - if(mRenderStyle == RenderStyleStereoSideBySide) - { // NOTE: these calculations are essentially overridden later by the fov port settings when rendering each eye. 
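      // A FovPort stores the tangents of the four half-angles of a (possibly asymmetric) eye
      // frustum. Illustrative sketch of what makeFovPortFrustum is expected to derive from it;
      // the MathUtils helper remains the authoritative implementation:
      //    left   = -fovPort.leftTan  * nearPlane;
      //    right  =  fovPort.rightTan * nearPlane;
      //    top    =  fovPort.upTan    * nearPlane;
      //    bottom = -fovPort.downTan  * nearPlane;
      //    frustum.set(ortho, left, right, top, bottom, nearPlane, farPlane);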
- MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); + + GFX->activateStereoTarget(-1); + _internalRender(RectI(updateRect.point, updateRect.extent), frustum); + + // Render preview + if (mLastCameraQuery.displayDevice) + { + GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture(); + if (!previewTexture.isNull()) + { + GFX->setActiveRenderTarget(origTarget); + GFX->setCurrentRenderStyle(origStyle); + GFX->setClipRect(updateRect); + renderDisplayPreview(updateRect, previewTexture); + } + } + } + else if (mRenderStyle == RenderStyleStereoSeparate && mLastCameraQuery.stereoTargets[0]) + { + // In this case we render the scene twice to different render targets, then + // render the final composite view + GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSeparate); + GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset); + GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes + GFX->setSteroViewports(mLastCameraQuery.stereoViewports); + GFX->setStereoTargets(mLastCameraQuery.stereoTargets); + + MatrixF myTransforms[2]; + + if (smUseLatestDisplayTransform) + { + // Use the view matrix determined from the display device + myTransforms[0] = mLastCameraQuery.eyeTransforms[0]; + myTransforms[1] = mLastCameraQuery.eyeTransforms[1]; + } + else + { + // Use the view matrix determined from the control object + myTransforms[0] = mLastCameraQuery.cameraMatrix; + myTransforms[1] = mLastCameraQuery.cameraMatrix; + + QuatF qrot = mLastCameraQuery.cameraMatrix; + Point3F pos = mLastCameraQuery.cameraMatrix.getPosition(); + Point3F rotEyePos; + + myTransforms[0].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[0], &rotEyePos)); + myTransforms[1].setPosition(pos + qrot.mulP(mLastCameraQuery.eyeOffset[1], &rotEyePos)); + } + + MatrixF origMatrix = mLastCameraQuery.cameraMatrix; + + // Left + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); + mLastCameraQuery.cameraMatrix = myTransforms[0]; + frustum.update(); + GFX->activateStereoTarget(0); + _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); + GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered); + + // Right + GFX->activateStereoTarget(1); + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); + mLastCameraQuery.cameraMatrix = myTransforms[1]; + frustum.update(); + _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), frustum); + GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); + + mLastCameraQuery.cameraMatrix = origMatrix; + + // Render preview + if (mLastCameraQuery.displayDevice) + { + GFXTexHandle previewTexture = mLastCameraQuery.displayDevice->getPreviewTexture(); + if (!previewTexture.isNull()) + { + GFX->setActiveRenderTarget(origTarget); + GFX->setCurrentRenderStyle(origStyle); + GFX->setClipRect(updateRect); + renderDisplayPreview(updateRect, previewTexture); + } + } } else { +#ifdef TORQUE_OS_MAC + Point2I screensize = getRoot()->getWindowSize(); + tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y); +#endif 
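      // The TORQUE_OS_MAC block above flips the viewport rect vertically; on OS X the GL
      // window coordinate origin is bottom-left, so Y is mirrored against the window height
      // before the rect is handed to the device.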
+ GFX->setCurrentRenderStyle(GFXDevice::RS_Standard); + // set up the camera and viewport stuff: F32 wwidth; F32 wheight; F32 renderWidth = F32(renderSize.x); F32 renderHeight = F32(renderSize.y); F32 aspectRatio = renderWidth / renderHeight; - + // Use the FOV to calculate the viewport height scale // then generate the width scale from the aspect ratio. - if(!mLastCameraQuery.ortho) + if (!mLastCameraQuery.ortho) { wheight = mLastCameraQuery.nearPlane * mTan(mLastCameraQuery.fov / 2.0f); wwidth = aspectRatio * wheight; @@ -499,251 +636,33 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) F32 top = wheight - vscale * (updateRect.point.y - offset.y); F32 bottom = wheight - vscale * (updateRect.point.y + updateRect.extent.y - offset.y); - frustum.set( mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane ); - } + frustum.set(mLastCameraQuery.ortho, left, right, top, bottom, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane); - // Manipulate the frustum for tiled screenshots - const bool screenShotMode = gScreenShot && gScreenShot->isPending(); - if ( screenShotMode ) - { - gScreenShot->tileFrustum( frustum ); - GFX->setViewMatrix(MatrixF::Identity); - } - - RectI tempRect = updateRect; - - if (!renderingToTarget) - { - #ifdef TORQUE_OS_MAC + // Manipulate the frustum for tiled screenshots + const bool screenShotMode = gScreenShot && gScreenShot->isPending(); + if (screenShotMode) + { + gScreenShot->tileFrustum(frustum); + GFX->setViewMatrix(MatrixF::Identity); + } + + RectI tempRect = updateRect; + +#ifdef TORQUE_OS_MAC Point2I screensize = getRoot()->getWindowSize(); tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y); - #endif +#endif - GFX->setViewport( tempRect ); - } - else - { - // Activate stereo RT - GFX->activateStereoTarget(-1); + _internalRender(tempRect, frustum); } - // Clear the zBuffer so GUI doesn't hose object rendering accidentally - GFX->clear( GFXClearZBuffer , ColorI(20,20,20), 1.0f, 0 ); - //GFX->clear( GFXClearTarget, ColorI(255,0,0), 1.0f, 0); - - GFX->setFrustum( frustum ); - if(mLastCameraQuery.ortho) - { - mOrthoWidth = frustum.getWidth(); - mOrthoHeight = frustum.getHeight(); - } - - // We're going to be displaying this render at size of this control in - // pixels - let the scene know so that it can calculate e.g. reflections - // correctly for that final display result. - gClientSceneGraph->setDisplayTargetResolution(renderSize); - - // Set the GFX world matrix to the world-to-camera transform, but don't - // change the cameraMatrix in mLastCameraQuery. This is because - // mLastCameraQuery.cameraMatrix is supposed to contain the camera-to-world - // transform. In-place invert would save a copy but mess up any GUIs that - // depend on that value. - MatrixF worldToCamera = mLastCameraQuery.cameraMatrix; - worldToCamera.inverse(); - GFX->setWorldMatrix( worldToCamera ); - - mSaveProjection = GFX->getProjectionMatrix(); - mSaveModelview = GFX->getWorldMatrix(); - mSaveViewport = updateRect; - mSaveWorldToScreenScale = GFX->getWorldToScreenScale(); - mSaveFrustum = GFX->getFrustum(); - mSaveFrustum.setTransform( mLastCameraQuery.cameraMatrix ); - - // Set the default non-clip projection as some - // objects depend on this even in non-reflect cases. 
- gClientSceneGraph->setNonClipProjection( mSaveProjection ); - - // Give the post effect manager the worldToCamera, and cameraToScreen matrices - PFXMGR->setFrameMatrices( mSaveModelview, mSaveProjection ); - - renderWorld(updateRect); - DebugDrawer::get()->render(); - - // Render the canvas overlay if its available - if (false && mRenderStyle == RenderStyleStereoSideBySide && mStereoGuiTarget.getPointer()) - { - GFXDEBUGEVENT_SCOPE( StereoGui_Render, ColorI( 255, 0, 0 ) ); - MatrixF proj(1); - - Frustum originalFrustum = GFX->getFrustum(); - GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0); - const FovPort *currentFovPort = GFX->getStereoFovPort(); - const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms(); - const Point3F *eyeOffset = GFX->getStereoEyeOffsets(); - Frustum gfxFrustum = originalFrustum; - - for (U32 i=0; i<2; i++) - { - GFX->activateStereoTarget(i); - MathUtils::makeFovPortFrustum(&gfxFrustum, true, gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[i], eyeTransforms[i]); - GFX->setFrustum(gfxFrustum); - - MatrixF eyeWorldTrans(1); - eyeWorldTrans.setPosition(Point3F(eyeOffset[i].x,eyeOffset[i].y,eyeOffset[i].z)); - MatrixF eyeWorld(1); - eyeWorld.mul(eyeWorldTrans); - eyeWorld.inverse(); - - GFX->setWorldMatrix(eyeWorld); - GFX->setViewMatrix(MatrixF::Identity); - - if (!mStereoOverlayVB.getPointer()) - { - mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic); - GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4); - - F32 texLeft = 0.0f; - F32 texRight = 1.0f; - F32 texTop = 1.0f; - F32 texBottom = 0.0f; - - F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight(); - F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH; - F32 rectHeight = rectWidth * rectRatio; - - F32 screenLeft = -rectWidth * 0.5; - F32 screenRight = rectWidth * 0.5; - F32 screenTop = -rectHeight * 0.5; - F32 screenBottom = rectHeight * 0.5; - - const F32 fillConv = 0.0f; - const F32 frustumDepthAdjusted = gfxFrustum.getNearDist() + 0.012; - verts[0].point.set( screenLeft - fillConv, frustumDepthAdjusted, screenTop - fillConv ); - verts[1].point.set( screenRight - fillConv, frustumDepthAdjusted, screenTop - fillConv ); - verts[2].point.set( screenLeft - fillConv, frustumDepthAdjusted, screenBottom - fillConv ); - verts[3].point.set( screenRight - fillConv, frustumDepthAdjusted, screenBottom - fillConv ); - - verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255); - - verts[0].texCoord.set( texLeft, texTop ); - verts[1].texCoord.set( texRight, texTop ); - verts[2].texCoord.set( texLeft, texBottom ); - verts[3].texCoord.set( texRight, texBottom ); - - mStereoOverlayVB.unlock(); - } - - if (!mStereoGuiSB.getPointer()) - { - // DrawBitmapStretchSR - GFXStateBlockDesc bitmapStretchSR; - bitmapStretchSR.setCullMode(GFXCullNone); - bitmapStretchSR.setZReadWrite(false, false); - bitmapStretchSR.setBlend(false , GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); - bitmapStretchSR.samplersDefined = true; - - bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); - bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint; - bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint; - bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint; - - mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR); - } - - GFX->setVertexBuffer(mStereoOverlayVB); - GFX->setStateBlock(mStereoGuiSB); - GFX->setTexture( 0, texObject ); - GFX->setupGenericShaders( GFXDevice::GSModColorTexture ); - GFX->drawPrimitive( GFXTriangleStrip, 0, 
2 ); - } - } - - // Restore the previous matrix state before - // we begin rendering the child controls. - saver.restore(); - - // Restore the render style and any stereo parameters - GFX->setActiveRenderTarget(origTarget); - GFX->setCurrentRenderStyle(prevRenderStyle); - GFX->setCurrentProjectionOffset(prevProjectionOffset); - - GFX->updateStates(true); - - if(mRenderStyle == RenderStyleStereoSideBySide && gLastStereoTexture) - { - GFX->setWorldMatrix(MatrixF(1)); - GFX->setViewMatrix(MatrixF::Identity); - GFX->setClipRect(updateRect); - - GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0,0), Point2I(1024, 768)), ColorI::BLACK); - GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED); - - if (!mStereoOverlayVB.getPointer()) - { - mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic); - GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4); - - F32 texLeft = 0.0f; - F32 texRight = 1.0f; - F32 texTop = 1.0f; - F32 texBottom = 0.0f; - - F32 rectWidth = 1024.0; - F32 rectHeight = 768.0; - - F32 screenLeft = 0; - F32 screenRight = rectWidth; - F32 screenTop = 0; - F32 screenBottom = rectHeight; - - const F32 fillConv = 0.0f; - const F32 frustumDepthAdjusted = 0.0f; - verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f); - verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f); - verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f); - verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f); - - verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255,255,255,255); - - verts[0].texCoord.set(texLeft, texTop); - verts[1].texCoord.set(texRight, texTop); - verts[2].texCoord.set(texLeft, texBottom); - verts[3].texCoord.set(texRight, texBottom); - - mStereoOverlayVB.unlock(); - } - - if (!mStereoGuiSB.getPointer()) - { - // DrawBitmapStretchSR - GFXStateBlockDesc bitmapStretchSR; - bitmapStretchSR.setCullMode(GFXCullNone); - bitmapStretchSR.setZReadWrite(false, false); - bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); - bitmapStretchSR.samplersDefined = true; - - bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); - bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint; - bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint; - bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint; - - mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR); - } - //static GFXTexHandle texHandle("art/gui/splash", &GFXDefaultPersistentProfile, avar("%s() - mTextureNormal (line %d)", __FUNCTION__, __LINE__)); - GFX->setVertexBuffer(mStereoOverlayVB); - GFX->setStateBlock(mStereoGuiSB); - GFX->setTexture(0, gLastStereoTexture);// texHandle);// gLastStereoTexture); - GFX->setupGenericShaders(GFXDevice::GSModColorTexture); - GFX->drawPrimitive(GFXTriangleStrip, 0, 2); - - - - //GFX->getDrawUtil()->drawBitmapStretch(gLastStereoTexture, updateRect); - } + // TODO: Some render to sort of overlay system? // Allow subclasses to render 2D elements. 
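   // Before any 2D GUI is drawn, the original render target and render style captured at the
   // top of onRender() are restored below, so overlays composite onto the canvas's backbuffer
   // rather than into whichever stereo eye target was active last.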
+ GFX->setActiveRenderTarget(origTarget); + GFX->setCurrentRenderStyle(origStyle); GFX->setClipRect(updateRect); - renderGui( offset, updateRect ); + renderGui(offset, updateRect); if (shouldRenderChildControls()) { @@ -779,12 +698,84 @@ void GuiTSCtrl::drawLineList( const Vector &points, const ColorI color, drawLine( points[i], points[i+1], color, width ); } +//----------------------------------------------------------------------------- void GuiTSCtrl::setStereoGui(GuiOffscreenCanvas *canvas) { mStereoGuiTarget = canvas ? canvas->getTarget() : NULL; } + +//----------------------------------------------------------------------------- + +void GuiTSCtrl::renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture) +{ + GFX->setWorldMatrix(MatrixF(1)); + GFX->setViewMatrix(MatrixF::Identity); + GFX->setClipRect(updateRect); + + GFX->getDrawUtil()->drawRectFill(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::BLACK); + GFX->getDrawUtil()->drawRect(RectI(Point2I(0, 0), Point2I(1024, 768)), ColorI::RED); + + if (!mStereoPreviewVB.getPointer()) + { + mStereoPreviewVB.set(GFX, 4, GFXBufferTypeStatic); + GFXVertexPCT *verts = mStereoPreviewVB.lock(0, 4); + + F32 texLeft = 0.0f; + F32 texRight = 1.0f; + F32 texTop = 0.0f; + F32 texBottom = 1.0f; + + F32 rectWidth = updateRect.extent.x; + F32 rectHeight = updateRect.extent.y; + + F32 screenLeft = 0; + F32 screenRight = rectWidth; + F32 screenTop = 0; + F32 screenBottom = rectHeight; + + const F32 fillConv = 0.0f; + const F32 frustumDepthAdjusted = 0.0f; + verts[0].point.set(screenLeft - fillConv, screenTop - fillConv, 0.f); + verts[1].point.set(screenRight - fillConv, screenTop - fillConv, 0.f); + verts[2].point.set(screenLeft - fillConv, screenBottom - fillConv, 0.f); + verts[3].point.set(screenRight - fillConv, screenBottom - fillConv, 0.f); + + verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255, 255, 255, 255); + + verts[0].texCoord.set(texLeft, texTop); + verts[1].texCoord.set(texRight, texTop); + verts[2].texCoord.set(texLeft, texBottom); + verts[3].texCoord.set(texRight, texBottom); + + mStereoPreviewVB.unlock(); + } + + if (!mStereoPreviewSB.getPointer()) + { + // DrawBitmapStretchSR + GFXStateBlockDesc bitmapStretchSR; + bitmapStretchSR.setCullMode(GFXCullNone); + bitmapStretchSR.setZReadWrite(false, false); + bitmapStretchSR.setBlend(false, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); + bitmapStretchSR.samplersDefined = true; + + bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); + bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint; + bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint; + bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint; + + mStereoPreviewSB = GFX->createStateBlock(bitmapStretchSR); + } + + GFX->setVertexBuffer(mStereoPreviewVB); + GFX->setStateBlock(mStereoPreviewSB); + GFX->setTexture(0, previewTexture); + GFX->setupGenericShaders(GFXDevice::GSModColorTexture); + GFX->drawPrimitive(GFXTriangleStrip, 0, 2); +} + //============================================================================= // Console Methods. 
//============================================================================= diff --git a/Engine/source/gui/3d/guiTSControl.h b/Engine/source/gui/3d/guiTSControl.h index 493b7c03d..bc2fba586 100644 --- a/Engine/source/gui/3d/guiTSControl.h +++ b/Engine/source/gui/3d/guiTSControl.h @@ -55,6 +55,8 @@ struct CameraQuery RectI stereoViewports[2]; // destination viewports GFXTextureTarget* stereoTargets[2]; GuiCanvas* drawCanvas; // Canvas we are drawing to. Needed for VR + + IDisplayDevice* displayDevice; }; /// Abstract base class for 3D viewport GUIs. @@ -65,7 +67,8 @@ class GuiTSCtrl : public GuiContainer public: enum RenderStyles { RenderStyleStandard = 0, - RenderStyleStereoSideBySide = (1<<0) + RenderStyleStereoSideBySide = (1<<0), + RenderStyleStereoSeparate = (1<<1), }; protected: @@ -104,12 +107,16 @@ protected: NamedTexTargetRef mStereoGuiTarget; GFXVertexBufferHandle mStereoOverlayVB; GFXStateBlockRef mStereoGuiSB; + + GFXVertexBufferHandle mStereoPreviewVB; + GFXStateBlockRef mStereoPreviewSB; public: GuiTSCtrl(); void onPreRender(); + void _internalRender(RectI viewport, Frustum &frustum); void onRender(Point2I offset, const RectI &updateRect); virtual bool processCameraQuery(CameraQuery *query); @@ -178,6 +185,7 @@ public: bool shouldRenderChildControls() { return mRenderStyle == RenderStyleStandard; } void setStereoGui(GuiOffscreenCanvas *canvas); + void renderDisplayPreview(const RectI &updateRect, GFXTexHandle &previewTexture); DECLARE_CONOBJECT(GuiTSCtrl); DECLARE_CATEGORY( "Gui 3D" ); diff --git a/Engine/source/platform/input/event.cpp b/Engine/source/platform/input/event.cpp index 45b89e85f..c4145517e 100644 --- a/Engine/source/platform/input/event.cpp +++ b/Engine/source/platform/input/event.cpp @@ -27,6 +27,7 @@ #include "core/stringTable.h" #include "platform/platformInput.h" #include "math/mQuat.h" +#include "math/mAngAxis.h" MODULE_BEGIN( InputEventManager ) @@ -546,3 +547,21 @@ void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEve newEvent.postToSignal(Input::smInputEvent); } + +void InputEventManager::buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& aValue) +{ + InputEventInfo newEvent; + + newEvent.deviceType = deviceType; + newEvent.deviceInst = deviceInst; + newEvent.objType = objType; + newEvent.objInst = objInst; + newEvent.action = action; + newEvent.fValue = aValue.axis.x; + newEvent.fValue2 = aValue.axis.y; + newEvent.fValue3 = aValue.axis.z; + newEvent.fValue4 = aValue.angle; + + newEvent.postToSignal(Input::smInputEvent); +} + diff --git a/Engine/source/platform/input/event.h b/Engine/source/platform/input/event.h index 916d1910f..b77caa202 100644 --- a/Engine/source/platform/input/event.h +++ b/Engine/source/platform/input/event.h @@ -504,6 +504,9 @@ public: /// Build an input event based on a QuatF void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, QuatF& qValue); + /// Build an input event based on a AngAxisF + void buildInputEvent(U32 deviceType, U32 deviceInst, InputEventType objType, InputObjectInstances objInst, InputActionType action, AngAxisF& qValue); + protected: U32 mNextDeviceTypeCode; U32 mNextDeviceCode; diff --git a/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp index 50f153b12..229bc0429 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp +++ 
b/Engine/source/platform/input/oculusVR/oculusVRDevice.cpp @@ -62,7 +62,7 @@ MODULE_END; // OculusVRDevice //----------------------------------------------------------------------------- -bool OculusVRDevice::smEnableDevice = true; +bool OculusVRDevice::smEnableDevice = false; bool OculusVRDevice::smSimulateHMD = true; @@ -318,17 +318,6 @@ void OculusVRDevice::getEyeOffsets(Point3F *dest) const hmd->getEyeOffsets(dest); } -bool OculusVRDevice::providesFovPorts() const -{ - if(!mHMDDevices.size()) - return false; - - const OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId); - if(!hmd) - return Point3F::Zero; - - return true; -} void OculusVRDevice::getFovPorts(FovPort *out) const { @@ -562,6 +551,20 @@ GameConnection* OculusVRDevice::getCurrentConnection() //----------------------------------------------------------------------------- +GFXTexHandle OculusVRDevice::getPreviewTexture() +{ + if (!mHMDDevices.size()) + return NULL; + + OculusVRHMDDevice* hmd = getHMDDevice(mActiveDeviceId); + if (!hmd) + return NULL; + + return hmd->getPreviewTexture(); +} + +//----------------------------------------------------------------------------- + DefineEngineFunction(isOculusVRDeviceActive, bool, (),, "@brief Used to determine if the Oculus VR input device is active\n\n" diff --git a/Engine/source/platform/input/oculusVR/oculusVRDevice.h b/Engine/source/platform/input/oculusVR/oculusVRDevice.h index c1ee642a2..603737391 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRDevice.h @@ -115,8 +115,8 @@ public: virtual bool providesFrameEyePose() const; virtual void getFrameEyePose(DisplayPose *outPose, U32 eyeId) const; virtual bool providesEyeOffsets() const; + virtual bool providesFovPorts() const { return true; } virtual void getEyeOffsets(Point3F *dest) const; - virtual bool providesFovPorts() const; virtual void getFovPorts(FovPort *out) const; virtual bool providesProjectionOffset() const; virtual const Point2F& getProjectionOffset() const; @@ -154,6 +154,8 @@ public: virtual void setCurrentConnection(GameConnection *connection); virtual GameConnection* getCurrentConnection(); + GFXTexHandle getPreviewTexture(); + bool _handleDeviceEvent( GFXDevice::GFXDeviceEventType evt ); public: diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp index c2bd152a0..ceccfe4c1 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp @@ -43,7 +43,6 @@ #include "OVR_CAPI_GL.h" #define OCULUS_USE_GL #endif -extern GFXTextureObject *gLastStereoTexture; struct OculusTexture { @@ -317,6 +316,14 @@ void OculusVRHMDDevice::dismissWarning() //ovr_DismissHSWDisplay(mDevice); } +GFXTexHandle OculusVRHMDDevice::getPreviewTexture() +{ + if (!mIsValid || !mDevice) + return NULL; + + return mDebugMirrorTextureHandle; +} + bool OculusVRHMDDevice::setupTargets() { // Create eye render buffers @@ -381,9 +388,6 @@ bool OculusVRHMDDevice::setupTargets() mEyeRT[1] = mStereoRT; mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h)); - gLastStereoTexture = NULL; - - GFXD3D11Device* device = static_cast(GFX); D3D11_TEXTURE2D_DESC dsDesc; @@ -453,7 +457,6 @@ bool OculusVRHMDDevice::setupTargets() } mDebugMirrorTextureHandle = object; - gLastStereoTexture = mDebugMirrorTextureHandle; } else { 
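// The next hunk has getFrameEyePose() return a full orientation instead of Euler angles.
// Throughout these files libOVR's right-handed, Y-up coordinates are remapped into Torque's
// Z-up space; for positions that remap is visible directly in the code:
//    torquePos = Point3F(-ovrPos.x, ovrPos.z, -ovrPos.y);
// and the matrix path through OculusVRUtil::convertRotation() presumably applies the matching
// remap to the rotation before it is turned into a QuatF.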
@@ -673,10 +676,11 @@ void OculusVRHMDDevice::getFrameEyePose(DisplayPose *outPose, U32 eyeId) const OVR::Quatf orientation = pose.Orientation; const OVR::Vector3f position = pose.Position; - EulerF rotEuler; - OculusVRUtil::convertRotation(orientation, rotEuler); + MatrixF torqueMat(1); + OVR::Matrix4f mat(orientation); + OculusVRUtil::convertRotation(mat.M, torqueMat); - outPose->orientation = rotEuler; + outPose->orientation = QuatF(torqueMat); outPose->position = Point3F(-position.x, position.z, -position.y); } diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h index 13a5533b2..c2e1b5f4e 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h @@ -185,6 +185,8 @@ public: virtual void setCurrentConnection(GameConnection *connection) { mConnection = connection; } virtual GameConnection* getCurrentConnection() { return mConnection; } + GFXTexHandle getPreviewTexture(); + String dumpMetrics(); // Stereo RT diff --git a/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp index cfdaa5be1..6922ec74d 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRSensorDevice.cpp @@ -26,6 +26,7 @@ #include "platform/platformInput.h" #include "console/simBase.h" #include "console/engineAPI.h" +#include "math/mAngAxis.h" #include "OVR_CAPI_0_8_0.h" U32 OculusVRSensorDevice::OVR_SENSORROT[OculusVRConstants::MaxSensors] = {0}; @@ -184,7 +185,8 @@ bool OculusVRSensorDevice::process(U32 deviceType, bool generateRotAsAngAxis, bo { if(generateRotAsAngAxis) { - INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, currentBuffer->mRotQuat); + AngAxisF axisAA(currentBuffer->mRotQuat); + INPUTMGR->buildInputEvent(deviceType, OculusVRConstants::DefaultOVRBase, SI_ROT, OVR_SENSORROT[mActionCodeIndex], SI_MOVE, axisAA); } if(generateRotAsEuler) diff --git a/Engine/source/platform/input/oculusVR/oculusVRUtil.cpp b/Engine/source/platform/input/oculusVR/oculusVRUtil.cpp index 69ddbc380..6732eac07 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRUtil.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRUtil.cpp @@ -44,10 +44,7 @@ void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation) void convertRotation(OVR::Quatf& inRotation, EulerF& outRotation) { F32 yaw, pitch, roll; - inRotation.GetEulerAngles(&yaw, &pitch, &roll); - outRotation.x = -pitch; - outRotation.y = roll; - outRotation.z = -yaw; + inRotation.GetEulerAngles(&outRotation.x, &outRotation.y, &outRotation.z); } void calculateAxisRotation(const MatrixF& inRotation, const F32& maxAxisRadius, Point2F& outRotation) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp new file mode 100644 index 000000000..01a9b1dfb --- /dev/null +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -0,0 +1,886 @@ +#include "platform/input/openVR/openVRProvider.h" +#include "platform/platformInput.h" +#include "core/module.h" +#include "console/engineAPI.h" +#include "T3D/gameBase/gameConnection.h" +#include "gui/core/guiCanvas.h" +#include "postFx/postEffectCommon.h" + +#include "gfx/D3D11/gfxD3D11Device.h" +#include "gfx/D3D11/gfxD3D11TextureObject.h" +#include "gfx/D3D11/gfxD3D11EnumTranslate.h" +#include 
"gfx/gfxStringEnumTranslate.h" + +/* +#include "gfx/gl/gfxGLDevice.h" +#include "gfx/gl/gfxGLTextureObject.h" +#include "gfx/gl/gfxGLEnumTranslate.h" +*/ + +#include "platform/input/oculusVR/oculusVRUtil.h" + + +U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount] = { 0 }; + +U32 OpenVRProvider::OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount]; +U32 OpenVRProvider::OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount]; + +U32 OpenVRProvider::OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount] = { 0 }; +U32 OpenVRProvider::OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount] = { 0 }; + +static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL) +{ + uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError); + if (unRequiredBufferLen == 0) + return ""; + + char *pchBuffer = new char[unRequiredBufferLen]; + unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError); + String sResult = pchBuffer; + delete[] pchBuffer; + return sResult; +} + +static MatrixF ConvertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat) +{ + MatrixF outMat(1); + + outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0)); + outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0)); + outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0)); + outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos + + return outMat; +} + +MODULE_BEGIN(OpenVRProvider) + +MODULE_INIT_AFTER(InputEventManager) +MODULE_SHUTDOWN_BEFORE(InputEventManager) + +MODULE_INIT +{ + OpenVRProvider::staticInit(); + ManagedSingleton< OpenVRProvider >::createSingleton(); +} + +MODULE_SHUTDOWN +{ + ManagedSingleton< OpenVRProvider >::deleteSingleton(); +} + +MODULE_END; + + +bool OpenVRRenderState::setupRenderTargets(U32 mode) +{ + if (!mHMD) + return false; + + U32 sizeX, sizeY; + Point2I newRTSize; + mHMD->GetRecommendedRenderTargetSize(&sizeX, &sizeY); + + mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); + mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); + + newRTSize.x = sizeX; + newRTSize.y = sizeY; + + GFXTexHandle stereoTexture; + stereoTexture.set(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color"); + mStereoRenderTextures[0] = mStereoRenderTextures[1] = stereoTexture; + + GFXTexHandle stereoDepthTexture; + stereoDepthTexture.set(newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, "OpenVR Depth"); + mStereoDepthTextures[0] = mStereoDepthTextures[1] = stereoDepthTexture; + + mStereoRT = GFX->allocRenderToTextureTarget(); + mStereoRT->attachTexture(GFXTextureTarget::Color0, stereoTexture); + mStereoRT->attachTexture(GFXTextureTarget::DepthStencil, stereoDepthTexture); + + mEyeRT[0] = mEyeRT[1] = mStereoRT; + + return true; +} + +void 
OpenVRRenderState::setupDistortion() +{ + if (!mHMD) + return; + + U16 m_iLensGridSegmentCountH = 43; + U16 m_iLensGridSegmentCountV = 43; + + float w = (float)(1.0 / float(m_iLensGridSegmentCountH - 1)); + float h = (float)(1.0 / float(m_iLensGridSegmentCountV - 1)); + + float u, v = 0; + + Vector vVerts(0); + GFXVertexPTTT *vert; + + vVerts.reserve((m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2); + + mDistortionVerts.set(GFX, (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2, GFXBufferTypeStatic); + + vert = mDistortionVerts.lock(); + + //left eye distortion verts + float Xoffset = -1; + for (int y = 0; y < m_iLensGridSegmentCountV; y++) + { + for (int x = 0; x < m_iLensGridSegmentCountH; x++) + { + u = x*w; v = 1 - y*h; + vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f); + + vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Left, u, v); + + vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]); // r + vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]); // g + vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]); // b + + vert++; + } + } + + //right eye distortion verts + Xoffset = 0; + for (int y = 0; y < m_iLensGridSegmentCountV; y++) + { + for (int x = 0; x < m_iLensGridSegmentCountH; x++) + { + u = x*w; v = 1 - y*h; + vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f); + + vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Right, u, v); + + vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]); + vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]); + vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]); + + vert++; + } + } + + mDistortionVerts.unlock(); + + mDistortionInds.set(GFX, m_iLensGridSegmentCountV * m_iLensGridSegmentCountH * 6 * 2, 0, GFXBufferTypeStatic); + + GFXPrimitive *prim; + U16 *index; + + mDistortionInds.lock(&index, &prim); + U16 a, b, c, d; + + U16 offset = 0; + for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++) + { + for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++) + { + a = m_iLensGridSegmentCountH*y + x + offset; + b = m_iLensGridSegmentCountH*y + x + 1 + offset; + c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset; + d = (y + 1)*m_iLensGridSegmentCountH + x + offset; + *index++ = a; + *index++ = b; + *index++ = c; + + *index++ = a; + *index++ = c; + *index++ = d; + } + } + + offset = (m_iLensGridSegmentCountH)*(m_iLensGridSegmentCountV); + for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++) + { + for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++) + { + a = m_iLensGridSegmentCountH*y + x + offset; + b = m_iLensGridSegmentCountH*y + x + 1 + offset; + c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset; + d = (y + 1)*m_iLensGridSegmentCountH + x + offset; + *index++ = a; + *index++ = b; + *index++ = c; + + *index++ = a; + *index++ = c; + *index++ = d; + } + } + + mDistortionInds.unlock(); +} + +void OpenVRRenderState::renderDistortion(U32 eye) +{ + // Updates distortion for an eye (this should only be the case for backend APIS where image should be predistorted) + /* + + glDisable(GL_DEPTH_TEST); + glViewport( 0, 0, m_nWindowWidth, m_nWindowHeight ); + + glBindVertexArray( m_unLensVAO ); + glUseProgram( m_unLensProgramID ); + + //render left lens (first half of index array ) + glBindTexture(GL_TEXTURE_2D, leftEyeDesc.m_nResolveTextureId ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, 
GL_LINEAR ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); + glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, 0 ); + + //render right lens (second half of index array ) + glBindTexture(GL_TEXTURE_2D, rightEyeDesc.m_nResolveTextureId ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); + glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); + glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, (const void *)(m_uiIndexSize) ); + + glBindVertexArray( 0 ); + glUseProgram( 0 ); + */ +} + +void OpenVRRenderState::renderPreview() +{ + +} + +void OpenVRRenderState::reset(vr::IVRSystem* hmd) +{ + mHMD = hmd; + + mStereoRT = NULL; + mEyeRT[0] = mEyeRT[1] = NULL; + + mStereoRenderTextures[0] = mStereoRenderTextures[1] = NULL; + mStereoDepthTextures[0] = mStereoDepthTextures[1] = NULL; + + mDistortionVerts = NULL; + mDistortionInds = NULL; + + if (!mHMD) + return; + + vr::HmdMatrix34_t mat = mHMD->GetEyeToHeadTransform(vr::Eye_Left); + mEyePose[0] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat); + mEyePose[0].inverse(); + + mat = mHMD->GetEyeToHeadTransform(vr::Eye_Right); + mEyePose[1] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat); + mEyePose[1].inverse(); + + mHMD->GetProjectionRaw(vr::Eye_Left, &mEyeFov[0].leftTan, &mEyeFov[0].rightTan, &mEyeFov[0].upTan, &mEyeFov[0].downTan); + mHMD->GetProjectionRaw(vr::Eye_Right, &mEyeFov[1].leftTan, &mEyeFov[1].rightTan, &mEyeFov[1].upTan, &mEyeFov[1].downTan); + + mEyeFov[0].upTan = -mEyeFov[0].upTan; + mEyeFov[0].leftTan = -mEyeFov[0].leftTan; + mEyeFov[1].upTan = -mEyeFov[1].upTan; + mEyeFov[1].leftTan = -mEyeFov[1].leftTan; +} + +OpenVRProvider::OpenVRProvider() : + mHMD(NULL), + mRenderModels(NULL), + mDrawCanvas(NULL), + mGameConnection(NULL) +{ + dStrcpy(mName, "openvr"); + mDeviceType = INPUTMGR->getNextDeviceType(); + buildInputCodeTable(); + GFXDevice::getDeviceEventSignal().notify(this, &OpenVRProvider::_handleDeviceEvent); + INPUTMGR->registerDevice(this); +} + +OpenVRProvider::~OpenVRProvider() +{ + +} + +void OpenVRProvider::staticInit() +{ + // TODO: Add console vars +} + +bool OpenVRProvider::enable() +{ + disable(); + + // Load openvr runtime + vr::EVRInitError eError = vr::VRInitError_None; + mHMD = vr::VR_Init(&eError, vr::VRApplication_Scene); + + dMemset(mDeviceClassChar, '\0', sizeof(mDeviceClassChar)); + + if (eError != vr::VRInitError_None) + { + mHMD = NULL; + char buf[1024]; + sprintf_s(buf, sizeof(buf), "Unable to init VR runtime: %s", vr::VR_GetVRInitErrorAsEnglishDescription(eError)); + Con::printf(buf); + return false; + } + + mRenderModels = (vr::IVRRenderModels *)vr::VR_GetGenericInterface(vr::IVRRenderModels_Version, &eError); + if (!mRenderModels) + { + mHMD = NULL; + vr::VR_Shutdown(); + + char buf[1024]; + sprintf_s(buf, sizeof(buf), "Unable to get render model interface: %s", vr::VR_GetVRInitErrorAsEnglishDescription(eError)); + Con::printf(buf); + return false; + } + + mDriver = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String); + mDisplay = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String); + + mHMDRenderState.reset(mHMD); + mHMD->ResetSeatedZeroPose(); + dMemset(mPreviousInputTrackedDevicePose, '\0', sizeof(mPreviousInputTrackedDevicePose)); + + mEnabled = true; + + return 
true; +} + +bool OpenVRProvider::disable() +{ + if (mHMD) + { + mHMD = NULL; + mRenderModels = NULL; + mHMDRenderState.reset(NULL); + vr::VR_Shutdown(); + } + + mEnabled = false; + + return true; +} + +void OpenVRProvider::buildInputCodeTable() +{ + // Obtain all of the device codes + for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i) + { + OVR_SENSORROT[i] = INPUTMGR->getNextDeviceCode(); + + OVR_SENSORROTANG[i] = INPUTMGR->getNextDeviceCode(); + + OVR_SENSORVELOCITY[i] = INPUTMGR->getNextDeviceCode(); + OVR_SENSORANGVEL[i] = INPUTMGR->getNextDeviceCode(); + OVR_SENSORMAGNETOMETER[i] = INPUTMGR->getNextDeviceCode(); + + OVR_SENSORPOSITION[i] = INPUTMGR->getNextDeviceCode(); + + + OVR_BUTTONPRESSED[i] = INPUTMGR->getNextDeviceCode(); + OVR_BUTTONTOUCHED[i] = INPUTMGR->getNextDeviceCode(); + + OVR_AXISNONE[i] = INPUTMGR->getNextDeviceCode(); + OVR_AXISTRACKPAD[i] = INPUTMGR->getNextDeviceCode(); + OVR_AXISJOYSTICK[i] = INPUTMGR->getNextDeviceCode(); + OVR_AXISTRIGGER[i] = INPUTMGR->getNextDeviceCode(); + } + + // Build out the virtual map + char buffer[64]; + for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i) + { + dSprintf(buffer, 64, "opvr_sensorrot%d", i); + INPUTMGR->addVirtualMap(buffer, SI_ROT, OVR_SENSORROT[i]); + + dSprintf(buffer, 64, "opvr_sensorrotang%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORROTANG[i]); + + dSprintf(buffer, 64, "opvr_sensorvelocity%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORVELOCITY[i]); + + dSprintf(buffer, 64, "opvr_sensorangvel%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORANGVEL[i]); + + dSprintf(buffer, 64, "opvr_sensormagnetometer%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORMAGNETOMETER[i]); + + dSprintf(buffer, 64, "opvr_sensorpos%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_SENSORPOSITION[i]); + + dSprintf(buffer, 64, "opvr_buttonpressed%d", i); + INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_BUTTONPRESSED[i]); + dSprintf(buffer, 64, "opvr_buttontouched%d", i); + INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_BUTTONTOUCHED[i]); + + dSprintf(buffer, 64, "opvr_axis_none%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISNONE[i]); + dSprintf(buffer, 64, "opvr_axis_trackpad%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISTRACKPAD[i]); + dSprintf(buffer, 64, "opvr_axis_joystick%d", i); + INPUTMGR->addVirtualMap(buffer, SI_POS, OVR_AXISJOYSTICK[i]); + dSprintf(buffer, 64, "opvr_axis_trigger%d", i); + INPUTMGR->addVirtualMap(buffer, SI_INT, OVR_AXISTRIGGER[i]); + } +} + +bool OpenVRProvider::process() +{ + if (!mHMD) + return true; + + // Process SteamVR events + vr::VREvent_t event; + while (mHMD->PollNextEvent(&event, sizeof(event))) + { + processVREvent(event); + } + + // Process SteamVR controller state + for (vr::TrackedDeviceIndex_t unDevice = 0; unDevice < vr::k_unMaxTrackedDeviceCount; unDevice++) + { + vr::VRControllerState_t state; + if (mHMD->GetControllerState(unDevice, &state)) + { + // TODO + } + } + + // Update input poses + updateTrackedPoses(); + submitInputChanges(); + + return true; +} + +bool OpenVRProvider::providesFrameEyePose() const +{ + return mHMD != NULL; +} + +inline Point3F OpenVRVecToTorqueVec(vr::HmdVector3_t vec) +{ + return Point3F(-vec.v[0], vec.v[2], -vec.v[1]); +} + +void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos) +{ + // Directly set the rotation and position from the eye transforms + MatrixF torqueMat(1); + + F32 inRotMat[4][4]; + Point4F col0; mat.getColumn(0, &col0); + Point4F col1; mat.getColumn(1, 
&col1); + Point4F col2; mat.getColumn(2, &col2); + Point4F col3; mat.getColumn(3, &col3); + inRotMat[0][0] = col0.x; + inRotMat[0][1] = col0.y; + inRotMat[0][2] = col0.z; + inRotMat[0][3] = col0.w; + inRotMat[1][0] = col1.x; + inRotMat[1][1] = col1.y; + inRotMat[1][2] = col1.z; + inRotMat[1][3] = col1.w; + inRotMat[2][0] = col2.x; + inRotMat[2][1] = col2.y; + inRotMat[2][2] = col2.z; + inRotMat[2][3] = col2.w; + inRotMat[3][0] = col3.x; + inRotMat[3][1] = col3.y; + inRotMat[3][2] = col3.z; + inRotMat[3][3] = col3.w; + + OculusVRUtil::convertRotation(inRotMat, torqueMat); + + Point3F pos = torqueMat.getPosition(); + outRot = QuatF(torqueMat); + outPos = Point3F(-pos.x, pos.z, -pos.y); +} + +void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const +{ + AssertFatal(eye >= 0 && eye < 2, "Out of bounds eye"); + + MatrixF mat = mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eye]; + + OpenVRTransformToRotPos(mat, pose->orientation, pose->position); + pose->velocity = Point3F(0); + pose->angularVelocity = Point3F(0); +} + +bool OpenVRProvider::providesEyeOffsets() const +{ + return mHMD != NULL; +} + +/// Returns eye offset not taking into account any position tracking info +void OpenVRProvider::getEyeOffsets(Point3F *dest) const +{ + dest[0] = mHMDRenderState.mEyePose[0].getPosition(); + dest[1] = mHMDRenderState.mEyePose[1].getPosition(); +} + +bool OpenVRProvider::providesFovPorts() const +{ + return mHMD != NULL; +} + +void OpenVRProvider::getFovPorts(FovPort *out) const +{ + dMemcpy(out, mHMDRenderState.mEyeFov, sizeof(mHMDRenderState.mEyeFov)); +} + +bool OpenVRProvider::providesProjectionOffset() const +{ + return mHMD != NULL; +} + +const Point2F& OpenVRProvider::getProjectionOffset() const +{ + return Point2F(0, 0); +} + +void OpenVRProvider::getStereoViewports(RectI *out) const +{ + out[0] = mHMDRenderState.mEyeViewport[0]; + out[1] = mHMDRenderState.mEyeViewport[1]; +} + +void OpenVRProvider::getStereoTargets(GFXTextureTarget **out) const +{ + out[0] = mHMDRenderState.mEyeRT[0]; + out[1] = mHMDRenderState.mEyeRT[1]; +} + +void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas) +{ + vr::EVRInitError peError = vr::VRInitError_None; + + if (!vr::VRCompositor()) + { + printf("Compositor initialization failed. 
See log file for details\n"); + return; + } + + if (mDrawCanvas != canvas || mHMDRenderState.mHMD == NULL) + { + mHMDRenderState.setupRenderTargets(0); + } + mDrawCanvas = canvas; +} + +void OpenVRProvider::setCurrentConnection(GameConnection *connection) +{ + mGameConnection = connection; +} + +GameConnection* OpenVRProvider::getCurrentConnection() +{ + return mGameConnection; +} + +GFXTexHandle OpenVRProvider::getPreviewTexture() +{ + return mHMDRenderState.mStereoRenderTextures[0]; // TODO: render distortion preview +} + +void OpenVRProvider::onStartFrame() +{ + if (!mHMD) + return; + +} + +void OpenVRProvider::onEndFrame() +{ + if (!mHMD) + return; +} + +void OpenVRProvider::onEyeRendered(U32 index) +{ + if (!mHMD) + return; + + if (GFX->getAdapterType() == Direct3D11) + { + vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; + vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); + } + else if (GFX->getAdapterType() == OpenGL) + {/* + vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma }; + vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);*/ + } +} + +bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) +{ + if (!ManagedSingleton::instanceOrNull()) + { + return true; + } + + switch (evt) + { + case GFXDevice::deStartOfFrame: + + // Start of frame + + onStartFrame(); + + break; + + case GFXDevice::dePostFrame: + + // End of frame + + onEndFrame(); + + break; + + case GFXDevice::deDestroy: + + // Need to reinit rendering + break; + + case GFXDevice::deLeftStereoFrameRendered: + // + + onEyeRendered(0); + break; + + case GFXDevice::deRightStereoFrameRendered: + // + + onEyeRendered(1); + break; + + default: + break; + } + + return true; +} + +void OpenVRProvider::processVREvent(const vr::VREvent_t & event) +{ + switch (event.eventType) + { + case vr::VREvent_TrackedDeviceActivated: + { + // Setup render model + } + break; + case vr::VREvent_TrackedDeviceDeactivated: + { + // Deactivated + } + break; + case vr::VREvent_TrackedDeviceUpdated: + { + // Updated + } + break; + } +} + +void OpenVRProvider::updateTrackedPoses() +{ + if (!mHMD) + return; + + vr::VRCompositor()->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0); + + mValidPoseCount = 0; + + for (int nDevice = 0; nDevice < vr::k_unMaxTrackedDeviceCount; ++nDevice) + { + IDevicePose &inPose = mCurrentDevicePose[nDevice]; + if (mTrackedDevicePose[nDevice].bPoseIsValid) + { + mValidPoseCount++; + MatrixF mat = ConvertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking); + mat.inverse(); + + if (nDevice == vr::k_unTrackedDeviceIndex_Hmd) + { + mHMDRenderState.mHMDPose = mat; + } + + vr::TrackedDevicePose_t &outPose = mTrackedDevicePose[nDevice]; + OpenVRTransformToRotPos(mat, inPose.orientation, inPose.position); + + inPose.state = outPose.eTrackingResult; + inPose.valid = outPose.bPoseIsValid; + inPose.connected = outPose.bDeviceIsConnected; + + inPose.velocity = OpenVRVecToTorqueVec(outPose.vVelocity); + inPose.angularVelocity = OpenVRVecToTorqueVec(outPose.vAngularVelocity); + } + else + { + inPose.valid = false; + } + } +} + +void OpenVRProvider::submitInputChanges() +{ + // Diff current frame with previous frame + for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) + { + IDevicePose 
curPose = mCurrentDevicePose[i];
+      IDevicePose prevPose = mPreviousInputTrackedDevicePose[i];
+
+      if (!curPose.valid || !curPose.connected)
+         continue;
+
+      if (curPose.orientation != prevPose.orientation)
+      {
+         AngAxisF axisAA(curPose.orientation);
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[i], SI_MOVE, axisAA);
+      }
+
+      if (curPose.position != prevPose.position)
+      {
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[i], SI_MOVE, curPose.position);
+      }
+
+      if (curPose.velocity != prevPose.velocity)
+      {
+         // Linear velocity is passed through unchanged
+         VectorF angles;
+         angles.x = curPose.velocity.x;
+         angles.y = curPose.velocity.y;
+         angles.z = curPose.velocity.z;
+
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[i], SI_MOVE, angles);
+      }
+
+      if (curPose.angularVelocity != prevPose.angularVelocity)
+      {
+         // Convert angular velocity from radians to degrees
+         VectorF angles;
+         angles[0] = mRadToDeg(curPose.angularVelocity.x);
+         angles[1] = mRadToDeg(curPose.angularVelocity.y);
+         angles[2] = mRadToDeg(curPose.angularVelocity.z);
+
+         INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[i], SI_MOVE, angles);
+      }
+      /*
+      if (curPose.connected != prevPose.connected)
+      {
+         if (Con::isFunction("onOVRConnectionChanged"))
+         {
+            Con::executef("onOVRConnectionChanged", curPose.connected);
+         }
+      }*/
+
+      if (curPose.state != prevPose.state)
+      {
+         if (Con::isFunction("onOVRStateChanged"))
+         {
+            Con::executef("onOVRStateChanged", curPose.state);
+         }
+      }
+   }
+
+   dMemcpy(mPreviousInputTrackedDevicePose, mCurrentDevicePose, sizeof(mPreviousInputTrackedDevicePose));
+}
+
+void OpenVRProvider::resetSensors()
+{
+   if (mHMD)
+   {
+      mHMD->ResetSeatedZeroPose();
+   }
+}
+
+DefineEngineFunction(isOpenVRDeviceActive, bool, (), ,
+   "@brief Used to determine if the OpenVR input device is active\n\n"
+
+   "The OpenVR device is considered active when the library has been "
+   "initialized and either a real or simulated HMD is present.\n\n"
+
+   "@return True if the OpenVR input device is active.\n"
+
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return false;
+   }
+
+   return OCULUSVRDEV->getActive();
+}
+
+
+DefineEngineFunction(OpenVRSetEnabled, bool, (bool value), ,
+   "@brief Enables or disables the OpenVR input device\n\n"
+
+   "@param value True to enable the device, false to disable it.\n\n"
+
+   "@return True if the device was successfully enabled or disabled.\n"
+
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return false;
+   }
+
+   return value ? ManagedSingleton<OpenVRProvider>::instance()->enable() : ManagedSingleton<OpenVRProvider>::instance()->disable();
+}
+
+
+
+DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), ,
+   "@brief Sets the first HMD to be a GameConnection's display device\n\n"
+   "@param conn The GameConnection to set.\n"
+   "@return True if the GameConnection display device was set.\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      Con::errorf("setOpenVRHMDAsGameConnectionDisplayDevice(): No OpenVR device present.");
+      return false;
+   }
+
+   if (!conn)
+   {
+      Con::errorf("setOpenVRHMDAsGameConnectionDisplayDevice(): Invalid GameConnection.");
+      return false;
+   }
+
+   conn->setDisplayDevice(ManagedSingleton<OpenVRProvider>::instance());
+   return true;
+}
+
+DefineEngineFunction(OpenVRResetSensors, void, (), ,
+   "@brief Resets all Oculus VR sensors.\n\n"
+   "This resets all sensor orientations such that their 'normal' rotation "
+   "is defined when this function is called. This defines an HMD's forwards "
+   "and up direction, for example."
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return;
+   }
+
+   ManagedSingleton<OpenVRProvider>::instance()->resetSensors();
+}
diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h
new file mode 100644
index 000000000..053fd518a
--- /dev/null
+++ b/Engine/source/platform/input/openVR/openVRProvider.h
@@ -0,0 +1,172 @@
+
+#ifndef _OPENVRDEVICE_H_
+#define _OPENVRDEVICE_H_
+
+#include "math/mQuat.h"
+#include "math/mPoint4.h"
+#include "math/util/frustum.h"
+#include "core/util/tSingleton.h"
+
+#include "gfx/gfxDevice.h"
+#include "gfx/gfxVertexBuffer.h"
+#include "gfx/gfxPrimitiveBuffer.h"
+#include "gfx/gfxTarget.h"
+
+#include "platform/input/IInputDevice.h"
+#include "platform/input/event.h"
+#include "platform/output/IDisplayDevice.h"
+
+#include <openvr.h>
+
+class OpenVRHMDDevice;
+
+struct OpenVRRenderState
+{
+   vr::IVRSystem *mHMD;
+
+   FovPort mEyeFov[2];
+   MatrixF mEyePose[2];
+   MatrixF mHMDPose;
+
+   RectI mEyeViewport[2];
+   GFXTextureTargetRef mStereoRT;
+   GFXTextureTargetRef mEyeRT[2];
+
+   GFXTexHandle mStereoRenderTextures[2];
+   GFXTexHandle mStereoDepthTextures[2];
+
+   GFXVertexBufferHandle mDistortionVerts;
+   GFXPrimitiveBufferHandle mDistortionInds;
+
+   bool setupRenderTargets(U32 mode);
+   void setupDistortion();
+
+   void renderDistortion(U32 eye);
+
+   void renderPreview();
+
+   void reset(vr::IVRSystem* hmd);
+};
+
+class OpenVRProvider : public IDisplayDevice, public IInputDevice
+{
+public:
+
+   enum DataDifferences {
+      DIFF_NONE = 0,
+      DIFF_ROT = (1 << 0),
+      DIFF_ROTAXISX = (1 << 1),
+      DIFF_ROTAXISY = (1 << 2),
+      DIFF_ACCEL = (1 << 3),
+      DIFF_ANGVEL = (1 << 4),
+      DIFF_MAG = (1 << 5),
+      DIFF_POS = (1 << 6),
+      DIFF_STATUS = (1 << 7),
+
+      DIFF_ROTAXIS = (DIFF_ROTAXISX | DIFF_ROTAXISY),
+      DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
+   };
+
+   OpenVRProvider();
+   ~OpenVRProvider();
+
+   static void staticInit();
+
+   bool enable();
+   bool disable();
+
+   bool getActive() { return mHMD != NULL; }
+
+   /// @name Input handling
+   /// {
+   void buildInputCodeTable();
+   virtual bool process();
+   /// }
+
+   /// @name Display handling
+   /// {
+   virtual bool providesFrameEyePose() const;
+   virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const;
+
+   virtual bool providesEyeOffsets() const;
+   /// Returns eye offset not taking into account any position tracking info
+   virtual void getEyeOffsets(Point3F *dest) const;
+
+   virtual bool providesFovPorts() const;
+   virtual void getFovPorts(FovPort *out) 
const; + + virtual bool providesProjectionOffset() const; + virtual const Point2F& getProjectionOffset() const; + + virtual void getStereoViewports(RectI *out) const; + virtual void getStereoTargets(GFXTextureTarget **out) const; + + virtual void setDrawCanvas(GuiCanvas *canvas); + + virtual void setCurrentConnection(GameConnection *connection); + virtual GameConnection* getCurrentConnection(); + + virtual GFXTexHandle getPreviewTexture(); + + virtual void onStartFrame(); + virtual void onEndFrame(); + + virtual void onEyeRendered(U32 index); + + bool _handleDeviceEvent(GFXDevice::GFXDeviceEventType evt); + /// } + + /// @name OpenVR handling + /// { + void processVREvent(const vr::VREvent_t & event); + + void updateTrackedPoses(); + void submitInputChanges(); + + void resetSensors(); + /// } + + /// @name OpenVR state + /// { + vr::IVRSystem *mHMD; + vr::IVRRenderModels *mRenderModels; + String mDriver; + String mDisplay; + vr::TrackedDevicePose_t mTrackedDevicePose[vr::k_unMaxTrackedDeviceCount]; + IDevicePose mCurrentDevicePose[vr::k_unMaxTrackedDeviceCount]; + IDevicePose mPreviousInputTrackedDevicePose[vr::k_unMaxTrackedDeviceCount]; + U32 mValidPoseCount; + + char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount]; + + OpenVRRenderState mHMDRenderState; + /// } + + GuiCanvas* mDrawCanvas; + GameConnection* mGameConnection; + + static U32 OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_SENSORANGVEL[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_SENSORMAGNETOMETER[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_SENSORPOSITION[vr::k_unMaxTrackedDeviceCount]; + + static U32 OVR_BUTTONPRESSED[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_BUTTONTOUCHED[vr::k_unMaxTrackedDeviceCount]; + + static U32 OVR_AXISNONE[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount]; + static U32 OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount]; + + +public: + // For ManagedSingleton. + static const char* getSingletonName() { return "OpenVRProvider"; } +}; + +/// Returns the OculusVRDevice singleton. +#define OCULUSVRDEV ManagedSingleton::instance() + +#endif // _OCULUSVRDEVICE_H_ diff --git a/Engine/source/platform/output/IDisplayDevice.h b/Engine/source/platform/output/IDisplayDevice.h index 3231649a7..bd372085d 100644 --- a/Engine/source/platform/output/IDisplayDevice.h +++ b/Engine/source/platform/output/IDisplayDevice.h @@ -34,8 +34,16 @@ class GuiCanvas; /// Defines the basic display pose common to most display devices typedef struct DisplayPose { - EulerF orientation; /// Direction device is facing + QuatF orientation; /// Direction device is facing Point3F position; /// Relative position of device in view space + + Point3F velocity; + Point3F angularVelocity; + + U32 state; /// Generic state + + bool valid; /// Pose set + bool connected; /// Device connected } IDevicePose; class IDisplayDevice @@ -63,6 +71,9 @@ public: virtual GameConnection* getCurrentConnection() = 0; virtual void onStartFrame() = 0; + + /// Returns a texture handle representing a preview of the composited VR view + virtual GFXTexHandle getPreviewTexture() = 0; }; #endif // _IDISPLAYDEVICE_H_ From 126828131d9df2a715c74742dc90559c7c939bdd Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Mon, 25 Apr 2016 23:26:27 +0100 Subject: [PATCH 05/33] Improve openvr, also add a module for it. 
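
One of the changes below double-buffers the textures handed to the OpenVR compositor. A rough sketch of that submit path, condensed from the diff (the surrounding function and the reason for rotating textures are assumptions, not statements taken from the patch): each eye resolves the shared stereo render target into the current texture of a two-entry VRTextureSet, submits it, then advances so the next frame writes the other texture rather than one the compositor may still be reading.

   GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures[index].getTextureHandle();
   mHMDRenderState.mEyeRT[0]->resolveTo(eyeTex);        // copy out of the shared stereo RT
   mHMDRenderState.mOutputEyeTextures[index].advance(); // next frame uses the other texture

   vr::Texture_t eyeTexture = { (void*)static_cast<GFXD3D11TextureObject*>(eyeTex.getPointer())->get2DTex(),
                                vr::API_DirectX, vr::ColorSpace_Gamma };
   vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);
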
--- Engine/source/gfx/gfxInit.cpp | 22 ++-- .../platform/input/openVR/openVRProvider.cpp | 118 +++++++++++++++++- .../platform/input/openVR/openVRProvider.h | 43 +++++++ Tools/CMake/modules/module_openvr.cmake | 30 +++++ 4 files changed, 199 insertions(+), 14 deletions(-) create mode 100644 Tools/CMake/modules/module_openvr.cmake diff --git a/Engine/source/gfx/gfxInit.cpp b/Engine/source/gfx/gfxInit.cpp index bb5e560ac..69ea43d0c 100644 --- a/Engine/source/gfx/gfxInit.cpp +++ b/Engine/source/gfx/gfxInit.cpp @@ -293,19 +293,19 @@ GFXAdapter *GFXInit::getBestAdapterChoice() // Get the user's preference for device... const String renderer = Con::getVariable("$pref::Video::displayDevice"); const String outputDevice = Con::getVariable("$pref::Video::displayOutputDevice"); - const String adapterDevice = Con::getVariable("$Video::forceDisplayAdapter"); + const String adapterDevice = Con::getVariable("$Video::forceDisplayAdapter"); - GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());; - GFXAdapter *adapter; + GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());; + GFXAdapter *adapter; - if (adapterDevice.isEmpty()) - { - adapter = chooseAdapter(adapterType, outputDevice.c_str()); - } - else - { - adapter = chooseAdapter(adapterType, dAtoi(adapterDevice.c_str())); - } + if (adapterDevice.isEmpty()) + { + adapter = chooseAdapter(adapterType, outputDevice.c_str()); + } + else if (dAtoi(adapterDevice.c_str()) != -1) + { + adapter = chooseAdapter(adapterType, dAtoi(adapterDevice.c_str())); + } // Did they have one? Return it. if(adapter) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 01a9b1dfb..bf03d99b7 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -11,6 +11,11 @@ #include "gfx/D3D11/gfxD3D11EnumTranslate.h" #include "gfx/gfxStringEnumTranslate.h" + +#include "gfx/D3D9/gfxD3D9Device.h" +#include "gfx/D3D9/gfxD3D9TextureObject.h" +#include "gfx/D3D9/gfxD3D9EnumTranslate.h" + /* #include "gfx/gl/gfxGLDevice.h" #include "gfx/gl/gfxGLTextureObject.h" @@ -20,6 +25,8 @@ #include "platform/input/oculusVR/oculusVRUtil.h" +//------------------------------------------------------------ + U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 }; U32 OpenVRProvider::OVR_SENSORROTANG[vr::k_unMaxTrackedDeviceCount] = { 0 }; U32 OpenVRProvider::OVR_SENSORVELOCITY[vr::k_unMaxTrackedDeviceCount] = { 0 }; @@ -108,6 +115,9 @@ bool OpenVRRenderState::setupRenderTargets(U32 mode) mEyeRT[0] = mEyeRT[1] = mStereoRT; + mOutputEyeTextures[0].init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT"); + mOutputEyeTextures[1].init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT"); + return true; } @@ -272,6 +282,9 @@ void OpenVRRenderState::reset(vr::IVRSystem* hmd) mDistortionVerts = NULL; mDistortionInds = NULL; + mOutputEyeTextures[0].clear(); + mOutputEyeTextures[1].clear(); + if (!mHMD) return; @@ -303,6 +316,7 @@ OpenVRProvider::OpenVRProvider() : buildInputCodeTable(); GFXDevice::getDeviceEventSignal().notify(this, &OpenVRProvider::_handleDeviceEvent); INPUTMGR->registerDevice(this); + dMemset(&mLUID, '\0', sizeof(mLUID)); } OpenVRProvider::~OpenVRProvider() @@ -334,6 +348,49 @@ bool OpenVRProvider::enable() return false; } + dMemset(&mLUID, '\0', sizeof(mLUID)); + +#ifdef TORQUE_OS_WIN32 + + // For windows we 
need to lookup the DXGI record for this and grab the LUID for the display adapter. We need the LUID since + // T3D uses EnumAdapters1 not EnumAdapters whereas openvr uses EnumAdapters. + int32_t AdapterIdx; + IDXGIAdapter* EnumAdapter; + IDXGIFactory1* DXGIFactory; + mHMD->GetDXGIOutputInfo(&AdapterIdx); + // Get the LUID of the device + + HRESULT hr = CreateDXGIFactory1(__uuidof(IDXGIFactory1), reinterpret_cast(&DXGIFactory)); + + if (FAILED(hr)) + AssertFatal(false, "OpenVRProvider::enable -> CreateDXGIFactory1 call failure"); + + hr = DXGIFactory->EnumAdapters(AdapterIdx, &EnumAdapter); + + if (FAILED(hr)) + { + Con::warnf("VR: HMD device has an invalid adapter."); + } + else + { + DXGI_ADAPTER_DESC desc; + hr = EnumAdapter->GetDesc(&desc); + if (FAILED(hr)) + { + Con::warnf("VR: HMD device has an invalid adapter."); + } + else + { + dMemcpy(&mLUID, &desc.AdapterLuid, sizeof(mLUID)); + } + SAFE_RELEASE(EnumAdapter); + } + + SAFE_RELEASE(DXGIFactory); +#endif + + + mRenderModels = (vr::IVRRenderModels *)vr::VR_GetGenericInterface(vr::IVRRenderModels_Version, &eError); if (!mRenderModels) { @@ -441,6 +498,9 @@ bool OpenVRProvider::process() if (!mHMD) return true; + if (!vr::VRCompositor()) + return true; + // Process SteamVR events vr::VREvent_t event; while (mHMD->PollNextEvent(&event, sizeof(event))) @@ -570,7 +630,7 @@ void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas) if (!vr::VRCompositor()) { - printf("Compositor initialization failed. See log file for details\n"); + Con::errorf("VR: Compositor initialization failed. See log file for details\n"); return; } @@ -614,16 +674,30 @@ void OpenVRProvider::onEyeRendered(U32 index) if (!mHMD) return; + vr::EVRCompositorError err = vr::VRCompositorError_None; + + GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures[index].getTextureHandle(); + mHMDRenderState.mEyeRT[0]->resolveTo(eyeTex); + mHMDRenderState.mOutputEyeTextures[index].advance(); + if (GFX->getAdapterType() == Direct3D11) { - vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; - vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); + GFXFormat fmt1 = eyeTex->getFormat(); + vr::Texture_t eyeTexture = { (void*)static_cast(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); + } + else if (GFX->getAdapterType() == Direct3D9) + { + //vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; + //err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); } else if (GFX->getAdapterType() == OpenGL) {/* vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma }; vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);*/ } + + AssertFatal(err != vr::VRCompositorError_None, "VR compositor error!"); } bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) @@ -675,6 +749,29 @@ bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) return true; } +S32 OpenVRProvider::getDisplayDeviceId() const +{ + return -1; +#ifdef TORQUE_OS_WIN32 + if (GFX->getAdapterType() == Direct3D11) + { + Vector adapterList; + GFXD3D11Device::enumerateAdapters(adapterList); + + for (U32 i = 
0, sz = adapterList.size(); i < sz; i++) + { + GFXAdapter* adapter = adapterList[i]; + if (dMemcmp(&adapter->mLUID, &mLUID, sizeof(mLUID)) == 0) + { + return adapter->mIndex; + } + } + } +#endif + + return -1; +} + void OpenVRProvider::processVREvent(const vr::VREvent_t & event) { switch (event.eventType) @@ -870,6 +967,21 @@ DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConne return true; } + +DefineEngineFunction(OpenVRGetDisplayDeviceId, S32, (), , + "@brief MacOS display ID.\n\n" + "@param index The HMD index.\n" + "@return The ID of the HMD display device, if any.\n" + "@ingroup Game") +{ + if (!ManagedSingleton::instanceOrNull()) + { + return -1; + } + + return ManagedSingleton::instance()->getDisplayDeviceId(); +} + DefineEngineFunction(OpenVRResetSensors, void, (), , "@brief Resets all Oculus VR sensors.\n\n" "This resets all sensor orientations such that their 'normal' rotation " diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index 053fd518a..5006269a1 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -20,6 +20,44 @@ class OpenVRHMDDevice; +class VRTextureSet +{ +public: + static const int TextureCount = 2; + GFXTexHandle mTextures[2]; + U32 mIndex; + + VRTextureSet() : mIndex(0) + { + } + + void init(U32 width, U32 height, GFXFormat fmt, GFXTextureProfile *profile, const String &desc) + { + for (U32 i = 0; i < TextureCount; i++) + { + mTextures[i].set(width, height, fmt, profile, desc); + } + } + + void clear() + { + for (U32 i = 0; i < TextureCount; i++) + { + mTextures[i] = NULL; + } + } + + void advance() + { + mIndex = (mIndex + 1) & TextureCount; + } + + GFXTexHandle& getTextureHandle() + { + return mTextures[mIndex]; + } +}; + struct OpenVRRenderState { vr::IVRSystem *mHMD; @@ -38,6 +76,8 @@ struct OpenVRRenderState GFXVertexBufferHandle mDistortionVerts; GFXPrimitiveBufferHandle mDistortionInds; + VRTextureSet mOutputEyeTextures[2]; + bool setupRenderTargets(U32 mode); void setupDistortion(); @@ -114,6 +154,8 @@ public: virtual void onEyeRendered(U32 index); bool _handleDeviceEvent(GFXDevice::GFXDeviceEventType evt); + + S32 getDisplayDeviceId() const; /// } /// @name OpenVR handling @@ -140,6 +182,7 @@ public: char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount]; OpenVRRenderState mHMDRenderState; + GFXAdapterLUID mLUID; /// } GuiCanvas* mDrawCanvas; diff --git a/Tools/CMake/modules/module_openvr.cmake b/Tools/CMake/modules/module_openvr.cmake new file mode 100644 index 000000000..66a490348 --- /dev/null +++ b/Tools/CMake/modules/module_openvr.cmake @@ -0,0 +1,30 @@ + +# module openvr + +option(TORQUE_OPENVR "Enable openvr module" OFF) +mark_as_advanced(TORQUE_OPENVR) +if(TORQUE_OPENVR) + if(TORQUE_OCULUSVR_SDK_PATH STREQUAL "") + set(TORQUE_OPENVR_SDK_PATH "" CACHE PATH "openvr library path" FORCE) + endif() +else() # hide variable + set(TORQUE_OPENVR_SDK_PATH "" CACHE INTERNAL "" FORCE) +endif() + +if(TORQUE_OPENVR) + # Source + addPathRec( "${srcDir}/platform/input/openvr" ) + + # Includes + addInclude( "${TORQUE_OPENVR_SDK_PATH}/headers" ) + + # Libs + if( WIN32 ) + if( TORQUE_CPU_X64 ) + link_directories( "${TORQUE_OPENVR_SDK_PATH}/lib/win64" ) + else() + link_directories( "${TORQUE_OPENVR_SDK_PATH}/lib/win32" ) + endif() + addLib( "openvr_api" ) + endif() +endif() From 639b3973943cce39c3a58a02fef7e87586d8d2d4 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Mon, 25 Apr 2016 23:26:54 
+0100 Subject: [PATCH 06/33] Disable input focus disable to preserve sanity (temp) --- Engine/source/windowManager/windowInputGenerator.cpp | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Engine/source/windowManager/windowInputGenerator.cpp b/Engine/source/windowManager/windowInputGenerator.cpp index 193d0248a..02d863a6b 100644 --- a/Engine/source/windowManager/windowInputGenerator.cpp +++ b/Engine/source/windowManager/windowInputGenerator.cpp @@ -82,7 +82,7 @@ WindowInputGenerator::~WindowInputGenerator() //----------------------------------------------------------------------------- void WindowInputGenerator::generateInputEvent( InputEventInfo &inputEvent ) { - if (!mInputController || !mFocused) + if (!mInputController)// || !mFocused) return; if (inputEvent.action == SI_MAKE && inputEvent.deviceType == KeyboardDeviceType) @@ -331,7 +331,7 @@ void WindowInputGenerator::handleKeyboard( WindowId did, U32 modifier, U32 actio void WindowInputGenerator::handleInputEvent( U32 deviceInst, F32 fValue, F32 fValue2, F32 fValue3, F32 fValue4, S32 iValue, U16 deviceType, U16 objType, U16 ascii, U16 objInst, U8 action, U8 modifier ) { // Skip it if we don't have focus. - if(!mInputController || !mFocused) + if(!mInputController)// || !mFocused) return; // Convert to an InputEventInfo and pass it around for processing. From 36908b0434f79e39db560ad2f6f12cf00731cda6 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 1 May 2016 00:05:57 +0100 Subject: [PATCH 07/33] Add code to render the basic stereo view fallback --- Engine/source/gui/3d/guiTSControl.cpp | 102 +++++++++++++++++++++++++- Engine/source/gui/3d/guiTSControl.h | 9 ++- 2 files changed, 106 insertions(+), 5 deletions(-) diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 1b8b92f77..1b7f9f556 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -426,8 +426,97 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) renderWorld(viewport); DebugDrawer::get()->render(); - // Restore the previous matrix state before - // we begin rendering the child controls. 
+ // Render the canvas overlay if its available + if (mStereoCanvas.getPointer() && mStereoGuiTarget.getPointer() && mStereoCanvas->size() != 0) + { + GFXDEBUGEVENT_SCOPE(StereoGui_Render, ColorI(255, 0, 0)); + MatrixF proj(1); + + Frustum originalFrustum = frustum; + GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0); + const FovPort *currentFovPort = GFX->getStereoFovPort(); + const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms(); + const Point3F *eyeOffset = GFX->getStereoEyeOffsets(); + Frustum gfxFrustum = originalFrustum; + + GFX->setClipRect(viewport); + GFX->setViewport(viewport); + GFX->setFrustum(frustum); + + MatrixF eyeWorldTrans(1); + if (mLastCameraQuery.currentEye != -1) + { + eyeWorldTrans.setPosition(Point3F(eyeOffset[mLastCameraQuery.currentEye].x, eyeOffset[mLastCameraQuery.currentEye].y, eyeOffset[mLastCameraQuery.currentEye].z)); + } + MatrixF eyeWorld(1); + eyeWorld.mul(eyeWorldTrans); + eyeWorld.inverse(); + + GFX->setWorldMatrix(eyeWorld); + GFX->setViewMatrix(MatrixF::Identity); + + if (!mStereoOverlayVB.getPointer()) + { + mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic); + GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4); + + F32 texLeft = 0.0f; + F32 texRight = 1.0f; + F32 texTop = 1.0f; + F32 texBottom = 0.0f; + + F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight(); + F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH; + F32 rectHeight = rectWidth * rectRatio; + + F32 screenLeft = -rectWidth * 0.5; + F32 screenRight = rectWidth * 0.5; + F32 screenTop = -rectHeight * 0.5; + F32 screenBottom = rectHeight * 0.5; + + const F32 fillConv = 0.0f; + const F32 frustumDepthAdjusted = gfxFrustum.getNearDist() + 0.012; + verts[0].point.set(screenLeft - fillConv, frustumDepthAdjusted, screenTop - fillConv); + verts[1].point.set(screenRight - fillConv, frustumDepthAdjusted, screenTop - fillConv); + verts[2].point.set(screenLeft - fillConv, frustumDepthAdjusted, screenBottom - fillConv); + verts[3].point.set(screenRight - fillConv, frustumDepthAdjusted, screenBottom - fillConv); + + verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255, 255, 255, 255); + + verts[0].texCoord.set(texLeft, texTop); + verts[1].texCoord.set(texRight, texTop); + verts[2].texCoord.set(texLeft, texBottom); + verts[3].texCoord.set(texRight, texBottom); + + mStereoOverlayVB.unlock(); + } + + if (!mStereoGuiSB.getPointer()) + { + // DrawBitmapStretchSR + GFXStateBlockDesc bitmapStretchSR; + bitmapStretchSR.setCullMode(GFXCullNone); + bitmapStretchSR.setZReadWrite(false, false); + bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); + bitmapStretchSR.samplersDefined = true; + + bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); + bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint; + bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint; + bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint; + + mStereoGuiSB = GFX->createStateBlock(bitmapStretchSR); + } + + GFX->setPrimitiveBuffer(NULL); + GFX->setVertexBuffer(mStereoOverlayVB); + GFX->setStateBlock(mStereoGuiSB); + GFX->setTexture(0, texObject); + GFX->setupGenericShaders(GFXDevice::GSModColorTexture); + GFX->drawPrimitive(GFXTriangleStrip, 0, 2); + } + + saver.restore(); } @@ -458,6 +547,8 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) Point2I renderSize = getExtent(); Frustum frustum; + mLastCameraQuery.currentEye = -1; + if (mRenderStyle == RenderStyleStereoSideBySide) { 
GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide); @@ -573,12 +664,14 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); mLastCameraQuery.cameraMatrix = myTransforms[0]; frustum.update(); - GFX->activateStereoTarget(0); + GFX->activateStereoTarget(0); + mLastCameraQuery.currentEye = 0; _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered); // Right - GFX->activateStereoTarget(1); + GFX->activateStereoTarget(1); + mLastCameraQuery.currentEye = 1; MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); mLastCameraQuery.cameraMatrix = myTransforms[1]; frustum.update(); @@ -703,6 +796,7 @@ void GuiTSCtrl::drawLineList( const Vector &points, const ColorI color, void GuiTSCtrl::setStereoGui(GuiOffscreenCanvas *canvas) { mStereoGuiTarget = canvas ? canvas->getTarget() : NULL; + mStereoCanvas = canvas; } diff --git a/Engine/source/gui/3d/guiTSControl.h b/Engine/source/gui/3d/guiTSControl.h index bc2fba586..82bf0ebdb 100644 --- a/Engine/source/gui/3d/guiTSControl.h +++ b/Engine/source/gui/3d/guiTSControl.h @@ -35,6 +35,10 @@ #include "materials/matTextureTarget.h" #endif +#ifndef _GUIOFFSCREENCANVAS_H_ +#include "gui/core/guiOffscreenCanvas.h" +#endif + class IDisplayDevice; class GuiOffscreenCanvas; @@ -52,6 +56,7 @@ struct CameraQuery bool hasFovPort; bool hasStereoTargets; MatrixF cameraMatrix; + S32 currentEye; RectI stereoViewports[2]; // destination viewports GFXTextureTarget* stereoTargets[2]; GuiCanvas* drawCanvas; // Canvas we are drawing to. 
Needed for VR @@ -68,7 +73,7 @@ public: enum RenderStyles { RenderStyleStandard = 0, RenderStyleStereoSideBySide = (1<<0), - RenderStyleStereoSeparate = (1<<1), + RenderStyleStereoSeparate = (1<<1), }; protected: @@ -110,6 +115,8 @@ protected: GFXVertexBufferHandle mStereoPreviewVB; GFXStateBlockRef mStereoPreviewSB; + + SimObjectPtr mStereoCanvas; public: From b2faecc82d097cb9a7714c8e66138e298f7c1aa7 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 1 May 2016 00:08:29 +0100 Subject: [PATCH 08/33] Fix openvr typos --- Engine/source/platform/input/openVR/openVRProvider.cpp | 2 +- Engine/source/platform/input/openVR/openVRProvider.h | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index bf03d99b7..93e843f87 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -697,7 +697,7 @@ void OpenVRProvider::onEyeRendered(U32 index) vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);*/ } - AssertFatal(err != vr::VRCompositorError_None, "VR compositor error!"); + AssertFatal(err == vr::VRCompositorError_None, "VR compositor error!"); } bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index 5006269a1..206aa8799 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -49,7 +49,7 @@ public: void advance() { - mIndex = (mIndex + 1) & TextureCount; + mIndex = (mIndex + 1) % TextureCount; } GFXTexHandle& getTextureHandle() From b58f34da9b7d36ffb04358c15fca8efb0ea2f599 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 1 May 2016 00:09:50 +0100 Subject: [PATCH 09/33] Correctly handle invalid openvr adapters --- Engine/source/gfx/gfxInit.cpp | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/Engine/source/gfx/gfxInit.cpp b/Engine/source/gfx/gfxInit.cpp index 69ea43d0c..9d0cf36ac 100644 --- a/Engine/source/gfx/gfxInit.cpp +++ b/Engine/source/gfx/gfxInit.cpp @@ -296,15 +296,19 @@ GFXAdapter *GFXInit::getBestAdapterChoice() const String adapterDevice = Con::getVariable("$Video::forceDisplayAdapter"); GFXAdapterType adapterType = getAdapterTypeFromName(renderer.c_str());; - GFXAdapter *adapter; + GFXAdapter *adapter = NULL; if (adapterDevice.isEmpty()) { adapter = chooseAdapter(adapterType, outputDevice.c_str()); } - else if (dAtoi(adapterDevice.c_str()) != -1) + else { - adapter = chooseAdapter(adapterType, dAtoi(adapterDevice.c_str())); + S32 adapterIdx = dAtoi(adapterDevice.c_str()); + if (adapterIdx == -1) + adapter = chooseAdapter(adapterType, outputDevice.c_str()); + else + adapter = chooseAdapter(adapterType, adapterIdx); } // Did they have one? Return it. 
From b15be28f38dc4140ce32a975f09853339007dd80 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 1 May 2016 00:30:25 +0100 Subject: [PATCH 10/33] Temp add the convertRotation handler so we dont need the oculus files --- .../platform/input/openVR/openVRProvider.cpp | 23 +++++++++++++++++-- 1 file changed, 21 insertions(+), 2 deletions(-) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 93e843f87..4ef916bcf 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -22,7 +22,26 @@ #include "gfx/gl/gfxGLEnumTranslate.h" */ -#include "platform/input/oculusVR/oculusVRUtil.h" +namespace OpenVRUtil +{ + /// Convert an OVR sensor's rotation to a Torque 3D matrix + void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation) + { + // Set rotation. We need to convert from sensor coordinates to + // Torque coordinates. The sensor matrix is stored row-major. + // The conversion is: + // + // Sensor Torque + // a b c a b c a -c b + // d e f --> -g -h -i --> -g i -h + // g h i d e f d -f e + outRotation.setColumn(0, Point4F( inRotMat[0][0], -inRotMat[2][0], inRotMat[1][0], 0.0f)); + outRotation.setColumn(1, Point4F(-inRotMat[0][2], inRotMat[2][2], -inRotMat[1][2], 0.0f)); + outRotation.setColumn(2, Point4F( inRotMat[0][1], -inRotMat[2][1], inRotMat[1][1], 0.0f)); + outRotation.setPosition(Point3F::Zero); + } +} + //------------------------------------------------------------ @@ -562,7 +581,7 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos) inRotMat[3][2] = col3.z; inRotMat[3][3] = col3.w; - OculusVRUtil::convertRotation(inRotMat, torqueMat); + OpenVRUtil::convertRotation(inRotMat, torqueMat); Point3F pos = torqueMat.getPosition(); outRot = QuatF(torqueMat); From 9e5eda9a0844532b8ba35572b4189d7588d4afb9 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 7 May 2016 22:33:54 +0100 Subject: [PATCH 11/33] More progress - Fixed normal stereo rendering - Beginnings of vr overlay code --- Engine/source/gui/3d/guiTSControl.cpp | 47 +- .../platform/input/openVR/openVROverlay.cpp | 161 +++++ .../platform/input/openVR/openVROverlay.h | 89 +++ .../platform/input/openVR/openVRProvider.cpp | 581 ++++++++++-------- .../platform/input/openVR/openVRProvider.h | 70 ++- .../source/platform/output/IDisplayDevice.h | 1 + 6 files changed, 693 insertions(+), 256 deletions(-) create mode 100644 Engine/source/platform/input/openVR/openVROverlay.cpp create mode 100644 Engine/source/platform/input/openVR/openVROverlay.h diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 1b7f9f556..1a8046dd2 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -538,6 +538,47 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) return; } + // jamesu - currently a little bit of a hack. 
Ideally we need to ditch the viewports in the query data and just rely on the display device + if (mLastCameraQuery.displayDevice) + { + if (mRenderStyle == RenderStyleStereoSideBySide) + { + mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_StereoSideBySide); + } + else if (mRenderStyle == RenderStyleStereoSeparate) + { + mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_StereoSeparate); + } + else + { + mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_Standard); + } + + // The connection's display device may want to set the projection offset + if (mLastCameraQuery.displayDevice->providesProjectionOffset()) + { + mLastCameraQuery.projectionOffset = mLastCameraQuery.displayDevice->getProjectionOffset(); + } + + // The connection's display device may want to set the eye offset + if (mLastCameraQuery.displayDevice->providesEyeOffsets()) + { + mLastCameraQuery.displayDevice->getEyeOffsets(mLastCameraQuery.eyeOffset); + } + + // Grab field of view for both eyes + if (mLastCameraQuery.displayDevice->providesFovPorts()) + { + mLastCameraQuery.displayDevice->getFovPorts(mLastCameraQuery.fovPort); + mLastCameraQuery.hasFovPort = true; + } + + mLastCameraQuery.displayDevice->getStereoViewports(mLastCameraQuery.stereoViewports); + mLastCameraQuery.displayDevice->getStereoTargets(mLastCameraQuery.stereoTargets); + + mLastCameraQuery.hasStereoTargets = mLastCameraQuery.stereoTargets[0]; + } + GFXTargetRef origTarget = GFX->getActiveRenderTarget(); U32 origStyle = GFX->getCurrentRenderStyle(); @@ -612,6 +653,9 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) GFX->activateStereoTarget(-1); _internalRender(RectI(updateRect.point, updateRect.extent), frustum); + + // Notify device we've rendered the right, thus the last stereo frame. + GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); // Render preview if (mLastCameraQuery.displayDevice) @@ -626,7 +670,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) } } } - else if (mRenderStyle == RenderStyleStereoSeparate && mLastCameraQuery.stereoTargets[0]) + else if (mRenderStyle == RenderStyleStereoSeparate && mLastCameraQuery.displayDevice) { // In this case we render the scene twice to different render targets, then // render the final composite view @@ -699,7 +743,6 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) Point2I screensize = getRoot()->getWindowSize(); tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y); #endif - GFX->setCurrentRenderStyle(GFXDevice::RS_Standard); // set up the camera and viewport stuff: F32 wwidth; diff --git a/Engine/source/platform/input/openVR/openVROverlay.cpp b/Engine/source/platform/input/openVR/openVROverlay.cpp new file mode 100644 index 000000000..d22abbd51 --- /dev/null +++ b/Engine/source/platform/input/openVR/openVROverlay.cpp @@ -0,0 +1,161 @@ +#include "platform/input/openVR/openVROverlay.h" + +ImplementEnumType(OpenVROverlayType, + "Desired overlay type for OpenVROverlay. 
.\n\n" + "@ingroup OpenVR") +{ OpenVROverlay::OVERLAYTYPE_OVERLAY, "Overlay" }, +{ OpenVROverlay::OVERLAYTYPE_DASHBOARD, "Dashboard" }, +EndImplementEnumType; + +OpenVROverlay::OpenVROverlay() +{ + +} + +OpenVROverlay::~OpenVROverlay() +{ + +} + +void OpenVROverlay::initPersistFields() +{ + Parent::initPersistFields(); +} + +bool OpenVROverlay::onAdd() +{ + if (Parent::onAdd()) + { + mOverlayTypeDirty = true; + mOverlayDirty = true; + return true; + } + + return false; +} + +void OpenVROverlay::onRemove() +{ + if (mOverlayHandle) + { + vr::VROverlay()->DestroyOverlay(mOverlayHandle); + mOverlayHandle = NULL; + } +} + +void OpenVROverlay::resetOverlay() +{ + mOverlayTypeDirty = false; +} + +void OpenVROverlay::updateOverlay() +{ + if (mOverlayTypeDirty) + resetOverlay(); + + // Update params TODO + mOverlayDirty = false; +} + +void OpenVROverlay::showOverlay() +{ + if (mOverlayHandle == NULL) + return; + + vr::VROverlay()->ShowOverlay(mOverlayHandle); +} + +void OpenVROverlay::hideOverlay() +{ + if (mOverlayHandle == NULL) + return; + + vr::VROverlay()->HideOverlay(mOverlayHandle); +} + + +bool OpenVROverlay::isOverlayVisible() +{ + if (mOverlayHandle == NULL) + return false; + + return vr::VROverlay()->IsOverlayVisible(mOverlayHandle); +} + +bool OpenVROverlay::isOverlayHoverTarget() +{ + if (mOverlayHandle == NULL) + return false; + + return vr::VROverlay()->IsHoverTargetOverlay(mOverlayHandle); +} + + +bool OpenVROverlay::isGamepadFocussed() +{ + if (mOverlayHandle == NULL) + return false; + + return vr::VROverlay()->GetGamepadFocusOverlay() == mOverlayHandle; +} + +bool OpenVROverlay::isActiveDashboardOverlay() +{ + return false; // TODO WHERE DID I GET THIS FROM +} + +MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const vr::ETrackingUniverseOrigin trackingOrigin, const Point2F &pos) +{ + if (mOverlayHandle == NULL) + return MatrixF::Identity; + + vr::HmdVector2_t vec; + vec.v[0] = pos.x; + vec.v[1] = pos.y; + vr::HmdMatrix34_t outMat; + MatrixF outTorqueMat; + if (vr::VROverlay()->GetTransformForOverlayCoordinates(mOverlayHandle, trackingOrigin, vec, &outMat) != vr::VROverlayError_None) + return MatrixF::Identity; + + MatrixF vrMat(1); + vrMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(outMat); + OpenVRUtil::convertTransformFromOVR(vrMat, outTorqueMat); + return outTorqueMat; +} + +bool OpenVROverlay::castRay(const vr::ETrackingUniverseOrigin trackingOrigin, const Point3F &origin, const Point3F &direction, RayInfo *info) +{ + if (mOverlayHandle == NULL) + return false; + + vr::VROverlayIntersectionParams_t params; + vr::VROverlayIntersectionResults_t result; + + params.eOrigin = trackingOrigin; + params.vSource.v[0] = origin.x; + params.vSource.v[1] = origin.y; + params.vSource.v[2] = origin.z; + params.vDirection.v[0] = direction.x; // TODO: need to transform this to vr-space + params.vDirection.v[1] = direction.y; + params.vDirection.v[2] = direction.z; + + bool rayHit = vr::VROverlay()->ComputeOverlayIntersection(mOverlayHandle, ¶ms, &result); + + if (rayHit && info) + { + info->t = result.fDistance; + info->point = Point3F(result.vPoint.v[0], result.vPoint.v[1], result.vPoint.v[2]); // TODO: need to transform this FROM vr-space + info->normal = Point3F(result.vNormal.v[0], result.vNormal.v[1], result.vNormal.v[2]); + info->texCoord = Point2F(result.vUVs.v[0], result.vUVs.v[1]); + info->object = NULL; + info->userData = this; + } + + return rayHit; +} + +void OpenVROverlay::moveGamepadFocusToNeighbour() +{ + +} + diff --git 
a/Engine/source/platform/input/openVR/openVROverlay.h b/Engine/source/platform/input/openVR/openVROverlay.h new file mode 100644 index 000000000..8a5a82f17 --- /dev/null +++ b/Engine/source/platform/input/openVR/openVROverlay.h @@ -0,0 +1,89 @@ +#ifndef _OPENVROVERLAY_H_ +#define _OPENVROVERLAY_H_ + +#ifndef _GUIOFFSCREENCANVAS_H_ +#include "gui/core/guiOffscreenCanvas.h" +#endif +#ifndef _OPENVRDEVICE_H_ +#include "platform/input/openVR/openVRProvider.h" +#endif +#ifndef _COLLISION_H_ +#include "collision/collision.h" +#endif + + +typedef vr::VROverlayInputMethod OpenVROverlayInputMethod; +typedef vr::VROverlayTransformType OpenVROverlayTransformType; +typedef vr::EGamepadTextInputMode OpenVRGamepadTextInputMode; +typedef vr::EGamepadTextInputLineMode OpenVRGamepadTextInputLineMode; +typedef vr::ETrackingResult OpenVRTrackingResult; +typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin; +typedef vr::EOverlayDirection OpenVROverlayDirection; +typedef vr::EVRState OpenVRState; + +class OpenVROverlay : public GuiOffscreenCanvas +{ +public: + typedef GuiOffscreenCanvas Parent; + + enum OverlayType + { + OVERLAYTYPE_OVERLAY, + OVERLAYTYPE_DASHBOARD, + }; + + vr::VROverlayHandle_t mOverlayHandle; + + // Desired OpenVR state + U32 mOverlayFlags; + F32 mOverlayWidth; + + vr::VROverlayTransformType mOverlayTransformType; + MatrixF mTransform; + vr::TrackedDeviceIndex_t mTransformDeviceIndex; + const char* mTransformDeviceComponent; + + + vr::VROverlayInputMethod mInputMethod; + Point2F mMouseScale; + + MatrixF mTrackingOrigin; + vr::TrackedDeviceIndex_t mControllerDeviceIndex; + + bool mOverlayTypeDirty; ///< Overlay type is dirty + bool mOverlayDirty; ///< Overlay properties are dirty + OverlayType mOverlayType; + + // + + OpenVROverlay(); + virtual ~OpenVROverlay(); + + static void initPersistFields(); + + bool onAdd(); + void onRemove(); + + void resetOverlay(); + void updateOverlay(); + + void showOverlay(); + void hideOverlay(); + + bool isOverlayVisible(); + bool isOverlayHoverTarget(); + + bool isGamepadFocussed(); + bool isActiveDashboardOverlay(); + + MatrixF getTransformForOverlayCoordinates(const vr::ETrackingUniverseOrigin trackingOrigin, const Point2F &pos); + bool castRay(const vr::ETrackingUniverseOrigin trackingOrigin, const Point3F &origin, const Point3F &direction, RayInfo *info); + + void moveGamepadFocusToNeighbour(); +}; + +typedef OpenVROverlay::OverlayType OpenVROverlayType; +DefineEnumType(OpenVROverlayType); + + +#endif diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 4ef916bcf..2f8524221 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -16,17 +16,21 @@ #include "gfx/D3D9/gfxD3D9TextureObject.h" #include "gfx/D3D9/gfxD3D9EnumTranslate.h" -/* +#ifdef TORQUE_OPENGL #include "gfx/gl/gfxGLDevice.h" #include "gfx/gl/gfxGLTextureObject.h" #include "gfx/gl/gfxGLEnumTranslate.h" -*/ +#endif namespace OpenVRUtil { - /// Convert an OVR sensor's rotation to a Torque 3D matrix - void convertRotation(const F32 inRotMat[4][4], MatrixF& outRotation) - { + void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation) + { + Point4F col0; inRotTMat.getColumn(0, &col0); + Point4F col1; inRotTMat.getColumn(1, &col1); + Point4F col2; inRotTMat.getColumn(2, &col2); + Point4F col3; inRotTMat.getColumn(3, &col3); + // Set rotation. We need to convert from sensor coordinates to // Torque coordinates. 
The sensor matrix is stored row-major. // The conversion is: @@ -35,15 +39,158 @@ namespace OpenVRUtil // a b c a b c a -c b // d e f --> -g -h -i --> -g i -h // g h i d e f d -f e - outRotation.setColumn(0, Point4F( inRotMat[0][0], -inRotMat[2][0], inRotMat[1][0], 0.0f)); - outRotation.setColumn(1, Point4F(-inRotMat[0][2], inRotMat[2][2], -inRotMat[1][2], 0.0f)); - outRotation.setColumn(2, Point4F( inRotMat[0][1], -inRotMat[2][1], inRotMat[1][1], 0.0f)); - outRotation.setPosition(Point3F::Zero); + outRotation.setColumn(0, Point4F( col0.x, -col2.x, col1.x, 0.0f)); + outRotation.setColumn(1, Point4F(-col0.z, col2.z, -col1.z, 0.0f)); + outRotation.setColumn(2, Point4F( col0.y, -col2.y, col1.y, 0.0f)); + outRotation.setColumn(3, Point4F(-col3.x, col3.z, -col3.y, 1.0f)); + } + + void convertTransformToOVR(const MatrixF& inRotation, MatrixF& outRotation) + { + Point4F col0; inRotation.getColumn(0, &col0); + Point4F col1; inRotation.getColumn(1, &col1); + Point4F col2; inRotation.getColumn(2, &col2); + Point4F col3; inRotation.getColumn(3, &col3); + + // This is basically a reverse of what is in convertTransformFromOVR + outRotation.setColumn(0, Point4F(col0.x, col2.x, -col1.x, 0.0f)); + outRotation.setColumn(1, Point4F(col0.z, col2.z, -col1.z, 0.0f)); + outRotation.setColumn(2, Point4F(-col0.y, -col2.y, col1.y, 0.0f)); + outRotation.setColumn(3, Point4F(-col3.x, -col3.z, col3.y, 1.0f)); + } + + MatrixF convertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat) + { + MatrixF outMat(1); + + outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0)); + outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0)); + outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0)); + outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos + + return outMat; + } + + void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat) + { + Point4F row0; inMat.getRow(0, &row0); + Point4F row1; inMat.getRow(1, &row1); + Point4F row2; inMat.getRow(2, &row2); + + outMat.m[0][0] = row0.x; + outMat.m[0][1] = row0.y; + outMat.m[0][2] = row0.z; + outMat.m[0][3] = row0.w; + + outMat.m[1][0] = row1.x; + outMat.m[1][1] = row1.y; + outMat.m[1][2] = row1.z; + outMat.m[1][3] = row1.w; + + outMat.m[2][0] = row2.x; + outMat.m[2][1] = row2.y; + outMat.m[2][2] = row2.z; + outMat.m[2][3] = row2.w; + } + + + vr::VRTextureBounds_t TorqueRectToBounds(const RectI &rect, const Point2I &widthHeight) + { + vr::VRTextureBounds_t bounds; + F32 xRatio = 1.0 / (F32)widthHeight.x; + F32 yRatio = 1.0 / (F32)widthHeight.y; + bounds.uMin = rect.point.x * xRatio; + bounds.vMin = rect.point.y * yRatio; + bounds.uMax = (rect.point.x + rect.extent.x) * xRatio; + bounds.vMax = (rect.point.y + rect.extent.y) * yRatio; + return bounds; } } +//------------------------------------------------------------ + +DECLARE_SCOPE(OpenVR); +IMPLEMENT_SCOPE(OpenVR, OpenVRProvider, , ""); +ConsoleDoc( + "@class OpenVRProvider\n" + "@brief This class is the interface between TorqueScript and OpenVR.\n\n" + "@ingroup OpenVR\n" + ); + +// Enum impls + +ImplementEnumType(OpenVROverlayInputMethod, + "Types of input supported by VR Overlays. .\n\n" + "@ingroup OpenVR") +{ vr::VROverlayInputMethod_None, "None" }, +{ vr::VROverlayInputMethod_Mouse, "Mouse" }, +EndImplementEnumType; + +ImplementEnumType(OpenVROverlayTransformType, + "Allows the caller to figure out which overlay transform getter to call. 
.\n\n" + "@ingroup OpenVR") +{ vr::VROverlayTransform_Absolute, "Absolute" }, +{ vr::VROverlayTransform_TrackedDeviceRelative, "TrackedDeviceRelative" }, +{ vr::VROverlayTransform_SystemOverlay, "SystemOverlay" }, +{ vr::VROverlayTransform_TrackedComponent, "TrackedComponent" }, +EndImplementEnumType; + +ImplementEnumType(OpenVRGamepadTextInputMode, + "Types of input supported by VR Overlays. .\n\n" + "@ingroup OpenVR") +{ vr::k_EGamepadTextInputModeNormal, "Normal", }, +{ vr::k_EGamepadTextInputModePassword, "Password", }, +{ vr::k_EGamepadTextInputModeSubmit, "Submit" }, +EndImplementEnumType; + +ImplementEnumType(OpenVRGamepadTextInputLineMode, + "Types of input supported by VR Overlays. .\n\n" + "@ingroup OpenVR") +{ vr::k_EGamepadTextInputLineModeSingleLine, "SingleLine" }, +{ vr::k_EGamepadTextInputLineModeMultipleLines, "MultipleLines" }, +EndImplementEnumType; + +ImplementEnumType(OpenVRTrackingResult, + ". .\n\n" + "@ingroup OpenVR") +{ vr::TrackingResult_Uninitialized, "None" }, +{ vr::TrackingResult_Calibrating_InProgress, "Calibrating_InProgress" }, +{ vr::TrackingResult_Calibrating_OutOfRange, "Calibrating_OutOfRange" }, +{ vr::TrackingResult_Running_OK, "Running_Ok" }, +{ vr::TrackingResult_Running_OutOfRange, "Running_OutOfRange" }, +EndImplementEnumType; + +ImplementEnumType(OpenVRTrackingUniverseOrigin, + "Identifies which style of tracking origin the application wants to use for the poses it is requesting. .\n\n" + "@ingroup OpenVR") +{ vr::TrackingUniverseSeated, "Seated" }, +{ vr::TrackingUniverseStanding, "Standing" }, +{ vr::TrackingUniverseRawAndUncalibrated, "RawAndUncalibrated" }, +EndImplementEnumType; + +ImplementEnumType(OpenVROverlayDirection, + "Directions for changing focus between overlays with the gamepad. .\n\n" + "@ingroup OpenVR") +{ vr::OverlayDirection_Up, "Up" }, +{ vr::OverlayDirection_Down, "Down" }, +{ vr::OverlayDirection_Left, "Left" }, +{ vr::OverlayDirection_Right, "Right" }, +EndImplementEnumType; + +ImplementEnumType(OpenVRState, + "Status of the overall system or tracked objects. 
.\n\n" + "@ingroup OpenVR") +{ vr::VRState_Undefined, "Undefined" }, +{ vr::VRState_Off, "Off" }, +{ vr::VRState_Searching, "Searching" }, +{ vr::VRState_Searching_Alert, "Searching_Alert" }, +{ vr::VRState_Ready, "Ready" }, +{ vr::VRState_Ready_Alert, "Ready_Alert" }, +{ vr::VRState_NotReady, "NotReady" }, +EndImplementEnumType; + //------------------------------------------------------------ U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 }; @@ -74,18 +221,6 @@ static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex return sResult; } -static MatrixF ConvertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat) -{ - MatrixF outMat(1); - - outMat.setColumn(0, Point4F(mat.m[0][0], mat.m[1][0], mat.m[2][0], 0.0)); - outMat.setColumn(1, Point4F(mat.m[0][1], mat.m[1][1], mat.m[2][1], 0.0)); - outMat.setColumn(2, Point4F(mat.m[0][2], mat.m[1][2], mat.m[2][2], 0.0)); - outMat.setColumn(3, Point4F(mat.m[0][3], mat.m[1][3], mat.m[2][3], 1.0f)); // pos - - return outMat; -} - MODULE_BEGIN(OpenVRProvider) MODULE_INIT_AFTER(InputEventManager) @@ -105,184 +240,60 @@ MODULE_SHUTDOWN MODULE_END; -bool OpenVRRenderState::setupRenderTargets(U32 mode) +bool OpenVRRenderState::setupRenderTargets(GFXDevice::GFXDeviceRenderStyles mode) { if (!mHMD) return false; + if (mRenderMode == mode) + return true; + + mRenderMode = mode; + + if (mode == GFXDevice::RS_Standard) + { + reset(mHMD); + return true; + } + U32 sizeX, sizeY; Point2I newRTSize; mHMD->GetRecommendedRenderTargetSize(&sizeX, &sizeY); - mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); - mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); + if (mode == GFXDevice::RS_StereoSeparate) + { + mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); + mEyeViewport[1] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); - newRTSize.x = sizeX; - newRTSize.y = sizeY; + newRTSize.x = sizeX; + newRTSize.y = sizeY; + } + else + { + mEyeViewport[0] = RectI(Point2I(0, 0), Point2I(sizeX, sizeY)); + mEyeViewport[1] = RectI(Point2I(sizeX, 0), Point2I(sizeX, sizeY)); + + newRTSize.x = sizeX * 2; + newRTSize.y = sizeY; + } GFXTexHandle stereoTexture; stereoTexture.set(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color"); - mStereoRenderTextures[0] = mStereoRenderTextures[1] = stereoTexture; + mStereoRenderTexture = stereoTexture; GFXTexHandle stereoDepthTexture; stereoDepthTexture.set(newRTSize.x, newRTSize.y, GFXFormatD24S8, &VRDepthProfile, "OpenVR Depth"); - mStereoDepthTextures[0] = mStereoDepthTextures[1] = stereoDepthTexture; + mStereoDepthTexture = stereoDepthTexture; mStereoRT = GFX->allocRenderToTextureTarget(); mStereoRT->attachTexture(GFXTextureTarget::Color0, stereoTexture); mStereoRT->attachTexture(GFXTextureTarget::DepthStencil, stereoDepthTexture); - mEyeRT[0] = mEyeRT[1] = mStereoRT; - - mOutputEyeTextures[0].init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT"); - mOutputEyeTextures[1].init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT"); + mOutputEyeTextures.init(newRTSize.x, newRTSize.y, GFXFormatR8G8B8A8, &VRTextureProfile, "OpenVR Stereo RT Color OUTPUT"); return true; } -void OpenVRRenderState::setupDistortion() -{ - if (!mHMD) - return; - - U16 m_iLensGridSegmentCountH = 43; - U16 m_iLensGridSegmentCountV = 43; - - float w = (float)(1.0 / float(m_iLensGridSegmentCountH - 1)); - float h = (float)(1.0 / float(m_iLensGridSegmentCountV - 
1)); - - float u, v = 0; - - Vector vVerts(0); - GFXVertexPTTT *vert; - - vVerts.reserve((m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2); - - mDistortionVerts.set(GFX, (m_iLensGridSegmentCountV * m_iLensGridSegmentCountH) * 2, GFXBufferTypeStatic); - - vert = mDistortionVerts.lock(); - - //left eye distortion verts - float Xoffset = -1; - for (int y = 0; y < m_iLensGridSegmentCountV; y++) - { - for (int x = 0; x < m_iLensGridSegmentCountH; x++) - { - u = x*w; v = 1 - y*h; - vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f); - - vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Left, u, v); - - vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]); // r - vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]); // g - vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]); // b - - vert++; - } - } - - //right eye distortion verts - Xoffset = 0; - for (int y = 0; y < m_iLensGridSegmentCountV; y++) - { - for (int x = 0; x < m_iLensGridSegmentCountH; x++) - { - u = x*w; v = 1 - y*h; - vert->point = Point3F(Xoffset + u, -1 + 2 * y*h, 0.0f); - - vr::DistortionCoordinates_t dc0 = mHMD->ComputeDistortion(vr::Eye_Right, u, v); - - vert->texCoord1 = Point2F(dc0.rfRed[0], 1 - dc0.rfRed[1]); - vert->texCoord2 = Point2F(dc0.rfGreen[0], 1 - dc0.rfGreen[1]); - vert->texCoord3 = Point2F(dc0.rfBlue[0], 1 - dc0.rfBlue[1]); - - vert++; - } - } - - mDistortionVerts.unlock(); - - mDistortionInds.set(GFX, m_iLensGridSegmentCountV * m_iLensGridSegmentCountH * 6 * 2, 0, GFXBufferTypeStatic); - - GFXPrimitive *prim; - U16 *index; - - mDistortionInds.lock(&index, &prim); - U16 a, b, c, d; - - U16 offset = 0; - for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++) - { - for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++) - { - a = m_iLensGridSegmentCountH*y + x + offset; - b = m_iLensGridSegmentCountH*y + x + 1 + offset; - c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset; - d = (y + 1)*m_iLensGridSegmentCountH + x + offset; - *index++ = a; - *index++ = b; - *index++ = c; - - *index++ = a; - *index++ = c; - *index++ = d; - } - } - - offset = (m_iLensGridSegmentCountH)*(m_iLensGridSegmentCountV); - for (U16 y = 0; y < m_iLensGridSegmentCountV - 1; y++) - { - for (U16 x = 0; x < m_iLensGridSegmentCountH - 1; x++) - { - a = m_iLensGridSegmentCountH*y + x + offset; - b = m_iLensGridSegmentCountH*y + x + 1 + offset; - c = (y + 1)*m_iLensGridSegmentCountH + x + 1 + offset; - d = (y + 1)*m_iLensGridSegmentCountH + x + offset; - *index++ = a; - *index++ = b; - *index++ = c; - - *index++ = a; - *index++ = c; - *index++ = d; - } - } - - mDistortionInds.unlock(); -} - -void OpenVRRenderState::renderDistortion(U32 eye) -{ - // Updates distortion for an eye (this should only be the case for backend APIS where image should be predistorted) - /* - - glDisable(GL_DEPTH_TEST); - glViewport( 0, 0, m_nWindowWidth, m_nWindowHeight ); - - glBindVertexArray( m_unLensVAO ); - glUseProgram( m_unLensProgramID ); - - //render left lens (first half of index array ) - glBindTexture(GL_TEXTURE_2D, leftEyeDesc.m_nResolveTextureId ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); - glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, 0 ); - - //render right lens (second half of index array ) - glBindTexture(GL_TEXTURE_2D, 
rightEyeDesc.m_nResolveTextureId ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR ); - glTexParameteri( GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR ); - glDrawElements( GL_TRIANGLES, m_uiIndexSize/2, GL_UNSIGNED_SHORT, (const void *)(m_uiIndexSize) ); - - glBindVertexArray( 0 ); - glUseProgram( 0 ); - */ -} - void OpenVRRenderState::renderPreview() { @@ -293,26 +304,21 @@ void OpenVRRenderState::reset(vr::IVRSystem* hmd) mHMD = hmd; mStereoRT = NULL; - mEyeRT[0] = mEyeRT[1] = NULL; - mStereoRenderTextures[0] = mStereoRenderTextures[1] = NULL; - mStereoDepthTextures[0] = mStereoDepthTextures[1] = NULL; + mStereoRenderTexture = NULL; + mStereoDepthTexture = NULL; - mDistortionVerts = NULL; - mDistortionInds = NULL; - - mOutputEyeTextures[0].clear(); - mOutputEyeTextures[1].clear(); + mOutputEyeTextures.clear(); if (!mHMD) return; vr::HmdMatrix34_t mat = mHMD->GetEyeToHeadTransform(vr::Eye_Left); - mEyePose[0] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat); + mEyePose[0] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat); mEyePose[0].inverse(); mat = mHMD->GetEyeToHeadTransform(vr::Eye_Right); - mEyePose[1] = ConvertSteamVRAffineMatrixToMatrixFPlain(mat); + mEyePose[1] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat); mEyePose[1].inverse(); mHMD->GetProjectionRaw(vr::Eye_Left, &mEyeFov[0].leftTan, &mEyeFov[0].rightTan, &mEyeFov[0].upTan, &mEyeFov[0].downTan); @@ -336,6 +342,8 @@ OpenVRProvider::OpenVRProvider() : GFXDevice::getDeviceEventSignal().notify(this, &OpenVRProvider::_handleDeviceEvent); INPUTMGR->registerDevice(this); dMemset(&mLUID, '\0', sizeof(mLUID)); + + mTrackingSpace = vr::TrackingUniverseSeated; } OpenVRProvider::~OpenVRProvider() @@ -345,7 +353,16 @@ OpenVRProvider::~OpenVRProvider() void OpenVRProvider::staticInit() { - // TODO: Add console vars + // Overlay flags + Con::setIntVariable("$OpenVR::OverlayFlags_None", 1 << (U32)vr::VROverlayFlags_None); + Con::setIntVariable("$OpenVR::OverlayFlags_Curved", 1 << (U32)vr::VROverlayFlags_Curved); + Con::setIntVariable("$OpenVR::OverlayFlags_RGSS4X", 1 << (U32)vr::VROverlayFlags_RGSS4X); + Con::setIntVariable("$OpenVR::OverlayFlags_NoDashboardTab", 1 << (U32)vr::VROverlayFlags_NoDashboardTab); + Con::setIntVariable("$OpenVR::OverlayFlags_AcceptsGamepadEvents", 1 << (U32)vr::VROverlayFlags_AcceptsGamepadEvents); + Con::setIntVariable("$OpenVR::OverlayFlags_ShowGamepadFocus", 1 << (U32)vr::VROverlayFlags_ShowGamepadFocus); + Con::setIntVariable("$OpenVR::OverlayFlags_SendVRScrollEvents", 1 << (U32)vr::VROverlayFlags_SendVRScrollEvents); + Con::setIntVariable("$OpenVR::OverlayFlags_SendVRTouchpadEvents", 1 << (U32)vr::VROverlayFlags_SendVRTouchpadEvents); + Con::setIntVariable("$OpenVR::OverlayFlags_ShowTouchPadScrollWheel", 1 << (U32)vr::VROverlayFlags_ShowTouchPadScrollWheel); } bool OpenVRProvider::enable() @@ -558,34 +575,11 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos) { // Directly set the rotation and position from the eye transforms MatrixF torqueMat(1); - - F32 inRotMat[4][4]; - Point4F col0; mat.getColumn(0, &col0); - Point4F col1; mat.getColumn(1, &col1); - Point4F col2; mat.getColumn(2, &col2); - Point4F col3; mat.getColumn(3, &col3); - inRotMat[0][0] = col0.x; - inRotMat[0][1] = col0.y; - inRotMat[0][2] = col0.z; - inRotMat[0][3] = col0.w; - inRotMat[1][0] = col1.x; - inRotMat[1][1] = 
col1.y; - inRotMat[1][2] = col1.z; - inRotMat[1][3] = col1.w; - inRotMat[2][0] = col2.x; - inRotMat[2][1] = col2.y; - inRotMat[2][2] = col2.z; - inRotMat[2][3] = col2.w; - inRotMat[3][0] = col3.x; - inRotMat[3][1] = col3.y; - inRotMat[3][2] = col3.z; - inRotMat[3][3] = col3.w; - - OpenVRUtil::convertRotation(inRotMat, torqueMat); + OpenVRUtil::convertTransformFromOVR(mat, torqueMat); Point3F pos = torqueMat.getPosition(); outRot = QuatF(torqueMat); - outPos = Point3F(-pos.x, pos.z, -pos.y); + outPos = pos;// Point3F(-pos.x, pos.z, -pos.y); } void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const @@ -639,8 +633,8 @@ void OpenVRProvider::getStereoViewports(RectI *out) const void OpenVRProvider::getStereoTargets(GFXTextureTarget **out) const { - out[0] = mHMDRenderState.mEyeRT[0]; - out[1] = mHMDRenderState.mEyeRT[1]; + out[0] = mHMDRenderState.mStereoRT; + out[1] = mHMDRenderState.mStereoRT; } void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas) @@ -655,11 +649,16 @@ void OpenVRProvider::setDrawCanvas(GuiCanvas *canvas) if (mDrawCanvas != canvas || mHMDRenderState.mHMD == NULL) { - mHMDRenderState.setupRenderTargets(0); + mHMDRenderState.setupRenderTargets(GFXDevice::RS_Standard); } mDrawCanvas = canvas; } +void OpenVRProvider::setDrawMode(GFXDevice::GFXDeviceRenderStyles style) +{ + mHMDRenderState.setupRenderTargets(style); +} + void OpenVRProvider::setCurrentConnection(GameConnection *connection) { mGameConnection = connection; @@ -672,7 +671,7 @@ GameConnection* OpenVRProvider::getCurrentConnection() GFXTexHandle OpenVRProvider::getPreviewTexture() { - return mHMDRenderState.mStereoRenderTextures[0]; // TODO: render distortion preview + return mHMDRenderState.mStereoRenderTexture; // TODO: render distortion preview } void OpenVRProvider::onStartFrame() @@ -694,31 +693,87 @@ void OpenVRProvider::onEyeRendered(U32 index) return; vr::EVRCompositorError err = vr::VRCompositorError_None; + vr::VRTextureBounds_t bounds; + U32 textureIdxToSubmit = index; - GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures[index].getTextureHandle(); - mHMDRenderState.mEyeRT[0]->resolveTo(eyeTex); - mHMDRenderState.mOutputEyeTextures[index].advance(); + GFXTexHandle eyeTex = mHMDRenderState.mOutputEyeTextures.getTextureHandle(); + if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate) + { + mHMDRenderState.mStereoRT->resolveTo(eyeTex); + mHMDRenderState.mOutputEyeTextures.advance(); + } + else + { + // assuming side-by-side, so the right eye will be next + if (index == 1) + { + mHMDRenderState.mStereoRT->resolveTo(eyeTex); + mHMDRenderState.mOutputEyeTextures.advance(); + } + else + { + return; + } + } if (GFX->getAdapterType() == Direct3D11) { - GFXFormat fmt1 = eyeTex->getFormat(); - vr::Texture_t eyeTexture = { (void*)static_cast(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; - err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); + vr::Texture_t eyeTexture; + if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate) + { + // whatever eye we are on + eyeTexture = { (void*)static_cast(eyeTex.getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; + bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[index], mHMDRenderState.mStereoRenderTexture.getWidthHeight()); + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture, &bounds); + } + else + { + // left & right at the same time + eyeTexture = { (void*)static_cast(eyeTex.getPointer())->get2DTex(), 
vr::API_DirectX, vr::ColorSpace_Gamma }; + bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[0], mHMDRenderState.mStereoRenderTexture.getWidthHeight()); + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left), &eyeTexture, &bounds); + bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[1], mHMDRenderState.mStereoRenderTexture.getWidthHeight()); + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Right), &eyeTexture, &bounds); + } } else if (GFX->getAdapterType() == Direct3D9) { //vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; //err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); } +#ifdef TORQUE_OPENGL else if (GFX->getAdapterType() == OpenGL) - {/* - vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma }; - vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture);*/ + { + vr::Texture_t eyeTexture; + if (mHMDRenderState.mRenderMode == GFXDevice::RS_StereoSeparate) + { + // whatever eye we are on + eyeTexture = { (void*)static_cast(eyeTex.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma }; + bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[index], mHMDRenderState.mStereoRenderTexture.getWidthHeight()); + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture, &bounds); + } + else + { + // left & right at the same time + eyeTexture = { (void*)static_cast(eyeTex.getPointer())->getHandle(), vr::API_OpenGL, vr::ColorSpace_Gamma }; + bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[0], mHMDRenderState.mStereoRenderTexture.getWidthHeight()); + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left), &eyeTexture, &bounds); + bounds = OpenVRUtil::TorqueRectToBounds(mHMDRenderState.mEyeViewport[1], mHMDRenderState.mStereoRenderTexture.getWidthHeight()); + err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Right), &eyeTexture, &bounds); + } } +#endif AssertFatal(err == vr::VRCompositorError_None, "VR compositor error!"); } +void OpenVRProvider::setRoomTracking(bool room) +{ + vr::IVRCompositor* compositor = vr::VRCompositor(); + mTrackingSpace = room ? 
vr::TrackingUniverseStanding : vr::TrackingUniverseSeated; + if (compositor) compositor->SetTrackingSpace(mTrackingSpace); +} + bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) { if (!ManagedSingleton::instanceOrNull()) @@ -770,9 +825,8 @@ bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) S32 OpenVRProvider::getDisplayDeviceId() const { - return -1; -#ifdef TORQUE_OS_WIN32 - if (GFX->getAdapterType() == Direct3D11) +#if defined(TORQUE_OS_WIN64) || defined(TORQUE_OS_WIN32) + if (GFX && GFX->getAdapterType() == Direct3D11) { Vector adapterList; GFXD3D11Device::enumerateAdapters(adapterList); @@ -818,7 +872,17 @@ void OpenVRProvider::updateTrackedPoses() if (!mHMD) return; - vr::VRCompositor()->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0); + vr::IVRCompositor* compositor = vr::VRCompositor(); + + if (!compositor) + return; + + if (compositor->GetTrackingSpace() != mTrackingSpace) + { + compositor->SetTrackingSpace(mTrackingSpace); + } + + compositor->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0); mValidPoseCount = 0; @@ -828,7 +892,7 @@ void OpenVRProvider::updateTrackedPoses() if (mTrackedDevicePose[nDevice].bPoseIsValid) { mValidPoseCount++; - MatrixF mat = ConvertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking); + MatrixF mat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking); mat.inverse(); if (nDevice == vr::k_unTrackedDeviceIndex_Hmd) @@ -925,7 +989,43 @@ void OpenVRProvider::resetSensors() } } -DefineEngineFunction(isOpenVRDeviceActive, bool, (), , +OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay() +{ + return NULL; +} + +void OpenVRProvider::setOverlayNeighbour(vr::EOverlayDirection dir, OpenVROverlay *overlay) +{ + +} + + +bool OpenVRProvider::isDashboardVisible() +{ + return false; +} + +void OpenVRProvider::showDashboard(const char *overlayToShow) +{ + +} + +vr::TrackedDeviceIndex_t OpenVRProvider::getPrimaryDashboardDevice() +{ + return -1; +} + +void OpenVRProvider::setKeyboardTransformAbsolute(const MatrixF &xfm) +{ + // mTrackingSpace +} + +void OpenVRProvider::setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect) +{ + +} + +DefineEngineStaticMethod(OpenVR, isDeviceActive, bool, (), , "@brief Used to determine if the OpenVR input device is active\n\n" "The OpenVR device is considered active when the library has been " @@ -940,11 +1040,11 @@ DefineEngineFunction(isOpenVRDeviceActive, bool, (), , return false; } - return OCULUSVRDEV->getActive(); + return OPENVR->getActive(); } -DefineEngineFunction(OpenVRSetEnabled, bool, (bool value), , +DefineEngineStaticMethod(OpenVR, setEnabled, bool, (bool value), , "@brief Used to determine if the OpenVR input device is active\n\n" "The OpenVR device is considered active when the library has been " @@ -959,12 +1059,11 @@ DefineEngineFunction(OpenVRSetEnabled, bool, (bool value), , return false; } - return value ? ManagedSingleton::instance()->enable() : ManagedSingleton::instance()->disable(); + return value ? 
OPENVR->enable() : OPENVR->disable(); } - -DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), , +DefineEngineStaticMethod(OpenVR, setHMDAsGameConnectionDisplayDevice, bool, (GameConnection* conn), , "@brief Sets the first HMD to be a GameConnection's display device\n\n" "@param conn The GameConnection to set.\n" "@return True if the GameConnection display device was set.\n" @@ -982,12 +1081,12 @@ DefineEngineFunction(setOpenVRHMDAsGameConnectionDisplayDevice, bool, (GameConne return false; } - conn->setDisplayDevice(ManagedSingleton::instance()); + conn->setDisplayDevice(OPENVR); return true; } -DefineEngineFunction(OpenVRGetDisplayDeviceId, S32, (), , +DefineEngineStaticMethod(OpenVR, getDisplayDeviceId, S32, (), , "@brief MacOS display ID.\n\n" "@param index The HMD index.\n" "@return The ID of the HMD display device, if any.\n" @@ -998,10 +1097,10 @@ DefineEngineFunction(OpenVRGetDisplayDeviceId, S32, (), , return -1; } - return ManagedSingleton::instance()->getDisplayDeviceId(); + return OPENVR->getDisplayDeviceId(); } -DefineEngineFunction(OpenVRResetSensors, void, (), , +DefineEngineStaticMethod(OpenVR, resetSensors, void, (), , "@brief Resets all Oculus VR sensors.\n\n" "This resets all sensor orientations such that their 'normal' rotation " "is defined when this function is called. This defines an HMD's forwards " @@ -1013,5 +1112,7 @@ DefineEngineFunction(OpenVRResetSensors, void, (), , return; } - ManagedSingleton::instance()->resetSensors(); + OPENVR->resetSensors(); } + +// Overlay stuff diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index 206aa8799..94b43fb38 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -19,12 +19,39 @@ #include class OpenVRHMDDevice; +class OpenVROverlay; -class VRTextureSet +typedef vr::VROverlayInputMethod OpenVROverlayInputMethod; +typedef vr::VROverlayTransformType OpenVROverlayTransformType; +typedef vr::EGamepadTextInputMode OpenVRGamepadTextInputMode; +typedef vr::EGamepadTextInputLineMode OpenVRGamepadTextInputLineMode; +typedef vr::ETrackingResult OpenVRTrackingResult; +typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin; +typedef vr::EOverlayDirection OpenVROverlayDirection; +typedef vr::EVRState OpenVRState; + +DefineEnumType(OpenVROverlayTransformType); + +namespace OpenVRUtil +{ + /// Convert a matrix in OVR space to torque space + void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation); + + /// Convert a matrix in torque space to OVR space + void convertTransformToOVR(const MatrixF& inRotation, MatrixF& outRotation); + + /// Converts vr::HmdMatrix34_t to a MatrixF + MatrixF convertSteamVRAffineMatrixToMatrixFPlain(const vr::HmdMatrix34_t &mat); + + /// Converts a MatrixF to a vr::HmdMatrix34_t + void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat); +}; + +template class VRTextureSet { public: - static const int TextureCount = 2; - GFXTexHandle mTextures[2]; + static const int TextureCount = TEXSIZE; + GFXTexHandle mTextures[TEXSIZE]; U32 mIndex; VRTextureSet() : mIndex(0) @@ -68,20 +95,15 @@ struct OpenVRRenderState RectI mEyeViewport[2]; GFXTextureTargetRef mStereoRT; - GFXTextureTargetRef mEyeRT[2]; - GFXTexHandle mStereoRenderTextures[2]; - GFXTexHandle mStereoDepthTextures[2]; + GFXTexHandle mStereoRenderTexture; + GFXTexHandle mStereoDepthTexture; - 
GFXVertexBufferHandle mDistortionVerts; - GFXPrimitiveBufferHandle mDistortionInds; + VRTextureSet<4> mOutputEyeTextures; - VRTextureSet mOutputEyeTextures[2]; + GFXDevice::GFXDeviceRenderStyles mRenderMode; - bool setupRenderTargets(U32 mode); - void setupDistortion(); - - void renderDistortion(U32 eye); + bool setupRenderTargets(GFXDevice::GFXDeviceRenderStyles mode); void renderPreview(); @@ -142,6 +164,7 @@ public: virtual void getStereoTargets(GFXTextureTarget **out) const; virtual void setDrawCanvas(GuiCanvas *canvas); + virtual void setDrawMode(GFXDevice::GFXDeviceRenderStyles style); virtual void setCurrentConnection(GameConnection *connection); virtual GameConnection* getCurrentConnection(); @@ -153,6 +176,8 @@ public: virtual void onEyeRendered(U32 index); + virtual void setRoomTracking(bool room); + bool _handleDeviceEvent(GFXDevice::GFXDeviceEventType evt); S32 getDisplayDeviceId() const; @@ -168,6 +193,21 @@ public: void resetSensors(); /// } + + /// @name Console API + /// { + OpenVROverlay *getGamepadFocusOverlay(); + void setOverlayNeighbour(vr::EOverlayDirection dir, OpenVROverlay *overlay); + + bool isDashboardVisible(); + void showDashboard(const char *overlayToShow); + + vr::TrackedDeviceIndex_t getPrimaryDashboardDevice(); + + void setKeyboardTransformAbsolute(const MatrixF &xfm); + void setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect); + /// } + /// @name OpenVR state /// { vr::IVRSystem *mHMD; @@ -183,6 +223,8 @@ public: OpenVRRenderState mHMDRenderState; GFXAdapterLUID mLUID; + + vr::ETrackingUniverseOrigin mTrackingSpace; /// } GuiCanvas* mDrawCanvas; @@ -210,6 +252,6 @@ public: }; /// Returns the OculusVRDevice singleton. -#define OCULUSVRDEV ManagedSingleton::instance() +#define OPENVR ManagedSingleton::instance() #endif // _OCULUSVRDEVICE_H_ diff --git a/Engine/source/platform/output/IDisplayDevice.h b/Engine/source/platform/output/IDisplayDevice.h index bd372085d..9ce327a04 100644 --- a/Engine/source/platform/output/IDisplayDevice.h +++ b/Engine/source/platform/output/IDisplayDevice.h @@ -66,6 +66,7 @@ public: virtual void getStereoTargets(GFXTextureTarget **out) const = 0; virtual void setDrawCanvas(GuiCanvas *canvas) = 0; + virtual void setDrawMode(GFXDevice::GFXDeviceRenderStyles style) = 0; virtual void setCurrentConnection(GameConnection *connection) = 0; virtual GameConnection* getCurrentConnection() = 0; From 9f49a7844e9f0d61b3258c5cf0b5fe544ab29b9c Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 8 May 2016 18:18:04 +0100 Subject: [PATCH 12/33] Use correct multiplication order for eye transform. Also fix displayDevice not set bug. 
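The hunk below swaps the operands when composing the per-eye offset with the HMD pose, matching the order used in the OpenVR sample code. As a standalone illustration of why the order matters (plain C++, not Torque's MatrixF API; the column-vector, translation-in-last-column convention here is an assumption made only for this example), composing a head rotation with an eye offset in the two possible orders puts the eye in different places:

#include <cmath>
#include <cstdio>

struct Mat4 { float m[4][4]; };

static Mat4 identity()
{
   Mat4 r = {};
   for (int i = 0; i < 4; i++)
      r.m[i][i] = 1.0f;
   return r;
}

// r = a * b, 4x4 row-major storage, column-vector convention (translation in the last column).
static Mat4 mul(const Mat4 &a, const Mat4 &b)
{
   Mat4 r = {};
   for (int i = 0; i < 4; i++)
      for (int j = 0; j < 4; j++)
         for (int k = 0; k < 4; k++)
            r.m[i][j] += a.m[i][k] * b.m[k][j];
   return r;
}

static Mat4 translate(float x, float y, float z)
{
   Mat4 r = identity();
   r.m[0][3] = x; r.m[1][3] = y; r.m[2][3] = z;
   return r;
}

static Mat4 rotateZ(float radians)
{
   Mat4 r = identity();
   r.m[0][0] = std::cos(radians); r.m[0][1] = -std::sin(radians);
   r.m[1][0] = std::sin(radians); r.m[1][1] =  std::cos(radians);
   return r;
}

int main()
{
   Mat4 headPose  = rotateZ(1.5707963f);      // head turned 90 degrees
   Mat4 eyeToHead = translate(0.032f, 0.0f, 0.0f); // half-IPD offset of one eye

   Mat4 a = mul(headPose, eyeToHead); // eye offset applied in head space, then the head pose
   Mat4 b = mul(eyeToHead, headPose); // head pose first, offset tacked on in world space

   std::printf("head * eye position: %f %f %f\n", a.m[0][3], a.m[1][3], a.m[2][3]);
   std::printf("eye * head position: %f %f %f\n", b.m[0][3], b.m[1][3], b.m[2][3]);
   return 0;
}

With the head turned 90 degrees, one order yields an eye at roughly (0, 0.032, 0) and the other at (0.032, 0, 0); only the first keeps the eye attached to the rotated head.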
--- Engine/source/gui/3d/guiTSControl.cpp | 2 ++ Engine/source/platform/input/openVR/openVRProvider.cpp | 7 +++++-- 2 files changed, 7 insertions(+), 2 deletions(-) diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 1a8046dd2..57c6d2fef 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -529,6 +529,8 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) GFXTransformSaver saver; bool renderingToTarget = false; + mLastCameraQuery.displayDevice = NULL; + if (!processCameraQuery(&mLastCameraQuery)) { // We have no camera, but render the GUI children diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 2f8524221..8c49ef49e 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -343,7 +343,7 @@ OpenVRProvider::OpenVRProvider() : INPUTMGR->registerDevice(this); dMemset(&mLUID, '\0', sizeof(mLUID)); - mTrackingSpace = vr::TrackingUniverseSeated; + mTrackingSpace = vr::TrackingUniverseStanding; } OpenVRProvider::~OpenVRProvider() @@ -586,7 +586,7 @@ void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const { AssertFatal(eye >= 0 && eye < 2, "Out of bounds eye"); - MatrixF mat = mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eye]; + MatrixF mat = mHMDRenderState.mEyePose[eye] * mHMDRenderState.mHMDPose; // same order as in the openvr example OpenVRTransformToRotPos(mat, pose->orientation, pose->position); pose->velocity = Point3F(0); @@ -603,6 +603,9 @@ void OpenVRProvider::getEyeOffsets(Point3F *dest) const { dest[0] = mHMDRenderState.mEyePose[0].getPosition(); dest[1] = mHMDRenderState.mEyePose[1].getPosition(); + + dest[0] = Point3F(-dest[0].x, dest[0].y, dest[0].z); // convert from vr-space + dest[1] = Point3F(-dest[1].x, dest[1].y, dest[1].z); } bool OpenVRProvider::providesFovPorts() const From 185fde8ea4ecd15c680662d562d156faea99d0ed Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Tue, 10 May 2016 22:13:04 +0100 Subject: [PATCH 13/33] Fix issue with clamping the AngAxisF --- .../T3D/gameBase/extended/extendedMove.cpp | 68 ++++++++++++------- .../T3D/gameBase/extended/extendedMove.h | 6 +- 2 files changed, 47 insertions(+), 27 deletions(-) diff --git a/Engine/source/T3D/gameBase/extended/extendedMove.cpp b/Engine/source/T3D/gameBase/extended/extendedMove.cpp index 7b260adb5..bf7573113 100644 --- a/Engine/source/T3D/gameBase/extended/extendedMove.cpp +++ b/Engine/source/T3D/gameBase/extended/extendedMove.cpp @@ -79,7 +79,8 @@ void ExtendedMoveManager::init() const ExtendedMove NullExtendedMove; -#define CLAMPPOS(x) (x<0 ? 
-((-x) & (1<<(MaxPositionBits-1))-1) : (x & (1<<(MaxPositionBits-1))-1))
+#define CLAMPPOS(x) ((S32)(((x + 1) * .5) * ((1 << MaxPositionBits) - 1)) & ((1<<MaxPositionBits)-1))
       if(stream->writeFlag(posX[i] != extBaseMove->posX[i]))
-         stream->writeSignedInt(posX[i], MaxPositionBits);
+         stream->writeSignedInt(cposX[i], MaxPositionBits);
       if(stream->writeFlag(posY[i] != extBaseMove->posY[i]))
-         stream->writeSignedInt(posY[i], MaxPositionBits);
+         stream->writeSignedInt(cposY[i], MaxPositionBits);
       if(stream->writeFlag(posZ[i] != extBaseMove->posZ[i]))
-         stream->writeSignedInt(posZ[i], MaxPositionBits);
+         stream->writeSignedInt(cposZ[i], MaxPositionBits);

       // Rotation
       stream->writeFlag(EulerBasedRotation[i]);
       if(stream->writeFlag(rotX[i] != extBaseMove->rotX[i]))
-         stream->writeInt(crotX[i], MaxRotationBits);
+         stream->writeInt(crotX[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
       if(stream->writeFlag(rotY[i] != extBaseMove->rotY[i]))
-         stream->writeInt(crotY[i], MaxRotationBits);
+         stream->writeInt(crotY[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
       if(stream->writeFlag(rotZ[i] != extBaseMove->rotZ[i]))
-         stream->writeInt(crotZ[i], MaxRotationBits);
+         stream->writeInt(crotZ[i], EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
       if(!EulerBasedRotation[i])
       {
          if(stream->writeFlag(rotW[i] != extBaseMove->rotW[i]))
@@ -176,18 +181,27 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
   for(U32 i=0; ireadFlag())
+      if (stream->readFlag())
+      {
-         posX[i] = stream->readSignedInt(MaxPositionBits);
+         cposX[i] = stream->readSignedInt(MaxPositionBits);
+         posX[i] = UNCLAMPPOS(cposX[i]);
+      }
       else
          posX[i] = extBaseMove->posX[i];

-      if(stream->readFlag())
-         posY[i] = stream->readSignedInt(MaxPositionBits);
+      if (stream->readFlag())
+      {
+         cposY[i] = stream->readSignedInt(MaxPositionBits);
+         posY[i] = UNCLAMPPOS(cposY[i]);
+      }
       else
          posY[i] = extBaseMove->posY[i];

-      if(stream->readFlag())
-         posZ[i] = stream->readSignedInt(MaxPositionBits);
+      if (stream->readFlag())
+      {
+         cposZ[i] = stream->readSignedInt(MaxPositionBits);
+         posZ[i] = UNCLAMPPOS(cposZ[i]);
+      }
       else
         posZ[i] = extBaseMove->posZ[i];

@@ -198,8 +212,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
          scale = M_2PI_F;
       if(stream->readFlag())
       {
-         crotX[i] = stream->readInt(MaxRotationBits);
-         rotX[i] = UNCLAMPROT(crotX[i]) * scale;
+         crotX[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
+         rotX[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotX[i]) * scale) : UNCLAMPPOS(crotX[i]);
       }
       else
       {
@@ -208,8 +222,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
       if(stream->readFlag())
       {
-         crotY[i] = stream->readInt(MaxRotationBits);
-         rotY[i] = UNCLAMPROT(crotY[i]) * scale;
+         crotY[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
+         rotY[i] = EulerBasedRotation[i] ? (UNCLAMPROT(crotY[i]) * scale) : UNCLAMPPOS(crotY[i]);
       }
       else
       {
@@ -218,8 +232,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove)
       if(stream->readFlag())
       {
-         crotZ[i] = stream->readInt(MaxRotationBits);
-         rotZ[i] = UNCLAMPROT(crotZ[i]) * scale;
+         crotZ[i] = stream->readInt(EulerBasedRotation[i] ? MaxRotationBits : MaxPositionBits);
+         rotZ[i] = EulerBasedRotation[i] ?
(UNCLAMPROT(crotZ[i]) * scale) : UNCLAMPPOS(crotZ[i]); } else { @@ -231,7 +245,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) if(stream->readFlag()) { crotW[i] = stream->readInt(MaxRotationBits); - rotW[i] = UNCLAMPROT(crotW[i]); + rotW[i] = UNCLAMPROT(crotW[i]) * M_2PI_F; } else { @@ -266,9 +280,9 @@ void ExtendedMove::clamp() } else { - crotX[i] = CLAMPROT(rotX[i]); - crotY[i] = CLAMPROT(rotY[i]); - crotZ[i] = CLAMPROT(rotZ[i]); + crotX[i] = CLAMPPOS(rotX[i]); + crotY[i] = CLAMPPOS(rotY[i]); + crotZ[i] = CLAMPPOS(rotZ[i]); crotW[i] = CLAMPROT(rotW[i] / M_2PI_F); } } @@ -282,6 +296,10 @@ void ExtendedMove::unclamp() // Unclamp the values the same as for net traffic so the client matches the server for(U32 i=0; i Date: Sat, 14 May 2016 23:51:04 +0100 Subject: [PATCH 14/33] Improvements to openvr code - Overlays are implemented (sans input for the moment) - Fixed a problem where the movemanager was using the wrong values for hmd rotation & position --- .../gfx/D3D11/gfxD3D11EnumTranslate.cpp | 1 + .../gfx/D3D11/gfxD3D11TextureObject.cpp | 12 +- .../gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp | 2 + Engine/source/gfx/bitmap/gBitmap.cpp | 1 + .../source/gfx/bitmap/loaders/bitmapPng.cpp | 7 +- Engine/source/gfx/gfxEnums.h | 9 +- Engine/source/gui/core/guiOffscreenCanvas.cpp | 11 +- Engine/source/gui/core/guiOffscreenCanvas.h | 1 + .../platform/input/openVR/openVROverlay.cpp | 307 +++++++++++++++++- .../platform/input/openVR/openVROverlay.h | 18 +- .../platform/input/openVR/openVRProvider.cpp | 29 +- .../platform/input/openVR/openVRProvider.h | 9 + Engine/source/sim/actionMap.cpp | 5 +- 13 files changed, 380 insertions(+), 32 deletions(-) diff --git a/Engine/source/gfx/D3D11/gfxD3D11EnumTranslate.cpp b/Engine/source/gfx/D3D11/gfxD3D11EnumTranslate.cpp index b7a05acd4..72acdd083 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11EnumTranslate.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11EnumTranslate.cpp @@ -73,6 +73,7 @@ void GFXD3D11EnumTranslate::init() GFXD3D11TextureFormat[GFXFormatD24FS8] = DXGI_FORMAT_UNKNOWN; GFXD3D11TextureFormat[GFXFormatD16] = DXGI_FORMAT_D16_UNORM; GFXD3D11TextureFormat[GFXFormatR8G8B8A8_SRGB] = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB; + GFXD3D11TextureFormat[GFXFormatR8G8B8A8_LINEAR_FORCE] = DXGI_FORMAT_R8G8B8A8_UNORM; //------------------------------------------------------------------------------ //------------------------------------------------------------------------------ GFXD3D11TextureFilter[GFXTextureFilterNone] = D3D11_FILTER_MIN_MAG_MIP_POINT; diff --git a/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp b/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp index 1c97597cc..22af430f0 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11TextureObject.cpp @@ -67,12 +67,6 @@ GFXLockedRect *GFXD3D11TextureObject::lock(U32 mipLevel /*= 0*/, RectI *inRect / D3D11_MAPPED_SUBRESOURCE mapInfo; - /*if (!mProfile->canModify()) - { - AssertFatal(false, "Tried to modify external texture"); - return NULL; - }*/ - if( mProfile->isRenderTarget() ) { //AssertFatal( 0, "GFXD3D11TextureObject::lock - Need to handle mapping render targets" ); @@ -186,8 +180,8 @@ bool GFXD3D11TextureObject::copyToBmp(GBitmap* bmp) // check format limitations // at the moment we only support RGBA for the source (other 4 byte formats should // be easy to add though) - AssertFatal(mFormat == GFXFormatR8G8B8A8, "copyToBmp: invalid format"); - if (mFormat != GFXFormatR8G8B8A8) + AssertFatal(mFormat == GFXFormatR8G8B8A8 || mFormat == 
GFXFormatR8G8B8A8_LINEAR_FORCE, "copyToBmp: invalid format"); + if (mFormat != GFXFormatR8G8B8A8 && mFormat != GFXFormatR8G8B8A8_LINEAR_FORCE) return false; PROFILE_START(GFXD3D11TextureObject_copyToBmp); @@ -203,7 +197,7 @@ bool GFXD3D11TextureObject::copyToBmp(GBitmap* bmp) const U32 sourceBytesPerPixel = 4; U32 destBytesPerPixel = 0; - if(bmp->getFormat() == GFXFormatR8G8B8A8) + if (bmp->getFormat() == GFXFormatR8G8B8A8 || bmp->getFormat() == GFXFormatR8G8B8A8_LINEAR_FORCE) destBytesPerPixel = 4; else if(bmp->getFormat() == GFXFormatR8G8B8) destBytesPerPixel = 3; diff --git a/Engine/source/gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp b/Engine/source/gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp index a598c4999..1d51860c0 100644 --- a/Engine/source/gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp +++ b/Engine/source/gfx/D3D9/pc/gfxD3D9EnumTranslate.pc.cpp @@ -115,6 +115,8 @@ void GFXD3D9EnumTranslate::init() GFXD3D9TextureFormat[GFXFormatD24FS8] = D3DFMT_D24FS8; GFXD3D9TextureFormat[GFXFormatD16] = D3DFMT_D16; GFXD3D9TextureFormat[GFXFormatR8G8B8A8_SRGB] = D3DFMT_UNKNOWN; + + GFXD3D9TextureFormat[GFXFormatR8G8B8A8_LINEAR_FORCE] = D3DFMT_A8R8G8B8; VALIDATE_LOOKUPTABLE( GFXD3D9TextureFormat, GFXFormat); //------------------------------------------------------------------------------ //------------------------------------------------------------------------------ diff --git a/Engine/source/gfx/bitmap/gBitmap.cpp b/Engine/source/gfx/bitmap/gBitmap.cpp index 78454d5e3..e5ef6b407 100644 --- a/Engine/source/gfx/bitmap/gBitmap.cpp +++ b/Engine/source/gfx/bitmap/gBitmap.cpp @@ -293,6 +293,7 @@ void GBitmap::allocateBitmap(const U32 in_width, const U32 in_height, const bool break; case GFXFormatR8G8B8: mBytesPerPixel = 3; break; + case GFXFormatR8G8B8A8_LINEAR_FORCE: case GFXFormatR8G8B8X8: case GFXFormatR8G8B8A8: mBytesPerPixel = 4; break; diff --git a/Engine/source/gfx/bitmap/loaders/bitmapPng.cpp b/Engine/source/gfx/bitmap/loaders/bitmapPng.cpp index c0e05471c..702c5b33d 100644 --- a/Engine/source/gfx/bitmap/loaders/bitmapPng.cpp +++ b/Engine/source/gfx/bitmap/loaders/bitmapPng.cpp @@ -328,13 +328,14 @@ static bool _writePNG(GBitmap *bitmap, Stream &stream, U32 compressionLevel, U32 format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8 || format == GFXFormatA8 || - format == GFXFormatR5G6B5, "_writePNG: ONLY RGB bitmap writing supported at this time."); + format == GFXFormatR5G6B5 || + format == GFXFormatR8G8B8A8_LINEAR_FORCE, "_writePNG: ONLY RGB bitmap writing supported at this time."); if ( format != GFXFormatR8G8B8 && format != GFXFormatR8G8B8A8 && format != GFXFormatR8G8B8X8 && format != GFXFormatA8 && - format != GFXFormatR5G6B5 ) + format != GFXFormatR5G6B5 && format != GFXFormatR8G8B8A8_LINEAR_FORCE) return false; png_structp png_ptr = png_create_write_struct_2(PNG_LIBPNG_VER_STRING, @@ -381,7 +382,7 @@ static bool _writePNG(GBitmap *bitmap, Stream &stream, U32 compressionLevel, U32 NULL, // compression type NULL); // filter type } - else if (format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8) + else if (format == GFXFormatR8G8B8A8 || format == GFXFormatR8G8B8X8 || format == GFXFormatR8G8B8A8_LINEAR_FORCE) { png_set_IHDR(png_ptr, info_ptr, width, height, // the width & height diff --git a/Engine/source/gfx/gfxEnums.h b/Engine/source/gfx/gfxEnums.h index cdb61b6a4..af0e1b920 100644 --- a/Engine/source/gfx/gfxEnums.h +++ b/Engine/source/gfx/gfxEnums.h @@ -192,6 +192,12 @@ enum GFXFormat GFXFormatD24S8, GFXFormatD24FS8, + // sRGB formats + GFXFormatR8G8B8A8_SRGB, + + // Guaranteed RGBA8 (for apis 
which really dont like bgr) + GFXFormatR8G8B8A8_LINEAR_FORCE, + // 64 bit texture formats... GFXFormatR16G16B16A16,// first in group... GFXFormatR16G16B16A16F, @@ -206,9 +212,6 @@ enum GFXFormat GFXFormatDXT4, GFXFormatDXT5, - // sRGB formats - GFXFormatR8G8B8A8_SRGB, - GFXFormat_COUNT, GFXFormat_8BIT = GFXFormatA8, diff --git a/Engine/source/gui/core/guiOffscreenCanvas.cpp b/Engine/source/gui/core/guiOffscreenCanvas.cpp index e54479931..f3ce5c2e6 100644 --- a/Engine/source/gui/core/guiOffscreenCanvas.cpp +++ b/Engine/source/gui/core/guiOffscreenCanvas.cpp @@ -176,7 +176,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr GFX->setWorldMatrix( MatrixF::Identity ); GFX->setViewMatrix( MatrixF::Identity ); GFX->setProjectionMatrix( MatrixF::Identity ); - + RectI contentRect(Point2I(0,0), mTargetSize); { // Render active GUI Dialogs @@ -210,7 +210,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr GFX->getDrawUtil()->clearBitmapModulation(); } - + mTarget->resolve(); GFX->popActiveRenderTarget(); @@ -219,6 +219,13 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr // Keep track of the last time we rendered. mLastRenderMs = Platform::getRealMilliseconds(); mTargetDirty = mDynamicTarget; + + onFrameRendered(); +} + +void GuiOffscreenCanvas::onFrameRendered() +{ + } Point2I GuiOffscreenCanvas::getWindowSize() diff --git a/Engine/source/gui/core/guiOffscreenCanvas.h b/Engine/source/gui/core/guiOffscreenCanvas.h index bf7d53bef..9807f56a7 100644 --- a/Engine/source/gui/core/guiOffscreenCanvas.h +++ b/Engine/source/gui/core/guiOffscreenCanvas.h @@ -23,6 +23,7 @@ public: void onRemove(); void renderFrame(bool preRenderOnly, bool bufferSwap); + virtual void onFrameRendered(); Point2I getWindowSize(); diff --git a/Engine/source/platform/input/openVR/openVROverlay.cpp b/Engine/source/platform/input/openVR/openVROverlay.cpp index d22abbd51..24bede00b 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.cpp +++ b/Engine/source/platform/input/openVR/openVROverlay.cpp @@ -1,5 +1,18 @@ +#include "platform/input/openVR/openVRProvider.h" #include "platform/input/openVR/openVROverlay.h" +#include "gfx/D3D11/gfxD3D11Device.h" +#include "gfx/D3D11/gfxD3D11TextureObject.h" +#include "gfx/D3D11/gfxD3D11EnumTranslate.h" + +#ifdef TORQUE_OPENGL +#include "gfx/gl/gfxGLDevice.h" +#include "gfx/gl/gfxGLTextureObject.h" +#include "gfx/gl/gfxGLEnumTranslate.h" +#endif + +#include "postFx/postEffectCommon.h" + ImplementEnumType(OpenVROverlayType, "Desired overlay type for OpenVROverlay. .\n\n" "@ingroup OpenVR") @@ -7,9 +20,18 @@ ImplementEnumType(OpenVROverlayType, { OpenVROverlay::OVERLAYTYPE_DASHBOARD, "Dashboard" }, EndImplementEnumType; +IMPLEMENT_CONOBJECT(OpenVROverlay); + OpenVROverlay::OpenVROverlay() { + mTransform = MatrixF(1); + mOverlayWidth = 1.5f; + mOverlayFlags = 0; + mOverlayColor = ColorF(1, 1, 1, 1); + mTrackingOrigin = vr::TrackingUniverseSeated; + + mTargetFormat = GFXFormatR8G8B8A8_LINEAR_FORCE; // needed for openvr! 
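+   // The offscreen canvas target resolves into a staging texture that is handed directly to
+   // vr::IVROverlay::SetOverlayTexture() in onFrameRendered(), so it must genuinely be RGBA8;
+   // GFXFormatR8G8B8A8_LINEAR_FORCE is the "guaranteed RGBA8" format added in gfxEnums.h for
+   // backends that would otherwise substitute a BGR ordering.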
 }

 OpenVROverlay::~OpenVROverlay()
 {

 }

+static bool setProtectedOverlayTypeDirty(void *obj, const char *array, const char *data)
+{
+   OpenVROverlay *object = static_cast<OpenVROverlay*>(obj);
+   object->mOverlayTypeDirty = true;
+   return true;
+}
+
+static bool setProtectedOverlayDirty(void *obj, const char *array, const char *data)
+{
+   OpenVROverlay *object = static_cast<OpenVROverlay*>(obj);
+   object->mOverlayDirty = true;
+   return true;
+}
+
 void OpenVROverlay::initPersistFields()
 {
+   addProtectedField("overlayType", TypeOpenVROverlayType, Offset(mOverlayType, OpenVROverlay), &setProtectedOverlayTypeDirty, &defaultProtectedGetFn,
+      "Type of overlay.");
+   addProtectedField("overlayFlags", TypeS32, Offset(mOverlayFlags, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Flags for overlay.");
+   addProtectedField("overlayWidth", TypeF32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Width of overlay in meters.");
+   addProtectedField("overlayColor", TypeColorF, Offset(mOverlayColor, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Backing color of overlay.");
+
+   addProtectedField("transformType", TypeOpenVROverlayTransformType, Offset(mOverlayTransformType, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Transform type of overlay.");
+   addProtectedField("transformPosition", TypeMatrixPosition, Offset(mTransform, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Position of overlay.");
+   addProtectedField("transformRotation", TypeMatrixRotation, Offset(mTransform, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Rotation of overlay.");
+   addProtectedField("transformDeviceIndex", TypeS32, Offset(mTransformDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Index of the tracked device the overlay transform is relative to.");
+   addProtectedField("transformDeviceComponent", TypeString, Offset(mTransformDeviceComponent, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Name of the tracked device component the overlay transform is attached to.");
+
+   addProtectedField("inputMethod", TypeOpenVROverlayInputMethod, Offset(mInputMethod, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Type of input method.");
+   addProtectedField("mouseScale", TypePoint2F, Offset(mMouseScale, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Scale of mouse input.");
+
+   addProtectedField("trackingOrigin", TypeOpenVRTrackingUniverseOrigin, Offset(mTrackingOrigin, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Tracking origin.");
+
+   addProtectedField("controllerDevice", TypeS32, Offset(mControllerDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn,
+      "Index of controller to attach overlay to.");
+
    Parent::initPersistFields();
 }

@@ -41,11 +108,49 @@ void OpenVROverlay::onRemove()
       vr::VROverlay()->DestroyOverlay(mOverlayHandle);
       mOverlayHandle = NULL;
    }
+
+   if (mThumbOverlayHandle)
+   {
+      vr::VROverlay()->DestroyOverlay(mThumbOverlayHandle);
+      mThumbOverlayHandle = NULL;
+   }
 }

 void OpenVROverlay::resetOverlay()
 {
+   vr::IVROverlay *overlay = vr::VROverlay();
+   if (!overlay)
+      return;
+
+   if (mOverlayHandle)
+   {
+      overlay->DestroyOverlay(mOverlayHandle);
+      mOverlayHandle = NULL;
+   }
+
+   if (mThumbOverlayHandle)
+   {
+      overlay->DestroyOverlay(mThumbOverlayHandle);
+      mThumbOverlayHandle = NULL;
+   }
+
+   if (mOverlayType == OpenVROverlay::OVERLAYTYPE_DASHBOARD)
+   {
+      overlay->CreateDashboardOverlay(mInternalName,
mInternalName, &mOverlayHandle, &mThumbOverlayHandle); + } + else + { + overlay->CreateOverlay(mInternalName, mInternalName, &mOverlayHandle); + } + + mOverlayDirty = true; mOverlayTypeDirty = false; + + // Pre-render start frame so we have a texture available + if (!mTarget) + { + renderFrame(false, false); + } } void OpenVROverlay::updateOverlay() @@ -53,16 +158,74 @@ void OpenVROverlay::updateOverlay() if (mOverlayTypeDirty) resetOverlay(); - // Update params TODO + // Update params + vr::IVROverlay *overlay = vr::VROverlay(); + if (!overlay || !mOverlayHandle) + return; + + if (!mOverlayDirty) + return; + + MatrixF vrMat(1); + vr::HmdMatrix34_t ovrMat; + vr::HmdVector2_t ovrMouseScale; + ovrMouseScale.v[0] = mMouseScale.x; + ovrMouseScale.v[1] = mMouseScale.y; + + OpenVRUtil::convertTransformToOVR(mTransform, vrMat); + OpenVRUtil::convertMatrixFPlainToSteamVRAffineMatrix(vrMat, ovrMat); + + MatrixF reverseMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(ovrMat); + MatrixF finalReverseMat(1); + OpenVRUtil::convertTransformFromOVR(reverseMat, finalReverseMat); + + switch (mOverlayTransformType) + { + case vr::VROverlayTransform_Absolute: + overlay->SetOverlayTransformAbsolute(mOverlayHandle, mTrackingOrigin, &ovrMat); + break; + case vr::VROverlayTransform_TrackedDeviceRelative: + overlay->SetOverlayTransformTrackedDeviceRelative(mOverlayHandle, mTransformDeviceIndex, &ovrMat); + break; + case vr::VROverlayTransform_TrackedComponent: + overlay->SetOverlayTransformTrackedDeviceComponent(mOverlayHandle, mTransformDeviceIndex, mTransformDeviceComponent.c_str()); + break; + // NOTE: system not handled here - doesn't seem possible to create these + default: + break; + } + + // overlay->SetOverlayColor(mOverlayHandle, mOverlayColor.red, mOverlayColor.green, mOverlayColor.blue); + overlay->SetOverlayAlpha(mOverlayHandle, mOverlayColor.alpha); + overlay->SetOverlayMouseScale(mOverlayHandle, &ovrMouseScale); + overlay->SetOverlayInputMethod(mOverlayHandle, mInputMethod); + overlay->SetOverlayWidthInMeters(mOverlayHandle, mOverlayWidth); + + // NOTE: if flags in openvr change, double check this + /*for (U32 i = vr::VROverlayFlags_None; i <= vr::VROverlayFlags_ShowTouchPadScrollWheel; i++) + { + overlay->SetOverlayFlag(mOverlayHandle, (vr::VROverlayFlags)i, mOverlayFlags & (1 << i)); + }*/ + mOverlayDirty = false; } void OpenVROverlay::showOverlay() { + updateOverlay(); if (mOverlayHandle == NULL) return; - vr::VROverlay()->ShowOverlay(mOverlayHandle); + vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle); + if (err != vr::VROverlayError_None) + { + Con::errorf("VR Overlay error!"); + } + + if (!mStagingTexture) + { + renderFrame(false, false); + } } void OpenVROverlay::hideOverlay() @@ -104,7 +267,7 @@ bool OpenVROverlay::isActiveDashboardOverlay() return false; // TODO WHERE DID I GET THIS FROM } -MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const vr::ETrackingUniverseOrigin trackingOrigin, const Point2F &pos) +MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const Point2F &pos) { if (mOverlayHandle == NULL) return MatrixF::Identity; @@ -114,7 +277,7 @@ MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const vr::ETrackingUniv vec.v[1] = pos.y; vr::HmdMatrix34_t outMat; MatrixF outTorqueMat; - if (vr::VROverlay()->GetTransformForOverlayCoordinates(mOverlayHandle, trackingOrigin, vec, &outMat) != vr::VROverlayError_None) + if (vr::VROverlay()->GetTransformForOverlayCoordinates(mOverlayHandle, mTrackingOrigin, vec, &outMat) != 
vr::VROverlayError_None) return MatrixF::Identity; MatrixF vrMat(1); @@ -123,7 +286,7 @@ MatrixF OpenVROverlay::getTransformForOverlayCoordinates(const vr::ETrackingUniv return outTorqueMat; } -bool OpenVROverlay::castRay(const vr::ETrackingUniverseOrigin trackingOrigin, const Point3F &origin, const Point3F &direction, RayInfo *info) +bool OpenVROverlay::castRay(const Point3F &origin, const Point3F &direction, RayInfo *info) { if (mOverlayHandle == NULL) return false; @@ -131,7 +294,7 @@ bool OpenVROverlay::castRay(const vr::ETrackingUniverseOrigin trackingOrigin, c vr::VROverlayIntersectionParams_t params; vr::VROverlayIntersectionResults_t result; - params.eOrigin = trackingOrigin; + params.eOrigin = mTrackingOrigin; params.vSource.v[0] = origin.x; params.vSource.v[1] = origin.y; params.vSource.v[2] = origin.z; @@ -159,3 +322,135 @@ void OpenVROverlay::moveGamepadFocusToNeighbour() } +void OpenVROverlay::handleOpenVREvents() +{ + vr::VREvent_t vrEvent; + while (vr::VROverlay()->PollNextOverlayEvent(mOverlayHandle, &vrEvent, sizeof(vrEvent))) + { + InputEventInfo eventInfo; + eventInfo.deviceType = MouseDeviceType; + eventInfo.deviceInst = 0; + eventInfo.objType = SI_AXIS; + eventInfo.modifier = (InputModifiers)0; + eventInfo.ascii = 0; + + switch (vrEvent.eventType) + { + case vr::VREvent_MouseMove: + { + eventInfo.objType = SI_AXIS; + eventInfo.objInst = SI_XAXIS; + eventInfo.action = SI_MAKE; + eventInfo.fValue = vrEvent.data.mouse.x; + processMouseEvent(eventInfo); + + eventInfo.objType = SI_AXIS; + eventInfo.objInst = SI_YAXIS; + eventInfo.action = SI_MAKE; + eventInfo.fValue = vrEvent.data.mouse.y; + processMouseEvent(eventInfo); + } + break; + + case vr::VREvent_MouseButtonDown: + { + eventInfo.objType = SI_BUTTON; + eventInfo.objInst = (InputObjectInstances)OpenVRUtil::convertOpenVRButtonToTorqueButton(vrEvent.data.mouse.button); + eventInfo.action = SI_MAKE; + eventInfo.fValue = 1.0f; + processMouseEvent(eventInfo); + } + break; + + case vr::VREvent_MouseButtonUp: + { + eventInfo.objType = SI_BUTTON; + eventInfo.objInst = (InputObjectInstances)OpenVRUtil::convertOpenVRButtonToTorqueButton(vrEvent.data.mouse.button); + eventInfo.action = SI_BREAK; + eventInfo.fValue = 0.0f; + processMouseEvent(eventInfo); + } + break; + + case vr::VREvent_OverlayShown: + { + markDirty(); + } + break; + + case vr::VREvent_Quit: + AssertFatal(false, "WTF is going on here"); + break; + } + } + + if (mThumbOverlayHandle != vr::k_ulOverlayHandleInvalid) + { + while (vr::VROverlay()->PollNextOverlayEvent(mThumbOverlayHandle, &vrEvent, sizeof(vrEvent))) + { + switch (vrEvent.eventType) + { + case vr::VREvent_OverlayShown: + { + markDirty(); + } + break; + } + } + } +} + +void OpenVROverlay::onFrameRendered() +{ + vr::IVROverlay *overlay = vr::VROverlay(); + if (!overlay || !mOverlayHandle) + return; + + updateOverlay(); + + Point2I desiredSize = mTarget->getSize(); + if (mStagingTexture.isNull() || mStagingTexture.getWidthHeight() != desiredSize) + { + Point2I sz = mStagingTexture.getWidthHeight(); + mStagingTexture.set(desiredSize.x, desiredSize.y, mTargetFormat, &VRTextureProfile, "OpenVROverlay staging texture"); + } + mTarget->resolveTo(mStagingTexture); + + vr::Texture_t tex; + if (GFX->getAdapterType() == Direct3D11) + { + tex = { (void*)static_cast(mStagingTexture.getPointer())->getResource(), vr::API_DirectX, vr::ColorSpace_Auto }; + } +#ifdef TORQUE_OPENGL + else if (GFX->getAdapterType() == OpenGL) + { + tex = { (void*)static_cast(mStagingTexture.getPointer())->getHandle(), vr::API_OpenGL, 
vr::ColorSpace_Auto }; + + } +#endif + else + { + return; + } + + //mStagingTexture->dumpToDisk("PNG", "D:\\test.png"); + + vr::EVROverlayError err = overlay->SetOverlayTexture(mOverlayHandle, &tex); + if (err != vr::VROverlayError_None) + { + Con::errorf("VR: Error setting overlay texture."); + } + + //Con::printf("Overlay visible ? %s", vr::VROverlay()->IsOverlayVisible(mOverlayHandle) ? "YES" : "NO"); +} + + +DefineEngineMethod(OpenVROverlay, showOverlay, void, (), , "") +{ + object->showOverlay(); +} + +DefineEngineMethod(OpenVROverlay, hideOverlay, void, (), , "") +{ + object->hideOverlay(); +} \ No newline at end of file diff --git a/Engine/source/platform/input/openVR/openVROverlay.h b/Engine/source/platform/input/openVR/openVROverlay.h index 8a5a82f17..6998f3423 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.h +++ b/Engine/source/platform/input/openVR/openVROverlay.h @@ -33,6 +33,7 @@ public: }; vr::VROverlayHandle_t mOverlayHandle; + vr::VROverlayHandle_t mThumbOverlayHandle; // Desired OpenVR state U32 mOverlayFlags; @@ -41,15 +42,19 @@ public: vr::VROverlayTransformType mOverlayTransformType; MatrixF mTransform; vr::TrackedDeviceIndex_t mTransformDeviceIndex; - const char* mTransformDeviceComponent; + String mTransformDeviceComponent; vr::VROverlayInputMethod mInputMethod; Point2F mMouseScale; - MatrixF mTrackingOrigin; + vr::ETrackingUniverseOrigin mTrackingOrigin; vr::TrackedDeviceIndex_t mControllerDeviceIndex; + GFXTexHandle mStagingTexture; ///< Texture used by openvr + + ColorF mOverlayColor; + bool mOverlayTypeDirty; ///< Overlay type is dirty bool mOverlayDirty; ///< Overlay properties are dirty OverlayType mOverlayType; @@ -61,6 +66,8 @@ public: static void initPersistFields(); + DECLARE_CONOBJECT(OpenVROverlay); + bool onAdd(); void onRemove(); @@ -76,10 +83,13 @@ public: bool isGamepadFocussed(); bool isActiveDashboardOverlay(); - MatrixF getTransformForOverlayCoordinates(const vr::ETrackingUniverseOrigin trackingOrigin, const Point2F &pos); - bool castRay(const vr::ETrackingUniverseOrigin trackingOrigin, const Point3F &origin, const Point3F &direction, RayInfo *info); + MatrixF getTransformForOverlayCoordinates(const Point2F &pos); + bool castRay(const Point3F &origin, const Point3F &direction, RayInfo *info); void moveGamepadFocusToNeighbour(); + + void handleOpenVREvents(); + void onFrameRendered(); }; typedef OpenVROverlay::OverlayType OpenVROverlayType; diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 8c49ef49e..c6b6f1379 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -22,6 +22,8 @@ #include "gfx/gl/gfxGLEnumTranslate.h" #endif +AngAxisF gLastMoveRot; // jamesu - this is just here for temp debugging + namespace OpenVRUtil { void convertTransformFromOVR(const MatrixF &inRotTMat, MatrixF& outRotation) @@ -93,6 +95,19 @@ namespace OpenVRUtil outMat.m[2][3] = row2.w; } + U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton) + { + switch (vrButton) + { + case vr::VRMouseButton_Left: + return KEY_BUTTON0; + case vr::VRMouseButton_Right: + return KEY_BUTTON1; + case vr::VRMouseButton_Middle: + return KEY_BUTTON2; + } + } + vr::VRTextureBounds_t TorqueRectToBounds(const RectI &rect, const Point2I &widthHeight) { @@ -343,7 +358,7 @@ OpenVRProvider::OpenVRProvider() : INPUTMGR->registerDevice(this); dMemset(&mLUID, '\0', sizeof(mLUID)); - mTrackingSpace = vr::TrackingUniverseStanding; + 
mTrackingSpace = vr::TrackingUniverseSeated; } OpenVRProvider::~OpenVRProvider() @@ -896,11 +911,16 @@ void OpenVRProvider::updateTrackedPoses() { mValidPoseCount++; MatrixF mat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mTrackedDevicePose[nDevice].mDeviceToAbsoluteTracking); - mat.inverse(); if (nDevice == vr::k_unTrackedDeviceIndex_Hmd) { mHMDRenderState.mHMDPose = mat; + // jaeesu - store the last rotation for temp debugging + MatrixF torqueMat(1); + OpenVRUtil::convertTransformFromOVR(mat, torqueMat); + gLastMoveRot = AngAxisF(torqueMat); + //Con::printf("gLastMoveRot = %f,%f,%f,%f", gLastMoveRot.axis.x, gLastMoveRot.axis.y, gLastMoveRot.axis.z, gLastMoveRot.angle); + mHMDRenderState.mHMDPose.inverse(); } vr::TrackedDevicePose_t &outPose = mTrackedDevicePose[nDevice]; @@ -1119,3 +1139,8 @@ DefineEngineStaticMethod(OpenVR, resetSensors, void, (), , } // Overlay stuff + +DefineEngineFunction(OpenVRIsCompiledIn, bool, (), , "") +{ + return true; +} diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index 94b43fb38..76f66e784 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -30,7 +30,14 @@ typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin; typedef vr::EOverlayDirection OpenVROverlayDirection; typedef vr::EVRState OpenVRState; +DefineEnumType(OpenVROverlayInputMethod); DefineEnumType(OpenVROverlayTransformType); +DefineEnumType(OpenVRGamepadTextInputMode); +DefineEnumType(OpenVRGamepadTextInputLineMode); +DefineEnumType(OpenVRTrackingResult); +DefineEnumType(OpenVRTrackingUniverseOrigin); +DefineEnumType(OpenVROverlayDirection); +DefineEnumType(OpenVRState); namespace OpenVRUtil { @@ -45,6 +52,8 @@ namespace OpenVRUtil /// Converts a MatrixF to a vr::HmdMatrix34_t void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat); + + U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton); }; template class VRTextureSet diff --git a/Engine/source/sim/actionMap.cpp b/Engine/source/sim/actionMap.cpp index e3db62cef..e4455d458 100644 --- a/Engine/source/sim/actionMap.cpp +++ b/Engine/source/sim/actionMap.cpp @@ -1450,9 +1450,8 @@ bool ActionMap::processAction(const InputEventInfo* pEvent) } else { - // Handle rotation (QuatF) - QuatF quat(pEvent->fValue, pEvent->fValue2, pEvent->fValue3, pEvent->fValue4); - AngAxisF aa(quat); + // Handle rotation (AngAxisF) + AngAxisF aa(Point3F(pEvent->fValue, pEvent->fValue2, pEvent->fValue3), pEvent->fValue4); aa.axis.normalize(); argv[1] = Con::getFloatArg( aa.axis.x ); argv[2] = Con::getFloatArg( aa.axis.y ); From e7bafe3c7bf4e46fd740f302d68dee630d7efb60 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sun, 15 May 2016 00:39:08 +0100 Subject: [PATCH 15/33] Fix cmake file for openvr --- Tools/CMake/modules/module_openvr.cmake | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Tools/CMake/modules/module_openvr.cmake b/Tools/CMake/modules/module_openvr.cmake index 66a490348..0d8d2e8c6 100644 --- a/Tools/CMake/modules/module_openvr.cmake +++ b/Tools/CMake/modules/module_openvr.cmake @@ -4,11 +4,11 @@ option(TORQUE_OPENVR "Enable openvr module" OFF) mark_as_advanced(TORQUE_OPENVR) if(TORQUE_OPENVR) - if(TORQUE_OCULUSVR_SDK_PATH STREQUAL "") + if(TORQUE_OPENVR_SDK_PATH STREQUAL "") set(TORQUE_OPENVR_SDK_PATH "" CACHE PATH "openvr library path" FORCE) endif() else() # hide variable - set(TORQUE_OPENVR_SDK_PATH "" CACHE INTERNAL "" FORCE) 
+ set(TORQUE_OPENVR_SDK_PATH "" CACHE INTERNAL "" FORCE) endif() if(TORQUE_OPENVR) From f91aa639d61556d8e614a368b6437041b1e5d6d8 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Wed, 18 May 2016 00:18:02 +0100 Subject: [PATCH 16/33] Remove projection offset, add the hmd head matrix. Also tidy up a few things. --- .../T3D/gameBase/extended/extendedMove.cpp | 40 ++++++++++++------- Engine/source/T3D/gameBase/gameConnection.cpp | 18 +++++++++ Engine/source/T3D/gameBase/gameConnection.h | 4 ++ Engine/source/T3D/gameFunctions.cpp | 9 +---- Engine/source/gfx/gfxDevice.cpp | 3 +- Engine/source/gfx/gfxDevice.h | 26 ++++++++---- Engine/source/gui/3d/guiTSControl.cpp | 13 ++---- Engine/source/gui/3d/guiTSControl.h | 2 +- Engine/source/gui/core/guiOffscreenCanvas.cpp | 2 +- Engine/source/gui/worldEditor/editTSCtrl.cpp | 1 + .../advanced/advancedLightBinManager.cpp | 23 ----------- .../platform/input/openVR/openVRProvider.cpp | 34 ++++++++-------- .../platform/input/openVR/openVRProvider.h | 5 +-- .../source/platform/output/IDisplayDevice.h | 7 ++-- Engine/source/scene/reflector.cpp | 13 +++--- Engine/source/scene/sceneCameraState.cpp | 15 ++++++- Engine/source/scene/sceneCameraState.h | 6 +++ Engine/source/scene/sceneManager.cpp | 3 -- Engine/source/scene/sceneRenderState.cpp | 6 +-- Engine/source/scene/sceneRenderState.h | 9 ----- 20 files changed, 126 insertions(+), 113 deletions(-) diff --git a/Engine/source/T3D/gameBase/extended/extendedMove.cpp b/Engine/source/T3D/gameBase/extended/extendedMove.cpp index bf7573113..a27de9ca6 100644 --- a/Engine/source/T3D/gameBase/extended/extendedMove.cpp +++ b/Engine/source/T3D/gameBase/extended/extendedMove.cpp @@ -96,9 +96,9 @@ ExtendedMove::ExtendedMove() : Move() rotZ[i] = 0; rotW[i] = 1; - cposX[i] = 0; - cposY[i] = 0; - cposZ[i] = 0; + cposX[i] = 0; + cposY[i] = 0; + cposZ[i] = 0; EulerBasedRotation[i] = false; } @@ -139,11 +139,11 @@ void ExtendedMove::pack(BitStream *stream, const Move * basemove) { // Position if(stream->writeFlag(posX[i] != extBaseMove->posX[i])) - stream->writeSignedInt(cposX[i], MaxPositionBits); + stream->writeInt(cposX[i], MaxPositionBits); if(stream->writeFlag(posY[i] != extBaseMove->posY[i])) - stream->writeSignedInt(cposY[i], MaxPositionBits); + stream->writeInt(cposY[i], MaxPositionBits); if(stream->writeFlag(posZ[i] != extBaseMove->posZ[i])) - stream->writeSignedInt(cposZ[i], MaxPositionBits); + stream->writeInt(cposZ[i], MaxPositionBits); // Rotation stream->writeFlag(EulerBasedRotation[i]); @@ -183,7 +183,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) // Position if (stream->readFlag()) { - posX[i] = stream->readSignedInt(MaxPositionBits); + posX[i] = stream->readInt(MaxPositionBits); cposX[i] = UNCLAMPPOS(posX[i]); } else @@ -191,7 +191,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) if (stream->readFlag()) { - cposY[i] = stream->readSignedInt(MaxPositionBits); + cposY[i] = stream->readInt(MaxPositionBits); posY[i] = UNCLAMPPOS(cposY[i]); } else @@ -199,7 +199,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) if (stream->readFlag()) { - cposZ[i] = stream->readSignedInt(MaxPositionBits); + cposZ[i] = stream->readInt(MaxPositionBits); posZ[i] = UNCLAMPPOS(cposZ[i]); } else @@ -267,9 +267,9 @@ void ExtendedMove::clamp() for(U32 i=0; igetControlObject()) != 0) + { + if (cObj->useObjsEyePoint()) + obj = cObj; + } + + obj->getEyeCameraTransform(display, -1, transform); + + return true; +} + bool 
GameConnection::getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms) { GameBase* obj = getCameraObject(); diff --git a/Engine/source/T3D/gameBase/gameConnection.h b/Engine/source/T3D/gameBase/gameConnection.h index ac3774eed..13084a637 100644 --- a/Engine/source/T3D/gameBase/gameConnection.h +++ b/Engine/source/T3D/gameBase/gameConnection.h @@ -267,6 +267,10 @@ public: bool getControlCameraTransform(F32 dt,MatrixF* mat); bool getControlCameraVelocity(Point3F *vel); + /// Returns the head transform for the control object, using supplemental information + /// from the provided IDisplayDevice + bool getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform); + /// Returns the eye transforms for the control object, using supplemental information /// from the provided IDisplayDevice. bool getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms); diff --git a/Engine/source/T3D/gameFunctions.cpp b/Engine/source/T3D/gameFunctions.cpp index ceb6945a8..cea4d4c69 100644 --- a/Engine/source/T3D/gameFunctions.cpp +++ b/Engine/source/T3D/gameFunctions.cpp @@ -348,7 +348,6 @@ bool GameProcessCameraQuery(CameraQuery *query) query->farPlane = gClientSceneGraph->getVisibleDistance() * CameraAndFOV::sVisDistanceScale; // Provide some default values - query->projectionOffset = Point2F::Zero; query->stereoTargets[0] = 0; query->stereoTargets[1] = 0; query->eyeOffset[0] = Point3F::Zero; @@ -376,12 +375,6 @@ bool GameProcessCameraQuery(CameraQuery *query) // Display may activate AFTER so we need to call this again just in case display->onStartFrame(); - // The connection's display device may want to set the projection offset - if(display->providesProjectionOffset()) - { - query->projectionOffset = display->getProjectionOffset(); - } - // The connection's display device may want to set the eye offset if(display->providesEyeOffsets()) { @@ -398,6 +391,7 @@ bool GameProcessCameraQuery(CameraQuery *query) // Grab the latest overriding render view transforms connection->getControlCameraEyeTransforms(display, query->eyeTransforms); + connection->getControlCameraHeadTransform(display, &query->headMatrix); display->getStereoViewports(query->stereoViewports); display->getStereoTargets(query->stereoTargets); @@ -407,6 +401,7 @@ bool GameProcessCameraQuery(CameraQuery *query) { query->eyeTransforms[0] = query->cameraMatrix; query->eyeTransforms[1] = query->cameraMatrix; + query->headMatrix = query->cameraMatrix; } // Use the connection's FOV settings if requried diff --git a/Engine/source/gfx/gfxDevice.cpp b/Engine/source/gfx/gfxDevice.cpp index fe446ce88..5dcb0bb40 100644 --- a/Engine/source/gfx/gfxDevice.cpp +++ b/Engine/source/gfx/gfxDevice.cpp @@ -160,7 +160,8 @@ GFXDevice::GFXDevice() // misc mAllowRender = true; mCurrentRenderStyle = RS_Standard; - mCurrentProjectionOffset = Point2F::Zero; + mCurrentStereoTarget = -1; + mStereoHeadTransform = MatrixF(1); mCanCurrentlyRender = false; mInitialized = false; diff --git a/Engine/source/gfx/gfxDevice.h b/Engine/source/gfx/gfxDevice.h index 5ae7567d1..5aec5ad8e 100644 --- a/Engine/source/gfx/gfxDevice.h +++ b/Engine/source/gfx/gfxDevice.h @@ -288,13 +288,19 @@ protected: /// The style of rendering that is to be performed, based on GFXDeviceRenderStyles U32 mCurrentRenderStyle; - /// The current projection offset. May be used during side-by-side rendering, for example. 
- Point2F mCurrentProjectionOffset; + /// Current stereo target being rendered to + S32 mCurrentStereoTarget; /// Eye offset used when using a stereo rendering style Point3F mStereoEyeOffset[NumStereoPorts]; + /// Center matrix for head + MatrixF mStereoHeadTransform; + + /// Left and right matrix for eyes MatrixF mStereoEyeTransforms[NumStereoPorts]; + + /// Inverse of mStereoEyeTransforms MatrixF mInverseStereoEyeTransforms[NumStereoPorts]; /// Fov port settings @@ -345,21 +351,25 @@ public: /// Retrieve the current rendering style based on GFXDeviceRenderStyles U32 getCurrentRenderStyle() const { return mCurrentRenderStyle; } + /// Retrieve the current stereo target being rendered to + S32 getCurrentStereoTarget() const { return mCurrentStereoTarget; } + /// Set the current rendering style, based on GFXDeviceRenderStyles void setCurrentRenderStyle(U32 style) { mCurrentRenderStyle = style; } - /// Set the current projection offset used during stereo rendering - const Point2F& getCurrentProjectionOffset() { return mCurrentProjectionOffset; } - - /// Get the current projection offset used during stereo rendering - void setCurrentProjectionOffset(const Point2F& offset) { mCurrentProjectionOffset = offset; } + /// Set the current stereo target being rendered to (in case we're doing anything with postfx) + void setCurrentStereoTarget(const F32 targetId) { mCurrentStereoTarget = targetId; } /// Get the current eye offset used during stereo rendering const Point3F* getStereoEyeOffsets() { return mStereoEyeOffset; } + const MatrixF& getStereoHeadTransform() { return mStereoHeadTransform; } const MatrixF* getStereoEyeTransforms() { return mStereoEyeTransforms; } const MatrixF* getInverseStereoEyeTransforms() { return mInverseStereoEyeTransforms; } + /// Sets the head matrix for stereo rendering + void setStereoHeadTransform(const MatrixF &mat) { mStereoHeadTransform = mat; } + /// Set the current eye offset used during stereo rendering void setStereoEyeOffsets(Point3F *offsets) { dMemcpy(mStereoEyeOffset, offsets, sizeof(Point3F) * NumStereoPorts); } @@ -398,6 +408,8 @@ public: } setViewport(mStereoViewports[eyeId]); } + + mCurrentStereoTarget = eyeId; } GFXCardProfiler* getCardProfiler() const { return mCardProfiler; } diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 57c6d2fef..5175d4ac9 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -160,7 +160,6 @@ GuiTSCtrl::GuiTSCtrl() mLastCameraQuery.farPlane = 10.0f; mLastCameraQuery.nearPlane = 0.01f; - mLastCameraQuery.projectionOffset = Point2F::Zero; mLastCameraQuery.hasFovPort = false; mLastCameraQuery.hasStereoTargets = false; @@ -556,12 +555,6 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) mLastCameraQuery.displayDevice->setDrawMode(GFXDevice::RS_Standard); } - // The connection's display device may want to set the projection offset - if (mLastCameraQuery.displayDevice->providesProjectionOffset()) - { - mLastCameraQuery.projectionOffset = mLastCameraQuery.displayDevice->getProjectionOffset(); - } - // The connection's display device may want to set the eye offset if (mLastCameraQuery.displayDevice->providesEyeOffsets()) { @@ -586,7 +579,6 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) // Set up the appropriate render style U32 prevRenderStyle = GFX->getCurrentRenderStyle(); - Point2F prevProjectionOffset = GFX->getCurrentProjectionOffset(); Point2I renderSize = getExtent(); Frustum frustum; @@ -595,8 +587,8 
@@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) if (mRenderStyle == RenderStyleStereoSideBySide) { GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSideBySide); - GFX->setCurrentProjectionOffset(mLastCameraQuery.projectionOffset); GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset); + GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix); if (!mLastCameraQuery.hasStereoTargets) { @@ -626,12 +618,14 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) // Use the view matrix determined from the display device myTransforms[0] = mLastCameraQuery.eyeTransforms[0]; myTransforms[1] = mLastCameraQuery.eyeTransforms[1]; + myTransforms[2] = mLastCameraQuery.cameraMatrix; } else { // Use the view matrix determined from the control object myTransforms[0] = mLastCameraQuery.cameraMatrix; myTransforms[1] = mLastCameraQuery.cameraMatrix; + myTransforms[2] = mLastCameraQuery.cameraMatrix; QuatF qrot = mLastCameraQuery.cameraMatrix; Point3F pos = mLastCameraQuery.cameraMatrix.getPosition(); @@ -678,6 +672,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) // render the final composite view GFX->setCurrentRenderStyle(GFXDevice::RS_StereoSeparate); GFX->setStereoEyeOffsets(mLastCameraQuery.eyeOffset); + GFX->setStereoHeadTransform(mLastCameraQuery.headMatrix); GFX->setStereoFovPort(mLastCameraQuery.fovPort); // NOTE: this specifies fov for BOTH eyes GFX->setSteroViewports(mLastCameraQuery.stereoViewports); GFX->setStereoTargets(mLastCameraQuery.stereoTargets); diff --git a/Engine/source/gui/3d/guiTSControl.h b/Engine/source/gui/3d/guiTSControl.h index 82bf0ebdb..a15b95b53 100644 --- a/Engine/source/gui/3d/guiTSControl.h +++ b/Engine/source/gui/3d/guiTSControl.h @@ -49,13 +49,13 @@ struct CameraQuery F32 farPlane; F32 fov; FovPort fovPort[2]; // fov for each eye - Point2F projectionOffset; Point3F eyeOffset[2]; MatrixF eyeTransforms[2]; bool ortho; bool hasFovPort; bool hasStereoTargets; MatrixF cameraMatrix; + MatrixF headMatrix; // center matrix (for HMDs) S32 currentEye; RectI stereoViewports[2]; // destination viewports GFXTextureTarget* stereoTargets[2]; diff --git a/Engine/source/gui/core/guiOffscreenCanvas.cpp b/Engine/source/gui/core/guiOffscreenCanvas.cpp index f3ce5c2e6..fc23c1369 100644 --- a/Engine/source/gui/core/guiOffscreenCanvas.cpp +++ b/Engine/source/gui/core/guiOffscreenCanvas.cpp @@ -193,7 +193,7 @@ void GuiOffscreenCanvas::renderFrame(bool preRenderOnly, bool bufferSwap /* = tr // Fill Blue if no Dialogs if(this->size() == 0) - GFX->clear( GFXClearTarget, ColorF(0,0,1,1), 1.0f, 0 ); + GFX->clear( GFXClearTarget, ColorF(0,0,0,1), 1.0f, 0 ); GFX->setClipRect( contentRect ); diff --git a/Engine/source/gui/worldEditor/editTSCtrl.cpp b/Engine/source/gui/worldEditor/editTSCtrl.cpp index 928cbfbf1..4518ef3ed 100644 --- a/Engine/source/gui/worldEditor/editTSCtrl.cpp +++ b/Engine/source/gui/worldEditor/editTSCtrl.cpp @@ -1162,6 +1162,7 @@ bool EditTSCtrl::processCameraQuery(CameraQuery * query) query->cameraMatrix = camRot; query->cameraMatrix.setPosition(camPos); + query->headMatrix = query->cameraMatrix; query->fov = mOrthoFOV; } diff --git a/Engine/source/lighting/advanced/advancedLightBinManager.cpp b/Engine/source/lighting/advanced/advancedLightBinManager.cpp index 5e7259b3a..0dd9f59c8 100644 --- a/Engine/source/lighting/advanced/advancedLightBinManager.cpp +++ b/Engine/source/lighting/advanced/advancedLightBinManager.cpp @@ -453,30 +453,7 @@ void AdvancedLightBinManager::_setupPerFrameParameters( const SceneRenderState * // Perform a 
camera offset. We need to manually perform this offset on the sun (or vector) light's // polygon, which is at the far plane. - const Point2F& projOffset = frustum.getProjectionOffset(); Point3F cameraOffsetPos = cameraPos; - if(!projOffset.isZero()) - { - // First we need to calculate the offset at the near plane. The projOffset - // given above can be thought of a percent as it ranges from 0..1 (or 0..-1). - F32 nearOffset = frustum.getNearRight() * projOffset.x; - - // Now given the near plane distance from the camera we can solve the right - // triangle and calcuate the SIN theta for the offset at the near plane. - // SIN theta = x/y - F32 sinTheta = nearOffset / frustum.getNearDist(); - - // Finally, we can calcuate the offset at the far plane, which is where our sun (or vector) - // light's polygon is drawn. - F32 farOffset = frustum.getFarDist() * sinTheta; - - // We can now apply this far plane offset to the far plane itself, which then compensates - // for the project offset. - MatrixF camTrans = frustum.getTransform(); - VectorF offset = camTrans.getRightVector(); - offset *= farOffset; - cameraOffsetPos += offset; - } // Now build the quad for drawing full-screen vector light // passes.... this is a volatile VB and updates every frame. diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index c6b6f1379..79fed71a8 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -597,15 +597,27 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos) outPos = pos;// Point3F(-pos.x, pos.z, -pos.y); } -void OpenVRProvider::getFrameEyePose(IDevicePose *pose, U32 eye) const +void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const { - AssertFatal(eye >= 0 && eye < 2, "Out of bounds eye"); + AssertFatal(eyeId >= -1 && eyeId < 2, "Out of bounds eye"); - MatrixF mat = mHMDRenderState.mEyePose[eye] * mHMDRenderState.mHMDPose; // same order as in the openvr example + if (eyeId == -1) + { + // NOTE: this is codename for "head" + MatrixF mat = mHMDRenderState.mHMDPose; // same order as in the openvr example - OpenVRTransformToRotPos(mat, pose->orientation, pose->position); - pose->velocity = Point3F(0); - pose->angularVelocity = Point3F(0); + OpenVRTransformToRotPos(mat, pose->orientation, pose->position); + pose->velocity = Point3F(0); + pose->angularVelocity = Point3F(0); + } + else + { + MatrixF mat = mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose; // same order as in the openvr example + + OpenVRTransformToRotPos(mat, pose->orientation, pose->position); + pose->velocity = Point3F(0); + pose->angularVelocity = Point3F(0); + } } bool OpenVRProvider::providesEyeOffsets() const @@ -633,16 +645,6 @@ void OpenVRProvider::getFovPorts(FovPort *out) const dMemcpy(out, mHMDRenderState.mEyeFov, sizeof(mHMDRenderState.mEyeFov)); } -bool OpenVRProvider::providesProjectionOffset() const -{ - return mHMD != NULL; -} - -const Point2F& OpenVRProvider::getProjectionOffset() const -{ - return Point2F(0, 0); -} - void OpenVRProvider::getStereoViewports(RectI *out) const { out[0] = mHMDRenderState.mEyeViewport[0]; diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index 76f66e784..de3a73e89 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -157,7 +157,7 @@ public: /// @name 
Display handling /// { virtual bool providesFrameEyePose() const; - virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const; + virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const; virtual bool providesEyeOffsets() const; /// Returns eye offset not taking into account any position tracking info @@ -166,9 +166,6 @@ public: virtual bool providesFovPorts() const; virtual void getFovPorts(FovPort *out) const; - virtual bool providesProjectionOffset() const; - virtual const Point2F& getProjectionOffset() const; - virtual void getStereoViewports(RectI *out) const; virtual void getStereoTargets(GFXTextureTarget **out) const; diff --git a/Engine/source/platform/output/IDisplayDevice.h b/Engine/source/platform/output/IDisplayDevice.h index 9ce327a04..66cdf683d 100644 --- a/Engine/source/platform/output/IDisplayDevice.h +++ b/Engine/source/platform/output/IDisplayDevice.h @@ -50,7 +50,9 @@ class IDisplayDevice { public: virtual bool providesFrameEyePose() const = 0; - virtual void getFrameEyePose(IDevicePose *pose, U32 eye) const = 0; + + /// Get a display pose for the specified eye, or the HMD if eyeId is -1. + virtual void getFrameEyePose(IDevicePose *pose, S32 eyeId) const = 0; virtual bool providesEyeOffsets() const = 0; /// Returns eye offset not taking into account any position tracking info @@ -59,9 +61,6 @@ public: virtual bool providesFovPorts() const = 0; virtual void getFovPorts(FovPort *out) const = 0; - virtual bool providesProjectionOffset() const = 0; - virtual const Point2F& getProjectionOffset() const = 0; - virtual void getStereoViewports(RectI *out) const = 0; virtual void getStereoTargets(GFXTextureTarget **out) const = 0; diff --git a/Engine/source/scene/reflector.cpp b/Engine/source/scene/reflector.cpp index 1addaf191..951cce010 100644 --- a/Engine/source/scene/reflector.cpp +++ b/Engine/source/scene/reflector.cpp @@ -418,7 +418,7 @@ void CubeReflector::updateFace( const ReflectParams ¶ms, U32 faceidx ) ); reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial ); - reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix ); + reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix ); // render scene LIGHTMGR->registerGlobalLights( &reflectRenderState.getCullingFrustum(), false ); @@ -581,7 +581,7 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) reflectTarget->attachTexture( GFXTextureTarget::Color0, reflectTex ); reflectTarget->attachTexture( GFXTextureTarget::DepthStencil, depthBuff ); GFX->pushActiveRenderTarget(); - GFX->setActiveRenderTarget( reflectTarget ); + GFX->setActiveRenderTarget( reflectTarget ); U32 objTypeFlag = -1; SceneCameraState reflectCameraState = SceneCameraState::fromGFX(); @@ -604,7 +604,6 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) // Store previous values RectI originalVP = GFX->getViewport(); - Point2F projOffset = GFX->getCurrentProjectionOffset(); const FovPort *currentFovPort = GFX->getStereoFovPort(); MatrixF inverseEyeTransforms[2]; @@ -629,9 +628,8 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) SceneCameraState cameraStateLeft = SceneCameraState::fromGFX(); SceneRenderState renderStateLeft( gClientSceneGraph, SPT_Reflect, cameraStateLeft ); renderStateLeft.setSceneRenderStyle(SRS_SideBySide); - renderStateLeft.setSceneRenderField(0); renderStateLeft.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial ); - renderStateLeft.setDiffuseCameraTransform( 
params.query->eyeTransforms[0] ); + renderStateLeft.setDiffuseCameraTransform( params.query->headMatrix ); gClientSceneGraph->renderSceneNoLights( &renderStateLeft, objTypeFlag ); @@ -648,9 +646,8 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) SceneCameraState cameraStateRight = SceneCameraState::fromGFX(); SceneRenderState renderStateRight( gClientSceneGraph, SPT_Reflect, cameraStateRight ); renderStateRight.setSceneRenderStyle(SRS_SideBySide); - renderStateRight.setSceneRenderField(1); renderStateRight.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial ); - renderStateRight.setDiffuseCameraTransform( params.query->eyeTransforms[1] ); + renderStateRight.setDiffuseCameraTransform( params.query->headMatrix ); renderStateRight.disableAdvancedLightingBins(true); gClientSceneGraph->renderSceneNoLights( &renderStateRight, objTypeFlag ); @@ -669,7 +666,7 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) ); reflectRenderState.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial ); - reflectRenderState.setDiffuseCameraTransform( params.query->cameraMatrix ); + reflectRenderState.setDiffuseCameraTransform( params.query->headMatrix ); gClientSceneGraph->renderSceneNoLights( &reflectRenderState, objTypeFlag ); } diff --git a/Engine/source/scene/sceneCameraState.cpp b/Engine/source/scene/sceneCameraState.cpp index 82b1c9daa..ca866d17a 100644 --- a/Engine/source/scene/sceneCameraState.cpp +++ b/Engine/source/scene/sceneCameraState.cpp @@ -32,6 +32,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu : mViewport( viewport ), mFrustum( frustum ), mWorldViewMatrix( worldView ), + mHeadWorldViewMatrix( worldView ), mProjectionMatrix( projection ) { mViewDirection = frustum.getTransform().getForwardVector(); @@ -39,7 +40,7 @@ SceneCameraState::SceneCameraState( const RectI& viewport, const Frustum& frustu //----------------------------------------------------------------------------- -SceneCameraState SceneCameraState::fromGFX() +SceneCameraState SceneCameraState::fromGFX( ) { return fromGFXWithViewport( GFX->getViewport() ); } @@ -56,10 +57,20 @@ SceneCameraState SceneCameraState::fromGFXWithViewport( const RectI& viewport ) Frustum frustum = GFX->getFrustum(); frustum.setTransform( camera ); - return SceneCameraState( + SceneCameraState ret = SceneCameraState( viewport, frustum, world, GFX->getProjectionMatrix() ); + + // If rendering to stereo, make sure we get the head matrix + S32 stereoTarget = GFX->getCurrentStereoTarget(); + if (stereoTarget != -1) + { + ret.mHeadWorldViewMatrix = GFX->getStereoHeadTransform(); + ret.mHeadWorldViewMatrix.inverse(); + } + + return ret; } diff --git a/Engine/source/scene/sceneCameraState.h b/Engine/source/scene/sceneCameraState.h index 9eec5d488..059f8689a 100644 --- a/Engine/source/scene/sceneCameraState.h +++ b/Engine/source/scene/sceneCameraState.h @@ -51,6 +51,9 @@ class SceneCameraState /// The inverse of the frustum's transform stored here for caching. MatrixF mWorldViewMatrix; + /// Actual head position (will be - eye pos) + MatrixF mHeadWorldViewMatrix; + /// The projection matrix. MatrixF mProjectionMatrix; @@ -88,6 +91,9 @@ class SceneCameraState /// Return the world-space view vector. 
const Point3F& getViewDirection() const { return mViewDirection; } + /// Returns the world->view transform for the head (used to calculate various display metrics) + const MatrixF& getHeadWorldViewMatrix() const { return mHeadWorldViewMatrix; } + /// Return the view->world transform. This is a shortcut for getFrustum().getTransform(). const MatrixF& getViewWorldMatrix() const { return mFrustum.getTransform(); } diff --git a/Engine/source/scene/sceneManager.cpp b/Engine/source/scene/sceneManager.cpp index 8cf74c3b7..5ed8f2669 100644 --- a/Engine/source/scene/sceneManager.cpp +++ b/Engine/source/scene/sceneManager.cpp @@ -239,7 +239,6 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S MatrixF originalWorld = GFX->getWorldMatrix(); Frustum originalFrustum = GFX->getFrustum(); - Point2F projOffset = GFX->getCurrentProjectionOffset(); const FovPort *currentFovPort = GFX->getStereoFovPort(); const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms(); const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms(); @@ -257,7 +256,6 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S SceneCameraState cameraStateLeft = SceneCameraState::fromGFX(); SceneRenderState renderStateLeft( this, renderState->getScenePassType(), cameraStateLeft ); renderStateLeft.setSceneRenderStyle(SRS_SideBySide); - renderStateLeft.setSceneRenderField(0); renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone ); // left @@ -277,7 +275,6 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S SceneCameraState cameraStateRight = SceneCameraState::fromGFX(); SceneRenderState renderStateRight( this, renderState->getScenePassType(), cameraStateRight ); renderStateRight.setSceneRenderStyle(SRS_SideBySide); - renderStateRight.setSceneRenderField(1); renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); // right diff --git a/Engine/source/scene/sceneRenderState.cpp b/Engine/source/scene/sceneRenderState.cpp index 47bb8b440..10373eaae 100644 --- a/Engine/source/scene/sceneRenderState.cpp +++ b/Engine/source/scene/sceneRenderState.cpp @@ -48,11 +48,11 @@ SceneRenderState::SceneRenderState( SceneManager* sceneManager, mDisableAdvancedLightingBins( false ), mRenderArea( view.getFrustum().getBounds() ), mAmbientLightColor( sceneManager->getAmbientLightColor() ), - mSceneRenderStyle( SRS_Standard ), - mRenderField( 0 ) + mSceneRenderStyle( SRS_Standard ) { // Setup the default parameters for the screen metrics methods. - mDiffuseCameraTransform = view.getViewWorldMatrix(); + mDiffuseCameraTransform = view.getHeadWorldViewMatrix(); + mDiffuseCameraTransform.inverse(); // The vector eye is the camera vector with its // length normalized to 1 / zFar. diff --git a/Engine/source/scene/sceneRenderState.h b/Engine/source/scene/sceneRenderState.h index 4b8fd200d..edcc583bd 100644 --- a/Engine/source/scene/sceneRenderState.h +++ b/Engine/source/scene/sceneRenderState.h @@ -72,9 +72,6 @@ class SceneRenderState /// The render style being performed SceneRenderStyle mSceneRenderStyle; - /// When doing stereo rendering, the current field that is being rendered - S32 mRenderField; - /// The render pass which we are setting up with this scene state. 
RenderPassManager* mRenderPass; @@ -237,12 +234,6 @@ class SceneRenderState /// Set the rendering style used for the scene void setSceneRenderStyle(SceneRenderStyle style) { mSceneRenderStyle = style; } - /// Get the stereo field being rendered - S32 getSceneRenderField() const { return mRenderField; } - - /// Set the stereo field being rendered - void setSceneRenderField(S32 field) { mRenderField = field; } - /// @} /// @name Transforms, projections, and viewports. From 734688ff7ef3d0253da6a7ec44c7b81d7f17da25 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Wed, 18 May 2016 23:55:17 +0100 Subject: [PATCH 17/33] Implement basic keyboard & mouse support for openvr overlays --- Engine/source/gui/controls/guiTextEditCtrl.h | 1 + Engine/source/gui/core/guiControl.h | 2 + .../platform/input/openVR/openVROverlay.cpp | 130 +++++++++++++++--- .../platform/input/openVR/openVROverlay.h | 6 + .../platform/input/openVR/openVRProvider.cpp | 23 +++- .../platform/input/openVR/openVRProvider.h | 26 ++++ 6 files changed, 167 insertions(+), 21 deletions(-) diff --git a/Engine/source/gui/controls/guiTextEditCtrl.h b/Engine/source/gui/controls/guiTextEditCtrl.h index 9d29038f7..15021ea5e 100644 --- a/Engine/source/gui/controls/guiTextEditCtrl.h +++ b/Engine/source/gui/controls/guiTextEditCtrl.h @@ -124,6 +124,7 @@ public: void invalidText(bool playSound = true); void validText(); bool isValidText(); + inline bool isPasswordText() { return mPasswordText; } bool isAllTextSelected(); void selectAllText(); diff --git a/Engine/source/gui/core/guiControl.h b/Engine/source/gui/core/guiControl.h index ca873878d..fa3a327dd 100644 --- a/Engine/source/gui/core/guiControl.h +++ b/Engine/source/gui/core/guiControl.h @@ -286,6 +286,8 @@ class GuiControl : public SimGroup const char * getConsoleCommand(); ///< Returns the name of the function bound to this GuiControl LangTable *getGUILangTable(void); const UTF8 *getGUIString(S32 id); + + inline String& getTooltip() { return mTooltip; } ///< Returns the tooltip /// @} diff --git a/Engine/source/platform/input/openVR/openVROverlay.cpp b/Engine/source/platform/input/openVR/openVROverlay.cpp index 24bede00b..25c345153 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.cpp +++ b/Engine/source/platform/input/openVR/openVROverlay.cpp @@ -12,6 +12,7 @@ #endif #include "postFx/postEffectCommon.h" +#include "gui/controls/guiTextEditCtrl.h" ImplementEnumType(OpenVROverlayType, "Desired overlay type for OpenVROverlay. .\n\n" @@ -32,6 +33,9 @@ OpenVROverlay::OpenVROverlay() mTrackingOrigin = vr::TrackingUniverseSeated; mTargetFormat = GFXFormatR8G8B8A8_LINEAR_FORCE; // needed for openvr! 
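The mouse handling added later in this diff maps OpenVR's normalised overlay mouse coordinates (0..1, with a bottom-left origin, as the 1.0 - y flip further down implies) onto GUI pixel coordinates. A minimal sketch of that mapping, using the stock Point2F/Point2I math types; the helper name is illustrative only and not part of the patch:

// Sketch: convert a normalised OpenVR overlay mouse position into Torque GUI
// pixel coordinates. 'extent' is the overlay control's extent in pixels.
static Point2I overlayMouseToGuiPixels(const Point2F &vrMouse, const Point2I &extent)
{
   // OpenVR reports u/v in [0,1] with the origin at the bottom-left; the GUI
   // origin is top-left, hence the vertical flip mirrored by the event handler.
   return Point2I(S32(extent.x * vrMouse.x), S32(extent.y * (1.0f - vrMouse.y)));
}
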
+ mManualMouseHandling = true; + + mMouseScale = Point2F(1, 1); } OpenVROverlay::~OpenVROverlay() @@ -75,7 +79,7 @@ void OpenVROverlay::initPersistFields() addProtectedField("transformDeviceComponent", TypeString, Offset(mTransformDeviceComponent, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Rotation of overlay."); - addProtectedField("inputMethod", TypeOpenVROverlayInputMethod, Offset(mTransformDeviceComponent, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, + addProtectedField("inputMethod", TypeOpenVROverlayInputMethod, Offset(mInputMethod, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Type of input method."); addProtectedField("mouseScale", TypePoint2F, Offset(mMouseScale, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Scale of mouse input."); @@ -86,6 +90,8 @@ void OpenVROverlay::initPersistFields() addProtectedField("controllerDevice", TypeS32, Offset(mControllerDeviceIndex, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Index of controller to attach overlay to."); + addField("manualMouseHandling", TypeBool, Offset(mManualMouseHandling, OpenVROverlay), "Forces openvr to create mouse events for overlay"); + Parent::initPersistFields(); } @@ -95,6 +101,12 @@ bool OpenVROverlay::onAdd() { mOverlayTypeDirty = true; mOverlayDirty = true; + + if (OPENVR) + { + OPENVR->registerOverlay(this); + } + return true; } @@ -114,6 +126,11 @@ void OpenVROverlay::onRemove() vr::VROverlay()->DestroyOverlay(mThumbOverlayHandle); mThumbOverlayHandle = NULL; } + + if (OPENVR) + { + OPENVR->unregisterOverlay(this); + } } void OpenVROverlay::resetOverlay() @@ -202,10 +219,10 @@ void OpenVROverlay::updateOverlay() overlay->SetOverlayWidthInMeters(mOverlayHandle, mOverlayWidth); // NOTE: if flags in openvr change, double check this - /*for (U32 i = vr::VROverlayFlags_None; i <= vr::VROverlayFlags_ShowTouchPadScrollWheel; i++) + for (U32 i = vr::VROverlayFlags_None; i <= vr::VROverlayFlags_ShowTouchPadScrollWheel; i++) { overlay->SetOverlayFlag(mOverlayHandle, (vr::VROverlayFlags)i, mOverlayFlags & (1 << i)); - }*/ + } mOverlayDirty = false; } @@ -216,11 +233,14 @@ void OpenVROverlay::showOverlay() if (mOverlayHandle == NULL) return; - vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle); - if (err != vr::VROverlayError_None) - { - Con::errorf("VR Overlay error!"); - } + if (mOverlayType != OVERLAYTYPE_DASHBOARD) + { + vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle); + if (err != vr::VROverlayError_None) + { + Con::errorf("VR Overlay error!"); + } + } if (!mStagingTexture) { @@ -233,7 +253,10 @@ void OpenVROverlay::hideOverlay() if (mOverlayHandle == NULL) return; - vr::VROverlay()->HideOverlay(mOverlayHandle); + if (mOverlayType != OVERLAYTYPE_DASHBOARD) + { + vr::VROverlay()->HideOverlay(mOverlayHandle); + } } @@ -294,21 +317,24 @@ bool OpenVROverlay::castRay(const Point3F &origin, const Point3F &direction, Ray vr::VROverlayIntersectionParams_t params; vr::VROverlayIntersectionResults_t result; + Point3F ovrOrigin = OpenVRUtil::convertPointToOVR(origin); + Point3F ovrDirection = OpenVRUtil::convertPointToOVR(direction); + params.eOrigin = mTrackingOrigin; - params.vSource.v[0] = origin.x; - params.vSource.v[1] = origin.y; - params.vSource.v[2] = origin.z; - params.vDirection.v[0] = direction.x; // TODO: need to transform this to vr-space - params.vDirection.v[1] = direction.y; - params.vDirection.v[2] = direction.z; + params.vSource.v[0] = ovrOrigin.x; + 
params.vSource.v[1] = ovrOrigin.y; + params.vSource.v[2] = ovrOrigin.z; + params.vDirection.v[0] = ovrDirection.x; + params.vDirection.v[1] = ovrDirection.y; + params.vDirection.v[2] = ovrDirection.z; bool rayHit = vr::VROverlay()->ComputeOverlayIntersection(mOverlayHandle, ¶ms, &result); if (rayHit && info) { info->t = result.fDistance; - info->point = Point3F(result.vPoint.v[0], result.vPoint.v[1], result.vPoint.v[2]); // TODO: need to transform this FROM vr-space - info->normal = Point3F(result.vNormal.v[0], result.vNormal.v[1], result.vNormal.v[2]); + info->point = OpenVRUtil::convertPointFromOVR(result.vPoint); // TODO: need to transform this FROM vr-space + info->normal = OpenVRUtil::convertPointFromOVR(result.vNormal); info->texCoord = Point2F(result.vUVs.v[0], result.vUVs.v[1]); info->object = NULL; info->userData = this; @@ -324,6 +350,19 @@ void OpenVROverlay::moveGamepadFocusToNeighbour() void OpenVROverlay::handleOpenVREvents() { + if (mManualMouseHandling) + { + // tell OpenVR to make some events for us + for (vr::TrackedDeviceIndex_t unDeviceId = 1; unDeviceId < vr::k_unControllerStateAxisCount; unDeviceId++) + { + if (vr::VROverlay()->HandleControllerOverlayInteractionAsMouse(mOverlayHandle, unDeviceId)) + { + break; + } + } + } + + vr::VREvent_t vrEvent; while (vr::VROverlay()->PollNextOverlayEvent(mOverlayHandle, &vrEvent, sizeof(vrEvent))) { @@ -334,20 +373,23 @@ void OpenVROverlay::handleOpenVREvents() eventInfo.modifier = (InputModifiers)0; eventInfo.ascii = 0; + Con::printf("Overlay event %i", vrEvent.eventType); + switch (vrEvent.eventType) { case vr::VREvent_MouseMove: { + Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y); eventInfo.objType = SI_AXIS; eventInfo.objInst = SI_XAXIS; eventInfo.action = SI_MAKE; - eventInfo.fValue = vrEvent.data.mouse.x; + eventInfo.fValue = getExtent().x * vrEvent.data.mouse.x; processMouseEvent(eventInfo); eventInfo.objType = SI_AXIS; eventInfo.objInst = SI_YAXIS; eventInfo.action = SI_MAKE; - eventInfo.fValue = vrEvent.data.mouse.y; + eventInfo.fValue = getExtent().y * (1.0 - vrEvent.data.mouse.y); processMouseEvent(eventInfo); } break; @@ -381,7 +423,13 @@ void OpenVROverlay::handleOpenVREvents() case vr::VREvent_Quit: AssertFatal(false, "WTF is going on here"); break; - } + + case vr::VREvent_KeyboardCharInput: + case vr::VREvent_KeyboardDone: + updateTextControl((GuiControl*)vrEvent.data.keyboard.uUserValue); + break; + } + } if (mThumbOverlayHandle != vr::k_ulOverlayHandleInvalid) @@ -400,6 +448,20 @@ void OpenVROverlay::handleOpenVREvents() } } +void OpenVROverlay::updateTextControl(GuiControl* ctrl) +{ + if (!ctrl) + return; + + GuiTextCtrl* textCtrl = dynamic_cast(ctrl); + if (textCtrl) + { + char text[GuiTextCtrl::MAX_STRING_LENGTH]; + vr::VROverlay()->GetKeyboardText(text, GuiTextCtrl::MAX_STRING_LENGTH); + textCtrl->setText(text); + } +} + void OpenVROverlay::onFrameRendered() { vr::IVROverlay *overlay = vr::VROverlay(); @@ -444,6 +506,34 @@ void OpenVROverlay::onFrameRendered() //Con::printf("Overlay visible ? %s", vr::VROverlay()->IsOverlayVisible(mOverlayHandle) ? "YES" : "NO"); } +void OpenVROverlay::enableKeyboardTranslation() +{ + vr::IVROverlay *overlay = vr::VROverlay(); + if (!overlay || !mOverlayHandle) + return; + + GuiTextEditCtrl* ctrl = dynamic_cast(getFirstResponder()); + if (ctrl) + { + vr::EGamepadTextInputMode inputMode = ctrl->isPasswordText() ? 
vr::k_EGamepadTextInputModePassword : vr::k_EGamepadTextInputModeNormal; + char text[GuiTextCtrl::MAX_STRING_LENGTH + 1]; + ctrl->getText(text); + overlay->ShowKeyboardForOverlay(mOverlayHandle, inputMode, vr::k_EGamepadTextInputLineModeSingleLine, ctrl->getTooltip().c_str(), GuiTextCtrl::MAX_STRING_LENGTH, text, false, (uint64_t)ctrl); + } +} + +void OpenVROverlay::disableKeyboardTranslation() +{ + vr::IVROverlay *overlay = vr::VROverlay(); + if (!overlay || !mOverlayHandle) + return; + + overlay->HideKeyboard(); +} + +void OpenVROverlay::setNativeAcceleratorsEnabled(bool enabled) +{ +} DefineEngineMethod(OpenVROverlay, showOverlay, void, (), , "") { diff --git a/Engine/source/platform/input/openVR/openVROverlay.h b/Engine/source/platform/input/openVR/openVROverlay.h index 6998f3423..faee66b83 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.h +++ b/Engine/source/platform/input/openVR/openVROverlay.h @@ -57,6 +57,7 @@ public: bool mOverlayTypeDirty; ///< Overlay type is dirty bool mOverlayDirty; ///< Overlay properties are dirty + bool mManualMouseHandling; OverlayType mOverlayType; // @@ -89,7 +90,12 @@ public: void moveGamepadFocusToNeighbour(); void handleOpenVREvents(); + void updateTextControl(GuiControl* ctrl); void onFrameRendered(); + + virtual void enableKeyboardTranslation(); + virtual void disableKeyboardTranslation(); + virtual void setNativeAcceleratorsEnabled(bool enabled); }; typedef OpenVROverlay::OverlayType OpenVROverlayType; diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 79fed71a8..2061403e7 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -1,4 +1,5 @@ #include "platform/input/openVR/openVRProvider.h" +#include "platform/input/openVR/openVROverlay.h" #include "platform/platformInput.h" #include "core/module.h" #include "console/engineAPI.h" @@ -547,7 +548,7 @@ void OpenVRProvider::buildInputCodeTable() bool OpenVRProvider::process() { if (!mHMD) - return true; + return true; if (!vr::VRCompositor()) return true; @@ -559,6 +560,12 @@ bool OpenVRProvider::process() processVREvent(event); } + // process overlay events + for (U32 i = 0; i < mOverlays.size(); i++) + { + mOverlays[i]->handleOpenVREvents(); + } + // Process SteamVR controller state for (vr::TrackedDeviceIndex_t unDevice = 0; unDevice < vr::k_unMaxTrackedDeviceCount; unDevice++) { @@ -1014,6 +1021,20 @@ void OpenVRProvider::resetSensors() } } +void OpenVRProvider::registerOverlay(OpenVROverlay* overlay) +{ + mOverlays.push_back(overlay); +} + +void OpenVRProvider::unregisterOverlay(OpenVROverlay* overlay) +{ + S32 index = mOverlays.find_next(overlay); + if (index != -1) + { + mOverlays.erase(index); + } +} + OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay() { return NULL; diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index de3a73e89..b690b0941 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -54,6 +54,24 @@ namespace OpenVRUtil void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat); U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton); + + /// Converts a point to OVR coords + inline Point3F convertPointToOVR(const Point3F &point) + { + return Point3F(-point.x, -point.z, point.y); + } + + /// Converts a point from OVR coords + inline 
Point3F convertPointFromOVR(const Point3F &point) + { + return Point3F(-point.x, point.z, -point.y); + } + + // Converts a point from OVR coords, from an input float array + inline Point3F convertPointFromOVR(const vr::HmdVector3_t& v) + { + return Point3F(-v.v[0], v.v[2], -v.v[1]); + } }; template class VRTextureSet @@ -199,6 +217,12 @@ public: void resetSensors(); /// } + /// @name Overlay registration + /// { + void registerOverlay(OpenVROverlay* overlay); + void unregisterOverlay(OpenVROverlay* overlay); + /// } + /// @name Console API /// { @@ -231,6 +255,8 @@ public: GFXAdapterLUID mLUID; vr::ETrackingUniverseOrigin mTrackingSpace; + + Vector mOverlays; /// } GuiCanvas* mDrawCanvas; From a83afa07ea880f8edaf405d05bf5f04ad7e72b13 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Thu, 19 May 2016 22:21:04 +0100 Subject: [PATCH 18/33] Fix blockiness when drawing to gui overlays using standard draw commands --- Engine/source/gfx/gfxDrawUtil.cpp | 1 + Engine/source/gfx/gfxFontRenderBatcher.cpp | 2 ++ 2 files changed, 3 insertions(+) diff --git a/Engine/source/gfx/gfxDrawUtil.cpp b/Engine/source/gfx/gfxDrawUtil.cpp index 42b146a10..d68b05e55 100644 --- a/Engine/source/gfx/gfxDrawUtil.cpp +++ b/Engine/source/gfx/gfxDrawUtil.cpp @@ -61,6 +61,7 @@ void GFXDrawUtil::_setupStateBlocks() bitmapStretchSR.setZReadWrite(false); bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); bitmapStretchSR.samplersDefined = true; + bitmapStretchSR.setColorWrites(true, true, true, false); // Linear: Create wrap SB bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getWrapLinear(); diff --git a/Engine/source/gfx/gfxFontRenderBatcher.cpp b/Engine/source/gfx/gfxFontRenderBatcher.cpp index a9761e7fc..84551506b 100644 --- a/Engine/source/gfx/gfxFontRenderBatcher.cpp +++ b/Engine/source/gfx/gfxFontRenderBatcher.cpp @@ -50,6 +50,8 @@ FontRenderBatcher::FontRenderBatcher() : mStorage(8096) // result in the text always being black. This may not be the case in OpenGL // so it may have to change. 
-bramage f.samplers[0].textureColorOp = GFXTOPAdd; + + f.setColorWrites(true, true, true, false); mFontSB = GFX->createStateBlock(f); } } From 2da474c484c3edeb4f94a983d165904076564d3f Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Thu, 19 May 2016 22:37:15 +0100 Subject: [PATCH 19/33] Always use the latest eye pose data from the HMD --- Engine/source/platform/input/openVR/openVRProvider.cpp | 10 +++++++++- Engine/source/platform/input/openVR/openVRProvider.h | 1 + 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 2061403e7..c9b5a33c7 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -329,6 +329,11 @@ void OpenVRRenderState::reset(vr::IVRSystem* hmd) if (!mHMD) return; + updateHMDProjection(); +} + +void OpenVRRenderState::updateHMDProjection() +{ vr::HmdMatrix34_t mat = mHMD->GetEyeToHeadTransform(vr::Eye_Left); mEyePose[0] = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(mat); mEyePose[0].inverse(); @@ -911,6 +916,9 @@ void OpenVRProvider::updateTrackedPoses() compositor->WaitGetPoses(mTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, NULL, 0); + // Make sure we're using the latest eye offset in case user has changed IPD + mHMDRenderState.updateHMDProjection(); + mValidPoseCount = 0; for (int nDevice = 0; nDevice < vr::k_unMaxTrackedDeviceCount; ++nDevice) @@ -924,7 +932,7 @@ void OpenVRProvider::updateTrackedPoses() if (nDevice == vr::k_unTrackedDeviceIndex_Hmd) { mHMDRenderState.mHMDPose = mat; - // jaeesu - store the last rotation for temp debugging + // jamesu - store the last rotation for temp debugging MatrixF torqueMat(1); OpenVRUtil::convertTransformFromOVR(mat, torqueMat); gLastMoveRot = AngAxisF(torqueMat); diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index b690b0941..ec7ca7100 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -135,6 +135,7 @@ struct OpenVRRenderState void renderPreview(); void reset(vr::IVRSystem* hmd); + void updateHMDProjection(); }; class OpenVRProvider : public IDisplayDevice, public IInputDevice From 694dd4abfae39f0e5fb330444fcbadbf5fbbf1d7 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Fri, 20 May 2016 00:31:32 +0100 Subject: [PATCH 20/33] Fix stack overflow --- Engine/source/gui/3d/guiTSControl.cpp | 2 -- 1 file changed, 2 deletions(-) diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 5175d4ac9..8e2fb3a25 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -618,14 +618,12 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) // Use the view matrix determined from the display device myTransforms[0] = mLastCameraQuery.eyeTransforms[0]; myTransforms[1] = mLastCameraQuery.eyeTransforms[1]; - myTransforms[2] = mLastCameraQuery.cameraMatrix; } else { // Use the view matrix determined from the control object myTransforms[0] = mLastCameraQuery.cameraMatrix; myTransforms[1] = mLastCameraQuery.cameraMatrix; - myTransforms[2] = mLastCameraQuery.cameraMatrix; QuatF qrot = mLastCameraQuery.cameraMatrix; Point3F pos = mLastCameraQuery.cameraMatrix.getPosition(); From 14628e39372dd26b0d0e081f5d36fd2d3e9a9809 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Fri, 20 May 2016 16:15:56 
+0100 Subject: [PATCH 21/33] Fix setNearFarDist for off-center projections --- Engine/source/math/util/frustum.cpp | 22 ++++++++++++++++++++-- 1 file changed, 20 insertions(+), 2 deletions(-) diff --git a/Engine/source/math/util/frustum.cpp b/Engine/source/math/util/frustum.cpp index bbcc16f83..181e140ef 100644 --- a/Engine/source/math/util/frustum.cpp +++ b/Engine/source/math/util/frustum.cpp @@ -214,8 +214,26 @@ void Frustum::setNearFarDist( F32 nearDist, F32 farDist ) return; // Recalculate the frustum. - MatrixF xfm( mTransform ); - set( mIsOrtho, getFov(), getAspectRatio(), nearDist, farDist, xfm ); + MatrixF xfm( mTransform ); + + const F32 CENTER_EPSILON = 0.01; + F32 centerX = mNearLeft + (mNearRight - mNearLeft) * 0.5; + F32 centerY = mNearBottom + (mNearTop - mNearBottom) * 0.5; + if ((centerX > CENTER_EPSILON || centerX < -CENTER_EPSILON) || (centerY > CENTER_EPSILON || centerY < -CENTER_EPSILON) ) + { + // Off-center projection, so re-calc use the new distances + FovPort expectedFovPort; + expectedFovPort.leftTan = -(mNearLeft / mNearDist); + expectedFovPort.rightTan = (mNearRight / mNearDist); + expectedFovPort.upTan = (mNearTop / mNearDist); + expectedFovPort.downTan = -(mNearBottom / mNearDist); + MathUtils::makeFovPortFrustum(this, mIsOrtho, nearDist, farDist, expectedFovPort); + } + else + { + // Projection is not off-center, use the normal code + set(mIsOrtho, getFov(), getAspectRatio(), nearDist, farDist, xfm); + } } //----------------------------------------------------------------------------- From c6d2456a7c10ebfc2a4529c476642ce45143755a Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 21 May 2016 13:46:20 +0100 Subject: [PATCH 22/33] Fix lens flares in VR --- Engine/source/T3D/lightFlareData.cpp | 71 ++++++++++++++---------- Engine/source/environment/scatterSky.cpp | 4 +- Engine/source/math/util/frustum.cpp | 2 +- 3 files changed, 44 insertions(+), 33 deletions(-) diff --git a/Engine/source/T3D/lightFlareData.cpp b/Engine/source/T3D/lightFlareData.cpp index 2fae80fa9..5ce430755 100644 --- a/Engine/source/T3D/lightFlareData.cpp +++ b/Engine/source/T3D/lightFlareData.cpp @@ -33,6 +33,7 @@ #include "gfx/gfxOcclusionQuery.h" #include "gfx/gfxDrawUtil.h" #include "gfx/gfxTextureManager.h" +#include "gfx/sim/debugDraw.h" #include "renderInstance/renderPassManager.h" #include "T3D/gameBase/gameConnection.h" #include "T3D/gameBase/processList.h" @@ -275,12 +276,10 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt // is on scren at all... if not then return // the last result. const Point3F &lightPos = flareState->lightMat.getPosition(); - const RectI &viewport = GFX->getViewport(); - MatrixF projMatrix; - state->getCameraFrustum().getProjectionMatrix(&projMatrix); - if( state->isReflectPass() ) - projMatrix = state->getSceneManager()->getNonClipProjection(); - bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), projMatrix ); + const RectI &viewport = RectI(Point2I(0, 0), GFX->getViewport().extent); + + MatrixF camProjMatrix = projMatrix = state->getSceneManager()->getNonClipProjection(); + bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), camProjMatrix ); // It is onscreen, so raycast as a simple occlusion test. 
const LightInfo *lightInfo = flareState->lightInfo; @@ -297,7 +296,7 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt // Always treat light as onscreen if using HOQ // it will be faded out if offscreen anyway. onScreen = true; - needsRaycast = false; + needsRaycast = false; // Test the hardware queries for rendered pixels. U32 pixels = 0, fullPixels = 0; @@ -400,63 +399,75 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt return lightVisible; } -void LightFlareData::prepRender( SceneRenderState *state, LightFlareState *flareState ) +void LightFlareData::prepRender(SceneRenderState *state, LightFlareState *flareState) { - PROFILE_SCOPE( LightFlareData_prepRender ); + PROFILE_SCOPE(LightFlareData_prepRender); const LightInfo *lightInfo = flareState->lightInfo; - if ( mIsZero( flareState->fullBrightness ) || - mIsZero( lightInfo->getBrightness() ) ) - return; + if (mIsZero(flareState->fullBrightness) || + mIsZero(lightInfo->getBrightness())) + return; // Figure out the element count to render. U32 elementCount = mElementCount; const bool isReflectPass = state->isReflectPass(); - if ( isReflectPass ) + if (isReflectPass) { // Then we don't render anything this pass. - if ( !mRenderReflectPass ) + if (!mRenderReflectPass) return; // Find the zero distance elements which make // up the corona of the light flare. elementCount = 0.0f; - for ( U32 i=0; i < mElementCount; i++ ) - if ( mIsZero( mElementDist[i] ) ) - elementCount++; + for (U32 i = 0; i < mElementCount; i++) + if (mIsZero(mElementDist[i])) + elementCount++; } // Better have something to render. - if ( elementCount == 0 ) + if (elementCount == 0) return; - + U32 visDelta = U32_MAX; F32 occlusionFade = 1.0f; Point3F lightPosSS; - bool lightVisible = _testVisibility( state, flareState, &visDelta, &occlusionFade, &lightPosSS ); - + bool lightVisible = _testVisibility(state, flareState, &visDelta, &occlusionFade, &lightPosSS); + + //DebugDrawer::get()->drawBox(flareState->lightMat.getPosition() + Point3F(-0.5, -0.5, -0.5) * 4, flareState->lightMat.getPosition() + Point3F(0.5, 0.5, 0.5) * 4, ColorI::BLUE); + // We can only skip rendering if the light is not // visible, and it has elapsed the fade out time. - if ( mIsZero( occlusionFade ) || - !lightVisible && visDelta > FadeOutTime ) + if (mIsZero(occlusionFade) || + !lightVisible && visDelta > FadeOutTime) return; const RectI &viewport = GFX->getViewport(); - Point3F oneOverViewportExtent( 1.0f / (F32)viewport.extent.x, 1.0f / (F32)viewport.extent.y, 0.0f ); + Point3F oneOverViewportExtent(1.0f / (F32)viewport.extent.x, 1.0f / (F32)viewport.extent.y, 0.0f); - // Really convert it to screen space. 
- lightPosSS.x -= viewport.point.x; - lightPosSS.y -= viewport.point.y; lightPosSS *= oneOverViewportExtent; - lightPosSS = ( lightPosSS * 2.0f ) - Point3F::One; + lightPosSS = (lightPosSS * 2.0f) - Point3F::One; lightPosSS.y = -lightPosSS.y; lightPosSS.z = 0.0f; + // Determine the center of the current projection so we can converge there + Point3F centerProj(0); + { + MatrixF camProjMatrix = state->getSceneManager()->getNonClipProjection(); + Point3F outCenterPos; + RectI centerViewport = RectI(Point2I(0, 0), viewport.extent); + MathUtils::mProjectWorldToScreen(Point3F(0,state->getSceneManager()->getNearClip(),0), &outCenterPos, centerViewport, MatrixF::Identity, camProjMatrix); + centerProj = outCenterPos; + centerProj *= oneOverViewportExtent; + centerProj = (centerProj * 2.0f) - Point3F::One; + centerProj.y = -centerProj.y; + centerProj.z = 0.0f; + } + // Take any projection offset into account so that the point where the flare's // elements converge is at the 'eye' point rather than the center of the viewport. - const Point2F& projOffset = state->getCameraFrustum().getProjectionOffset(); - Point3F flareVec( -lightPosSS + Point3F(projOffset.x, projOffset.y, 0.0f) ); + Point3F flareVec( centerProj - lightPosSS ); const F32 flareLength = flareVec.len(); if ( flareLength > 0.0f ) flareVec *= 1.0f / flareLength; diff --git a/Engine/source/environment/scatterSky.cpp b/Engine/source/environment/scatterSky.cpp index 7607246f4..9b25d71ea 100644 --- a/Engine/source/environment/scatterSky.cpp +++ b/Engine/source/environment/scatterSky.cpp @@ -667,11 +667,11 @@ void ScatterSky::prepRenderImage( SceneRenderState *state ) mFlareState.scale = mFlareScale; mFlareState.lightInfo = mLight; - Point3F lightPos = state->getCameraPosition() - state->getFarPlane() * mLight->getDirection() * 0.9f; + Point3F lightPos = state->getDiffuseCameraPosition() - state->getFarPlane() * mLight->getDirection() * 0.9f; mFlareState.lightMat.identity(); mFlareState.lightMat.setPosition( lightPos ); - F32 dist = ( lightPos - state->getCameraPosition( ) ).len( ); + F32 dist = ( lightPos - state->getDiffuseCameraPosition( ) ).len( ); F32 coronaScale = 0.5f; F32 screenRadius = GFX->getViewport( ).extent.y * coronaScale * 0.5f; mFlareState.worldRadius = screenRadius * dist / state->getWorldToScreenScale( ).y; diff --git a/Engine/source/math/util/frustum.cpp b/Engine/source/math/util/frustum.cpp index 181e140ef..bfb42a6bf 100644 --- a/Engine/source/math/util/frustum.cpp +++ b/Engine/source/math/util/frustum.cpp @@ -216,7 +216,7 @@ void Frustum::setNearFarDist( F32 nearDist, F32 farDist ) // Recalculate the frustum. 
MatrixF xfm( mTransform ); - const F32 CENTER_EPSILON = 0.01; + const F32 CENTER_EPSILON = 0.001; F32 centerX = mNearLeft + (mNearRight - mNearLeft) * 0.5; F32 centerY = mNearBottom + (mNearTop - mNearBottom) * 0.5; if ((centerX > CENTER_EPSILON || centerX < -CENTER_EPSILON) || (centerY > CENTER_EPSILON || centerY < -CENTER_EPSILON) ) From 784f6f92d887dc160ec885ca73f0d730406745f1 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 21 May 2016 19:52:41 +0100 Subject: [PATCH 23/33] Fix lens flare in side-by-side view --- Engine/source/T3D/lightFlareData.cpp | 2 +- Engine/source/gui/3d/guiTSControl.cpp | 1 + Engine/source/postFx/postEffectManager.h | 1 + Engine/source/scene/reflector.cpp | 5 +++++ Engine/source/scene/sceneManager.cpp | 13 +++++++++++++ 5 files changed, 21 insertions(+), 1 deletion(-) diff --git a/Engine/source/T3D/lightFlareData.cpp b/Engine/source/T3D/lightFlareData.cpp index 5ce430755..84058f6c8 100644 --- a/Engine/source/T3D/lightFlareData.cpp +++ b/Engine/source/T3D/lightFlareData.cpp @@ -277,8 +277,8 @@ bool LightFlareData::_testVisibility(const SceneRenderState *state, LightFlareSt // the last result. const Point3F &lightPos = flareState->lightMat.getPosition(); const RectI &viewport = RectI(Point2I(0, 0), GFX->getViewport().extent); + MatrixF camProjMatrix = state->getSceneManager()->getNonClipProjection(); - MatrixF camProjMatrix = projMatrix = state->getSceneManager()->getNonClipProjection(); bool onScreen = MathUtils::mProjectWorldToScreen( lightPos, outLightPosSS, viewport, GFX->getWorldMatrix(), camProjMatrix ); // It is onscreen, so raycast as a simple occlusion test. diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 8e2fb3a25..1bd5f154c 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -624,6 +624,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) // Use the view matrix determined from the control object myTransforms[0] = mLastCameraQuery.cameraMatrix; myTransforms[1] = mLastCameraQuery.cameraMatrix; + mLastCameraQuery.headMatrix = mLastCameraQuery.cameraMatrix; // override head QuatF qrot = mLastCameraQuery.cameraMatrix; Point3F pos = mLastCameraQuery.cameraMatrix.getPosition(); diff --git a/Engine/source/postFx/postEffectManager.h b/Engine/source/postFx/postEffectManager.h index 0ef72a586..f06e6b76a 100644 --- a/Engine/source/postFx/postEffectManager.h +++ b/Engine/source/postFx/postEffectManager.h @@ -127,6 +127,7 @@ public: const PFXFrameState &getFrameState() const { return mFrameState[mFrameStateSwitch]; } const PFXFrameState &getLastFrameState() const { return mFrameState[!mFrameStateSwitch]; } + void setFrameState(const PFXFrameState& newState) { mFrameState[mFrameStateSwitch] = newState; } void setFrameMatrices( const MatrixF &worldToCamera, const MatrixF &cameraToScreen ); // For ManagedSingleton. 
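Editorial note: the setFrameState() accessor added to PostEffectManager just above exists so the stereo render paths (see the reflector.cpp and sceneManager.cpp hunks that follow) can snapshot the post-effect frame state and the scene manager's non-clip projection before rendering each eye, then put both back afterwards. The sketch below only illustrates that save/restore pattern; the RAII helper itself is hypothetical and not part of the patch, while PFXMGR, PFXFrameState, gClientSceneGraph, getNonClipProjection() and setNonClipProjection() are taken from the surrounding hunks.

    // Hypothetical RAII guard showing the save/restore pattern used in
    // PlaneReflector::updateReflection() and SceneManager::renderScene().
    struct ScopedStereoFrameState
    {
       MatrixF       mSavedNonClipProjection;
       PFXFrameState mSavedPFXState;

       ScopedStereoFrameState()
          :  mSavedNonClipProjection( gClientSceneGraph->getNonClipProjection() ),
             mSavedPFXState( PFXMGR->getFrameState() )
       {
       }

       ~ScopedStereoFrameState()
       {
          // Undo whatever the per-eye passes wrote into the shared state.
          gClientSceneGraph->setNonClipProjection( mSavedNonClipProjection );
          PFXMGR->setFrameState( mSavedPFXState );
       }
    };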
diff --git a/Engine/source/scene/reflector.cpp b/Engine/source/scene/reflector.cpp index 951cce010..5993f0e26 100644 --- a/Engine/source/scene/reflector.cpp +++ b/Engine/source/scene/reflector.cpp @@ -39,6 +39,7 @@ #include "math/mathUtils.h" #include "math/util/frustum.h" #include "gfx/screenshot.h" +#include "postFx/postEffectManager.h" extern ColorI gCanvasClearColor; @@ -603,6 +604,8 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) { // Store previous values RectI originalVP = GFX->getViewport(); + MatrixF origNonClipProjection = gClientSceneGraph->getNonClipProjection(); + PFXFrameState origPFXState = PFXMGR->getFrameState(); const FovPort *currentFovPort = GFX->getStereoFovPort(); MatrixF inverseEyeTransforms[2]; @@ -655,6 +658,8 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) // Restore previous values GFX->setFrustum(gfxFrustum); GFX->setViewport(originalVP); + gClientSceneGraph->setNonClipProjection(origNonClipProjection); + PFXMGR->setFrameState(origPFXState); } else { diff --git a/Engine/source/scene/sceneManager.cpp b/Engine/source/scene/sceneManager.cpp index 5ed8f2669..53c8eb045 100644 --- a/Engine/source/scene/sceneManager.cpp +++ b/Engine/source/scene/sceneManager.cpp @@ -41,6 +41,8 @@ // For player object bounds workaround. #include "T3D/player.h" +#include "postFx/postEffectManager.h" + extern bool gEditingMission; @@ -239,6 +241,10 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S MatrixF originalWorld = GFX->getWorldMatrix(); Frustum originalFrustum = GFX->getFrustum(); + // Save PFX & SceneManager projections + MatrixF origNonClipProjection = renderState->getSceneManager()->getNonClipProjection(); + PFXFrameState origPFXState = PFXMGR->getFrameState(); + const FovPort *currentFovPort = GFX->getStereoFovPort(); const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms(); const MatrixF *worldEyeTransforms = GFX->getInverseStereoEyeTransforms(); @@ -255,7 +261,9 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S SceneCameraState cameraStateLeft = SceneCameraState::fromGFX(); SceneRenderState renderStateLeft( this, renderState->getScenePassType(), cameraStateLeft ); + renderStateLeft.getSceneManager()->setNonClipProjection(GFX->getProjectionMatrix()); renderStateLeft.setSceneRenderStyle(SRS_SideBySide); + PFXMGR->setFrameMatrices(GFX->getWorldMatrix(), GFX->getProjectionMatrix()); renderSceneNoLights( &renderStateLeft, objectMask, baseObject, baseZone ); // left @@ -274,7 +282,9 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S SceneCameraState cameraStateRight = SceneCameraState::fromGFX(); SceneRenderState renderStateRight( this, renderState->getScenePassType(), cameraStateRight ); + renderStateRight.getSceneManager()->setNonClipProjection(GFX->getProjectionMatrix()); renderStateRight.setSceneRenderStyle(SRS_SideBySide); + PFXMGR->setFrameMatrices(GFX->getWorldMatrix(), GFX->getProjectionMatrix()); renderSceneNoLights( &renderStateRight, objectMask, baseObject, baseZone ); // right @@ -283,6 +293,9 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S GFX->endField(); // Restore previous values + renderState->getSceneManager()->setNonClipProjection(origNonClipProjection); + PFXMGR->setFrameState(origPFXState); + GFX->setWorldMatrix(originalWorld); GFX->setFrustum(originalFrustum); GFX->setViewport(originalVP); From e6c89b1f240e13b0c935cc817d685498c86a8828 Mon Sep 17 00:00:00 2001 From: James Urquhart 
Date: Mon, 23 May 2016 00:45:19 +0100 Subject: [PATCH 24/33] Add basic rotation offset code --- .../platform/input/openVR/openVRProvider.cpp | 39 +++++++++++++++++++ .../platform/input/openVR/openVRProvider.h | 6 +++ 2 files changed, 45 insertions(+) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index c9b5a33c7..243cdb48a 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -106,6 +106,8 @@ namespace OpenVRUtil return KEY_BUTTON1; case vr::VRMouseButton_Middle: return KEY_BUTTON2; + default: + return KEY_NULL; } } @@ -224,6 +226,10 @@ U32 OpenVRProvider::OVR_AXISTRACKPAD[vr::k_unMaxTrackedDeviceCount] = { 0 }; U32 OpenVRProvider::OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount] = { 0 }; U32 OpenVRProvider::OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount] = { 0 }; +EulerF OpenVRProvider::smHMDRotOffset(0); +F32 OpenVRProvider::smHMDmvYaw = 0; +F32 OpenVRProvider::smHMDmvPitch = 0; + static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL) { uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError); @@ -384,6 +390,13 @@ void OpenVRProvider::staticInit() Con::setIntVariable("$OpenVR::OverlayFlags_SendVRScrollEvents", 1 << (U32)vr::VROverlayFlags_SendVRScrollEvents); Con::setIntVariable("$OpenVR::OverlayFlags_SendVRTouchpadEvents", 1 << (U32)vr::VROverlayFlags_SendVRTouchpadEvents); Con::setIntVariable("$OpenVR::OverlayFlags_ShowTouchPadScrollWheel", 1 << (U32)vr::VROverlayFlags_ShowTouchPadScrollWheel); + + Con::addVariable("$OpenVR::HMDRotOffsetX", TypeF32, &smHMDRotOffset.x); + Con::addVariable("$OpenVR::HMDRotOffsetY", TypeF32, &smHMDRotOffset.y); + Con::addVariable("$OpenVR::HMDRotOffsetZ", TypeF32, &smHMDRotOffset.z); + + Con::addVariable("$OpenVR::HMDmvYaw", TypeF32, &smHMDmvYaw); + Con::addVariable("$OpenVR::HMDmvPitch", TypeF32, &smHMDmvPitch); } bool OpenVRProvider::enable() @@ -558,6 +571,22 @@ bool OpenVRProvider::process() if (!vr::VRCompositor()) return true; + // Update HMD rotation offset + smHMDRotOffset.z += smHMDmvYaw; + smHMDRotOffset.x += smHMDmvPitch; + + while (smHMDRotOffset.x < -M_PI_F) + smHMDRotOffset.x += M_2PI_F; + while (smHMDRotOffset.x > M_PI_F) + smHMDRotOffset.x -= M_2PI_F; + while (smHMDRotOffset.z < -M_PI_F) + smHMDRotOffset.z += M_2PI_F; + while (smHMDRotOffset.z > M_PI_F) + smHMDRotOffset.z -= M_2PI_F; + + smHMDmvYaw = 0; + smHMDmvPitch = 0; + // Process SteamVR events vr::VREvent_t event; while (mHMD->PollNextEvent(&event, sizeof(event))) @@ -932,6 +961,16 @@ void OpenVRProvider::updateTrackedPoses() if (nDevice == vr::k_unTrackedDeviceIndex_Hmd) { mHMDRenderState.mHMDPose = mat; + MatrixF rotOffset(1); + EulerF localRot(-smHMDRotOffset.x, -smHMDRotOffset.z, smHMDRotOffset.y); + + // NOTE: offsetting before is probably the best we're going to be able to do here, since if we apply the matrix AFTER + // we will get correct movements relative to the camera HOWEVER this also distorts any future movements from the HMD since + // we will then be on a really weird rotation axis. 
+ QuatF(localRot).setMatrix(&rotOffset); + rotOffset.inverse(); + mHMDRenderState.mHMDPose = rotOffset * mHMDRenderState.mHMDPose; + // jamesu - store the last rotation for temp debugging MatrixF torqueMat(1); OpenVRUtil::convertTransformFromOVR(mat, torqueMat); diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index ec7ca7100..5b0e908a1 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -278,6 +278,12 @@ public: static U32 OVR_AXISJOYSTICK[vr::k_unMaxTrackedDeviceCount]; static U32 OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount]; + /// @name HMD Rotation offset + /// { + static EulerF smHMDRotOffset; + static F32 smHMDmvYaw; + static F32 smHMDmvPitch; + /// } public: // For ManagedSingleton. From 660bd8d3479685c95ba428b8764ed987540c90fb Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Thu, 26 May 2016 23:50:27 +0100 Subject: [PATCH 25/33] Allow gamepad to rotate openvr view. Also Fix issue with movemanager openvr rotation not being set correctly. --- Engine/source/platform/input/openVR/openVRProvider.cpp | 10 +++++++++- Engine/source/platform/input/openVR/openVRProvider.h | 1 + 2 files changed, 10 insertions(+), 1 deletion(-) diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index 243cdb48a..fdf687afd 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -229,6 +229,7 @@ U32 OpenVRProvider::OVR_AXISTRIGGER[vr::k_unMaxTrackedDeviceCount] = { 0 }; EulerF OpenVRProvider::smHMDRotOffset(0); F32 OpenVRProvider::smHMDmvYaw = 0; F32 OpenVRProvider::smHMDmvPitch = 0; +bool OpenVRProvider::smRotateYawWithMoveActions = false; static String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL) { @@ -397,6 +398,8 @@ void OpenVRProvider::staticInit() Con::addVariable("$OpenVR::HMDmvYaw", TypeF32, &smHMDmvYaw); Con::addVariable("$OpenVR::HMDmvPitch", TypeF32, &smHMDmvPitch); + + Con::addVariable("$OpenVR::HMDRotateYawWithMoveActions", TypeBool, &smRotateYawWithMoveActions); } bool OpenVRProvider::enable() @@ -571,6 +574,11 @@ bool OpenVRProvider::process() if (!vr::VRCompositor()) return true; + if (smRotateYawWithMoveActions) + { + smHMDmvYaw += MoveManager::mRightAction - MoveManager::mLeftAction + MoveManager::mXAxis_L; + } + // Update HMD rotation offset smHMDRotOffset.z += smHMDmvYaw; smHMDRotOffset.x += smHMDmvPitch; @@ -969,7 +977,7 @@ void OpenVRProvider::updateTrackedPoses() // we will then be on a really weird rotation axis. 
QuatF(localRot).setMatrix(&rotOffset); rotOffset.inverse(); - mHMDRenderState.mHMDPose = rotOffset * mHMDRenderState.mHMDPose; + mHMDRenderState.mHMDPose = mat = rotOffset * mHMDRenderState.mHMDPose; // jamesu - store the last rotation for temp debugging MatrixF torqueMat(1); diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index 5b0e908a1..4080f1eac 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -283,6 +283,7 @@ public: static EulerF smHMDRotOffset; static F32 smHMDmvYaw; static F32 smHMDmvPitch; + static bool smRotateYawWithMoveActions; /// } public: From 0ac3d95cb95d3a54d2552c4ef8f46d8ca9f783b2 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 4 Jun 2016 12:21:38 +0100 Subject: [PATCH 26/33] Add more outline functions to DebugDraw --- Engine/source/gfx/sim/debugDraw.cpp | 71 +++++++++++++++++++++++++++++ Engine/source/gfx/sim/debugDraw.h | 3 ++ 2 files changed, 74 insertions(+) diff --git a/Engine/source/gfx/sim/debugDraw.cpp b/Engine/source/gfx/sim/debugDraw.cpp index b31a6925e..2625d1ebe 100644 --- a/Engine/source/gfx/sim/debugDraw.cpp +++ b/Engine/source/gfx/sim/debugDraw.cpp @@ -139,6 +139,77 @@ void DebugDrawer::setupStateBlocks() mRenderAlpha = GFX->createStateBlock(d); } +void DebugDrawer::drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color) +{ + Point3F point0(a.x, a.y, a.z); + Point3F point1(a.x, b.y, a.z); + Point3F point2(b.x, b.y, a.z); + Point3F point3(b.x, a.y, a.z); + + Point3F point4(a.x, a.y, b.z); + Point3F point5(a.x, b.y, b.z); + Point3F point6(b.x, b.y, b.z); + Point3F point7(b.x, a.y, b.z); + + // Draw one plane + drawLine(point0, point1, color); + drawLine(point1, point2, color); + drawLine(point2, point3, color); + drawLine(point3, point0, color); + + // Draw the other plane + drawLine(point4, point5, color); + drawLine(point5, point6, color); + drawLine(point6, point7, color); + drawLine(point7, point4, color); + + // Draw the connecting corners + drawLine(point0, point4, color); + drawLine(point1, point5, color); + drawLine(point2, point6, color); + drawLine(point3, point7, color); +} + +void DebugDrawer::drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform) +{ + Point3F point0(a.x, a.y, a.z); + Point3F point1(a.x, b.y, a.z); + Point3F point2(b.x, b.y, a.z); + Point3F point3(b.x, a.y, a.z); + + Point3F point4(a.x, a.y, b.z); + Point3F point5(a.x, b.y, b.z); + Point3F point6(b.x, b.y, b.z); + Point3F point7(b.x, a.y, b.z); + + transform.mulP(point0); + transform.mulP(point1); + transform.mulP(point2); + transform.mulP(point3); + transform.mulP(point4); + transform.mulP(point5); + transform.mulP(point6); + transform.mulP(point7); + + // Draw one plane + drawLine(point0, point1, color); + drawLine(point1, point2, color); + drawLine(point2, point3, color); + drawLine(point3, point0, color); + + // Draw the other plane + drawLine(point4, point5, color); + drawLine(point5, point6, color); + drawLine(point6, point7, color); + drawLine(point7, point4, color); + + // Draw the connecting corners + drawLine(point0, point4, color); + drawLine(point1, point5, color); + drawLine(point2, point6, color); + drawLine(point3, point7, color); +} + void DebugDrawer::render() { #ifdef ENABLE_DEBUGDRAW diff --git a/Engine/source/gfx/sim/debugDraw.h b/Engine/source/gfx/sim/debugDraw.h index a07f52ca4..bfc2b6547 100644 --- 
a/Engine/source/gfx/sim/debugDraw.h +++ b/Engine/source/gfx/sim/debugDraw.h @@ -120,6 +120,9 @@ public: /// /// @{ + void drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f, 1.0f, 1.0f)); + void drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform); + void drawBox(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); void drawTri(const Point3F &a, const Point3F &b, const Point3F &c, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); From 1b2abbeaaaff1e641c211c66f4c307ddbf1f5351 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 4 Jun 2016 12:22:57 +0100 Subject: [PATCH 27/33] Allow DebugDraw not to flush the draw queue --- Engine/source/gfx/sim/debugDraw.cpp | 4 ++-- Engine/source/gfx/sim/debugDraw.h | 4 +++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/Engine/source/gfx/sim/debugDraw.cpp b/Engine/source/gfx/sim/debugDraw.cpp index 2625d1ebe..8a591fc26 100644 --- a/Engine/source/gfx/sim/debugDraw.cpp +++ b/Engine/source/gfx/sim/debugDraw.cpp @@ -210,7 +210,7 @@ void DebugDrawer::drawTransformedBoxOutline(const Point3F &a, const Point3F &b, drawLine(point3, point7, color); } -void DebugDrawer::render() +void DebugDrawer::render(bool clear) { #ifdef ENABLE_DEBUGDRAW if(!isDrawing) @@ -335,7 +335,7 @@ void DebugDrawer::render() shouldToggleFreeze = false; } - if(p->dieTime <= curTime && !isFrozen && p->dieTime != U32_MAX) + if(clear && p->dieTime <= curTime && !isFrozen && p->dieTime != U32_MAX) { *walk = p->next; mPrimChunker.free(p); diff --git a/Engine/source/gfx/sim/debugDraw.h b/Engine/source/gfx/sim/debugDraw.h index bfc2b6547..ddaba1164 100644 --- a/Engine/source/gfx/sim/debugDraw.h +++ b/Engine/source/gfx/sim/debugDraw.h @@ -105,7 +105,9 @@ public: static void init(); /// Called globally to render debug draw state. Also does state updates. - void render(); + void render(bool clear=true); + + bool willDraw() { return isDrawing && mHead; } void toggleFreeze() { shouldToggleFreeze = true; }; void toggleDrawing() From de48afc00c5461580134b6fab4811ad1d3fef387 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 4 Jun 2016 12:24:26 +0100 Subject: [PATCH 28/33] USe correct frustum projections in reflections in separate rt mode --- Engine/source/scene/reflectionManager.cpp | 23 ++++++++++++++++------ Engine/source/scene/reflector.cpp | 24 +++++++++++++++-------- 2 files changed, 33 insertions(+), 14 deletions(-) diff --git a/Engine/source/scene/reflectionManager.cpp b/Engine/source/scene/reflectionManager.cpp index 5536b4fa5..de70008af 100644 --- a/Engine/source/scene/reflectionManager.cpp +++ b/Engine/source/scene/reflectionManager.cpp @@ -28,6 +28,7 @@ #include "console/consoleTypes.h" #include "core/tAlgorithm.h" #include "math/mMathFn.h" +#include "math/mathUtils.h" #include "T3D/gameBase/gameConnection.h" #include "ts/tsShapeInstance.h" #include "gui/3d/guiTSControl.h" @@ -134,12 +135,22 @@ void ReflectionManager::update( F32 timeSlice, // Setup a culler for testing the // visibility of reflectors. 
Frustum culler; - culler.set( false, - query.fov, - (F32)resolution.x / (F32)resolution.y, - query.nearPlane, - query.farPlane, - query.cameraMatrix ); + + S32 stereoTarget = GFX->getCurrentStereoTarget(); + if (stereoTarget != -1) + { + MathUtils::makeFovPortFrustum(&culler, false, query.nearPlane, query.farPlane, query.fovPort[stereoTarget]); + } + else + { + culler.set(false, + query.fov, + (F32)resolution.x / (F32)resolution.y, + query.nearPlane, + query.farPlane, + query.cameraMatrix); + } + // Manipulate the frustum for tiled screenshots const bool screenShotMode = gScreenShot && gScreenShot->isPending(); diff --git a/Engine/source/scene/reflector.cpp b/Engine/source/scene/reflector.cpp index 5993f0e26..9c85a6ac7 100644 --- a/Engine/source/scene/reflector.cpp +++ b/Engine/source/scene/reflector.cpp @@ -548,16 +548,24 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) // store current matrices GFXTransformSaver saver; - - Point2I viewport(params.viewportExtent); - if(GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide) - { - viewport.x *= 0.5f; - } - F32 aspectRatio = F32( viewport.x ) / F32( viewport.y ); Frustum frustum; - frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane); + + S32 stereoTarget = GFX->getCurrentStereoTarget(); + if (stereoTarget != -1) + { + MathUtils::makeFovPortFrustum(&frustum, false, params.query->nearPlane, params.query->farPlane, params.query->fovPort[stereoTarget]); + } + else + { + Point2I viewport(params.viewportExtent); + if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide) + { + viewport.x *= 0.5f; + } + F32 aspectRatio = F32(viewport.x) / F32(viewport.y); + frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane); + } // Manipulate the frustum for tiled screenshots const bool screenShotMode = gScreenShot && gScreenShot->isPending(); From fa7697b13ee3c5646a7e45ff3ca5aef727b01b15 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Sat, 4 Jun 2016 12:26:31 +0100 Subject: [PATCH 29/33] Fix debug draw in SBS mode & reflection update timing --- Engine/source/gui/3d/guiTSControl.cpp | 48 +++++++++++++++++++-------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 1bd5f154c..1f7ddaa7e 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -358,19 +358,6 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) GFXTransformSaver saver; Point2I renderSize = viewport.extent; - if (mReflectPriority > 0) - { - // Get the total reflection priority. - F32 totalPriority = 0; - for (U32 i = 0; i < smAwakeTSCtrls.size(); i++) - if (smAwakeTSCtrls[i]->isVisible()) - totalPriority += smAwakeTSCtrls[i]->mReflectPriority; - - REFLECTMGR->update(mReflectPriority / totalPriority, - getExtent(), - mLastCameraQuery); - } - if (mForceFOV != 0) mLastCameraQuery.fov = mDegToRad(mForceFOV); @@ -380,6 +367,19 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) mLastCameraQuery.cameraMatrix.mul(rotMat); } + if (mReflectPriority > 0) + { + // Get the total reflection priority. 
+ F32 totalPriority = 0; + for (U32 i = 0; i < smAwakeTSCtrls.size(); i++) + if (smAwakeTSCtrls[i]->isVisible()) + totalPriority += smAwakeTSCtrls[i]->mReflectPriority; + + REFLECTMGR->update(mReflectPriority / totalPriority, + renderSize, + mLastCameraQuery); + } + GFX->setViewport(viewport); // Clear the zBuffer so GUI doesn't hose object rendering accidentally @@ -423,7 +423,27 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) PFXMGR->setFrameMatrices(mSaveModelview, mSaveProjection); renderWorld(viewport); - DebugDrawer::get()->render(); + + DebugDrawer* debugDraw = DebugDrawer::get(); + if (mRenderStyle == RenderStyleStereoSideBySide && debugDraw->willDraw()) + { + // For SBS we need to render over each viewport + Frustum frustum; + + GFX->setViewport(mLastCameraQuery.stereoViewports[0]); + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); + GFX->setFrustum(frustum); + debugDraw->render(false); + + GFX->setViewport(mLastCameraQuery.stereoViewports[1]); + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); + GFX->setFrustum(frustum); + debugDraw->render(); + } + else + { + debugDraw->render(); + } // Render the canvas overlay if its available if (mStereoCanvas.getPointer() && mStereoGuiTarget.getPointer() && mStereoCanvas->size() != 0) From 1198932e87ec863d7d8725585e6ec44233af0068 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Wed, 8 Jun 2016 22:50:10 +0100 Subject: [PATCH 30/33] Basic fix for reflections in both stereo rendering modes --- Engine/source/gui/3d/guiTSControl.cpp | 123 ++++------------------ Engine/source/gui/3d/guiTSControl.h | 2 +- Engine/source/scene/reflectionManager.cpp | 56 ++++++++-- Engine/source/scene/reflector.cpp | 111 +++++++++++++------ Engine/source/scene/reflector.h | 9 +- Engine/source/scene/sceneManager.cpp | 4 +- 6 files changed, 150 insertions(+), 155 deletions(-) diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 1f7ddaa7e..36ae70338 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -353,10 +353,12 @@ static FovPort CalculateFovPortForCanvas(const RectI viewport, const CameraQuery return fovPort; } -void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) +void GuiTSCtrl::_internalRender(RectI guiViewport, RectI renderViewport, Frustum &frustum) { GFXTransformSaver saver; - Point2I renderSize = viewport.extent; + Point2I renderSize = renderViewport.extent; + GFXTarget *origTarget = GFX->getActiveRenderTarget(); + S32 origStereoTarget = GFX->getCurrentStereoTarget(); if (mForceFOV != 0) mLastCameraQuery.fov = mDegToRad(mForceFOV); @@ -380,7 +382,9 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) mLastCameraQuery); } - GFX->setViewport(viewport); + GFX->setActiveRenderTarget(origTarget); + GFX->setCurrentStereoTarget(origStereoTarget); + GFX->setViewport(renderViewport); // Clear the zBuffer so GUI doesn't hose object rendering accidentally GFX->clear(GFXClearZBuffer, ColorI(20, 20, 20), 1.0f, 0); @@ -410,7 +414,7 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) mSaveProjection = GFX->getProjectionMatrix(); mSaveModelview = GFX->getWorldMatrix(); - mSaveViewport = viewport; + mSaveViewport = guiViewport; mSaveWorldToScreenScale = GFX->getWorldToScreenScale(); mSaveFrustum = GFX->getFrustum(); 
mSaveFrustum.setTransform(mLastCameraQuery.cameraMatrix); @@ -422,7 +426,7 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) // Give the post effect manager the worldToCamera, and cameraToScreen matrices PFXMGR->setFrameMatrices(mSaveModelview, mSaveProjection); - renderWorld(viewport); + renderWorld(guiViewport); DebugDrawer* debugDraw = DebugDrawer::get(); if (mRenderStyle == RenderStyleStereoSideBySide && debugDraw->willDraw()) @@ -445,97 +449,6 @@ void GuiTSCtrl::_internalRender(RectI viewport, Frustum &frustum) debugDraw->render(); } - // Render the canvas overlay if its available - if (mStereoCanvas.getPointer() && mStereoGuiTarget.getPointer() && mStereoCanvas->size() != 0) - { - GFXDEBUGEVENT_SCOPE(StereoGui_Render, ColorI(255, 0, 0)); - MatrixF proj(1); - - Frustum originalFrustum = frustum; - GFXTextureObject *texObject = mStereoGuiTarget->getTexture(0); - const FovPort *currentFovPort = GFX->getStereoFovPort(); - const MatrixF *eyeTransforms = GFX->getStereoEyeTransforms(); - const Point3F *eyeOffset = GFX->getStereoEyeOffsets(); - Frustum gfxFrustum = originalFrustum; - - GFX->setClipRect(viewport); - GFX->setViewport(viewport); - GFX->setFrustum(frustum); - - MatrixF eyeWorldTrans(1); - if (mLastCameraQuery.currentEye != -1) - { - eyeWorldTrans.setPosition(Point3F(eyeOffset[mLastCameraQuery.currentEye].x, eyeOffset[mLastCameraQuery.currentEye].y, eyeOffset[mLastCameraQuery.currentEye].z)); - } - MatrixF eyeWorld(1); - eyeWorld.mul(eyeWorldTrans); - eyeWorld.inverse(); - - GFX->setWorldMatrix(eyeWorld); - GFX->setViewMatrix(MatrixF::Identity); - - if (!mStereoOverlayVB.getPointer()) - { - mStereoOverlayVB.set(GFX, 4, GFXBufferTypeStatic); - GFXVertexPCT *verts = mStereoOverlayVB.lock(0, 4); - - F32 texLeft = 0.0f; - F32 texRight = 1.0f; - F32 texTop = 1.0f; - F32 texBottom = 0.0f; - - F32 rectRatio = gfxFrustum.getWidth() / gfxFrustum.getHeight(); - F32 rectWidth = gfxFrustum.getWidth() * TS_OVERLAY_SCREEN_WIDTH; - F32 rectHeight = rectWidth * rectRatio; - - F32 screenLeft = -rectWidth * 0.5; - F32 screenRight = rectWidth * 0.5; - F32 screenTop = -rectHeight * 0.5; - F32 screenBottom = rectHeight * 0.5; - - const F32 fillConv = 0.0f; - const F32 frustumDepthAdjusted = gfxFrustum.getNearDist() + 0.012; - verts[0].point.set(screenLeft - fillConv, frustumDepthAdjusted, screenTop - fillConv); - verts[1].point.set(screenRight - fillConv, frustumDepthAdjusted, screenTop - fillConv); - verts[2].point.set(screenLeft - fillConv, frustumDepthAdjusted, screenBottom - fillConv); - verts[3].point.set(screenRight - fillConv, frustumDepthAdjusted, screenBottom - fillConv); - - verts[0].color = verts[1].color = verts[2].color = verts[3].color = ColorI(255, 255, 255, 255); - - verts[0].texCoord.set(texLeft, texTop); - verts[1].texCoord.set(texRight, texTop); - verts[2].texCoord.set(texLeft, texBottom); - verts[3].texCoord.set(texRight, texBottom); - - mStereoOverlayVB.unlock(); - } - - if (!mStereoGuiSB.getPointer()) - { - // DrawBitmapStretchSR - GFXStateBlockDesc bitmapStretchSR; - bitmapStretchSR.setCullMode(GFXCullNone); - bitmapStretchSR.setZReadWrite(false, false); - bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); - bitmapStretchSR.samplersDefined = true; - - bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getClampLinear(); - bitmapStretchSR.samplers[0].minFilter = GFXTextureFilterPoint; - bitmapStretchSR.samplers[0].mipFilter = GFXTextureFilterPoint; - bitmapStretchSR.samplers[0].magFilter = GFXTextureFilterPoint; - - mStereoGuiSB = 
GFX->createStateBlock(bitmapStretchSR); - } - - GFX->setPrimitiveBuffer(NULL); - GFX->setVertexBuffer(mStereoOverlayVB); - GFX->setStateBlock(mStereoGuiSB); - GFX->setTexture(0, texObject); - GFX->setupGenericShaders(GFXDevice::GSModColorTexture); - GFX->drawPrimitive(GFXTriangleStrip, 0, 2); - } - - saver.restore(); } @@ -659,7 +572,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) // Allow render size to originate from the render target if (mLastCameraQuery.stereoTargets[0]) { - renderSize = mLastCameraQuery.stereoViewports[0].extent; + renderSize = mLastCameraQuery.stereoTargets[0]->getSize(); renderingToTarget = true; } @@ -667,7 +580,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); GFX->activateStereoTarget(-1); - _internalRender(RectI(updateRect.point, updateRect.extent), frustum); + _internalRender(RectI(updateRect.point, updateRect.extent), RectI(Point2I(0,0), renderSize), frustum); // Notify device we've rendered the right, thus the last stereo frame. GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); @@ -726,17 +639,21 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) frustum.update(); GFX->activateStereoTarget(0); mLastCameraQuery.currentEye = 0; - _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); + GFX->beginField(); + _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered); + GFX->endField(); // Right GFX->activateStereoTarget(1); mLastCameraQuery.currentEye = 1; MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); mLastCameraQuery.cameraMatrix = myTransforms[1]; - frustum.update(); - _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), frustum); - GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); + frustum.update(); + GFX->beginField(); + _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); + GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); + GFX->endField(); mLastCameraQuery.cameraMatrix = origMatrix; @@ -805,7 +722,7 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) tempRect.point.y = screensize.y - (tempRect.point.y + tempRect.extent.y); #endif - _internalRender(tempRect, frustum); + _internalRender(tempRect, tempRect, frustum); } // TODO: Some render to sort of overlay system? 
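Editorial note: the guiTSControl.cpp hunk above re-renders the DebugDrawer queue once per eye in side-by-side mode, using each eye's FovPort and viewport, and only flushes the primitive list on the second pass (render(false) on the first). A compact sketch of that per-eye pass follows; the loop form is illustrative only and query stands in for mLastCameraQuery, while willDraw(), render(bool), MathUtils::makeFovPortFrustum(), stereoViewports and fovPort all come from the hunks above.

    // Illustrative per-eye debug-draw pass (the patch unrolls this loop).
    DebugDrawer *debugDraw = DebugDrawer::get();
    if ( debugDraw->willDraw() )
    {
       Frustum eyeFrustum;
       for ( U32 eye = 0; eye < 2; eye++ )
       {
          GFX->setViewport( query.stereoViewports[eye] );
          MathUtils::makeFovPortFrustum( &eyeFrustum, query.ortho,
                                         query.nearPlane, query.farPlane,
                                         query.fovPort[eye] );
          GFX->setFrustum( eyeFrustum );

          // Keep the primitive list around until the last eye has drawn it.
          debugDraw->render( eye == 1 );
       }
    }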
diff --git a/Engine/source/gui/3d/guiTSControl.h b/Engine/source/gui/3d/guiTSControl.h index a15b95b53..b35d2630a 100644 --- a/Engine/source/gui/3d/guiTSControl.h +++ b/Engine/source/gui/3d/guiTSControl.h @@ -123,7 +123,7 @@ public: GuiTSCtrl(); void onPreRender(); - void _internalRender(RectI viewport, Frustum &frustum); + void _internalRender(RectI guiViewport, RectI renderViewport, Frustum &frustum); void onRender(Point2I offset, const RectI &updateRect); virtual bool processCameraQuery(CameraQuery *query); diff --git a/Engine/source/scene/reflectionManager.cpp b/Engine/source/scene/reflectionManager.cpp index de70008af..b9e146477 100644 --- a/Engine/source/scene/reflectionManager.cpp +++ b/Engine/source/scene/reflectionManager.cpp @@ -95,9 +95,9 @@ ReflectionManager::ReflectionManager() void ReflectionManager::initConsole() { Con::addVariable( "$pref::Reflect::refractTexScale", TypeF32, &ReflectionManager::smRefractTexScale, "RefractTex has dimensions equal to the active render target scaled in both x and y by this float.\n" - "@ingroup Rendering"); + "@ingroup Rendering"); Con::addVariable( "$pref::Reflect::frameLimitMS", TypeS32, &ReflectionManager::smFrameReflectionMS, "ReflectionManager tries not to spend more than this amount of time updating reflections per frame.\n" - "@ingroup Rendering"); + "@ingroup Rendering"); } ReflectionManager::~ReflectionManager() @@ -136,22 +136,49 @@ void ReflectionManager::update( F32 timeSlice, // visibility of reflectors. Frustum culler; + // jamesu - normally we just need a frustum which covers the current ports, however for SBS mode + // we need something which covers both viewports. S32 stereoTarget = GFX->getCurrentStereoTarget(); if (stereoTarget != -1) { - MathUtils::makeFovPortFrustum(&culler, false, query.nearPlane, query.farPlane, query.fovPort[stereoTarget]); + // In this case we're rendering in stereo using a specific eye + MathUtils::makeFovPortFrustum(&culler, false, query.nearPlane, query.farPlane, query.fovPort[stereoTarget], query.headMatrix); + } + else if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide) + { + // Calculate an ideal culling size here, we'll just assume double fov based on the first fovport based on + // the head position. 
+ FovPort port = query.fovPort[0]; + F32 leftSize = query.nearPlane * port.leftTan; + F32 rightSize = query.nearPlane * port.rightTan; + F32 upSize = query.nearPlane * port.upTan; + F32 downSize = query.nearPlane * port.downTan; + + F32 left = -leftSize; + F32 right = rightSize; + F32 top = upSize; + F32 bottom = -downSize; + + F32 fovInRadians = mAtan2((top - bottom) / 2.0f, query.nearPlane) * 3.0f; + + culler.set(false, + fovInRadians, + (F32)(query.stereoViewports[0].extent.x + query.stereoViewports[1].extent.x) / (F32)query.stereoViewports[0].extent.y, + query.nearPlane, + query.farPlane, + query.headMatrix); } else { - culler.set(false, - query.fov, - (F32)resolution.x / (F32)resolution.y, - query.nearPlane, - query.farPlane, - query.cameraMatrix); + // Normal culling + culler.set(false, + query.fov, + (F32)resolution.x / (F32)resolution.y, + query.nearPlane, + query.farPlane, + query.cameraMatrix); } - // Manipulate the frustum for tiled screenshots const bool screenShotMode = gScreenShot && gScreenShot->isPending(); if ( screenShotMode ) @@ -170,6 +197,7 @@ void ReflectionManager::update( F32 timeSlice, refparams.viewportExtent = resolution; refparams.culler = culler; refparams.startOfUpdateMs = startOfUpdateMs; + refparams.eyeId = stereoTarget; // Update the reflection score. ReflectorList::iterator reflectorIter = mReflectors.begin(); @@ -184,6 +212,7 @@ void ReflectionManager::update( F32 timeSlice, mTimer->getElapsedMs(); mTimer->reset(); U32 numUpdated = 0; + U32 currentTarget = stereoTarget >= 0 ? stereoTarget : 0; reflectorIter = mReflectors.begin(); for ( ; reflectorIter != mReflectors.end(); reflectorIter++ ) { @@ -193,7 +222,12 @@ void ReflectionManager::update( F32 timeSlice, break; (*reflectorIter)->updateReflection( refparams ); - (*reflectorIter)->lastUpdateMs = startOfUpdateMs; + + if (stereoTarget != 0) // only update MS if we're not rendering the left eye in separate mode + { + (*reflectorIter)->lastUpdateMs = startOfUpdateMs; + } + numUpdated++; // If we run out of update time then stop. diff --git a/Engine/source/scene/reflector.cpp b/Engine/source/scene/reflector.cpp index 9c85a6ac7..b8d2d9c4c 100644 --- a/Engine/source/scene/reflector.cpp +++ b/Engine/source/scene/reflector.cpp @@ -533,19 +533,28 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) texDim = getMin( texDim, params.viewportExtent.x ); texDim = getMin( texDim, params.viewportExtent.y ); - bool texResize = ( texDim != mLastTexSize ); - mLastTexSize = texDim; + S32 currentTarget = params.eyeId >= 0 ? 
params.eyeId : 0; - const Point2I texSize( texDim, texDim ); + const Point2I texSize = Point2I(texDim, texDim); + + bool texResize = (texSize != mLastTexSize); + mLastTexSize = texSize; if ( texResize || - reflectTex.isNull() || + innerReflectTex[currentTarget].isNull() || + innerReflectTex[currentTarget]->getSize() != texSize || reflectTex->getFormat() != REFLECTMGR->getReflectFormat() ) { - reflectTex = REFLECTMGR->allocRenderTarget( texSize ); - depthBuff = LightShadowMap::_getDepthTarget( texSize.x, texSize.y ); + innerReflectTex[currentTarget] = REFLECTMGR->allocRenderTarget( texSize ); } + if ( texResize || depthBuff.isNull() ) + { + depthBuff = LightShadowMap::_getDepthTarget(texSize.x, texSize.y); + } + + reflectTex = innerReflectTex[currentTarget]; + // store current matrices GFXTransformSaver saver; @@ -554,17 +563,17 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) S32 stereoTarget = GFX->getCurrentStereoTarget(); if (stereoTarget != -1) { - MathUtils::makeFovPortFrustum(&frustum, false, params.query->nearPlane, params.query->farPlane, params.query->fovPort[stereoTarget]); + MathUtils::makeFovPortFrustum(&frustum, false, params.query->nearPlane, params.query->farPlane, params.query->fovPort[stereoTarget]); } else { - Point2I viewport(params.viewportExtent); - if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide) - { - viewport.x *= 0.5f; - } - F32 aspectRatio = F32(viewport.x) / F32(viewport.y); - frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane); + Point2I viewport(params.viewportExtent); + if (GFX->getCurrentRenderStyle() == GFXDevice::RS_StereoSideBySide) + { + viewport.x *= 0.5f; + } + F32 aspectRatio = F32(viewport.x) / F32(viewport.y); + frustum.set(false, params.query->fov, aspectRatio, params.query->nearPlane, params.query->farPlane); } // Manipulate the frustum for tiled screenshots @@ -587,7 +596,7 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) if(reflectTarget.isNull()) reflectTarget = GFX->allocRenderToTextureTarget(); - reflectTarget->attachTexture( GFXTextureTarget::Color0, reflectTex ); + reflectTarget->attachTexture( GFXTextureTarget::Color0, innerReflectTex[currentTarget] ); reflectTarget->attachTexture( GFXTextureTarget::DepthStencil, depthBuff ); GFX->pushActiveRenderTarget(); GFX->setActiveRenderTarget( reflectTarget ); @@ -615,8 +624,18 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) MatrixF origNonClipProjection = gClientSceneGraph->getNonClipProjection(); PFXFrameState origPFXState = PFXMGR->getFrameState(); - const FovPort *currentFovPort = GFX->getStereoFovPort(); - MatrixF inverseEyeTransforms[2]; + const FovPort *currentFovPort = params.query->fovPort; + MatrixF inverseEyeTransforms[2]; + Frustum gfxFrustum; + + // Calculate viewport based on texture size + RectI stereoViewports[2]; + stereoViewports[0] = params.query->stereoViewports[0]; + stereoViewports[1] = params.query->stereoViewports[1]; + stereoViewports[0].extent.x = stereoViewports[1].extent.x = texSize.x / 2; + stereoViewports[0].extent.y = stereoViewports[1].extent.y = texSize.y; + stereoViewports[0].point.x = 0; + stereoViewports[1].point.x = stereoViewports[0].extent.x; // Calculate world transforms for eyes inverseEyeTransforms[0] = params.query->eyeTransforms[0]; @@ -624,50 +643,64 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) inverseEyeTransforms[0].inverse(); inverseEyeTransforms[1].inverse(); - Frustum originalFrustum = GFX->getFrustum(); - 
+ // // Render left half of display - GFX->activateStereoTarget(0); - GFX->setWorldMatrix(params.query->eyeTransforms[0]); + // - Frustum gfxFrustum = originalFrustum; - MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], inverseEyeTransforms[0]); + GFX->setViewport(stereoViewports[0]); + GFX->setCurrentStereoTarget(0); + MathUtils::makeFovPortFrustum(&gfxFrustum, params.query->ortho, params.query->nearPlane, params.query->farPlane, params.query->fovPort[0]); + gfxFrustum.update(); GFX->setFrustum(gfxFrustum); setGFXMatrices( params.query->eyeTransforms[0] ); - SceneCameraState cameraStateLeft = SceneCameraState::fromGFX(); - SceneRenderState renderStateLeft( gClientSceneGraph, SPT_Reflect, cameraStateLeft ); + SceneRenderState renderStateLeft + ( + gClientSceneGraph, + SPT_Reflect, + SceneCameraState::fromGFX() + ); + renderStateLeft.setSceneRenderStyle(SRS_SideBySide); renderStateLeft.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial ); - renderStateLeft.setDiffuseCameraTransform( params.query->headMatrix ); + renderStateLeft.setDiffuseCameraTransform(params.query->headMatrix); + //renderStateLeft.disableAdvancedLightingBins(true); gClientSceneGraph->renderSceneNoLights( &renderStateLeft, objTypeFlag ); + // // Render right half of display - GFX->activateStereoTarget(1); - GFX->setWorldMatrix(params.query->eyeTransforms[1]); + // - gfxFrustum = originalFrustum; - MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], inverseEyeTransforms[1]); + GFX->setViewport(stereoViewports[1]); + GFX->setCurrentStereoTarget(1); + MathUtils::makeFovPortFrustum(&gfxFrustum, params.query->ortho, params.query->nearPlane, params.query->farPlane, params.query->fovPort[1]); + gfxFrustum.update(); GFX->setFrustum(gfxFrustum); setGFXMatrices( params.query->eyeTransforms[1] ); - SceneCameraState cameraStateRight = SceneCameraState::fromGFX(); - SceneRenderState renderStateRight( gClientSceneGraph, SPT_Reflect, cameraStateRight ); + SceneRenderState renderStateRight + ( + gClientSceneGraph, + SPT_Reflect, + SceneCameraState::fromGFX() + ); + renderStateRight.setSceneRenderStyle(SRS_SideBySide); renderStateRight.getMaterialDelegate().bind( REFLECTMGR, &ReflectionManager::getReflectionMaterial ); renderStateRight.setDiffuseCameraTransform( params.query->headMatrix ); - renderStateRight.disableAdvancedLightingBins(true); + //renderStateRight.disableAdvancedLightingBins(true); gClientSceneGraph->renderSceneNoLights( &renderStateRight, objTypeFlag ); // Restore previous values - GFX->setFrustum(gfxFrustum); + GFX->setFrustum(frustum); GFX->setViewport(originalVP); gClientSceneGraph->setNonClipProjection(origNonClipProjection); PFXMGR->setFrameState(origPFXState); + GFX->setCurrentStereoTarget(-1); } else { @@ -690,6 +723,14 @@ void PlaneReflector::updateReflection( const ReflectParams ¶ms ) reflectTarget->resolve(); GFX->popActiveRenderTarget(); +#ifdef DEBUG_REFLECT_TEX + static U32 reflectStage = 0; + char buf[128]; dSprintf(buf, 128, "F:\\REFLECT-OUT%i.PNG", reflectStage); + //reflectTex->dumpToDisk("PNG", buf); + reflectStage++; + if (reflectStage > 1) reflectStage = 0; +#endif + // Restore detail adjust amount. 
TSShapeInstance::smDetailAdjust = detailAdjustBackup; @@ -803,7 +844,7 @@ MatrixF PlaneReflector::getFrustumClipProj( MatrixF &modelview ) // as (sgn(clipPlane.x), sgn(clipPlane.y), 1, 1) and // transform it into camera space by multiplying it // by the inverse of the projection matrix - Vector4F q; + Vector4F q; q.x = sgn(clipPlane.x) / proj(0,0); q.y = sgn(clipPlane.y) / proj(1,1); q.z = -1.0F; diff --git a/Engine/source/scene/reflector.h b/Engine/source/scene/reflector.h index 36d830462..c0646d30d 100644 --- a/Engine/source/scene/reflector.h +++ b/Engine/source/scene/reflector.h @@ -53,6 +53,7 @@ struct ReflectParams Point2I viewportExtent; Frustum culler; U32 startOfUpdateMs; + S8 eyeId; }; @@ -191,7 +192,7 @@ public: { refplane.set( Point3F(0,0,0), Point3F(0,0,1) ); objectSpace = false; - mLastTexSize = 0; + mLastTexSize = Point2I(0,0); } virtual ~PlaneReflector() {} @@ -213,7 +214,7 @@ public: protected: - U32 mLastTexSize; + Point2I mLastTexSize; // The camera position at the last update. Point3F mLastPos; @@ -224,7 +225,9 @@ protected: public: GFXTextureTargetRef reflectTarget; - GFXTexHandle reflectTex; + + GFXTexHandle innerReflectTex[2]; /// < Textures we actually render to + GFXTexHandle reflectTex; ///< Last texture we rendered to GFXTexHandle depthBuff; PlaneF refplane; bool objectSpace; diff --git a/Engine/source/scene/sceneManager.cpp b/Engine/source/scene/sceneManager.cpp index 53c8eb045..187f32498 100644 --- a/Engine/source/scene/sceneManager.cpp +++ b/Engine/source/scene/sceneManager.cpp @@ -256,7 +256,7 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S GFX->setWorldMatrix(worldEyeTransforms[0]); Frustum gfxFrustum = originalFrustum; - MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0], eyeTransforms[0]); + MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[0]); GFX->setFrustum(gfxFrustum); SceneCameraState cameraStateLeft = SceneCameraState::fromGFX(); @@ -277,7 +277,7 @@ void SceneManager::renderScene( SceneRenderState* renderState, U32 objectMask, S GFX->setWorldMatrix(worldEyeTransforms[1]); gfxFrustum = originalFrustum; - MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1], eyeTransforms[1]); + MathUtils::makeFovPortFrustum(&gfxFrustum, gfxFrustum.isOrtho(), gfxFrustum.getNearDist(), gfxFrustum.getFarDist(), currentFovPort[1]); GFX->setFrustum(gfxFrustum); SceneCameraState cameraStateRight = SceneCameraState::fromGFX(); From e6159a590a8f0457ee5b4393aae3a4f879fd6d72 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Wed, 15 Jun 2016 00:12:27 +0100 Subject: [PATCH 31/33] Add basic support for showing openvr controllers and tracked objects --- .../T3D/gameBase/extended/extendedMove.cpp | 53 +- .../T3D/gameBase/extended/extendedMove.h | 8 +- Engine/source/T3D/player.cpp | 69 +- Engine/source/T3D/player.h | 8 + Engine/source/T3D/shapeBase.cpp | 13 +- .../platform/input/openVR/openVROverlay.cpp | 8 +- .../platform/input/openVR/openVRProvider.cpp | 555 +++++++++- .../platform/input/openVR/openVRProvider.h | 91 ++ .../input/openVR/openVRTrackedObject.cpp | 981 ++++++++++++++++++ .../input/openVR/openVRTrackedObject.h | 155 +++ .../source/platform/output/IDisplayDevice.h | 5 + Tools/CMake/modules/module_openvr.cmake | 2 + 12 files changed, 1903 insertions(+), 45 deletions(-) create mode 100644 
Engine/source/platform/input/openVR/openVRTrackedObject.cpp create mode 100644 Engine/source/platform/input/openVR/openVRTrackedObject.h diff --git a/Engine/source/T3D/gameBase/extended/extendedMove.cpp b/Engine/source/T3D/gameBase/extended/extendedMove.cpp index a27de9ca6..a11dfc6eb 100644 --- a/Engine/source/T3D/gameBase/extended/extendedMove.cpp +++ b/Engine/source/T3D/gameBase/extended/extendedMove.cpp @@ -16,15 +16,17 @@ MODULE_BEGIN( ExtendedMoveManager ) MODULE_END; -S32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, }; -S32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, }; -S32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, }; +F32 ExtendedMoveManager::mPosX[ExtendedMove::MaxPositionsRotations] = { 0, }; +F32 ExtendedMoveManager::mPosY[ExtendedMove::MaxPositionsRotations] = { 0, }; +F32 ExtendedMoveManager::mPosZ[ExtendedMove::MaxPositionsRotations] = { 0, }; bool ExtendedMoveManager::mRotIsEuler[ExtendedMove::MaxPositionsRotations] = { 0, }; F32 ExtendedMoveManager::mRotAX[ExtendedMove::MaxPositionsRotations] = { 0, }; F32 ExtendedMoveManager::mRotAY[ExtendedMove::MaxPositionsRotations] = { 0, }; F32 ExtendedMoveManager::mRotAZ[ExtendedMove::MaxPositionsRotations] = { 0, }; F32 ExtendedMoveManager::mRotAA[ExtendedMove::MaxPositionsRotations] = { 1, }; +F32 ExtendedMoveManager::mPosScale = 2.0f; + void ExtendedMoveManager::init() { for(U32 i = 0; i < ExtendedMove::MaxPositionsRotations; ++i) @@ -32,17 +34,17 @@ void ExtendedMoveManager::init() char varName[256]; dSprintf(varName, sizeof(varName), "mvPosX%d", i); - Con::addVariable(varName, TypeS32, &mPosX[i], + Con::addVariable(varName, TypeF32, &mPosX[i], "X position of controller in millimeters. Only 13 bits are networked.\n" "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvPosY%d", i); - Con::addVariable(varName, TypeS32, &mPosY[i], + Con::addVariable(varName, TypeF32, &mPosY[i], "Y position of controller in millimeters. Only 13 bits are networked.\n" "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvPosZ%d", i); - Con::addVariable(varName, TypeS32, &mPosZ[i], + Con::addVariable(varName, TypeF32, &mPosZ[i], "Z position of controller in millimeters. 
Only 13 bits are networked.\n" "@ingroup Game"); @@ -75,6 +77,11 @@ void ExtendedMoveManager::init() "Angle rotation (in degrees) component of controller.\n" "@ingroup Game"); } + + Con::addVariable("mvPosScale", TypeF32, &mPosScale, + "@brief Indicates the scale to be given to mvPos values.\n\n" + "" + "@ingroup Game"); } const ExtendedMove NullExtendedMove; @@ -183,8 +190,8 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) // Position if (stream->readFlag()) { - posX[i] = stream->readInt(MaxPositionBits); - cposX[i] = UNCLAMPPOS(posX[i]); + cposX[i] = stream->readInt(MaxPositionBits); + posX[i] = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; } else posX[i] = extBaseMove->posX[i]; @@ -192,7 +199,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) if (stream->readFlag()) { cposY[i] = stream->readInt(MaxPositionBits); - posY[i] = UNCLAMPPOS(cposY[i]); + posY[i] = UNCLAMPPOS(cposY[i]) * ExtendedMoveManager::mPosScale; } else posY[i] = extBaseMove->posY[i]; @@ -200,7 +207,7 @@ void ExtendedMove::unpack(BitStream *stream, const Move * basemove) if (stream->readFlag()) { cposZ[i] = stream->readInt(MaxPositionBits); - posZ[i] = UNCLAMPPOS(cposZ[i]); + posZ[i] = UNCLAMPPOS(cposZ[i]) * ExtendedMoveManager::mPosScale; } else posZ[i] = extBaseMove->posZ[i]; @@ -267,9 +274,9 @@ void ExtendedMove::clamp() for(U32 i=0; iprocessTick(move); + } + + if (mControllers[1]) + { + mControllers[1]->processTick(move); + } + +#endif + // Is waterCoverage high enough to be 'swimming'? { bool swimming = mWaterCoverage > 0.65f && canSwim(); @@ -2628,18 +2647,29 @@ void Player::updateMove(const Move* move) AngAxisF moveRot(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]); MatrixF trans(1); moveRot.setMatrix(&trans); + trans.inverse(); - Point3F vecForward(0, 1, 0); + Point3F vecForward(0, 10, 0); + Point3F viewAngle; Point3F orient; EulerF rot; trans.mulV(vecForward); + viewAngle = vecForward; + vecForward.z = 0; // flatten + vecForward.normalizeSafe(); F32 yawAng; F32 pitchAng; MathUtils::getAnglesFromVector(vecForward, yawAng, pitchAng); + + mRot = EulerF(0); mRot.z = yawAng; mHead = EulerF(0); - mHead.x = -pitchAng; + + while (mRot.z < 0.0f) + mRot.z += M_2PI_F; + while (mRot.z > M_2PI_F) + mRot.z -= M_2PI_F; absoluteDelta = true; } @@ -7140,3 +7170,38 @@ void Player::renderConvex( ObjectRenderInst *ri, SceneRenderState *state, BaseMa mConvex.renderWorkingList(); GFX->leaveDebugEvent(); } + +#ifdef TORQUE_OPENVR +void Player::setControllers(Vector controllerList) +{ + mControllers[0] = controllerList.size() > 0 ? controllerList[0] : NULL; + mControllers[1] = controllerList.size() > 1 ? 
controllerList[1] : NULL; +} + +ConsoleMethod(Player, setVRControllers, void, 4, 4, "") +{ + OpenVRTrackedObject *controllerL, *controllerR; + Vector list; + + if (Sim::findObject(argv[2], controllerL)) + { + list.push_back(controllerL); + } + else + { + list.push_back(NULL); + } + + if (Sim::findObject(argv[3], controllerR)) + { + list.push_back(controllerR); + } + else + { + list.push_back(NULL); + } + + object->setControllers(list); +} + +#endif diff --git a/Engine/source/T3D/player.h b/Engine/source/T3D/player.h index a05b6de99..1e0a76cb0 100644 --- a/Engine/source/T3D/player.h +++ b/Engine/source/T3D/player.h @@ -39,6 +39,7 @@ class DecalData; class SplashData; class PhysicsPlayer; class Player; +class OpenVRTrackedObject; //---------------------------------------------------------------------------- @@ -518,6 +519,8 @@ protected: Point3F mLastPos; ///< Holds the last position for physics updates Point3F mLastWaterPos; ///< Same as mLastPos, but for water + SimObjectPtr mControllers[2]; + struct ContactInfo { bool contacted, jump, run; @@ -577,12 +580,17 @@ protected: PhysicsPlayer* getPhysicsRep() const { return mPhysicsRep; } +#ifdef TORQUE_OPENVR + void setControllers(Vector controllerList); +#endif + protected: virtual void reSkin(); void setState(ActionState state, U32 ticks=0); void updateState(); + // Jetting bool mJetting; diff --git a/Engine/source/T3D/shapeBase.cpp b/Engine/source/T3D/shapeBase.cpp index e5a6dc6fb..09fc1ca42 100644 --- a/Engine/source/T3D/shapeBase.cpp +++ b/Engine/source/T3D/shapeBase.cpp @@ -1999,17 +1999,14 @@ void ShapeBase::getEyeCameraTransform(IDisplayDevice *displayDevice, U32 eyeId, // NOTE: currently we dont support third-person camera in this mode MatrixF cameraTransform(1); F32 fakePos = 0; + //cameraTransform = getRenderTransform(); // use this for controllers TODO getCameraTransform(&fakePos, &cameraTransform); - QuatF baserot = cameraTransform; - QuatF qrot = QuatF(newPose.orientation); - //QuatF concatRot; - //concatRot.mul(baserot, qrot); - qrot.setMatrix(&temp); + temp = MatrixF(1); + newPose.orientation.setMatrix(&temp); + temp.setPosition(newPose.position); - temp.setPosition(cameraTransform.getPosition() + qrot.mulP(newPose.position, &rotEyePos)); - - *outMat = temp; + *outMat = cameraTransform * temp; } void ShapeBase::getCameraParameters(F32 *min,F32* max,Point3F* off,MatrixF* rot) diff --git a/Engine/source/platform/input/openVR/openVROverlay.cpp b/Engine/source/platform/input/openVR/openVROverlay.cpp index 25c345153..6f4487181 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.cpp +++ b/Engine/source/platform/input/openVR/openVROverlay.cpp @@ -63,7 +63,7 @@ void OpenVROverlay::initPersistFields() "Type of overlay."); addProtectedField("overlayFlags", TypeS32, Offset(mOverlayFlags, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Flags for overlay."); - addProtectedField("overlayWidth", TypeS32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, + addProtectedField("overlayWidth", TypeF32, Offset(mOverlayWidth, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Width of overlay."); addProtectedField("overlayColor", TypeColorF, Offset(mOverlayColor, OpenVROverlay), &setProtectedOverlayDirty, &defaultProtectedGetFn, "Backing color of overlay."); @@ -127,7 +127,7 @@ void OpenVROverlay::onRemove() mThumbOverlayHandle = NULL; } - if (OPENVR) + if (ManagedSingleton::instanceOrNull()) { OPENVR->unregisterOverlay(this); } @@ -373,13 +373,13 @@ void 
OpenVROverlay::handleOpenVREvents() eventInfo.modifier = (InputModifiers)0; eventInfo.ascii = 0; - Con::printf("Overlay event %i", vrEvent.eventType); + //Con::printf("Overlay event %i", vrEvent.eventType); switch (vrEvent.eventType) { case vr::VREvent_MouseMove: { - Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y); + //Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y); eventInfo.objType = SI_AXIS; eventInfo.objInst = SI_XAXIS; eventInfo.action = SI_MAKE; diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index fdf687afd..e217cb96a 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -6,6 +6,12 @@ #include "T3D/gameBase/gameConnection.h" #include "gui/core/guiCanvas.h" #include "postFx/postEffectCommon.h" +#include "renderInstance/renderPassManager.h" +#include "scene/sceneRenderState.h" +#include "materials/baseMatInstance.h" +#include "materials/materialManager.h" +#include "console/consoleInternal.h" +#include "core/stream/fileStream.h" #include "gfx/D3D11/gfxD3D11Device.h" #include "gfx/D3D11/gfxD3D11TextureObject.h" @@ -17,12 +23,20 @@ #include "gfx/D3D9/gfxD3D9TextureObject.h" #include "gfx/D3D9/gfxD3D9EnumTranslate.h" +#include "materials/matTextureTarget.h" + #ifdef TORQUE_OPENGL #include "gfx/gl/gfxGLDevice.h" #include "gfx/gl/gfxGLTextureObject.h" #include "gfx/gl/gfxGLEnumTranslate.h" #endif +struct OpenVRLoadedTexture +{ + vr::TextureID_t texId; + NamedTexTarget texTarget; +}; + AngAxisF gLastMoveRot; // jamesu - this is just here for temp debugging namespace OpenVRUtil @@ -74,6 +88,8 @@ namespace OpenVRUtil return outMat; } + + void convertMatrixFPlainToSteamVRAffineMatrix(const MatrixF &inMat, vr::HmdMatrix34_t &outMat) { Point4F row0; inMat.getRow(0, &row0); @@ -123,6 +139,114 @@ namespace OpenVRUtil bounds.vMax = (rect.point.y + rect.extent.y) * yRatio; return bounds; } + + String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL) + { + uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError); + if (unRequiredBufferLen == 0) + return ""; + + char *pchBuffer = new char[unRequiredBufferLen]; + unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError); + String sResult = pchBuffer; + delete[] pchBuffer; + return sResult; + } + +} + +//------------------------------------------------------------ + +bool OpenVRRenderModel::init(const vr::RenderModel_t & vrModel, StringTableEntry materialName) +{ + SAFE_DELETE(mMaterialInstance); + mMaterialInstance = MATMGR->createMatInstance(materialName, getGFXVertexFormat< VertexType >()); + if (!mMaterialInstance) + return false; + + mLocalBox = Box3F::Invalid; + + // Prepare primitives + U16 *indPtr = NULL; + GFXPrimitive *primPtr = NULL; + mPrimitiveBuffer.set(GFX, vrModel.unTriangleCount * 3, 1, GFXBufferTypeStatic, "OpenVR Controller buffer"); + + mPrimitiveBuffer.lock(&indPtr, &primPtr); + if (!indPtr || !primPtr) + return false; + + primPtr->minIndex = 0; + primPtr->numPrimitives = vrModel.unTriangleCount; + primPtr->numVertices = vrModel.unVertexCount; + primPtr->startIndex = 0; + primPtr->startVertex = 0; + primPtr->type = GFXTriangleList; + + //dMemcpy(indPtr, vrModel.rIndexData, sizeof(U16) * vrModel.unTriangleCount * 3); + + for 
(U32 i = 0; i < vrModel.unTriangleCount; i++) + { + const U32 idx = i * 3; + indPtr[idx + 0] = vrModel.rIndexData[idx + 2]; + indPtr[idx + 1] = vrModel.rIndexData[idx + 1]; + indPtr[idx + 2] = vrModel.rIndexData[idx + 0]; + } + + mPrimitiveBuffer.unlock(); + + // Prepare verts + mVertexBuffer.set(GFX, vrModel.unVertexCount, GFXBufferTypeStatic); + VertexType *vertPtr = mVertexBuffer.lock(); + if (!vertPtr) + return false; + + // Convert to torque coordinate system + for (U32 i = 0; i < vrModel.unVertexCount; i++) + { + const vr::RenderModel_Vertex_t &vert = vrModel.rVertexData[i]; + vertPtr->point = OpenVRUtil::convertPointFromOVR(vert.vPosition); + vertPtr->point.x = -vertPtr->point.x; + vertPtr->point.y = -vertPtr->point.y; + vertPtr->point.z = -vertPtr->point.z; + vertPtr->normal = OpenVRUtil::convertPointFromOVR(vert.vNormal); + vertPtr->normal.x = -vertPtr->normal.x; + vertPtr->normal.y = -vertPtr->normal.y; + vertPtr->normal.z = -vertPtr->normal.z; + vertPtr->texCoord = Point2F(vert.rfTextureCoord[0], vert.rfTextureCoord[1]); + vertPtr++; + } + + mVertexBuffer.unlock(); + + for (U32 i = 0, sz = vrModel.unVertexCount; i < sz; i++) + { + Point3F pos = Point3F(vrModel.rVertexData[i].vPosition.v[0], vrModel.rVertexData[i].vPosition.v[1], vrModel.rVertexData[i].vPosition.v[2]); + mLocalBox.extend(pos); + } + + return true; +} + +void OpenVRRenderModel::draw(SceneRenderState *state, MeshRenderInst* renderInstance) +{ + renderInstance->type = RenderPassManager::RIT_Mesh; + renderInstance->matInst = state->getOverrideMaterial(mMaterialInstance); + if (!renderInstance->matInst) + return; + + renderInstance->vertBuff = &mVertexBuffer; + renderInstance->primBuff = &mPrimitiveBuffer; + renderInstance->prim = NULL; + renderInstance->primBuffIndex = 0; + + if (renderInstance->matInst->getMaterial()->isTranslucent()) + { + renderInstance->type = RenderPassManager::RIT_Translucent; + renderInstance->translucentSort = true; + } + + renderInstance->defaultKey = renderInstance->matInst->getStateHint(); + renderInstance->defaultKey2 = (uintptr_t)renderInstance->vertBuff; } //------------------------------------------------------------ @@ -209,6 +333,16 @@ ImplementEnumType(OpenVRState, { vr::VRState_NotReady, "NotReady" }, EndImplementEnumType; +ImplementEnumType(OpenVRTrackedDeviceClass, + "Types of devices which are tracked .\n\n" + "@ingroup OpenVR") +{ vr::TrackedDeviceClass_Invalid, "Invalid" }, +{ vr::TrackedDeviceClass_HMD, "HMD" }, +{ vr::TrackedDeviceClass_Controller, "Controller" }, +{ vr::TrackedDeviceClass_TrackingReference, "TrackingReference" }, +{ vr::TrackedDeviceClass_Other, "Other" }, +EndImplementEnumType; + //------------------------------------------------------------ U32 OpenVRProvider::OVR_SENSORROT[vr::k_unMaxTrackedDeviceCount] = { 0 }; @@ -371,7 +505,7 @@ OpenVRProvider::OpenVRProvider() : INPUTMGR->registerDevice(this); dMemset(&mLUID, '\0', sizeof(mLUID)); - mTrackingSpace = vr::TrackingUniverseSeated; + mTrackingSpace = vr::TrackingUniverseStanding; } OpenVRProvider::~OpenVRProvider() @@ -404,6 +538,8 @@ void OpenVRProvider::staticInit() bool OpenVRProvider::enable() { + mOpenVRNS = Namespace::find(StringTable->insert("OpenVR")); + disable(); // Load openvr runtime @@ -479,12 +615,19 @@ bool OpenVRProvider::enable() mDriver = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_TrackingSystemName_String); mDisplay = GetTrackedDeviceString(mHMD, vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SerialNumber_String); + mHMDRenderState.mHMDPose = MatrixF(1); + 
mHMDRenderState.mEyePose[0] = MatrixF(1); + mHMDRenderState.mEyePose[1] = MatrixF(1); + mHMDRenderState.reset(mHMD); mHMD->ResetSeatedZeroPose(); dMemset(mPreviousInputTrackedDevicePose, '\0', sizeof(mPreviousInputTrackedDevicePose)); mEnabled = true; + dMemset(mCurrentControllerState, '\0', sizeof(mCurrentControllerState)); + dMemset(mPreviousCurrentControllerState, '\0', sizeof(mPreviousCurrentControllerState)); + return true; } @@ -614,7 +757,7 @@ bool OpenVRProvider::process() vr::VRControllerState_t state; if (mHMD->GetControllerState(unDevice, &state)) { - // TODO + mCurrentControllerState[unDevice] = state; } } @@ -643,7 +786,21 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos) Point3F pos = torqueMat.getPosition(); outRot = QuatF(torqueMat); - outPos = pos;// Point3F(-pos.x, pos.z, -pos.y); + outPos = pos; + outRot.mulP(pos, &outPos); // jamesu - position needs to be multiplied by rotation in this case +} + +void OpenVRTransformToRotPosMat(MatrixF mat, QuatF &outRot, Point3F &outPos, MatrixF &outMat) +{ + // Directly set the rotation and position from the eye transforms + MatrixF torqueMat(1); + OpenVRUtil::convertTransformFromOVR(mat, torqueMat); + + Point3F pos = torqueMat.getPosition(); + outRot = QuatF(torqueMat); + outPos = pos; + outRot.mulP(pos, &outPos); // jamesu - position needs to be multiplied by rotation in this case + outMat = torqueMat; } void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const @@ -655,15 +812,29 @@ void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const // NOTE: this is codename for "head" MatrixF mat = mHMDRenderState.mHMDPose; // same order as in the openvr example +#ifdef DEBUG_DISPLAY_POSE + pose->originalMatrix = mat; + OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix); +#else OpenVRTransformToRotPos(mat, pose->orientation, pose->position); +#endif + pose->velocity = Point3F(0); pose->angularVelocity = Point3F(0); } else { MatrixF mat = mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose; // same order as in the openvr example + //mat = mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eyeId]; // same order as in the openvr example + +#ifdef DEBUG_DISPLAY_POSE + pose->originalMatrix = mat; + OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix); +#else OpenVRTransformToRotPos(mat, pose->orientation, pose->position); +#endif + pose->velocity = Point3F(0); pose->angularVelocity = Point3F(0); } @@ -914,10 +1085,14 @@ S32 OpenVRProvider::getDisplayDeviceId() const return -1; } -void OpenVRProvider::processVREvent(const vr::VREvent_t & event) +void OpenVRProvider::processVREvent(const vr::VREvent_t & evt) { - switch (event.eventType) + mVREventSignal.trigger(evt); + switch (evt.eventType) { + case vr::VREvent_InputFocusCaptured: + //Con::executef() + break; case vr::VREvent_TrackedDeviceActivated: { // Setup render model @@ -969,6 +1144,8 @@ void OpenVRProvider::updateTrackedPoses() if (nDevice == vr::k_unTrackedDeviceIndex_Hmd) { mHMDRenderState.mHMDPose = mat; + + /* MatrixF rotOffset(1); EulerF localRot(-smHMDRotOffset.x, -smHMDRotOffset.z, smHMDRotOffset.y); @@ -978,6 +1155,7 @@ void OpenVRProvider::updateTrackedPoses() QuatF(localRot).setMatrix(&rotOffset); rotOffset.inverse(); mHMDRenderState.mHMDPose = mat = rotOffset * mHMDRenderState.mHMDPose; + */ // jamesu - store the last rotation for temp debugging MatrixF torqueMat(1); @@ -990,6 +1168,11 @@ void OpenVRProvider::updateTrackedPoses() 
vr::TrackedDevicePose_t &outPose = mTrackedDevicePose[nDevice]; OpenVRTransformToRotPos(mat, inPose.orientation, inPose.position); +#ifdef DEBUG_DISPLAY_POSE + OpenVRUtil::convertTransformFromOVR(mat, inPose.actualMatrix); + inPose.originalMatrix = mat; +#endif + inPose.state = outPose.eTrackingResult; inPose.valid = outPose.bPoseIsValid; inPose.connected = outPose.bDeviceIsConnected; @@ -1012,18 +1195,23 @@ void OpenVRProvider::submitInputChanges() IDevicePose curPose = mCurrentDevicePose[i]; IDevicePose prevPose = mPreviousInputTrackedDevicePose[i]; + S32 eventIdx = -1; + + if (!mDeviceEventMap.tryGetValue(i, eventIdx) || eventIdx < 0) + continue; + if (!curPose.valid || !curPose.connected) continue; if (curPose.orientation != prevPose.orientation) { AngAxisF axisAA(curPose.orientation); - INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[i], SI_MOVE, axisAA); + INPUTMGR->buildInputEvent(mDeviceType, 0, SI_ROT, OVR_SENSORROT[eventIdx], SI_MOVE, axisAA); } if (curPose.position != prevPose.position) { - INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[i], SI_MOVE, curPose.position); + INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORPOSITION[eventIdx], SI_MOVE, curPose.position); } if (curPose.velocity != prevPose.velocity) @@ -1034,7 +1222,7 @@ void OpenVRProvider::submitInputChanges() angles.y = curPose.velocity.y; angles.z = curPose.velocity.z; - INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[i], SI_MOVE, angles); + INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORVELOCITY[eventIdx], SI_MOVE, angles); } if (curPose.angularVelocity != prevPose.angularVelocity) @@ -1045,7 +1233,7 @@ void OpenVRProvider::submitInputChanges() angles[1] = mRadToDeg(curPose.velocity.y); angles[2] = mRadToDeg(curPose.velocity.z); - INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[i], SI_MOVE, angles); + INPUTMGR->buildInputEvent(mDeviceType, 0, SI_POS, OVR_SENSORANGVEL[eventIdx], SI_MOVE, angles); } /* if (curPose.connected != prevPose.connected) @@ -1076,6 +1264,28 @@ void OpenVRProvider::resetSensors() } } +void OpenVRProvider::mapDeviceToEvent(U32 deviceIdx, S32 eventIdx) +{ + mDeviceEventMap[deviceIdx] = eventIdx; +} + +void OpenVRProvider::resetEventMap() +{ + mDeviceEventMap.clear(); +} + +IDevicePose OpenVRProvider::getTrackedDevicePose(U32 idx) +{ + if (idx >= vr::k_unMaxTrackedDeviceCount) + { + IDevicePose ret; + ret.connected = ret.valid = false; + return ret; + } + + return mCurrentDevicePose[idx]; +} + void OpenVRProvider::registerOverlay(OpenVROverlay* overlay) { mOverlays.push_back(overlay); @@ -1090,6 +1300,261 @@ void OpenVRProvider::unregisterOverlay(OpenVROverlay* overlay) } } +const S32 OpenVRProvider::preloadRenderModelTexture(U32 index) +{ + S32 idx = -1; + if (mLoadedTextureLookup.tryGetValue(index, idx)) + return idx; + + char buffer[256]; + dSprintf(buffer, sizeof(buffer), "openvrtex_%u", index); + + OpenVRProvider::LoadedRenderTexture loadedTexture; + loadedTexture.vrTextureId = index; + loadedTexture.vrTexture = NULL; + loadedTexture.texture = NULL; + loadedTexture.textureError = vr::VRRenderModelError_Loading; + loadedTexture.targetTexture = new NamedTexTarget(); + loadedTexture.targetTexture->registerWithName(buffer); + mLoadedTextures.push_back(loadedTexture); + mLoadedTextureLookup[index] = mLoadedTextures.size() - 1; + + return mLoadedTextures.size() - 1; +} + +const S32 OpenVRProvider::preloadRenderModel(StringTableEntry name) +{ + S32 idx = -1; + if 
(mLoadedModelLookup.tryGetValue(name, idx)) + return idx; + + OpenVRProvider::LoadedRenderModel loadedModel; + loadedModel.name = name; + loadedModel.model = NULL; + loadedModel.vrModel = NULL; + loadedModel.modelError = vr::VRRenderModelError_Loading; + loadedModel.loadedTexture = false; + loadedModel.textureId = -1; + mLoadedModels.push_back(loadedModel); + mLoadedModelLookup[name] = mLoadedModels.size() - 1; + + return mLoadedModels.size() - 1; +} + + +bool OpenVRProvider::getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed) +{ + if (idx < 0 || idx > mLoadedModels.size()) + { + failed = true; + return true; + } + + OpenVRProvider::LoadedRenderModel &loadedModel = mLoadedModels[idx]; + //Con::printf("RenderModel[%i] STAGE 1", idx); + + failed = false; + + if (loadedModel.modelError > vr::VRRenderModelError_Loading) + { + failed = true; + return true; + } + + // Stage 1 : model + if (!loadedModel.model) + { + loadedModel.modelError = vr::VRRenderModels()->LoadRenderModel_Async(loadedModel.name, &loadedModel.vrModel); + //Con::printf(" vr::VRRenderModels()->LoadRenderModel_Async(\"%s\", %x); -> %i", loadedModel.name, &loadedModel.vrModel, loadedModel.modelError); + if (loadedModel.modelError == vr::VRRenderModelError_None) + { + if (loadedModel.vrModel == NULL) + { + failed = true; + return true; + } + // Load the model + loadedModel.model = new OpenVRRenderModel(); + } + else if (loadedModel.modelError == vr::VRRenderModelError_Loading) + { + return false; + } + } + + //Con::printf("RenderModel[%i] STAGE 2 (texId == %i)", idx, loadedModel.vrModel->diffuseTextureId); + + // Stage 2 : texture + if (!loadedModel.loadedTexture && loadedModel.model) + { + if (loadedModel.textureId == -1) + { + loadedModel.textureId = preloadRenderModelTexture(loadedModel.vrModel->diffuseTextureId); + } + + if (loadedModel.textureId == -1) + { + failed = true; + return true; + } + + if (!getRenderModelTexture(loadedModel.textureId, NULL, failed)) + { + return false; + } + + if (failed) + { + return true; + } + + loadedModel.loadedTexture = true; + + //Con::printf("RenderModel[%i] GOT TEXTURE"); + + // Now we can load the model. 
Note we first need to get a Material for the mapped texture + NamedTexTarget *namedTexture = mLoadedTextures[loadedModel.textureId].targetTexture; + String materialName = MATMGR->getMapEntry(namedTexture->getName().c_str()); + if (materialName.isEmpty()) + { + char buffer[256]; + dSprintf(buffer, sizeof(buffer), "#%s", namedTexture->getName().c_str()); + materialName = buffer; + + //Con::printf("RenderModel[%i] materialName == %s", idx, buffer); + + Material* mat = new Material(); + mat->mMapTo = namedTexture->getName(); + mat->mDiffuseMapFilename[0] = buffer; + mat->mEmissive[0] = true; + + dSprintf(buffer, sizeof(buffer), "%s_Material", namedTexture->getName().c_str()); + if (!mat->registerObject(buffer)) + { + Con::errorf("Couldn't create placeholder openvr material %s!", buffer); + failed = true; + return true; + } + + materialName = buffer; + } + + loadedModel.model->init(*loadedModel.vrModel, materialName); + } + + if ((loadedModel.modelError > vr::VRRenderModelError_Loading) || + (loadedModel.textureId >= 0 && mLoadedTextures[loadedModel.textureId].textureError > vr::VRRenderModelError_Loading)) + { + failed = true; + } + + if (!failed && ret) + { + *ret = loadedModel.model; + } + return true; +} + +bool OpenVRProvider::getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed) +{ + if (idx < 0 || idx > mLoadedModels.size()) + { + failed = true; + return true; + } + + failed = false; + + OpenVRProvider::LoadedRenderTexture &loadedTexture = mLoadedTextures[idx]; + + if (loadedTexture.textureError > vr::VRRenderModelError_Loading) + { + failed = true; + return true; + } + + if (!loadedTexture.texture) + { + if (!loadedTexture.vrTexture) + { + loadedTexture.textureError = vr::VRRenderModels()->LoadTexture_Async(loadedTexture.vrTextureId, &loadedTexture.vrTexture); + if (loadedTexture.textureError == vr::VRRenderModelError_None) + { + // Load the texture + GFXTexHandle tex; + + const U32 sz = loadedTexture.vrTexture->unWidth * loadedTexture.vrTexture->unHeight * 4; + GBitmap *bmp = new GBitmap(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, false, GFXFormatR8G8B8A8); + + Swizzles::bgra.ToBuffer(bmp->getAddress(0,0,0), loadedTexture.vrTexture->rubTextureMapData, sz); + + char buffer[256]; + dSprintf(buffer, 256, "OVRTEX-%i.png", loadedTexture.vrTextureId); + + FileStream fs; + fs.open(buffer, Torque::FS::File::Write); + bmp->writeBitmap("PNG", fs); + fs.close(); + + tex.set(bmp, &GFXDefaultStaticDiffuseProfile, true, "OpenVR Texture"); + //tex.set(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, 1, (void*)pixels, GFXFormatR8G8B8A8, &GFXDefaultStaticDiffuseProfile, "OpenVR Texture", 1); + + + loadedTexture.targetTexture->setTexture(tex); + loadedTexture.texture = tex; + } + else if (loadedTexture.textureError == vr::VRRenderModelError_Loading) + { + return false; + } + } + } + + if (loadedTexture.textureError > vr::VRRenderModelError_Loading) + { + failed = true; + } + + if (!failed && outTex) + { + *outTex = loadedTexture.texture; + } + + return true; +} + +bool OpenVRProvider::getRenderModelTextureName(S32 idx, String &outName) +{ + if (idx < 0 || idx >= mLoadedTextures.size()) + return false; + + if (mLoadedTextures[idx].targetTexture) + { + outName = mLoadedTextures[idx].targetTexture->getName(); + return true; + } + + return false; +} + +void OpenVRProvider::resetRenderModels() +{ + for (U32 i = 0, sz = mLoadedModels.size(); i < sz; i++) + { + SAFE_DELETE(mLoadedModels[i].model); + if (mLoadedModels[i].vrModel) 
mRenderModels->FreeRenderModel(mLoadedModels[i].vrModel);
+   }
+   for (U32 i = 0, sz = mLoadedTextures.size(); i < sz; i++)
+   {
+      SAFE_DELETE(mLoadedTextures[i].targetTexture);
+      if (mLoadedTextures[i].vrTexture) mRenderModels->FreeTexture(mLoadedTextures[i].vrTexture);
+   }
+   mLoadedModels.clear();
+   mLoadedTextures.clear();
+   mLoadedModelLookup.clear();
+   mLoadedTextureLookup.clear();
+}
+
 OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay()
 {
    return NULL;
@@ -1126,6 +1591,54 @@ void OpenVRProvider::setKeyboardPositionForOverlay(OpenVROverlay *overlay, const
 }
 
+void OpenVRProvider::getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList)
+{
+   for (U32 i = 0; i < vr::k_unMaxTrackedDeviceCount; i++)
+   {
+      vr::TrackedDeviceClass klass = mHMD->GetTrackedDeviceClass(i);
+      if (klass == deviceClass)
+      {
+         outList.push_back(i);
+      }
+   }
+}
+
+StringTableEntry OpenVRProvider::getControllerModel(U32 idx)
+{
+   if (idx >= vr::k_unMaxTrackedDeviceCount || !mRenderModels)
+      return NULL;
+
+   String str = GetTrackedDeviceString(mHMD, idx, vr::Prop_RenderModelName_String, NULL);
+   return StringTable->insert(str, true);
+}
+
+DefineEngineStaticMethod(OpenVR, getControllerDeviceIndexes, const char*, (OpenVRTrackedDeviceClass klass),,
+   "@brief Gets the indexes of devices which match the required device class")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return "";
+   }
+
+   Vector<S32> outList;
+   OPENVR->getControllerDeviceIndexes(klass, outList);
+   return EngineMarshallData<Vector<S32>>(outList);
+}
+
+DefineEngineStaticMethod(OpenVR, getControllerModel, const char*, (S32 idx), ,
+   "@brief Gets the render model name of the device at the given index")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return "";
+   }
+
+   return OPENVR->getControllerModel(idx);
+}
+
 DefineEngineStaticMethod(OpenVR, isDeviceActive, bool, (), ,
    "@brief Used to determine if the OpenVR input device is active\n\n"
@@ -1216,6 +1729,30 @@ DefineEngineStaticMethod(OpenVR, resetSensors, void, (), ,
    OPENVR->resetSensors();
 }
 
+DefineEngineStaticMethod(OpenVR, mapDeviceToEvent, void, (S32 deviceId, S32 eventId), ,
+   "@brief Maps a device to an event code.\n\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return;
+   }
+
+   OPENVR->mapDeviceToEvent(deviceId, eventId);
+}
+
+DefineEngineStaticMethod(OpenVR, resetEventMap, void, (), ,
+   "@brief Resets event map.\n\n"
+   "@ingroup Game")
+{
+   if (!ManagedSingleton<OpenVRProvider>::instanceOrNull())
+   {
+      return;
+   }
+
+   OPENVR->resetEventMap();
+}
+
 // Overlay stuff
 
 DefineEngineFunction(OpenVRIsCompiledIn, bool, (), , "")
diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h
index 4080f1eac..f35684e70 100644
--- a/Engine/source/platform/input/openVR/openVRProvider.h
+++ b/Engine/source/platform/input/openVR/openVRProvider.h
@@ -20,6 +20,11 @@
 class OpenVRHMDDevice;
 class OpenVROverlay;
+class BaseMatInstance;
+class SceneRenderState;
+struct MeshRenderInst;
+class Namespace;
+class NamedTexTarget;
 
 typedef vr::VROverlayInputMethod OpenVROverlayInputMethod;
 typedef vr::VROverlayTransformType OpenVROverlayTransformType;
@@ -29,6 +34,7 @@ typedef vr::ETrackingResult OpenVRTrackingResult;
 typedef vr::ETrackingUniverseOrigin OpenVRTrackingUniverseOrigin;
 typedef vr::EOverlayDirection OpenVROverlayDirection;
 typedef vr::EVRState OpenVRState;
+typedef vr::TrackedDeviceClass OpenVRTrackedDeviceClass;
 
 DefineEnumType(OpenVROverlayInputMethod);
 DefineEnumType(OpenVROverlayTransformType);
@@ -38,6 +44,7 @@ DefineEnumType(OpenVRTrackingResult);
 DefineEnumType(OpenVRTrackingUniverseOrigin);
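// Illustrative usage sketch (assumed calling context, not code from this patch):
// how the controller enumeration and device-to-event mapping added above are
// expected to be driven from C++ once the provider is active.
//
//    vr::TrackedDeviceClass klass = vr::TrackedDeviceClass_Controller;
//    Vector<S32> controllers;
//    OPENVR->getControllerDeviceIndexes(klass, controllers);
//
//    OPENVR->resetEventMap();
//    for (U32 i = 0; i < controllers.size(); i++)
//       OPENVR->mapDeviceToEvent(controllers[i], i);   // OVR_SENSORROT[i] etc. now report this device
//
//    if (!controllers.empty())
//       IDevicePose pose = OPENVR->getTrackedDevicePose(controllers[0]);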
 DefineEnumType(OpenVROverlayDirection);
 DefineEnumType(OpenVRState);
+DefineEnumType(OpenVRTrackedDeviceClass);
 
 namespace OpenVRUtil
 {
@@ -112,6 +119,36 @@ public:
    }
 };
 
+/// Simple class to handle rendering native OpenVR model data
+class OpenVRRenderModel
+{
+public:
+   typedef GFXVertexPNT VertexType;
+   GFXVertexBufferHandle<VertexType> mVertexBuffer;
+   GFXPrimitiveBufferHandle mPrimitiveBuffer;
+   BaseMatInstance* mMaterialInstance; ///< Material to use for rendering. NOTE:
+   Box3F mLocalBox;
+
+   OpenVRRenderModel() : mMaterialInstance(NULL)
+   {
+   }
+
+   ~OpenVRRenderModel()
+   {
+      SAFE_DELETE(mMaterialInstance);
+   }
+
+   Box3F getWorldBox(MatrixF &mat)
+   {
+      Box3F ret = mLocalBox;
+      mat.mul(ret);
+      return ret;
+   }
+
+   bool init(const vr::RenderModel_t & vrModel, StringTableEntry materialName);
+   void draw(SceneRenderState *state, MeshRenderInst* renderInstance);
+};
+
 struct OpenVRRenderState
 {
    vr::IVRSystem *mHMD;
@@ -157,15 +194,38 @@ public:
       DIFF_RAW = (DIFF_ACCEL | DIFF_ANGVEL | DIFF_MAG),
    };
 
+   struct LoadedRenderModel
+   {
+      StringTableEntry name;
+      vr::RenderModel_t *vrModel;
+      OpenVRRenderModel *model;
+      vr::EVRRenderModelError modelError;
+      S32 textureId;
+      bool loadedTexture;
+   };
+
+   struct LoadedRenderTexture
+   {
+      U32 vrTextureId;
+      vr::RenderModel_TextureMap_t *vrTexture;
+      GFXTextureObject *texture;
+      NamedTexTarget *targetTexture;
+      vr::EVRRenderModelError textureError;
+   };
+
    OpenVRProvider();
    ~OpenVRProvider();
 
+   typedef Signal<void(const vr::VREvent_t &evt)> VREventSignal;
+   VREventSignal& getVREventSignal() { return mVREventSignal; }
+
    static void staticInit();
 
    bool enable();
    bool disable();
 
    bool getActive() { return mHMD != NULL; }
+   inline vr::IVRRenderModels* getRenderModels() { return mRenderModels; }
 
    /// @name Input handling
    /// {
@@ -216,6 +276,11 @@ public:
    void submitInputChanges();
 
    void resetSensors();
+
+   void mapDeviceToEvent(U32 deviceIdx, S32 eventIdx);
+   void resetEventMap();
+
+   IDevicePose getTrackedDevicePose(U32 idx);
    /// }
 
    /// @name Overlay registration
@@ -224,6 +289,16 @@ public:
    void unregisterOverlay(OpenVROverlay* overlay);
    /// }
 
+   /// @name Model loading
+   /// {
+   const S32 preloadRenderModel(StringTableEntry name);
+   const S32 preloadRenderModelTexture(U32 index);
+   bool getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed);
+   bool getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed);
+   bool getRenderModelTextureName(S32 idx, String &outName);
+   void resetRenderModels();
+   /// }
+
    /// @name Console API
    /// {
@@ -237,6 +312,9 @@ public:
    void setKeyboardTransformAbsolute(const MatrixF &xfm);
    void setKeyboardPositionForOverlay(OpenVROverlay *overlay, const RectI &rect);
+
+   void getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector<S32> &outList);
+   StringTableEntry getControllerModel(U32 idx);
    /// }
 
    /// @name OpenVR state
   /// {
@@ -250,6 +328,9 @@ public:
    IDevicePose mPreviousInputTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 
    U32 mValidPoseCount;
+
+   vr::VRControllerState_t mCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
+   vr::VRControllerState_t mPreviousCurrentControllerState[vr::k_unMaxTrackedDeviceCount];
 
    char mDeviceClassChar[vr::k_unMaxTrackedDeviceCount];
 
    OpenVRRenderState mHMDRenderState;
@@ -258,6 +339,16 @@ public:
    vr::ETrackingUniverseOrigin mTrackingSpace;
 
    Vector<OpenVROverlay*> mOverlays;
+
+   VREventSignal mVREventSignal;
+   Namespace *mOpenVRNS;
+
+   Vector<LoadedRenderModel> mLoadedModels;
+   Vector<LoadedRenderTexture> mLoadedTextures;
+   Map<StringTableEntry, S32> mLoadedModelLookup;
+   Map<U32, S32> mLoadedTextureLookup;
+
+   Map<U32, S32> mDeviceEventMap;
    /// }
 
    GuiCanvas* mDrawCanvas;
diff --git
a/Engine/source/platform/input/openVR/openVRTrackedObject.cpp b/Engine/source/platform/input/openVR/openVRTrackedObject.cpp
new file mode 100644
index 000000000..a4467f55c
--- /dev/null
+++ b/Engine/source/platform/input/openVR/openVRTrackedObject.cpp
@@ -0,0 +1,981 @@
+#include "platform/platform.h"
+#include "platform/input/openVR/openVRTrackedObject.h"
+#include "platform/input/openVR/openVRProvider.h"
+
+#include "math/mathIO.h"
+#include "scene/sceneRenderState.h"
+#include "console/consoleTypes.h"
+#include "core/stream/bitStream.h"
+#include "core/resourceManager.h"
+#include "materials/materialManager.h"
+#include "materials/baseMatInstance.h"
+#include "renderInstance/renderPassManager.h"
+#include "lighting/lightQuery.h"
+#include "console/engineAPI.h"
+#include "gfx/gfxTextureManager.h"
+#include "gfx/sim/debugDraw.h"
+#include "gfx/gfxTransformSaver.h"
+#include "environment/skyBox.h"
+#include "collision/boxConvex.h"
+#include "collision/concretePolyList.h"
+#include "T3D/physics/physicsPlugin.h"
+#include "T3D/physics/physicsCollision.h"
+#include "T3D/physics/physicsBody.h"
+
+#ifdef TORQUE_EXTENDED_MOVE
+#include "T3D/gameBase/extended/extendedMove.h"
+#endif
+
+
+bool OpenVRTrackedObject::smDebugControllerMovePosition = true;
+bool OpenVRTrackedObject::smDebugControllerPosition = false;
+
+static const U32 sCollisionMoveMask = (PlayerObjectType |
+                                       StaticShapeObjectType | VehicleObjectType);
+
+U32 OpenVRTrackedObject::sServerCollisionMask = sCollisionMoveMask; // ItemObjectType
+U32 OpenVRTrackedObject::sClientCollisionMask = sCollisionMoveMask;
+
+//-----------------------------------------------------------------------------
+
+IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData);
+
+OpenVRTrackedObjectData::OpenVRTrackedObjectData() :
+   mShapeFile(NULL)
+{
+   mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02);
+   mCollisionBoxMax = Point3F(0.02, 0.05, 0.02);
+}
+
+OpenVRTrackedObjectData::~OpenVRTrackedObjectData()
+{
+}
+
+bool OpenVRTrackedObjectData::onAdd()
+{
+   if (Parent::onAdd())
+   {
+      return true;
+   }
+
+   return false;
+}
+
+bool OpenVRTrackedObjectData::preload(bool server, String &errorStr)
+{
+   if (!Parent::preload(server, errorStr))
+      return false;
+
+   bool error = false;
+   if (!server)
+   {
+      mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL;
+   }
+
+   return !error;
+}
+
+void OpenVRTrackedObjectData::initPersistFields()
+{
+   addGroup("Render Components");
+   addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model.");
+   addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min");
+   addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box max");
+   endGroup("Render Components");
+
+   Parent::initPersistFields();
+}
+
+void OpenVRTrackedObjectData::packData(BitStream* stream)
+{
+   Parent::packData(stream);
+
+   stream->writeString(mShapeFile);
+}
+
+void OpenVRTrackedObjectData::unpackData(BitStream* stream)
+{
+   Parent::unpackData(stream);
+
+   mShapeFile = stream->readSTString();
+}
+
+//-----------------------------------------------------------------------------
+
+
+IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject);
+
+ConsoleDocClass(OpenVRTrackedObject,
+   "@brief Renders and handles interactions with OpenVR controllers and tracked objects.\n\n"
+   "This class implements basic rendering and interactions with OpenVR controllers.\n\n"
+   "The object should be controlled by a player object. 
Controllers will be rendered at\n" + "the correct position regardless of the current transform of the object.\n" + "@ingroup OpenVR\n"); + + +//----------------------------------------------------------------------------- +// Object setup and teardown +//----------------------------------------------------------------------------- +OpenVRTrackedObject::OpenVRTrackedObject() : + mDataBlock(NULL), + mShapeInstance(NULL), + mBasicModel(NULL), + mDeviceIndex(-1), + mMappedMoveIndex(-1), + mIgnoreParentRotation(true), + mConvexList(new Convex()), + mPhysicsRep(NULL) +{ + // Flag this object so that it will always + // be sent across the network to clients + mNetFlags.set(Ghostable | ScopeAlways); + + // Set it as a "static" object that casts shadows + mTypeMask |= StaticObjectType | StaticShapeObjectType; + + mPose.connected = false; +} + +OpenVRTrackedObject::~OpenVRTrackedObject() +{ + clearRenderData(); + delete mConvexList; +} + +void OpenVRTrackedObject::updateRenderData() +{ + clearRenderData(); + + if (!mDataBlock) + return; + + // Are we using a model? + if (mDataBlock->mShape) + { + if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape) + { + delete mShapeInstance; + mShapeInstance = NULL; + } + + if (!mShapeInstance) + { + mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject()); + } + } + else + { + setupRenderDataFromModel(isClientObject()); + } +} + +void OpenVRTrackedObject::setupRenderDataFromModel(bool loadComponentModels) +{ + clearRenderData(); + + if (!OPENVR || !OPENVR->isEnabled()) + return; + + vr::IVRRenderModels *models = OPENVR->getRenderModels(); + if (!models) + return; + + if (!mShapeInstance && mModelName && mModelName[0] != '\0') + { + bool failed = false; + S32 idx = OPENVR->preloadRenderModel(mModelName); + while (!OPENVR->getRenderModel(idx, &mBasicModel, failed)) + { + if (failed) + break; + } + } + + if (loadComponentModels) + { + mRenderComponents.setSize(models->GetComponentCount(mModelName)); + + for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++) + { + RenderModelSlot &slot = mRenderComponents[i]; + char buffer[1024]; + + slot.mappedNodeIdx = -1; + slot.componentName = NULL; + slot.nativeModel = NULL; + + U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer)); + if (result == 0) + continue; + +#ifdef DEBUG_CONTROLLER_MODELS + Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer); +#endif + + slot.componentName = StringTable->insert(buffer, true); + + result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer)); + if (result == 0) + { +#ifdef DEBUG_CONTROLLER_MODELS + Con::printf("Controller[%s] component %i NO MODEL", mModelName, i); +#endif + continue; + } + +#ifdef DEBUG_CONTROLLER_MODELS + Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName); +#endif + + bool failed = false; + S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true)); + while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed)) + { + if (failed) + break; + } + } + } +} + +void OpenVRTrackedObject::clearRenderData() +{ + mBasicModel = NULL; + mRenderComponents.clear(); +} + +//----------------------------------------------------------------------------- +// Object Editing +//----------------------------------------------------------------------------- +void OpenVRTrackedObject::initPersistFields() +{ + // SceneObject already handles exposing the transform + Parent::initPersistFields(); + + addField("deviceIndex", 
TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track");
+   addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track");
+   addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "Ignore the rotation of the parent object");
+
+   static bool conInit = false;
+   if (!conInit)
+   {
+      Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition);
+      Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition);
+      conInit = true;
+   }
+}
+
+void OpenVRTrackedObject::inspectPostApply()
+{
+   Parent::inspectPostApply();
+
+   // Flag the network mask to send the updates
+   // to the client object
+   setMaskBits(UpdateMask);
+}
+
+bool OpenVRTrackedObject::onAdd()
+{
+   if (!Parent::onAdd())
+      return false;
+
+   // Set up a 1x1x1 bounding box
+   mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f),
+               Point3F(0.5f, 0.5f, 0.5f));
+
+   resetWorldBox();
+
+   // Add this object to the scene
+   addToScene();
+
+   if (mDataBlock)
+   {
+      mObjBox.minExtents = mDataBlock->mCollisionBoxMin;
+      mObjBox.maxExtents = mDataBlock->mCollisionBoxMax;
+      resetWorldBox();
+   }
+   else
+   {
+      setGlobalBounds();
+   }
+
+   return true;
+}
+
+void OpenVRTrackedObject::onRemove()
+{
+   // Remove this object from the scene
+   removeFromScene();
+
+   clearRenderData();
+
+   SAFE_DELETE(mPhysicsRep);
+
+   Parent::onRemove();
+}
+
+void OpenVRTrackedObject::_updatePhysics()
+{
+   SAFE_DELETE(mPhysicsRep);
+
+   if (!PHYSICSMGR)
+      return;
+
+   PhysicsCollision *colShape = NULL;
+   MatrixF offset(true);
+   colShape = PHYSICSMGR->createCollision();
+   colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset);
+
+   if (colShape)
+   {
+      PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? 
"server" : "client"); + mPhysicsRep = PHYSICSMGR->createBody(); + mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world); + mPhysicsRep->setTransform(getTransform()); + } +} + +bool OpenVRTrackedObject::onNewDataBlock(GameBaseData *dptr, bool reload) +{ + mDataBlock = dynamic_cast(dptr); + if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload)) + return false; + + // Setup the models + clearRenderData(); + + mObjBox.minExtents = mDataBlock->mCollisionBoxMin; + mObjBox.maxExtents = mDataBlock->mCollisionBoxMax; + + mGlobalBounds = false; + + resetWorldBox(); + + _updatePhysics(); + + scriptOnNewDataBlock(); + + return true; +} + +void OpenVRTrackedObject::setInteractObject(SceneObject* object, bool holding) +{ + mInteractObject = object; + mHoldInteractedObject = holding; +} + +void OpenVRTrackedObject::setTransform(const MatrixF & mat) +{ + // Let SceneObject handle all of the matrix manipulation + Parent::setTransform(mat); + + // Dirty our network mask so that the new transform gets + // transmitted to the client object + setMaskBits(UpdateMask); +} + +void OpenVRTrackedObject::setModelName(String &modelName) +{ + if (!isServerObject()) + return; + + mModelName = StringTable->insert(modelName.c_str(), true); + setMaskBits(UpdateMask); +} + +U32 OpenVRTrackedObject::packUpdate(NetConnection *conn, U32 mask, BitStream *stream) +{ + // Allow the Parent to get a crack at writing its info + U32 retMask = Parent::packUpdate(conn, mask, stream); + + // Write our transform information + if (stream->writeFlag(mask & UpdateMask)) + { + mathWrite(*stream, getTransform()); + mathWrite(*stream, getScale()); + + stream->write((S16)mDeviceIndex); + stream->write((S16)mMappedMoveIndex); + stream->writeString(mModelName); + } + + return retMask; +} + +void OpenVRTrackedObject::unpackUpdate(NetConnection *conn, BitStream *stream) +{ + // Let the Parent read any info it sent + Parent::unpackUpdate(conn, stream); + + if (stream->readFlag()) // UpdateMask + { + mathRead(*stream, &mObjToWorld); + mathRead(*stream, &mObjScale); + + setTransform(mObjToWorld); + + S16 readDeviceIndex; + S16 readMoveIndex; + stream->read(&readDeviceIndex); + stream->read(&readMoveIndex); + + mDeviceIndex = readDeviceIndex; + mMappedMoveIndex = readMoveIndex; + mModelName = stream->readSTString(); + + updateRenderData(); + } + +} + +void OpenVRTrackedObject::writePacketData(GameConnection *conn, BitStream *stream) +{ + Parent::writePacketData(conn, stream); +} + +void OpenVRTrackedObject::readPacketData(GameConnection *conn, BitStream *stream) +{ + Parent::readPacketData(conn, stream); +} + +MatrixF OpenVRTrackedObject::getTrackedTransform() +{ + IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); + MatrixF trackedMat(1); + + pose.orientation.setMatrix(&trackedMat); + trackedMat.setPosition(pose.position); + + return trackedMat; +} + +MatrixF OpenVRTrackedObject::getLastTrackedTransform() +{ + MatrixF trackedMat(1); + + mPose.orientation.setMatrix(&trackedMat); + trackedMat.setPosition(mPose.position); + + return trackedMat; +} + +MatrixF OpenVRTrackedObject::getBaseTrackingTransform() +{ + if (isMounted()) + { + MatrixF mat; + + mMount.object->getMountTransform(mMount.node, mMount.xfm, &mat); + if (mIgnoreParentRotation) + { + Point3F pos = mat.getPosition(); + mat = MatrixF(1); + mat.setPosition(pos); + } + //mat.inverse(); + return mat; + } + + return MatrixF(1); +} + +void OpenVRTrackedObject::prepRenderImage(SceneRenderState *state) +{ + RenderPassManager *renderPass = 
state->getRenderPass(); + + // debug rendering for now + + if (mDeviceIndex < 0) + return; + + // Current pose + IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); + IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0); + + if (!pose.connected && !mPose.connected) + return; + + MatrixF offsetMat = getBaseTrackingTransform(); + //offsetMat.inverse(); + + Point3F pos = offsetMat.getPosition(); + //Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z); + + const F32 CONTROLLER_SCALE = 0.1; + + if (smDebugControllerPosition) + { + ColorI drawColor = ColorI::GREEN; + if (!pose.valid) + { + drawColor = ColorI::RED; + } + + // Draw Camera + /* + DisplayPose cameraPose; + OPENVR->getFrameEyePose(&cameraPose, -1); + Point3F cameraCenter(0); + MatrixF cameraMat(1); + cameraPose.orientation.setMatrix(&cameraMat); + cameraMat.setPosition(cameraPose.position); + cameraMat.mulP(cameraCenter); + //DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN); + + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box + */ + + // Draw Tracked HMD Pos + Point3F hmdCenter(0, 0, 0); + MatrixF hmdMat(1); + hmdPose.orientation.setMatrix(&hmdMat); + hmdMat.setPosition(hmdPose.position); + hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos) + hmdMat = offsetMat * hmdMat; + hmdMat.mulP(hmdCenter); + DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED); + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box + + + // Draw Controller + MatrixF mat(1); + pose.orientation.setMatrix(&mat); + mat.setPosition(pose.position); + mat.inverse(); // same as HMD + mat = offsetMat * mat; + + Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0); + Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0); + Point3F middle(0, 0, 0); + + Point3F center(0, 0, 0); + mat.mulP(center); + + //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE); + + mat.mulP(middleStart); + mat.mulP(middle); + mat.mulP(middleEnd); + + char buffer[256]; + dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z); + DebugDrawer::get()->drawText(middle, buffer); + DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back + DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box + DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE); + } + + if (isClientObject() && smDebugControllerMovePosition) + { + MatrixF transform = getRenderTransform(); + transform.scale(mObjScale); + DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform); + + // jamesu - grab server object pose for debugging + OpenVRTrackedObject* tracked = static_cast(getServerObject()); + if (tracked) + { + mPose = tracked->mPose; + } + + ColorI drawColor = ColorI::GREEN; + if (!pose.valid) + { + drawColor = ColorI::RED; + } + // Draw Controller + MatrixF mat(1); + mPose.orientation.setMatrix(&mat); + mat.setPosition(mPose.position); + mat.inverse(); // same as HMD + mat = offsetMat * mat; + + Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0); + Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0); + Point3F 
middle(0, 0, 0); + + Point3F center(0, 0, 0); + mat.mulP(center); + + //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE); + + mat.mulP(middleStart); + mat.mulP(middle); + mat.mulP(middleEnd); + + char buffer[256]; + dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z); + DebugDrawer::get()->drawText(middle, buffer); + DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back + DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box + DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE); + } + + // Controller matrix base + MatrixF trackedMat = getTrackedTransform(); + MatrixF invTrackedMat(1); + + invTrackedMat = trackedMat; + invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos) + + invTrackedMat = getBaseTrackingTransform() * invTrackedMat; + trackedMat = invTrackedMat; + trackedMat.inverse(); + + // Render the controllers, using either the render model or the shape + if (mShapeInstance) + { + // Calculate the distance of this object from the camera + Point3F cameraOffset = invTrackedMat.getPosition(); + cameraOffset -= state->getDiffuseCameraPosition(); + F32 dist = cameraOffset.len(); + if (dist < 0.01f) + dist = 0.01f; + + // Set up the LOD for the shape + F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z)); + + mShapeInstance->setDetailFromDistance(state, dist * invScale); + + // Make sure we have a valid level of detail + if (mShapeInstance->getCurrentDetail() < 0) + return; + + // GFXTransformSaver is a handy helper class that restores + // the current GFX matrices to their original values when + // it goes out of scope at the end of the function + GFXTransformSaver saver; + + // Set up our TS render state + TSRenderState rdata; + rdata.setSceneState(state); + rdata.setFadeOverride(1.0f); + + // We might have some forward lit materials + // so pass down a query to gather lights. + LightQuery query; + query.init(getWorldSphere()); + rdata.setLightQuery(&query); + + // Set the world matrix to the objects render transform + MatrixF mat = trackedMat; + + mat.scale(mObjScale); + GFX->setWorldMatrix(mat); + + // TODO: move the nodes about for components + + mShapeInstance->animate(); + mShapeInstance->render(rdata); + } + else if (mRenderComponents.size() > 0) + { + vr::IVRRenderModels *models = OPENVR->getRenderModels(); + if (!models) + return; + + vr::IVRSystem* vrs = vr::VRSystem(); + + if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState)) + { + return; + } + + for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++) + { + RenderModelSlot slot = mRenderComponents[i]; + vr::RenderModel_ControllerMode_State_t modeState; + vr::RenderModel_ComponentState_t componentState; + + modeState.bScrollWheelVisible = false; + + if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState)) + { + MeshRenderInst *ri = renderPass->allocInst(); + + // Set our RenderInst as a standard mesh render + ri->type = RenderPassManager::RIT_Mesh; + + // Calculate our sorting point + if (state && slot.nativeModel) + { + // Calculate our sort point manually. 
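+               // The render bin sorts by squared distance from the camera, so the
+               // component model's local box transformed into world space (via the
+               // inverse tracked matrix) is used as the reference for this instance.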
+ const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat); + ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition()); + } + else + { + ri->sortDistSq = 0.0f; + } + + MatrixF newTransform = trackedMat; + MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel); + MatrixF offComponentMat(1); + OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat); + + newTransform = offComponentMat * newTransform; + + newTransform.inverse(); + + //DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE); + + if (!slot.nativeModel) + continue; + if (i < 1) + continue; + + // Set up our transforms + ri->objectToWorld = renderPass->allocUniqueXform(newTransform); + ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View); + ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection); + + // If our material needs lights then fill the RIs + // light vector with the best lights. + if (true) + { + LightQuery query; + Point3F center(0, 0, 0); + invTrackedMat.mulP(center); + query.init(SphereF(center, 10.0f)); + query.getLights(ri->lights, 8); + } + + // Draw model + slot.nativeModel->draw(state, ri); + state->getRenderPass()->addInst(ri); + } + } + } + else if (mBasicModel) + { + MeshRenderInst *ri = renderPass->allocInst(); + + // Set our RenderInst as a standard mesh render + ri->type = RenderPassManager::RIT_Mesh; + + // Calculate our sorting point + if (state) + { + // Calculate our sort point manually. + const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat); + ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition()); + } + else + { + ri->sortDistSq = 0.0f; + } + + MatrixF newTransform = invTrackedMat; + // Set up our transforms + ri->objectToWorld = renderPass->allocUniqueXform(newTransform); + ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View); + ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection); + + // If our material needs lights then fill the RIs + // light vector with the best lights. 
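+      // The light query below gathers up to eight of the best lights within a
+      // 10 m sphere around the controller for forward-lit materials.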
+ if (true) + { + LightQuery query; + Point3F center(0, 0, 0); + invTrackedMat.mulP(center); + query.init(SphereF(center, 10.0f)); + query.getLights(ri->lights, 8); + } + + // Draw model + mBasicModel->draw(state, ri); + state->getRenderPass()->addInst(ri); + } +} + +U32 OpenVRTrackedObject::getCollisionMask() +{ + if (isServerObject()) + return sServerCollisionMask; + else + return sClientCollisionMask; +} + +void OpenVRTrackedObject::updateWorkingCollisionSet() +{ + const U32 mask = getCollisionMask(); + Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale()); + F32 len = (50) * TickSec; + F32 l = (len * 1.1) + 0.1; // fudge factor + convexBox.minExtents -= Point3F(l, l, l); + convexBox.maxExtents += Point3F(l, l, l); + + disableCollision(); + mConvexList->updateWorkingList(convexBox, mask); + enableCollision(); +} + +void OpenVRTrackedObject::updateMove(const Move *move) +{ + // Set transform based on move + +#ifdef TORQUE_EXTENDED_MOVE + + const ExtendedMove* emove = dynamic_cast(move); + if (!emove) + return; + + U32 emoveIndex = mMappedMoveIndex; + if (emoveIndex >= ExtendedMove::MaxPositionsRotations) + emoveIndex = 0; + + //IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); + //Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex); + + if (!emove->EulerBasedRotation[emoveIndex]) + { + AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]); + // Update our pose based on the move info + mPose.orientation = inRot; + mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]); + mPose.valid = true; + mPose.connected = true; + } + + // Set transform based on move pose + MatrixF trackedMat(1); + MatrixF invTrackedMat(1); + + mPose.orientation.setMatrix(&trackedMat); + trackedMat.setPosition(mPose.position); + + invTrackedMat = trackedMat; + invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos) + + invTrackedMat = getBaseTrackingTransform() * invTrackedMat; + trackedMat = invTrackedMat; + trackedMat.inverse(); + + SceneObject::setTransform(invTrackedMat); + + if (mPhysicsRep) + mPhysicsRep->setTransform(invTrackedMat); +#endif +} + +void OpenVRTrackedObject::processTick(const Move *move) +{ + // Perform collision checks + if (isServerObject()) + { + updateMove(move); + + if (!mPhysicsRep) + { + updateWorkingCollisionSet(); + } + } + + Parent::processTick(move); +} + +void OpenVRTrackedObject::interpolateTick(F32 delta) +{ + // Set latest transform + + Parent::interpolateTick(delta); +} + +void OpenVRTrackedObject::advanceTime(F32 dt) +{ + Parent::advanceTime(dt); +} + +bool OpenVRTrackedObject::castRay(const Point3F &start, const Point3F &end, RayInfo* info) +{ + if (!mPose.connected || !mPose.valid) + return false; + + // Collide against bounding box. + F32 st, et, fst = 0.0f, fet = 1.0f; + F32 *bmin = &mObjBox.minExtents.x; + F32 *bmax = &mObjBox.maxExtents.x; + F32 const *si = &start.x; + F32 const *ei = &end.x; + + for (S32 i = 0; i < 3; i++) { + if (*si < *ei) { + if (*si > *bmax || *ei < *bmin) + return false; + F32 di = *ei - *si; + st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f; + et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f; + } + else { + if (*ei > *bmax || *si < *bmin) + return false; + F32 di = *ei - *si; + st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f; + et = (*ei < *bmin) ? 
(*bmin - *si) / di : 1.0f;
+      }
+      if (st > fst) fst = st;
+      if (et < fet) fet = et;
+      if (fet < fst)
+         return false;
+      bmin++; bmax++;
+      si++; ei++;
+   }
+
+   info->normal = start - end;
+   info->normal.normalizeSafe();
+   getTransform().mulV(info->normal);
+
+   info->t = fst;
+   info->object = this;
+   info->point.interpolate(start, end, fst);
+   info->material = 0;
+   return true;
+}
+
+void OpenVRTrackedObject::buildConvex(const Box3F& box, Convex* convex)
+{
+   // These should really come out of a pool
+   mConvexList->collectGarbage();
+
+   Box3F realBox = box;
+   mWorldToObj.mul(realBox);
+   realBox.minExtents.convolveInverse(mObjScale);
+   realBox.maxExtents.convolveInverse(mObjScale);
+
+   if (realBox.isOverlapped(getObjBox()) == false)
+      return;
+
+   // Just return a box convex for the entire shape...
+   Convex* cc = 0;
+   CollisionWorkingList& wl = convex->getWorkingList();
+   for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) {
+      if (itr->mConvex->getType() == BoxConvexType &&
+          itr->mConvex->getObject() == this) {
+         cc = itr->mConvex;
+         break;
+      }
+   }
+   if (cc)
+      return;
+
+   // Create a new convex.
+   BoxConvex* cp = new BoxConvex;
+   mConvexList->registerObject(cp);
+   convex->addToWorkingList(cp);
+   cp->init(this);
+
+   mObjBox.getCenter(&cp->mCenter);
+   cp->mSize.x = mObjBox.len_x() / 2.0f;
+   cp->mSize.y = mObjBox.len_y() / 2.0f;
+   cp->mSize.z = mObjBox.len_z() / 2.0f;
+}
+
+bool OpenVRTrackedObject::testObject(SceneObject* enter)
+{
+   return false; // TODO
+}
+
+DefineEngineMethod(OpenVRTrackedObject, setModelName, void, (String modelName),, "Set model name. Typically you should do this from the client to update the server representation.")
+{
+   object->setModelName(modelName);
+}
diff --git a/Engine/source/platform/input/openVR/openVRTrackedObject.h b/Engine/source/platform/input/openVR/openVRTrackedObject.h
new file mode 100644
index 000000000..572649a8b
--- /dev/null
+++ b/Engine/source/platform/input/openVR/openVRTrackedObject.h
@@ -0,0 +1,155 @@
+#ifndef _OPENVR_TRACKED_OBJECT_H_
+#define _OPENVR_TRACKED_OBJECT_H_
+
+#ifndef _GAMEBASE_H_
+#include "T3D/gameBase/gameBase.h"
+#endif
+#ifndef _GFXVERTEXBUFFER_H_
+#include "gfx/gfxVertexBuffer.h"
+#endif
+#ifndef _GFXPRIMITIVEBUFFER_H_
+#include "gfx/gfxPrimitiveBuffer.h"
+#endif
+#ifndef _TSSHAPEINSTANCE_H_
+#include "ts/tsShapeInstance.h"
+#endif
+#include "collision/earlyOutPolyList.h"
+
+#include <openvr.h>
+
+class BaseMatInstance;
+class OpenVRRenderModel;
+class PhysicsBody;
+
+class OpenVRTrackedObjectData : public GameBaseData {
+public:
+   typedef GameBaseData Parent;
+
+   StringTableEntry mShapeFile;
+   Resource<TSShape> mShape; ///< Torque model
+
+   Point3F mCollisionBoxMin;
+   Point3F mCollisionBoxMax;
+
+public:
+
+   OpenVRTrackedObjectData();
+   ~OpenVRTrackedObjectData();
+
+   DECLARE_CONOBJECT(OpenVRTrackedObjectData);
+
+   bool onAdd();
+   bool preload(bool server, String &errorStr);
+
+   static void initPersistFields();
+
+   virtual void packData(BitStream* stream);
+   virtual void unpackData(BitStream* stream);
+};
+
+/// Implements a GameObject which tracks an OpenVR controller
+class OpenVRTrackedObject : public GameBase
+{
+   typedef GameBase Parent;
+
+   enum MaskBits
+   {
+      UpdateMask = Parent::NextFreeMask << 0,
+      NextFreeMask = Parent::NextFreeMask << 1
+   };
+
+   struct RenderModelSlot
+   {
+      StringTableEntry componentName; ///< Component name
+      S16 mappedNodeIdx; ///< Mapped node idx in mShape
+      OpenVRRenderModel *nativeModel; ///< Native model
+   };
+
+   OpenVRTrackedObjectData *mDataBlock;
+
+   /// @name Rendering
+   /// {
+
TSShapeInstance *mShapeInstance; ///< Shape used to render controller (uses native model otherwise) + StringTableEntry mModelName; + OpenVRRenderModel *mBasicModel; ///< Basic model + Vector mRenderComponents; + /// } + + S32 mDeviceIndex; ///< Controller idx in openvr (for direct updating) + S32 mMappedMoveIndex; ///< Movemanager move index for rotation + + vr::VRControllerState_t mCurrentControllerState; + vr::VRControllerState_t mPreviousControllerState; + + IDevicePose mPose; ///< Current openvr pose data, or reconstructed data from the client + + Convex* mConvexList; + EarlyOutPolyList mClippedList; + PhysicsBody *mPhysicsRep; + + SimObjectPtr mCollisionObject; ///< Object we're currently colliding with + SimObjectPtr mInteractObject; ///< Object we've designated as important to interact with + + bool mHoldInteractedObject; ///< Performs pickup logic with mInteractObject + bool mIgnoreParentRotation; ///< Ignores the rotation of the parent object + + static bool smDebugControllerPosition; ///< Shows latest controller position in DebugDrawer + static bool smDebugControllerMovePosition; ///< Shows move position in DebugDrawer + static U32 sServerCollisionMask; + static U32 sClientCollisionMask; + +public: + OpenVRTrackedObject(); + virtual ~OpenVRTrackedObject(); + + void updateRenderData(); + void setupRenderDataFromModel(bool loadComponentModels); + + void clearRenderData(); + + DECLARE_CONOBJECT(OpenVRTrackedObject); + + static void initPersistFields(); + + virtual void inspectPostApply(); + + bool onAdd(); + void onRemove(); + + + void _updatePhysics(); + bool onNewDataBlock(GameBaseData *dptr, bool reload); + + void setInteractObject(SceneObject* object, bool holding); + + void setTransform(const MatrixF &mat); + void setModelName(String &modelName); + + U32 packUpdate(NetConnection *conn, U32 mask, BitStream *stream); + void unpackUpdate(NetConnection *conn, BitStream *stream); + void writePacketData(GameConnection *conn, BitStream *stream); + void readPacketData(GameConnection *conn, BitStream *stream); + + void prepRenderImage(SceneRenderState *state); + + MatrixF getTrackedTransform(); + MatrixF getLastTrackedTransform(); + MatrixF getBaseTrackingTransform(); + + U32 getCollisionMask(); + void updateWorkingCollisionSet(); + + // Time management + void updateMove(const Move *move); + void processTick(const Move *move); + void interpolateTick(F32 delta); + void advanceTime(F32 dt); + + // Collision + bool castRay(const Point3F &start, const Point3F &end, RayInfo* info); + void buildConvex(const Box3F& box, Convex* convex); + bool testObject(SceneObject* enter); + +}; + +#endif // _OPENVR_TRACKED_OBJECT_H_ \ No newline at end of file diff --git a/Engine/source/platform/output/IDisplayDevice.h b/Engine/source/platform/output/IDisplayDevice.h index 66cdf683d..075d0acaa 100644 --- a/Engine/source/platform/output/IDisplayDevice.h +++ b/Engine/source/platform/output/IDisplayDevice.h @@ -40,6 +40,11 @@ typedef struct DisplayPose Point3F velocity; Point3F angularVelocity; +#ifdef DEBUG_DISPLAY_POSE + MatrixF actualMatrix; + MatrixF originalMatrix; +#endif + U32 state; /// Generic state bool valid; /// Pose set diff --git a/Tools/CMake/modules/module_openvr.cmake b/Tools/CMake/modules/module_openvr.cmake index 0d8d2e8c6..cc8e8c76e 100644 --- a/Tools/CMake/modules/module_openvr.cmake +++ b/Tools/CMake/modules/module_openvr.cmake @@ -27,4 +27,6 @@ if(TORQUE_OPENVR) endif() addLib( "openvr_api" ) endif() + + addDef(TORQUE_OPENVR) endif() From 212ac36cc1d1293c26e175154280d3e2bb0cfff7 Mon Sep 
17 00:00:00 2001 From: James Urquhart Date: Tue, 12 Jul 2016 23:30:11 +0100 Subject: [PATCH 32/33] Tidy up indentation in openvr changes --- .../T3D/gameBase/extended/extendedMove.cpp | 36 +- Engine/source/T3D/gameBase/gameConnection.cpp | 30 +- Engine/source/T3D/player.cpp | 74 +- Engine/source/gfx/D3D11/gfxD3D11Device.cpp | 743 +++++----- Engine/source/gfx/D3D11/gfxD3D11Target.cpp | 46 +- Engine/source/gfx/gfxAdapter.h | 4 +- Engine/source/gfx/gfxDevice.cpp | 4 +- Engine/source/gfx/gfxDevice.h | 14 +- Engine/source/gfx/gfxDrawUtil.cpp | 2 +- Engine/source/gfx/gfxFontRenderBatcher.cpp | 2 +- Engine/source/gfx/gfxInit.cpp | 60 +- Engine/source/gfx/gfxInit.h | 8 +- Engine/source/gfx/gfxTextureProfile.h | 6 +- Engine/source/gfx/sim/debugDraw.cpp | 108 +- Engine/source/gfx/sim/debugDraw.h | 4 +- Engine/source/gui/3d/guiTSControl.cpp | 72 +- .../platform/input/oculusVR/oculusVRDevice.h | 4 +- .../input/oculusVR/oculusVRHMDDevice.cpp | 556 ++++---- .../input/oculusVR/oculusVRHMDDevice.h | 18 +- .../platform/input/openVR/openVROverlay.cpp | 128 +- .../platform/input/openVR/openVROverlay.h | 10 +- .../platform/input/openVR/openVRProvider.cpp | 828 +++++------ .../platform/input/openVR/openVRProvider.h | 192 +-- .../input/openVR/openVRTrackedObject.cpp | 1228 ++++++++--------- .../input/openVR/openVRTrackedObject.h | 166 +-- 25 files changed, 2171 insertions(+), 2172 deletions(-) diff --git a/Engine/source/T3D/gameBase/extended/extendedMove.cpp b/Engine/source/T3D/gameBase/extended/extendedMove.cpp index a11dfc6eb..849706cd1 100644 --- a/Engine/source/T3D/gameBase/extended/extendedMove.cpp +++ b/Engine/source/T3D/gameBase/extended/extendedMove.cpp @@ -36,17 +36,17 @@ void ExtendedMoveManager::init() dSprintf(varName, sizeof(varName), "mvPosX%d", i); Con::addVariable(varName, TypeF32, &mPosX[i], "X position of controller in millimeters. Only 13 bits are networked.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvPosY%d", i); Con::addVariable(varName, TypeF32, &mPosY[i], "Y position of controller in millimeters. Only 13 bits are networked.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvPosZ%d", i); Con::addVariable(varName, TypeF32, &mPosZ[i], "Z position of controller in millimeters. Only 13 bits are networked.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvRotIsEuler%d", i); Con::addVariable(varName, TypeBool, &mRotIsEuler[i], @@ -55,33 +55,33 @@ void ExtendedMoveManager::init() "(a vector and angle). When true, the given rotation is a three component " "Euler angle. 
When using Euler angles, the $mvRotA component of the ExtendedMove " "is ignored for this set of rotations.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvRotX%d", i); Con::addVariable(varName, TypeF32, &mRotAX[i], "X rotation vector component of controller.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvRotY%d", i); Con::addVariable(varName, TypeF32, &mRotAY[i], "Y rotation vector component of controller.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvRotZ%d", i); Con::addVariable(varName, TypeF32, &mRotAZ[i], "Z rotation vector component of controller.\n" - "@ingroup Game"); + "@ingroup Game"); dSprintf(varName, sizeof(varName), "mvRotA%d", i); Con::addVariable(varName, TypeF32, &mRotAA[i], "Angle rotation (in degrees) component of controller.\n" - "@ingroup Game"); + "@ingroup Game"); } Con::addVariable("mvPosScale", TypeF32, &mPosScale, - "@brief Indicates the scale to be given to mvPos values.\n\n" - "" - "@ingroup Game"); + "@brief Indicates the scale to be given to mvPos values.\n\n" + "" + "@ingroup Game"); } const ExtendedMove NullExtendedMove; @@ -293,7 +293,7 @@ void ExtendedMove::clamp() crotW[i] = CLAMPROT(rotW[i] / M_2PI_F); } - #ifdef DEBUG_CONTROLLER_MOVE + #ifdef DEBUG_CONTROLLER_MOVE if (i == 1) { F32 x, y, z, a; @@ -302,14 +302,14 @@ void ExtendedMove::clamp() z = UNCLAMPPOS(crotZ[i]); a = UNCLAMPROT(crotW[i]) * M_2PI_F; - Con::printf("INPUT POS == %f,%f,%f", ExtendedMoveManager::mPosX[i], ExtendedMoveManager::mPosY[i], ExtendedMoveManager::mPosZ[i]); + Con::printf("INPUT POS == %f,%f,%f", ExtendedMoveManager::mPosX[i], ExtendedMoveManager::mPosY[i], ExtendedMoveManager::mPosZ[i]); Con::printf("rot %f,%f,%f,%f clamped to %f,%f,%f,%f", rotX[i], rotY[i], rotZ[i], rotW[i], x,y,z,a); - x = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; - y = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; - z = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; - Con::printf("pos %f,%f,%f clamped to %f,%f,%f", posX[i], posY[i], posZ[i], x, y, z); + x = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; + y = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; + z = UNCLAMPPOS(cposX[i]) * ExtendedMoveManager::mPosScale; + Con::printf("pos %f,%f,%f clamped to %f,%f,%f", posX[i], posY[i], posZ[i], x, y, z); } - #endif + #endif } // Perform the standard Move clamp diff --git a/Engine/source/T3D/gameBase/gameConnection.cpp b/Engine/source/T3D/gameBase/gameConnection.cpp index f1b081f38..08125c261 100644 --- a/Engine/source/T3D/gameBase/gameConnection.cpp +++ b/Engine/source/T3D/gameBase/gameConnection.cpp @@ -469,8 +469,8 @@ bool GameConnection::readConnectRequest(BitStream *stream, const char **errorStr for(U32 i = 0; i < mConnectArgc+3; i++) { - connectArgv[i].value = &connectArgvValue[i]; - connectArgvValue[i].init(); + connectArgv[i].value = &connectArgvValue[i]; + connectArgvValue[i].init(); } for(U32 i = 0; i < mConnectArgc; i++) @@ -683,20 +683,20 @@ bool GameConnection::getControlCameraTransform(F32 dt, MatrixF* mat) bool GameConnection::getControlCameraHeadTransform(IDisplayDevice *display, MatrixF *transform) { - GameBase* obj = getCameraObject(); - if (!obj) - return false; + GameBase* obj = getCameraObject(); + if (!obj) + return false; - GameBase* cObj = obj; - while ((cObj = cObj->getControlObject()) != 0) - { - if (cObj->useObjsEyePoint()) - obj = cObj; - } + GameBase* cObj = obj; + while ((cObj = cObj->getControlObject()) != 0) + { + if 
(cObj->useObjsEyePoint()) + obj = cObj; + } - obj->getEyeCameraTransform(display, -1, transform); + obj->getEyeCameraTransform(display, -1, transform); - return true; + return true; } bool GameConnection::getControlCameraEyeTransforms(IDisplayDevice *display, MatrixF *transforms) @@ -914,8 +914,8 @@ void GameConnection::onRemove() // clientgroup and what not (this is so that we can disconnect from a local server // without needing to destroy and recreate the server before we can connect to it // again). - // Safe-delete as we don't know whether the server connection is currently being - // worked on. + // Safe-delete as we don't know whether the server connection is currently being + // worked on. getRemoteConnection()->safeDeleteObject(); setRemoteConnectionObject(NULL); } diff --git a/Engine/source/T3D/player.cpp b/Engine/source/T3D/player.cpp index c2cd7360f..db6a2ca42 100644 --- a/Engine/source/T3D/player.cpp +++ b/Engine/source/T3D/player.cpp @@ -1783,7 +1783,7 @@ void Player::onRemove() mWorkingQueryBox.minExtents.set(-1e9f, -1e9f, -1e9f); mWorkingQueryBox.maxExtents.set(-1e9f, -1e9f, -1e9f); - SAFE_DELETE( mPhysicsRep ); + SAFE_DELETE( mPhysicsRep ); Parent::onRemove(); } @@ -2505,12 +2505,12 @@ void Player::updateMove(const Move* move) #ifdef TORQUE_OPENVR if (mControllers[0]) { - mControllers[0]->processTick(move); + mControllers[0]->processTick(move); } if (mControllers[1]) { - mControllers[1]->processTick(move); + mControllers[1]->processTick(move); } #endif @@ -3337,9 +3337,9 @@ bool Player::canCrouch() if ( mDataBlock->actionList[PlayerData::CrouchRootAnim].sequence == -1 ) return false; - // We are already in this pose, so don't test it again... - if ( mPose == CrouchPose ) - return true; + // We are already in this pose, so don't test it again... + if ( mPose == CrouchPose ) + return true; // Do standard Torque physics test here! if ( !mPhysicsRep ) @@ -3389,8 +3389,8 @@ bool Player::canStand() return false; // We are already in this pose, so don't test it again... - if ( mPose == StandPose ) - return true; + if ( mPose == StandPose ) + return true; // Do standard Torque physics test here! if ( !mPhysicsRep ) @@ -3453,9 +3453,9 @@ bool Player::canProne() if ( !mPhysicsRep ) return true; - // We are already in this pose, so don't test it again... - if ( mPose == PronePose ) - return true; + // We are already in this pose, so don't test it again... + if ( mPose == PronePose ) + return true; return mPhysicsRep->testSpacials( getPosition(), mDataBlock->proneBoxSize ); } @@ -3652,7 +3652,7 @@ MatrixF * Player::Death::fallToGround(F32 dt, const Point3F& loc, F32 curZ, F32 normal.normalize(); mat.set(EulerF (0.0f, 0.0f, curZ)); mat.mulV(upY, & ahead); - mCross(ahead, normal, &sideVec); + mCross(ahead, normal, &sideVec); sideVec.normalize(); mCross(normal, sideVec, &ahead); @@ -5846,7 +5846,7 @@ F32 Player::getSpeed() const void Player::setVelocity(const VectorF& vel) { - AssertFatal( !mIsNaN( vel ), "Player::setVelocity() - The velocity is NaN!" ); + AssertFatal( !mIsNaN( vel ), "Player::setVelocity() - The velocity is NaN!" ); mVelocity = vel; setMaskBits(MoveMask); @@ -5854,7 +5854,7 @@ void Player::setVelocity(const VectorF& vel) void Player::applyImpulse(const Point3F&,const VectorF& vec) { - AssertFatal( !mIsNaN( vec ), "Player::applyImpulse() - The vector is NaN!" ); + AssertFatal( !mIsNaN( vec ), "Player::applyImpulse() - The vector is NaN!" 
); // Players ignore angular velocity VectorF vel; @@ -6202,7 +6202,7 @@ U32 Player::packUpdate(NetConnection *con, U32 mask, BitStream *stream) stream->writeFlag(mSwimming); stream->writeFlag(mJetting); stream->writeInt(mPose, NumPoseBits); - + stream->writeInt(mState,NumStateBits); if (stream->writeFlag(mState == RecoverState)) stream->writeInt(mRecoverTicks,PlayerData::RecoverDelayBits); @@ -6303,7 +6303,7 @@ void Player::unpackUpdate(NetConnection *con, BitStream *stream) mSwimming = stream->readFlag(); mJetting = stream->readFlag(); mPose = (Pose)(stream->readInt(NumPoseBits)); - + ActionState actionState = (ActionState)stream->readInt(NumStateBits); if (stream->readFlag()) { mRecoverTicks = stream->readInt(PlayerData::RecoverDelayBits); @@ -7174,34 +7174,34 @@ void Player::renderConvex( ObjectRenderInst *ri, SceneRenderState *state, BaseMa #ifdef TORQUE_OPENVR void Player::setControllers(Vector controllerList) { - mControllers[0] = controllerList.size() > 0 ? controllerList[0] : NULL; - mControllers[1] = controllerList.size() > 1 ? controllerList[1] : NULL; + mControllers[0] = controllerList.size() > 0 ? controllerList[0] : NULL; + mControllers[1] = controllerList.size() > 1 ? controllerList[1] : NULL; } ConsoleMethod(Player, setVRControllers, void, 4, 4, "") { - OpenVRTrackedObject *controllerL, *controllerR; - Vector list; + OpenVRTrackedObject *controllerL, *controllerR; + Vector list; - if (Sim::findObject(argv[2], controllerL)) - { - list.push_back(controllerL); - } - else - { - list.push_back(NULL); - } + if (Sim::findObject(argv[2], controllerL)) + { + list.push_back(controllerL); + } + else + { + list.push_back(NULL); + } - if (Sim::findObject(argv[3], controllerR)) - { - list.push_back(controllerR); - } - else - { - list.push_back(NULL); - } + if (Sim::findObject(argv[3], controllerR)) + { + list.push_back(controllerR); + } + else + { + list.push_back(NULL); + } - object->setControllers(list); + object->setControllers(list); } #endif diff --git a/Engine/source/gfx/D3D11/gfxD3D11Device.cpp b/Engine/source/gfx/D3D11/gfxD3D11Device.cpp index 49a3835af..2881e1f48 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11Device.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11Device.cpp @@ -119,77 +119,77 @@ void GFXD3D11Device::enumerateAdapters(Vector &adapterList) for(U32 adapterIndex = 0; DXGIFactory->EnumAdapters1(adapterIndex, &EnumAdapter) != DXGI_ERROR_NOT_FOUND; ++adapterIndex) { - GFXAdapter *toAdd = new GFXAdapter; - toAdd->mType = Direct3D11; - toAdd->mIndex = adapterIndex; - toAdd->mCreateDeviceInstanceDelegate = mCreateDeviceInstance; + GFXAdapter *toAdd = new GFXAdapter; + toAdd->mType = Direct3D11; + toAdd->mIndex = adapterIndex; + toAdd->mCreateDeviceInstanceDelegate = mCreateDeviceInstance; - toAdd->mShaderModel = 5.0f; - DXGI_ADAPTER_DESC1 desc; - EnumAdapter->GetDesc1(&desc); + toAdd->mShaderModel = 5.0f; + DXGI_ADAPTER_DESC1 desc; + EnumAdapter->GetDesc1(&desc); - // LUID identifies adapter for oculus rift - dMemcpy(&toAdd->mLUID, &desc.AdapterLuid, sizeof(toAdd->mLUID)); + // LUID identifies adapter for oculus rift + dMemcpy(&toAdd->mLUID, &desc.AdapterLuid, sizeof(toAdd->mLUID)); - size_t size=wcslen(desc.Description); - char *str = new char[size+1]; + size_t size=wcslen(desc.Description); + char *str = new char[size+1]; - wcstombs(str, desc.Description,size); - str[size]='\0'; - String Description=str; + wcstombs(str, desc.Description,size); + str[size]='\0'; + String Description=str; SAFE_DELETE_ARRAY(str); - dStrncpy(toAdd->mName, Description.c_str(), 
GFXAdapter::MaxAdapterNameLen); - dStrncat(toAdd->mName, " (D3D11)", GFXAdapter::MaxAdapterNameLen); + dStrncpy(toAdd->mName, Description.c_str(), GFXAdapter::MaxAdapterNameLen); + dStrncat(toAdd->mName, " (D3D11)", GFXAdapter::MaxAdapterNameLen); - IDXGIOutput* pOutput = NULL; - HRESULT hr; + IDXGIOutput* pOutput = NULL; + HRESULT hr; - hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput); + hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput); - if(hr == DXGI_ERROR_NOT_FOUND) - { + if(hr == DXGI_ERROR_NOT_FOUND) + { SAFE_RELEASE(EnumAdapter); - break; - } + break; + } - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> EnumOutputs call failure"); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> EnumOutputs call failure"); - UINT numModes = 0; - DXGI_MODE_DESC* displayModes = NULL; - DXGI_FORMAT format = DXGI_FORMAT_B8G8R8A8_UNORM; + UINT numModes = 0; + DXGI_MODE_DESC* displayModes = NULL; + DXGI_FORMAT format = DXGI_FORMAT_B8G8R8A8_UNORM; - // Get the number of elements - hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL); + // Get the number of elements + hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL); - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure"); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure"); - displayModes = new DXGI_MODE_DESC[numModes]; + displayModes = new DXGI_MODE_DESC[numModes]; - // Get the list - hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes); + // Get the list + hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes); - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure"); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::enumerateAdapters -> GetDisplayModeList call failure"); - for(U32 numMode = 0; numMode < numModes; ++numMode) - { - GFXVideoMode vmAdd; + for(U32 numMode = 0; numMode < numModes; ++numMode) + { + GFXVideoMode vmAdd; - vmAdd.fullScreen = true; - vmAdd.bitDepth = 32; - vmAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator; - vmAdd.resolution.x = displayModes[numMode].Width; - vmAdd.resolution.y = displayModes[numMode].Height; - toAdd->mAvailableModes.push_back(vmAdd); - } + vmAdd.fullScreen = true; + vmAdd.bitDepth = 32; + vmAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator; + vmAdd.resolution.x = displayModes[numMode].Width; + vmAdd.resolution.y = displayModes[numMode].Height; + toAdd->mAvailableModes.push_back(vmAdd); + } - delete[] displayModes; + delete[] displayModes; SAFE_RELEASE(pOutput); SAFE_RELEASE(EnumAdapter); - adapterList.push_back(toAdd); + adapterList.push_back(toAdd); } SAFE_RELEASE(DXGIFactory); @@ -210,50 +210,50 @@ void GFXD3D11Device::enumerateVideoModes() for(U32 adapterIndex = 0; DXGIFactory->EnumAdapters1(adapterIndex, &EnumAdapter) != DXGI_ERROR_NOT_FOUND; ++adapterIndex) { - IDXGIOutput* pOutput = NULL; + IDXGIOutput* pOutput = NULL; - hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput); + hr = EnumAdapter->EnumOutputs(adapterIndex, &pOutput); - if(hr == DXGI_ERROR_NOT_FOUND) - { + if(hr == DXGI_ERROR_NOT_FOUND) + { SAFE_RELEASE(EnumAdapter); - break; - } + break; + } - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> EnumOutputs call failure"); + if(FAILED(hr)) + AssertFatal(false, 
"GFXD3D11Device::enumerateVideoModes -> EnumOutputs call failure"); - UINT numModes = 0; - DXGI_MODE_DESC* displayModes = NULL; - DXGI_FORMAT format = GFXD3D11TextureFormat[GFXFormatR8G8B8A8]; + UINT numModes = 0; + DXGI_MODE_DESC* displayModes = NULL; + DXGI_FORMAT format = GFXD3D11TextureFormat[GFXFormatR8G8B8A8]; - // Get the number of elements - hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL); + // Get the number of elements + hr = pOutput->GetDisplayModeList(format, 0, &numModes, NULL); - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure"); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure"); - displayModes = new DXGI_MODE_DESC[numModes]; + displayModes = new DXGI_MODE_DESC[numModes]; - // Get the list - hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes); + // Get the list + hr = pOutput->GetDisplayModeList(format, 0, &numModes, displayModes); - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure"); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::enumerateVideoModes -> GetDisplayModeList call failure"); - for(U32 numMode = 0; numMode < numModes; ++numMode) - { - GFXVideoMode toAdd; + for(U32 numMode = 0; numMode < numModes; ++numMode) + { + GFXVideoMode toAdd; - toAdd.fullScreen = false; - toAdd.bitDepth = 32; - toAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator; - toAdd.resolution.x = displayModes[numMode].Width; - toAdd.resolution.y = displayModes[numMode].Height; - mVideoModes.push_back(toAdd); - } + toAdd.fullScreen = false; + toAdd.bitDepth = 32; + toAdd.refreshRate = displayModes[numMode].RefreshRate.Numerator / displayModes[numMode].RefreshRate.Denominator; + toAdd.resolution.x = displayModes[numMode].Width; + toAdd.resolution.y = displayModes[numMode].Height; + mVideoModes.push_back(toAdd); + } - delete[] displayModes; + delete[] displayModes; SAFE_RELEASE(pOutput); SAFE_RELEASE(EnumAdapter); } @@ -263,7 +263,7 @@ void GFXD3D11Device::enumerateVideoModes() IDXGISwapChain* GFXD3D11Device::getSwapChain() { - return mSwapChain; + return mSwapChain; } void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window) @@ -285,19 +285,19 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window) // create a device, device context and swap chain using the information in the d3dpp struct HRESULT hres = D3D11CreateDeviceAndSwapChain(NULL, driverType, - NULL, - createDeviceFlags, - NULL, - 0, - D3D11_SDK_VERSION, - &d3dpp, - &mSwapChain, - &mD3DDevice, - &deviceFeature, - &mD3DDeviceContext); + NULL, + createDeviceFlags, + NULL, + 0, + D3D11_SDK_VERSION, + &d3dpp, + &mSwapChain, + &mD3DDevice, + &deviceFeature, + &mD3DDeviceContext); - if(FAILED(hres)) - { + if(FAILED(hres)) + { #ifdef TORQUE_DEBUG //try again without debug device layer enabled createDeviceFlags &= ~D3D11_CREATE_DEVICE_DEBUG; @@ -315,9 +315,9 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window) Con::warnf("GFXD3D11Device::init - Debug layers not detected!"); mDebugLayers = false; #else - AssertFatal(false, "GFXD3D11Device::init - D3D11CreateDeviceAndSwapChain failed!"); + AssertFatal(false, "GFXD3D11Device::init - D3D11CreateDeviceAndSwapChain failed!"); #endif - } + } //set the fullscreen state here if we need to if(mode.fullScreen) @@ -329,79 +329,79 @@ void GFXD3D11Device::init(const 
GFXVideoMode &mode, PlatformWindow *window) } } - mTextureManager = new GFXD3D11TextureManager(); + mTextureManager = new GFXD3D11TextureManager(); - // Now reacquire all the resources we trashed earlier - reacquireDefaultPoolResources(); + // Now reacquire all the resources we trashed earlier + reacquireDefaultPoolResources(); //TODO implement feature levels? - if (deviceFeature >= D3D_FEATURE_LEVEL_11_0) - mPixVersion = 5.0f; - else - AssertFatal(false, "GFXD3D11Device::init - We don't support anything below feature level 11."); + if (deviceFeature >= D3D_FEATURE_LEVEL_11_0) + mPixVersion = 5.0f; + else + AssertFatal(false, "GFXD3D11Device::init - We don't support anything below feature level 11."); - D3D11_QUERY_DESC queryDesc; + D3D11_QUERY_DESC queryDesc; queryDesc.Query = D3D11_QUERY_OCCLUSION; queryDesc.MiscFlags = 0; - ID3D11Query *testQuery = NULL; + ID3D11Query *testQuery = NULL; - // detect occlusion query support - if (SUCCEEDED(mD3DDevice->CreateQuery(&queryDesc, &testQuery))) mOcclusionQuerySupported = true; + // detect occlusion query support + if (SUCCEEDED(mD3DDevice->CreateQuery(&queryDesc, &testQuery))) mOcclusionQuerySupported = true; SAFE_RELEASE(testQuery); - Con::printf("Hardware occlusion query detected: %s", mOcclusionQuerySupported ? "Yes" : "No"); + Con::printf("Hardware occlusion query detected: %s", mOcclusionQuerySupported ? "Yes" : "No"); - mCardProfiler = new GFXD3D11CardProfiler(); - mCardProfiler->init(); + mCardProfiler = new GFXD3D11CardProfiler(); + mCardProfiler->init(); - D3D11_TEXTURE2D_DESC desc; - desc.BindFlags = D3D11_BIND_DEPTH_STENCIL; - desc.CPUAccessFlags = 0; - desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; - desc.MipLevels = 1; - desc.ArraySize = 1; - desc.Usage = D3D11_USAGE_DEFAULT; - desc.Width = mode.resolution.x; - desc.Height = mode.resolution.y; - desc.SampleDesc.Count =1; - desc.SampleDesc.Quality =0; - desc.MiscFlags = 0; + D3D11_TEXTURE2D_DESC desc; + desc.BindFlags = D3D11_BIND_DEPTH_STENCIL; + desc.CPUAccessFlags = 0; + desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; + desc.MipLevels = 1; + desc.ArraySize = 1; + desc.Usage = D3D11_USAGE_DEFAULT; + desc.Width = mode.resolution.x; + desc.Height = mode.resolution.y; + desc.SampleDesc.Count =1; + desc.SampleDesc.Quality =0; + desc.MiscFlags = 0; - HRESULT hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil); - if(FAILED(hr)) - { - AssertFatal(false, "GFXD3D11Device::init - couldn't create device's depth-stencil surface."); - } + HRESULT hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil); + if(FAILED(hr)) + { + AssertFatal(false, "GFXD3D11Device::init - couldn't create device's depth-stencil surface."); + } - D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc; - depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; - depthDesc.Flags =0 ; - depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D; - depthDesc.Texture2D.MipSlice = 0; + D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc; + depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; + depthDesc.Flags =0 ; + depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D; + depthDesc.Texture2D.MipSlice = 0; - hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView); + hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView); - if(FAILED(hr)) - { - AssertFatal(false, "GFXD3D11Device::init - couldn't create depth stencil view"); - } + if(FAILED(hr)) + { + AssertFatal(false, "GFXD3D11Device::init - couldn't create depth stencil 
view"); + } - hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer); - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::init - coudln't retrieve backbuffer ref"); + hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::init - coudln't retrieve backbuffer ref"); - //create back buffer view - D3D11_RENDER_TARGET_VIEW_DESC RTDesc; + //create back buffer view + D3D11_RENDER_TARGET_VIEW_DESC RTDesc; - RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; - RTDesc.Texture2D.MipSlice = 0; - RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; + RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; + RTDesc.Texture2D.MipSlice = 0; + RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; - hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView); + hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView); - if(FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::init - couldn't create back buffer target view"); + if(FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::init - couldn't create back buffer target view"); #ifdef TORQUE_DEBUG String backBufferName = "MainBackBuffer"; @@ -419,8 +419,8 @@ void GFXD3D11Device::init(const GFXVideoMode &mode, PlatformWindow *window) gScreenShot = new ScreenShotD3D11; - mInitialized = true; - deviceInited(); + mInitialized = true; + deviceInited(); } // Supress any debug layer messages we don't want to see @@ -489,28 +489,28 @@ GFXTextureTarget* GFXD3D11Device::allocRenderToTextureTarget() void GFXD3D11Device::reset(DXGI_SWAP_CHAIN_DESC &d3dpp) { - if (!mD3DDevice) - return; + if (!mD3DDevice) + return; - mInitialized = false; + mInitialized = false; - // Clean up some commonly dangling state. This helps prevents issues with - // items that are destroyed by the texture manager callbacks and recreated - // later, but still left bound. 
- setVertexBuffer(NULL); - setPrimitiveBuffer(NULL); - for (S32 i = 0; iClearState(); + mD3DDeviceContext->ClearState(); - DXGI_MODE_DESC displayModes; - displayModes.Format = d3dpp.BufferDesc.Format; - displayModes.Height = d3dpp.BufferDesc.Height; - displayModes.Width = d3dpp.BufferDesc.Width; - displayModes.RefreshRate = d3dpp.BufferDesc.RefreshRate; - displayModes.Scaling = d3dpp.BufferDesc.Scaling; - displayModes.ScanlineOrdering = d3dpp.BufferDesc.ScanlineOrdering; + DXGI_MODE_DESC displayModes; + displayModes.Format = d3dpp.BufferDesc.Format; + displayModes.Height = d3dpp.BufferDesc.Height; + displayModes.Width = d3dpp.BufferDesc.Width; + displayModes.RefreshRate = d3dpp.BufferDesc.RefreshRate; + displayModes.Scaling = d3dpp.BufferDesc.Scaling; + displayModes.ScanlineOrdering = d3dpp.BufferDesc.ScanlineOrdering; HRESULT hr; if (!d3dpp.Windowed) @@ -523,79 +523,79 @@ void GFXD3D11Device::reset(DXGI_SWAP_CHAIN_DESC &d3dpp) } } - // First release all the stuff we allocated from D3DPOOL_DEFAULT - releaseDefaultPoolResources(); + // First release all the stuff we allocated from D3DPOOL_DEFAULT + releaseDefaultPoolResources(); - //release the backbuffer, depthstencil, and their views - SAFE_RELEASE(mDeviceBackBufferView); - SAFE_RELEASE(mDeviceBackbuffer); - SAFE_RELEASE(mDeviceDepthStencilView); - SAFE_RELEASE(mDeviceDepthStencil); + //release the backbuffer, depthstencil, and their views + SAFE_RELEASE(mDeviceBackBufferView); + SAFE_RELEASE(mDeviceBackbuffer); + SAFE_RELEASE(mDeviceDepthStencilView); + SAFE_RELEASE(mDeviceDepthStencil); hr = mSwapChain->ResizeBuffers(d3dpp.BufferCount, d3dpp.BufferDesc.Width, d3dpp.BufferDesc.Height, d3dpp.BufferDesc.Format, d3dpp.Windowed ? 0 : DXGI_SWAP_CHAIN_FLAG_ALLOW_MODE_SWITCH); - if (FAILED(hr)) - { - AssertFatal(false, "D3D11Device::reset - failed to resize back buffer!"); - } + if (FAILED(hr)) + { + AssertFatal(false, "D3D11Device::reset - failed to resize back buffer!"); + } - //recreate backbuffer view. depth stencil view and texture - D3D11_TEXTURE2D_DESC desc; - desc.BindFlags = D3D11_BIND_DEPTH_STENCIL; - desc.CPUAccessFlags = 0; - desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; - desc.MipLevels = 1; - desc.ArraySize = 1; - desc.Usage = D3D11_USAGE_DEFAULT; - desc.Width = d3dpp.BufferDesc.Width; - desc.Height = d3dpp.BufferDesc.Height; - desc.SampleDesc.Count = 1; - desc.SampleDesc.Quality = 0; - desc.MiscFlags = 0; + //recreate backbuffer view. 
depth stencil view and texture + D3D11_TEXTURE2D_DESC desc; + desc.BindFlags = D3D11_BIND_DEPTH_STENCIL; + desc.CPUAccessFlags = 0; + desc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; + desc.MipLevels = 1; + desc.ArraySize = 1; + desc.Usage = D3D11_USAGE_DEFAULT; + desc.Width = d3dpp.BufferDesc.Width; + desc.Height = d3dpp.BufferDesc.Height; + desc.SampleDesc.Count = 1; + desc.SampleDesc.Quality = 0; + desc.MiscFlags = 0; - hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil); - if (FAILED(hr)) - { - AssertFatal(false, "GFXD3D11Device::reset - couldn't create device's depth-stencil surface."); - } + hr = mD3DDevice->CreateTexture2D(&desc, NULL, &mDeviceDepthStencil); + if (FAILED(hr)) + { + AssertFatal(false, "GFXD3D11Device::reset - couldn't create device's depth-stencil surface."); + } - D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc; - depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; - depthDesc.Flags = 0; - depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D; - depthDesc.Texture2D.MipSlice = 0; + D3D11_DEPTH_STENCIL_VIEW_DESC depthDesc; + depthDesc.Format = GFXD3D11TextureFormat[GFXFormatD24S8]; + depthDesc.Flags = 0; + depthDesc.ViewDimension = D3D11_DSV_DIMENSION_TEXTURE2D; + depthDesc.Texture2D.MipSlice = 0; - hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView); + hr = mD3DDevice->CreateDepthStencilView(mDeviceDepthStencil, &depthDesc, &mDeviceDepthStencilView); - if (FAILED(hr)) - { - AssertFatal(false, "GFXD3D11Device::reset - couldn't create depth stencil view"); - } + if (FAILED(hr)) + { + AssertFatal(false, "GFXD3D11Device::reset - couldn't create depth stencil view"); + } - hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer); - if (FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::reset - coudln't retrieve backbuffer ref"); + hr = mSwapChain->GetBuffer(0, __uuidof(ID3D11Texture2D), (LPVOID*)&mDeviceBackbuffer); + if (FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::reset - coudln't retrieve backbuffer ref"); - //create back buffer view - D3D11_RENDER_TARGET_VIEW_DESC RTDesc; + //create back buffer view + D3D11_RENDER_TARGET_VIEW_DESC RTDesc; - RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; - RTDesc.Texture2D.MipSlice = 0; - RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; + RTDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM; + RTDesc.Texture2D.MipSlice = 0; + RTDesc.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; - hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView); + hr = mD3DDevice->CreateRenderTargetView(mDeviceBackbuffer, &RTDesc, &mDeviceBackBufferView); - if (FAILED(hr)) - AssertFatal(false, "GFXD3D11Device::reset - couldn't create back buffer target view"); + if (FAILED(hr)) + AssertFatal(false, "GFXD3D11Device::reset - couldn't create back buffer target view"); mD3DDeviceContext->OMSetRenderTargets(1, &mDeviceBackBufferView, mDeviceDepthStencilView); - hr = mSwapChain->SetFullscreenState(!d3dpp.Windowed, NULL); + hr = mSwapChain->SetFullscreenState(!d3dpp.Windowed, NULL); - if (FAILED(hr)) - { + if (FAILED(hr)) + { AssertFatal(false, "D3D11Device::reset - failed to change screen states!"); - } + } //Microsoft recommend this, see DXGI documentation if (!d3dpp.Windowed) @@ -610,13 +610,13 @@ void GFXD3D11Device::reset(DXGI_SWAP_CHAIN_DESC &d3dpp) } } - mInitialized = true; + mInitialized = true; - // Now re aquire all the resources we trashed earlier - reacquireDefaultPoolResources(); + // Now re aquire all the resources we trashed 
earlier + reacquireDefaultPoolResources(); - // Mark everything dirty and flush to card, for sanity. - updateStates(true); + // Mark everything dirty and flush to card, for sanity. + updateStates(true); } class GFXPCD3D11RegisterDevice @@ -899,20 +899,20 @@ void GFXD3D11Device::_updateRenderTargets() mRTDirty = false; } - if (mViewportDirty) - { - D3D11_VIEWPORT viewport; + if (mViewportDirty) + { + D3D11_VIEWPORT viewport; - viewport.TopLeftX = mViewport.point.x; - viewport.TopLeftY = mViewport.point.y; - viewport.Width = mViewport.extent.x; - viewport.Height = mViewport.extent.y; - viewport.MinDepth = 0.0f; - viewport.MaxDepth = 1.0f; + viewport.TopLeftX = mViewport.point.x; + viewport.TopLeftY = mViewport.point.y; + viewport.Width = mViewport.extent.x; + viewport.Height = mViewport.extent.y; + viewport.MinDepth = 0.0f; + viewport.MaxDepth = 1.0f; - mD3DDeviceContext->RSSetViewports(1, &viewport); + mD3DDeviceContext->RSSetViewports(1, &viewport); - mViewportDirty = false; + mViewportDirty = false; } } @@ -970,35 +970,35 @@ void GFXD3D11Device::releaseDefaultPoolResources() void GFXD3D11Device::reacquireDefaultPoolResources() { - // Now do the dynamic index buffers - if( mDynamicPB == NULL ) - mDynamicPB = new GFXD3D11PrimitiveBuffer(this, 0, 0, GFXBufferTypeDynamic); + // Now do the dynamic index buffers + if( mDynamicPB == NULL ) + mDynamicPB = new GFXD3D11PrimitiveBuffer(this, 0, 0, GFXBufferTypeDynamic); - D3D11_BUFFER_DESC desc; - desc.ByteWidth = sizeof(U16) * MAX_DYNAMIC_INDICES; - desc.Usage = D3D11_USAGE_DYNAMIC; - desc.BindFlags = D3D11_BIND_INDEX_BUFFER; - desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; - desc.MiscFlags = 0; - desc.StructureByteStride = 0; + D3D11_BUFFER_DESC desc; + desc.ByteWidth = sizeof(U16) * MAX_DYNAMIC_INDICES; + desc.Usage = D3D11_USAGE_DYNAMIC; + desc.BindFlags = D3D11_BIND_INDEX_BUFFER; + desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; + desc.MiscFlags = 0; + desc.StructureByteStride = 0; - HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &mDynamicPB->ib); + HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &mDynamicPB->ib); - if(FAILED(hr)) - { - AssertFatal(false, "Failed to allocate dynamic IB"); - } + if(FAILED(hr)) + { + AssertFatal(false, "Failed to allocate dynamic IB"); + } - // Walk the resource list and zombify everything. - GFXResource *walk = mResourceListHead; - while(walk) - { - walk->resurrect(); - walk = walk->getNextResource(); - } + // Walk the resource list and zombify everything. 
+ GFXResource *walk = mResourceListHead; + while(walk) + { + walk->resurrect(); + walk = walk->getNextResource(); + } - if(mTextureManager) - mTextureManager->resurrect(); + if(mTextureManager) + mTextureManager->resurrect(); } GFXD3D11VertexBuffer* GFXD3D11Device::findVBPool( const GFXVertexFormat *vertexFormat, U32 vertsNeeded ) @@ -1014,40 +1014,40 @@ GFXD3D11VertexBuffer* GFXD3D11Device::findVBPool( const GFXVertexFormat *vertexF GFXD3D11VertexBuffer * GFXD3D11Device::createVBPool( const GFXVertexFormat *vertexFormat, U32 vertSize ) { - PROFILE_SCOPE( GFXD3D11Device_createVBPool ); + PROFILE_SCOPE( GFXD3D11Device_createVBPool ); - // this is a bit funky, but it will avoid problems with (lack of) copy constructors - // with a push_back() situation - mVolatileVBList.increment(); - StrongRefPtr newBuff; - mVolatileVBList.last() = new GFXD3D11VertexBuffer(); - newBuff = mVolatileVBList.last(); + // this is a bit funky, but it will avoid problems with (lack of) copy constructors + // with a push_back() situation + mVolatileVBList.increment(); + StrongRefPtr newBuff; + mVolatileVBList.last() = new GFXD3D11VertexBuffer(); + newBuff = mVolatileVBList.last(); - newBuff->mNumVerts = 0; - newBuff->mBufferType = GFXBufferTypeVolatile; - newBuff->mVertexFormat.copy( *vertexFormat ); - newBuff->mVertexSize = vertSize; - newBuff->mDevice = this; + newBuff->mNumVerts = 0; + newBuff->mBufferType = GFXBufferTypeVolatile; + newBuff->mVertexFormat.copy( *vertexFormat ); + newBuff->mVertexSize = vertSize; + newBuff->mDevice = this; - // Requesting it will allocate it. - vertexFormat->getDecl(); + // Requesting it will allocate it. + vertexFormat->getDecl(); - D3D11_BUFFER_DESC desc; - desc.ByteWidth = vertSize * MAX_DYNAMIC_VERTS; - desc.Usage = D3D11_USAGE_DYNAMIC; - desc.BindFlags = D3D11_BIND_VERTEX_BUFFER; - desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; - desc.MiscFlags = 0; - desc.StructureByteStride = 0; + D3D11_BUFFER_DESC desc; + desc.ByteWidth = vertSize * MAX_DYNAMIC_VERTS; + desc.Usage = D3D11_USAGE_DYNAMIC; + desc.BindFlags = D3D11_BIND_VERTEX_BUFFER; + desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; + desc.MiscFlags = 0; + desc.StructureByteStride = 0; - HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &newBuff->vb); + HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &newBuff->vb); - if(FAILED(hr)) - { - AssertFatal(false, "Failed to allocate dynamic VB"); - } + if(FAILED(hr)) + { + AssertFatal(false, "Failed to allocate dynamic VB"); + } - return newBuff; + return newBuff; } //----------------------------------------------------------------------------- @@ -1103,30 +1103,30 @@ void GFXD3D11Device::setClipRect( const RectI &inRect ) void GFXD3D11Device::setVertexStream( U32 stream, GFXVertexBuffer *buffer ) { - GFXD3D11VertexBuffer *d3dBuffer = static_cast( buffer ); + GFXD3D11VertexBuffer *d3dBuffer = static_cast( buffer ); - if ( stream == 0 ) - { - // Set the volatile buffer which is used to - // offset the start index when doing draw calls. - if ( d3dBuffer && d3dBuffer->mVolatileStart > 0 ) - mVolatileVB = d3dBuffer; - else - mVolatileVB = NULL; - } + if ( stream == 0 ) + { + // Set the volatile buffer which is used to + // offset the start index when doing draw calls. + if ( d3dBuffer && d3dBuffer->mVolatileStart > 0 ) + mVolatileVB = d3dBuffer; + else + mVolatileVB = NULL; + } - // NOTE: We do not use the stream offset here for stream 0 - // as that feature is *supposedly* not as well supported as - // using the start index in drawPrimitive. 
- // - // If we can verify that this is not the case then we should - // start using this method exclusively for all streams. + // NOTE: We do not use the stream offset here for stream 0 + // as that feature is *supposedly* not as well supported as + // using the start index in drawPrimitive. + // + // If we can verify that this is not the case then we should + // start using this method exclusively for all streams.
- U32 strides[1] = { d3dBuffer ? d3dBuffer->mVertexSize : 0 }; - U32 offset = d3dBuffer && stream != 0 ? d3dBuffer->mVolatileStart * d3dBuffer->mVertexSize : 0; - ID3D11Buffer* buff = d3dBuffer ? d3dBuffer->vb : NULL; + U32 strides[1] = { d3dBuffer ? d3dBuffer->mVertexSize : 0 }; + U32 offset = d3dBuffer && stream != 0 ? d3dBuffer->mVolatileStart * d3dBuffer->mVertexSize : 0; + ID3D11Buffer* buff = d3dBuffer ? d3dBuffer->vb : NULL;
- getDeviceContext()->IASetVertexBuffers(stream, 1, &buff, strides, &offset); + getDeviceContext()->IASetVertexBuffers(stream, 1, &buff, strides, &offset); } void GFXD3D11Device::setVertexStreamFrequency( U32 stream, U32 frequency ) @@ -1179,7 +1179,7 @@ void GFXD3D11Device::drawPrimitive( GFXPrimitiveType primType, U32 vertexStart, setShaderConstBufferInternal(mCurrentShaderConstBuffer); if ( mVolatileVB ) - vertexStart += mVolatileVB->mVolatileStart; + vertexStart += mVolatileVB->mVolatileStart; mD3DDeviceContext->IASetPrimitiveTopology(GFXD3D11PrimType[primType]); @@ -1243,23 +1243,23 @@ void GFXD3D11Device::setShader(GFXShader *shader, bool force) { if(shader) { - GFXD3D11Shader *d3dShader = static_cast<GFXD3D11Shader*>(shader); + GFXD3D11Shader *d3dShader = static_cast<GFXD3D11Shader*>(shader); if (d3dShader->mPixShader != mLastPixShader || force) - { - mD3DDeviceContext->PSSetShader( d3dShader->mPixShader, NULL, 0); - mLastPixShader = d3dShader->mPixShader; - } + { + mD3DDeviceContext->PSSetShader( d3dShader->mPixShader, NULL, 0); + mLastPixShader = d3dShader->mPixShader; + } if (d3dShader->mVertShader != mLastVertShader || force) - { - mD3DDeviceContext->VSSetShader( d3dShader->mVertShader, NULL, 0); - mLastVertShader = d3dShader->mVertShader; - } + { + mD3DDeviceContext->VSSetShader( d3dShader->mVertShader, NULL, 0); + mLastVertShader = d3dShader->mVertShader; + } } else { - setupGenericShaders(); + setupGenericShaders(); } } @@ -1286,7 +1286,7 @@ GFXPrimitiveBuffer * GFXD3D11Device::allocPrimitiveBuffer(U32 numIndices, U32 nu case GFXBufferTypeDynamic: case GFXBufferTypeVolatile: - usage = D3D11_USAGE_DYNAMIC; + usage = D3D11_USAGE_DYNAMIC; break; } @@ -1304,24 +1304,24 @@ GFXPrimitiveBuffer * GFXD3D11Device::allocPrimitiveBuffer(U32 numIndices, U32 nu } else { - // Otherwise, get it as a seperate buffer... - D3D11_BUFFER_DESC desc; - desc.ByteWidth = sizeof(U16) * numIndices; - desc.Usage = usage; - if(bufferType == GFXBufferTypeDynamic) - desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a primitive buffer. - else - desc.CPUAccessFlags = 0; - desc.BindFlags = D3D11_BIND_INDEX_BUFFER; - desc.MiscFlags = 0; - desc.StructureByteStride = 0; + // Otherwise, get it as a separate buffer... + D3D11_BUFFER_DESC desc; + desc.ByteWidth = sizeof(U16) * numIndices; + desc.Usage = usage; + if(bufferType == GFXBufferTypeDynamic) + desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a primitive buffer. 
+ else + desc.CPUAccessFlags = 0; + desc.BindFlags = D3D11_BIND_INDEX_BUFFER; + desc.MiscFlags = 0; + desc.StructureByteStride = 0; - HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->ib); + HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->ib); - if(FAILED(hr)) - { - AssertFatal(false, "Failed to allocate an index buffer."); - } + if(FAILED(hr)) + { + AssertFatal(false, "Failed to allocate an index buffer."); + } } if (data) @@ -1365,7 +1365,7 @@ GFXVertexBuffer * GFXD3D11Device::allocVertexBuffer(U32 numVerts, const GFXVerte case GFXBufferTypeDynamic: case GFXBufferTypeVolatile: - usage = D3D11_USAGE_DYNAMIC; + usage = D3D11_USAGE_DYNAMIC; break; } @@ -1380,27 +1380,27 @@ GFXVertexBuffer * GFXD3D11Device::allocVertexBuffer(U32 numVerts, const GFXVerte } else { - // Requesting it will allocate it. - vertexFormat->getDecl(); //-ALEX disabled to postpone until after shader is actually set... + // Requesting it will allocate it. + vertexFormat->getDecl(); //-ALEX disabled to postpone until after shader is actually set... - // Get a new buffer... - D3D11_BUFFER_DESC desc; - desc.ByteWidth = vertSize * numVerts; - desc.Usage = usage; - desc.BindFlags = D3D11_BIND_VERTEX_BUFFER; - if(bufferType == GFXBufferTypeDynamic) - desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a vertex buffer. - else - desc.CPUAccessFlags = 0; - desc.MiscFlags = 0; - desc.StructureByteStride = 0; + // Get a new buffer... + D3D11_BUFFER_DESC desc; + desc.ByteWidth = vertSize * numVerts; + desc.Usage = usage; + desc.BindFlags = D3D11_BIND_VERTEX_BUFFER; + if(bufferType == GFXBufferTypeDynamic) + desc.CPUAccessFlags = D3D11_CPU_ACCESS_WRITE; // We never allow reading from a vertex buffer. + else + desc.CPUAccessFlags = 0; + desc.MiscFlags = 0; + desc.StructureByteStride = 0; - HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->vb); + HRESULT hr = D3D11DEVICE->CreateBuffer(&desc, NULL, &res->vb); - if(FAILED(hr)) - { - AssertFatal(false, "Failed to allocate VB"); - } + if(FAILED(hr)) + { + AssertFatal(false, "Failed to allocate VB"); + } } res->mNumVerts = numVerts; @@ -1597,7 +1597,6 @@ GFXVertexDecl* GFXD3D11Device::allocVertexDecl( const GFXVertexFormat *vertexFor S32 elemIndex = 0; for (S32 i = 0; i < elemCount; i++, elemIndex++) - { const GFXVertexElement &element = vertexFormat->getElement(elemIndex); @@ -1690,9 +1689,9 @@ void GFXD3D11Device::setTextureInternal( U32 textureUnit, const GFXTextureObject { if( texture == NULL ) { - ID3D11ShaderResourceView *pView = NULL; - mD3DDeviceContext->PSSetShaderResources(textureUnit, 1, &pView); - return; + ID3D11ShaderResourceView *pView = NULL; + mD3DDeviceContext->PSSetShaderResources(textureUnit, 1, &pView); + return; } GFXD3D11TextureObject *tex = (GFXD3D11TextureObject*)(texture); @@ -1704,23 +1703,23 @@ GFXFence *GFXD3D11Device::createFence() // Figure out what fence type we should be making if we don't know if( mCreateFenceType == -1 ) { - D3D11_QUERY_DESC desc; - desc.MiscFlags = 0; - desc.Query = D3D11_QUERY_EVENT; + D3D11_QUERY_DESC desc; + desc.MiscFlags = 0; + desc.Query = D3D11_QUERY_EVENT; - ID3D11Query *testQuery = NULL; + ID3D11Query *testQuery = NULL; - HRESULT hRes = mD3DDevice->CreateQuery(&desc, &testQuery); + HRESULT hRes = mD3DDevice->CreateQuery(&desc, &testQuery); - if(FAILED(hRes)) - { - mCreateFenceType = true; - } + if(FAILED(hRes)) + { + mCreateFenceType = true; + } - else - { - mCreateFenceType = false; - } + else + { + mCreateFenceType = false; + } SAFE_RELEASE(testQuery); } diff --git 
a/Engine/source/gfx/D3D11/gfxD3D11Target.cpp b/Engine/source/gfx/D3D11/gfxD3D11Target.cpp index 9c21fa4d3..2260ff841 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11Target.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11Target.cpp @@ -97,9 +97,9 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te if( tex == GFXTextureTarget::sDefaultDepthStencil ) { mTargets[slot] = D3D11->mDeviceDepthStencil; - mTargetViews[slot] = D3D11->mDeviceDepthStencilView; - mTargets[slot]->AddRef(); - mTargetViews[slot]->AddRef(); + mTargetViews[slot] = D3D11->mDeviceDepthStencilView; + mTargets[slot]->AddRef(); + mTargetViews[slot]->AddRef(); } else { @@ -110,14 +110,14 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te // Grab the surface level. if( slot == DepthStencil ) - { + { mTargets[slot] = d3dto->getSurface(); if ( mTargets[slot] ) mTargets[slot]->AddRef(); - mTargetViews[slot] = d3dto->getDSView(); - if( mTargetViews[slot]) - mTargetViews[slot]->AddRef(); + mTargetViews[slot] = d3dto->getDSView(); + if( mTargetViews[slot]) + mTargetViews[slot]->AddRef(); } else @@ -126,12 +126,12 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te // if the surface that it needs to render to is different than the mip level // in the actual texture. This will happen with MSAA. if( d3dto->getSurface() == NULL ) - { + { - mTargets[slot] = d3dto->get2DTex(); - mTargets[slot]->AddRef(); - mTargetViews[slot] = d3dto->getRTView(); - mTargetViews[slot]->AddRef(); + mTargets[slot] = d3dto->get2DTex(); + mTargets[slot]->AddRef(); + mTargetViews[slot] = d3dto->getRTView(); + mTargetViews[slot]->AddRef(); } else { @@ -164,11 +164,11 @@ void GFXD3D11TextureTarget::attachTexture( RenderSlot slot, GFXTextureObject *te S32 format = sd.Format; - if (format == DXGI_FORMAT_R8G8B8A8_TYPELESS || format == DXGI_FORMAT_B8G8R8A8_TYPELESS) - { - mTargetFormat = GFXFormatR8G8B8A8; - return; - } + if (format == DXGI_FORMAT_R8G8B8A8_TYPELESS || format == DXGI_FORMAT_B8G8R8A8_TYPELESS) + { + mTargetFormat = GFXFormatR8G8B8A8; + return; + } GFXREVERSE_LOOKUP( GFXD3D11TextureFormat, GFXFormat, format ); mTargetFormat = (GFXFormat)format; @@ -283,7 +283,7 @@ void GFXD3D11TextureTarget::resolve() if (mResolveTargets[i]) { D3D11_TEXTURE2D_DESC desc; - mTargets[i]->GetDesc(&desc); + mTargets[i]->GetDesc(&desc); D3D11DEVICECONTEXT->CopySubresourceRegion(mResolveTargets[i]->get2DTex(), 0, 0, 0, 0, mTargets[i], 0, NULL); } } @@ -407,10 +407,10 @@ void GFXD3D11WindowTarget::activate() void GFXD3D11WindowTarget::resolveTo(GFXTextureObject *tex) { - GFXDEBUGEVENT_SCOPE(GFXPCD3D11WindowTarget_resolveTo, ColorI::RED); + GFXDEBUGEVENT_SCOPE(GFXPCD3D11WindowTarget_resolveTo, ColorI::RED); - D3D11_TEXTURE2D_DESC desc; - ID3D11Texture2D* surf = ((GFXD3D11TextureObject*)(tex))->get2DTex(); - surf->GetDesc(&desc); - D3D11DEVICECONTEXT->ResolveSubresource(surf, 0, D3D11->mDeviceBackbuffer, 0, desc.Format); + D3D11_TEXTURE2D_DESC desc; + ID3D11Texture2D* surf = ((GFXD3D11TextureObject*)(tex))->get2DTex(); + surf->GetDesc(&desc); + D3D11DEVICECONTEXT->ResolveSubresource(surf, 0, D3D11->mDeviceBackbuffer, 0, desc.Format); } \ No newline at end of file diff --git a/Engine/source/gfx/gfxAdapter.h b/Engine/source/gfx/gfxAdapter.h index 221cc4ef3..a7988e910 100644 --- a/Engine/source/gfx/gfxAdapter.h +++ b/Engine/source/gfx/gfxAdapter.h @@ -37,8 +37,8 @@ struct GFXAdapterLUID { - unsigned long LowPart; - long HighPart; + unsigned long LowPart; + long HighPart; }; struct GFXAdapter diff 
--git a/Engine/source/gfx/gfxDevice.cpp b/Engine/source/gfx/gfxDevice.cpp index 5dcb0bb40..0ee217854 100644 --- a/Engine/source/gfx/gfxDevice.cpp +++ b/Engine/source/gfx/gfxDevice.cpp @@ -160,8 +160,8 @@ GFXDevice::GFXDevice() // misc mAllowRender = true; mCurrentRenderStyle = RS_Standard; - mCurrentStereoTarget = -1; - mStereoHeadTransform = MatrixF(1); + mCurrentStereoTarget = -1; + mStereoHeadTransform = MatrixF(1); mCanCurrentlyRender = false; mInitialized = false; diff --git a/Engine/source/gfx/gfxDevice.h b/Engine/source/gfx/gfxDevice.h index 5aec5ad8e..ef3bbce13 100644 --- a/Engine/source/gfx/gfxDevice.h +++ b/Engine/source/gfx/gfxDevice.h @@ -219,11 +219,11 @@ public: /// The device has started rendering a frame's field (such as for side-by-side rendering) deStartOfField, - /// left stereo frame has been rendered - deLeftStereoFrameRendered, + /// left stereo frame has been rendered + deLeftStereoFrameRendered, - /// right stereo frame has been rendered - deRightStereoFrameRendered, + /// right stereo frame has been rendered + deRightStereoFrameRendered, /// The device is about to finish rendering a frame's field deEndOfField, @@ -254,7 +254,7 @@ public: { RS_Standard = 0, RS_StereoSideBySide = (1<<0), // Render into current Render Target side-by-side - RS_StereoSeparate = (1<<1) // Render in two separate passes (then combined by vr compositor) + RS_StereoSeparate = (1<<1) // Render in two separate passes (then combined by vr compositor) }; enum GFXDeviceLimits @@ -409,7 +409,7 @@ public: setViewport(mStereoViewports[eyeId]); } - mCurrentStereoTarget = eyeId; + mCurrentStereoTarget = eyeId; } GFXCardProfiler* getCardProfiler() const { return mCardProfiler; } @@ -481,7 +481,7 @@ public: /// Returns the first format from the list which meets all /// the criteria of the texture profile and query options. virtual GFXFormat selectSupportedFormat(GFXTextureProfile *profile, - const Vector &formats, bool texture, bool mustblend, bool mustfilter) = 0; + const Vector &formats, bool texture, bool mustblend, bool mustfilter) = 0; /// @} diff --git a/Engine/source/gfx/gfxDrawUtil.cpp b/Engine/source/gfx/gfxDrawUtil.cpp index d68b05e55..3dfe28a3e 100644 --- a/Engine/source/gfx/gfxDrawUtil.cpp +++ b/Engine/source/gfx/gfxDrawUtil.cpp @@ -61,7 +61,7 @@ void GFXDrawUtil::_setupStateBlocks() bitmapStretchSR.setZReadWrite(false); bitmapStretchSR.setBlend(true, GFXBlendSrcAlpha, GFXBlendInvSrcAlpha); bitmapStretchSR.samplersDefined = true; - bitmapStretchSR.setColorWrites(true, true, true, false); + bitmapStretchSR.setColorWrites(true, true, true, false); // NOTE: comment this out if alpha write is needed // Linear: Create wrap SB bitmapStretchSR.samplers[0] = GFXSamplerStateDesc::getWrapLinear(); diff --git a/Engine/source/gfx/gfxFontRenderBatcher.cpp b/Engine/source/gfx/gfxFontRenderBatcher.cpp index 84551506b..fd4cb11cb 100644 --- a/Engine/source/gfx/gfxFontRenderBatcher.cpp +++ b/Engine/source/gfx/gfxFontRenderBatcher.cpp @@ -51,7 +51,7 @@ FontRenderBatcher::FontRenderBatcher() : mStorage(8096) // so it may have to change. 
-bramage f.samplers[0].textureColorOp = GFXTOPAdd; - f.setColorWrites(true, true, true, false); + f.setColorWrites(true, true, true, false); // NOTE: comment this out if alpha write is needed mFontSB = GFX->createStateBlock(f); } } diff --git a/Engine/source/gfx/gfxInit.cpp b/Engine/source/gfx/gfxInit.cpp index 9d0cf36ac..be4389f73 100644 --- a/Engine/source/gfx/gfxInit.cpp +++ b/Engine/source/gfx/gfxInit.cpp @@ -200,18 +200,18 @@ GFXAdapter* GFXInit::getAdapterOfType( GFXAdapterType type, const char* outputDe GFXAdapter* GFXInit::getAdapterOfType(GFXAdapterType type, S32 outputDeviceIndex) { - for (U32 i = 0; i < smAdapters.size(); i++) - { - if (smAdapters[i]->mType == type) - { - if (smAdapters[i]->mIndex == outputDeviceIndex) - { - return smAdapters[i]; - } - } - } + for (U32 i = 0; i < smAdapters.size(); i++) + { + if (smAdapters[i]->mType == type) + { + if (smAdapters[i]->mIndex == outputDeviceIndex) + { + return smAdapters[i]; + } + } + } - return NULL; + return NULL; } GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevice) @@ -237,23 +237,23 @@ GFXAdapter* GFXInit::chooseAdapter( GFXAdapterType type, const char* outputDevic GFXAdapter* GFXInit::chooseAdapter(GFXAdapterType type, S32 outputDeviceIndex) { - GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDeviceIndex); + GFXAdapter* adapter = GFXInit::getAdapterOfType(type, outputDeviceIndex); - if (!adapter && type != OpenGL) - { - Con::errorf("The requested renderer, %s, doesn't seem to be available." - " Trying the default, OpenGL.", getAdapterNameFromType(type)); - adapter = GFXInit::getAdapterOfType(OpenGL, outputDeviceIndex); - } + if (!adapter && type != OpenGL) + { + Con::errorf("The requested renderer, %s, doesn't seem to be available." + " Trying the default, OpenGL.", getAdapterNameFromType(type)); + adapter = GFXInit::getAdapterOfType(OpenGL, outputDeviceIndex); + } - if (!adapter) - { - Con::errorf("The OpenGL renderer doesn't seem to be available. Trying the GFXNulDevice."); - adapter = GFXInit::getAdapterOfType(NullDevice, 0); - } + if (!adapter) + { + Con::errorf("The OpenGL renderer doesn't seem to be available. Trying the GFXNullDevice."); + adapter = GFXInit::getAdapterOfType(NullDevice, 0); + } - AssertFatal(adapter, "There is no rendering device available whatsoever."); - return adapter; + AssertFatal(adapter, "There is no rendering device available whatsoever."); + return adapter; } const char* GFXInit::getAdapterNameFromType(GFXAdapterType type) @@ -304,11 +304,11 @@ GFXAdapter *GFXInit::getBestAdapterChoice() } else { - S32 adapterIdx = dAtoi(adapterDevice.c_str()); - if (adapterIdx == -1) - adapter = chooseAdapter(adapterType, outputDevice.c_str()); - else - adapter = chooseAdapter(adapterType, adapterIdx); + S32 adapterIdx = dAtoi(adapterDevice.c_str()); + if (adapterIdx == -1) + adapter = chooseAdapter(adapterType, outputDevice.c_str()); + else + adapter = chooseAdapter(adapterType, adapterIdx); } // Did they have one? Return it. diff --git a/Engine/source/gfx/gfxInit.h b/Engine/source/gfx/gfxInit.h index 73cdbba02..4152d9ce9 100644 --- a/Engine/source/gfx/gfxInit.h +++ b/Engine/source/gfx/gfxInit.h @@ -74,16 +74,16 @@ public: /// This method never returns NULL. 
static GFXAdapter *chooseAdapter( GFXAdapterType type, const char* outputDevice); - /// Override which chooses an adapter based on an index instead - static GFXAdapter *chooseAdapter( GFXAdapterType type, S32 outputDeviceIndex ); + /// Override which chooses an adapter based on an index instead + static GFXAdapter *chooseAdapter( GFXAdapterType type, S32 outputDeviceIndex ); /// Gets the first adapter of the requested type (and on the requested output device) /// from the list of enumerated adapters. Should only call this after a call to /// enumerateAdapters. static GFXAdapter *getAdapterOfType( GFXAdapterType type, const char* outputDevice ); - /// Override which gets an adapter based on an index instead - static GFXAdapter *getAdapterOfType( GFXAdapterType type, S32 outputDeviceIndex ); + /// Override which gets an adapter based on an index instead + static GFXAdapter *getAdapterOfType( GFXAdapterType type, S32 outputDeviceIndex ); /// Converts a GFXAdapterType to a string name. Useful for writing out prefs static const char *getAdapterNameFromType( GFXAdapterType type ); diff --git a/Engine/source/gfx/gfxTextureProfile.h b/Engine/source/gfx/gfxTextureProfile.h index d4840cd26..270a41947 100644 --- a/Engine/source/gfx/gfxTextureProfile.h +++ b/Engine/source/gfx/gfxTextureProfile.h @@ -102,8 +102,8 @@ public: /// This is mainly a depth buffer optimization. NoDiscard = BIT(10), - /// Texture is managed by another process, thus should not be modified - NoModify = BIT(11) + /// Texture is managed by another process, thus should not be modified + NoModify = BIT(11) }; @@ -167,7 +167,7 @@ public: inline bool noMip() const { return testFlag(NoMipmap); } inline bool isPooled() const { return testFlag(Pooled); } inline bool canDiscard() const { return !testFlag(NoDiscard); } - inline bool canModify() const { return !testFlag(NoModify); } + inline bool canModify() const { return !testFlag(NoModify); } private: /// These constants control the packing for the profile; if you add flags, types, or diff --git a/Engine/source/gfx/sim/debugDraw.cpp b/Engine/source/gfx/sim/debugDraw.cpp index 8a591fc26..cfa6bf47e 100644 --- a/Engine/source/gfx/sim/debugDraw.cpp +++ b/Engine/source/gfx/sim/debugDraw.cpp @@ -141,73 +141,73 @@ void DebugDrawer::setupStateBlocks() void DebugDrawer::drawBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color) { - Point3F point0(a.x, a.y, a.z); - Point3F point1(a.x, b.y, a.z); - Point3F point2(b.x, b.y, a.z); - Point3F point3(b.x, a.y, a.z); + Point3F point0(a.x, a.y, a.z); + Point3F point1(a.x, b.y, a.z); + Point3F point2(b.x, b.y, a.z); + Point3F point3(b.x, a.y, a.z); - Point3F point4(a.x, a.y, b.z); - Point3F point5(a.x, b.y, b.z); - Point3F point6(b.x, b.y, b.z); - Point3F point7(b.x, a.y, b.z); + Point3F point4(a.x, a.y, b.z); + Point3F point5(a.x, b.y, b.z); + Point3F point6(b.x, b.y, b.z); + Point3F point7(b.x, a.y, b.z); - // Draw one plane - drawLine(point0, point1, color); - drawLine(point1, point2, color); - drawLine(point2, point3, color); - drawLine(point3, point0, color); + // Draw one plane + drawLine(point0, point1, color); + drawLine(point1, point2, color); + drawLine(point2, point3, color); + drawLine(point3, point0, color); - // Draw the other plane - drawLine(point4, point5, color); - drawLine(point5, point6, color); - drawLine(point6, point7, color); - drawLine(point7, point4, color); + // Draw the other plane + drawLine(point4, point5, color); + drawLine(point5, point6, color); + drawLine(point6, point7, color); + drawLine(point7, point4, 
color); - // Draw the connecting corners - drawLine(point0, point4, color); - drawLine(point1, point5, color); - drawLine(point2, point6, color); - drawLine(point3, point7, color); + // Draw the connecting corners + drawLine(point0, point4, color); + drawLine(point1, point5, color); + drawLine(point2, point6, color); + drawLine(point3, point7, color); } void DebugDrawer::drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform) { - Point3F point0(a.x, a.y, a.z); - Point3F point1(a.x, b.y, a.z); - Point3F point2(b.x, b.y, a.z); - Point3F point3(b.x, a.y, a.z); + Point3F point0(a.x, a.y, a.z); + Point3F point1(a.x, b.y, a.z); + Point3F point2(b.x, b.y, a.z); + Point3F point3(b.x, a.y, a.z); - Point3F point4(a.x, a.y, b.z); - Point3F point5(a.x, b.y, b.z); - Point3F point6(b.x, b.y, b.z); - Point3F point7(b.x, a.y, b.z); + Point3F point4(a.x, a.y, b.z); + Point3F point5(a.x, b.y, b.z); + Point3F point6(b.x, b.y, b.z); + Point3F point7(b.x, a.y, b.z); - transform.mulP(point0); - transform.mulP(point1); - transform.mulP(point2); - transform.mulP(point3); - transform.mulP(point4); - transform.mulP(point5); - transform.mulP(point6); - transform.mulP(point7); + transform.mulP(point0); + transform.mulP(point1); + transform.mulP(point2); + transform.mulP(point3); + transform.mulP(point4); + transform.mulP(point5); + transform.mulP(point6); + transform.mulP(point7); - // Draw one plane - drawLine(point0, point1, color); - drawLine(point1, point2, color); - drawLine(point2, point3, color); - drawLine(point3, point0, color); + // Draw one plane + drawLine(point0, point1, color); + drawLine(point1, point2, color); + drawLine(point2, point3, color); + drawLine(point3, point0, color); - // Draw the other plane - drawLine(point4, point5, color); - drawLine(point5, point6, color); - drawLine(point6, point7, color); - drawLine(point7, point4, color); + // Draw the other plane + drawLine(point4, point5, color); + drawLine(point5, point6, color); + drawLine(point6, point7, color); + drawLine(point7, point4, color); - // Draw the connecting corners - drawLine(point0, point4, color); - drawLine(point1, point5, color); - drawLine(point2, point6, color); - drawLine(point3, point7, color); + // Draw the connecting corners + drawLine(point0, point4, color); + drawLine(point1, point5, color); + drawLine(point2, point6, color); + drawLine(point3, point7, color); } void DebugDrawer::render(bool clear) diff --git a/Engine/source/gfx/sim/debugDraw.h b/Engine/source/gfx/sim/debugDraw.h index ddaba1164..8c0118c10 100644 --- a/Engine/source/gfx/sim/debugDraw.h +++ b/Engine/source/gfx/sim/debugDraw.h @@ -126,7 +126,7 @@ public: void drawTransformedBoxOutline(const Point3F &a, const Point3F &b, const ColorF &color, const MatrixF& transform); void drawBox(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); - void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); + void drawLine(const Point3F &a, const Point3F &b, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); void drawTri(const Point3F &a, const Point3F &b, const Point3F &c, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); void drawText(const Point3F& pos, const String& text, const ColorF &color = ColorF(1.0f,1.0f,1.0f)); void drawCapsule(const Point3F &a, const F32 &radius, const F32 &height, const ColorF &color = ColorF(1.0f, 1.0f, 1.0f)); @@ -181,7 +181,7 @@ private: DirectionLine, OutlinedText, Capsule, - } type; ///< Type of the primitive. 
The meanings of a,b,c are determined by this. + } type; ///< Type of the primitive. The meanings of a,b,c are determined by this. SimTime dieTime; ///< Time at which we should remove this from the list. bool useZ; ///< If true, do z-checks for this primitive. diff --git a/Engine/source/gui/3d/guiTSControl.cpp b/Engine/source/gui/3d/guiTSControl.cpp index 36ae70338..e66ace994 100644 --- a/Engine/source/gui/3d/guiTSControl.cpp +++ b/Engine/source/gui/3d/guiTSControl.cpp @@ -64,9 +64,9 @@ Vector GuiTSCtrl::smAwakeTSCtrls; ImplementEnumType( GuiTSRenderStyles, "Style of rendering for a GuiTSCtrl.\n\n" "@ingroup Gui3D" ) - { GuiTSCtrl::RenderStyleStandard, "standard" }, - { GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" }, - { GuiTSCtrl::RenderStyleStereoSeparate, "stereo separate" }, + { GuiTSCtrl::RenderStyleStandard, "standard" }, + { GuiTSCtrl::RenderStyleStereoSideBySide, "stereo side by side" }, + { GuiTSCtrl::RenderStyleStereoSeparate, "stereo separate" }, EndImplementEnumType; //----------------------------------------------------------------------------- @@ -199,9 +199,9 @@ void GuiTSCtrl::initPersistFields() void GuiTSCtrl::consoleInit() { Con::addVariable("$TSControl::frameCount", TypeS32, &smFrameCount, "The number of frames that have been rendered since this control was created.\n" - "@ingroup Rendering\n"); + "@ingroup Rendering\n"); Con::addVariable("$TSControl::useLatestDisplayTransform", TypeBool, &smUseLatestDisplayTransform, "Use the latest view transform when rendering stereo instead of the one calculated by the last move.\n" - "@ingroup Rendering\n"); + "@ingroup Rendering\n"); } //----------------------------------------------------------------------------- @@ -371,15 +371,15 @@ void GuiTSCtrl::_internalRender(RectI guiViewport, RectI renderViewport, Frustum if (mReflectPriority > 0) { - // Get the total reflection priority. - F32 totalPriority = 0; - for (U32 i = 0; i < smAwakeTSCtrls.size(); i++) - if (smAwakeTSCtrls[i]->isVisible()) - totalPriority += smAwakeTSCtrls[i]->mReflectPriority; + // Get the total reflection priority. 
+ F32 totalPriority = 0; + for (U32 i = 0; i < smAwakeTSCtrls.size(); i++) + if (smAwakeTSCtrls[i]->isVisible()) + totalPriority += smAwakeTSCtrls[i]->mReflectPriority; - REFLECTMGR->update(mReflectPriority / totalPriority, - renderSize, - mLastCameraQuery); + REFLECTMGR->update(mReflectPriority / totalPriority, + renderSize, + mLastCameraQuery); } GFX->setActiveRenderTarget(origTarget); @@ -431,22 +431,22 @@ void GuiTSCtrl::_internalRender(RectI guiViewport, RectI renderViewport, Frustum DebugDrawer* debugDraw = DebugDrawer::get(); if (mRenderStyle == RenderStyleStereoSideBySide && debugDraw->willDraw()) { - // For SBS we need to render over each viewport - Frustum frustum; + // For SBS we need to render over each viewport + Frustum frustum; - GFX->setViewport(mLastCameraQuery.stereoViewports[0]); - MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); - GFX->setFrustum(frustum); - debugDraw->render(false); + GFX->setViewport(mLastCameraQuery.stereoViewports[0]); + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); + GFX->setFrustum(frustum); + debugDraw->render(false); - GFX->setViewport(mLastCameraQuery.stereoViewports[1]); - MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); - GFX->setFrustum(frustum); - debugDraw->render(); + GFX->setViewport(mLastCameraQuery.stereoViewports[1]); + MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); + GFX->setFrustum(frustum); + debugDraw->render(); } else { - debugDraw->render(); + debugDraw->render(); } saver.restore(); @@ -637,23 +637,23 @@ void GuiTSCtrl::onRender(Point2I offset, const RectI &updateRect) MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[0]); mLastCameraQuery.cameraMatrix = myTransforms[0]; frustum.update(); - GFX->activateStereoTarget(0); - mLastCameraQuery.currentEye = 0; - GFX->beginField(); - _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); + GFX->activateStereoTarget(0); + mLastCameraQuery.currentEye = 0; + GFX->beginField(); + _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); GFX->getDeviceEventSignal().trigger(GFXDevice::deLeftStereoFrameRendered); - GFX->endField(); + GFX->endField(); // Right - GFX->activateStereoTarget(1); - mLastCameraQuery.currentEye = 1; + GFX->activateStereoTarget(1); + mLastCameraQuery.currentEye = 1; MathUtils::makeFovPortFrustum(&frustum, mLastCameraQuery.ortho, mLastCameraQuery.nearPlane, mLastCameraQuery.farPlane, mLastCameraQuery.fovPort[1]); mLastCameraQuery.cameraMatrix = myTransforms[1]; - frustum.update(); - GFX->beginField(); - _internalRender(RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[1]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); - GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); - GFX->endField(); + frustum.update(); + GFX->beginField(); + _internalRender(RectI(Point2I(0, 0), 
mLastCameraQuery.stereoTargets[1]->getSize()), RectI(Point2I(0, 0), mLastCameraQuery.stereoTargets[0]->getSize()), frustum); + GFX->getDeviceEventSignal().trigger(GFXDevice::deRightStereoFrameRendered); + GFX->endField(); mLastCameraQuery.cameraMatrix = origMatrix; diff --git a/Engine/source/platform/input/oculusVR/oculusVRDevice.h b/Engine/source/platform/input/oculusVR/oculusVRDevice.h index 603737391..10223b9fc 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRDevice.h @@ -83,8 +83,8 @@ protected: /// Which HMD is the active one U32 mActiveDeviceId; - /// Device id we need to use to hook up with oculus - ovrGraphicsLuid mLuid; + /// Device id we need to use to hook up with oculus + ovrGraphicsLuid mLuid; protected: void cleanUp(); diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp index ceccfe4c1..473749320 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.cpp @@ -46,10 +46,10 @@ struct OculusTexture { - virtual void AdvanceToNextTexture() = 0; + virtual void AdvanceToNextTexture() = 0; - virtual ~OculusTexture() { - } + virtual ~OculusTexture() { + } }; //------------------------------------------------------------ @@ -57,105 +57,105 @@ struct OculusTexture // needed for D3D11 rendering. struct D3D11OculusTexture : public OculusTexture { - ovrHmd hmd; - ovrSwapTextureSet * TextureSet; - static const int TextureCount = 2; - GFXTexHandle TexRtv[TextureCount]; - GFXDevice *Owner; + ovrHmd hmd; + ovrSwapTextureSet * TextureSet; + static const int TextureCount = 2; + GFXTexHandle TexRtv[TextureCount]; + GFXDevice *Owner; - D3D11OculusTexture(GFXDevice* owner) : - hmd(nullptr), - TextureSet(nullptr), - Owner(owner) - { - TexRtv[0] = TexRtv[1] = nullptr; - } + D3D11OculusTexture(GFXDevice* owner) : + hmd(nullptr), + TextureSet(nullptr), + Owner(owner) + { + TexRtv[0] = TexRtv[1] = nullptr; + } - bool Init(ovrHmd _hmd, int sizeW, int sizeH) - { - hmd = _hmd; + bool Init(ovrHmd _hmd, int sizeW, int sizeH) + { + hmd = _hmd; - D3D11_TEXTURE2D_DESC dsDesc; - dsDesc.Width = sizeW; - dsDesc.Height = sizeH; - dsDesc.MipLevels = 1; - dsDesc.ArraySize = 1; - dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; - dsDesc.SampleDesc.Count = 1; // No multi-sampling allowed - dsDesc.SampleDesc.Quality = 0; - dsDesc.Usage = D3D11_USAGE_DEFAULT; - dsDesc.CPUAccessFlags = 0; - dsDesc.MiscFlags = 0; - dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; + D3D11_TEXTURE2D_DESC dsDesc; + dsDesc.Width = sizeW; + dsDesc.Height = sizeH; + dsDesc.MipLevels = 1; + dsDesc.ArraySize = 1; + dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; + dsDesc.SampleDesc.Count = 1; // No multi-sampling allowed + dsDesc.SampleDesc.Quality = 0; + dsDesc.Usage = D3D11_USAGE_DEFAULT; + dsDesc.CPUAccessFlags = 0; + dsDesc.MiscFlags = 0; + dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE | D3D11_BIND_RENDER_TARGET; - GFXD3D11Device* device = static_cast(GFX); - ovrResult result = ovr_CreateSwapTextureSetD3D11(hmd, device->mD3DDevice, &dsDesc, ovrSwapTextureSetD3D11_Typeless, &TextureSet); - if (!OVR_SUCCESS(result)) - return false; + GFXD3D11Device* device = static_cast(GFX); + ovrResult result = ovr_CreateSwapTextureSetD3D11(hmd, device->mD3DDevice, &dsDesc, ovrSwapTextureSetD3D11_Typeless, &TextureSet); + if 
(!OVR_SUCCESS(result)) + return false; - AssertFatal(TextureSet->TextureCount == TextureCount, "TextureCount mismatch."); + AssertFatal(TextureSet->TextureCount == TextureCount, "TextureCount mismatch."); - for (int i = 0; i < TextureCount; ++i) - { - ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i]; - D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; - rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM; - rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; + for (int i = 0; i < TextureCount; ++i) + { + ovrD3D11Texture* tex = (ovrD3D11Texture*)&TextureSet->Textures[i]; + D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; + rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM; + rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; - GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); - object->registerResourceWithDevice(GFX); - *(object->getSRViewPtr()) = tex->D3D11.pSRView; - *(object->get2DTexPtr()) = tex->D3D11.pTexture; - device->mD3DDevice->CreateRenderTargetView(tex->D3D11.pTexture, &rtvd, object->getRTViewPtr()); + GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); + object->registerResourceWithDevice(GFX); + *(object->getSRViewPtr()) = tex->D3D11.pSRView; + *(object->get2DTexPtr()) = tex->D3D11.pTexture; + device->mD3DDevice->CreateRenderTargetView(tex->D3D11.pTexture, &rtvd, object->getRTViewPtr()); - // Add refs for texture release later on - if (object->getSRView()) object->getSRView()->AddRef(); - //object->getRTView()->AddRef(); - if (object->get2DTex()) object->get2DTex()->AddRef(); - object->isManaged = true; + // Add refs for texture release later on + if (object->getSRView()) object->getSRView()->AddRef(); + //object->getRTView()->AddRef(); + if (object->get2DTex()) object->get2DTex()->AddRef(); + object->isManaged = true; - // Get the actual size of the texture... - D3D11_TEXTURE2D_DESC probeDesc; - ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC)); - object->get2DTex()->GetDesc(&probeDesc); + // Get the actual size of the texture... + D3D11_TEXTURE2D_DESC probeDesc; + ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC)); + object->get2DTex()->GetDesc(&probeDesc); - object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0); - object->mBitmapSize = object->mTextureSize; - int fmt = probeDesc.Format; + object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0); + object->mBitmapSize = object->mTextureSize; + int fmt = probeDesc.Format; - if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS) - { - object->mFormat = GFXFormatR8G8B8A8; // usual case - } - else - { - // TODO: improve this. this can be very bad. - GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt); - object->mFormat = (GFXFormat)fmt; - } - TexRtv[i] = object; - } + if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS) + { + object->mFormat = GFXFormatR8G8B8A8; // usual case + } + else + { + // TODO: improve this. this can be very bad. 
+ GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt); + object->mFormat = (GFXFormat)fmt; + } + TexRtv[i] = object; + } - return true; - } + return true; + } - ~D3D11OculusTexture() - { - for (int i = 0; i < TextureCount; ++i) - { - SAFE_DELETE(TexRtv[i]); - } - if (TextureSet) - { - ovr_DestroySwapTextureSet(hmd, TextureSet); - } - } + ~D3D11OculusTexture() + { + for (int i = 0; i < TextureCount; ++i) + { + SAFE_DELETE(TexRtv[i]); + } + if (TextureSet) + { + ovr_DestroySwapTextureSet(hmd, TextureSet); + } + } - void AdvanceToNextTexture() - { - TextureSet->CurrentIndex = (TextureSet->CurrentIndex + 1) % TextureSet->TextureCount; - } + void AdvanceToNextTexture() + { + TextureSet->CurrentIndex = (TextureSet->CurrentIndex + 1) % TextureSet->TextureCount; + } }; @@ -176,7 +176,7 @@ OculusVRHMDDevice::OculusVRHMDDevice() mConnection = NULL; mSensor = NULL; mActionCodeIndex = 0; - mTextureSwapSet = NULL; + mTextureSwapSet = NULL; } OculusVRHMDDevice::~OculusVRHMDDevice() @@ -212,35 +212,35 @@ void OculusVRHMDDevice::set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeInde mDevice = hmd; - ovrHmdDesc desc = ovr_GetHmdDesc(hmd); - int caps = ovr_GetTrackingCaps(hmd); + ovrHmdDesc desc = ovr_GetHmdDesc(hmd); + int caps = ovr_GetTrackingCaps(hmd); mSupportedCaps = desc.AvailableHmdCaps; - mCurrentCaps = mSupportedCaps; - - mTimewarp = true; + mCurrentCaps = mSupportedCaps; + + mTimewarp = true; // DeviceInfo mProductName = desc.ProductName; mManufacturer = desc.Manufacturer; mVersion = desc.FirmwareMajor; - // - Vector adapterList; - GFXD3D11Device::enumerateAdapters(adapterList); + // + Vector adapterList; + GFXD3D11Device::enumerateAdapters(adapterList); - dMemcpy(&mLuid, &luid, sizeof(mLuid)); - mDisplayId = -1; + dMemcpy(&mLuid, &luid, sizeof(mLuid)); + mDisplayId = -1; - for (U32 i = 0, sz = adapterList.size(); i < sz; i++) - { - GFXAdapter* adapter = adapterList[i]; - if (dMemcmp(&adapter->mLUID, &mLuid, sizeof(mLuid)) == 0) - { - mDisplayId = adapter->mIndex; - mDisplayDeviceType = "D3D11"; // TOFIX this - } - } + for (U32 i = 0, sz = adapterList.size(); i < sz; i++) + { + GFXAdapter* adapter = adapterList[i]; + if (dMemcmp(&adapter->mLUID, &mLuid, sizeof(mLuid)) == 0) + { + mDisplayId = adapter->mIndex; + mDisplayDeviceType = "D3D11"; // TOFIX this + } + } mResolution.x = desc.Resolution.w; mResolution.y = desc.Resolution.h; @@ -256,7 +256,7 @@ void OculusVRHMDDevice::set(ovrHmd hmd, ovrGraphicsLuid luid, U32 actionCodeInde mSensor = new OculusVRSensorDevice(); mSensor->set(mDevice, mActionCodeIndex); - mDebugMirrorTexture = NULL; + mDebugMirrorTexture = NULL; updateCaps(); } @@ -274,15 +274,15 @@ void OculusVRHMDDevice::setOptimalDisplaySize(GuiCanvas *canvas) PlatformWindow *window = canvas->getPlatformWindow(); GFXTarget *target = window->getGFXTarget(); - Point2I requiredSize(0, 0); + Point2I requiredSize(0, 0); - ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); - ovrSizei leftSize = ovr_GetFovTextureSize(mDevice, ovrEye_Left, desc.DefaultEyeFov[0], mCurrentPixelDensity); - ovrSizei rightSize = ovr_GetFovTextureSize(mDevice, ovrEye_Right, desc.DefaultEyeFov[1], mCurrentPixelDensity); + ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); + ovrSizei leftSize = ovr_GetFovTextureSize(mDevice, ovrEye_Left, desc.DefaultEyeFov[0], mCurrentPixelDensity); + ovrSizei rightSize = ovr_GetFovTextureSize(mDevice, ovrEye_Right, desc.DefaultEyeFov[1], mCurrentPixelDensity); - requiredSize.x = leftSize.w + rightSize.h; - requiredSize.y = mMax(leftSize.h, rightSize.h); - + requiredSize.x = leftSize.w + 
rightSize.h; + requiredSize.y = mMax(leftSize.h, rightSize.h); + if (target && target->getSize() != requiredSize) { GFXVideoMode newMode; @@ -302,7 +302,7 @@ bool OculusVRHMDDevice::isDisplayingWarning() if (!mIsValid || !mDevice) return false; - return false;/* + return false;/* ovrHSWDisplayState displayState; ovrHmd_GetHSWDisplayState(mDevice, &displayState); @@ -326,145 +326,145 @@ GFXTexHandle OculusVRHMDDevice::getPreviewTexture() bool OculusVRHMDDevice::setupTargets() { - // Create eye render buffers - ID3D11RenderTargetView * eyeRenderTexRtv[2]; - ovrLayerEyeFov ld = { { ovrLayerType_EyeFov } }; - mRenderLayer = ld; + // Create eye render buffers + ID3D11RenderTargetView * eyeRenderTexRtv[2]; + ovrLayerEyeFov ld = { { ovrLayerType_EyeFov } }; + mRenderLayer = ld; - GFXD3D11Device* device = static_cast(GFX); + GFXD3D11Device* device = static_cast(GFX); - ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); - for (int i = 0; i < 2; i++) - { - mRenderLayer.Fov[i] = desc.DefaultEyeFov[i]; - mRenderLayer.Viewport[i].Size = ovr_GetFovTextureSize(mDevice, (ovrEyeType)i, mRenderLayer.Fov[i], mCurrentPixelDensity); - mEyeRenderDesc[i] = ovr_GetRenderDesc(mDevice, (ovrEyeType_)(ovrEye_Left+i), mRenderLayer.Fov[i]); - } + ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); + for (int i = 0; i < 2; i++) + { + mRenderLayer.Fov[i] = desc.DefaultEyeFov[i]; + mRenderLayer.Viewport[i].Size = ovr_GetFovTextureSize(mDevice, (ovrEyeType)i, mRenderLayer.Fov[i], mCurrentPixelDensity); + mEyeRenderDesc[i] = ovr_GetRenderDesc(mDevice, (ovrEyeType_)(ovrEye_Left+i), mRenderLayer.Fov[i]); + } - ovrSizei recommendedEyeTargetSize[2]; - recommendedEyeTargetSize[0] = mRenderLayer.Viewport[0].Size; - recommendedEyeTargetSize[1] = mRenderLayer.Viewport[1].Size; + ovrSizei recommendedEyeTargetSize[2]; + recommendedEyeTargetSize[0] = mRenderLayer.Viewport[0].Size; + recommendedEyeTargetSize[1] = mRenderLayer.Viewport[1].Size; - if (mTextureSwapSet) - { - delete mTextureSwapSet; - mTextureSwapSet = NULL; - } + if (mTextureSwapSet) + { + delete mTextureSwapSet; + mTextureSwapSet = NULL; + } - // Calculate render target size - if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide) - { - // Setup a single texture, side-by-side viewports - Point2I rtSize( - recommendedEyeTargetSize[0].w + recommendedEyeTargetSize[1].w, - recommendedEyeTargetSize[0].h > recommendedEyeTargetSize[1].h ? recommendedEyeTargetSize[0].h : recommendedEyeTargetSize[1].h - ); + // Calculate render target size + if (mDesiredRenderingMode == GFXDevice::RS_StereoSideBySide) + { + // Setup a single texture, side-by-side viewports + Point2I rtSize( + recommendedEyeTargetSize[0].w + recommendedEyeTargetSize[1].w, + recommendedEyeTargetSize[0].h > recommendedEyeTargetSize[1].h ? 
recommendedEyeTargetSize[0].h : recommendedEyeTargetSize[1].h + ); - GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat(); - mRTFormat = targetFormat; + GFXFormat targetFormat = GFX->getActiveRenderTarget()->getFormat(); + mRTFormat = targetFormat; - rtSize = generateRenderTarget(mStereoRT, mStereoDepthTexture, rtSize); + rtSize = generateRenderTarget(mStereoRT, mStereoDepthTexture, rtSize); - // Generate the swap texture we need to store the final image - D3D11OculusTexture* tex = new D3D11OculusTexture(GFX); - if (tex->Init(mDevice, rtSize.x, rtSize.y)) - { - mTextureSwapSet = tex; - } + // Generate the swap texture we need to store the final image + D3D11OculusTexture* tex = new D3D11OculusTexture(GFX); + if (tex->Init(mDevice, rtSize.x, rtSize.y)) + { + mTextureSwapSet = tex; + } - mRenderLayer.ColorTexture[0] = tex->TextureSet; - mRenderLayer.ColorTexture[1] = tex->TextureSet; + mRenderLayer.ColorTexture[0] = tex->TextureSet; + mRenderLayer.ColorTexture[1] = tex->TextureSet; - mRenderLayer.Viewport[0].Pos.x = 0; - mRenderLayer.Viewport[0].Pos.y = 0; - mRenderLayer.Viewport[1].Pos.x = (rtSize.x + 1) / 2; - mRenderLayer.Viewport[1].Pos.y = 0; + mRenderLayer.Viewport[0].Pos.x = 0; + mRenderLayer.Viewport[0].Pos.y = 0; + mRenderLayer.Viewport[1].Pos.x = (rtSize.x + 1) / 2; + mRenderLayer.Viewport[1].Pos.y = 0; - // Left - mEyeRT[0] = mStereoRT; - mEyeViewport[0] = RectI(Point2I(mRenderLayer.Viewport[0].Pos.x, mRenderLayer.Viewport[0].Pos.y), Point2I(mRenderLayer.Viewport[0].Size.w, mRenderLayer.Viewport[0].Size.h)); + // Left + mEyeRT[0] = mStereoRT; + mEyeViewport[0] = RectI(Point2I(mRenderLayer.Viewport[0].Pos.x, mRenderLayer.Viewport[0].Pos.y), Point2I(mRenderLayer.Viewport[0].Size.w, mRenderLayer.Viewport[0].Size.h)); - // Right - mEyeRT[1] = mStereoRT; - mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h)); + // Right + mEyeRT[1] = mStereoRT; + mEyeViewport[1] = RectI(Point2I(mRenderLayer.Viewport[1].Pos.x, mRenderLayer.Viewport[1].Pos.y), Point2I(mRenderLayer.Viewport[1].Size.w, mRenderLayer.Viewport[1].Size.h)); - GFXD3D11Device* device = static_cast(GFX); + GFXD3D11Device* device = static_cast(GFX); - D3D11_TEXTURE2D_DESC dsDesc; - dsDesc.Width = rtSize.x; - dsDesc.Height = rtSize.y; - dsDesc.MipLevels = 1; - dsDesc.ArraySize = 1; - dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; - dsDesc.SampleDesc.Count = 1; - dsDesc.SampleDesc.Quality = 0; - dsDesc.Usage = D3D11_USAGE_DEFAULT; - dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; - dsDesc.CPUAccessFlags = 0; - dsDesc.MiscFlags = 0; + D3D11_TEXTURE2D_DESC dsDesc; + dsDesc.Width = rtSize.x; + dsDesc.Height = rtSize.y; + dsDesc.MipLevels = 1; + dsDesc.ArraySize = 1; + dsDesc.Format = DXGI_FORMAT_B8G8R8A8_UNORM_SRGB;// DXGI_FORMAT_R8G8B8A8_UNORM_SRGB; + dsDesc.SampleDesc.Count = 1; + dsDesc.SampleDesc.Quality = 0; + dsDesc.Usage = D3D11_USAGE_DEFAULT; + dsDesc.BindFlags = D3D11_BIND_SHADER_RESOURCE; + dsDesc.CPUAccessFlags = 0; + dsDesc.MiscFlags = 0; - // Create typeless when we are rendering as non-sRGB since we will override the texture format in the RTV - bool reinterpretSrgbAsLinear = true; - unsigned compositorTextureFlags = 0; - if (reinterpretSrgbAsLinear) - compositorTextureFlags |= ovrSwapTextureSetD3D11_Typeless; + // Create typeless when we are rendering as non-sRGB since we will override the texture format in the RTV + bool reinterpretSrgbAsLinear = true; + 
unsigned compositorTextureFlags = 0; + if (reinterpretSrgbAsLinear) + compositorTextureFlags |= ovrSwapTextureSetD3D11_Typeless; - ovrResult result = ovr_CreateMirrorTextureD3D11(mDevice, device->mD3DDevice, &dsDesc, compositorTextureFlags, &mDebugMirrorTexture); - - if (result == ovrError_DisplayLost || !mDebugMirrorTexture) - { - AssertFatal(false, "Something went wrong"); - return NULL; - } + ovrResult result = ovr_CreateMirrorTextureD3D11(mDevice, device->mD3DDevice, &dsDesc, compositorTextureFlags, &mDebugMirrorTexture); + + if (result == ovrError_DisplayLost || !mDebugMirrorTexture) + { + AssertFatal(false, "Something went wrong"); + return NULL; + } - // Create texture handle so we can render it in-game - ovrD3D11Texture* mirror_tex = (ovrD3D11Texture*)mDebugMirrorTexture; - D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; - rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM; - rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; + // Create texture handle so we can render it in-game + ovrD3D11Texture* mirror_tex = (ovrD3D11Texture*)mDebugMirrorTexture; + D3D11_RENDER_TARGET_VIEW_DESC rtvd = {}; + rtvd.Format = DXGI_FORMAT_B8G8R8A8_UNORM;// DXGI_FORMAT_R8G8B8A8_UNORM; + rtvd.ViewDimension = D3D11_RTV_DIMENSION_TEXTURE2D; - GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); - object->registerResourceWithDevice(GFX); - *(object->getSRViewPtr()) = mirror_tex->D3D11.pSRView; - *(object->get2DTexPtr()) = mirror_tex->D3D11.pTexture; - device->mD3DDevice->CreateRenderTargetView(mirror_tex->D3D11.pTexture, &rtvd, object->getRTViewPtr()); + GFXD3D11TextureObject* object = new GFXD3D11TextureObject(GFX, &VRTextureProfile); + object->registerResourceWithDevice(GFX); + *(object->getSRViewPtr()) = mirror_tex->D3D11.pSRView; + *(object->get2DTexPtr()) = mirror_tex->D3D11.pTexture; + device->mD3DDevice->CreateRenderTargetView(mirror_tex->D3D11.pTexture, &rtvd, object->getRTViewPtr()); - // Add refs for texture release later on - if (object->getSRView()) object->getSRView()->AddRef(); - //object->getRTView()->AddRef(); - if (object->get2DTex()) object->get2DTex()->AddRef(); - object->isManaged = true; + // Add refs for texture release later on + if (object->getSRView()) object->getSRView()->AddRef(); + //object->getRTView()->AddRef(); + if (object->get2DTex()) object->get2DTex()->AddRef(); + object->isManaged = true; - // Get the actual size of the texture... - D3D11_TEXTURE2D_DESC probeDesc; - ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC)); - object->get2DTex()->GetDesc(&probeDesc); + // Get the actual size of the texture... + D3D11_TEXTURE2D_DESC probeDesc; + ZeroMemory(&probeDesc, sizeof(D3D11_TEXTURE2D_DESC)); + object->get2DTex()->GetDesc(&probeDesc); - object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0); - object->mBitmapSize = object->mTextureSize; - int fmt = probeDesc.Format; + object->mTextureSize.set(probeDesc.Width, probeDesc.Height, 0); + object->mBitmapSize = object->mTextureSize; + int fmt = probeDesc.Format; - if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS) - { - object->mFormat = GFXFormatR8G8B8A8; // usual case - } - else - { - // TODO: improve this. this can be very bad. - GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt); - object->mFormat = (GFXFormat)fmt; - } - - mDebugMirrorTextureHandle = object; - } - else - { - // No rendering, abort! 
- return false; - } + if (fmt == DXGI_FORMAT_R8G8B8A8_TYPELESS || fmt == DXGI_FORMAT_B8G8R8A8_TYPELESS) + { + object->mFormat = GFXFormatR8G8B8A8; // usual case + } + else + { + // TODO: improve this. this can be very bad. + GFXREVERSE_LOOKUP(GFXD3D11TextureFormat, GFXFormat, fmt); + object->mFormat = (GFXFormat)fmt; + } + + mDebugMirrorTextureHandle = object; + } + else + { + // No rendering, abort! + return false; + } - return true; + return true; } String OculusVRHMDDevice::dumpMetrics() @@ -510,17 +510,17 @@ void OculusVRHMDDevice::updateRenderInfo() PlatformWindow *window = mDrawCanvas->getPlatformWindow(); - ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); + ovrHmdDesc desc = ovr_GetHmdDesc(mDevice); // Update window size if it's incorrect Point2I backbufferSize = mDrawCanvas->getBounds().extent; - // Finally setup! - if (!setupTargets()) - { - onDeviceDestroy(); - return; - } + // Finally setup! + if (!setupTargets()) + { + onDeviceDestroy(); + return; + } mRenderConfigurationDirty = false; } @@ -583,12 +583,12 @@ void OculusVRHMDDevice::clearRenderTargets() mEyeRT[0] = NULL; mEyeRT[1] = NULL; - if (mDebugMirrorTexture) - { - ovr_DestroyMirrorTexture(mDevice, mDebugMirrorTexture); - mDebugMirrorTexture = NULL; - mDebugMirrorTextureHandle = NULL; - } + if (mDebugMirrorTexture) + { + ovr_DestroyMirrorTexture(mDevice, mDebugMirrorTexture); + mDebugMirrorTexture = NULL; + mDebugMirrorTextureHandle = NULL; + } } void OculusVRHMDDevice::updateCaps() @@ -609,21 +609,21 @@ void OculusVRHMDDevice::onStartFrame() sInFrame = true; ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset }; - ovrTrackingState hmdState = ovr_GetTrackingState(mDevice, 0, ovrTrue); - ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, mRenderLayer.RenderPose); + ovrTrackingState hmdState = ovr_GetTrackingState(mDevice, 0, ovrTrue); + ovr_CalcEyePoses(hmdState.HeadPose.ThePose, hmdToEyeViewOffset, mRenderLayer.RenderPose); for (U32 i=0; i<2; i++) { - mRenderLayer.RenderPose[i].Position.x *= OculusVRDevice::smPositionTrackingScale; - mRenderLayer.RenderPose[i].Position.y *= OculusVRDevice::smPositionTrackingScale; - mRenderLayer.RenderPose[i].Position.z *= OculusVRDevice::smPositionTrackingScale; + mRenderLayer.RenderPose[i].Position.x *= OculusVRDevice::smPositionTrackingScale; + mRenderLayer.RenderPose[i].Position.y *= OculusVRDevice::smPositionTrackingScale; + mRenderLayer.RenderPose[i].Position.z *= OculusVRDevice::smPositionTrackingScale; } - mRenderLayer.SensorSampleTime = ovr_GetTimeInSeconds(); + mRenderLayer.SensorSampleTime = ovr_GetTimeInSeconds(); - // Set current dest texture on stereo render target - D3D11OculusTexture* texSwap = (D3D11OculusTexture*)mTextureSwapSet; - mStereoRT->attachTexture(GFXTextureTarget::Color0, texSwap->TexRtv[texSwap->TextureSet->CurrentIndex]); + // Set current dest texture on stereo render target + D3D11OculusTexture* texSwap = (D3D11OculusTexture*)mTextureSwapSet; + mStereoRT->attachTexture(GFXTextureTarget::Color0, texSwap->TexRtv[texSwap->TextureSet->CurrentIndex]); sInFrame = false; mFrameReady = true; @@ -639,32 +639,32 @@ void OculusVRHMDDevice::onEndFrame() GFXD3D11Device *d3d11GFX = dynamic_cast(GFX); - ovrViewScaleDesc viewScaleDesc; - ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset }; - viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f; - viewScaleDesc.HmdToEyeViewOffset[0] = hmdToEyeViewOffset[0]; - viewScaleDesc.HmdToEyeViewOffset[1] = 
hmdToEyeViewOffset[1]; + ovrViewScaleDesc viewScaleDesc; + ovrVector3f hmdToEyeViewOffset[2] = { mEyeRenderDesc[0].HmdToEyeViewOffset, mEyeRenderDesc[1].HmdToEyeViewOffset }; + viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f; + viewScaleDesc.HmdToEyeViewOffset[0] = hmdToEyeViewOffset[0]; + viewScaleDesc.HmdToEyeViewOffset[1] = hmdToEyeViewOffset[1]; - ovrLayerDirect ld = { { ovrLayerType_Direct } }; - mDebugRenderLayer = ld; + ovrLayerDirect ld = { { ovrLayerType_Direct } }; + mDebugRenderLayer = ld; - mDebugRenderLayer.ColorTexture[0] = mRenderLayer.ColorTexture[0]; - mDebugRenderLayer.ColorTexture[1] = mRenderLayer.ColorTexture[1]; - mDebugRenderLayer.Viewport[0] = mRenderLayer.Viewport[0]; - mDebugRenderLayer.Viewport[1] = mRenderLayer.Viewport[1]; + mDebugRenderLayer.ColorTexture[0] = mRenderLayer.ColorTexture[0]; + mDebugRenderLayer.ColorTexture[1] = mRenderLayer.ColorTexture[1]; + mDebugRenderLayer.Viewport[0] = mRenderLayer.Viewport[0]; + mDebugRenderLayer.Viewport[1] = mRenderLayer.Viewport[1]; - // TODO: use ovrViewScaleDesc - ovrLayerHeader* layers = &mRenderLayer.Header; - ovrResult result = ovr_SubmitFrame(mDevice, 0, &viewScaleDesc, &layers, 1); - mTextureSwapSet->AdvanceToNextTexture(); + // TODO: use ovrViewScaleDesc + ovrLayerHeader* layers = &mRenderLayer.Header; + ovrResult result = ovr_SubmitFrame(mDevice, 0, &viewScaleDesc, &layers, 1); + mTextureSwapSet->AdvanceToNextTexture(); - if (OVR_SUCCESS(result)) - { - int woo = 1; - } + if (OVR_SUCCESS(result)) + { + int woo = 1; + } - // TODO: render preview in display? + // TODO: render preview in display? mFrameReady = false; } @@ -700,11 +700,11 @@ void OculusVRHMDDevice::onDeviceDestroy() mEyeRT[1]->zombify(); } - if (mTextureSwapSet) - { - delete mTextureSwapSet; - mTextureSwapSet = NULL; - } + if (mTextureSwapSet) + { + delete mTextureSwapSet; + mTextureSwapSet = NULL; + } mStereoRT = NULL; mStereoDepthTexture = NULL; diff --git a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h index c2e1b5f4e..6a78778b3 100644 --- a/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h +++ b/Engine/source/platform/input/oculusVR/oculusVRHMDDevice.h @@ -102,7 +102,7 @@ protected: OculusVRSensorDevice *mSensor; U32 mActionCodeIndex; - ovrGraphicsLuid mLuid; + ovrGraphicsLuid mLuid; protected: void updateRenderInfo(); @@ -126,7 +126,7 @@ public: U32 getVersion() const { return mVersion; } // Windows display device name used in EnumDisplaySettings/CreateDC - const char* getDisplayDeviceType () const { return mDisplayDeviceType.c_str(); } + const char* getDisplayDeviceType () const { return mDisplayDeviceType.c_str(); } // MacOS display ID S32 getDisplayDeviceId() const { return mDisplayId; } @@ -190,7 +190,7 @@ public: String dumpMetrics(); // Stereo RT - GFXTexHandle mDebugStereoTexture; + GFXTexHandle mDebugStereoTexture; GFXTexHandle mStereoDepthTexture; GFXTextureTargetRef mStereoRT; @@ -204,12 +204,12 @@ public: F32 smDesiredPixelDensity; ovrTrackingState mLastTrackingState; - OculusTexture* mTextureSwapSet; - ovrLayerEyeFov mRenderLayer; - ovrLayerDirect mDebugRenderLayer; - ovrViewScaleDesc mScaleDesc; - ovrTexture* mDebugMirrorTexture; - GFXTexHandle mDebugMirrorTextureHandle; + OculusTexture* mTextureSwapSet; + ovrLayerEyeFov mRenderLayer; + ovrLayerDirect mDebugRenderLayer; + ovrViewScaleDesc mScaleDesc; + ovrTexture* mDebugMirrorTexture; + GFXTexHandle mDebugMirrorTextureHandle; GFXDevice::GFXDeviceRenderStyles mDesiredRenderingMode; diff --git 
a/Engine/source/platform/input/openVR/openVROverlay.cpp b/Engine/source/platform/input/openVR/openVROverlay.cpp index 6f4487181..b8f0ecf02 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.cpp +++ b/Engine/source/platform/input/openVR/openVROverlay.cpp @@ -102,10 +102,10 @@ bool OpenVROverlay::onAdd() mOverlayTypeDirty = true; mOverlayDirty = true; - if (OPENVR) - { - OPENVR->registerOverlay(this); - } + if (OPENVR) + { + OPENVR->registerOverlay(this); + } return true; } @@ -127,10 +127,10 @@ void OpenVROverlay::onRemove() mThumbOverlayHandle = NULL; } - if (ManagedSingleton::instanceOrNull()) - { - OPENVR->unregisterOverlay(this); - } + if (ManagedSingleton::instanceOrNull()) + { + OPENVR->unregisterOverlay(this); + } } void OpenVROverlay::resetOverlay() @@ -233,14 +233,14 @@ void OpenVROverlay::showOverlay() if (mOverlayHandle == NULL) return; - if (mOverlayType != OVERLAYTYPE_DASHBOARD) - { - vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle); - if (err != vr::VROverlayError_None) - { - Con::errorf("VR Overlay error!"); - } - } + if (mOverlayType != OVERLAYTYPE_DASHBOARD) + { + vr::EVROverlayError err = vr::VROverlay()->ShowOverlay(mOverlayHandle); + if (err != vr::VROverlayError_None) + { + Con::errorf("VR Overlay error!"); + } + } if (!mStagingTexture) { @@ -253,10 +253,10 @@ void OpenVROverlay::hideOverlay() if (mOverlayHandle == NULL) return; - if (mOverlayType != OVERLAYTYPE_DASHBOARD) - { - vr::VROverlay()->HideOverlay(mOverlayHandle); - } + if (mOverlayType != OVERLAYTYPE_DASHBOARD) + { + vr::VROverlay()->HideOverlay(mOverlayHandle); + } } @@ -317,8 +317,8 @@ bool OpenVROverlay::castRay(const Point3F &origin, const Point3F &direction, Ray vr::VROverlayIntersectionParams_t params; vr::VROverlayIntersectionResults_t result; - Point3F ovrOrigin = OpenVRUtil::convertPointToOVR(origin); - Point3F ovrDirection = OpenVRUtil::convertPointToOVR(direction); + Point3F ovrOrigin = OpenVRUtil::convertPointToOVR(origin); + Point3F ovrDirection = OpenVRUtil::convertPointToOVR(direction); params.eOrigin = mTrackingOrigin; params.vSource.v[0] = ovrOrigin.x; @@ -350,17 +350,17 @@ void OpenVROverlay::moveGamepadFocusToNeighbour() void OpenVROverlay::handleOpenVREvents() { - if (mManualMouseHandling) - { - // tell OpenVR to make some events for us - for (vr::TrackedDeviceIndex_t unDeviceId = 1; unDeviceId < vr::k_unControllerStateAxisCount; unDeviceId++) - { - if (vr::VROverlay()->HandleControllerOverlayInteractionAsMouse(mOverlayHandle, unDeviceId)) - { - break; - } - } - } + if (mManualMouseHandling) + { + // tell OpenVR to make some events for us + for (vr::TrackedDeviceIndex_t unDeviceId = 1; unDeviceId < vr::k_unControllerStateAxisCount; unDeviceId++) + { + if (vr::VROverlay()->HandleControllerOverlayInteractionAsMouse(mOverlayHandle, unDeviceId)) + { + break; + } + } + } vr::VREvent_t vrEvent; @@ -373,13 +373,13 @@ void OpenVROverlay::handleOpenVREvents() eventInfo.modifier = (InputModifiers)0; eventInfo.ascii = 0; - //Con::printf("Overlay event %i", vrEvent.eventType); + //Con::printf("Overlay event %i", vrEvent.eventType); switch (vrEvent.eventType) { case vr::VREvent_MouseMove: { - //Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y); + //Con::printf("mousemove %f,%f", vrEvent.data.mouse.x, vrEvent.data.mouse.y); eventInfo.objType = SI_AXIS; eventInfo.objInst = SI_XAXIS; eventInfo.action = SI_MAKE; @@ -424,11 +424,11 @@ void OpenVROverlay::handleOpenVREvents() AssertFatal(false, "WTF is going on here"); break; - case 
vr::VREvent_KeyboardCharInput: - case vr::VREvent_KeyboardDone: - updateTextControl((GuiControl*)vrEvent.data.keyboard.uUserValue); - break; - } + case vr::VREvent_KeyboardCharInput: + case vr::VREvent_KeyboardDone: + updateTextControl((GuiControl*)vrEvent.data.keyboard.uUserValue); + break; + } } @@ -450,16 +450,16 @@ void OpenVROverlay::handleOpenVREvents() void OpenVROverlay::updateTextControl(GuiControl* ctrl) { - if (!ctrl) - return; + if (!ctrl) + return; - GuiTextCtrl* textCtrl = dynamic_cast(ctrl); - if (textCtrl) - { - char text[GuiTextCtrl::MAX_STRING_LENGTH]; - vr::VROverlay()->GetKeyboardText(text, GuiTextCtrl::MAX_STRING_LENGTH); - textCtrl->setText(text); - } + GuiTextCtrl* textCtrl = dynamic_cast(ctrl); + if (textCtrl) + { + char text[GuiTextCtrl::MAX_STRING_LENGTH]; + vr::VROverlay()->GetKeyboardText(text, GuiTextCtrl::MAX_STRING_LENGTH); + textCtrl->setText(text); + } } void OpenVROverlay::onFrameRendered() @@ -508,27 +508,27 @@ void OpenVROverlay::onFrameRendered() void OpenVROverlay::enableKeyboardTranslation() { - vr::IVROverlay *overlay = vr::VROverlay(); - if (!overlay || !mOverlayHandle) - return; + vr::IVROverlay *overlay = vr::VROverlay(); + if (!overlay || !mOverlayHandle) + return; - GuiTextEditCtrl* ctrl = dynamic_cast(getFirstResponder()); - if (ctrl) - { - vr::EGamepadTextInputMode inputMode = ctrl->isPasswordText() ? vr::k_EGamepadTextInputModePassword : vr::k_EGamepadTextInputModeNormal; - char text[GuiTextCtrl::MAX_STRING_LENGTH + 1]; - ctrl->getText(text); - overlay->ShowKeyboardForOverlay(mOverlayHandle, inputMode, vr::k_EGamepadTextInputLineModeSingleLine, ctrl->getTooltip().c_str(), GuiTextCtrl::MAX_STRING_LENGTH, text, false, (uint64_t)ctrl); - } + GuiTextEditCtrl* ctrl = dynamic_cast(getFirstResponder()); + if (ctrl) + { + vr::EGamepadTextInputMode inputMode = ctrl->isPasswordText() ? 
vr::k_EGamepadTextInputModePassword : vr::k_EGamepadTextInputModeNormal; + char text[GuiTextCtrl::MAX_STRING_LENGTH + 1]; + ctrl->getText(text); + overlay->ShowKeyboardForOverlay(mOverlayHandle, inputMode, vr::k_EGamepadTextInputLineModeSingleLine, ctrl->getTooltip().c_str(), GuiTextCtrl::MAX_STRING_LENGTH, text, false, (uint64_t)ctrl); + } } void OpenVROverlay::disableKeyboardTranslation() { - vr::IVROverlay *overlay = vr::VROverlay(); - if (!overlay || !mOverlayHandle) - return; + vr::IVROverlay *overlay = vr::VROverlay(); + if (!overlay || !mOverlayHandle) + return; - overlay->HideKeyboard(); + overlay->HideKeyboard(); } void OpenVROverlay::setNativeAcceleratorsEnabled(bool enabled) diff --git a/Engine/source/platform/input/openVR/openVROverlay.h b/Engine/source/platform/input/openVR/openVROverlay.h index faee66b83..f4ffc6d87 100644 --- a/Engine/source/platform/input/openVR/openVROverlay.h +++ b/Engine/source/platform/input/openVR/openVROverlay.h @@ -57,7 +57,7 @@ public: bool mOverlayTypeDirty; ///< Overlay type is dirty bool mOverlayDirty; ///< Overlay properties are dirty - bool mManualMouseHandling; + bool mManualMouseHandling; OverlayType mOverlayType; // @@ -90,12 +90,12 @@ public: void moveGamepadFocusToNeighbour(); void handleOpenVREvents(); - void updateTextControl(GuiControl* ctrl); + void updateTextControl(GuiControl* ctrl); void onFrameRendered(); - virtual void enableKeyboardTranslation(); - virtual void disableKeyboardTranslation(); - virtual void setNativeAcceleratorsEnabled(bool enabled); + virtual void enableKeyboardTranslation(); + virtual void disableKeyboardTranslation(); + virtual void setNativeAcceleratorsEnabled(bool enabled); }; typedef OpenVROverlay::OverlayType OpenVROverlayType; diff --git a/Engine/source/platform/input/openVR/openVRProvider.cpp b/Engine/source/platform/input/openVR/openVRProvider.cpp index e217cb96a..b60fd007d 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.cpp +++ b/Engine/source/platform/input/openVR/openVRProvider.cpp @@ -33,8 +33,8 @@ struct OpenVRLoadedTexture { - vr::TextureID_t texId; - NamedTexTarget texTarget; + vr::TextureID_t texId; + NamedTexTarget texTarget; }; AngAxisF gLastMoveRot; // jamesu - this is just here for temp debugging @@ -142,15 +142,15 @@ namespace OpenVRUtil String GetTrackedDeviceString(vr::IVRSystem *pHmd, vr::TrackedDeviceIndex_t unDevice, vr::TrackedDeviceProperty prop, vr::TrackedPropertyError *peError = NULL) { - uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError); - if (unRequiredBufferLen == 0) - return ""; + uint32_t unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, NULL, 0, peError); + if (unRequiredBufferLen == 0) + return ""; - char *pchBuffer = new char[unRequiredBufferLen]; - unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError); - String sResult = pchBuffer; - delete[] pchBuffer; - return sResult; + char *pchBuffer = new char[unRequiredBufferLen]; + unRequiredBufferLen = pHmd->GetStringTrackedDeviceProperty(unDevice, prop, pchBuffer, unRequiredBufferLen, peError); + String sResult = pchBuffer; + delete[] pchBuffer; + return sResult; } } @@ -159,94 +159,94 @@ namespace OpenVRUtil bool OpenVRRenderModel::init(const vr::RenderModel_t & vrModel, StringTableEntry materialName) { - SAFE_DELETE(mMaterialInstance); - mMaterialInstance = MATMGR->createMatInstance(materialName, getGFXVertexFormat< VertexType >()); - if (!mMaterialInstance) - return false; + 
SAFE_DELETE(mMaterialInstance); + mMaterialInstance = MATMGR->createMatInstance(materialName, getGFXVertexFormat< VertexType >()); + if (!mMaterialInstance) + return false; - mLocalBox = Box3F::Invalid; + mLocalBox = Box3F::Invalid; - // Prepare primitives - U16 *indPtr = NULL; - GFXPrimitive *primPtr = NULL; - mPrimitiveBuffer.set(GFX, vrModel.unTriangleCount * 3, 1, GFXBufferTypeStatic, "OpenVR Controller buffer"); + // Prepare primitives + U16 *indPtr = NULL; + GFXPrimitive *primPtr = NULL; + mPrimitiveBuffer.set(GFX, vrModel.unTriangleCount * 3, 1, GFXBufferTypeStatic, "OpenVR Controller buffer"); - mPrimitiveBuffer.lock(&indPtr, &primPtr); - if (!indPtr || !primPtr) - return false; + mPrimitiveBuffer.lock(&indPtr, &primPtr); + if (!indPtr || !primPtr) + return false; - primPtr->minIndex = 0; - primPtr->numPrimitives = vrModel.unTriangleCount; - primPtr->numVertices = vrModel.unVertexCount; - primPtr->startIndex = 0; - primPtr->startVertex = 0; - primPtr->type = GFXTriangleList; + primPtr->minIndex = 0; + primPtr->numPrimitives = vrModel.unTriangleCount; + primPtr->numVertices = vrModel.unVertexCount; + primPtr->startIndex = 0; + primPtr->startVertex = 0; + primPtr->type = GFXTriangleList; - //dMemcpy(indPtr, vrModel.rIndexData, sizeof(U16) * vrModel.unTriangleCount * 3); + //dMemcpy(indPtr, vrModel.rIndexData, sizeof(U16) * vrModel.unTriangleCount * 3); - for (U32 i = 0; i < vrModel.unTriangleCount; i++) - { - const U32 idx = i * 3; - indPtr[idx + 0] = vrModel.rIndexData[idx + 2]; - indPtr[idx + 1] = vrModel.rIndexData[idx + 1]; - indPtr[idx + 2] = vrModel.rIndexData[idx + 0]; - } + for (U32 i = 0; i < vrModel.unTriangleCount; i++) + { + const U32 idx = i * 3; + indPtr[idx + 0] = vrModel.rIndexData[idx + 2]; + indPtr[idx + 1] = vrModel.rIndexData[idx + 1]; + indPtr[idx + 2] = vrModel.rIndexData[idx + 0]; + } - mPrimitiveBuffer.unlock(); + mPrimitiveBuffer.unlock(); - // Prepare verts - mVertexBuffer.set(GFX, vrModel.unVertexCount, GFXBufferTypeStatic); - VertexType *vertPtr = mVertexBuffer.lock(); - if (!vertPtr) - return false; + // Prepare verts + mVertexBuffer.set(GFX, vrModel.unVertexCount, GFXBufferTypeStatic); + VertexType *vertPtr = mVertexBuffer.lock(); + if (!vertPtr) + return false; - // Convert to torque coordinate system - for (U32 i = 0; i < vrModel.unVertexCount; i++) - { - const vr::RenderModel_Vertex_t &vert = vrModel.rVertexData[i]; - vertPtr->point = OpenVRUtil::convertPointFromOVR(vert.vPosition); - vertPtr->point.x = -vertPtr->point.x; - vertPtr->point.y = -vertPtr->point.y; - vertPtr->point.z = -vertPtr->point.z; - vertPtr->normal = OpenVRUtil::convertPointFromOVR(vert.vNormal); - vertPtr->normal.x = -vertPtr->normal.x; - vertPtr->normal.y = -vertPtr->normal.y; - vertPtr->normal.z = -vertPtr->normal.z; - vertPtr->texCoord = Point2F(vert.rfTextureCoord[0], vert.rfTextureCoord[1]); - vertPtr++; - } + // Convert to torque coordinate system + for (U32 i = 0; i < vrModel.unVertexCount; i++) + { + const vr::RenderModel_Vertex_t &vert = vrModel.rVertexData[i]; + vertPtr->point = OpenVRUtil::convertPointFromOVR(vert.vPosition); + vertPtr->point.x = -vertPtr->point.x; + vertPtr->point.y = -vertPtr->point.y; + vertPtr->point.z = -vertPtr->point.z; + vertPtr->normal = OpenVRUtil::convertPointFromOVR(vert.vNormal); + vertPtr->normal.x = -vertPtr->normal.x; + vertPtr->normal.y = -vertPtr->normal.y; + vertPtr->normal.z = -vertPtr->normal.z; + vertPtr->texCoord = Point2F(vert.rfTextureCoord[0], vert.rfTextureCoord[1]); + vertPtr++; + } - mVertexBuffer.unlock(); + 
mVertexBuffer.unlock(); - for (U32 i = 0, sz = vrModel.unVertexCount; i < sz; i++) - { - Point3F pos = Point3F(vrModel.rVertexData[i].vPosition.v[0], vrModel.rVertexData[i].vPosition.v[1], vrModel.rVertexData[i].vPosition.v[2]); - mLocalBox.extend(pos); - } + for (U32 i = 0, sz = vrModel.unVertexCount; i < sz; i++) + { + Point3F pos = Point3F(vrModel.rVertexData[i].vPosition.v[0], vrModel.rVertexData[i].vPosition.v[1], vrModel.rVertexData[i].vPosition.v[2]); + mLocalBox.extend(pos); + } - return true; + return true; } void OpenVRRenderModel::draw(SceneRenderState *state, MeshRenderInst* renderInstance) { - renderInstance->type = RenderPassManager::RIT_Mesh; - renderInstance->matInst = state->getOverrideMaterial(mMaterialInstance); - if (!renderInstance->matInst) - return; + renderInstance->type = RenderPassManager::RIT_Mesh; + renderInstance->matInst = state->getOverrideMaterial(mMaterialInstance); + if (!renderInstance->matInst) + return; - renderInstance->vertBuff = &mVertexBuffer; - renderInstance->primBuff = &mPrimitiveBuffer; - renderInstance->prim = NULL; - renderInstance->primBuffIndex = 0; + renderInstance->vertBuff = &mVertexBuffer; + renderInstance->primBuff = &mPrimitiveBuffer; + renderInstance->prim = NULL; + renderInstance->primBuffIndex = 0; - if (renderInstance->matInst->getMaterial()->isTranslucent()) - { - renderInstance->type = RenderPassManager::RIT_Translucent; - renderInstance->translucentSort = true; - } + if (renderInstance->matInst->getMaterial()->isTranslucent()) + { + renderInstance->type = RenderPassManager::RIT_Translucent; + renderInstance->translucentSort = true; + } - renderInstance->defaultKey = renderInstance->matInst->getStateHint(); - renderInstance->defaultKey2 = (uintptr_t)renderInstance->vertBuff; + renderInstance->defaultKey = renderInstance->matInst->getStateHint(); + renderInstance->defaultKey2 = (uintptr_t)renderInstance->vertBuff; } //------------------------------------------------------------ @@ -334,8 +334,8 @@ ImplementEnumType(OpenVRState, EndImplementEnumType; ImplementEnumType(OpenVRTrackedDeviceClass, - "Types of devices which are tracked .\n\n" - "@ingroup OpenVR") + "Types of devices which are tracked .\n\n" + "@ingroup OpenVR") { vr::TrackedDeviceClass_Invalid, "Invalid" }, { vr::TrackedDeviceClass_HMD, "HMD" }, { vr::TrackedDeviceClass_Controller, "Controller" }, @@ -572,27 +572,27 @@ bool OpenVRProvider::enable() HRESULT hr = CreateDXGIFactory1(__uuidof(IDXGIFactory1), reinterpret_cast(&DXGIFactory)); if (FAILED(hr)) - AssertFatal(false, "OpenVRProvider::enable -> CreateDXGIFactory1 call failure"); + AssertFatal(false, "OpenVRProvider::enable -> CreateDXGIFactory1 call failure"); hr = DXGIFactory->EnumAdapters(AdapterIdx, &EnumAdapter); if (FAILED(hr)) { - Con::warnf("VR: HMD device has an invalid adapter."); + Con::warnf("VR: HMD device has an invalid adapter."); } else { - DXGI_ADAPTER_DESC desc; - hr = EnumAdapter->GetDesc(&desc); - if (FAILED(hr)) - { - Con::warnf("VR: HMD device has an invalid adapter."); - } - else - { - dMemcpy(&mLUID, &desc.AdapterLuid, sizeof(mLUID)); - } - SAFE_RELEASE(EnumAdapter); + DXGI_ADAPTER_DESC desc; + hr = EnumAdapter->GetDesc(&desc); + if (FAILED(hr)) + { + Con::warnf("VR: HMD device has an invalid adapter."); + } + else + { + dMemcpy(&mLUID, &desc.AdapterLuid, sizeof(mLUID)); + } + SAFE_RELEASE(EnumAdapter); } SAFE_RELEASE(DXGIFactory); @@ -712,14 +712,14 @@ void OpenVRProvider::buildInputCodeTable() bool OpenVRProvider::process() { if (!mHMD) - return true; + return true; if (!vr::VRCompositor()) 
- return true; + return true; if (smRotateYawWithMoveActions) { - smHMDmvYaw += MoveManager::mRightAction - MoveManager::mLeftAction + MoveManager::mXAxis_L; + smHMDmvYaw += MoveManager::mRightAction - MoveManager::mLeftAction + MoveManager::mXAxis_L; } // Update HMD rotation offset @@ -745,11 +745,11 @@ bool OpenVRProvider::process() processVREvent(event); } - // process overlay events - for (U32 i = 0; i < mOverlays.size(); i++) - { - mOverlays[i]->handleOpenVREvents(); - } + // process overlay events + for (U32 i = 0; i < mOverlays.size(); i++) + { + mOverlays[i]->handleOpenVREvents(); + } // Process SteamVR controller state for (vr::TrackedDeviceIndex_t unDevice = 0; unDevice < vr::k_unMaxTrackedDeviceCount; unDevice++) @@ -757,7 +757,7 @@ bool OpenVRProvider::process() vr::VRControllerState_t state; if (mHMD->GetControllerState(unDevice, &state)) { - mCurrentControllerState[unDevice] = state; + mCurrentControllerState[unDevice] = state; } } @@ -792,52 +792,52 @@ void OpenVRTransformToRotPos(MatrixF mat, QuatF &outRot, Point3F &outPos) void OpenVRTransformToRotPosMat(MatrixF mat, QuatF &outRot, Point3F &outPos, MatrixF &outMat) { - // Directly set the rotation and position from the eye transforms - MatrixF torqueMat(1); - OpenVRUtil::convertTransformFromOVR(mat, torqueMat); + // Directly set the rotation and position from the eye transforms + MatrixF torqueMat(1); + OpenVRUtil::convertTransformFromOVR(mat, torqueMat); - Point3F pos = torqueMat.getPosition(); - outRot = QuatF(torqueMat); - outPos = pos; - outRot.mulP(pos, &outPos); // jamesu - position needs to be multiplied by rotation in this case - outMat = torqueMat; + Point3F pos = torqueMat.getPosition(); + outRot = QuatF(torqueMat); + outPos = pos; + outRot.mulP(pos, &outPos); // jamesu - position needs to be multiplied by rotation in this case + outMat = torqueMat; } void OpenVRProvider::getFrameEyePose(IDevicePose *pose, S32 eyeId) const { AssertFatal(eyeId >= -1 && eyeId < 2, "Out of bounds eye"); - if (eyeId == -1) - { - // NOTE: this is codename for "head" - MatrixF mat = mHMDRenderState.mHMDPose; // same order as in the openvr example + if (eyeId == -1) + { + // NOTE: this is codename for "head" + MatrixF mat = mHMDRenderState.mHMDPose; // same order as in the openvr example #ifdef DEBUG_DISPLAY_POSE - pose->originalMatrix = mat; - OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix); + pose->originalMatrix = mat; + OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix); #else - OpenVRTransformToRotPos(mat, pose->orientation, pose->position); + OpenVRTransformToRotPos(mat, pose->orientation, pose->position); #endif - pose->velocity = Point3F(0); - pose->angularVelocity = Point3F(0); - } - else - { - MatrixF mat = mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose; // same order as in the openvr example - //mat = mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eyeId]; // same order as in the openvr example + pose->velocity = Point3F(0); + pose->angularVelocity = Point3F(0); + } + else + { + MatrixF mat = mHMDRenderState.mEyePose[eyeId] * mHMDRenderState.mHMDPose; // same order as in the openvr example + //mat = mHMDRenderState.mHMDPose * mHMDRenderState.mEyePose[eyeId]; // same order as in the openvr example #ifdef DEBUG_DISPLAY_POSE - pose->originalMatrix = mat; - OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, pose->actualMatrix); + pose->originalMatrix = mat; + OpenVRTransformToRotPosMat(mat, pose->orientation, pose->position, 
pose->actualMatrix); #else - OpenVRTransformToRotPos(mat, pose->orientation, pose->position); + OpenVRTransformToRotPos(mat, pose->orientation, pose->position); #endif - pose->velocity = Point3F(0); - pose->angularVelocity = Point3F(0); - } + pose->velocity = Point3F(0); + pose->angularVelocity = Point3F(0); + } } bool OpenVRProvider::providesEyeOffsets() const @@ -978,8 +978,8 @@ void OpenVRProvider::onEyeRendered(U32 index) } else if (GFX->getAdapterType() == Direct3D9) { - //vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; - //err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); + //vr::Texture_t eyeTexture = { (void*)static_cast(mHMDRenderState.mStereoRenderTextures[index].getPointer())->get2DTex(), vr::API_DirectX, vr::ColorSpace_Gamma }; + //err = vr::VRCompositor()->Submit((vr::EVREye)(vr::Eye_Left + index), &eyeTexture); } #ifdef TORQUE_OPENGL else if (GFX->getAdapterType() == OpenGL) @@ -1066,23 +1066,23 @@ bool OpenVRProvider::_handleDeviceEvent(GFXDevice::GFXDeviceEventType evt) S32 OpenVRProvider::getDisplayDeviceId() const { #if defined(TORQUE_OS_WIN64) || defined(TORQUE_OS_WIN32) - if (GFX && GFX->getAdapterType() == Direct3D11) - { - Vector adapterList; - GFXD3D11Device::enumerateAdapters(adapterList); + if (GFX && GFX->getAdapterType() == Direct3D11) + { + Vector adapterList; + GFXD3D11Device::enumerateAdapters(adapterList); - for (U32 i = 0, sz = adapterList.size(); i < sz; i++) - { - GFXAdapter* adapter = adapterList[i]; - if (dMemcmp(&adapter->mLUID, &mLUID, sizeof(mLUID)) == 0) - { - return adapter->mIndex; - } - } - } + for (U32 i = 0, sz = adapterList.size(); i < sz; i++) + { + GFXAdapter* adapter = adapterList[i]; + if (dMemcmp(&adapter->mLUID, &mLUID, sizeof(mLUID)) == 0) + { + return adapter->mIndex; + } + } + } #endif - return -1; + return -1; } void OpenVRProvider::processVREvent(const vr::VREvent_t & evt) @@ -1091,8 +1091,8 @@ void OpenVRProvider::processVREvent(const vr::VREvent_t & evt) switch (evt.eventType) { case vr::VREvent_InputFocusCaptured: - //Con::executef() - break; + //Con::executef() + break; case vr::VREvent_TrackedDeviceActivated: { // Setup render model @@ -1145,7 +1145,7 @@ void OpenVRProvider::updateTrackedPoses() { mHMDRenderState.mHMDPose = mat; - /* + /* MatrixF rotOffset(1); EulerF localRot(-smHMDRotOffset.x, -smHMDRotOffset.z, smHMDRotOffset.y); @@ -1155,7 +1155,7 @@ void OpenVRProvider::updateTrackedPoses() QuatF(localRot).setMatrix(&rotOffset); rotOffset.inverse(); mHMDRenderState.mHMDPose = mat = rotOffset * mHMDRenderState.mHMDPose; - */ + */ // jamesu - store the last rotation for temp debugging MatrixF torqueMat(1); @@ -1169,8 +1169,8 @@ void OpenVRProvider::updateTrackedPoses() OpenVRTransformToRotPos(mat, inPose.orientation, inPose.position); #ifdef DEBUG_DISPLAY_POSE - OpenVRUtil::convertTransformFromOVR(mat, inPose.actualMatrix); - inPose.originalMatrix = mat; + OpenVRUtil::convertTransformFromOVR(mat, inPose.actualMatrix); + inPose.originalMatrix = mat; #endif inPose.state = outPose.eTrackingResult; @@ -1195,10 +1195,10 @@ void OpenVRProvider::submitInputChanges() IDevicePose curPose = mCurrentDevicePose[i]; IDevicePose prevPose = mPreviousInputTrackedDevicePose[i]; - S32 eventIdx = -1; - - if (!mDeviceEventMap.tryGetValue(i, eventIdx) || eventIdx < 0) - continue; + S32 eventIdx = -1; + + if (!mDeviceEventMap.tryGetValue(i, eventIdx) || eventIdx < 0) + continue; if (!curPose.valid || 
!curPose.connected) continue; @@ -1266,293 +1266,293 @@ void OpenVRProvider::resetSensors() void OpenVRProvider::mapDeviceToEvent(U32 deviceIdx, S32 eventIdx) { - mDeviceEventMap[deviceIdx] = eventIdx; + mDeviceEventMap[deviceIdx] = eventIdx; } void OpenVRProvider::resetEventMap() { - mDeviceEventMap.clear(); + mDeviceEventMap.clear(); } IDevicePose OpenVRProvider::getTrackedDevicePose(U32 idx) { - if (idx >= vr::k_unMaxTrackedDeviceCount) - { - IDevicePose ret; - ret.connected = ret.valid = false; - return ret; - } + if (idx >= vr::k_unMaxTrackedDeviceCount) + { + IDevicePose ret; + ret.connected = ret.valid = false; + return ret; + } - return mCurrentDevicePose[idx]; + return mCurrentDevicePose[idx]; } void OpenVRProvider::registerOverlay(OpenVROverlay* overlay) { - mOverlays.push_back(overlay); + mOverlays.push_back(overlay); } void OpenVRProvider::unregisterOverlay(OpenVROverlay* overlay) { - S32 index = mOverlays.find_next(overlay); - if (index != -1) - { - mOverlays.erase(index); - } + S32 index = mOverlays.find_next(overlay); + if (index != -1) + { + mOverlays.erase(index); + } } const S32 OpenVRProvider::preloadRenderModelTexture(U32 index) { - S32 idx = -1; - if (mLoadedTextureLookup.tryGetValue(index, idx)) - return idx; + S32 idx = -1; + if (mLoadedTextureLookup.tryGetValue(index, idx)) + return idx; - char buffer[256]; - dSprintf(buffer, sizeof(buffer), "openvrtex_%u", index); + char buffer[256]; + dSprintf(buffer, sizeof(buffer), "openvrtex_%u", index); - OpenVRProvider::LoadedRenderTexture loadedTexture; - loadedTexture.vrTextureId = index; - loadedTexture.vrTexture = NULL; - loadedTexture.texture = NULL; - loadedTexture.textureError = vr::VRRenderModelError_Loading; - loadedTexture.targetTexture = new NamedTexTarget(); - loadedTexture.targetTexture->registerWithName(buffer); - mLoadedTextures.push_back(loadedTexture); - mLoadedTextureLookup[index] = mLoadedTextures.size() - 1; + OpenVRProvider::LoadedRenderTexture loadedTexture; + loadedTexture.vrTextureId = index; + loadedTexture.vrTexture = NULL; + loadedTexture.texture = NULL; + loadedTexture.textureError = vr::VRRenderModelError_Loading; + loadedTexture.targetTexture = new NamedTexTarget(); + loadedTexture.targetTexture->registerWithName(buffer); + mLoadedTextures.push_back(loadedTexture); + mLoadedTextureLookup[index] = mLoadedTextures.size() - 1; - return mLoadedTextures.size() - 1; + return mLoadedTextures.size() - 1; } const S32 OpenVRProvider::preloadRenderModel(StringTableEntry name) { - S32 idx = -1; - if (mLoadedModelLookup.tryGetValue(name, idx)) - return idx; + S32 idx = -1; + if (mLoadedModelLookup.tryGetValue(name, idx)) + return idx; - OpenVRProvider::LoadedRenderModel loadedModel; - loadedModel.name = name; - loadedModel.model = NULL; - loadedModel.vrModel = NULL; - loadedModel.modelError = vr::VRRenderModelError_Loading; - loadedModel.loadedTexture = false; - loadedModel.textureId = -1; - mLoadedModels.push_back(loadedModel); - mLoadedModelLookup[name] = mLoadedModels.size() - 1; + OpenVRProvider::LoadedRenderModel loadedModel; + loadedModel.name = name; + loadedModel.model = NULL; + loadedModel.vrModel = NULL; + loadedModel.modelError = vr::VRRenderModelError_Loading; + loadedModel.loadedTexture = false; + loadedModel.textureId = -1; + mLoadedModels.push_back(loadedModel); + mLoadedModelLookup[name] = mLoadedModels.size() - 1; - return mLoadedModels.size() - 1; + return mLoadedModels.size() - 1; } bool OpenVRProvider::getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed) { - if (idx < 0 || idx > 
mLoadedModels.size()) - { - failed = true; - return true; - } + if (idx < 0 || idx > mLoadedModels.size()) + { + failed = true; + return true; + } - OpenVRProvider::LoadedRenderModel &loadedModel = mLoadedModels[idx]; - //Con::printf("RenderModel[%i] STAGE 1", idx); + OpenVRProvider::LoadedRenderModel &loadedModel = mLoadedModels[idx]; + //Con::printf("RenderModel[%i] STAGE 1", idx); - failed = false; + failed = false; - if (loadedModel.modelError > vr::VRRenderModelError_Loading) - { - failed = true; - return true; - } + if (loadedModel.modelError > vr::VRRenderModelError_Loading) + { + failed = true; + return true; + } - // Stage 1 : model - if (!loadedModel.model) - { - loadedModel.modelError = vr::VRRenderModels()->LoadRenderModel_Async(loadedModel.name, &loadedModel.vrModel); - //Con::printf(" vr::VRRenderModels()->LoadRenderModel_Async(\"%s\", %x); -> %i", loadedModel.name, &loadedModel.vrModel, loadedModel.modelError); - if (loadedModel.modelError == vr::VRRenderModelError_None) - { - if (loadedModel.vrModel == NULL) - { - failed = true; - return true; - } - // Load the model - loadedModel.model = new OpenVRRenderModel(); - } - else if (loadedModel.modelError == vr::VRRenderModelError_Loading) - { - return false; - } - } + // Stage 1 : model + if (!loadedModel.model) + { + loadedModel.modelError = vr::VRRenderModels()->LoadRenderModel_Async(loadedModel.name, &loadedModel.vrModel); + //Con::printf(" vr::VRRenderModels()->LoadRenderModel_Async(\"%s\", %x); -> %i", loadedModel.name, &loadedModel.vrModel, loadedModel.modelError); + if (loadedModel.modelError == vr::VRRenderModelError_None) + { + if (loadedModel.vrModel == NULL) + { + failed = true; + return true; + } + // Load the model + loadedModel.model = new OpenVRRenderModel(); + } + else if (loadedModel.modelError == vr::VRRenderModelError_Loading) + { + return false; + } + } - //Con::printf("RenderModel[%i] STAGE 2 (texId == %i)", idx, loadedModel.vrModel->diffuseTextureId); + //Con::printf("RenderModel[%i] STAGE 2 (texId == %i)", idx, loadedModel.vrModel->diffuseTextureId); - // Stage 2 : texture - if (!loadedModel.loadedTexture && loadedModel.model) - { - if (loadedModel.textureId == -1) - { - loadedModel.textureId = preloadRenderModelTexture(loadedModel.vrModel->diffuseTextureId); - } + // Stage 2 : texture + if (!loadedModel.loadedTexture && loadedModel.model) + { + if (loadedModel.textureId == -1) + { + loadedModel.textureId = preloadRenderModelTexture(loadedModel.vrModel->diffuseTextureId); + } - if (loadedModel.textureId == -1) - { - failed = true; - return true; - } + if (loadedModel.textureId == -1) + { + failed = true; + return true; + } - if (!getRenderModelTexture(loadedModel.textureId, NULL, failed)) - { - return false; - } + if (!getRenderModelTexture(loadedModel.textureId, NULL, failed)) + { + return false; + } - if (failed) - { - return true; - } + if (failed) + { + return true; + } - loadedModel.loadedTexture = true; + loadedModel.loadedTexture = true; - //Con::printf("RenderModel[%i] GOT TEXTURE"); + //Con::printf("RenderModel[%i] GOT TEXTURE"); - // Now we can load the model. Note we first need to get a Material for the mapped texture - NamedTexTarget *namedTexture = mLoadedTextures[loadedModel.textureId].targetTexture; - String materialName = MATMGR->getMapEntry(namedTexture->getName().c_str()); - if (materialName.isEmpty()) - { - char buffer[256]; - dSprintf(buffer, sizeof(buffer), "#%s", namedTexture->getName().c_str()); - materialName = buffer; + // Now we can load the model. 
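The staged loader above settles asynchronously: getRenderModel() keeps returning false while either the model or its diffuse texture is still in vr::VRRenderModelError_Loading, and only reports success or failure once both stages resolve. A minimal caller-side sketch of how that contract can be consumed, polling once per frame (illustrative only; modelName is assumed to be an interned StringTableEntry):

   S32 slot = OPENVR->preloadRenderModel(modelName);      // stable index into mLoadedModels
   OpenVRRenderModel *model = NULL;
   bool failed = false;
   if (OPENVR->getRenderModel(slot, &model, failed))      // true == load finished (success or failure)
   {
      if (failed || !model)
         Con::errorf("OpenVR render model '%s' failed to load", modelName);
      // else: model is ready to draw
   }
   // false == still loading; poll again next frame

The tracked-object code later in this patch spins on getRenderModel() in a tight loop instead; either approach works against this API, but per-frame polling keeps the main thread responsive while SteamVR streams the asset in.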
Note we first need to get a Material for the mapped texture + NamedTexTarget *namedTexture = mLoadedTextures[loadedModel.textureId].targetTexture; + String materialName = MATMGR->getMapEntry(namedTexture->getName().c_str()); + if (materialName.isEmpty()) + { + char buffer[256]; + dSprintf(buffer, sizeof(buffer), "#%s", namedTexture->getName().c_str()); + materialName = buffer; - //Con::printf("RenderModel[%i] materialName == %s", idx, buffer); + //Con::printf("RenderModel[%i] materialName == %s", idx, buffer); - Material* mat = new Material(); - mat->mMapTo = namedTexture->getName(); - mat->mDiffuseMapFilename[0] = buffer; - mat->mEmissive[0] = true; + Material* mat = new Material(); + mat->mMapTo = namedTexture->getName(); + mat->mDiffuseMapFilename[0] = buffer; + mat->mEmissive[0] = true; - dSprintf(buffer, sizeof(buffer), "%s_Material", namedTexture->getName().c_str()); - if (!mat->registerObject(buffer)) - { - Con::errorf("Couldn't create placeholder openvr material %s!", buffer); - failed = true; - return true; - } + dSprintf(buffer, sizeof(buffer), "%s_Material", namedTexture->getName().c_str()); + if (!mat->registerObject(buffer)) + { + Con::errorf("Couldn't create placeholder openvr material %s!", buffer); + failed = true; + return true; + } - materialName = buffer; - } - - loadedModel.model->init(*loadedModel.vrModel, materialName); - } + materialName = buffer; + } + + loadedModel.model->init(*loadedModel.vrModel, materialName); + } - if ((loadedModel.modelError > vr::VRRenderModelError_Loading) || - (loadedModel.textureId >= 0 && mLoadedTextures[loadedModel.textureId].textureError > vr::VRRenderModelError_Loading)) - { - failed = true; - } + if ((loadedModel.modelError > vr::VRRenderModelError_Loading) || + (loadedModel.textureId >= 0 && mLoadedTextures[loadedModel.textureId].textureError > vr::VRRenderModelError_Loading)) + { + failed = true; + } - if (!failed && ret) - { - *ret = loadedModel.model; - } - return true; + if (!failed && ret) + { + *ret = loadedModel.model; + } + return true; } bool OpenVRProvider::getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed) { - if (idx < 0 || idx > mLoadedModels.size()) - { - failed = true; - return true; - } + if (idx < 0 || idx > mLoadedModels.size()) + { + failed = true; + return true; + } - failed = false; + failed = false; - OpenVRProvider::LoadedRenderTexture &loadedTexture = mLoadedTextures[idx]; + OpenVRProvider::LoadedRenderTexture &loadedTexture = mLoadedTextures[idx]; - if (loadedTexture.textureError > vr::VRRenderModelError_Loading) - { - failed = true; - return true; - } + if (loadedTexture.textureError > vr::VRRenderModelError_Loading) + { + failed = true; + return true; + } - if (!loadedTexture.texture) - { - if (!loadedTexture.vrTexture) - { - loadedTexture.textureError = vr::VRRenderModels()->LoadTexture_Async(loadedTexture.vrTextureId, &loadedTexture.vrTexture); - if (loadedTexture.textureError == vr::VRRenderModelError_None) - { - // Load the texture - GFXTexHandle tex; + if (!loadedTexture.texture) + { + if (!loadedTexture.vrTexture) + { + loadedTexture.textureError = vr::VRRenderModels()->LoadTexture_Async(loadedTexture.vrTextureId, &loadedTexture.vrTexture); + if (loadedTexture.textureError == vr::VRRenderModelError_None) + { + // Load the texture + GFXTexHandle tex; - const U32 sz = loadedTexture.vrTexture->unWidth * loadedTexture.vrTexture->unHeight * 4; - GBitmap *bmp = new GBitmap(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, false, GFXFormatR8G8B8A8); + const U32 sz = 
loadedTexture.vrTexture->unWidth * loadedTexture.vrTexture->unHeight * 4; + GBitmap *bmp = new GBitmap(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, false, GFXFormatR8G8B8A8); - Swizzles::bgra.ToBuffer(bmp->getAddress(0,0,0), loadedTexture.vrTexture->rubTextureMapData, sz); + Swizzles::bgra.ToBuffer(bmp->getAddress(0,0,0), loadedTexture.vrTexture->rubTextureMapData, sz); - char buffer[256]; - dSprintf(buffer, 256, "OVRTEX-%i.png", loadedTexture.vrTextureId); + char buffer[256]; + dSprintf(buffer, 256, "OVRTEX-%i.png", loadedTexture.vrTextureId); - FileStream fs; - fs.open(buffer, Torque::FS::File::Write); - bmp->writeBitmap("PNG", fs); - fs.close(); + FileStream fs; + fs.open(buffer, Torque::FS::File::Write); + bmp->writeBitmap("PNG", fs); + fs.close(); - tex.set(bmp, &GFXDefaultStaticDiffuseProfile, true, "OpenVR Texture"); - //tex.set(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, 1, (void*)pixels, GFXFormatR8G8B8A8, &GFXDefaultStaticDiffuseProfile, "OpenVR Texture", 1); + tex.set(bmp, &GFXDefaultStaticDiffuseProfile, true, "OpenVR Texture"); + //tex.set(loadedTexture.vrTexture->unWidth, loadedTexture.vrTexture->unHeight, 1, (void*)pixels, GFXFormatR8G8B8A8, &GFXDefaultStaticDiffuseProfile, "OpenVR Texture", 1); - loadedTexture.targetTexture->setTexture(tex); - loadedTexture.texture = tex; - } - else if (loadedTexture.textureError == vr::VRRenderModelError_Loading) - { - return false; - } - } - } + loadedTexture.targetTexture->setTexture(tex); + loadedTexture.texture = tex; + } + else if (loadedTexture.textureError == vr::VRRenderModelError_Loading) + { + return false; + } + } + } - if (loadedTexture.textureError > vr::VRRenderModelError_Loading) - { - failed = true; - } + if (loadedTexture.textureError > vr::VRRenderModelError_Loading) + { + failed = true; + } - if (!failed && outTex) - { - *outTex = loadedTexture.texture; - } + if (!failed && outTex) + { + *outTex = loadedTexture.texture; + } - return true; + return true; } bool OpenVRProvider::getRenderModelTextureName(S32 idx, String &outName) { - if (idx < 0 || idx >= mLoadedTextures.size()) - return false; + if (idx < 0 || idx >= mLoadedTextures.size()) + return false; - if (mLoadedTextures[idx].targetTexture) - { - outName = mLoadedTextures[idx].targetTexture->getName(); - return true; - } + if (mLoadedTextures[idx].targetTexture) + { + outName = mLoadedTextures[idx].targetTexture->getName(); + return true; + } - return false; + return false; } void OpenVRProvider::resetRenderModels() { - for (U32 i = 0, sz = mLoadedModels.size(); i < sz; i++) - { - SAFE_DELETE(mLoadedModels[i].model); - if (mLoadedModels[i].vrModel) mRenderModels->FreeRenderModel(mLoadedModels[i].vrModel); - } - for (U32 i = 0, sz = mLoadedTextures.size(); i < sz; i++) - { - SAFE_DELETE(mLoadedTextures[i].targetTexture); - if (mLoadedTextures[i].vrTexture) mRenderModels->FreeTexture(mLoadedTextures[i].vrTexture); - } - mLoadedModels.clear(); - mLoadedTextures.clear(); - mLoadedModelLookup.clear(); - mLoadedTextureLookup.clear(); + for (U32 i = 0, sz = mLoadedModels.size(); i < sz; i++) + { + SAFE_DELETE(mLoadedModels[i].model); + if (mLoadedModels[i].vrModel) mRenderModels->FreeRenderModel(mLoadedModels[i].vrModel); + } + for (U32 i = 0, sz = mLoadedTextures.size(); i < sz; i++) + { + SAFE_DELETE(mLoadedTextures[i].targetTexture); + if (mLoadedTextures[i].vrTexture) mRenderModels->FreeTexture(mLoadedTextures[i].vrTexture); + } + mLoadedModels.clear(); + mLoadedTextures.clear(); + mLoadedModelLookup.clear(); + 
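Pulling the texture plumbing above together: each OpenVR-supplied texture is published to the engine through a NamedTexTarget, and a placeholder Material can then reference it with the '#' prefix instead of a file path, which is how getRenderModel() builds its auto-generated materials. A condensed sketch of that flow (illustrative; the 42 id and object names are placeholders):

   NamedTexTarget *target = new NamedTexTarget();
   target->registerWithName("openvrtex_42");          // same naming scheme as preloadRenderModelTexture()
   target->setTexture(tex);                           // 'tex' is a GFXTexHandle built from the VR bitmap

   Material *mat = new Material();
   mat->mMapTo = target->getName();                   // map the material onto the named target
   mat->mDiffuseMapFilename[0] = "#openvrtex_42";     // '#' binds the named target rather than a file
   mat->mEmissive[0] = true;
   if (!mat->registerObject("openvrtex_42_Material"))
      Con::errorf("Couldn't register placeholder OpenVR material");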
mLoadedTextureLookup.clear(); } OpenVROverlay *OpenVRProvider::getGamepadFocusOverlay() @@ -1593,50 +1593,50 @@ void OpenVRProvider::setKeyboardPositionForOverlay(OpenVROverlay *overlay, const void OpenVRProvider::getControllerDeviceIndexes(vr::TrackedDeviceClass &deviceClass, Vector &outList) { - for (U32 i = 0; iGetTrackedDeviceClass(i); - if (klass == deviceClass) - { - outList.push_back(i); - } - } + vr::TrackedDeviceClass klass = mHMD->GetTrackedDeviceClass(i); + if (klass == deviceClass) + { + outList.push_back(i); + } + } } StringTableEntry OpenVRProvider::getControllerModel(U32 idx) { - if (idx >= vr::k_unMaxTrackedDeviceCount || !mRenderModels) - return NULL; + if (idx >= vr::k_unMaxTrackedDeviceCount || !mRenderModels) + return NULL; - String str = GetTrackedDeviceString(mHMD, idx, vr::Prop_RenderModelName_String, NULL); - return StringTable->insert(str, true); + String str = GetTrackedDeviceString(mHMD, idx, vr::Prop_RenderModelName_String, NULL); + return StringTable->insert(str, true); } DefineEngineStaticMethod(OpenVR, getControllerDeviceIndexes, const char*, (OpenVRTrackedDeviceClass klass),, - "@brief Gets the indexes of devices which match the required device class") + "@brief Gets the indexes of devices which match the required device class") { - if (!ManagedSingleton::instanceOrNull()) - { - return ""; - } + if (!ManagedSingleton::instanceOrNull()) + { + return ""; + } - Vector outList; - OPENVR->getControllerDeviceIndexes(klass, outList); - return EngineMarshallData>(outList); + Vector outList; + OPENVR->getControllerDeviceIndexes(klass, outList); + return EngineMarshallData>(outList); } DefineEngineStaticMethod(OpenVR, getControllerModel, const char*, (S32 idx), , - "@brief Gets the indexes of devices which match the required device class") + "@brief Gets the indexes of devices which match the required device class") { - if (!ManagedSingleton::instanceOrNull()) - { - return ""; - } + if (!ManagedSingleton::instanceOrNull()) + { + return ""; + } - return OPENVR->getControllerModel(idx); + return OPENVR->getControllerModel(idx); } DefineEngineStaticMethod(OpenVR, isDeviceActive, bool, (), , @@ -1701,17 +1701,17 @@ DefineEngineStaticMethod(OpenVR, setHMDAsGameConnectionDisplayDevice, bool, (Gam DefineEngineStaticMethod(OpenVR, getDisplayDeviceId, S32, (), , - "@brief MacOS display ID.\n\n" - "@param index The HMD index.\n" - "@return The ID of the HMD display device, if any.\n" - "@ingroup Game") + "@brief MacOS display ID.\n\n" + "@param index The HMD index.\n" + "@return The ID of the HMD display device, if any.\n" + "@ingroup Game") { - if (!ManagedSingleton::instanceOrNull()) - { - return -1; - } + if (!ManagedSingleton::instanceOrNull()) + { + return -1; + } - return OPENVR->getDisplayDeviceId(); + return OPENVR->getDisplayDeviceId(); } DefineEngineStaticMethod(OpenVR, resetSensors, void, (), , @@ -1730,27 +1730,27 @@ DefineEngineStaticMethod(OpenVR, resetSensors, void, (), , } DefineEngineStaticMethod(OpenVR, mapDeviceToEvent, void, (S32 deviceId, S32 eventId), , - "@brief Maps a device to an event code.\n\n" - "@ingroup Game") + "@brief Maps a device to an event code.\n\n" + "@ingroup Game") { - if (!ManagedSingleton::instanceOrNull()) - { - return; - } + if (!ManagedSingleton::instanceOrNull()) + { + return; + } - OPENVR->mapDeviceToEvent(deviceId, eventId); + OPENVR->mapDeviceToEvent(deviceId, eventId); } DefineEngineStaticMethod(OpenVR, resetEventMap, void, (), , - "@brief Resets event map.\n\n" - "@ingroup Game") + "@brief Resets event map.\n\n" + "@ingroup 
Game") { - if (!ManagedSingleton::instanceOrNull()) - { - return; - } + if (!ManagedSingleton::instanceOrNull()) + { + return; + } - OPENVR->resetEventMap(); + OPENVR->resetEventMap(); } // Overlay stuff diff --git a/Engine/source/platform/input/openVR/openVRProvider.h b/Engine/source/platform/input/openVR/openVRProvider.h index f35684e70..009861af4 100644 --- a/Engine/source/platform/input/openVR/openVRProvider.h +++ b/Engine/source/platform/input/openVR/openVRProvider.h @@ -62,91 +62,91 @@ namespace OpenVRUtil U32 convertOpenVRButtonToTorqueButton(uint32_t vrButton); - /// Converts a point to OVR coords - inline Point3F convertPointToOVR(const Point3F &point) - { - return Point3F(-point.x, -point.z, point.y); - } + /// Converts a point to OVR coords + inline Point3F convertPointToOVR(const Point3F &point) + { + return Point3F(-point.x, -point.z, point.y); + } - /// Converts a point from OVR coords - inline Point3F convertPointFromOVR(const Point3F &point) - { - return Point3F(-point.x, point.z, -point.y); - } + /// Converts a point from OVR coords + inline Point3F convertPointFromOVR(const Point3F &point) + { + return Point3F(-point.x, point.z, -point.y); + } - // Converts a point from OVR coords, from an input float array - inline Point3F convertPointFromOVR(const vr::HmdVector3_t& v) - { - return Point3F(-v.v[0], v.v[2], -v.v[1]); - } + // Converts a point from OVR coords, from an input float array + inline Point3F convertPointFromOVR(const vr::HmdVector3_t& v) + { + return Point3F(-v.v[0], v.v[2], -v.v[1]); + } }; template class VRTextureSet { public: - static const int TextureCount = TEXSIZE; - GFXTexHandle mTextures[TEXSIZE]; - U32 mIndex; + static const int TextureCount = TEXSIZE; + GFXTexHandle mTextures[TEXSIZE]; + U32 mIndex; - VRTextureSet() : mIndex(0) - { - } + VRTextureSet() : mIndex(0) + { + } - void init(U32 width, U32 height, GFXFormat fmt, GFXTextureProfile *profile, const String &desc) - { - for (U32 i = 0; i < TextureCount; i++) - { - mTextures[i].set(width, height, fmt, profile, desc); - } - } + void init(U32 width, U32 height, GFXFormat fmt, GFXTextureProfile *profile, const String &desc) + { + for (U32 i = 0; i < TextureCount; i++) + { + mTextures[i].set(width, height, fmt, profile, desc); + } + } - void clear() - { - for (U32 i = 0; i < TextureCount; i++) - { - mTextures[i] = NULL; - } - } + void clear() + { + for (U32 i = 0; i < TextureCount; i++) + { + mTextures[i] = NULL; + } + } - void advance() - { - mIndex = (mIndex + 1) % TextureCount; - } + void advance() + { + mIndex = (mIndex + 1) % TextureCount; + } - GFXTexHandle& getTextureHandle() - { - return mTextures[mIndex]; - } + GFXTexHandle& getTextureHandle() + { + return mTextures[mIndex]; + } }; /// Simple class to handle rendering native OpenVR model data class OpenVRRenderModel { public: - typedef GFXVertexPNT VertexType; - GFXVertexBufferHandle mVertexBuffer; - GFXPrimitiveBufferHandle mPrimitiveBuffer; - BaseMatInstance* mMaterialInstance; ///< Material to use for rendering. NOTE: - Box3F mLocalBox; + typedef GFXVertexPNT VertexType; + GFXVertexBufferHandle mVertexBuffer; + GFXPrimitiveBufferHandle mPrimitiveBuffer; + BaseMatInstance* mMaterialInstance; ///< Material to use for rendering. 
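VRTextureSet above is a small ring buffer of textures; its template parameter (the angle-bracket argument appears to have been stripped from this hunk) is the number of buffers to cycle through, so the compositor can consume one frame while the next is being rendered. A usage sketch under that assumption, with placeholder dimensions and an assumed render-target profile:

   VRTextureSet<3> eyeBuffers;                              // triple-buffered; the count is illustrative
   eyeBuffers.init(1512, 1680, GFXFormatR8G8B8A8,
                   &GFXDefaultRenderTargetProfile,          // profile choice is an assumption
                   "VR eye buffer");
   GFXTexHandle &current = eyeBuffers.getTextureHandle();   // render this frame into the current slot
   // ... draw the eye view and hand 'current' to the VR compositor ...
   eyeBuffers.advance();                                    // the next frame uses the next slot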
NOTE: + Box3F mLocalBox; - OpenVRRenderModel() : mMaterialInstance(NULL) - { - } + OpenVRRenderModel() : mMaterialInstance(NULL) + { + } - ~OpenVRRenderModel() - { - SAFE_DELETE(mMaterialInstance); - } + ~OpenVRRenderModel() + { + SAFE_DELETE(mMaterialInstance); + } - Box3F getWorldBox(MatrixF &mat) - { - Box3F ret = mLocalBox; - mat.mul(ret); - return ret; - } + Box3F getWorldBox(MatrixF &mat) + { + Box3F ret = mLocalBox; + mat.mul(ret); + return ret; + } - bool init(const vr::RenderModel_t & vrModel, StringTableEntry materialName); - void draw(SceneRenderState *state, MeshRenderInst* renderInstance); + bool init(const vr::RenderModel_t & vrModel, StringTableEntry materialName); + void draw(SceneRenderState *state, MeshRenderInst* renderInstance); }; struct OpenVRRenderState @@ -196,21 +196,21 @@ public: struct LoadedRenderModel { - StringTableEntry name; - vr::RenderModel_t *vrModel; - OpenVRRenderModel *model; - vr::EVRRenderModelError modelError; - S32 textureId; - bool loadedTexture; + StringTableEntry name; + vr::RenderModel_t *vrModel; + OpenVRRenderModel *model; + vr::EVRRenderModelError modelError; + S32 textureId; + bool loadedTexture; }; struct LoadedRenderTexture { - U32 vrTextureId; - vr::RenderModel_TextureMap_t *vrTexture; - GFXTextureObject *texture; - NamedTexTarget *targetTexture; - vr::EVRRenderModelError textureError; + U32 vrTextureId; + vr::RenderModel_TextureMap_t *vrTexture; + GFXTextureObject *texture; + NamedTexTarget *targetTexture; + vr::EVRRenderModelError textureError; }; OpenVRProvider(); @@ -283,21 +283,21 @@ public: IDevicePose getTrackedDevicePose(U32 idx); /// } - /// @name Overlay registration - /// { - void registerOverlay(OpenVROverlay* overlay); - void unregisterOverlay(OpenVROverlay* overlay); - /// } + /// @name Overlay registration + /// { + void registerOverlay(OpenVROverlay* overlay); + void unregisterOverlay(OpenVROverlay* overlay); + /// } - /// @name Model loading - /// { - const S32 preloadRenderModel(StringTableEntry name); - const S32 preloadRenderModelTexture(U32 index); - bool getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed); - bool getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed); - bool getRenderModelTextureName(S32 idx, String &outName); - void resetRenderModels(); - /// } + /// @name Model loading + /// { + const S32 preloadRenderModel(StringTableEntry name); + const S32 preloadRenderModelTexture(U32 index); + bool getRenderModel(S32 idx, OpenVRRenderModel **ret, bool &failed); + bool getRenderModelTexture(S32 idx, GFXTextureObject **outTex, bool &failed); + bool getRenderModelTextureName(S32 idx, String &outName); + void resetRenderModels(); + /// } /// @name Console API @@ -338,17 +338,17 @@ public: vr::ETrackingUniverseOrigin mTrackingSpace; - Vector mOverlays; + Vector mOverlays; - VREventSignal mVREventSignal; - Namespace *mOpenVRNS; + VREventSignal mVREventSignal; + Namespace *mOpenVRNS; - Vector mLoadedModels; - Vector mLoadedTextures; - Map mLoadedModelLookup; - Map mLoadedTextureLookup; + Vector mLoadedModels; + Vector mLoadedTextures; + Map mLoadedModelLookup; + Map mLoadedTextureLookup; - Map mDeviceEventMap; + Map mDeviceEventMap; /// } GuiCanvas* mDrawCanvas; diff --git a/Engine/source/platform/input/openVR/openVRTrackedObject.cpp b/Engine/source/platform/input/openVR/openVRTrackedObject.cpp index a4467f55c..584ccda11 100644 --- a/Engine/source/platform/input/openVR/openVRTrackedObject.cpp +++ b/Engine/source/platform/input/openVR/openVRTrackedObject.cpp @@ -31,7 +31,7 @@ bool 
OpenVRTrackedObject::smDebugControllerMovePosition = true; bool OpenVRTrackedObject::smDebugControllerPosition = false; static const U32 sCollisionMoveMask = (PlayerObjectType | - StaticShapeObjectType | VehicleObjectType); + StaticShapeObjectType | VehicleObjectType); U32 OpenVRTrackedObject::sServerCollisionMask = sCollisionMoveMask; // ItemObjectType U32 OpenVRTrackedObject::sClientCollisionMask = sCollisionMoveMask; @@ -43,8 +43,8 @@ IMPLEMENT_CO_DATABLOCK_V1(OpenVRTrackedObjectData); OpenVRTrackedObjectData::OpenVRTrackedObjectData() : mShapeFile(NULL) { - mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02); - mCollisionBoxMax = Point3F(0.02, 0.05, 0.02); + mCollisionBoxMin = Point3F(-0.02, -0.20, -0.02); + mCollisionBoxMax = Point3F(0.02, 0.05, 0.02); } OpenVRTrackedObjectData::~OpenVRTrackedObjectData() @@ -53,49 +53,49 @@ OpenVRTrackedObjectData::~OpenVRTrackedObjectData() bool OpenVRTrackedObjectData::onAdd() { - if (Parent::onAdd()) - { - return true; - } + if (Parent::onAdd()) + { + return true; + } - return false; + return false; } bool OpenVRTrackedObjectData::preload(bool server, String &errorStr) { - if (!Parent::preload(server, errorStr)) - return false; + if (!Parent::preload(server, errorStr)) + return false; - bool error = false; - if (!server) - { - mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL; - } + bool error = false; + if (!server) + { + mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL; + } } void OpenVRTrackedObjectData::initPersistFields() { - addGroup("Render Components"); - addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model."); - addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min"); - addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box min"); - endGroup("Render Components"); + addGroup("Render Components"); + addField("shape", TypeShapeFilename, Offset(mShapeFile, OpenVRTrackedObjectData), "Shape file to use for controller model."); + addField("collisionMin", TypePoint3F, Offset(mCollisionBoxMin, OpenVRTrackedObjectData), "Box min"); + addField("collisionMax", TypePoint3F, Offset(mCollisionBoxMax, OpenVRTrackedObjectData), "Box min"); + endGroup("Render Components"); - Parent::initPersistFields(); + Parent::initPersistFields(); } void OpenVRTrackedObjectData::packData(BitStream* stream) { - Parent::packData(stream); + Parent::packData(stream); - stream->writeString(mShapeFile); + stream->writeString(mShapeFile); } void OpenVRTrackedObjectData::unpackData(BitStream* stream) { - Parent::unpackData(stream); + Parent::unpackData(stream); - mShapeFile = stream->readSTString(); + mShapeFile = stream->readSTString(); } //----------------------------------------------------------------------------- @@ -104,11 +104,11 @@ void OpenVRTrackedObjectData::unpackData(BitStream* stream) IMPLEMENT_CO_NETOBJECT_V1(OpenVRTrackedObject); ConsoleDocClass(OpenVRTrackedObject, - "@brief Renders and handles interactions OpenVR controllers and tracked objects.\n\n" - "This class implements basic rendering and interactions with OpenVR controllers.\n\n" - "The object should be controlled by a player object. 
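As shown in the hunk above, OpenVRTrackedObjectData::preload() declares an unused error flag and reaches the end of a bool-returning function without a return on the success path; the intent is presumably closer to the following sketch (illustrative, not part of the patch):

   bool OpenVRTrackedObjectData::preload(bool server, String &errorStr)
   {
      if (!Parent::preload(server, errorStr))
         return false;

      // The shape is only needed client side, for rendering the controller.
      if (!server)
         mShape = mShapeFile ? ResourceManager::get().load(mShapeFile) : NULL;

      return true;
   }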
Controllers will be rendered at\n" - "the correct position regardless of the current transform of the object.\n" - "@ingroup OpenVR\n"); + "@brief Renders and handles interactions OpenVR controllers and tracked objects.\n\n" + "This class implements basic rendering and interactions with OpenVR controllers.\n\n" + "The object should be controlled by a player object. Controllers will be rendered at\n" + "the correct position regardless of the current transform of the object.\n" + "@ingroup OpenVR\n"); //----------------------------------------------------------------------------- @@ -124,122 +124,122 @@ OpenVRTrackedObject::OpenVRTrackedObject() : mConvexList(new Convex()), mPhysicsRep(NULL) { - // Flag this object so that it will always - // be sent across the network to clients - mNetFlags.set(Ghostable | ScopeAlways); + // Flag this object so that it will always + // be sent across the network to clients + mNetFlags.set(Ghostable | ScopeAlways); - // Set it as a "static" object that casts shadows - mTypeMask |= StaticObjectType | StaticShapeObjectType; + // Set it as a "static" object that casts shadows + mTypeMask |= StaticObjectType | StaticShapeObjectType; - mPose.connected = false; + mPose.connected = false; } OpenVRTrackedObject::~OpenVRTrackedObject() { - clearRenderData(); - delete mConvexList; + clearRenderData(); + delete mConvexList; } void OpenVRTrackedObject::updateRenderData() { - clearRenderData(); + clearRenderData(); - if (!mDataBlock) - return; + if (!mDataBlock) + return; - // Are we using a model? - if (mDataBlock->mShape) - { - if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape) - { - delete mShapeInstance; - mShapeInstance = NULL; - } + // Are we using a model? + if (mDataBlock->mShape) + { + if (mShapeInstance && mShapeInstance->getShape() != mDataBlock->mShape) + { + delete mShapeInstance; + mShapeInstance = NULL; + } - if (!mShapeInstance) - { - mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject()); - } - } - else - { - setupRenderDataFromModel(isClientObject()); - } + if (!mShapeInstance) + { + mShapeInstance = new TSShapeInstance(mDataBlock->mShape, isClientObject()); + } + } + else + { + setupRenderDataFromModel(isClientObject()); + } } void OpenVRTrackedObject::setupRenderDataFromModel(bool loadComponentModels) { - clearRenderData(); - - if (!OPENVR || !OPENVR->isEnabled()) - return; + clearRenderData(); + + if (!OPENVR || !OPENVR->isEnabled()) + return; - vr::IVRRenderModels *models = OPENVR->getRenderModels(); - if (!models) - return; + vr::IVRRenderModels *models = OPENVR->getRenderModels(); + if (!models) + return; - if (!mShapeInstance && mModelName && mModelName[0] != '\0') - { - bool failed = false; - S32 idx = OPENVR->preloadRenderModel(mModelName); - while (!OPENVR->getRenderModel(idx, &mBasicModel, failed)) - { - if (failed) - break; - } - } + if (!mShapeInstance && mModelName && mModelName[0] != '\0') + { + bool failed = false; + S32 idx = OPENVR->preloadRenderModel(mModelName); + while (!OPENVR->getRenderModel(idx, &mBasicModel, failed)) + { + if (failed) + break; + } + } - if (loadComponentModels) - { - mRenderComponents.setSize(models->GetComponentCount(mModelName)); + if (loadComponentModels) + { + mRenderComponents.setSize(models->GetComponentCount(mModelName)); - for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++) - { - RenderModelSlot &slot = mRenderComponents[i]; - char buffer[1024]; + for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++) + { + RenderModelSlot &slot = 
mRenderComponents[i]; + char buffer[1024]; - slot.mappedNodeIdx = -1; - slot.componentName = NULL; - slot.nativeModel = NULL; + slot.mappedNodeIdx = -1; + slot.componentName = NULL; + slot.nativeModel = NULL; - U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer)); - if (result == 0) - continue; + U32 result = models->GetComponentName(mModelName, i, buffer, sizeof(buffer)); + if (result == 0) + continue; #ifdef DEBUG_CONTROLLER_MODELS - Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer); + Con::printf("Controller[%s] component %i NAME == %s", mModelName, i, buffer); #endif - slot.componentName = StringTable->insert(buffer, true); + slot.componentName = StringTable->insert(buffer, true); - result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer)); - if (result == 0) - { + result = models->GetComponentRenderModelName(mModelName, slot.componentName, buffer, sizeof(buffer)); + if (result == 0) + { #ifdef DEBUG_CONTROLLER_MODELS - Con::printf("Controller[%s] component %i NO MODEL", mModelName, i); + Con::printf("Controller[%s] component %i NO MODEL", mModelName, i); #endif - continue; - } + continue; + } #ifdef DEBUG_CONTROLLER_MODELS - Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName); + Con::printf("Controller[%s] component %i == %s", mModelName, i, slot.componentName); #endif - bool failed = false; - S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true)); - while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed)) - { - if (failed) - break; - } - } - } + bool failed = false; + S32 idx = OPENVR->preloadRenderModel(StringTable->insert(buffer, true)); + while (!OPENVR->getRenderModel(idx, &slot.nativeModel, failed)) + { + if (failed) + break; + } + } + } } void OpenVRTrackedObject::clearRenderData() { - mBasicModel = NULL; - mRenderComponents.clear(); + mBasicModel = NULL; + mRenderComponents.clear(); } //----------------------------------------------------------------------------- @@ -247,735 +247,735 @@ void OpenVRTrackedObject::clearRenderData() //----------------------------------------------------------------------------- void OpenVRTrackedObject::initPersistFields() { - // SceneObject already handles exposing the transform - Parent::initPersistFields(); + // SceneObject already handles exposing the transform + Parent::initPersistFields(); - addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track"); - addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track"); - addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track"); + addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track"); + addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject), "Index of device to track"); + addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject), "Index of movemanager state to track"); addField("deviceIndex", TypeS32, Offset(mDeviceIndex, 
OpenVRTrackedObject), "Index of device to track"); - static bool conInit = false; - if (!conInit) - { - Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition); - Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition); - conInit = true; - } + static bool conInit = false; + if (!conInit) + { + Con::addVariable("$OpenVRTrackedObject::debugControllerPosition", TypeBool, &smDebugControllerPosition); + Con::addVariable("$OpenVRTrackedObject::debugControllerMovePosition", TypeBool, &smDebugControllerMovePosition); + conInit = true; + } } void OpenVRTrackedObject::inspectPostApply() { - Parent::inspectPostApply(); + Parent::inspectPostApply(); - // Flag the network mask to send the updates - // to the client object - setMaskBits(UpdateMask); + // Flag the network mask to send the updates + // to the client object + setMaskBits(UpdateMask); } bool OpenVRTrackedObject::onAdd() { - if (!Parent::onAdd()) - return false; + if (!Parent::onAdd()) + return false; - // Set up a 1x1x1 bounding box - mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f), - Point3F(0.5f, 0.5f, 0.5f)); + // Set up a 1x1x1 bounding box + mObjBox.set(Point3F(-0.5f, -0.5f, -0.5f), + Point3F(0.5f, 0.5f, 0.5f)); - resetWorldBox(); + resetWorldBox(); - // Add this object to the scene - addToScene(); + // Add this object to the scene + addToScene(); - if (mDataBlock) - { - mObjBox.minExtents = mDataBlock->mCollisionBoxMin; - mObjBox.maxExtents = mDataBlock->mCollisionBoxMax; - resetWorldBox(); - } - else - { - setGlobalBounds(); - } + if (mDataBlock) + { + mObjBox.minExtents = mDataBlock->mCollisionBoxMin; + mObjBox.maxExtents = mDataBlock->mCollisionBoxMax; + resetWorldBox(); + } + else + { + setGlobalBounds(); + } - return true; + return true; } void OpenVRTrackedObject::onRemove() { - // Remove this object from the scene - removeFromScene(); + // Remove this object from the scene + removeFromScene(); - clearRenderData(); + clearRenderData(); - SAFE_DELETE(mPhysicsRep); + SAFE_DELETE(mPhysicsRep); - Parent::onRemove(); + Parent::onRemove(); } void OpenVRTrackedObject::_updatePhysics() { - SAFE_DELETE(mPhysicsRep); + SAFE_DELETE(mPhysicsRep); - if (!PHYSICSMGR) - return; + if (!PHYSICSMGR) + return; - PhysicsCollision *colShape = NULL; - MatrixF offset(true); - colShape = PHYSICSMGR->createCollision(); - colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset); + PhysicsCollision *colShape = NULL; + MatrixF offset(true); + colShape = PHYSICSMGR->createCollision(); + colShape->addBox(getObjBox().getExtents() * 0.5f * mObjScale, offset); - if (colShape) - { - PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? "server" : "client"); - mPhysicsRep = PHYSICSMGR->createBody(); - mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world); - mPhysicsRep->setTransform(getTransform()); - } + if (colShape) + { + PhysicsWorld *world = PHYSICSMGR->getWorld(isServerObject() ? 
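The addField() registrations in initPersistFields() above each carry a duplicated trailing addField("deviceIndex", ...) call and reuse the "Index of movemanager state to track" description; the intended registration is presumably closer to this sketch (illustrative; the descriptions are inferred from how the fields are used, not taken from the patch):

   addField("deviceIndex", TypeS32, Offset(mDeviceIndex, OpenVRTrackedObject),
            "Index of the tracked device to follow");
   addField("mappedMoveIndex", TypeS32, Offset(mMappedMoveIndex, OpenVRTrackedObject),
            "Index of the ExtendedMove position/rotation slot to read");
   addField("ignoreParentRotation", TypeBool, Offset(mIgnoreParentRotation, OpenVRTrackedObject),
            "If true, only the mount point's position is inherited, not its rotation");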
"server" : "client"); + mPhysicsRep = PHYSICSMGR->createBody(); + mPhysicsRep->init(colShape, 0, PhysicsBody::BF_TRIGGER | PhysicsBody::BF_KINEMATIC, this, world); + mPhysicsRep->setTransform(getTransform()); + } } bool OpenVRTrackedObject::onNewDataBlock(GameBaseData *dptr, bool reload) { - mDataBlock = dynamic_cast(dptr); - if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload)) - return false; + mDataBlock = dynamic_cast(dptr); + if (!mDataBlock || !Parent::onNewDataBlock(dptr, reload)) + return false; - // Setup the models - clearRenderData(); + // Setup the models + clearRenderData(); - mObjBox.minExtents = mDataBlock->mCollisionBoxMin; - mObjBox.maxExtents = mDataBlock->mCollisionBoxMax; + mObjBox.minExtents = mDataBlock->mCollisionBoxMin; + mObjBox.maxExtents = mDataBlock->mCollisionBoxMax; - mGlobalBounds = false; + mGlobalBounds = false; - resetWorldBox(); + resetWorldBox(); - _updatePhysics(); + _updatePhysics(); - scriptOnNewDataBlock(); + scriptOnNewDataBlock(); - return true; + return true; } void OpenVRTrackedObject::setInteractObject(SceneObject* object, bool holding) { - mInteractObject = object; - mHoldInteractedObject = holding; + mInteractObject = object; + mHoldInteractedObject = holding; } void OpenVRTrackedObject::setTransform(const MatrixF & mat) { - // Let SceneObject handle all of the matrix manipulation - Parent::setTransform(mat); + // Let SceneObject handle all of the matrix manipulation + Parent::setTransform(mat); - // Dirty our network mask so that the new transform gets - // transmitted to the client object - setMaskBits(UpdateMask); + // Dirty our network mask so that the new transform gets + // transmitted to the client object + setMaskBits(UpdateMask); } void OpenVRTrackedObject::setModelName(String &modelName) { - if (!isServerObject()) - return; + if (!isServerObject()) + return; - mModelName = StringTable->insert(modelName.c_str(), true); - setMaskBits(UpdateMask); + mModelName = StringTable->insert(modelName.c_str(), true); + setMaskBits(UpdateMask); } U32 OpenVRTrackedObject::packUpdate(NetConnection *conn, U32 mask, BitStream *stream) { - // Allow the Parent to get a crack at writing its info - U32 retMask = Parent::packUpdate(conn, mask, stream); + // Allow the Parent to get a crack at writing its info + U32 retMask = Parent::packUpdate(conn, mask, stream); - // Write our transform information - if (stream->writeFlag(mask & UpdateMask)) - { - mathWrite(*stream, getTransform()); - mathWrite(*stream, getScale()); + // Write our transform information + if (stream->writeFlag(mask & UpdateMask)) + { + mathWrite(*stream, getTransform()); + mathWrite(*stream, getScale()); - stream->write((S16)mDeviceIndex); - stream->write((S16)mMappedMoveIndex); - stream->writeString(mModelName); - } + stream->write((S16)mDeviceIndex); + stream->write((S16)mMappedMoveIndex); + stream->writeString(mModelName); + } - return retMask; + return retMask; } void OpenVRTrackedObject::unpackUpdate(NetConnection *conn, BitStream *stream) { - // Let the Parent read any info it sent - Parent::unpackUpdate(conn, stream); + // Let the Parent read any info it sent + Parent::unpackUpdate(conn, stream); - if (stream->readFlag()) // UpdateMask - { - mathRead(*stream, &mObjToWorld); - mathRead(*stream, &mObjScale); + if (stream->readFlag()) // UpdateMask + { + mathRead(*stream, &mObjToWorld); + mathRead(*stream, &mObjScale); - setTransform(mObjToWorld); - - S16 readDeviceIndex; - S16 readMoveIndex; - stream->read(&readDeviceIndex); - stream->read(&readMoveIndex); + 
setTransform(mObjToWorld); + + S16 readDeviceIndex; + S16 readMoveIndex; + stream->read(&readDeviceIndex); + stream->read(&readMoveIndex); - mDeviceIndex = readDeviceIndex; - mMappedMoveIndex = readMoveIndex; - mModelName = stream->readSTString(); + mDeviceIndex = readDeviceIndex; + mMappedMoveIndex = readMoveIndex; + mModelName = stream->readSTString(); - updateRenderData(); - } + updateRenderData(); + } } void OpenVRTrackedObject::writePacketData(GameConnection *conn, BitStream *stream) { - Parent::writePacketData(conn, stream); + Parent::writePacketData(conn, stream); } void OpenVRTrackedObject::readPacketData(GameConnection *conn, BitStream *stream) { - Parent::readPacketData(conn, stream); + Parent::readPacketData(conn, stream); } MatrixF OpenVRTrackedObject::getTrackedTransform() { - IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); - MatrixF trackedMat(1); + IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); + MatrixF trackedMat(1); - pose.orientation.setMatrix(&trackedMat); - trackedMat.setPosition(pose.position); + pose.orientation.setMatrix(&trackedMat); + trackedMat.setPosition(pose.position); - return trackedMat; + return trackedMat; } MatrixF OpenVRTrackedObject::getLastTrackedTransform() { - MatrixF trackedMat(1); + MatrixF trackedMat(1); - mPose.orientation.setMatrix(&trackedMat); - trackedMat.setPosition(mPose.position); + mPose.orientation.setMatrix(&trackedMat); + trackedMat.setPosition(mPose.position); - return trackedMat; + return trackedMat; } MatrixF OpenVRTrackedObject::getBaseTrackingTransform() { - if (isMounted()) - { - MatrixF mat; + if (isMounted()) + { + MatrixF mat; - mMount.object->getMountTransform(mMount.node, mMount.xfm, &mat); - if (mIgnoreParentRotation) - { - Point3F pos = mat.getPosition(); - mat = MatrixF(1); - mat.setPosition(pos); - } - //mat.inverse(); - return mat; - } + mMount.object->getMountTransform(mMount.node, mMount.xfm, &mat); + if (mIgnoreParentRotation) + { + Point3F pos = mat.getPosition(); + mat = MatrixF(1); + mat.setPosition(pos); + } + //mat.inverse(); + return mat; + } - return MatrixF(1); + return MatrixF(1); } void OpenVRTrackedObject::prepRenderImage(SceneRenderState *state) { - RenderPassManager *renderPass = state->getRenderPass(); + RenderPassManager *renderPass = state->getRenderPass(); - // debug rendering for now + // debug rendering for now - if (mDeviceIndex < 0) - return; + if (mDeviceIndex < 0) + return; - // Current pose - IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); - IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0); + // Current pose + IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); + IDevicePose hmdPose = OPENVR->getTrackedDevicePose(0); - if (!pose.connected && !mPose.connected) - return; + if (!pose.connected && !mPose.connected) + return; - MatrixF offsetMat = getBaseTrackingTransform(); - //offsetMat.inverse(); + MatrixF offsetMat = getBaseTrackingTransform(); + //offsetMat.inverse(); - Point3F pos = offsetMat.getPosition(); - //Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z); + Point3F pos = offsetMat.getPosition(); + //Con::printf("Base offs == %f,%f,%f", pos.x, pos.y, pos.z); - const F32 CONTROLLER_SCALE = 0.1; + const F32 CONTROLLER_SCALE = 0.1; - if (smDebugControllerPosition) - { - ColorI drawColor = ColorI::GREEN; - if (!pose.valid) - { - drawColor = ColorI::RED; - } + if (smDebugControllerPosition) + { + ColorI drawColor = ColorI::GREEN; + if (!pose.valid) + { + drawColor = ColorI::RED; + } - // Draw Camera - /* - DisplayPose 
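The transform handling above reads more clearly written out in one place: a tracked pose is assembled as a world-to-tracking matrix, inverted to obtain the device's transform in world space, and then offset by the mount's base transform so the controller follows the player object. A small sketch of that composition (illustrative; it mirrors getTrackedTransform() and getBaseTrackingTransform() above):

   MatrixF trackedMat(1);
   pose.orientation.setMatrix(&trackedMat);             // pose as a world -> tracked-space matrix
   trackedMat.setPosition(pose.position);

   MatrixF worldMat = trackedMat;
   worldMat.inverse();                                  // tracked space -> world
   worldMat = getBaseTrackingTransform() * worldMat;    // apply the mount/base offset
   // 'worldMat' is the matrix used for debug drawing and render instances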
cameraPose; - OPENVR->getFrameEyePose(&cameraPose, -1); - Point3F cameraCenter(0); - MatrixF cameraMat(1); - cameraPose.orientation.setMatrix(&cameraMat); - cameraMat.setPosition(cameraPose.position); - cameraMat.mulP(cameraCenter); - //DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN); - - DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box - */ + // Draw Camera + /* + DisplayPose cameraPose; + OPENVR->getFrameEyePose(&cameraPose, -1); + Point3F cameraCenter(0); + MatrixF cameraMat(1); + cameraPose.orientation.setMatrix(&cameraMat); + cameraMat.setPosition(cameraPose.position); + cameraMat.mulP(cameraCenter); + //DebugDrawer::get()->drawBox(cameraCenter - Point3F(0.1), cameraCenter + Point3F(0.1), ColorI::GREEN); + + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::WHITE, cameraMat); // general box + */ - // Draw Tracked HMD Pos - Point3F hmdCenter(0, 0, 0); - MatrixF hmdMat(1); - hmdPose.orientation.setMatrix(&hmdMat); - hmdMat.setPosition(hmdPose.position); - hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos) - hmdMat = offsetMat * hmdMat; - hmdMat.mulP(hmdCenter); - DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED); - DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box + // Draw Tracked HMD Pos + Point3F hmdCenter(0, 0, 0); + MatrixF hmdMat(1); + hmdPose.orientation.setMatrix(&hmdMat); + hmdMat.setPosition(hmdPose.position); + hmdMat.inverse(); // -> world mat (as opposed to world -> tracked pos) + hmdMat = offsetMat * hmdMat; + hmdMat.mulP(hmdCenter); + DebugDrawer::get()->drawBox(hmdCenter - Point3F(0.1), hmdCenter + Point3F(0.1), ColorI::RED); + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -0.1, -0.5), Point3F(0.5, 0.1, 0.5), ColorI::GREEN, hmdMat); // general box - // Draw Controller - MatrixF mat(1); - pose.orientation.setMatrix(&mat); - mat.setPosition(pose.position); - mat.inverse(); // same as HMD - mat = offsetMat * mat; + // Draw Controller + MatrixF mat(1); + pose.orientation.setMatrix(&mat); + mat.setPosition(pose.position); + mat.inverse(); // same as HMD + mat = offsetMat * mat; - Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0); - Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0); - Point3F middle(0, 0, 0); + Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0); + Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0); + Point3F middle(0, 0, 0); - Point3F center(0, 0, 0); - mat.mulP(center); + Point3F center(0, 0, 0); + mat.mulP(center); - //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE); + //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE); - mat.mulP(middleStart); - mat.mulP(middle); - mat.mulP(middleEnd); + mat.mulP(middleStart); + mat.mulP(middle); + mat.mulP(middleEnd); - char buffer[256]; - dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z); - DebugDrawer::get()->drawText(middle, buffer); - DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back - DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward - DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box - 
DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE); - } + char buffer[256]; + dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z); + DebugDrawer::get()->drawText(middle, buffer); + DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back + DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward + DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box + DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE); + } - if (isClientObject() && smDebugControllerMovePosition) - { - MatrixF transform = getRenderTransform(); - transform.scale(mObjScale); - DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform); - - // jamesu - grab server object pose for debugging - OpenVRTrackedObject* tracked = static_cast(getServerObject()); - if (tracked) - { - mPose = tracked->mPose; - } + if (isClientObject() && smDebugControllerMovePosition) + { + MatrixF transform = getRenderTransform(); + transform.scale(mObjScale); + DebugDrawer::get()->drawTransformedBoxOutline(mObjBox.minExtents, mObjBox.maxExtents, ColorI::RED, transform); + + // jamesu - grab server object pose for debugging + OpenVRTrackedObject* tracked = static_cast(getServerObject()); + if (tracked) + { + mPose = tracked->mPose; + } - ColorI drawColor = ColorI::GREEN; - if (!pose.valid) - { - drawColor = ColorI::RED; - } - // Draw Controller - MatrixF mat(1); - mPose.orientation.setMatrix(&mat); - mat.setPosition(mPose.position); - mat.inverse(); // same as HMD - mat = offsetMat * mat; + ColorI drawColor = ColorI::GREEN; + if (!pose.valid) + { + drawColor = ColorI::RED; + } + // Draw Controller + MatrixF mat(1); + mPose.orientation.setMatrix(&mat); + mat.setPosition(mPose.position); + mat.inverse(); // same as HMD + mat = offsetMat * mat; - Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0); - Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0); - Point3F middle(0, 0, 0); + Point3F middleStart(0, -1 * CONTROLLER_SCALE, 0); + Point3F middleEnd(0, 1 * CONTROLLER_SCALE, 0); + Point3F middle(0, 0, 0); - Point3F center(0, 0, 0); - mat.mulP(center); + Point3F center(0, 0, 0); + mat.mulP(center); - //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE); + //DebugDrawer::get()->drawBox(center - Point3F(0.1), center + Point3F(0.1), ColorI::BLUE); - mat.mulP(middleStart); - mat.mulP(middle); - mat.mulP(middleEnd); + mat.mulP(middleStart); + mat.mulP(middle); + mat.mulP(middleEnd); - char buffer[256]; - dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z); - DebugDrawer::get()->drawText(middle, buffer); - DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back - DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward - DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box - DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE); - } + char buffer[256]; + dSprintf(buffer, 256, "%f %f %f", center.x, center.y, center.z); + DebugDrawer::get()->drawText(middle, buffer); + DebugDrawer::get()->drawLine(middleStart, middle, ColorI(0, 255, 0)); // axis back + DebugDrawer::get()->drawLine(middleEnd, middle, ColorI(255, 0, 0)); // axis forward + 
DebugDrawer::get()->drawTransformedBoxOutline(Point3F(-0.5, -1, -0.5) * CONTROLLER_SCALE, Point3F(0.5, 1, 0.5) * CONTROLLER_SCALE, drawColor, mat); // general box + DebugDrawer::get()->drawBoxOutline(Point3F(-1), Point3F(1), ColorI::WHITE); + } - // Controller matrix base - MatrixF trackedMat = getTrackedTransform(); - MatrixF invTrackedMat(1); + // Controller matrix base + MatrixF trackedMat = getTrackedTransform(); + MatrixF invTrackedMat(1); - invTrackedMat = trackedMat; - invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos) + invTrackedMat = trackedMat; + invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos) - invTrackedMat = getBaseTrackingTransform() * invTrackedMat; - trackedMat = invTrackedMat; - trackedMat.inverse(); + invTrackedMat = getBaseTrackingTransform() * invTrackedMat; + trackedMat = invTrackedMat; + trackedMat.inverse(); - // Render the controllers, using either the render model or the shape - if (mShapeInstance) - { - // Calculate the distance of this object from the camera - Point3F cameraOffset = invTrackedMat.getPosition(); - cameraOffset -= state->getDiffuseCameraPosition(); - F32 dist = cameraOffset.len(); - if (dist < 0.01f) - dist = 0.01f; + // Render the controllers, using either the render model or the shape + if (mShapeInstance) + { + // Calculate the distance of this object from the camera + Point3F cameraOffset = invTrackedMat.getPosition(); + cameraOffset -= state->getDiffuseCameraPosition(); + F32 dist = cameraOffset.len(); + if (dist < 0.01f) + dist = 0.01f; - // Set up the LOD for the shape - F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z)); + // Set up the LOD for the shape + F32 invScale = (1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z)); - mShapeInstance->setDetailFromDistance(state, dist * invScale); + mShapeInstance->setDetailFromDistance(state, dist * invScale); - // Make sure we have a valid level of detail - if (mShapeInstance->getCurrentDetail() < 0) - return; + // Make sure we have a valid level of detail + if (mShapeInstance->getCurrentDetail() < 0) + return; - // GFXTransformSaver is a handy helper class that restores - // the current GFX matrices to their original values when - // it goes out of scope at the end of the function - GFXTransformSaver saver; + // GFXTransformSaver is a handy helper class that restores + // the current GFX matrices to their original values when + // it goes out of scope at the end of the function + GFXTransformSaver saver; - // Set up our TS render state - TSRenderState rdata; - rdata.setSceneState(state); - rdata.setFadeOverride(1.0f); + // Set up our TS render state + TSRenderState rdata; + rdata.setSceneState(state); + rdata.setFadeOverride(1.0f); - // We might have some forward lit materials - // so pass down a query to gather lights. - LightQuery query; - query.init(getWorldSphere()); - rdata.setLightQuery(&query); + // We might have some forward lit materials + // so pass down a query to gather lights. 
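The detail selection above compensates for the object's scale so a scaled-up controller does not drop to a lower LOD too early: the camera distance is divided by the largest axis scale before being handed to the shape instance. Reduced to its essentials (illustrative):

   Point3F cameraOffset = invTrackedMat.getPosition() - state->getDiffuseCameraPosition();
   F32 dist = getMax(cameraOffset.len(), 0.01f);                  // avoid a zero distance
   F32 invScale = 1.0f / getMax(getMax(mObjScale.x, mObjScale.y), mObjScale.z);
   mShapeInstance->setDetailFromDistance(state, dist * invScale);
   if (mShapeInstance->getCurrentDetail() < 0)
      return;                                                     // nothing to render at this distance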
+ LightQuery query; + query.init(getWorldSphere()); + rdata.setLightQuery(&query); - // Set the world matrix to the objects render transform - MatrixF mat = trackedMat; + // Set the world matrix to the objects render transform + MatrixF mat = trackedMat; - mat.scale(mObjScale); - GFX->setWorldMatrix(mat); + mat.scale(mObjScale); + GFX->setWorldMatrix(mat); - // TODO: move the nodes about for components + // TODO: move the nodes about for components - mShapeInstance->animate(); - mShapeInstance->render(rdata); - } - else if (mRenderComponents.size() > 0) - { - vr::IVRRenderModels *models = OPENVR->getRenderModels(); - if (!models) - return; + mShapeInstance->animate(); + mShapeInstance->render(rdata); + } + else if (mRenderComponents.size() > 0) + { + vr::IVRRenderModels *models = OPENVR->getRenderModels(); + if (!models) + return; - vr::IVRSystem* vrs = vr::VRSystem(); + vr::IVRSystem* vrs = vr::VRSystem(); - if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState)) - { - return; - } + if (!vrs->GetControllerState(mDeviceIndex, &mCurrentControllerState)) + { + return; + } - for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++) - { - RenderModelSlot slot = mRenderComponents[i]; - vr::RenderModel_ControllerMode_State_t modeState; - vr::RenderModel_ComponentState_t componentState; + for (U32 i = 0, sz = mRenderComponents.size(); i < sz; i++) + { + RenderModelSlot slot = mRenderComponents[i]; + vr::RenderModel_ControllerMode_State_t modeState; + vr::RenderModel_ComponentState_t componentState; - modeState.bScrollWheelVisible = false; + modeState.bScrollWheelVisible = false; - if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState)) - { - MeshRenderInst *ri = renderPass->allocInst(); + if (models->GetComponentState(mModelName, slot.componentName, &mCurrentControllerState, &modeState, &componentState)) + { + MeshRenderInst *ri = renderPass->allocInst(); - // Set our RenderInst as a standard mesh render - ri->type = RenderPassManager::RIT_Mesh; + // Set our RenderInst as a standard mesh render + ri->type = RenderPassManager::RIT_Mesh; - // Calculate our sorting point - if (state && slot.nativeModel) - { - // Calculate our sort point manually. - const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat); - ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition()); - } - else - { - ri->sortDistSq = 0.0f; - } + // Calculate our sorting point + if (state && slot.nativeModel) + { + // Calculate our sort point manually. 
+ const Box3F rBox = slot.nativeModel->getWorldBox(invTrackedMat); + ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition()); + } + else + { + ri->sortDistSq = 0.0f; + } - MatrixF newTransform = trackedMat; - MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel); - MatrixF offComponentMat(1); - OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat); + MatrixF newTransform = trackedMat; + MatrixF controllerOffsMat = OpenVRUtil::convertSteamVRAffineMatrixToMatrixFPlain(componentState.mTrackingToComponentRenderModel); + MatrixF offComponentMat(1); + OpenVRUtil::convertTransformFromOVR(controllerOffsMat, offComponentMat); - newTransform = offComponentMat * newTransform; + newTransform = offComponentMat * newTransform; - newTransform.inverse(); + newTransform.inverse(); - //DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE); + //DebugDrawer::get()->drawBox(newTransform.getPosition() - Point3F(0.001), newTransform.getPosition() + Point3F(0.001), ColorI::BLUE); - if (!slot.nativeModel) - continue; - if (i < 1) - continue; + if (!slot.nativeModel) + continue; + if (i < 1) + continue; - // Set up our transforms - ri->objectToWorld = renderPass->allocUniqueXform(newTransform); - ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View); - ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection); + // Set up our transforms + ri->objectToWorld = renderPass->allocUniqueXform(newTransform); + ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View); + ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection); - // If our material needs lights then fill the RIs - // light vector with the best lights. - if (true) - { - LightQuery query; - Point3F center(0, 0, 0); - invTrackedMat.mulP(center); - query.init(SphereF(center, 10.0f)); - query.getLights(ri->lights, 8); - } + // If our material needs lights then fill the RIs + // light vector with the best lights. + if (true) + { + LightQuery query; + Point3F center(0, 0, 0); + invTrackedMat.mulP(center); + query.init(SphereF(center, 10.0f)); + query.getLights(ri->lights, 8); + } - // Draw model - slot.nativeModel->draw(state, ri); - state->getRenderPass()->addInst(ri); - } - } - } - else if (mBasicModel) - { - MeshRenderInst *ri = renderPass->allocInst(); + // Draw model + slot.nativeModel->draw(state, ri); + state->getRenderPass()->addInst(ri); + } + } + } + else if (mBasicModel) + { + MeshRenderInst *ri = renderPass->allocInst(); - // Set our RenderInst as a standard mesh render - ri->type = RenderPassManager::RIT_Mesh; + // Set our RenderInst as a standard mesh render + ri->type = RenderPassManager::RIT_Mesh; - // Calculate our sorting point - if (state) - { - // Calculate our sort point manually. - const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat); - ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition()); - } - else - { - ri->sortDistSq = 0.0f; - } + // Calculate our sorting point + if (state) + { + // Calculate our sort point manually. 
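Both render paths compute their sort point the same way: the squared distance from the camera to the model's world-space bounding box, which the render pass uses when ordering instances. In isolation, the per-instance setup looks like this (illustrative; 'model' stands for the loaded OpenVRRenderModel and 'objectToWorld' for the transform computed above, and the allocInst() template argument, stripped in the hunks above, is assumed to be MeshRenderInst):

   MeshRenderInst *ri = renderPass->allocInst<MeshRenderInst>();
   ri->type = RenderPassManager::RIT_Mesh;

   const Box3F worldBox = model->getWorldBox(invTrackedMat);          // model box moved into world space
   ri->sortDistSq = worldBox.getSqDistanceToPoint(state->getCameraPosition());

   ri->objectToWorld = renderPass->allocUniqueXform(objectToWorld);   // per-instance transform
   ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View);
   ri->projection   = renderPass->allocSharedXform(RenderPassManager::Projection);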
+ const Box3F rBox = mBasicModel->getWorldBox(invTrackedMat); + ri->sortDistSq = rBox.getSqDistanceToPoint(state->getCameraPosition()); + } + else + { + ri->sortDistSq = 0.0f; + } - MatrixF newTransform = invTrackedMat; - // Set up our transforms - ri->objectToWorld = renderPass->allocUniqueXform(newTransform); - ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View); - ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection); + MatrixF newTransform = invTrackedMat; + // Set up our transforms + ri->objectToWorld = renderPass->allocUniqueXform(newTransform); + ri->worldToCamera = renderPass->allocSharedXform(RenderPassManager::View); + ri->projection = renderPass->allocSharedXform(RenderPassManager::Projection); - // If our material needs lights then fill the RIs - // light vector with the best lights. - if (true) - { - LightQuery query; - Point3F center(0, 0, 0); - invTrackedMat.mulP(center); - query.init(SphereF(center, 10.0f)); - query.getLights(ri->lights, 8); - } + // If our material needs lights then fill the RIs + // light vector with the best lights. + if (true) + { + LightQuery query; + Point3F center(0, 0, 0); + invTrackedMat.mulP(center); + query.init(SphereF(center, 10.0f)); + query.getLights(ri->lights, 8); + } - // Draw model - mBasicModel->draw(state, ri); - state->getRenderPass()->addInst(ri); - } + // Draw model + mBasicModel->draw(state, ri); + state->getRenderPass()->addInst(ri); + } } U32 OpenVRTrackedObject::getCollisionMask() { - if (isServerObject()) - return sServerCollisionMask; - else - return sClientCollisionMask; + if (isServerObject()) + return sServerCollisionMask; + else + return sClientCollisionMask; } void OpenVRTrackedObject::updateWorkingCollisionSet() { - const U32 mask = getCollisionMask(); - Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale()); - F32 len = (50) * TickSec; - F32 l = (len * 1.1) + 0.1; // fudge factor - convexBox.minExtents -= Point3F(l, l, l); - convexBox.maxExtents += Point3F(l, l, l); + const U32 mask = getCollisionMask(); + Box3F convexBox = mConvexList->getBoundingBox(getTransform(), getScale()); + F32 len = (50) * TickSec; + F32 l = (len * 1.1) + 0.1; // fudge factor + convexBox.minExtents -= Point3F(l, l, l); + convexBox.maxExtents += Point3F(l, l, l); - disableCollision(); - mConvexList->updateWorkingList(convexBox, mask); - enableCollision(); + disableCollision(); + mConvexList->updateWorkingList(convexBox, mask); + enableCollision(); } void OpenVRTrackedObject::updateMove(const Move *move) { - // Set transform based on move + // Set transform based on move #ifdef TORQUE_EXTENDED_MOVE - const ExtendedMove* emove = dynamic_cast(move); - if (!emove) - return; + const ExtendedMove* emove = dynamic_cast(move); + if (!emove) + return; - U32 emoveIndex = mMappedMoveIndex; - if (emoveIndex >= ExtendedMove::MaxPositionsRotations) - emoveIndex = 0; + U32 emoveIndex = mMappedMoveIndex; + if (emoveIndex >= ExtendedMove::MaxPositionsRotations) + emoveIndex = 0; - //IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); - //Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex); + //IDevicePose pose = OPENVR->getTrackedDevicePose(mDeviceIndex); + //Con::printf("OpenVRTrackedObject::processTick move %i", emoveIndex); - if (!emove->EulerBasedRotation[emoveIndex]) - { - AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]); - // Update our pose based on the move info - mPose.orientation 
= inRot; - mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]); - mPose.valid = true; - mPose.connected = true; - } + if (!emove->EulerBasedRotation[emoveIndex]) + { + AngAxisF inRot = AngAxisF(Point3F(emove->rotX[emoveIndex], emove->rotY[emoveIndex], emove->rotZ[emoveIndex]), emove->rotW[emoveIndex]); + // Update our pose based on the move info + mPose.orientation = inRot; + mPose.position = Point3F(emove->posX[emoveIndex], emove->posY[emoveIndex], emove->posZ[emoveIndex]); + mPose.valid = true; + mPose.connected = true; + } - // Set transform based on move pose - MatrixF trackedMat(1); - MatrixF invTrackedMat(1); + // Set transform based on move pose + MatrixF trackedMat(1); + MatrixF invTrackedMat(1); - mPose.orientation.setMatrix(&trackedMat); - trackedMat.setPosition(mPose.position); + mPose.orientation.setMatrix(&trackedMat); + trackedMat.setPosition(mPose.position); - invTrackedMat = trackedMat; - invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos) + invTrackedMat = trackedMat; + invTrackedMat.inverse(); // -> world mat (as opposed to world -> tracked pos) - invTrackedMat = getBaseTrackingTransform() * invTrackedMat; - trackedMat = invTrackedMat; - trackedMat.inverse(); + invTrackedMat = getBaseTrackingTransform() * invTrackedMat; + trackedMat = invTrackedMat; + trackedMat.inverse(); - SceneObject::setTransform(invTrackedMat); + SceneObject::setTransform(invTrackedMat); - if (mPhysicsRep) - mPhysicsRep->setTransform(invTrackedMat); + if (mPhysicsRep) + mPhysicsRep->setTransform(invTrackedMat); #endif } void OpenVRTrackedObject::processTick(const Move *move) { - // Perform collision checks - if (isServerObject()) - { - updateMove(move); + // Perform collision checks + if (isServerObject()) + { + updateMove(move); - if (!mPhysicsRep) - { - updateWorkingCollisionSet(); - } - } + if (!mPhysicsRep) + { + updateWorkingCollisionSet(); + } + } - Parent::processTick(move); + Parent::processTick(move); } void OpenVRTrackedObject::interpolateTick(F32 delta) { - // Set latest transform + // Set latest transform - Parent::interpolateTick(delta); + Parent::interpolateTick(delta); } void OpenVRTrackedObject::advanceTime(F32 dt) { - Parent::advanceTime(dt); + Parent::advanceTime(dt); } bool OpenVRTrackedObject::castRay(const Point3F &start, const Point3F &end, RayInfo* info) { - if (!mPose.connected || !mPose.valid) - return false; + if (!mPose.connected || !mPose.valid) + return false; - // Collide against bounding box. - F32 st, et, fst = 0.0f, fet = 1.0f; - F32 *bmin = &mObjBox.minExtents.x; - F32 *bmax = &mObjBox.maxExtents.x; - F32 const *si = &start.x; - F32 const *ei = &end.x; + // Collide against bounding box. + F32 st, et, fst = 0.0f, fet = 1.0f; + F32 *bmin = &mObjBox.minExtents.x; + F32 *bmax = &mObjBox.maxExtents.x; + F32 const *si = &start.x; + F32 const *ei = &end.x; - for (S32 i = 0; i < 3; i++) { - if (*si < *ei) { - if (*si > *bmax || *ei < *bmin) - return false; - F32 di = *ei - *si; - st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f; - et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f; - } - else { - if (*ei > *bmax || *si < *bmin) - return false; - F32 di = *ei - *si; - st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f; - et = (*ei < *bmin) ? 
(*bmin - *si) / di : 1.0f; - } - if (st > fst) fst = st; - if (et < fet) fet = et; - if (fet < fst) - return false; - bmin++; bmax++; - si++; ei++; - } + for (S32 i = 0; i < 3; i++) { + if (*si < *ei) { + if (*si > *bmax || *ei < *bmin) + return false; + F32 di = *ei - *si; + st = (*si < *bmin) ? (*bmin - *si) / di : 0.0f; + et = (*ei > *bmax) ? (*bmax - *si) / di : 1.0f; + } + else { + if (*ei > *bmax || *si < *bmin) + return false; + F32 di = *ei - *si; + st = (*si > *bmax) ? (*bmax - *si) / di : 0.0f; + et = (*ei < *bmin) ? (*bmin - *si) / di : 1.0f; + } + if (st > fst) fst = st; + if (et < fet) fet = et; + if (fet < fst) + return false; + bmin++; bmax++; + si++; ei++; + } - info->normal = start - end; - info->normal.normalizeSafe(); - getTransform().mulV(info->normal); + info->normal = start - end; + info->normal.normalizeSafe(); + getTransform().mulV(info->normal); - info->t = fst; - info->object = this; - info->point.interpolate(start, end, fst); - info->material = 0; - return true; + info->t = fst; + info->object = this; + info->point.interpolate(start, end, fst); + info->material = 0; + return true; } void OpenVRTrackedObject::buildConvex(const Box3F& box, Convex* convex) { - // These should really come out of a pool - mConvexList->collectGarbage(); + // These should really come out of a pool + mConvexList->collectGarbage(); - Box3F realBox = box; - mWorldToObj.mul(realBox); - realBox.minExtents.convolveInverse(mObjScale); - realBox.maxExtents.convolveInverse(mObjScale); + Box3F realBox = box; + mWorldToObj.mul(realBox); + realBox.minExtents.convolveInverse(mObjScale); + realBox.maxExtents.convolveInverse(mObjScale); - if (realBox.isOverlapped(getObjBox()) == false) - return; + if (realBox.isOverlapped(getObjBox()) == false) + return; - // Just return a box convex for the entire shape... - Convex* cc = 0; - CollisionWorkingList& wl = convex->getWorkingList(); - for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) { - if (itr->mConvex->getType() == BoxConvexType && - itr->mConvex->getObject() == this) { - cc = itr->mConvex; - break; - } - } - if (cc) - return; + // Just return a box convex for the entire shape... + Convex* cc = 0; + CollisionWorkingList& wl = convex->getWorkingList(); + for (CollisionWorkingList* itr = wl.wLink.mNext; itr != &wl; itr = itr->wLink.mNext) { + if (itr->mConvex->getType() == BoxConvexType && + itr->mConvex->getObject() == this) { + cc = itr->mConvex; + break; + } + } + if (cc) + return; - // Create a new convex. - BoxConvex* cp = new BoxConvex; - mConvexList->registerObject(cp); - convex->addToWorkingList(cp); - cp->init(this); + // Create a new convex. + BoxConvex* cp = new BoxConvex; + mConvexList->registerObject(cp); + convex->addToWorkingList(cp); + cp->init(this); - mObjBox.getCenter(&cp->mCenter); - cp->mSize.x = mObjBox.len_x() / 2.0f; - cp->mSize.y = mObjBox.len_y() / 2.0f; - cp->mSize.z = mObjBox.len_z() / 2.0f; + mObjBox.getCenter(&cp->mCenter); + cp->mSize.x = mObjBox.len_x() / 2.0f; + cp->mSize.y = mObjBox.len_y() / 2.0f; + cp->mSize.z = mObjBox.len_z() / 2.0f; } bool OpenVRTrackedObject::testObject(SceneObject* enter) { - return false; // TODO + return false; // TODO } DefineEngineMethod(OpenVRTrackedObject, setModelName, void, (String modelName),, "Set model name. 
Typically you should do this from the client to update the server representation.") { - object->setModelName(modelName); + object->setModelName(modelName); } diff --git a/Engine/source/platform/input/openVR/openVRTrackedObject.h b/Engine/source/platform/input/openVR/openVRTrackedObject.h index 572649a8b..eb2feb87c 100644 --- a/Engine/source/platform/input/openVR/openVRTrackedObject.h +++ b/Engine/source/platform/input/openVR/openVRTrackedObject.h @@ -23,132 +23,132 @@ class PhysicsBody; class OpenVRTrackedObjectData : public GameBaseData { public: - typedef GameBaseData Parent; + typedef GameBaseData Parent; - StringTableEntry mShapeFile; - Resource mShape; ///< Torque model + StringTableEntry mShapeFile; + Resource mShape; ///< Torque model - Point3F mCollisionBoxMin; - Point3F mCollisionBoxMax; + Point3F mCollisionBoxMin; + Point3F mCollisionBoxMax; public: - OpenVRTrackedObjectData(); - ~OpenVRTrackedObjectData(); + OpenVRTrackedObjectData(); + ~OpenVRTrackedObjectData(); - DECLARE_CONOBJECT(OpenVRTrackedObjectData); + DECLARE_CONOBJECT(OpenVRTrackedObjectData); - bool onAdd(); - bool preload(bool server, String &errorStr); + bool onAdd(); + bool preload(bool server, String &errorStr); - static void initPersistFields(); + static void initPersistFields(); - virtual void packData(BitStream* stream); - virtual void unpackData(BitStream* stream); + virtual void packData(BitStream* stream); + virtual void unpackData(BitStream* stream); }; /// Implements a GameObject which tracks an OpenVR controller class OpenVRTrackedObject : public GameBase { - typedef GameBase Parent; + typedef GameBase Parent; - enum MaskBits - { - UpdateMask = Parent::NextFreeMask << 0, - NextFreeMask = Parent::NextFreeMask << 1 - }; + enum MaskBits + { + UpdateMask = Parent::NextFreeMask << 0, + NextFreeMask = Parent::NextFreeMask << 1 + }; - struct RenderModelSlot - { - StringTableEntry componentName; ///< Component name - S16 mappedNodeIdx; ///< Mapped node idx in mShape - OpenVRRenderModel *nativeModel; ///< Native model - }; + struct RenderModelSlot + { + StringTableEntry componentName; ///< Component name + S16 mappedNodeIdx; ///< Mapped node idx in mShape + OpenVRRenderModel *nativeModel; ///< Native model + }; - OpenVRTrackedObjectData *mDataBlock; + OpenVRTrackedObjectData *mDataBlock; - /// @name Rendering - /// { - TSShapeInstance *mShapeInstance; ///< Shape used to render controller (uses native model otherwise) - StringTableEntry mModelName; - OpenVRRenderModel *mBasicModel; ///< Basic model - Vector mRenderComponents; - /// } + /// @name Rendering + /// { + TSShapeInstance *mShapeInstance; ///< Shape used to render controller (uses native model otherwise) + StringTableEntry mModelName; + OpenVRRenderModel *mBasicModel; ///< Basic model + Vector mRenderComponents; + /// } - S32 mDeviceIndex; ///< Controller idx in openvr (for direct updating) - S32 mMappedMoveIndex; ///< Movemanager move index for rotation + S32 mDeviceIndex; ///< Controller idx in openvr (for direct updating) + S32 mMappedMoveIndex; ///< Movemanager move index for rotation - vr::VRControllerState_t mCurrentControllerState; - vr::VRControllerState_t mPreviousControllerState; + vr::VRControllerState_t mCurrentControllerState; + vr::VRControllerState_t mPreviousControllerState; - IDevicePose mPose; ///< Current openvr pose data, or reconstructed data from the client + IDevicePose mPose; ///< Current openvr pose data, or reconstructed data from the client - Convex* mConvexList; - EarlyOutPolyList mClippedList; - PhysicsBody *mPhysicsRep; + 
Convex* mConvexList; + EarlyOutPolyList mClippedList; + PhysicsBody *mPhysicsRep; - SimObjectPtr mCollisionObject; ///< Object we're currently colliding with - SimObjectPtr mInteractObject; ///< Object we've designated as important to interact with + SimObjectPtr mCollisionObject; ///< Object we're currently colliding with + SimObjectPtr mInteractObject; ///< Object we've designated as important to interact with - bool mHoldInteractedObject; ///< Performs pickup logic with mInteractObject - bool mIgnoreParentRotation; ///< Ignores the rotation of the parent object + bool mHoldInteractedObject; ///< Performs pickup logic with mInteractObject + bool mIgnoreParentRotation; ///< Ignores the rotation of the parent object - static bool smDebugControllerPosition; ///< Shows latest controller position in DebugDrawer - static bool smDebugControllerMovePosition; ///< Shows move position in DebugDrawer - static U32 sServerCollisionMask; - static U32 sClientCollisionMask; + static bool smDebugControllerPosition; ///< Shows latest controller position in DebugDrawer + static bool smDebugControllerMovePosition; ///< Shows move position in DebugDrawer + static U32 sServerCollisionMask; + static U32 sClientCollisionMask; public: - OpenVRTrackedObject(); - virtual ~OpenVRTrackedObject(); + OpenVRTrackedObject(); + virtual ~OpenVRTrackedObject(); - void updateRenderData(); - void setupRenderDataFromModel(bool loadComponentModels); + void updateRenderData(); + void setupRenderDataFromModel(bool loadComponentModels); - void clearRenderData(); + void clearRenderData(); - DECLARE_CONOBJECT(OpenVRTrackedObject); + DECLARE_CONOBJECT(OpenVRTrackedObject); - static void initPersistFields(); + static void initPersistFields(); - virtual void inspectPostApply(); + virtual void inspectPostApply(); - bool onAdd(); - void onRemove(); + bool onAdd(); + void onRemove(); - void _updatePhysics(); - bool onNewDataBlock(GameBaseData *dptr, bool reload); + void _updatePhysics(); + bool onNewDataBlock(GameBaseData *dptr, bool reload); - void setInteractObject(SceneObject* object, bool holding); + void setInteractObject(SceneObject* object, bool holding); - void setTransform(const MatrixF &mat); - void setModelName(String &modelName); + void setTransform(const MatrixF &mat); + void setModelName(String &modelName); - U32 packUpdate(NetConnection *conn, U32 mask, BitStream *stream); - void unpackUpdate(NetConnection *conn, BitStream *stream); - void writePacketData(GameConnection *conn, BitStream *stream); - void readPacketData(GameConnection *conn, BitStream *stream); + U32 packUpdate(NetConnection *conn, U32 mask, BitStream *stream); + void unpackUpdate(NetConnection *conn, BitStream *stream); + void writePacketData(GameConnection *conn, BitStream *stream); + void readPacketData(GameConnection *conn, BitStream *stream); - void prepRenderImage(SceneRenderState *state); + void prepRenderImage(SceneRenderState *state); - MatrixF getTrackedTransform(); - MatrixF getLastTrackedTransform(); - MatrixF getBaseTrackingTransform(); + MatrixF getTrackedTransform(); + MatrixF getLastTrackedTransform(); + MatrixF getBaseTrackingTransform(); - U32 getCollisionMask(); - void updateWorkingCollisionSet(); + U32 getCollisionMask(); + void updateWorkingCollisionSet(); - // Time management - void updateMove(const Move *move); - void processTick(const Move *move); - void interpolateTick(F32 delta); - void advanceTime(F32 dt); + // Time management + void updateMove(const Move *move); + void processTick(const Move *move); + void interpolateTick(F32 
delta); + void advanceTime(F32 dt); - // Collision - bool castRay(const Point3F &start, const Point3F &end, RayInfo* info); - void buildConvex(const Box3F& box, Convex* convex); - bool testObject(SceneObject* enter); + // Collision + bool castRay(const Point3F &start, const Point3F &end, RayInfo* info); + void buildConvex(const Box3F& box, Convex* convex); + bool testObject(SceneObject* enter); }; From 455aa99046a12babe83ffb43d3ec838238622a08 Mon Sep 17 00:00:00 2001 From: James Urquhart Date: Tue, 13 Sep 2016 10:24:23 +0100 Subject: [PATCH 33/33] Add missing bracket --- Engine/source/gfx/D3D11/gfxD3D11Device.cpp | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Engine/source/gfx/D3D11/gfxD3D11Device.cpp b/Engine/source/gfx/D3D11/gfxD3D11Device.cpp index 2881e1f48..a71e9c932 100644 --- a/Engine/source/gfx/D3D11/gfxD3D11Device.cpp +++ b/Engine/source/gfx/D3D11/gfxD3D11Device.cpp @@ -1597,7 +1597,7 @@ GFXVertexDecl* GFXD3D11Device::allocVertexDecl( const GFXVertexFormat *vertexFor S32 elemIndex = 0; for (S32 i = 0; i < elemCount; i++, elemIndex++) - + { const GFXVertexElement &element = vertexFormat->getElement(elemIndex); stream = element.getStreamIndex();
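
For context on the "Add missing bracket" fix above: without the opening brace after the for statement, only the single statement that immediately follows it forms the loop body, so the element lookup no longer runs per iteration and the later references to `element` fall out of scope. A minimal standalone sketch of the same pitfall, using made-up names rather than the engine's allocVertexDecl code:

   #include <cstdio>

   int main()
   {
      int total = 0;

      // If the braces below were removed, only "total += i;" would be the
      // loop body; the printf inside would then run once, after the loop,
      // instead of on every iteration.
      for (int i = 0; i < 4; i++)
      {
         total += i;
         std::printf("i = %d, running total = %d\n", i, total);
      }

      std::printf("final total = %d\n", total); // prints 6
      return 0;
   }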