author | Matt A. Tobin <mattatobin@localhost.localdomain> | 2018-02-02 04:16:08 -0500
committer | Matt A. Tobin <mattatobin@localhost.localdomain> | 2018-02-02 04:16:08 -0500
commit | 5f8de423f190bbb79a62f804151bc24824fa32d8 (patch)
tree | 10027f336435511475e392454359edea8e25895d /gfx/vr
parent | 49ee0794b5d912db1f95dce6eb52d781dc210db5 (diff)
Add m-esr52 at 52.6.0
Diffstat (limited to 'gfx/vr')
62 files changed, 13577 insertions, 0 deletions
diff --git a/gfx/vr/VRDisplayClient.cpp b/gfx/vr/VRDisplayClient.cpp new file mode 100644 index 000000000..2f258e987 --- /dev/null +++ b/gfx/vr/VRDisplayClient.cpp @@ -0,0 +1,143 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include <math.h> + +#include "prlink.h" +#include "prmem.h" +#include "prenv.h" +#include "gfxPrefs.h" +#include "nsString.h" +#include "mozilla/Preferences.h" +#include "mozilla/Unused.h" +#include "nsServiceManagerUtils.h" +#include "nsIScreenManager.h" + +#ifdef XP_WIN +#include "../layers/d3d11/CompositorD3D11.h" +#endif + +#include "VRDisplayClient.h" +#include "VRDisplayPresentation.h" +#include "VRManagerChild.h" +#include "VRLayerChild.h" + +using namespace mozilla; +using namespace mozilla::gfx; + +VRDisplayClient::VRDisplayClient(const VRDisplayInfo& aDisplayInfo) + : mDisplayInfo(aDisplayInfo) + , bLastEventWasPresenting(false) + , mPresentationCount(0) +{ + MOZ_COUNT_CTOR(VRDisplayClient); +} + +VRDisplayClient::~VRDisplayClient() { + MOZ_COUNT_DTOR(VRDisplayClient); +} + +void +VRDisplayClient::UpdateDisplayInfo(const VRDisplayInfo& aDisplayInfo) +{ + mDisplayInfo = aDisplayInfo; +} + +already_AddRefed<VRDisplayPresentation> +VRDisplayClient::BeginPresentation(const nsTArray<mozilla::dom::VRLayer>& aLayers) +{ + ++mPresentationCount; + RefPtr<VRDisplayPresentation> presentation = new VRDisplayPresentation(this, aLayers); + return presentation.forget(); +} + +void +VRDisplayClient::PresentationDestroyed() +{ + --mPresentationCount; +} + +void +VRDisplayClient::ZeroSensor() +{ + VRManagerChild *vm = VRManagerChild::Get(); + vm->SendResetSensor(mDisplayInfo.mDisplayID); +} + +VRHMDSensorState +VRDisplayClient::GetSensorState() +{ + VRHMDSensorState sensorState; + VRManagerChild *vm = VRManagerChild::Get(); + Unused << vm->SendGetSensorState(mDisplayInfo.mDisplayID, &sensorState); + return sensorState; +} + +VRHMDSensorState +VRDisplayClient::GetImmediateSensorState() +{ + VRHMDSensorState sensorState; + + VRManagerChild *vm = VRManagerChild::Get(); + Unused << vm->SendGetImmediateSensorState(mDisplayInfo.mDisplayID, &sensorState); + return sensorState; +} + +const double kVRDisplayRAFMaxDuration = 32; // milliseconds + +void +VRDisplayClient::NotifyVsync() +{ + VRManagerChild *vm = VRManagerChild::Get(); + + bool isPresenting = GetIsPresenting(); + + bool bShouldCallback = !isPresenting; + if (mLastVSyncTime.IsNull()) { + bShouldCallback = true; + } else { + TimeDuration duration = TimeStamp::Now() - mLastVSyncTime; + if (duration.ToMilliseconds() > kVRDisplayRAFMaxDuration) { + bShouldCallback = true; + } + } + + if (bShouldCallback) { + vm->RunFrameRequestCallbacks(); + mLastVSyncTime = TimeStamp::Now(); + } + + // Check if we need to trigger onVRDisplayPresentChange event + if (bLastEventWasPresenting != isPresenting) { + bLastEventWasPresenting = isPresenting; + vm->FireDOMVRDisplayPresentChangeEvent(); + } +} + +void +VRDisplayClient::NotifyVRVsync() +{ + VRManagerChild *vm = VRManagerChild::Get(); + vm->RunFrameRequestCallbacks(); + mLastVSyncTime = TimeStamp::Now(); +} + +bool +VRDisplayClient::GetIsConnected() const +{ + return mDisplayInfo.GetIsConnected(); +} + +bool +VRDisplayClient::GetIsPresenting() const +{ + return mDisplayInfo.GetIsPresenting(); +} + +void +VRDisplayClient::NotifyDisconnected() +{ + 
mDisplayInfo.mIsConnected = false; +} diff --git a/gfx/vr/VRDisplayClient.h b/gfx/vr/VRDisplayClient.h new file mode 100644 index 000000000..0cdd24682 --- /dev/null +++ b/gfx/vr/VRDisplayClient.h @@ -0,0 +1,61 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef GFX_VR_DISPLAY_CLIENT_H +#define GFX_VR_DISPLAY_CLIENT_H + +#include "nsIScreen.h" +#include "nsCOMPtr.h" +#include "mozilla/RefPtr.h" +#include "mozilla/dom/VRDisplayBinding.h" + +#include "gfxVR.h" + +namespace mozilla { +namespace gfx { +class VRDisplayPresentation; +class VRManagerChild; + +class VRDisplayClient +{ +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayClient) + + explicit VRDisplayClient(const VRDisplayInfo& aDisplayInfo); + + void UpdateDisplayInfo(const VRDisplayInfo& aDisplayInfo); + + const VRDisplayInfo& GetDisplayInfo() const { return mDisplayInfo; } + virtual VRHMDSensorState GetSensorState(); + virtual VRHMDSensorState GetImmediateSensorState(); + + virtual void ZeroSensor(); + + already_AddRefed<VRDisplayPresentation> BeginPresentation(const nsTArray<dom::VRLayer>& aLayers); + void PresentationDestroyed(); + + void NotifyVsync(); + void NotifyVRVsync(); + + bool GetIsConnected() const; + bool GetIsPresenting() const; + + void NotifyDisconnected(); + +protected: + virtual ~VRDisplayClient(); + + VRDisplayInfo mDisplayInfo; + + bool bLastEventWasPresenting; + + TimeStamp mLastVSyncTime; + int mPresentationCount; +}; + +} // namespace gfx +} // namespace mozilla + +#endif /* GFX_VR_DISPLAY_CLIENT_H */ diff --git a/gfx/vr/VRDisplayHost.cpp b/gfx/vr/VRDisplayHost.cpp new file mode 100644 index 000000000..fd2fd6d6a --- /dev/null +++ b/gfx/vr/VRDisplayHost.cpp @@ -0,0 +1,201 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- +* This Source Code Form is subject to the terms of the Mozilla Public +* License, v. 2.0. If a copy of the MPL was not distributed with this +* file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "VRDisplayHost.h" +#include "gfxVR.h" + +#if defined(XP_WIN) + +#include <d3d11.h> +#include "gfxWindowsPlatform.h" +#include "../layers/d3d11/CompositorD3D11.h" +#include "mozilla/layers/TextureD3D11.h" + +#endif + +using namespace mozilla; +using namespace mozilla::gfx; +using namespace mozilla::layers; + +VRDisplayHost::VRDisplayHost(VRDeviceType aType) + : mInputFrameID(0) +{ + MOZ_COUNT_CTOR(VRDisplayHost); + mDisplayInfo.mType = aType; + mDisplayInfo.mDisplayID = VRDisplayManager::AllocateDisplayID(); + mDisplayInfo.mIsPresenting = false; + + for (int i = 0; i < kMaxLatencyFrames; i++) { + mLastSensorState[i].Clear(); + } +} + +VRDisplayHost::~VRDisplayHost() +{ + MOZ_COUNT_DTOR(VRDisplayHost); +} + +void +VRDisplayHost::AddLayer(VRLayerParent *aLayer) +{ + mLayers.AppendElement(aLayer); + if (mLayers.Length() == 1) { + StartPresentation(); + } + mDisplayInfo.mIsPresenting = mLayers.Length() > 0; + + // Ensure that the content process receives the change immediately + VRManager* vm = VRManager::Get(); + vm->RefreshVRDisplays(); +} + +void +VRDisplayHost::RemoveLayer(VRLayerParent *aLayer) +{ + mLayers.RemoveElement(aLayer); + if (mLayers.Length() == 0) { + StopPresentation(); + } + mDisplayInfo.mIsPresenting = mLayers.Length() > 0; + + // Ensure that the content process receives the change immediately + VRManager* vm = VRManager::Get(); + vm->RefreshVRDisplays(); +} + +#if defined(XP_WIN) + +void +VRDisplayHost::SubmitFrame(VRLayerParent* aLayer, const int32_t& aInputFrameID, + PTextureParent* aTexture, const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) +{ + // aInputFrameID is no longer controlled by content with the WebVR 1.1 API + // update; however, we will later use this code to enable asynchronous + // submission of multiple layers to be composited. This will enable + // us to build browser UX that remains responsive even when content does + // not consistently submit frames. + + int32_t inputFrameID = aInputFrameID; + if (inputFrameID == 0) { + inputFrameID = mInputFrameID; + } + if (inputFrameID < 0) { + // Sanity check to prevent invalid memory access on builds with assertions + // disabled. + inputFrameID = 0; + } + + VRHMDSensorState sensorState = mLastSensorState[inputFrameID % kMaxLatencyFrames]; + // It is possible to get a cache miss on mLastSensorState if latency is + // longer than kMaxLatencyFrames. An optimization would be to find a frame + // that is closer than the one selected with the modulus. + // If we hit this; however, latency is already so high that the site is + // un-viewable and a more accurate pose prediction is not likely to + // compensate. + + TextureHost* th = TextureHost::AsTextureHost(aTexture); + // WebVR doesn't use the compositor to compose the frame, so use + // AutoLockTextureHostWithoutCompositor here. 
+ AutoLockTextureHostWithoutCompositor autoLock(th); + if (autoLock.Failed()) { + NS_WARNING("Failed to lock the VR layer texture"); + return; + } + + CompositableTextureSourceRef source; + if (!th->BindTextureSource(source)) { + NS_WARNING("The TextureHost was successfully locked but can't provide a TextureSource"); + return; + } + MOZ_ASSERT(source); + + IntSize texSize = source->GetSize(); + + TextureSourceD3D11* sourceD3D11 = source->AsSourceD3D11(); + if (!sourceD3D11) { + NS_WARNING("WebVR support currently only implemented for D3D11"); + return; + } + + SubmitFrame(sourceD3D11, texSize, sensorState, aLeftEyeRect, aRightEyeRect); +} + +#else + +void +VRDisplayHost::SubmitFrame(VRLayerParent* aLayer, const int32_t& aInputFrameID, + PTextureParent* aTexture, const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) +{ + NS_WARNING("WebVR only supported in Windows."); +} + +#endif + +bool +VRDisplayHost::CheckClearDisplayInfoDirty() +{ + if (mDisplayInfo == mLastUpdateDisplayInfo) { + return false; + } + mLastUpdateDisplayInfo = mDisplayInfo; + return true; +} + +VRControllerHost::VRControllerHost(VRDeviceType aType) +{ + MOZ_COUNT_CTOR(VRControllerHost); + mControllerInfo.mType = aType; + mControllerInfo.mControllerID = VRDisplayManager::AllocateDisplayID(); +} + +VRControllerHost::~VRControllerHost() +{ + MOZ_COUNT_DTOR(VRControllerHost); +} + +const VRControllerInfo& +VRControllerHost::GetControllerInfo() const +{ + return mControllerInfo; +} + +void +VRControllerHost::SetIndex(uint32_t aIndex) +{ + mIndex = aIndex; +} + +uint32_t +VRControllerHost::GetIndex() +{ + return mIndex; +} + +void +VRControllerHost::SetButtonPressed(uint64_t aBit) +{ + mButtonPressed = aBit; +} + +uint64_t +VRControllerHost::GetButtonPressed() +{ + return mButtonPressed; +} + +void +VRControllerHost::SetPose(const dom::GamepadPoseState& aPose) +{ + mPose = aPose; +} + +const dom::GamepadPoseState& +VRControllerHost::GetPose() +{ + return mPose; +} + diff --git a/gfx/vr/VRDisplayHost.h b/gfx/vr/VRDisplayHost.h new file mode 100644 index 000000000..0e04e4fd2 --- /dev/null +++ b/gfx/vr/VRDisplayHost.h @@ -0,0 +1,114 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- +* This Source Code Form is subject to the terms of the Mozilla Public +* License, v. 2.0. If a copy of the MPL was not distributed with this +* file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_DISPLAY_HOST_H +#define GFX_VR_DISPLAY_HOST_H + +#include "gfxVR.h" +#include "nsTArray.h" +#include "nsString.h" +#include "nsCOMPtr.h" +#include "mozilla/RefPtr.h" +#include "mozilla/gfx/2D.h" +#include "mozilla/Atomics.h" +#include "mozilla/EnumeratedArray.h" +#include "mozilla/TimeStamp.h" +#include "mozilla/TypedEnumBits.h" +#include "mozilla/dom/GamepadPoseState.h" + +namespace mozilla { +namespace layers { +class PTextureParent; +#if defined(XP_WIN) +class TextureSourceD3D11; +#endif +} // namespace layers +namespace gfx { +class VRLayerParent; + +class VRDisplayHost { +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayHost) + + const VRDisplayInfo& GetDisplayInfo() const { return mDisplayInfo; } + + void AddLayer(VRLayerParent* aLayer); + void RemoveLayer(VRLayerParent* aLayer); + + virtual VRHMDSensorState GetSensorState() = 0; + virtual VRHMDSensorState GetImmediateSensorState() = 0; + virtual void ZeroSensor() = 0; + virtual void StartPresentation() = 0; + virtual void StopPresentation() = 0; + virtual void NotifyVSync() { }; + + void SubmitFrame(VRLayerParent* aLayer, + const int32_t& aInputFrameID, + mozilla::layers::PTextureParent* aTexture, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect); + + bool CheckClearDisplayInfoDirty(); + +protected: + explicit VRDisplayHost(VRDeviceType aType); + virtual ~VRDisplayHost(); + +#if defined(XP_WIN) + virtual void SubmitFrame(mozilla::layers::TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) = 0; +#endif + + VRDisplayInfo mDisplayInfo; + + nsTArray<RefPtr<VRLayerParent>> mLayers; + // Weak reference to mLayers entries are cleared in VRLayerParent destructor + + // The maximum number of frames of latency that we would expect before we + // should give up applying pose prediction. + // If latency is greater than one second, then the experience is not likely + // to be corrected by pose prediction. Setting this value too + // high may result in unnecessary memory allocation. + // As the current fastest refresh rate is 90hz, 100 is selected as a + // conservative value. + static const int kMaxLatencyFrames = 100; + VRHMDSensorState mLastSensorState[kMaxLatencyFrames]; + int32_t mInputFrameID; + +private: + VRDisplayInfo mLastUpdateDisplayInfo; +}; + +class VRControllerHost { +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRControllerHost) + + const VRControllerInfo& GetControllerInfo() const; + void SetIndex(uint32_t aIndex); + uint32_t GetIndex(); + void SetButtonPressed(uint64_t aBit); + uint64_t GetButtonPressed(); + void SetPose(const dom::GamepadPoseState& aPose); + const dom::GamepadPoseState& GetPose(); + +protected: + explicit VRControllerHost(VRDeviceType aType); + virtual ~VRControllerHost(); + + VRControllerInfo mControllerInfo; + // The controller index in VRControllerManager. + uint32_t mIndex; + // The current button pressed bit of button mask. + uint64_t mButtonPressed; + dom::GamepadPoseState mPose; +}; + +} // namespace gfx +} // namespace mozilla + +#endif /* GFX_VR_DISPLAY_HOST_H */ diff --git a/gfx/vr/VRDisplayPresentation.cpp b/gfx/vr/VRDisplayPresentation.cpp new file mode 100644 index 000000000..ba528ae7c --- /dev/null +++ b/gfx/vr/VRDisplayPresentation.cpp @@ -0,0 +1,112 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- +* This Source Code Form is subject to the terms of the Mozilla Public +* License, v. 2.0. 
If a copy of the MPL was not distributed with this +* file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "VRDisplayPresentation.h" + +#include "mozilla/Unused.h" +#include "VRDisplayClient.h" +#include "VRLayerChild.h" + +using namespace mozilla; +using namespace mozilla::gfx; + +VRDisplayPresentation::VRDisplayPresentation(VRDisplayClient *aDisplayClient, + const nsTArray<mozilla::dom::VRLayer>& aLayers) + : mDisplayClient(aDisplayClient) + , mDOMLayers(aLayers) +{ + CreateLayers(); +} + +void +VRDisplayPresentation::CreateLayers() +{ + if (mLayers.Length()) { + return; + } + + for (dom::VRLayer& layer : mDOMLayers) { + dom::HTMLCanvasElement* canvasElement = layer.mSource; + if (!canvasElement) { + /// XXX In the future we will support WebVR in WebWorkers here + continue; + } + + Rect leftBounds(0.0, 0.0, 0.5, 1.0); + if (layer.mLeftBounds.Length() == 4) { + leftBounds.x = layer.mLeftBounds[0]; + leftBounds.y = layer.mLeftBounds[1]; + leftBounds.width = layer.mLeftBounds[2]; + leftBounds.height = layer.mLeftBounds[3]; + } else if (layer.mLeftBounds.Length() != 0) { + /** + * We ignore layers with an incorrect number of values. + * In the future, VRDisplay.requestPresent may throw in + * this case. See https://github.com/w3c/webvr/issues/71 + */ + continue; + } + + Rect rightBounds(0.5, 0.0, 0.5, 1.0); + if (layer.mRightBounds.Length() == 4) { + rightBounds.x = layer.mRightBounds[0]; + rightBounds.y = layer.mRightBounds[1]; + rightBounds.width = layer.mRightBounds[2]; + rightBounds.height = layer.mRightBounds[3]; + } else if (layer.mRightBounds.Length() != 0) { + /** + * We ignore layers with an incorrect number of values. + * In the future, VRDisplay.requestPresent may throw in + * this case. See https://github.com/w3c/webvr/issues/71 + */ + continue; + } + + VRManagerChild *manager = VRManagerChild::Get(); + if (!manager) { + NS_WARNING("VRManagerChild::Get returned null!"); + continue; + } + + RefPtr<VRLayerChild> vrLayer = static_cast<VRLayerChild*>(manager->CreateVRLayer(mDisplayClient->GetDisplayInfo().GetDisplayID(), leftBounds, rightBounds)); + if (!vrLayer) { + NS_WARNING("CreateVRLayer returned null!"); + continue; + } + + vrLayer->Initialize(canvasElement); + + mLayers.AppendElement(vrLayer); + } +} + +void +VRDisplayPresentation::DestroyLayers() +{ + for (VRLayerChild* layer : mLayers) { + Unused << layer->SendDestroy(); + } + mLayers.Clear(); +} + +void +VRDisplayPresentation::GetDOMLayers(nsTArray<dom::VRLayer>& result) +{ + result = mDOMLayers; +} + +VRDisplayPresentation::~VRDisplayPresentation() +{ + DestroyLayers(); + mDisplayClient->PresentationDestroyed(); +} + +void VRDisplayPresentation::SubmitFrame() +{ + for (VRLayerChild *layer : mLayers) { + layer->SubmitFrame(); + break; // Currently only one layer supported, submit only the first + } +} diff --git a/gfx/vr/VRDisplayPresentation.h b/gfx/vr/VRDisplayPresentation.h new file mode 100644 index 000000000..28103d8f5 --- /dev/null +++ b/gfx/vr/VRDisplayPresentation.h @@ -0,0 +1,39 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- +* This Source Code Form is subject to the terms of the Mozilla Public +* License, v. 2.0. If a copy of the MPL was not distributed with this +* file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_DISPLAY_PRESENTATION_H +#define GFX_VR_DISPLAY_PRESENTATION_H + +#include "mozilla/RefPtr.h" +#include "mozilla/dom/VRDisplayBinding.h" + +namespace mozilla { +namespace gfx { +class VRDisplayClient; +class VRLayerChild; + +class VRDisplayPresentation final +{ + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayPresentation) + +public: + VRDisplayPresentation(VRDisplayClient *aDisplayClient, const nsTArray<dom::VRLayer>& aLayers); + void SubmitFrame(); + void GetDOMLayers(nsTArray<dom::VRLayer>& result); + +private: + ~VRDisplayPresentation(); + void CreateLayers(); + void DestroyLayers(); + + RefPtr<VRDisplayClient> mDisplayClient; + nsTArray<dom::VRLayer> mDOMLayers; + nsTArray<RefPtr<VRLayerChild>> mLayers; +}; + +} // namespace gfx +} // namespace mozilla + +#endif /* GFX_VR_DISPLAY_PRESENTAITON_H */ diff --git a/gfx/vr/VRManager.cpp b/gfx/vr/VRManager.cpp new file mode 100644 index 000000000..672e9e3a1 --- /dev/null +++ b/gfx/vr/VRManager.cpp @@ -0,0 +1,393 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + + +#include "VRManager.h" +#include "VRManagerParent.h" +#include "gfxVR.h" +#include "gfxVROpenVR.h" +#include "mozilla/ClearOnShutdown.h" +#include "mozilla/dom/VRDisplay.h" +#include "mozilla/dom/GamepadEventTypes.h" +#include "mozilla/layers/TextureHost.h" +#include "mozilla/Unused.h" + +#include "gfxPrefs.h" +#include "gfxVR.h" +#if defined(XP_WIN) +#include "gfxVROculus.h" +#endif +#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX) +#include "gfxVROSVR.h" +#endif +#include "ipc/VRLayerParent.h" + +using namespace mozilla; +using namespace mozilla::gfx; +using namespace mozilla::layers; +using namespace mozilla::gl; + +namespace mozilla { +namespace gfx { + +static StaticRefPtr<VRManager> sVRManagerSingleton; + +/*static*/ void +VRManager::ManagerInit() +{ + MOZ_ASSERT(NS_IsMainThread()); + + if (sVRManagerSingleton == nullptr) { + sVRManagerSingleton = new VRManager(); + ClearOnShutdown(&sVRManagerSingleton); + } +} + +VRManager::VRManager() + : mInitialized(false) +{ + MOZ_COUNT_CTOR(VRManager); + MOZ_ASSERT(sVRManagerSingleton == nullptr); + + RefPtr<VRDisplayManager> mgr; + RefPtr<VRControllerManager> controllerMgr; + + /** + * We must add the VRDisplayManager's to mManagers in a careful order to + * ensure that we don't detect the same VRDisplay from multiple API's. + * + * Oculus comes first, as it will only enumerate Oculus HMD's and is the + * native interface for Oculus HMD's. + * + * OpenvR comes second, as it is the native interface for HTC Vive + * which is the most common HMD at this time. + * + * OSVR will be used if Oculus SDK and OpenVR don't detect any HMDS, + * to support everyone else. 
+ */ + +#if defined(XP_WIN) + // The Oculus runtime is supported only on Windows + mgr = VRDisplayManagerOculus::Create(); + if (mgr) { + mManagers.AppendElement(mgr); + } +#endif + +#if defined(XP_WIN) || defined(XP_MACOSX) || defined(XP_LINUX) + // OpenVR is cross platform compatible + mgr = VRDisplayManagerOpenVR::Create(); + if (mgr) { + mManagers.AppendElement(mgr); + } + + controllerMgr = VRControllerManagerOpenVR::Create(); + if (mgr) { + mControllerManagers.AppendElement(controllerMgr); + } + + // OSVR is cross platform compatible + mgr = VRDisplayManagerOSVR::Create(); + if (mgr) { + mManagers.AppendElement(mgr); + } +#endif + // Enable gamepad extensions while VR is enabled. + if (gfxPrefs::VREnabled()) { + Preferences::SetBool("dom.gamepad.extensions.enabled", true); + } +} + +VRManager::~VRManager() +{ + MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(!mInitialized); + MOZ_COUNT_DTOR(VRManager); +} + +void +VRManager::Destroy() +{ + mVRDisplays.Clear(); + for (uint32_t i = 0; i < mManagers.Length(); ++i) { + mManagers[i]->Destroy(); + } + + mVRControllers.Clear(); + for (uint32_t i = 0; i < mControllerManagers.Length(); ++i) { + mControllerManagers[i]->Destroy(); + } + mInitialized = false; +} + +void +VRManager::Init() +{ + for (uint32_t i = 0; i < mManagers.Length(); ++i) { + mManagers[i]->Init(); + } + + for (uint32_t i = 0; i < mControllerManagers.Length(); ++i) { + mControllerManagers[i]->Init(); + } + mInitialized = true; +} + +/* static */VRManager* +VRManager::Get() +{ + MOZ_ASSERT(sVRManagerSingleton != nullptr); + + return sVRManagerSingleton; +} + +void +VRManager::AddVRManagerParent(VRManagerParent* aVRManagerParent) +{ + if (mVRManagerParents.IsEmpty()) { + Init(); + } + mVRManagerParents.PutEntry(aVRManagerParent); +} + +void +VRManager::RemoveVRManagerParent(VRManagerParent* aVRManagerParent) +{ + mVRManagerParents.RemoveEntry(aVRManagerParent); + if (mVRManagerParents.IsEmpty()) { + Destroy(); + } +} + +void +VRManager::NotifyVsync(const TimeStamp& aVsyncTimestamp) +{ + const double kVRDisplayRefreshMaxDuration = 5000; // milliseconds + + bool bHaveEventListener = false; + + for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) { + VRManagerParent *vmp = iter.Get()->GetKey(); + if (mVRDisplays.Count()) { + Unused << vmp->SendNotifyVSync(); + } + bHaveEventListener |= vmp->HaveEventListener(); + } + + for (auto iter = mVRDisplays.Iter(); !iter.Done(); iter.Next()) { + gfx::VRDisplayHost* display = iter.UserData(); + display->NotifyVSync(); + } + + if (bHaveEventListener) { + for (uint32_t i = 0; i < mControllerManagers.Length(); ++i) { + mControllerManagers[i]->HandleInput(); + } + // If content has set an EventHandler to be notified of VR display events + // we must continually refresh the VR display enumeration to check + // for events that we must fire such as Window.onvrdisplayconnect + // Note that enumeration itself may activate display hardware, such + // as Oculus, so we only do this when we know we are displaying content + // that is looking for VR displays. + if (mLastRefreshTime.IsNull()) { + // This is the first vsync, must refresh VR displays + RefreshVRDisplays(); + RefreshVRControllers(); + mLastRefreshTime = TimeStamp::Now(); + } else { + // We don't have to do this every frame, so check if we + // have refreshed recently. 
+ TimeDuration duration = TimeStamp::Now() - mLastRefreshTime; + if (duration.ToMilliseconds() > kVRDisplayRefreshMaxDuration) { + RefreshVRDisplays(); + RefreshVRControllers(); + mLastRefreshTime = TimeStamp::Now(); + } + } + } +} + +void +VRManager::NotifyVRVsync(const uint32_t& aDisplayID) +{ + for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) { + Unused << iter.Get()->GetKey()->SendNotifyVRVSync(aDisplayID); + } +} + +void +VRManager::RefreshVRDisplays(bool aMustDispatch) +{ + nsTArray<RefPtr<gfx::VRDisplayHost> > displays; + + /** We don't wish to enumerate the same display from multiple managers, + * so stop as soon as we get a display. + * It is still possible to get multiple displays from a single manager, + * but do not wish to mix-and-match for risk of reporting a duplicate. + * + * XXX - Perhaps there will be a better way to detect duplicate displays + * in the future. + */ + for (uint32_t i = 0; i < mManagers.Length() && displays.Length() == 0; ++i) { + mManagers[i]->GetHMDs(displays); + } + + bool displayInfoChanged = false; + + if (displays.Length() != mVRDisplays.Count()) { + // Catch cases where a VR display has been removed + displayInfoChanged = true; + } + + for (const auto& display: displays) { + if (!GetDisplay(display->GetDisplayInfo().GetDisplayID())) { + // This is a new display + displayInfoChanged = true; + break; + } + + if (display->CheckClearDisplayInfoDirty()) { + // This display's info has changed + displayInfoChanged = true; + break; + } + } + + if (displayInfoChanged) { + mVRDisplays.Clear(); + for (const auto& display: displays) { + mVRDisplays.Put(display->GetDisplayInfo().GetDisplayID(), display); + } + } + + if (displayInfoChanged || aMustDispatch) { + DispatchVRDisplayInfoUpdate(); + } +} + +void +VRManager::DispatchVRDisplayInfoUpdate() +{ + nsTArray<VRDisplayInfo> update; + GetVRDisplayInfo(update); + + for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) { + Unused << iter.Get()->GetKey()->SendUpdateDisplayInfo(update); + } +} + + +/** + * Get any VR displays that have already been enumerated without + * activating any new devices. 
+ */ +void +VRManager::GetVRDisplayInfo(nsTArray<VRDisplayInfo>& aDisplayInfo) +{ + aDisplayInfo.Clear(); + for (auto iter = mVRDisplays.Iter(); !iter.Done(); iter.Next()) { + gfx::VRDisplayHost* display = iter.UserData(); + aDisplayInfo.AppendElement(VRDisplayInfo(display->GetDisplayInfo())); + } +} + +RefPtr<gfx::VRDisplayHost> +VRManager::GetDisplay(const uint32_t& aDisplayID) +{ + RefPtr<gfx::VRDisplayHost> display; + if (mVRDisplays.Get(aDisplayID, getter_AddRefs(display))) { + return display; + } + return nullptr; +} + +void +VRManager::SubmitFrame(VRLayerParent* aLayer, layers::PTextureParent* aTexture, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) +{ + TextureHost* th = TextureHost::AsTextureHost(aTexture); + mLastFrame = th; + RefPtr<VRDisplayHost> display = GetDisplay(aLayer->GetDisplayID()); + if (display) { + display->SubmitFrame(aLayer, 0, aTexture, aLeftEyeRect, aRightEyeRect); + } +} + +RefPtr<gfx::VRControllerHost> +VRManager::GetController(const uint32_t& aControllerID) +{ + RefPtr<gfx::VRControllerHost> controller; + if (mVRControllers.Get(aControllerID, getter_AddRefs(controller))) { + return controller; + } + return nullptr; +} + +void +VRManager::GetVRControllerInfo(nsTArray<VRControllerInfo>& aControllerInfo) +{ + aControllerInfo.Clear(); + for (auto iter = mVRControllers.Iter(); !iter.Done(); iter.Next()) { + gfx::VRControllerHost* controller = iter.UserData(); + aControllerInfo.AppendElement(VRControllerInfo(controller->GetControllerInfo())); + } +} + +void +VRManager::RefreshVRControllers() +{ + nsTArray<RefPtr<gfx::VRControllerHost>> controllers; + + for (uint32_t i = 0; i < mControllerManagers.Length() + && controllers.Length() == 0; ++i) { + mControllerManagers[i]->GetControllers(controllers); + } + + bool controllerInfoChanged = false; + + if (controllers.Length() != mVRControllers.Count()) { + // Catch cases where VR controllers has been removed + controllerInfoChanged = true; + } + + for (const auto& controller : controllers) { + if (!GetController(controller->GetControllerInfo().GetControllerID())) { + // This is a new controller + controllerInfoChanged = true; + break; + } + } + + if (controllerInfoChanged) { + mVRControllers.Clear(); + for (const auto& controller : controllers) { + mVRControllers.Put(controller->GetControllerInfo().GetControllerID(), + controller); + } + } +} + +void +VRManager::ScanForDevices() +{ + for (uint32_t i = 0; i < mControllerManagers.Length(); ++i) { + mControllerManagers[i]->ScanForDevices(); + } +} + +template<class T> +void +VRManager::NotifyGamepadChange(const T& aInfo) +{ + dom::GamepadChangeEvent e(aInfo); + + for (auto iter = mVRManagerParents.Iter(); !iter.Done(); iter.Next()) { + Unused << iter.Get()->GetKey()->SendGamepadUpdate(e); + } +} + +} // namespace gfx +} // namespace mozilla diff --git a/gfx/vr/VRManager.h b/gfx/vr/VRManager.h new file mode 100644 index 000000000..b46a3b58f --- /dev/null +++ b/gfx/vr/VRManager.h @@ -0,0 +1,88 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_MANAGER_H +#define GFX_VR_MANAGER_H + +#include "nsRefPtrHashtable.h" +#include "nsTArray.h" +#include "nsTHashtable.h" +#include "nsDataHashtable.h" +#include "mozilla/TimeStamp.h" +#include "gfxVR.h" + +namespace mozilla { +namespace layers { +class TextureHost; +} +namespace gfx { + +class VRLayerParent; +class VRManagerParent; +class VRDisplayHost; +class VRControllerManager; + +class VRManager +{ + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(mozilla::gfx::VRManager) + +public: + static void ManagerInit(); + static VRManager* Get(); + + void AddVRManagerParent(VRManagerParent* aVRManagerParent); + void RemoveVRManagerParent(VRManagerParent* aVRManagerParent); + + void NotifyVsync(const TimeStamp& aVsyncTimestamp); + void NotifyVRVsync(const uint32_t& aDisplayID); + void RefreshVRDisplays(bool aMustDispatch = false); + void ScanForDevices(); + template<class T> void NotifyGamepadChange(const T& aInfo); + RefPtr<gfx::VRDisplayHost> GetDisplay(const uint32_t& aDisplayID); + void GetVRDisplayInfo(nsTArray<VRDisplayInfo>& aDisplayInfo); + + void SubmitFrame(VRLayerParent* aLayer, layers::PTextureParent* aTexture, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect); + RefPtr<gfx::VRControllerHost> GetController(const uint32_t& aControllerID); + void GetVRControllerInfo(nsTArray<VRControllerInfo>& aControllerInfo); + +protected: + VRManager(); + ~VRManager(); + +private: + RefPtr<layers::TextureHost> mLastFrame; + + void Init(); + void Destroy(); + + void DispatchVRDisplayInfoUpdate(); + void RefreshVRControllers(); + + typedef nsTHashtable<nsRefPtrHashKey<VRManagerParent>> VRManagerParentSet; + VRManagerParentSet mVRManagerParents; + + typedef nsTArray<RefPtr<VRDisplayManager>> VRDisplayManagerArray; + VRDisplayManagerArray mManagers; + + typedef nsTArray<RefPtr<VRControllerManager>> VRControllerManagerArray; + VRControllerManagerArray mControllerManagers; + + typedef nsRefPtrHashtable<nsUint32HashKey, gfx::VRDisplayHost> VRDisplayHostHashMap; + VRDisplayHostHashMap mVRDisplays; + + typedef nsRefPtrHashtable<nsUint32HashKey, gfx::VRControllerHost> VRControllerHostHashMap; + VRControllerHostHashMap mVRControllers; + + Atomic<bool> mInitialized; + + TimeStamp mLastRefreshTime; +}; + +} // namespace gfx +} // namespace mozilla + +#endif // GFX_VR_MANAGER_H diff --git a/gfx/vr/gfxVR.cpp b/gfx/vr/gfxVR.cpp new file mode 100644 index 000000000..c0babb4f8 --- /dev/null +++ b/gfx/vr/gfxVR.cpp @@ -0,0 +1,127 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include <math.h> + +#include "gfxVR.h" +#ifdef MOZ_GAMEPAD +#include "mozilla/dom/GamepadEventTypes.h" +#include "mozilla/dom/GamepadBinding.h" +#endif + +#ifndef M_PI +# define M_PI 3.14159265358979323846 +#endif + +using namespace mozilla; +using namespace mozilla::gfx; + +Atomic<uint32_t> VRDisplayManager::sDisplayBase(0); +Atomic<uint32_t> VRControllerManager::sControllerBase(0); + +/* static */ uint32_t +VRDisplayManager::AllocateDisplayID() +{ + return ++sDisplayBase; +} + +Matrix4x4 +VRFieldOfView::ConstructProjectionMatrix(float zNear, float zFar, + bool rightHanded) const +{ + float upTan = tan(upDegrees * M_PI / 180.0); + float downTan = tan(downDegrees * M_PI / 180.0); + float leftTan = tan(leftDegrees * M_PI / 180.0); + float rightTan = tan(rightDegrees * M_PI / 180.0); + + float handednessScale = rightHanded ? -1.0 : 1.0; + + float pxscale = 2.0f / (leftTan + rightTan); + float pxoffset = (leftTan - rightTan) * pxscale * 0.5; + float pyscale = 2.0f / (upTan + downTan); + float pyoffset = (upTan - downTan) * pyscale * 0.5; + + Matrix4x4 mobj; + float *m = &mobj._11; + + m[0*4+0] = pxscale; + m[2*4+0] = pxoffset * handednessScale; + + m[1*4+1] = pyscale; + m[2*4+1] = -pyoffset * handednessScale; + + m[2*4+2] = zFar / (zNear - zFar) * -handednessScale; + m[3*4+2] = (zFar * zNear) / (zNear - zFar); + + m[2*4+3] = handednessScale; + m[3*4+3] = 0.0f; + + return mobj; +} + +/* static */ uint32_t +VRControllerManager::AllocateControllerID() +{ + return ++sControllerBase; +} + +void +VRControllerManager::AddGamepad(const char* aID, uint32_t aMapping, + uint32_t aNumButtons, uint32_t aNumAxes) +{ + dom::GamepadAdded a(NS_ConvertUTF8toUTF16(nsDependentCString(aID)), mControllerCount, + aMapping, dom::GamepadServiceType::VR, aNumButtons, + aNumAxes); + + VRManager* vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyGamepadChange<dom::GamepadAdded>(a); +} + +void +VRControllerManager::RemoveGamepad(uint32_t aIndex) +{ + dom::GamepadRemoved a(aIndex, dom::GamepadServiceType::VR); + + VRManager* vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyGamepadChange<dom::GamepadRemoved>(a); +} + +void +VRControllerManager::NewButtonEvent(uint32_t aIndex, uint32_t aButton, + bool aPressed) +{ + dom::GamepadButtonInformation a(aIndex, dom::GamepadServiceType::VR, + aButton, aPressed, aPressed ? 1.0L : 0.0L); + + VRManager* vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyGamepadChange<dom::GamepadButtonInformation>(a); +} + +void +VRControllerManager::NewAxisMove(uint32_t aIndex, uint32_t aAxis, + double aValue) +{ + dom::GamepadAxisInformation a(aIndex, dom::GamepadServiceType::VR, + aAxis, aValue); + + VRManager* vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyGamepadChange<dom::GamepadAxisInformation>(a); +} + +void +VRControllerManager::NewPoseState(uint32_t aIndex, + const dom::GamepadPoseState& aPose) +{ + dom::GamepadPoseInformation a(aIndex, dom::GamepadServiceType::VR, + aPose); + + VRManager* vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyGamepadChange<dom::GamepadPoseInformation>(a); +} diff --git a/gfx/vr/gfxVR.h b/gfx/vr/gfxVR.h new file mode 100644 index 000000000..b46875741 --- /dev/null +++ b/gfx/vr/gfxVR.h @@ -0,0 +1,285 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_H +#define GFX_VR_H + +#include "nsTArray.h" +#include "nsString.h" +#include "nsCOMPtr.h" +#include "mozilla/RefPtr.h" +#include "mozilla/gfx/2D.h" +#include "mozilla/Atomics.h" +#include "mozilla/EnumeratedArray.h" +#include "mozilla/TimeStamp.h" +#include "mozilla/TypedEnumBits.h" + +namespace mozilla { +namespace layers { +class PTextureParent; +} +namespace dom { +enum class GamepadMappingType : uint32_t; +struct GamepadPoseState; +} +namespace gfx { +class VRLayerParent; +class VRDisplayHost; +class VRControllerHost; + +enum class VRDeviceType : uint16_t { + Oculus, + OpenVR, + OSVR, + NumVRDeviceTypes +}; + +enum class VRDisplayCapabilityFlags : uint16_t { + Cap_None = 0, + /** + * Cap_Position is set if the VRDisplay is capable of tracking its position. + */ + Cap_Position = 1 << 1, + /** + * Cap_Orientation is set if the VRDisplay is capable of tracking its orientation. + */ + Cap_Orientation = 1 << 2, + /** + * Cap_Present is set if the VRDisplay is capable of presenting content to an + * HMD or similar device. Can be used to indicate "magic window" devices that + * are capable of 6DoF tracking but for which requestPresent is not meaningful. + * If false then calls to requestPresent should always fail, and + * getEyeParameters should return null. + */ + Cap_Present = 1 << 3, + /** + * Cap_External is set if the VRDisplay is separate from the device's + * primary display. If presenting VR content will obscure + * other content on the device, this should be un-set. When + * un-set, the application should not attempt to mirror VR content + * or update non-VR UI because that content will not be visible. + */ + Cap_External = 1 << 4, + /** + * Cap_AngularAcceleration is set if the VRDisplay is capable of tracking its + * angular acceleration. + */ + Cap_AngularAcceleration = 1 << 5, + /** + * Cap_LinearAcceleration is set if the VRDisplay is capable of tracking its + * linear acceleration. + */ + Cap_LinearAcceleration = 1 << 6, + /** + * Cap_StageParameters is set if the VRDisplay is capable of room scale VR + * and can report the StageParameters to describe the space. 
+ */ + Cap_StageParameters = 1 << 7, + /** + * Cap_All used for validity checking during IPC serialization + */ + Cap_All = (1 << 8) - 1 +}; + +MOZ_MAKE_ENUM_CLASS_BITWISE_OPERATORS(VRDisplayCapabilityFlags) + +struct VRFieldOfView { + VRFieldOfView() {} + VRFieldOfView(double up, double right, double down, double left) + : upDegrees(up), rightDegrees(right), downDegrees(down), leftDegrees(left) + {} + + void SetFromTanRadians(double up, double right, double down, double left) + { + upDegrees = atan(up) * 180.0 / M_PI; + rightDegrees = atan(right) * 180.0 / M_PI; + downDegrees = atan(down) * 180.0 / M_PI; + leftDegrees = atan(left) * 180.0 / M_PI; + } + + bool operator==(const VRFieldOfView& other) const { + return other.upDegrees == upDegrees && + other.downDegrees == downDegrees && + other.rightDegrees == rightDegrees && + other.leftDegrees == leftDegrees; + } + + bool operator!=(const VRFieldOfView& other) const { + return !(*this == other); + } + + bool IsZero() const { + return upDegrees == 0.0 || + rightDegrees == 0.0 || + downDegrees == 0.0 || + leftDegrees == 0.0; + } + + Matrix4x4 ConstructProjectionMatrix(float zNear, float zFar, bool rightHanded) const; + + double upDegrees; + double rightDegrees; + double downDegrees; + double leftDegrees; +}; + +struct VRDisplayInfo +{ + VRDeviceType GetType() const { return mType; } + uint32_t GetDisplayID() const { return mDisplayID; } + const nsCString& GetDisplayName() const { return mDisplayName; } + VRDisplayCapabilityFlags GetCapabilities() const { return mCapabilityFlags; } + + const IntSize& SuggestedEyeResolution() const { return mEyeResolution; } + const Point3D& GetEyeTranslation(uint32_t whichEye) const { return mEyeTranslation[whichEye]; } + const VRFieldOfView& GetEyeFOV(uint32_t whichEye) const { return mEyeFOV[whichEye]; } + bool GetIsConnected() const { return mIsConnected; } + bool GetIsPresenting() const { return mIsPresenting; } + const Size& GetStageSize() const { return mStageSize; } + const Matrix4x4& GetSittingToStandingTransform() const { return mSittingToStandingTransform; } + + enum Eye { + Eye_Left, + Eye_Right, + NumEyes + }; + + uint32_t mDisplayID; + VRDeviceType mType; + nsCString mDisplayName; + VRDisplayCapabilityFlags mCapabilityFlags; + VRFieldOfView mEyeFOV[VRDisplayInfo::NumEyes]; + Point3D mEyeTranslation[VRDisplayInfo::NumEyes]; + IntSize mEyeResolution; + bool mIsConnected; + bool mIsPresenting; + Size mStageSize; + Matrix4x4 mSittingToStandingTransform; + + bool operator==(const VRDisplayInfo& other) const { + return mType == other.mType && + mDisplayID == other.mDisplayID && + mDisplayName == other.mDisplayName && + mCapabilityFlags == other.mCapabilityFlags && + mEyeResolution == other.mEyeResolution && + mIsConnected == other.mIsConnected && + mIsPresenting == other.mIsPresenting && + mEyeFOV[0] == other.mEyeFOV[0] && + mEyeFOV[1] == other.mEyeFOV[1] && + mEyeTranslation[0] == other.mEyeTranslation[0] && + mEyeTranslation[1] == other.mEyeTranslation[1] && + mStageSize == other.mStageSize && + mSittingToStandingTransform == other.mSittingToStandingTransform; + } + + bool operator!=(const VRDisplayInfo& other) const { + return !(*this == other); + } +}; + +struct VRHMDSensorState { + double timestamp; + int32_t inputFrameID; + VRDisplayCapabilityFlags flags; + float orientation[4]; + float position[3]; + float angularVelocity[3]; + float angularAcceleration[3]; + float linearVelocity[3]; + float linearAcceleration[3]; + + void Clear() { + memset(this, 0, sizeof(VRHMDSensorState)); + } +}; + +class 
VRDisplayManager { +public: + static uint32_t AllocateDisplayID(); + +protected: + static Atomic<uint32_t> sDisplayBase; + +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRDisplayManager) + + virtual bool Init() = 0; + virtual void Destroy() = 0; + virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) = 0; + +protected: + VRDisplayManager() { } + virtual ~VRDisplayManager() { } +}; + +struct VRControllerInfo +{ + VRDeviceType GetType() const { return mType; } + uint32_t GetControllerID() const { return mControllerID; } + const nsCString& GetControllerName() const { return mControllerName; } + uint32_t GetMappingType() const { return mMappingType; } + uint32_t GetNumButtons() const { return mNumButtons; } + uint32_t GetNumAxes() const { return mNumAxes; } + + uint32_t mControllerID; + VRDeviceType mType; + nsCString mControllerName; + uint32_t mMappingType; + uint32_t mNumButtons; + uint32_t mNumAxes; + + bool operator==(const VRControllerInfo& other) const { + return mType == other.mType && + mControllerID == other.mControllerID && + mControllerName == other.mControllerName && + mMappingType == other.mMappingType && + mNumButtons == other.mNumButtons && + mNumAxes == other.mNumAxes; + } + + bool operator!=(const VRControllerInfo& other) const { + return !(*this == other); + } +}; + +class VRControllerManager { +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRControllerManager) + + static uint32_t AllocateControllerID(); + virtual bool Init() = 0; + virtual void Destroy() = 0; + virtual void HandleInput() = 0; + virtual void GetControllers(nsTArray<RefPtr<VRControllerHost>>& aControllerResult) = 0; + virtual void ScanForDevices() = 0; + void NewButtonEvent(uint32_t aIndex, uint32_t aButton, bool aPressed); + void NewAxisMove(uint32_t aIndex, uint32_t aAxis, double aValue); + void NewPoseState(uint32_t aIndex, const dom::GamepadPoseState& aPose); + void AddGamepad(const char* aID, uint32_t aMapping, + uint32_t aNumButtons, uint32_t aNumAxes); + void RemoveGamepad(uint32_t aIndex); + +protected: + VRControllerManager() : mInstalled(false), mControllerCount(0) {} + virtual ~VRControllerManager() {} + + bool mInstalled; + uint32_t mControllerCount; + static Atomic<uint32_t> sControllerBase; + +private: + virtual void HandleButtonPress(uint32_t aControllerIdx, + uint64_t aButtonPressed) = 0; + virtual void HandleAxisMove(uint32_t aControllerIdx, uint32_t aAxis, + float aValue) = 0; + virtual void HandlePoseTracking(uint32_t aControllerIdx, + const dom::GamepadPoseState& aPose, + VRControllerHost* aController) = 0; +}; + +} // namespace gfx +} // namespace mozilla + +#endif /* GFX_VR_H */ diff --git a/gfx/vr/gfxVROSVR.cpp b/gfx/vr/gfxVROSVR.cpp new file mode 100644 index 000000000..8b275e923 --- /dev/null +++ b/gfx/vr/gfxVROSVR.cpp @@ -0,0 +1,529 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include <math.h> + +#include "prlink.h" +#include "prmem.h" +#include "prenv.h" +#include "gfxPrefs.h" +#include "nsString.h" +#include "mozilla/Preferences.h" + +#include "mozilla/gfx/Quaternion.h" + +#ifdef XP_WIN +#include "../layers/d3d11/CompositorD3D11.h" +#include "../layers/d3d11/TextureD3D11.h" +#endif + +#include "gfxVROSVR.h" + +#ifndef M_PI +#define M_PI 3.14159265358979323846 +#endif + +using namespace mozilla::layers; +using namespace mozilla::gfx; +using namespace mozilla::gfx::impl; + +namespace { +// need to typedef functions that will be used in the code below +extern "C" { +typedef OSVR_ClientContext (*pfn_osvrClientInit)( + const char applicationIdentifier[], uint32_t flags); +typedef OSVR_ReturnCode (*pfn_osvrClientShutdown)(OSVR_ClientContext ctx); +typedef OSVR_ReturnCode (*pfn_osvrClientUpdate)(OSVR_ClientContext ctx); +typedef OSVR_ReturnCode (*pfn_osvrClientCheckStatus)(OSVR_ClientContext ctx); +typedef OSVR_ReturnCode (*pfn_osvrClientGetInterface)( + OSVR_ClientContext ctx, const char path[], OSVR_ClientInterface* iface); +typedef OSVR_ReturnCode (*pfn_osvrClientFreeInterface)( + OSVR_ClientContext ctx, OSVR_ClientInterface iface); +typedef OSVR_ReturnCode (*pfn_osvrGetOrientationState)( + OSVR_ClientInterface iface, OSVR_TimeValue* timestamp, + OSVR_OrientationState* state); +typedef OSVR_ReturnCode (*pfn_osvrGetPositionState)(OSVR_ClientInterface iface, + OSVR_TimeValue* timestamp, + OSVR_PositionState* state); +typedef OSVR_ReturnCode (*pfn_osvrClientGetDisplay)(OSVR_ClientContext ctx, + OSVR_DisplayConfig* disp); +typedef OSVR_ReturnCode (*pfn_osvrClientFreeDisplay)(OSVR_DisplayConfig disp); +typedef OSVR_ReturnCode (*pfn_osvrClientGetNumEyesForViewer)( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount* eyes); +typedef OSVR_ReturnCode (*pfn_osvrClientGetViewerEyePose)( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_Pose3* pose); +typedef OSVR_ReturnCode (*pfn_osvrClientGetDisplayDimensions)( + OSVR_DisplayConfig disp, OSVR_DisplayInputCount displayInputIndex, + OSVR_DisplayDimension* width, OSVR_DisplayDimension* height); +typedef OSVR_ReturnCode ( + *pfn_osvrClientGetViewerEyeSurfaceProjectionClippingPlanes)( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, double* left, double* right, double* bottom, + double* top); +typedef OSVR_ReturnCode (*pfn_osvrClientGetRelativeViewportForViewerEyeSurface)( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, OSVR_ViewportDimension* left, + OSVR_ViewportDimension* bottom, OSVR_ViewportDimension* width, + OSVR_ViewportDimension* height); +typedef OSVR_ReturnCode (*pfn_osvrClientGetViewerEyeSurfaceProjectionMatrixf)( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, float near, float far, + OSVR_MatrixConventions flags, float* matrix); +typedef OSVR_ReturnCode (*pfn_osvrClientCheckDisplayStartup)( + OSVR_DisplayConfig disp); +typedef OSVR_ReturnCode (*pfn_osvrClientSetRoomRotationUsingHead)( + OSVR_ClientContext ctx); +} + +static pfn_osvrClientInit osvr_ClientInit = nullptr; +static pfn_osvrClientShutdown osvr_ClientShutdown = nullptr; +static pfn_osvrClientUpdate osvr_ClientUpdate = nullptr; +static pfn_osvrClientCheckStatus osvr_ClientCheckStatus = nullptr; +static pfn_osvrClientGetInterface osvr_ClientGetInterface = nullptr; +static pfn_osvrClientFreeInterface osvr_ClientFreeInterface = nullptr; +static 
pfn_osvrGetOrientationState osvr_GetOrientationState = nullptr; +static pfn_osvrGetPositionState osvr_GetPositionState = nullptr; +static pfn_osvrClientGetDisplay osvr_ClientGetDisplay = nullptr; +static pfn_osvrClientFreeDisplay osvr_ClientFreeDisplay = nullptr; +static pfn_osvrClientGetNumEyesForViewer osvr_ClientGetNumEyesForViewer = + nullptr; +static pfn_osvrClientGetViewerEyePose osvr_ClientGetViewerEyePose = nullptr; +static pfn_osvrClientGetDisplayDimensions osvr_ClientGetDisplayDimensions = + nullptr; +static pfn_osvrClientGetViewerEyeSurfaceProjectionClippingPlanes + osvr_ClientGetViewerEyeSurfaceProjectionClippingPlanes = nullptr; +static pfn_osvrClientGetRelativeViewportForViewerEyeSurface + osvr_ClientGetRelativeViewportForViewerEyeSurface = nullptr; +static pfn_osvrClientGetViewerEyeSurfaceProjectionMatrixf + osvr_ClientGetViewerEyeSurfaceProjectionMatrixf = nullptr; +static pfn_osvrClientCheckDisplayStartup osvr_ClientCheckDisplayStartup = + nullptr; +static pfn_osvrClientSetRoomRotationUsingHead + osvr_ClientSetRoomRotationUsingHead = nullptr; + +bool +LoadOSVRRuntime() +{ + static PRLibrary* osvrUtilLib = nullptr; + static PRLibrary* osvrCommonLib = nullptr; + static PRLibrary* osvrClientLib = nullptr; + static PRLibrary* osvrClientKitLib = nullptr; + //this looks up the path in the about:config setting, from greprefs.js or modules\libpref\init\all.js + nsAdoptingCString osvrUtilPath = + mozilla::Preferences::GetCString("gfx.vr.osvr.utilLibPath"); + nsAdoptingCString osvrCommonPath = + mozilla::Preferences::GetCString("gfx.vr.osvr.commonLibPath"); + nsAdoptingCString osvrClientPath = + mozilla::Preferences::GetCString("gfx.vr.osvr.clientLibPath"); + nsAdoptingCString osvrClientKitPath = + mozilla::Preferences::GetCString("gfx.vr.osvr.clientKitLibPath"); + + //we need all the libs to be valid + if ((!osvrUtilPath) || (!osvrCommonPath) || (!osvrClientPath) || + (!osvrClientKitPath)) { + return false; + } + + osvrUtilLib = PR_LoadLibrary(osvrUtilPath.BeginReading()); + osvrCommonLib = PR_LoadLibrary(osvrCommonPath.BeginReading()); + osvrClientLib = PR_LoadLibrary(osvrClientPath.BeginReading()); + osvrClientKitLib = PR_LoadLibrary(osvrClientKitPath.BeginReading()); + + if (!osvrUtilLib) { + printf_stderr("[OSVR] Failed to load OSVR Util library!\n"); + return false; + } + if (!osvrCommonLib) { + printf_stderr("[OSVR] Failed to load OSVR Common library!\n"); + return false; + } + if (!osvrClientLib) { + printf_stderr("[OSVR] Failed to load OSVR Client library!\n"); + return false; + } + if (!osvrClientKitLib) { + printf_stderr("[OSVR] Failed to load OSVR ClientKit library!\n"); + return false; + } + +// make sure all functions that we'll be using are available +#define REQUIRE_FUNCTION(_x) \ + do { \ + *(void**) & osvr_##_x = \ + (void*)PR_FindSymbol(osvrClientKitLib, "osvr" #_x); \ + if (!osvr_##_x) { \ + printf_stderr("osvr" #_x " symbol missing\n"); \ + goto fail; \ + } \ + } while (0) + + REQUIRE_FUNCTION(ClientInit); + REQUIRE_FUNCTION(ClientShutdown); + REQUIRE_FUNCTION(ClientUpdate); + REQUIRE_FUNCTION(ClientCheckStatus); + REQUIRE_FUNCTION(ClientGetInterface); + REQUIRE_FUNCTION(ClientFreeInterface); + REQUIRE_FUNCTION(GetOrientationState); + REQUIRE_FUNCTION(GetPositionState); + REQUIRE_FUNCTION(ClientGetDisplay); + REQUIRE_FUNCTION(ClientFreeDisplay); + REQUIRE_FUNCTION(ClientGetNumEyesForViewer); + REQUIRE_FUNCTION(ClientGetViewerEyePose); + REQUIRE_FUNCTION(ClientGetDisplayDimensions); + REQUIRE_FUNCTION(ClientGetViewerEyeSurfaceProjectionClippingPlanes); + 
REQUIRE_FUNCTION(ClientGetRelativeViewportForViewerEyeSurface); + REQUIRE_FUNCTION(ClientGetViewerEyeSurfaceProjectionMatrixf); + REQUIRE_FUNCTION(ClientCheckDisplayStartup); + REQUIRE_FUNCTION(ClientSetRoomRotationUsingHead); + +#undef REQUIRE_FUNCTION + + return true; + +fail: + return false; +} + +} // namespace + +mozilla::gfx::VRFieldOfView +SetFromTanRadians(double left, double right, double bottom, double top) +{ + mozilla::gfx::VRFieldOfView fovInfo; + fovInfo.leftDegrees = atan(left) * 180.0 / M_PI; + fovInfo.rightDegrees = atan(right) * 180.0 / M_PI; + fovInfo.upDegrees = atan(top) * 180.0 / M_PI; + fovInfo.downDegrees = atan(bottom) * 180.0 / M_PI; + return fovInfo; +} + +VRDisplayOSVR::VRDisplayOSVR(OSVR_ClientContext* context, + OSVR_ClientInterface* iface, + OSVR_DisplayConfig* display) + : VRDisplayHost(VRDeviceType::OSVR) + , m_ctx(context) + , m_iface(iface) + , m_display(display) +{ + + MOZ_COUNT_CTOR_INHERITED(VRDisplayOSVR, VRDisplayHost); + + mDisplayInfo.mIsConnected = true; + mDisplayInfo.mDisplayName.AssignLiteral("OSVR HMD"); + mDisplayInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None; + mDisplayInfo.mCapabilityFlags = + VRDisplayCapabilityFlags::Cap_Orientation | VRDisplayCapabilityFlags::Cap_Position; + + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_External; + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Present; + + // XXX OSVR display topology allows for more than one viewer + // will assume only one viewer for now (most likely stay that way) + + OSVR_EyeCount numEyes; + osvr_ClientGetNumEyesForViewer(*m_display, 0, &numEyes); + + for (uint8_t eye = 0; eye < numEyes; eye++) { + double left, right, bottom, top; + // XXX for now there is only one surface per eye + osvr_ClientGetViewerEyeSurfaceProjectionClippingPlanes( + *m_display, 0, eye, 0, &left, &right, &bottom, &top); + mDisplayInfo.mEyeFOV[eye] = + SetFromTanRadians(-left, right, -bottom, top); + } + + // XXX Assuming there is only one display input for now + // however, it's possible to have more than one (dSight with 2 HDMI inputs) + OSVR_DisplayDimension width, height; + osvr_ClientGetDisplayDimensions(*m_display, 0, &width, &height); + + + for (uint8_t eye = 0; eye < numEyes; eye++) { + + OSVR_ViewportDimension l, b, w, h; + osvr_ClientGetRelativeViewportForViewerEyeSurface(*m_display, 0, eye, 0, &l, + &b, &w, &h); + mDisplayInfo.mEyeResolution.width = w; + mDisplayInfo.mEyeResolution.height = h; + OSVR_Pose3 eyePose; + // Viewer eye pose may not be immediately available, update client context until we get it + OSVR_ReturnCode ret = + osvr_ClientGetViewerEyePose(*m_display, 0, eye, &eyePose); + while (ret != OSVR_RETURN_SUCCESS) { + osvr_ClientUpdate(*m_ctx); + ret = osvr_ClientGetViewerEyePose(*m_display, 0, eye, &eyePose); + } + mDisplayInfo.mEyeTranslation[eye].x = eyePose.translation.data[0]; + mDisplayInfo.mEyeTranslation[eye].y = eyePose.translation.data[1]; + mDisplayInfo.mEyeTranslation[eye].z = eyePose.translation.data[2]; + } +} + +void +VRDisplayOSVR::Destroy() +{ + // destroy non-owning pointers + m_ctx = nullptr; + m_iface = nullptr; + m_display = nullptr; +} + +void +VRDisplayOSVR::ZeroSensor() +{ + // recenter pose aka reset yaw + osvr_ClientSetRoomRotationUsingHead(*m_ctx); +} + +VRHMDSensorState +VRDisplayOSVR::GetSensorState() +{ + + //update client context before anything + //this usually goes into app's mainloop + osvr_ClientUpdate(*m_ctx); + + VRHMDSensorState result; + OSVR_TimeValue timestamp; + result.Clear(); + + 
OSVR_OrientationState orientation; + + OSVR_ReturnCode ret = + osvr_GetOrientationState(*m_iface, &timestamp, &orientation); + + result.timestamp = timestamp.seconds; + + if (ret == OSVR_RETURN_SUCCESS) { + result.flags |= VRDisplayCapabilityFlags::Cap_Orientation; + result.orientation[0] = orientation.data[1]; + result.orientation[1] = orientation.data[2]; + result.orientation[2] = orientation.data[3]; + result.orientation[3] = orientation.data[0]; + } + + OSVR_PositionState position; + ret = osvr_GetPositionState(*m_iface, &timestamp, &position); + if (ret == OSVR_RETURN_SUCCESS) { + result.flags |= VRDisplayCapabilityFlags::Cap_Position; + result.position[0] = position.data[0]; + result.position[1] = position.data[1]; + result.position[2] = position.data[2]; + } + + return result; +} + +VRHMDSensorState +VRDisplayOSVR::GetImmediateSensorState() +{ + return GetSensorState(); +} + +#if defined(XP_WIN) + +void +VRDisplayOSVR::SubmitFrame(TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) +{ + // XXX Add code to submit frame +} + +#endif + +void +VRDisplayOSVR::StartPresentation() +{ + // XXX Add code to start VR Presentation +} + +void +VRDisplayOSVR::StopPresentation() +{ + // XXX Add code to end VR Presentation +} + +already_AddRefed<VRDisplayManagerOSVR> +VRDisplayManagerOSVR::Create() +{ + MOZ_ASSERT(NS_IsMainThread()); + + if (!gfxPrefs::VREnabled() || !gfxPrefs::VROSVREnabled()) { + return nullptr; + } + if (!LoadOSVRRuntime()) { + return nullptr; + } + RefPtr<VRDisplayManagerOSVR> manager = new VRDisplayManagerOSVR(); + return manager.forget(); +} + +void +VRDisplayManagerOSVR::CheckOSVRStatus() +{ + if (mOSVRInitialized) { + return; + } + + // client context must be initialized first + InitializeClientContext(); + + // update client context + osvr_ClientUpdate(m_ctx); + + // initialize interface and display if they are not initialized yet + InitializeInterface(); + InitializeDisplay(); + + // OSVR is fully initialized now + if (mClientContextInitialized && mDisplayConfigInitialized && + mInterfaceInitialized) { + mOSVRInitialized = true; + } +} + +void +VRDisplayManagerOSVR::InitializeClientContext() +{ + // already initialized + if (mClientContextInitialized) { + return; + } + + // first time creating + if (!m_ctx) { + // get client context + m_ctx = osvr_ClientInit("com.osvr.webvr", 0); + // update context + osvr_ClientUpdate(m_ctx); + // verify we are connected + if (OSVR_RETURN_SUCCESS == osvr_ClientCheckStatus(m_ctx)) { + mClientContextInitialized = true; + } + } + // client context exists but not up and running yet + else { + // update context + osvr_ClientUpdate(m_ctx); + if (OSVR_RETURN_SUCCESS == osvr_ClientCheckStatus(m_ctx)) { + mClientContextInitialized = true; + } + } +} + +void +VRDisplayManagerOSVR::InitializeInterface() +{ + // already initialized + if (mInterfaceInitialized) { + return; + } + //Client context must be initialized before getting interface + if (mClientContextInitialized) { + // m_iface will remain nullptr if no interface is returned + if (OSVR_RETURN_SUCCESS == + osvr_ClientGetInterface(m_ctx, "/me/head", &m_iface)) { + mInterfaceInitialized = true; + } + } +} + +void +VRDisplayManagerOSVR::InitializeDisplay() +{ + // display is fully configured + if (mDisplayConfigInitialized) { + return; + } + + //Client context must be initialized before getting interface + if (mClientContextInitialized) { + // first time creating display object + if (m_display == 
nullptr) { + + OSVR_ReturnCode ret = osvr_ClientGetDisplay(m_ctx, &m_display); + + if (ret == OSVR_RETURN_SUCCESS) { + osvr_ClientUpdate(m_ctx); + // display object may have been created but not fully startup + if (OSVR_RETURN_SUCCESS == osvr_ClientCheckDisplayStartup(m_display)) { + mDisplayConfigInitialized = true; + } + } + + // Typically once we get Display object, pose data is available after + // clientUpdate but sometimes it takes ~ 200 ms to get + // a succesfull connection, so we might have to run a few update cycles + } else { + + if (OSVR_RETURN_SUCCESS == osvr_ClientCheckDisplayStartup(m_display)) { + mDisplayConfigInitialized = true; + } + } + } +} + +bool +VRDisplayManagerOSVR::Init() +{ + + // OSVR server should be running in the background + // It would load plugins and take care of detecting HMDs + if (!mOSVRInitialized) { + nsIThread* thread = nullptr; + NS_GetCurrentThread(&thread); + mOSVRThread = already_AddRefed<nsIThread>(thread); + + // initialize client context + InitializeClientContext(); + // try to initialize interface + InitializeInterface(); + // try to initialize display object + InitializeDisplay(); + // verify all components are initialized + CheckOSVRStatus(); + } + + return mOSVRInitialized; +} + +void +VRDisplayManagerOSVR::Destroy() +{ + if (mOSVRInitialized) { + MOZ_ASSERT(NS_GetCurrentThread() == mOSVRThread); + mOSVRThread = nullptr; + mHMDInfo = nullptr; + mOSVRInitialized = false; + } + // client context may not have been initialized + if (m_ctx) { + osvr_ClientFreeDisplay(m_display); + } + // osvr checks that m_ctx or m_iface are not null + osvr_ClientFreeInterface(m_ctx, m_iface); + osvr_ClientShutdown(m_ctx); +} + +void +VRDisplayManagerOSVR::GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) +{ + // make sure context, interface and display are initialized + CheckOSVRStatus(); + + if (!mOSVRInitialized) { + return; + } + + mHMDInfo = new VRDisplayOSVR(&m_ctx, &m_iface, &m_display); + + if (mHMDInfo) { + aHMDResult.AppendElement(mHMDInfo); + } +} diff --git a/gfx/vr/gfxVROSVR.h b/gfx/vr/gfxVROSVR.h new file mode 100644 index 000000000..6bd6e93d2 --- /dev/null +++ b/gfx/vr/gfxVROSVR.h @@ -0,0 +1,107 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_OSVR_H +#define GFX_VR_OSVR_H + +#include "nsTArray.h" +#include "mozilla/RefPtr.h" +#include "nsThreadUtils.h" + +#include "mozilla/gfx/2D.h" +#include "mozilla/EnumeratedArray.h" + +#include "VRDisplayHost.h" + +#include <osvr/ClientKit/ClientKitC.h> +#include <osvr/ClientKit/DisplayC.h> + +namespace mozilla { +namespace gfx { +namespace impl { + +class VRDisplayOSVR : public VRDisplayHost +{ +public: + VRHMDSensorState GetSensorState() override; + VRHMDSensorState GetImmediateSensorState() override; + void ZeroSensor() override; + +protected: + virtual void StartPresentation() override; + virtual void StopPresentation() override; + +#if defined(XP_WIN) + virtual void SubmitFrame(TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) override; +#endif + +public: + explicit VRDisplayOSVR(OSVR_ClientContext* context, + OSVR_ClientInterface* iface, + OSVR_DisplayConfig* display); + +protected: + virtual ~VRDisplayOSVR() + { + Destroy(); + MOZ_COUNT_DTOR_INHERITED(VRDisplayOSVR, VRDisplayHost); + } + void Destroy(); + + OSVR_ClientContext* m_ctx; + OSVR_ClientInterface* m_iface; + OSVR_DisplayConfig* m_display; +}; + +} // namespace impl + +class VRDisplayManagerOSVR : public VRDisplayManager +{ +public: + static already_AddRefed<VRDisplayManagerOSVR> Create(); + virtual bool Init() override; + virtual void Destroy() override; + virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) override; + +protected: + VRDisplayManagerOSVR() + : mOSVRInitialized(false) + , mClientContextInitialized(false) + , mDisplayConfigInitialized(false) + , mInterfaceInitialized(false) + , m_ctx(nullptr) + , m_iface(nullptr) + , m_display(nullptr) + { + } + + RefPtr<impl::VRDisplayOSVR> mHMDInfo; + bool mOSVRInitialized; + bool mClientContextInitialized; + bool mDisplayConfigInitialized; + bool mInterfaceInitialized; + RefPtr<nsIThread> mOSVRThread; + + OSVR_ClientContext m_ctx; + OSVR_ClientInterface m_iface; + OSVR_DisplayConfig m_display; + +private: + // check if all components are initialized + // and if not, it will try to initialize them + void CheckOSVRStatus(); + void InitializeClientContext(); + void InitializeDisplay(); + void InitializeInterface(); +}; + +} // namespace gfx +} // namespace mozilla + +#endif /* GFX_VR_OSVR_H */
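The OSVR backend declared above comes up in stages: the client context, the "/me/head" interface, and the display config each become usable only after the OSVR server has been pumped a few times, so the manager re-checks all three on every GetHMDs() call instead of blocking. A minimal standalone sketch of that polling pattern, not taken from this commit and with an arbitrary retry cap added purely for illustration:

#include <osvr/ClientKit/ClientKitC.h>
#include <osvr/ClientKit/DisplayC.h>

// Pump the client context until the display reports that startup data
// (pose, viewports, projections) is available, or give up after maxTries.
static bool WaitForOSVRDisplay(OSVR_ClientContext ctx,
                               OSVR_DisplayConfig display,
                               int maxTries)
{
  for (int i = 0; i < maxTries; ++i) {
    osvr_ClientUpdate(ctx); // let the server deliver new reports
    if (OSVR_RETURN_SUCCESS == osvr_ClientCheckDisplayStartup(display)) {
      return true;
    }
  }
  return false; // caller retries on a later frame, as CheckOSVRStatus() does
}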
\ No newline at end of file diff --git a/gfx/vr/gfxVROculus.cpp b/gfx/vr/gfxVROculus.cpp new file mode 100644 index 000000000..c00a22320 --- /dev/null +++ b/gfx/vr/gfxVROculus.cpp @@ -0,0 +1,896 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef XP_WIN +#error "Oculus 1.3 runtime support only available for Windows" +#endif + +#include <math.h> + + +#include "prlink.h" +#include "prmem.h" +#include "prenv.h" +#include "gfxPrefs.h" +#include "nsString.h" +#include "mozilla/DebugOnly.h" +#include "mozilla/Preferences.h" +#include "mozilla/TimeStamp.h" +#include "mozilla/gfx/DeviceManagerDx.h" +#include "ipc/VRLayerParent.h" + +#include "mozilla/gfx/Quaternion.h" + +#include <d3d11.h> +#include "CompositorD3D11.h" +#include "TextureD3D11.h" + +#include "gfxVROculus.h" + +/** XXX The DX11 objects and quad blitting could be encapsulated + * into a separate object if either Oculus starts supporting + * non-Windows platforms or the blit is needed by other HMD\ + * drivers. + * Alternately, we could remove the extra blit for + * Oculus as well with some more refactoring. + */ + +// See CompositorD3D11Shaders.h +struct ShaderBytes { const void* mData; size_t mLength; }; +extern ShaderBytes sRGBShader; +extern ShaderBytes sLayerQuadVS; +#ifndef M_PI +# define M_PI 3.14159265358979323846 +#endif + +using namespace mozilla; +using namespace mozilla::gfx; +using namespace mozilla::gfx::impl; +using namespace mozilla::layers; + +namespace { + +#ifdef OVR_CAPI_LIMITED_MOZILLA +static pfn_ovr_Initialize ovr_Initialize = nullptr; +static pfn_ovr_Shutdown ovr_Shutdown = nullptr; +static pfn_ovr_GetLastErrorInfo ovr_GetLastErrorInfo = nullptr; +static pfn_ovr_GetVersionString ovr_GetVersionString = nullptr; +static pfn_ovr_TraceMessage ovr_TraceMessage = nullptr; +static pfn_ovr_GetHmdDesc ovr_GetHmdDesc = nullptr; +static pfn_ovr_GetTrackerCount ovr_GetTrackerCount = nullptr; +static pfn_ovr_GetTrackerDesc ovr_GetTrackerDesc = nullptr; +static pfn_ovr_Create ovr_Create = nullptr; +static pfn_ovr_Destroy ovr_Destroy = nullptr; +static pfn_ovr_GetSessionStatus ovr_GetSessionStatus = nullptr; +static pfn_ovr_SetTrackingOriginType ovr_SetTrackingOriginType = nullptr; +static pfn_ovr_GetTrackingOriginType ovr_GetTrackingOriginType = nullptr; +static pfn_ovr_RecenterTrackingOrigin ovr_RecenterTrackingOrigin = nullptr; +static pfn_ovr_ClearShouldRecenterFlag ovr_ClearShouldRecenterFlag = nullptr; +static pfn_ovr_GetTrackingState ovr_GetTrackingState = nullptr; +static pfn_ovr_GetTrackerPose ovr_GetTrackerPose = nullptr; +static pfn_ovr_GetInputState ovr_GetInputState = nullptr; +static pfn_ovr_GetConnectedControllerTypes ovr_GetConnectedControllerTypes = nullptr; +static pfn_ovr_SetControllerVibration ovr_SetControllerVibration = nullptr; +static pfn_ovr_GetTextureSwapChainLength ovr_GetTextureSwapChainLength = nullptr; +static pfn_ovr_GetTextureSwapChainCurrentIndex ovr_GetTextureSwapChainCurrentIndex = nullptr; +static pfn_ovr_GetTextureSwapChainDesc ovr_GetTextureSwapChainDesc = nullptr; +static pfn_ovr_CommitTextureSwapChain ovr_CommitTextureSwapChain = nullptr; +static pfn_ovr_DestroyTextureSwapChain ovr_DestroyTextureSwapChain = nullptr; +static pfn_ovr_DestroyMirrorTexture ovr_DestroyMirrorTexture = nullptr; +static pfn_ovr_GetFovTextureSize ovr_GetFovTextureSize 
= nullptr; +static pfn_ovr_GetRenderDesc ovr_GetRenderDesc = nullptr; +static pfn_ovr_SubmitFrame ovr_SubmitFrame = nullptr; +static pfn_ovr_GetPredictedDisplayTime ovr_GetPredictedDisplayTime = nullptr; +static pfn_ovr_GetTimeInSeconds ovr_GetTimeInSeconds = nullptr; +static pfn_ovr_GetBool ovr_GetBool = nullptr; +static pfn_ovr_SetBool ovr_SetBool = nullptr; +static pfn_ovr_GetInt ovr_GetInt = nullptr; +static pfn_ovr_SetInt ovr_SetInt = nullptr; +static pfn_ovr_GetFloat ovr_GetFloat = nullptr; +static pfn_ovr_SetFloat ovr_SetFloat = nullptr; +static pfn_ovr_GetFloatArray ovr_GetFloatArray = nullptr; +static pfn_ovr_SetFloatArray ovr_SetFloatArray = nullptr; +static pfn_ovr_GetString ovr_GetString = nullptr; +static pfn_ovr_SetString ovr_SetString = nullptr; + +#ifdef XP_WIN +static pfn_ovr_CreateTextureSwapChainDX ovr_CreateTextureSwapChainDX = nullptr; +static pfn_ovr_GetTextureSwapChainBufferDX ovr_GetTextureSwapChainBufferDX = nullptr; +static pfn_ovr_CreateMirrorTextureDX ovr_CreateMirrorTextureDX = nullptr; +static pfn_ovr_GetMirrorTextureBufferDX ovr_GetMirrorTextureBufferDX = nullptr; +#endif + +static pfn_ovr_CreateTextureSwapChainGL ovr_CreateTextureSwapChainGL = nullptr; +static pfn_ovr_GetTextureSwapChainBufferGL ovr_GetTextureSwapChainBufferGL = nullptr; +static pfn_ovr_CreateMirrorTextureGL ovr_CreateMirrorTextureGL = nullptr; +static pfn_ovr_GetMirrorTextureBufferGL ovr_GetMirrorTextureBufferGL = nullptr; + +#ifdef HAVE_64BIT_BUILD +#define BUILD_BITS 64 +#else +#define BUILD_BITS 32 +#endif + +#define OVR_PRODUCT_VERSION 1 +#define OVR_MAJOR_VERSION 3 +#define OVR_MINOR_VERSION 1 + +static bool +InitializeOculusCAPI() +{ + static PRLibrary *ovrlib = nullptr; + + if (!ovrlib) { + nsTArray<nsCString> libSearchPaths; + nsCString libName; + nsCString searchPath; + +#if defined(_WIN32) + static const char dirSep = '\\'; +#else + static const char dirSep = '/'; +#endif + +#if defined(_WIN32) + static const int pathLen = 260; + searchPath.SetCapacity(pathLen); + int realLen = ::GetSystemDirectoryA(searchPath.BeginWriting(), pathLen); + if (realLen != 0 && realLen < pathLen) { + searchPath.SetLength(realLen); + libSearchPaths.AppendElement(searchPath); + } + libName.AppendPrintf("LibOVRRT%d_%d.dll", BUILD_BITS, OVR_PRODUCT_VERSION); +#elif defined(__APPLE__) + searchPath.Truncate(); + searchPath.AppendPrintf("/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION); + libSearchPaths.AppendElement(searchPath); + + if (PR_GetEnv("HOME")) { + searchPath.Truncate(); + searchPath.AppendPrintf("%s/Library/Frameworks/LibOVRRT_%d.framework/Versions/%d", PR_GetEnv("HOME"), OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION); + libSearchPaths.AppendElement(searchPath); + } + // The following will match the va_list overload of AppendPrintf if the product version is 0 + // That's bad times. 
+ //libName.AppendPrintf("LibOVRRT_%d", OVR_PRODUCT_VERSION); + libName.Append("LibOVRRT_"); + libName.AppendInt(OVR_PRODUCT_VERSION); +#else + libSearchPaths.AppendElement(nsCString("/usr/local/lib")); + libSearchPaths.AppendElement(nsCString("/usr/lib")); + libName.AppendPrintf("libOVRRT%d_%d.so.%d", BUILD_BITS, OVR_PRODUCT_VERSION, OVR_MAJOR_VERSION); +#endif + + // If the pref is present, we override libName + nsAdoptingCString prefLibPath = mozilla::Preferences::GetCString("dom.vr.ovr_lib_path"); + if (prefLibPath && prefLibPath.get()) { + libSearchPaths.InsertElementsAt(0, 1, prefLibPath); + } + + nsAdoptingCString prefLibName = mozilla::Preferences::GetCString("dom.vr.ovr_lib_name"); + if (prefLibName && prefLibName.get()) { + libName.Assign(prefLibName); + } + + // search the path/module dir + libSearchPaths.InsertElementsAt(0, 1, nsCString()); + + // If the env var is present, we override libName + if (PR_GetEnv("OVR_LIB_PATH")) { + searchPath = PR_GetEnv("OVR_LIB_PATH"); + libSearchPaths.InsertElementsAt(0, 1, searchPath); + } + + if (PR_GetEnv("OVR_LIB_NAME")) { + libName = PR_GetEnv("OVR_LIB_NAME"); + } + + for (uint32_t i = 0; i < libSearchPaths.Length(); ++i) { + nsCString& libPath = libSearchPaths[i]; + nsCString fullName; + if (libPath.Length() == 0) { + fullName.Assign(libName); + } else { + fullName.AppendPrintf("%s%c%s", libPath.BeginReading(), dirSep, libName.BeginReading()); + } + + ovrlib = PR_LoadLibrary(fullName.BeginReading()); + if (ovrlib) + break; + } + + if (!ovrlib) { + return false; + } + } + + // was it already initialized? + if (ovr_Initialize) + return true; + +#define REQUIRE_FUNCTION(_x) do { \ + *(void **)&_x = (void *) PR_FindSymbol(ovrlib, #_x); \ + if (!_x) { printf_stderr(#_x " symbol missing\n"); goto fail; } \ + } while (0) + + REQUIRE_FUNCTION(ovr_Initialize); + REQUIRE_FUNCTION(ovr_Shutdown); + REQUIRE_FUNCTION(ovr_GetLastErrorInfo); + REQUIRE_FUNCTION(ovr_GetVersionString); + REQUIRE_FUNCTION(ovr_TraceMessage); + REQUIRE_FUNCTION(ovr_GetHmdDesc); + REQUIRE_FUNCTION(ovr_GetTrackerCount); + REQUIRE_FUNCTION(ovr_GetTrackerDesc); + REQUIRE_FUNCTION(ovr_Create); + REQUIRE_FUNCTION(ovr_Destroy); + REQUIRE_FUNCTION(ovr_GetSessionStatus); + REQUIRE_FUNCTION(ovr_SetTrackingOriginType); + REQUIRE_FUNCTION(ovr_GetTrackingOriginType); + REQUIRE_FUNCTION(ovr_RecenterTrackingOrigin); + REQUIRE_FUNCTION(ovr_ClearShouldRecenterFlag); + REQUIRE_FUNCTION(ovr_GetTrackingState); + REQUIRE_FUNCTION(ovr_GetTrackerPose); + REQUIRE_FUNCTION(ovr_GetInputState); + REQUIRE_FUNCTION(ovr_GetConnectedControllerTypes); + REQUIRE_FUNCTION(ovr_SetControllerVibration); + REQUIRE_FUNCTION(ovr_GetTextureSwapChainLength); + REQUIRE_FUNCTION(ovr_GetTextureSwapChainCurrentIndex); + REQUIRE_FUNCTION(ovr_GetTextureSwapChainDesc); + REQUIRE_FUNCTION(ovr_CommitTextureSwapChain); + REQUIRE_FUNCTION(ovr_DestroyTextureSwapChain); + REQUIRE_FUNCTION(ovr_DestroyMirrorTexture); + REQUIRE_FUNCTION(ovr_GetFovTextureSize); + REQUIRE_FUNCTION(ovr_GetRenderDesc); + REQUIRE_FUNCTION(ovr_SubmitFrame); + REQUIRE_FUNCTION(ovr_GetPredictedDisplayTime); + REQUIRE_FUNCTION(ovr_GetTimeInSeconds); + REQUIRE_FUNCTION(ovr_GetBool); + REQUIRE_FUNCTION(ovr_SetBool); + REQUIRE_FUNCTION(ovr_GetInt); + REQUIRE_FUNCTION(ovr_SetInt); + REQUIRE_FUNCTION(ovr_GetFloat); + REQUIRE_FUNCTION(ovr_SetFloat); + REQUIRE_FUNCTION(ovr_GetFloatArray); + REQUIRE_FUNCTION(ovr_SetFloatArray); + REQUIRE_FUNCTION(ovr_GetString); + REQUIRE_FUNCTION(ovr_SetString); + +#ifdef XP_WIN + + REQUIRE_FUNCTION(ovr_CreateTextureSwapChainDX); + 
REQUIRE_FUNCTION(ovr_GetTextureSwapChainBufferDX); + REQUIRE_FUNCTION(ovr_CreateMirrorTextureDX); + REQUIRE_FUNCTION(ovr_GetMirrorTextureBufferDX); + +#endif + + REQUIRE_FUNCTION(ovr_CreateTextureSwapChainGL); + REQUIRE_FUNCTION(ovr_GetTextureSwapChainBufferGL); + REQUIRE_FUNCTION(ovr_CreateMirrorTextureGL); + REQUIRE_FUNCTION(ovr_GetMirrorTextureBufferGL); + +#undef REQUIRE_FUNCTION + + return true; + + fail: + ovr_Initialize = nullptr; + return false; +} + +#else +#include <OVR_Version.h> +// we're statically linked; it's available +static bool InitializeOculusCAPI() +{ + return true; +} + +#endif + +ovrFovPort +ToFovPort(const VRFieldOfView& aFOV) +{ + ovrFovPort fovPort; + fovPort.LeftTan = tan(aFOV.leftDegrees * M_PI / 180.0); + fovPort.RightTan = tan(aFOV.rightDegrees * M_PI / 180.0); + fovPort.UpTan = tan(aFOV.upDegrees * M_PI / 180.0); + fovPort.DownTan = tan(aFOV.downDegrees * M_PI / 180.0); + return fovPort; +} + +VRFieldOfView +FromFovPort(const ovrFovPort& aFOV) +{ + VRFieldOfView fovInfo; + fovInfo.leftDegrees = atan(aFOV.LeftTan) * 180.0 / M_PI; + fovInfo.rightDegrees = atan(aFOV.RightTan) * 180.0 / M_PI; + fovInfo.upDegrees = atan(aFOV.UpTan) * 180.0 / M_PI; + fovInfo.downDegrees = atan(aFOV.DownTan) * 180.0 / M_PI; + return fovInfo; +} + +} // namespace + +VRDisplayOculus::VRDisplayOculus(ovrSession aSession) + : VRDisplayHost(VRDeviceType::Oculus) + , mSession(aSession) + , mTextureSet(nullptr) + , mQuadVS(nullptr) + , mQuadPS(nullptr) + , mLinearSamplerState(nullptr) + , mVSConstantBuffer(nullptr) + , mPSConstantBuffer(nullptr) + , mVertexBuffer(nullptr) + , mInputLayout(nullptr) + , mIsPresenting(false) +{ + MOZ_COUNT_CTOR_INHERITED(VRDisplayOculus, VRDisplayHost); + + mDisplayInfo.mDisplayName.AssignLiteral("Oculus VR HMD"); + mDisplayInfo.mIsConnected = true; + + mDesc = ovr_GetHmdDesc(aSession); + + mDisplayInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None; + if (mDesc.AvailableTrackingCaps & ovrTrackingCap_Orientation) { + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Orientation; + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_AngularAcceleration; + } + if (mDesc.AvailableTrackingCaps & ovrTrackingCap_Position) { + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Position; + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_LinearAcceleration; + } + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_External; + mDisplayInfo.mCapabilityFlags |= VRDisplayCapabilityFlags::Cap_Present; + + mFOVPort[VRDisplayInfo::Eye_Left] = mDesc.DefaultEyeFov[ovrEye_Left]; + mFOVPort[VRDisplayInfo::Eye_Right] = mDesc.DefaultEyeFov[ovrEye_Right]; + + mDisplayInfo.mEyeFOV[VRDisplayInfo::Eye_Left] = FromFovPort(mFOVPort[VRDisplayInfo::Eye_Left]); + mDisplayInfo.mEyeFOV[VRDisplayInfo::Eye_Right] = FromFovPort(mFOVPort[VRDisplayInfo::Eye_Right]); + + float pixelsPerDisplayPixel = 1.0; + ovrSizei texSize[2]; + + // get eye parameters and create the mesh + for (uint32_t eye = 0; eye < VRDisplayInfo::NumEyes; eye++) { + + ovrEyeRenderDesc renderDesc = ovr_GetRenderDesc(mSession, (ovrEyeType)eye, mFOVPort[eye]); + + // As of Oculus 0.6.0, the HmdToEyeOffset values are correct and don't need to be negated. 
+ mDisplayInfo.mEyeTranslation[eye] = Point3D(renderDesc.HmdToEyeOffset.x, renderDesc.HmdToEyeOffset.y, renderDesc.HmdToEyeOffset.z); + + texSize[eye] = ovr_GetFovTextureSize(mSession, (ovrEyeType)eye, mFOVPort[eye], pixelsPerDisplayPixel); + } + + // take the max of both for eye resolution + mDisplayInfo.mEyeResolution.width = std::max(texSize[VRDisplayInfo::Eye_Left].w, texSize[VRDisplayInfo::Eye_Right].w); + mDisplayInfo.mEyeResolution.height = std::max(texSize[VRDisplayInfo::Eye_Left].h, texSize[VRDisplayInfo::Eye_Right].h); +} + +VRDisplayOculus::~VRDisplayOculus() { + StopPresentation(); + Destroy(); + MOZ_COUNT_DTOR_INHERITED(VRDisplayOculus, VRDisplayHost); +} + +void +VRDisplayOculus::Destroy() +{ + if (mSession) { + ovr_Destroy(mSession); + mSession = nullptr; + } +} + +void +VRDisplayOculus::ZeroSensor() +{ + ovr_RecenterTrackingOrigin(mSession); +} + +VRHMDSensorState +VRDisplayOculus::GetSensorState() +{ + mInputFrameID++; + + VRHMDSensorState result; + double frameDelta = 0.0f; + if (gfxPrefs::VRPosePredictionEnabled()) { + // XXX We might need to call ovr_GetPredictedDisplayTime even if we don't use the result. + // If we don't call it, the Oculus driver will spew out many warnings... + double predictedFrameTime = ovr_GetPredictedDisplayTime(mSession, mInputFrameID); + frameDelta = predictedFrameTime - ovr_GetTimeInSeconds(); + } + result = GetSensorState(frameDelta); + result.inputFrameID = mInputFrameID; + mLastSensorState[result.inputFrameID % kMaxLatencyFrames] = result; + return result; +} + +VRHMDSensorState +VRDisplayOculus::GetImmediateSensorState() +{ + return GetSensorState(0.0); +} + +VRHMDSensorState +VRDisplayOculus::GetSensorState(double timeOffset) +{ + VRHMDSensorState result; + result.Clear(); + + ovrTrackingState state = ovr_GetTrackingState(mSession, timeOffset, true); + ovrPoseStatef& pose(state.HeadPose); + + result.timestamp = pose.TimeInSeconds; + + if (state.StatusFlags & ovrStatus_OrientationTracked) { + result.flags |= VRDisplayCapabilityFlags::Cap_Orientation; + + result.orientation[0] = pose.ThePose.Orientation.x; + result.orientation[1] = pose.ThePose.Orientation.y; + result.orientation[2] = pose.ThePose.Orientation.z; + result.orientation[3] = pose.ThePose.Orientation.w; + + result.angularVelocity[0] = pose.AngularVelocity.x; + result.angularVelocity[1] = pose.AngularVelocity.y; + result.angularVelocity[2] = pose.AngularVelocity.z; + + result.flags |= VRDisplayCapabilityFlags::Cap_AngularAcceleration; + + result.angularAcceleration[0] = pose.AngularAcceleration.x; + result.angularAcceleration[1] = pose.AngularAcceleration.y; + result.angularAcceleration[2] = pose.AngularAcceleration.z; + } + + if (state.StatusFlags & ovrStatus_PositionTracked) { + result.flags |= VRDisplayCapabilityFlags::Cap_Position; + + result.position[0] = pose.ThePose.Position.x; + result.position[1] = pose.ThePose.Position.y; + result.position[2] = pose.ThePose.Position.z; + + result.linearVelocity[0] = pose.LinearVelocity.x; + result.linearVelocity[1] = pose.LinearVelocity.y; + result.linearVelocity[2] = pose.LinearVelocity.z; + + result.flags |= VRDisplayCapabilityFlags::Cap_LinearAcceleration; + + result.linearAcceleration[0] = pose.LinearAcceleration.x; + result.linearAcceleration[1] = pose.LinearAcceleration.y; + result.linearAcceleration[2] = pose.LinearAcceleration.z; + } + result.flags |= VRDisplayCapabilityFlags::Cap_External; + result.flags |= VRDisplayCapabilityFlags::Cap_Present; + + return result; +} + +void +VRDisplayOculus::StartPresentation() +{ + if 
(mIsPresenting) { + return; + } + mIsPresenting = true; + + /** + * The presentation format is determined by content, which describes the + * left and right eye rectangles in the VRLayer. The default, if no + * coordinates are passed is to place the left and right eye textures + * side-by-side within the buffer. + * + * XXX - An optimization would be to dynamically resize this buffer + * to accomodate sites that are choosing to render in a lower + * resolution or are using space outside of the left and right + * eye textures for other purposes. (Bug 1291443) + */ + ovrTextureSwapChainDesc desc; + memset(&desc, 0, sizeof(desc)); + desc.Type = ovrTexture_2D; + desc.ArraySize = 1; + desc.Format = OVR_FORMAT_B8G8R8A8_UNORM_SRGB; + desc.Width = mDisplayInfo.mEyeResolution.width * 2; + desc.Height = mDisplayInfo.mEyeResolution.height; + desc.MipLevels = 1; + desc.SampleCount = 1; + desc.StaticImage = false; + desc.MiscFlags = ovrTextureMisc_DX_Typeless; + desc.BindFlags = ovrTextureBind_DX_RenderTarget; + + if (!mDevice) { + mDevice = gfx::DeviceManagerDx::Get()->GetCompositorDevice(); + if (!mDevice) { + NS_WARNING("Failed to get a D3D11Device for Oculus"); + return; + } + } + + mDevice->GetImmediateContext(getter_AddRefs(mContext)); + if (!mContext) { + NS_WARNING("Failed to get immediate context for Oculus"); + return; + } + + if (FAILED(mDevice->CreateVertexShader(sLayerQuadVS.mData, sLayerQuadVS.mLength, nullptr, &mQuadVS))) { + NS_WARNING("Failed to create vertex shader for Oculus"); + return; + } + + if (FAILED(mDevice->CreatePixelShader(sRGBShader.mData, sRGBShader.mLength, nullptr, &mQuadPS))) { + NS_WARNING("Failed to create pixel shader for Oculus"); + return; + } + + CD3D11_BUFFER_DESC cBufferDesc(sizeof(layers::VertexShaderConstants), + D3D11_BIND_CONSTANT_BUFFER, + D3D11_USAGE_DYNAMIC, + D3D11_CPU_ACCESS_WRITE); + + if (FAILED(mDevice->CreateBuffer(&cBufferDesc, nullptr, getter_AddRefs(mVSConstantBuffer)))) { + NS_WARNING("Failed to vertex shader constant buffer for Oculus"); + return; + } + + cBufferDesc.ByteWidth = sizeof(layers::PixelShaderConstants); + if (FAILED(mDevice->CreateBuffer(&cBufferDesc, nullptr, getter_AddRefs(mPSConstantBuffer)))) { + NS_WARNING("Failed to pixel shader constant buffer for Oculus"); + return; + } + + CD3D11_SAMPLER_DESC samplerDesc(D3D11_DEFAULT); + if (FAILED(mDevice->CreateSamplerState(&samplerDesc, getter_AddRefs(mLinearSamplerState)))) { + NS_WARNING("Failed to create sampler state for Oculus"); + return; + } + + D3D11_INPUT_ELEMENT_DESC layout[] = + { + { "POSITION", 0, DXGI_FORMAT_R32G32_FLOAT, 0, 0, D3D11_INPUT_PER_VERTEX_DATA, 0 }, + }; + + if (FAILED(mDevice->CreateInputLayout(layout, + sizeof(layout) / sizeof(D3D11_INPUT_ELEMENT_DESC), + sLayerQuadVS.mData, + sLayerQuadVS.mLength, + getter_AddRefs(mInputLayout)))) { + NS_WARNING("Failed to create input layout for Oculus"); + return; + } + + ovrResult orv = ovr_CreateTextureSwapChainDX(mSession, mDevice, &desc, &mTextureSet); + if (orv != ovrSuccess) { + NS_WARNING("ovr_CreateTextureSwapChainDX failed"); + return; + } + + int textureCount = 0; + orv = ovr_GetTextureSwapChainLength(mSession, mTextureSet, &textureCount); + if (orv != ovrSuccess) { + NS_WARNING("ovr_GetTextureSwapChainLength failed"); + return; + } + + Vertex vertices[] = { { { 0.0, 0.0 } },{ { 1.0, 0.0 } },{ { 0.0, 1.0 } },{ { 1.0, 1.0 } } }; + CD3D11_BUFFER_DESC bufferDesc(sizeof(vertices), D3D11_BIND_VERTEX_BUFFER); + D3D11_SUBRESOURCE_DATA data; + data.pSysMem = (void*)vertices; + + if 
(FAILED(mDevice->CreateBuffer(&bufferDesc, &data, getter_AddRefs(mVertexBuffer)))) { + NS_WARNING("Failed to create vertex buffer for Oculus"); + return; + } + + mRenderTargets.SetLength(textureCount); + + memset(&mVSConstants, 0, sizeof(mVSConstants)); + memset(&mPSConstants, 0, sizeof(mPSConstants)); + + for (int i = 0; i < textureCount; ++i) { + RefPtr<CompositingRenderTargetD3D11> rt; + ID3D11Texture2D* texture = nullptr; + orv = ovr_GetTextureSwapChainBufferDX(mSession, mTextureSet, i, IID_PPV_ARGS(&texture)); + MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainBufferDX failed."); + rt = new CompositingRenderTargetD3D11(texture, IntPoint(0, 0), DXGI_FORMAT_B8G8R8A8_UNORM); + rt->SetSize(IntSize(mDisplayInfo.mEyeResolution.width * 2, mDisplayInfo.mEyeResolution.height)); + mRenderTargets[i] = rt; + texture->Release(); + } +} + +void +VRDisplayOculus::StopPresentation() +{ + if (!mIsPresenting) { + return; + } + mIsPresenting = false; + + ovr_SubmitFrame(mSession, 0, nullptr, nullptr, 0); + + if (mTextureSet) { + ovr_DestroyTextureSwapChain(mSession, mTextureSet); + mTextureSet = nullptr; + } +} + +/*static*/ already_AddRefed<VRDisplayManagerOculus> +VRDisplayManagerOculus::Create() +{ + MOZ_ASSERT(NS_IsMainThread()); + + if (!gfxPrefs::VREnabled() || !gfxPrefs::VROculusEnabled()) + { + return nullptr; + } + + if (!InitializeOculusCAPI()) { + return nullptr; + } + + RefPtr<VRDisplayManagerOculus> manager = new VRDisplayManagerOculus(); + return manager.forget(); +} + +bool +VRDisplayManagerOculus::Init() +{ + if (!mOculusInitialized) { + nsIThread* thread = nullptr; + NS_GetCurrentThread(&thread); + mOculusThread = already_AddRefed<nsIThread>(thread); + + ovrInitParams params; + memset(&params, 0, sizeof(params)); + params.Flags = ovrInit_RequestVersion; + params.RequestedMinorVersion = OVR_MINOR_VERSION; + params.LogCallback = nullptr; + params.ConnectionTimeoutMS = 0; + + ovrResult orv = ovr_Initialize(&params); + + if (orv == ovrSuccess) { + mOculusInitialized = true; + } + } + + return mOculusInitialized; +} + +void +VRDisplayManagerOculus::Destroy() +{ + if (mOculusInitialized) { + MOZ_ASSERT(NS_GetCurrentThread() == mOculusThread); + mOculusThread = nullptr; + + mHMDInfo = nullptr; + + ovr_Shutdown(); + mOculusInitialized = false; + } +} + +void +VRDisplayManagerOculus::GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) +{ + if (!mOculusInitialized) { + return; + } + + // ovr_Create can be slow when no HMD is present and we wish + // to keep the same oculus session when possible, so we detect + // presence of an HMD with ovr_GetHmdDesc before calling ovr_Create + ovrHmdDesc desc = ovr_GetHmdDesc(NULL); + if (desc.Type == ovrHmd_None) { + // No HMD connected. 
+ mHMDInfo = nullptr; + } else if (mHMDInfo == nullptr) { + // HMD Detected + ovrSession session; + ovrGraphicsLuid luid; + ovrResult orv = ovr_Create(&session, &luid); + if (orv == ovrSuccess) { + mHMDInfo = new VRDisplayOculus(session); + } + } + + if (mHMDInfo) { + aHMDResult.AppendElement(mHMDInfo); + } +} + +already_AddRefed<CompositingRenderTargetD3D11> +VRDisplayOculus::GetNextRenderTarget() +{ + int currentRenderTarget = 0; + DebugOnly<ovrResult> orv = ovr_GetTextureSwapChainCurrentIndex(mSession, mTextureSet, &currentRenderTarget); + MOZ_ASSERT(orv == ovrSuccess, "ovr_GetTextureSwapChainCurrentIndex failed."); + + mRenderTargets[currentRenderTarget]->ClearOnBind(); + RefPtr<CompositingRenderTargetD3D11> rt = mRenderTargets[currentRenderTarget]; + return rt.forget(); +} + +bool +VRDisplayOculus::UpdateConstantBuffers() +{ + HRESULT hr; + D3D11_MAPPED_SUBRESOURCE resource; + resource.pData = nullptr; + + hr = mContext->Map(mVSConstantBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &resource); + if (FAILED(hr) || !resource.pData) { + return false; + } + *(VertexShaderConstants*)resource.pData = mVSConstants; + mContext->Unmap(mVSConstantBuffer, 0); + resource.pData = nullptr; + + hr = mContext->Map(mPSConstantBuffer, 0, D3D11_MAP_WRITE_DISCARD, 0, &resource); + if (FAILED(hr) || !resource.pData) { + return false; + } + *(PixelShaderConstants*)resource.pData = mPSConstants; + mContext->Unmap(mPSConstantBuffer, 0); + + ID3D11Buffer *buffer = mVSConstantBuffer; + mContext->VSSetConstantBuffers(0, 1, &buffer); + buffer = mPSConstantBuffer; + mContext->PSSetConstantBuffers(0, 1, &buffer); + return true; +} + +void +VRDisplayOculus::SubmitFrame(TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) +{ + if (!mIsPresenting) { + return; + } + if (mRenderTargets.IsEmpty()) { + /** + * XXX - We should resolve fail the promise returned by + * VRDisplay.requestPresent() when the DX11 resources fail allocation + * in VRDisplayOculus::StartPresentation(). + * Bailing out here prevents the crash but content should be aware + * that frames are not being presented. + * See Bug 1299309. 
+ **/ + return; + } + MOZ_ASSERT(mDevice); + MOZ_ASSERT(mContext); + + RefPtr<CompositingRenderTargetD3D11> surface = GetNextRenderTarget(); + + surface->BindRenderTarget(mContext); + + Matrix viewMatrix = Matrix::Translation(-1.0, 1.0); + viewMatrix.PreScale(2.0f / float(aSize.width), 2.0f / float(aSize.height)); + viewMatrix.PreScale(1.0f, -1.0f); + Matrix4x4 projection = Matrix4x4::From2D(viewMatrix); + projection._33 = 0.0f; + + Matrix transform2d; + gfx::Matrix4x4 transform = gfx::Matrix4x4::From2D(transform2d); + + D3D11_VIEWPORT viewport; + viewport.MinDepth = 0.0f; + viewport.MaxDepth = 1.0f; + viewport.Width = aSize.width; + viewport.Height = aSize.height; + viewport.TopLeftX = 0; + viewport.TopLeftY = 0; + + D3D11_RECT scissor; + scissor.left = 0; + scissor.right = aSize.width; + scissor.top = 0; + scissor.bottom = aSize.height; + + memcpy(&mVSConstants.layerTransform, &transform._11, sizeof(mVSConstants.layerTransform)); + memcpy(&mVSConstants.projection, &projection._11, sizeof(mVSConstants.projection)); + mVSConstants.renderTargetOffset[0] = 0.0f; + mVSConstants.renderTargetOffset[1] = 0.0f; + mVSConstants.layerQuad = Rect(0.0f, 0.0f, aSize.width, aSize.height); + mVSConstants.textureCoords = Rect(0.0f, 1.0f, 1.0f, -1.0f); + + mPSConstants.layerOpacity[0] = 1.0f; + + ID3D11Buffer* vbuffer = mVertexBuffer; + UINT vsize = sizeof(Vertex); + UINT voffset = 0; + mContext->IASetVertexBuffers(0, 1, &vbuffer, &vsize, &voffset); + mContext->IASetIndexBuffer(nullptr, DXGI_FORMAT_R16_UINT, 0); + mContext->IASetInputLayout(mInputLayout); + mContext->RSSetViewports(1, &viewport); + mContext->RSSetScissorRects(1, &scissor); + mContext->IASetPrimitiveTopology(D3D11_PRIMITIVE_TOPOLOGY_TRIANGLESTRIP); + mContext->VSSetShader(mQuadVS, nullptr, 0); + mContext->PSSetShader(mQuadPS, nullptr, 0); + ID3D11ShaderResourceView* srView = aSource->GetShaderResourceView(); + mContext->PSSetShaderResources(0 /* 0 == TexSlot::RGB */, 1, &srView); + // XXX Use Constant from TexSlot in CompositorD3D11.cpp? 
+ + ID3D11SamplerState *sampler = mLinearSamplerState; + mContext->PSSetSamplers(0, 1, &sampler); + + if (!UpdateConstantBuffers()) { + NS_WARNING("Failed to update constant buffers for Oculus"); + return; + } + + mContext->Draw(4, 0); + + ovrResult orv = ovr_CommitTextureSwapChain(mSession, mTextureSet); + if (orv != ovrSuccess) { + NS_WARNING("ovr_CommitTextureSwapChain failed.\n"); + return; + } + + ovrLayerEyeFov layer; + memset(&layer, 0, sizeof(layer)); + layer.Header.Type = ovrLayerType_EyeFov; + layer.Header.Flags = 0; + layer.ColorTexture[0] = mTextureSet; + layer.ColorTexture[1] = nullptr; + layer.Fov[0] = mFOVPort[0]; + layer.Fov[1] = mFOVPort[1]; + layer.Viewport[0].Pos.x = aSize.width * aLeftEyeRect.x; + layer.Viewport[0].Pos.y = aSize.height * aLeftEyeRect.y; + layer.Viewport[0].Size.w = aSize.width * aLeftEyeRect.width; + layer.Viewport[0].Size.h = aSize.height * aLeftEyeRect.height; + layer.Viewport[1].Pos.x = aSize.width * aRightEyeRect.x; + layer.Viewport[1].Pos.y = aSize.height * aRightEyeRect.y; + layer.Viewport[1].Size.w = aSize.width * aRightEyeRect.width; + layer.Viewport[1].Size.h = aSize.height * aRightEyeRect.height; + + const Point3D& l = mDisplayInfo.mEyeTranslation[0]; + const Point3D& r = mDisplayInfo.mEyeTranslation[1]; + const ovrVector3f hmdToEyeViewOffset[2] = { { l.x, l.y, l.z }, + { r.x, r.y, r.z } }; + + for (uint32_t i = 0; i < 2; ++i) { + Quaternion o(aSensorState.orientation[0], + aSensorState.orientation[1], + aSensorState.orientation[2], + aSensorState.orientation[3]); + Point3D vo(hmdToEyeViewOffset[i].x, hmdToEyeViewOffset[i].y, hmdToEyeViewOffset[i].z); + Point3D p = o.RotatePoint(vo); + layer.RenderPose[i].Orientation.x = o.x; + layer.RenderPose[i].Orientation.y = o.y; + layer.RenderPose[i].Orientation.z = o.z; + layer.RenderPose[i].Orientation.w = o.w; + layer.RenderPose[i].Position.x = p.x + aSensorState.position[0]; + layer.RenderPose[i].Position.y = p.y + aSensorState.position[1]; + layer.RenderPose[i].Position.z = p.z + aSensorState.position[2]; + } + + ovrLayerHeader *layers = &layer.Header; + orv = ovr_SubmitFrame(mSession, aSensorState.inputFrameID, nullptr, &layers, 1); + + if (orv != ovrSuccess) { + printf_stderr("ovr_SubmitFrame failed.\n"); + } + + // Trigger the next VSync immediately + VRManager *vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyVRVsync(mDisplayInfo.mDisplayID); +} + +void +VRDisplayOculus::NotifyVSync() +{ + ovrSessionStatus sessionStatus; + ovrResult ovr = ovr_GetSessionStatus(mSession, &sessionStatus); + mDisplayInfo.mIsConnected = (ovr == ovrSuccess && sessionStatus.HmdPresent); +} diff --git a/gfx/vr/gfxVROculus.h b/gfx/vr/gfxVROculus.h new file mode 100644 index 000000000..ff00cb1df --- /dev/null +++ b/gfx/vr/gfxVROculus.h @@ -0,0 +1,111 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_OCULUS_H +#define GFX_VR_OCULUS_H + +#include "nsTArray.h" +#include "mozilla/RefPtr.h" + +#include "mozilla/gfx/2D.h" +#include "mozilla/EnumeratedArray.h" + +#include "gfxVR.h" +#include "VRDisplayHost.h" +#include "ovr_capi_dynamic.h" + +struct ID3D11Device; + +namespace mozilla { +namespace layers { +class CompositingRenderTargetD3D11; +struct VertexShaderConstants; +struct PixelShaderConstants; +} +namespace gfx { +namespace impl { + +class VRDisplayOculus : public VRDisplayHost +{ +public: + virtual void NotifyVSync() override; + virtual VRHMDSensorState GetSensorState() override; + virtual VRHMDSensorState GetImmediateSensorState() override; + void ZeroSensor() override; + +protected: + virtual void StartPresentation() override; + virtual void StopPresentation() override; + virtual void SubmitFrame(mozilla::layers::TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) override; + +public: + explicit VRDisplayOculus(ovrSession aSession); + +protected: + virtual ~VRDisplayOculus(); + void Destroy(); + + bool RequireSession(); + const ovrHmdDesc& GetHmdDesc(); + + already_AddRefed<layers::CompositingRenderTargetD3D11> GetNextRenderTarget(); + + VRHMDSensorState GetSensorState(double timeOffset); + + ovrHmdDesc mDesc; + ovrSession mSession; + ovrFovPort mFOVPort[2]; + ovrTextureSwapChain mTextureSet; + nsTArray<RefPtr<layers::CompositingRenderTargetD3D11>> mRenderTargets; + + RefPtr<ID3D11Device> mDevice; + RefPtr<ID3D11DeviceContext> mContext; + ID3D11VertexShader* mQuadVS; + ID3D11PixelShader* mQuadPS; + RefPtr<ID3D11SamplerState> mLinearSamplerState; + layers::VertexShaderConstants mVSConstants; + layers::PixelShaderConstants mPSConstants; + RefPtr<ID3D11Buffer> mVSConstantBuffer; + RefPtr<ID3D11Buffer> mPSConstantBuffer; + RefPtr<ID3D11Buffer> mVertexBuffer; + RefPtr<ID3D11InputLayout> mInputLayout; + + bool mIsPresenting; + + bool UpdateConstantBuffers(); + + struct Vertex + { + float position[2]; + }; +}; + +} // namespace impl + +class VRDisplayManagerOculus : public VRDisplayManager +{ +public: + static already_AddRefed<VRDisplayManagerOculus> Create(); + virtual bool Init() override; + virtual void Destroy() override; + virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost> >& aHMDResult) override; +protected: + VRDisplayManagerOculus() + : mOculusInitialized(false) + { } + + RefPtr<impl::VRDisplayOculus> mHMDInfo; + bool mOculusInitialized; + RefPtr<nsIThread> mOculusThread; +}; + +} // namespace gfx +} // namespace mozilla + +#endif /* GFX_VR_OCULUS_H */ diff --git a/gfx/vr/gfxVROpenVR.cpp b/gfx/vr/gfxVROpenVR.cpp new file mode 100644 index 000000000..01149c983 --- /dev/null +++ b/gfx/vr/gfxVROpenVR.cpp @@ -0,0 +1,749 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include <math.h> + +#include "prlink.h" +#include "prmem.h" +#include "prenv.h" +#include "gfxPrefs.h" +#include "nsString.h" +#include "mozilla/Preferences.h" + +#include "mozilla/gfx/Quaternion.h" + +#ifdef XP_WIN +#include "CompositorD3D11.h" +#include "TextureD3D11.h" +#endif // XP_WIN + +#include "gfxVROpenVR.h" + +#include "nsServiceManagerUtils.h" +#include "nsIScreenManager.h" +#include "openvr/openvr.h" + +#ifdef MOZ_GAMEPAD +#include "mozilla/dom/GamepadEventTypes.h" +#include "mozilla/dom/GamepadBinding.h" +#endif + +#ifndef M_PI +# define M_PI 3.14159265358979323846 +#endif + +using namespace mozilla; +using namespace mozilla::gfx; +using namespace mozilla::gfx::impl; +using namespace mozilla::layers; +using namespace mozilla::dom; + +namespace { +extern "C" { +typedef uint32_t (VR_CALLTYPE * pfn_VR_InitInternal)(::vr::HmdError *peError, ::vr::EVRApplicationType eApplicationType); +typedef void (VR_CALLTYPE * pfn_VR_ShutdownInternal)(); +typedef bool (VR_CALLTYPE * pfn_VR_IsHmdPresent)(); +typedef bool (VR_CALLTYPE * pfn_VR_IsRuntimeInstalled)(); +typedef const char * (VR_CALLTYPE * pfn_VR_GetStringForHmdError)(::vr::HmdError error); +typedef void * (VR_CALLTYPE * pfn_VR_GetGenericInterface)(const char *pchInterfaceVersion, ::vr::HmdError *peError); +} // extern "C" +} // namespace + +static pfn_VR_InitInternal vr_InitInternal = nullptr; +static pfn_VR_ShutdownInternal vr_ShutdownInternal = nullptr; +static pfn_VR_IsHmdPresent vr_IsHmdPresent = nullptr; +static pfn_VR_IsRuntimeInstalled vr_IsRuntimeInstalled = nullptr; +static pfn_VR_GetStringForHmdError vr_GetStringForHmdError = nullptr; +static pfn_VR_GetGenericInterface vr_GetGenericInterface = nullptr; + +// EButton_System, EButton_DPad_xx, and EButton_A +// can not be triggered in Steam Vive in OpenVR SDK 1.0.3. 
+const uint64_t gOpenVRButtonMask[] = { + // vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_System), + vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_ApplicationMenu), + vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_Grip), + // vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_DPad_Left), + // vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_DPad_Up), + // vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_DPad_Right), + // vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_DPad_Down), + // vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_A), + vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_SteamVR_Touchpad), + vr::ButtonMaskFromId(vr::EVRButtonId::k_EButton_SteamVR_Trigger) +}; + +const uint32_t gNumOpenVRButtonMask = sizeof(gOpenVRButtonMask) / + sizeof(uint64_t); + +enum class VRControllerAxisType : uint16_t { + TrackpadXAxis, + TrackpadYAxis, + Trigger, + NumVRControllerAxisType +}; + +#define VRControllerAxis(aButtonId) (aButtonId - vr::EVRButtonId::k_EButton_Axis0) + +const uint32_t gOpenVRAxes[] = { + VRControllerAxis(vr::EVRButtonId::k_EButton_Axis0), + VRControllerAxis(vr::EVRButtonId::k_EButton_Axis0), + VRControllerAxis(vr::EVRButtonId::k_EButton_Axis1) +}; + +const uint32_t gNumOpenVRAxis = sizeof(gOpenVRAxes) / + sizeof(uint32_t); + + +bool +LoadOpenVRRuntime() +{ + static PRLibrary *openvrLib = nullptr; + + nsAdoptingCString openvrPath = Preferences::GetCString("gfx.vr.openvr-runtime"); + if (!openvrPath) + return false; + + openvrLib = PR_LoadLibrary(openvrPath.BeginReading()); + if (!openvrLib) + return false; + +#define REQUIRE_FUNCTION(_x) do { \ + *(void **)&vr_##_x = (void *) PR_FindSymbol(openvrLib, "VR_" #_x); \ + if (!vr_##_x) { printf_stderr("VR_" #_x " symbol missing\n"); return false; } \ + } while (0) + + REQUIRE_FUNCTION(InitInternal); + REQUIRE_FUNCTION(ShutdownInternal); + REQUIRE_FUNCTION(IsHmdPresent); + REQUIRE_FUNCTION(IsRuntimeInstalled); + REQUIRE_FUNCTION(GetStringForHmdError); + REQUIRE_FUNCTION(GetGenericInterface); + +#undef REQUIRE_FUNCTION + + return true; +} + +VRDisplayOpenVR::VRDisplayOpenVR(::vr::IVRSystem *aVRSystem, + ::vr::IVRChaperone *aVRChaperone, + ::vr::IVRCompositor *aVRCompositor) + : VRDisplayHost(VRDeviceType::OpenVR) + , mVRSystem(aVRSystem) + , mVRChaperone(aVRChaperone) + , mVRCompositor(aVRCompositor) + , mIsPresenting(false) +{ + MOZ_COUNT_CTOR_INHERITED(VRDisplayOpenVR, VRDisplayHost); + + mDisplayInfo.mDisplayName.AssignLiteral("OpenVR HMD"); + mDisplayInfo.mIsConnected = true; + mDisplayInfo.mCapabilityFlags = VRDisplayCapabilityFlags::Cap_None | + VRDisplayCapabilityFlags::Cap_Orientation | + VRDisplayCapabilityFlags::Cap_Position | + VRDisplayCapabilityFlags::Cap_External | + VRDisplayCapabilityFlags::Cap_Present | + VRDisplayCapabilityFlags::Cap_StageParameters; + + mVRCompositor->SetTrackingSpace(::vr::TrackingUniverseSeated); + + uint32_t w, h; + mVRSystem->GetRecommendedRenderTargetSize(&w, &h); + mDisplayInfo.mEyeResolution.width = w; + mDisplayInfo.mEyeResolution.height = h; + + // SteamVR gives the application a single FOV to use; it's not configurable as with Oculus + for (uint32_t eye = 0; eye < 2; ++eye) { + // get l/r/t/b clip plane coordinates + float l, r, t, b; + mVRSystem->GetProjectionRaw(static_cast<::vr::Hmd_Eye>(eye), &l, &r, &t, &b); + mDisplayInfo.mEyeFOV[eye].SetFromTanRadians(-t, r, b, -l); + + ::vr::HmdMatrix34_t eyeToHead = mVRSystem->GetEyeToHeadTransform(static_cast<::vr::Hmd_Eye>(eye)); + + mDisplayInfo.mEyeTranslation[eye].x = eyeToHead.m[0][3]; + mDisplayInfo.mEyeTranslation[eye].y = 
eyeToHead.m[1][3]; + mDisplayInfo.mEyeTranslation[eye].z = eyeToHead.m[2][3]; + } + + UpdateStageParameters(); +} + +VRDisplayOpenVR::~VRDisplayOpenVR() +{ + Destroy(); + MOZ_COUNT_DTOR_INHERITED(VRDisplayOpenVR, VRDisplayHost); +} + +void +VRDisplayOpenVR::Destroy() +{ + StopPresentation(); + vr_ShutdownInternal(); +} + +void +VRDisplayOpenVR::UpdateStageParameters() +{ + float sizeX = 0.0f; + float sizeZ = 0.0f; + if (mVRChaperone->GetPlayAreaSize(&sizeX, &sizeZ)) { + ::vr::HmdMatrix34_t t = mVRSystem->GetSeatedZeroPoseToStandingAbsoluteTrackingPose(); + mDisplayInfo.mStageSize.width = sizeX; + mDisplayInfo.mStageSize.height = sizeZ; + + mDisplayInfo.mSittingToStandingTransform._11 = t.m[0][0]; + mDisplayInfo.mSittingToStandingTransform._12 = t.m[1][0]; + mDisplayInfo.mSittingToStandingTransform._13 = t.m[2][0]; + mDisplayInfo.mSittingToStandingTransform._14 = 0.0f; + + mDisplayInfo.mSittingToStandingTransform._21 = t.m[0][1]; + mDisplayInfo.mSittingToStandingTransform._22 = t.m[1][1]; + mDisplayInfo.mSittingToStandingTransform._23 = t.m[2][1]; + mDisplayInfo.mSittingToStandingTransform._24 = 0.0f; + + mDisplayInfo.mSittingToStandingTransform._31 = t.m[0][2]; + mDisplayInfo.mSittingToStandingTransform._32 = t.m[1][2]; + mDisplayInfo.mSittingToStandingTransform._33 = t.m[2][2]; + mDisplayInfo.mSittingToStandingTransform._34 = 0.0f; + + mDisplayInfo.mSittingToStandingTransform._41 = t.m[0][3]; + mDisplayInfo.mSittingToStandingTransform._42 = t.m[1][3]; + mDisplayInfo.mSittingToStandingTransform._43 = t.m[2][3]; + mDisplayInfo.mSittingToStandingTransform._44 = 1.0f; + } else { + // If we fail, fall back to reasonable defaults. + // 1m x 1m space, 0.75m high in seated position + + mDisplayInfo.mStageSize.width = 1.0f; + mDisplayInfo.mStageSize.height = 1.0f; + + mDisplayInfo.mSittingToStandingTransform._11 = 1.0f; + mDisplayInfo.mSittingToStandingTransform._12 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._13 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._14 = 0.0f; + + mDisplayInfo.mSittingToStandingTransform._21 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._22 = 1.0f; + mDisplayInfo.mSittingToStandingTransform._23 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._24 = 0.0f; + + mDisplayInfo.mSittingToStandingTransform._31 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._32 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._33 = 1.0f; + mDisplayInfo.mSittingToStandingTransform._34 = 0.0f; + + mDisplayInfo.mSittingToStandingTransform._41 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._42 = 0.75f; + mDisplayInfo.mSittingToStandingTransform._43 = 0.0f; + mDisplayInfo.mSittingToStandingTransform._44 = 1.0f; + } +} + +void +VRDisplayOpenVR::ZeroSensor() +{ + mVRSystem->ResetSeatedZeroPose(); + UpdateStageParameters(); +} + +VRHMDSensorState +VRDisplayOpenVR::GetSensorState() +{ + return GetSensorState(0.0f); +} + +VRHMDSensorState +VRDisplayOpenVR::GetImmediateSensorState() +{ + return GetSensorState(0.0f); +} + +VRHMDSensorState +VRDisplayOpenVR::GetSensorState(double timeOffset) +{ + { + ::vr::VREvent_t event; + while (mVRSystem->PollNextEvent(&event, sizeof(event))) { + // ignore + } + } + + ::vr::TrackedDevicePose_t poses[::vr::k_unMaxTrackedDeviceCount]; + // Note: We *must* call WaitGetPoses in order for any rendering to happen at all + mVRCompositor->WaitGetPoses(poses, ::vr::k_unMaxTrackedDeviceCount, nullptr, 0); + + VRHMDSensorState result; + result.Clear(); + result.timestamp = PR_Now(); + + if (poses[::vr::k_unTrackedDeviceIndex_Hmd].bDeviceIsConnected && + 
poses[::vr::k_unTrackedDeviceIndex_Hmd].bPoseIsValid && + poses[::vr::k_unTrackedDeviceIndex_Hmd].eTrackingResult == ::vr::TrackingResult_Running_OK) + { + const ::vr::TrackedDevicePose_t& pose = poses[::vr::k_unTrackedDeviceIndex_Hmd]; + + gfx::Matrix4x4 m; + // NOTE! mDeviceToAbsoluteTracking is a 3x4 matrix, not 4x4. But + // because of its arrangement, we can copy the 12 elements in and + // then transpose them to the right place. We do this so we can + // pull out a Quaternion. + memcpy(&m._11, &pose.mDeviceToAbsoluteTracking, sizeof(float) * 12); + m.Transpose(); + + gfx::Quaternion rot; + rot.SetFromRotationMatrix(m); + rot.Invert(); + + result.flags |= VRDisplayCapabilityFlags::Cap_Orientation; + result.orientation[0] = rot.x; + result.orientation[1] = rot.y; + result.orientation[2] = rot.z; + result.orientation[3] = rot.w; + result.angularVelocity[0] = pose.vAngularVelocity.v[0]; + result.angularVelocity[1] = pose.vAngularVelocity.v[1]; + result.angularVelocity[2] = pose.vAngularVelocity.v[2]; + + result.flags |= VRDisplayCapabilityFlags::Cap_Position; + result.position[0] = m._41; + result.position[1] = m._42; + result.position[2] = m._43; + result.linearVelocity[0] = pose.vVelocity.v[0]; + result.linearVelocity[1] = pose.vVelocity.v[1]; + result.linearVelocity[2] = pose.vVelocity.v[2]; + } + + return result; +} + +void +VRDisplayOpenVR::StartPresentation() +{ + if (mIsPresenting) { + return; + } + mIsPresenting = true; +} + +void +VRDisplayOpenVR::StopPresentation() +{ + if (!mIsPresenting) { + return; + } + + mVRCompositor->ClearLastSubmittedFrame(); + + mIsPresenting = false; +} + + +#if defined(XP_WIN) + +void +VRDisplayOpenVR::SubmitFrame(TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) +{ + if (!mIsPresenting) { + return; + } + + ::vr::Texture_t tex; + tex.handle = (void *)aSource->GetD3D11Texture(); + tex.eType = ::vr::EGraphicsAPIConvention::API_DirectX; + tex.eColorSpace = ::vr::EColorSpace::ColorSpace_Auto; + + ::vr::VRTextureBounds_t bounds; + bounds.uMin = aLeftEyeRect.x; + bounds.vMin = 1.0 - aLeftEyeRect.y; + bounds.uMax = aLeftEyeRect.x + aLeftEyeRect.width; + bounds.vMax = 1.0 - aLeftEyeRect.y - aLeftEyeRect.height; + + ::vr::EVRCompositorError err; + err = mVRCompositor->Submit(::vr::EVREye::Eye_Left, &tex, &bounds); + if (err != ::vr::EVRCompositorError::VRCompositorError_None) { + printf_stderr("OpenVR Compositor Submit() failed.\n"); + } + + bounds.uMin = aRightEyeRect.x; + bounds.vMin = 1.0 - aRightEyeRect.y; + bounds.uMax = aRightEyeRect.x + aRightEyeRect.width; + bounds.vMax = 1.0 - aRightEyeRect.y - aRightEyeRect.height; + + err = mVRCompositor->Submit(::vr::EVREye::Eye_Right, &tex, &bounds); + if (err != ::vr::EVRCompositorError::VRCompositorError_None) { + printf_stderr("OpenVR Compositor Submit() failed.\n"); + } + + mVRCompositor->PostPresentHandoff(); + + // Trigger the next VSync immediately + VRManager *vm = VRManager::Get(); + MOZ_ASSERT(vm); + vm->NotifyVRVsync(mDisplayInfo.mDisplayID); +} + +#endif + +void +VRDisplayOpenVR::NotifyVSync() +{ + // We update mIsConneced once per frame. 
+ mDisplayInfo.mIsConnected = vr_IsHmdPresent(); +} + +VRDisplayManagerOpenVR::VRDisplayManagerOpenVR() + : mOpenVRInstalled(false) +{ +} + +/*static*/ already_AddRefed<VRDisplayManagerOpenVR> +VRDisplayManagerOpenVR::Create() +{ + MOZ_ASSERT(NS_IsMainThread()); + + if (!gfxPrefs::VREnabled() || !gfxPrefs::VROpenVREnabled()) { + return nullptr; + } + + if (!LoadOpenVRRuntime()) { + return nullptr; + } + + RefPtr<VRDisplayManagerOpenVR> manager = new VRDisplayManagerOpenVR(); + return manager.forget(); +} + +bool +VRDisplayManagerOpenVR::Init() +{ + if (mOpenVRInstalled) + return true; + + if (!vr_IsRuntimeInstalled()) + return false; + + mOpenVRInstalled = true; + return true; +} + +void +VRDisplayManagerOpenVR::Destroy() +{ + if (mOpenVRInstalled) { + if (mOpenVRHMD) { + mOpenVRHMD = nullptr; + } + mOpenVRInstalled = false; + } +} + +void +VRDisplayManagerOpenVR::GetHMDs(nsTArray<RefPtr<VRDisplayHost>>& aHMDResult) +{ + if (!mOpenVRInstalled) { + return; + } + + if (!vr_IsHmdPresent()) { + if (mOpenVRHMD) { + mOpenVRHMD = nullptr; + } + } else if (mOpenVRHMD == nullptr) { + ::vr::HmdError err; + + vr_InitInternal(&err, ::vr::EVRApplicationType::VRApplication_Scene); + if (err) { + return; + } + + ::vr::IVRSystem *system = (::vr::IVRSystem *)vr_GetGenericInterface(::vr::IVRSystem_Version, &err); + if (err || !system) { + vr_ShutdownInternal(); + return; + } + ::vr::IVRChaperone *chaperone = (::vr::IVRChaperone *)vr_GetGenericInterface(::vr::IVRChaperone_Version, &err); + if (err || !chaperone) { + vr_ShutdownInternal(); + return; + } + ::vr::IVRCompositor *compositor = (::vr::IVRCompositor*)vr_GetGenericInterface(::vr::IVRCompositor_Version, &err); + if (err || !compositor) { + vr_ShutdownInternal(); + return; + } + + mOpenVRHMD = new VRDisplayOpenVR(system, chaperone, compositor); + } + + if (mOpenVRHMD) { + aHMDResult.AppendElement(mOpenVRHMD); + } +} + +VRControllerOpenVR::VRControllerOpenVR() + : VRControllerHost(VRDeviceType::OpenVR) +{ + MOZ_COUNT_CTOR_INHERITED(VRControllerOpenVR, VRControllerHost); + mControllerInfo.mControllerName.AssignLiteral("OpenVR HMD"); +#ifdef MOZ_GAMEPAD + mControllerInfo.mMappingType = static_cast<uint32_t>(GamepadMappingType::_empty); +#else + mControllerInfo.mMappingType = 0; +#endif + mControllerInfo.mNumButtons = gNumOpenVRButtonMask; + mControllerInfo.mNumAxes = gNumOpenVRAxis; +} + +VRControllerOpenVR::~VRControllerOpenVR() +{ + MOZ_COUNT_DTOR_INHERITED(VRControllerOpenVR, VRControllerHost); +} + +void +VRControllerOpenVR::SetTrackedIndex(uint32_t aTrackedIndex) +{ + mTrackedIndex = aTrackedIndex; +} + +uint32_t +VRControllerOpenVR::GetTrackedIndex() +{ + return mTrackedIndex; +} + +VRControllerManagerOpenVR::VRControllerManagerOpenVR() + : mOpenVRInstalled(false), mVRSystem(nullptr) +{ +} + +VRControllerManagerOpenVR::~VRControllerManagerOpenVR() +{ + Destroy(); +} + +/*static*/ already_AddRefed<VRControllerManagerOpenVR> +VRControllerManagerOpenVR::Create() +{ + if (!gfxPrefs::VREnabled() || !gfxPrefs::VROpenVREnabled()) { + return nullptr; + } + + RefPtr<VRControllerManagerOpenVR> manager = new VRControllerManagerOpenVR(); + return manager.forget(); +} + +bool +VRControllerManagerOpenVR::Init() +{ + if (mOpenVRInstalled) + return true; + + if (!vr_IsRuntimeInstalled()) + return false; + + // Loading the OpenVR Runtime + vr::EVRInitError err = vr::VRInitError_None; + + vr_InitInternal(&err, vr::VRApplication_Scene); + if (err != vr::VRInitError_None) { + return false; + } + + mVRSystem = (vr::IVRSystem 
*)vr_GetGenericInterface(vr::IVRSystem_Version, &err); + if ((err != vr::VRInitError_None) || !mVRSystem) { + vr_ShutdownInternal(); + return false; + } + + mOpenVRInstalled = true; + return true; +} + +void +VRControllerManagerOpenVR::Destroy() +{ + mOpenVRController.Clear(); + mOpenVRInstalled = false; +} + +void +VRControllerManagerOpenVR::HandleInput() +{ + RefPtr<impl::VRControllerOpenVR> controller; + vr::VRControllerState_t state; + uint32_t axis = 0; + + if (!mOpenVRInstalled) { + return; + } + + MOZ_ASSERT(mVRSystem); + + vr::TrackedDevicePose_t poses[vr::k_unMaxTrackedDeviceCount]; + mVRSystem->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, 0.0f, + poses, vr::k_unMaxTrackedDeviceCount); + // Process OpenVR controller state + for (uint32_t i = 0; i < mOpenVRController.Length(); ++i) { + controller = mOpenVRController[i]; + + MOZ_ASSERT(mVRSystem->GetTrackedDeviceClass(controller->GetTrackedIndex()) + == vr::TrackedDeviceClass_Controller); + + if (mVRSystem->GetControllerState(controller->GetTrackedIndex(), &state)) { + HandleButtonPress(controller->GetIndex(), state.ulButtonPressed); + + axis = static_cast<uint32_t>(VRControllerAxisType::TrackpadXAxis); + HandleAxisMove(controller->GetIndex(), axis, + state.rAxis[gOpenVRAxes[axis]].x); + + axis = static_cast<uint32_t>(VRControllerAxisType::TrackpadYAxis); + HandleAxisMove(controller->GetIndex(), axis, + state.rAxis[gOpenVRAxes[axis]].y); + + axis = static_cast<uint32_t>(VRControllerAxisType::Trigger); + HandleAxisMove(controller->GetIndex(), axis, + state.rAxis[gOpenVRAxes[axis]].x); + } + + // Start to process pose + const ::vr::TrackedDevicePose_t& pose = poses[controller->GetTrackedIndex()]; + + if (pose.bDeviceIsConnected && pose.bPoseIsValid && + pose.eTrackingResult == vr::TrackingResult_Running_OK) { + gfx::Matrix4x4 m; + + // NOTE! mDeviceToAbsoluteTracking is a 3x4 matrix, not 4x4. But + // because of its arrangement, we can copy the 12 elements in and + // then transpose them to the right place. We do this so we can + // pull out a Quaternion. 
+ memcpy(&m.components, &pose.mDeviceToAbsoluteTracking, sizeof(float) * 12); + m.Transpose(); + + gfx::Quaternion rot; + rot.SetFromRotationMatrix(m); + rot.Invert(); + + GamepadPoseState poseState; + poseState.flags |= GamepadCapabilityFlags::Cap_Orientation; + poseState.orientation[0] = rot.x; + poseState.orientation[1] = rot.y; + poseState.orientation[2] = rot.z; + poseState.orientation[3] = rot.w; + poseState.angularVelocity[0] = pose.vAngularVelocity.v[0]; + poseState.angularVelocity[1] = pose.vAngularVelocity.v[1]; + poseState.angularVelocity[2] = pose.vAngularVelocity.v[2]; + + poseState.flags |= GamepadCapabilityFlags::Cap_Position; + poseState.position[0] = m._41; + poseState.position[1] = m._42; + poseState.position[2] = m._43; + poseState.linearVelocity[0] = pose.vVelocity.v[0]; + poseState.linearVelocity[1] = pose.vVelocity.v[1]; + poseState.linearVelocity[2] = pose.vVelocity.v[2]; + HandlePoseTracking(controller->GetIndex(), poseState, controller); + } + } +} + +void +VRControllerManagerOpenVR::HandleButtonPress(uint32_t aControllerIdx, + uint64_t aButtonPressed) +{ + uint64_t buttonMask = 0; + RefPtr<impl::VRControllerOpenVR> controller; + controller = mOpenVRController[aControllerIdx]; + uint64_t diff = (controller->GetButtonPressed() ^ aButtonPressed); + + if (!diff) { + return; + } + + for (uint32_t i = 0; i < gNumOpenVRButtonMask; ++i) { + buttonMask = gOpenVRButtonMask[i]; + + if (diff & buttonMask) { + // diff & aButtonPressed would be true while a new button press + // event, otherwise it is an old press event and needs to notify + // the button has been released. + NewButtonEvent(aControllerIdx, i, diff & aButtonPressed); + } + } + + controller->SetButtonPressed(aButtonPressed); +} + +void +VRControllerManagerOpenVR::HandleAxisMove(uint32_t aControllerIdx, uint32_t aAxis, + float aValue) +{ + if (aValue != 0.0f) { + NewAxisMove(aControllerIdx, aAxis, aValue); + } +} + +void +VRControllerManagerOpenVR::HandlePoseTracking(uint32_t aControllerIdx, + const GamepadPoseState& aPose, + VRControllerHost* aController) +{ + if (aPose != aController->GetPose()) { + aController->SetPose(aPose); + NewPoseState(aControllerIdx, aPose); + } +} + +void +VRControllerManagerOpenVR::GetControllers(nsTArray<RefPtr<VRControllerHost>>& aControllerResult) +{ + if (!mOpenVRInstalled) { + return; + } + + aControllerResult.Clear(); + for (uint32_t i = 0; i < mOpenVRController.Length(); ++i) { + aControllerResult.AppendElement(mOpenVRController[i]); + } +} + +void +VRControllerManagerOpenVR::ScanForDevices() +{ + // Remove the existing gamepads + for (uint32_t i = 0; i < mOpenVRController.Length(); ++i) { + RemoveGamepad(mOpenVRController[i]->GetIndex()); + } + mControllerCount = 0; + mOpenVRController.Clear(); + + if (!mVRSystem) + return; + + // Basically, we would have HMDs in the tracked devices, but we are just interested in the controllers. + for ( vr::TrackedDeviceIndex_t trackedDevice = vr::k_unTrackedDeviceIndex_Hmd + 1; + trackedDevice < vr::k_unMaxTrackedDeviceCount; ++trackedDevice ) { + if (!mVRSystem->IsTrackedDeviceConnected(trackedDevice)) { + continue; + } + + if (mVRSystem->GetTrackedDeviceClass(trackedDevice) != vr::TrackedDeviceClass_Controller) { + continue; + } + + RefPtr<VRControllerOpenVR> openVRController = new VRControllerOpenVR(); + openVRController->SetIndex(mControllerCount); + openVRController->SetTrackedIndex(trackedDevice); + mOpenVRController.AppendElement(openVRController); + +// Only in MOZ_GAMEPAD platform, We add gamepads. 
+#ifdef MOZ_GAMEPAD + // Not already present, add it. + AddGamepad("OpenVR Gamepad", static_cast<uint32_t>(GamepadMappingType::_empty), + gNumOpenVRButtonMask, gNumOpenVRAxis); + ++mControllerCount; +#endif + } +}
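For readers unfamiliar with the button handling above: HandleButtonPress() detects per-button state changes by XOR-ing the previously reported button bitmask with the current one, then tests each known mask against that diff. A minimal standalone sketch of the same technique (hypothetical masks and output, not part of this patch) is:

  #include <cstdint>
  #include <cstdio>

  // Hypothetical per-button masks, analogous to gOpenVRButtonMask in the patch.
  static const uint64_t kButtonMasks[] = { 1ull << 0, 1ull << 1, 1ull << 32 };
  static const uint32_t kNumButtons = 3;

  // Report every button whose state changed between two samples.
  // (diff & newBits) != 0 means the button was just pressed; otherwise it was
  // previously pressed and has now been released.
  void ReportButtonChanges(uint64_t oldBits, uint64_t newBits) {
    uint64_t diff = oldBits ^ newBits;
    if (!diff) {
      return;
    }
    for (uint32_t i = 0; i < kNumButtons; ++i) {
      if (diff & kButtonMasks[i]) {
        bool pressed = (newBits & kButtonMasks[i]) != 0;
        printf("button %u %s\n", i, pressed ? "pressed" : "released");
      }
    }
  }

  int main() {
    ReportButtonChanges(0x0, 0x1);             // button 0 pressed
    ReportButtonChanges(0x1, 0x100000001ull);  // button 2 pressed, button 0 held
    ReportButtonChanges(0x100000001ull, 0x0);  // both released
    return 0;
  }

In the patch itself the same diff test drives NewButtonEvent(): a non-zero (diff & aButtonPressed) marks a new press, zero marks a release.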
\ No newline at end of file diff --git a/gfx/vr/gfxVROpenVR.h b/gfx/vr/gfxVROpenVR.h new file mode 100644 index 000000000..829f88253 --- /dev/null +++ b/gfx/vr/gfxVROpenVR.h @@ -0,0 +1,140 @@ +/* -*- Mode: C++; tab-width: 20; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef GFX_VR_OPENVR_H +#define GFX_VR_OPENVR_H + +#include "nsTArray.h" +#include "nsIScreen.h" +#include "nsCOMPtr.h" +#include "mozilla/RefPtr.h" + +#include "mozilla/gfx/2D.h" +#include "mozilla/EnumeratedArray.h" + +#include "gfxVR.h" + +// OpenVR Interfaces +namespace vr { +class IVRChaperone; +class IVRCompositor; +class IVRSystem; +struct TrackedDevicePose_t; +} + +namespace mozilla { +namespace gfx { +namespace impl { + +class VRDisplayOpenVR : public VRDisplayHost +{ +public: + virtual void NotifyVSync() override; + virtual VRHMDSensorState GetSensorState() override; + virtual VRHMDSensorState GetImmediateSensorState() override; + void ZeroSensor() override; + +protected: + virtual void StartPresentation() override; + virtual void StopPresentation() override; +#if defined(XP_WIN) + virtual void SubmitFrame(mozilla::layers::TextureSourceD3D11* aSource, + const IntSize& aSize, + const VRHMDSensorState& aSensorState, + const gfx::Rect& aLeftEyeRect, + const gfx::Rect& aRightEyeRect) override; +#endif + +public: + explicit VRDisplayOpenVR(::vr::IVRSystem *aVRSystem, + ::vr::IVRChaperone *aVRChaperone, + ::vr::IVRCompositor *aVRCompositor); + +protected: + virtual ~VRDisplayOpenVR(); + void Destroy(); + + VRHMDSensorState GetSensorState(double timeOffset); + + // not owned by us; global from OpenVR + ::vr::IVRSystem *mVRSystem; + ::vr::IVRChaperone *mVRChaperone; + ::vr::IVRCompositor *mVRCompositor; + + bool mIsPresenting; + + void UpdateStageParameters(); +}; + +} // namespace impl + +class VRDisplayManagerOpenVR : public VRDisplayManager +{ +public: + static already_AddRefed<VRDisplayManagerOpenVR> Create(); + + virtual bool Init() override; + virtual void Destroy() override; + virtual void GetHMDs(nsTArray<RefPtr<VRDisplayHost> >& aHMDResult) override; +protected: + VRDisplayManagerOpenVR(); + + // there can only be one + RefPtr<impl::VRDisplayOpenVR> mOpenVRHMD; + bool mOpenVRInstalled; +}; + +namespace impl { + +class VRControllerOpenVR : public VRControllerHost +{ +public: + explicit VRControllerOpenVR(); + void SetTrackedIndex(uint32_t aTrackedIndex); + uint32_t GetTrackedIndex(); + +protected: + virtual ~VRControllerOpenVR(); + + // The index of tracked devices from vr::IVRSystem. 
+ uint32_t mTrackedIndex; +}; + +} // namespace impl + +class VRControllerManagerOpenVR : public VRControllerManager +{ +public: + static already_AddRefed<VRControllerManagerOpenVR> Create(); + + virtual bool Init() override; + virtual void Destroy() override; + virtual void HandleInput() override; + virtual void GetControllers(nsTArray<RefPtr<VRControllerHost>>& + aControllerResult) override; + virtual void ScanForDevices() override; + +private: + VRControllerManagerOpenVR(); + ~VRControllerManagerOpenVR(); + + virtual void HandleButtonPress(uint32_t aControllerIdx, + uint64_t aButtonPressed) override; + virtual void HandleAxisMove(uint32_t aControllerIdx, uint32_t aAxis, + float aValue) override; + virtual void HandlePoseTracking(uint32_t aControllerIdx, + const dom::GamepadPoseState& aPose, + VRControllerHost* aController) override; + + bool mOpenVRInstalled; + nsTArray<RefPtr<impl::VRControllerOpenVR>> mOpenVRController; + vr::IVRSystem *mVRSystem; +}; + +} // namespace gfx +} // namespace mozilla + + +#endif /* GFX_VR_OPENVR_H */ diff --git a/gfx/vr/ipc/PVRLayer.ipdl b/gfx/vr/ipc/PVRLayer.ipdl new file mode 100644 index 000000000..593fccdd4 --- /dev/null +++ b/gfx/vr/ipc/PVRLayer.ipdl @@ -0,0 +1,27 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * vim: sw=2 ts=8 et : + */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +include protocol PVRManager; +include protocol PTexture; + +namespace mozilla { +namespace gfx { + +async protocol PVRLayer +{ + manager PVRManager; + +parent: + async SubmitFrame(PTexture aTexture); + async Destroy(); + +child: + async __delete__(); +}; + +} // gfx +} // mozilla diff --git a/gfx/vr/ipc/PVRManager.ipdl b/gfx/vr/ipc/PVRManager.ipdl new file mode 100644 index 000000000..65f114fba --- /dev/null +++ b/gfx/vr/ipc/PVRManager.ipdl @@ -0,0 +1,86 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * vim: sw=2 ts=8 et : + */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +include LayersSurfaces; +include protocol PLayer; +include protocol PTexture; +include protocol PVRLayer; +include LayersMessages; +include GamepadEventTypes; + +include "VRMessageUtils.h"; + +using struct mozilla::gfx::VRFieldOfView from "gfxVR.h"; +using struct mozilla::gfx::VRDisplayInfo from "gfxVR.h"; +using struct mozilla::gfx::VRSensorUpdate from "gfxVR.h"; +using struct mozilla::gfx::VRHMDSensorState from "gfxVR.h"; +using struct mozilla::gfx::VRControllerInfo from "gfxVR.h"; +using mozilla::layers::LayersBackend from "mozilla/layers/LayersTypes.h"; +using mozilla::layers::TextureFlags from "mozilla/layers/CompositorTypes.h"; + + +namespace mozilla { +namespace gfx { + +/** + * The PVRManager protocol is used to enable communication of VR display + * enumeration and sensor state between the compositor thread and + * content threads/processes. 
+ */ +sync protocol PVRManager +{ + manages PTexture; + manages PVRLayer; + +parent: + async PTexture(SurfaceDescriptor aSharedData, LayersBackend aBackend, + TextureFlags aTextureFlags, uint64_t aSerial); + + async PVRLayer(uint32_t aDisplayID, float aLeftEyeX, float aLeftEyeY, float aLeftEyeWidth, float aLeftEyeHeight, float aRightEyeX, float aRightEyeY, float aRightEyeWidth, float aRightEyeHeight); + + // (Re)Enumerate VR Displays. An updated list of VR displays will be returned + // asynchronously to children via UpdateDisplayInfo. + async RefreshDisplays(); + + // GetDisplays synchronously returns the VR displays that have already been + // enumerated by RefreshDisplays() but does not enumerate new ones. + sync GetDisplays() returns(VRDisplayInfo[] aDisplayInfo); + + // Reset the sensor of the display identified by aDisplayID so that the current + // sensor state is the "Zero" position. + async ResetSensor(uint32_t aDisplayID); + + sync GetSensorState(uint32_t aDisplayID) returns(VRHMDSensorState aState); + sync GetImmediateSensorState(uint32_t aDisplayID) returns(VRHMDSensorState aState); + sync SetHaveEventListener(bool aHaveEventListener); + + async ControllerListenerAdded(); + async ControllerListenerRemoved(); + // GetControllers synchronously returns the VR controllers that have already been + // enumerated by RefreshVRControllers() but does not enumerate new ones. + sync GetControllers() returns(VRControllerInfo[] aControllerInfo); + +child: + + async ParentAsyncMessages(AsyncParentMessageData[] aMessages); + + // Notify children of updated VR display enumeration and details. This will + // be sent to all children when the parent receives RefreshDisplays, even + // if no changes have been detected. This ensures that Promises exposed + // through DOM calls are always resolved. + async UpdateDisplayInfo(VRDisplayInfo[] aDisplayUpdates); + + async NotifyVSync(); + async NotifyVRVSync(uint32_t aDisplayID); + async GamepadUpdate(GamepadChangeEvent aGamepadEvent); + + async __delete__(); + +}; + +} // gfx +} // mozilla diff --git a/gfx/vr/ipc/VRLayerChild.cpp b/gfx/vr/ipc/VRLayerChild.cpp new file mode 100644 index 000000000..cffe9c1f1 --- /dev/null +++ b/gfx/vr/ipc/VRLayerChild.cpp @@ -0,0 +1,86 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "VRLayerChild.h" +#include "GLScreenBuffer.h" +#include "mozilla/layers/TextureClientSharedSurface.h" +#include "SharedSurface.h" // for SharedSurface +#include "SharedSurfaceGL.h" // for SharedSurface +#include "mozilla/layers/LayersMessages.h" // for TimedTexture +#include "nsICanvasRenderingContextInternal.h" +#include "mozilla/dom/HTMLCanvasElement.h" + +namespace mozilla { +namespace gfx { + +VRLayerChild::VRLayerChild(uint32_t aVRDisplayID, VRManagerChild* aVRManagerChild) + : mVRDisplayID(aVRDisplayID) + , mCanvasElement(nullptr) + , mShSurfClient(nullptr) + , mFront(nullptr) +{ + MOZ_COUNT_CTOR(VRLayerChild); +} + +VRLayerChild::~VRLayerChild() +{ + if (mCanvasElement) { + mCanvasElement->StopVRPresentation(); + } + + ClearSurfaces(); + + MOZ_COUNT_DTOR(VRLayerChild); +} + +void +VRLayerChild::Initialize(dom::HTMLCanvasElement* aCanvasElement) +{ + MOZ_ASSERT(aCanvasElement); + mCanvasElement = aCanvasElement; + mCanvasElement->StartVRPresentation(); + + VRManagerChild *vrmc = VRManagerChild::Get(); + vrmc->RunFrameRequestCallbacks(); +} + +void +VRLayerChild::SubmitFrame() +{ + if (!mCanvasElement) { + return; + } + + mShSurfClient = mCanvasElement->GetVRFrame(); + if (!mShSurfClient) { + return; + } + + gl::SharedSurface* surf = mShSurfClient->Surf(); + if (surf->mType == gl::SharedSurfaceType::Basic) { + gfxCriticalError() << "SharedSurfaceType::Basic not supported for WebVR"; + return; + } + + mFront = mShSurfClient; + mShSurfClient = nullptr; + + mFront->SetAddedToCompositableClient(); + VRManagerChild* vrmc = VRManagerChild::Get(); + mFront->SyncWithObject(vrmc->GetSyncObject()); + MOZ_ALWAYS_TRUE(mFront->InitIPDLActor(vrmc)); + + SendSubmitFrame(mFront->GetIPDLActor()); +} + +void +VRLayerChild::ClearSurfaces() +{ + mFront = nullptr; + mShSurfClient = nullptr; +} + +} // namespace gfx +} // namespace mozilla diff --git a/gfx/vr/ipc/VRLayerChild.h b/gfx/vr/ipc/VRLayerChild.h new file mode 100644 index 000000000..df42dddac --- /dev/null +++ b/gfx/vr/ipc/VRLayerChild.h @@ -0,0 +1,53 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_LAYERCHILD_H +#define GFX_VR_LAYERCHILD_H + +#include "VRManagerChild.h" + +#include "mozilla/RefPtr.h" +#include "mozilla/gfx/PVRLayerChild.h" +#include "GLContext.h" +#include "gfxVR.h" + +class nsICanvasRenderingContextInternal; + +namespace mozilla { +class WebGLContext; +namespace dom { +class HTMLCanvasElement; +} +namespace layers { +class SharedSurfaceTextureClient; +} +namespace gl { +class SurfaceFactory; +} +namespace gfx { + +class VRLayerChild : public PVRLayerChild { + NS_INLINE_DECL_REFCOUNTING(VRLayerChild) + +public: + VRLayerChild(uint32_t aVRDisplayID, VRManagerChild* aVRManagerChild); + void Initialize(dom::HTMLCanvasElement* aCanvasElement); + void SubmitFrame(); + +protected: + virtual ~VRLayerChild(); + void ClearSurfaces(); + + uint32_t mVRDisplayID; + + RefPtr<dom::HTMLCanvasElement> mCanvasElement; + RefPtr<layers::SharedSurfaceTextureClient> mShSurfClient; + RefPtr<layers::TextureClient> mFront; +}; + +} // namespace gfx +} // namespace mozilla + +#endif diff --git a/gfx/vr/ipc/VRLayerParent.cpp b/gfx/vr/ipc/VRLayerParent.cpp new file mode 100644 index 000000000..6c6980817 --- /dev/null +++ b/gfx/vr/ipc/VRLayerParent.cpp @@ -0,0 +1,59 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + + +#include "VRLayerParent.h" +#include "mozilla/Unused.h" + +namespace mozilla { +namespace gfx { + +VRLayerParent::VRLayerParent(uint32_t aVRDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect) + : mIPCOpen(true) + , mVRDisplayID(aVRDisplayID) + , mLeftEyeRect(aLeftEyeRect) + , mRightEyeRect(aRightEyeRect) +{ + MOZ_COUNT_CTOR(VRLayerParent); +} + +VRLayerParent::~VRLayerParent() +{ + MOZ_COUNT_DTOR(VRLayerParent); +} + +bool +VRLayerParent::RecvDestroy() +{ + Destroy(); + return true; +} + +void +VRLayerParent::ActorDestroy(ActorDestroyReason aWhy) +{ + mIPCOpen = false; +} + +void +VRLayerParent::Destroy() +{ + if (mIPCOpen) { + Unused << PVRLayerParent::Send__delete__(this); + } +} + +bool +VRLayerParent::RecvSubmitFrame(PTextureParent* texture) +{ + VRManager* vm = VRManager::Get(); + vm->SubmitFrame(this, texture, mLeftEyeRect, mRightEyeRect); + + return true; +} + + +} // namespace gfx +} // namespace mozilla diff --git a/gfx/vr/ipc/VRLayerParent.h b/gfx/vr/ipc/VRLayerParent.h new file mode 100644 index 000000000..bd69c9546 --- /dev/null +++ b/gfx/vr/ipc/VRLayerParent.h @@ -0,0 +1,43 @@ +/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef GFX_VR_LAYERPARENT_H +#define GFX_VR_LAYERPARENT_H + +#include "VRManager.h" + +#include "mozilla/RefPtr.h" +#include "mozilla/gfx/PVRLayerParent.h" +#include "gfxVR.h" + +namespace mozilla { +namespace gfx { + +class VRLayerParent : public PVRLayerParent { + NS_INLINE_DECL_REFCOUNTING(VRLayerParent) + +public: + VRLayerParent(uint32_t aVRDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect); + virtual bool RecvSubmitFrame(PTextureParent* texture) override; + virtual bool RecvDestroy() override; + uint32_t GetDisplayID() const { return mVRDisplayID; } +protected: + virtual void ActorDestroy(ActorDestroyReason aWhy) override; + + virtual ~VRLayerParent(); + void Destroy(); + + bool mIPCOpen; + + uint32_t mVRDisplayID; + gfx::IntSize mSize; + gfx::Rect mLeftEyeRect; + gfx::Rect mRightEyeRect; +}; + +} // namespace gfx +} // namespace mozilla + +#endif diff --git a/gfx/vr/ipc/VRManagerChild.cpp b/gfx/vr/ipc/VRManagerChild.cpp new file mode 100644 index 000000000..70ced86c3 --- /dev/null +++ b/gfx/vr/ipc/VRManagerChild.cpp @@ -0,0 +1,593 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * vim: sw=2 ts=8 et : + */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#include "VRManagerChild.h" +#include "VRManagerParent.h" +#include "VRDisplayClient.h" +#include "nsGlobalWindow.h" +#include "mozilla/StaticPtr.h" +#include "mozilla/layers/CompositorThread.h" // for CompositorThread +#include "mozilla/dom/Navigator.h" +#include "mozilla/dom/VREventObserver.h" +#include "mozilla/dom/WindowBinding.h" // for FrameRequestCallback +#include "mozilla/dom/ContentChild.h" +#include "mozilla/layers/TextureClient.h" +#include "nsContentUtils.h" + +#ifdef MOZ_GAMEPAD +#include "mozilla/dom/GamepadManager.h" +#endif + +using layers::TextureClient; + +namespace { +const nsTArray<RefPtr<dom::VREventObserver>>::index_type kNoIndex = + nsTArray<RefPtr<dom::VREventObserver> >::NoIndex; +} // namespace + +namespace mozilla { +namespace gfx { + +static StaticRefPtr<VRManagerChild> sVRManagerChildSingleton; +static StaticRefPtr<VRManagerParent> sVRManagerParentSingleton; + +void ReleaseVRManagerParentSingleton() { + sVRManagerParentSingleton = nullptr; +} + +VRManagerChild::VRManagerChild() + : TextureForwarder() + , mDisplaysInitialized(false) + , mInputFrameID(-1) + , mMessageLoop(MessageLoop::current()) + , mFrameRequestCallbackCounter(0) + , mBackend(layers::LayersBackend::LAYERS_NONE) +{ + MOZ_COUNT_CTOR(VRManagerChild); + MOZ_ASSERT(NS_IsMainThread()); + + mStartTimeStamp = TimeStamp::Now(); +} + +VRManagerChild::~VRManagerChild() +{ + MOZ_ASSERT(NS_IsMainThread()); + MOZ_COUNT_DTOR(VRManagerChild); +} + +/*static*/ void +VRManagerChild::IdentifyTextureHost(const TextureFactoryIdentifier& aIdentifier) +{ + if (sVRManagerChildSingleton) { + sVRManagerChildSingleton->mBackend = aIdentifier.mParentBackend; + sVRManagerChildSingleton->mSyncObject = SyncObject::CreateSyncObject(aIdentifier.mSyncHandle); + } +} + +layers::LayersBackend +VRManagerChild::GetBackendType() const +{ + return mBackend; +} + +/*static*/ VRManagerChild* +VRManagerChild::Get() +{ + MOZ_ASSERT(sVRManagerChildSingleton); + return sVRManagerChildSingleton; +} + +/* static */ bool +VRManagerChild::IsCreated() +{ + return !!sVRManagerChildSingleton; +} + +/* static */ bool +VRManagerChild::InitForContent(Endpoint<PVRManagerChild>&& 
aEndpoint) +{ + MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(!sVRManagerChildSingleton); + + RefPtr<VRManagerChild> child(new VRManagerChild()); + if (!aEndpoint.Bind(child)) { + NS_RUNTIMEABORT("Couldn't Open() Compositor channel."); + return false; + } + sVRManagerChildSingleton = child; + return true; +} + +/* static */ bool +VRManagerChild::ReinitForContent(Endpoint<PVRManagerChild>&& aEndpoint) +{ + MOZ_ASSERT(NS_IsMainThread()); + + ShutDown(); + + return InitForContent(Move(aEndpoint)); +} + +/*static*/ void +VRManagerChild::InitSameProcess() +{ + MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(!sVRManagerChildSingleton); + + sVRManagerChildSingleton = new VRManagerChild(); + sVRManagerParentSingleton = VRManagerParent::CreateSameProcess(); + sVRManagerChildSingleton->Open(sVRManagerParentSingleton->GetIPCChannel(), + mozilla::layers::CompositorThreadHolder::Loop(), + mozilla::ipc::ChildSide); +} + +/* static */ void +VRManagerChild::InitWithGPUProcess(Endpoint<PVRManagerChild>&& aEndpoint) +{ + MOZ_ASSERT(NS_IsMainThread()); + MOZ_ASSERT(!sVRManagerChildSingleton); + + sVRManagerChildSingleton = new VRManagerChild(); + if (!aEndpoint.Bind(sVRManagerChildSingleton)) { + NS_RUNTIMEABORT("Couldn't Open() Compositor channel."); + return; + } +} + +/*static*/ void +VRManagerChild::ShutDown() +{ + MOZ_ASSERT(NS_IsMainThread()); + if (sVRManagerChildSingleton) { + sVRManagerChildSingleton->Destroy(); + sVRManagerChildSingleton = nullptr; + } +} + +/*static*/ void +VRManagerChild::DeferredDestroy(RefPtr<VRManagerChild> aVRManagerChild) +{ + aVRManagerChild->Close(); +} + +void +VRManagerChild::Destroy() +{ + mTexturesWaitingRecycled.Clear(); + + // Keep ourselves alive until everything has been shut down + RefPtr<VRManagerChild> selfRef = this; + + // The DeferredDestroyVRManager task takes ownership of + // the VRManagerChild and will release it when it runs. + MessageLoop::current()->PostTask( + NewRunnableFunction(DeferredDestroy, selfRef)); +} + +layers::PTextureChild* +VRManagerChild::AllocPTextureChild(const SurfaceDescriptor&, + const LayersBackend&, + const TextureFlags&, + const uint64_t&) +{ + return TextureClient::CreateIPDLActor(); +} + +bool +VRManagerChild::DeallocPTextureChild(PTextureChild* actor) +{ + return TextureClient::DestroyIPDLActor(actor); +} + +PVRLayerChild* +VRManagerChild::AllocPVRLayerChild(const uint32_t& aDisplayID, + const float& aLeftEyeX, + const float& aLeftEyeY, + const float& aLeftEyeWidth, + const float& aLeftEyeHeight, + const float& aRightEyeX, + const float& aRightEyeY, + const float& aRightEyeWidth, + const float& aRightEyeHeight) +{ + RefPtr<VRLayerChild> layer = new VRLayerChild(aDisplayID, this); + return layer.forget().take(); +} + +bool +VRManagerChild::DeallocPVRLayerChild(PVRLayerChild* actor) +{ + delete actor; + return true; +} + +void +VRManagerChild::UpdateDisplayInfo(nsTArray<VRDisplayInfo>& aDisplayUpdates) +{ + bool bDisplayConnected = false; + bool bDisplayDisconnected = false; + + // Check if any displays have been disconnected + for (auto& display : mDisplays) { + bool found = false; + for (auto& displayUpdate : aDisplayUpdates) { + if (display->GetDisplayInfo().GetDisplayID() == displayUpdate.GetDisplayID()) { + found = true; + break; + } + } + if (!found) { + display->NotifyDisconnected(); + bDisplayDisconnected = true; + } + } + + // mDisplays could be a hashed container for more scalability, but not worth + // it now as we expect < 10 entries. 
+ nsTArray<RefPtr<VRDisplayClient>> displays; + for (VRDisplayInfo& displayUpdate : aDisplayUpdates) { + bool isNewDisplay = true; + for (auto& display : mDisplays) { + const VRDisplayInfo& prevInfo = display->GetDisplayInfo(); + if (prevInfo.GetDisplayID() == displayUpdate.GetDisplayID()) { + if (displayUpdate.GetIsConnected() && !prevInfo.GetIsConnected()) { + bDisplayConnected = true; + } + if (!displayUpdate.GetIsConnected() && prevInfo.GetIsConnected()) { + bDisplayDisconnected = true; + } + display->UpdateDisplayInfo(displayUpdate); + displays.AppendElement(display); + isNewDisplay = false; + break; + } + } + if (isNewDisplay) { + displays.AppendElement(new VRDisplayClient(displayUpdate)); + bDisplayConnected = true; + } + } + + mDisplays = displays; + + if (bDisplayConnected) { + FireDOMVRDisplayConnectEvent(); + } + if (bDisplayDisconnected) { + FireDOMVRDisplayDisconnectEvent(); + } + + mDisplaysInitialized = true; +} + +bool +VRManagerChild::RecvUpdateDisplayInfo(nsTArray<VRDisplayInfo>&& aDisplayUpdates) +{ + UpdateDisplayInfo(aDisplayUpdates); + for (auto& windowId : mNavigatorCallbacks) { + /** We must call NotifyVRDisplaysUpdated for every + * window's Navigator in mNavigatorCallbacks to ensure that + * the promise returned by Navigator.GetVRDevices + * can resolve. This must happen even if no changes + * to VRDisplays have been detected here. + */ + nsGlobalWindow* window = nsGlobalWindow::GetInnerWindowWithId(windowId); + if (!window) { + continue; + } + ErrorResult result; + dom::Navigator* nav = window->GetNavigator(result); + if (NS_WARN_IF(result.Failed())) { + continue; + } + nav->NotifyVRDisplaysUpdated(); + } + mNavigatorCallbacks.Clear(); + return true; +} + +bool +VRManagerChild::GetVRDisplays(nsTArray<RefPtr<VRDisplayClient>>& aDisplays) +{ + if (!mDisplaysInitialized) { + /** + * If we haven't received any asynchronous callback after requesting + * display enumeration with RefreshDisplays, get the existing displays + * that have already been enumerated by other VRManagerChild instances. 
+ */ + nsTArray<VRDisplayInfo> displays; + Unused << SendGetDisplays(&displays); + UpdateDisplayInfo(displays); + } + aDisplays = mDisplays; + return true; +} + +bool +VRManagerChild::RefreshVRDisplaysWithCallback(uint64_t aWindowId) +{ + bool success = SendRefreshDisplays(); + if (success) { + mNavigatorCallbacks.AppendElement(aWindowId); + } + return success; +} + +int +VRManagerChild::GetInputFrameID() +{ + return mInputFrameID; +} + +bool +VRManagerChild::RecvParentAsyncMessages(InfallibleTArray<AsyncParentMessageData>&& aMessages) +{ + for (InfallibleTArray<AsyncParentMessageData>::index_type i = 0; i < aMessages.Length(); ++i) { + const AsyncParentMessageData& message = aMessages[i]; + + switch (message.type()) { + case AsyncParentMessageData::TOpNotifyNotUsed: { + const OpNotifyNotUsed& op = message.get_OpNotifyNotUsed(); + NotifyNotUsed(op.TextureId(), op.fwdTransactionId()); + break; + } + default: + NS_ERROR("unknown AsyncParentMessageData type"); + return false; + } + } + return true; +} + +PTextureChild* +VRManagerChild::CreateTexture(const SurfaceDescriptor& aSharedData, + LayersBackend aLayersBackend, + TextureFlags aFlags, + uint64_t aSerial) +{ + return SendPTextureConstructor(aSharedData, aLayersBackend, aFlags, aSerial); +} + +void +VRManagerChild::CancelWaitForRecycle(uint64_t aTextureId) +{ + RefPtr<TextureClient> client = mTexturesWaitingRecycled.Get(aTextureId); + if (!client) { + return; + } + mTexturesWaitingRecycled.Remove(aTextureId); +} + +void +VRManagerChild::NotifyNotUsed(uint64_t aTextureId, uint64_t aFwdTransactionId) +{ + RefPtr<TextureClient> client = mTexturesWaitingRecycled.Get(aTextureId); + if (!client) { + return; + } + mTexturesWaitingRecycled.Remove(aTextureId); +} + +bool +VRManagerChild::AllocShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) +{ + return PVRManagerChild::AllocShmem(aSize, aType, aShmem); +} + +bool +VRManagerChild::AllocUnsafeShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) +{ + return PVRManagerChild::AllocUnsafeShmem(aSize, aType, aShmem); +} + +bool +VRManagerChild::DeallocShmem(ipc::Shmem& aShmem) +{ + return PVRManagerChild::DeallocShmem(aShmem); +} + +PVRLayerChild* +VRManagerChild::CreateVRLayer(uint32_t aDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect) +{ + return SendPVRLayerConstructor(aDisplayID, + aLeftEyeRect.x, aLeftEyeRect.y, aLeftEyeRect.width, aLeftEyeRect.height, + aRightEyeRect.x, aRightEyeRect.y, aRightEyeRect.width, aRightEyeRect.height); +} + + +// XXX TODO - VRManagerChild::FrameRequest is the same as nsIDocument::FrameRequest, should we consolodate these? 
+struct VRManagerChild::FrameRequest +{ + FrameRequest(mozilla::dom::FrameRequestCallback& aCallback, + int32_t aHandle) : + mCallback(&aCallback), + mHandle(aHandle) + {} + + // Conversion operator so that we can append these to a + // FrameRequestCallbackList + operator const RefPtr<mozilla::dom::FrameRequestCallback>& () const { + return mCallback; + } + + // Comparator operators to allow RemoveElementSorted with an + // integer argument on arrays of FrameRequest + bool operator==(int32_t aHandle) const { + return mHandle == aHandle; + } + bool operator<(int32_t aHandle) const { + return mHandle < aHandle; + } + + RefPtr<mozilla::dom::FrameRequestCallback> mCallback; + int32_t mHandle; +}; + +nsresult +VRManagerChild::ScheduleFrameRequestCallback(mozilla::dom::FrameRequestCallback& aCallback, + int32_t *aHandle) +{ + if (mFrameRequestCallbackCounter == INT32_MAX) { + // Can't increment without overflowing; bail out + return NS_ERROR_NOT_AVAILABLE; + } + int32_t newHandle = ++mFrameRequestCallbackCounter; + + DebugOnly<FrameRequest*> request = + mFrameRequestCallbacks.AppendElement(FrameRequest(aCallback, newHandle)); + NS_ASSERTION(request, "This is supposed to be infallible!"); + + *aHandle = newHandle; + return NS_OK; +} + +void +VRManagerChild::CancelFrameRequestCallback(int32_t aHandle) +{ + // mFrameRequestCallbacks is stored sorted by handle + mFrameRequestCallbacks.RemoveElementSorted(aHandle); +} + +bool +VRManagerChild::RecvNotifyVSync() +{ + for (auto& display : mDisplays) { + display->NotifyVsync(); + } + + return true; +} + +bool +VRManagerChild::RecvNotifyVRVSync(const uint32_t& aDisplayID) +{ + for (auto& display : mDisplays) { + if (display->GetDisplayInfo().GetDisplayID() == aDisplayID) { + display->NotifyVRVsync(); + } + } + + return true; +} + +bool +VRManagerChild::RecvGamepadUpdate(const GamepadChangeEvent& aGamepadEvent) +{ +#ifdef MOZ_GAMEPAD + // VRManagerChild could be at other processes, but GamepadManager + // only exists at the content process or the same process + // in non-e10s mode. 
+ MOZ_ASSERT(XRE_IsContentProcess() || IsSameProcess()); + + RefPtr<GamepadManager> gamepadManager(GamepadManager::GetService()); + if (gamepadManager) { + gamepadManager->Update(aGamepadEvent); + } +#endif + + return true; +} + +void +VRManagerChild::RunFrameRequestCallbacks() +{ + TimeStamp nowTime = TimeStamp::Now(); + mozilla::TimeDuration duration = nowTime - mStartTimeStamp; + DOMHighResTimeStamp timeStamp = duration.ToMilliseconds(); + + + nsTArray<FrameRequest> callbacks; + callbacks.AppendElements(mFrameRequestCallbacks); + mFrameRequestCallbacks.Clear(); + for (auto& callback : callbacks) { + callback.mCallback->Call(timeStamp); + } +} + +void +VRManagerChild::FireDOMVRDisplayConnectEvent() +{ + nsContentUtils::AddScriptRunner(NewRunnableMethod(this, + &VRManagerChild::FireDOMVRDisplayConnectEventInternal)); +} + +void +VRManagerChild::FireDOMVRDisplayDisconnectEvent() +{ + nsContentUtils::AddScriptRunner(NewRunnableMethod(this, + &VRManagerChild::FireDOMVRDisplayDisconnectEventInternal)); +} + +void +VRManagerChild::FireDOMVRDisplayPresentChangeEvent() +{ + nsContentUtils::AddScriptRunner(NewRunnableMethod(this, + &VRManagerChild::FireDOMVRDisplayPresentChangeEventInternal)); +} + +void +VRManagerChild::FireDOMVRDisplayConnectEventInternal() +{ + for (auto& listener : mListeners) { + listener->NotifyVRDisplayConnect(); + } +} + +void +VRManagerChild::FireDOMVRDisplayDisconnectEventInternal() +{ + for (auto& listener : mListeners) { + listener->NotifyVRDisplayDisconnect(); + } +} + +void +VRManagerChild::FireDOMVRDisplayPresentChangeEventInternal() +{ + for (auto& listener : mListeners) { + listener->NotifyVRDisplayPresentChange(); + } +} + +void +VRManagerChild::AddListener(dom::VREventObserver* aObserver) +{ + MOZ_ASSERT(aObserver); + + if (mListeners.IndexOf(aObserver) != kNoIndex) { + return; // already exists + } + + mListeners.AppendElement(aObserver); + if (mListeners.Length() == 1) { + Unused << SendSetHaveEventListener(true); + } +} + +void +VRManagerChild::RemoveListener(dom::VREventObserver* aObserver) +{ + MOZ_ASSERT(aObserver); + + mListeners.RemoveElement(aObserver); + if (mListeners.IsEmpty()) { + Unused << SendSetHaveEventListener(false); + } +} + +void +VRManagerChild::HandleFatalError(const char* aName, const char* aMsg) const +{ + dom::ContentChild::FatalErrorIfNotUsingGPUProcess(aName, aMsg, OtherPid()); +} + +} // namespace gfx +} // namespace mozilla diff --git a/gfx/vr/ipc/VRManagerChild.h b/gfx/vr/ipc/VRManagerChild.h new file mode 100644 index 000000000..c898cd2f8 --- /dev/null +++ b/gfx/vr/ipc/VRManagerChild.h @@ -0,0 +1,185 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * vim: sw=2 ts=8 et : + */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef MOZILLA_GFX_VR_VRMANAGERCHILD_H +#define MOZILLA_GFX_VR_VRMANAGERCHILD_H + +#include "mozilla/gfx/PVRManagerChild.h" +#include "mozilla/ipc/SharedMemory.h" // for SharedMemory, etc +#include "ThreadSafeRefcountingWithMainThreadDestruction.h" +#include "mozilla/layers/ISurfaceAllocator.h" // for ISurfaceAllocator +#include "mozilla/layers/LayersTypes.h" // for LayersBackend +#include "mozilla/layers/TextureForwarder.h" + +namespace mozilla { +namespace dom { +class GamepadManager; +class Navigator; +class VRDisplay; +class VREventObserver; +} // namespace dom +namespace layers { +class PCompositableChild; +class TextureClient; +} +namespace gfx { +class VRLayerChild; +class VRDisplayClient; + +class VRManagerChild : public PVRManagerChild + , public layers::TextureForwarder + , public layers::KnowsCompositor +{ +public: + NS_INLINE_DECL_THREADSAFE_REFCOUNTING(VRManagerChild, override); + + TextureForwarder* GetTextureForwarder() override { return this; } + LayersIPCActor* GetLayersIPCActor() override { return this; } + + static VRManagerChild* Get(); + + // Indicate that an observer wants to receive VR events. + void AddListener(dom::VREventObserver* aObserver); + // Indicate that an observer should no longer receive VR events. + void RemoveListener(dom::VREventObserver* aObserver); + + int GetInputFrameID(); + bool GetVRDisplays(nsTArray<RefPtr<VRDisplayClient> >& aDisplays); + bool RefreshVRDisplaysWithCallback(uint64_t aWindowId); + + static void InitSameProcess(); + static void InitWithGPUProcess(Endpoint<PVRManagerChild>&& aEndpoint); + static bool InitForContent(Endpoint<PVRManagerChild>&& aEndpoint); + static bool ReinitForContent(Endpoint<PVRManagerChild>&& aEndpoint); + static void ShutDown(); + + static bool IsCreated(); + + virtual PTextureChild* CreateTexture(const SurfaceDescriptor& aSharedData, + layers::LayersBackend aLayersBackend, + TextureFlags aFlags, + uint64_t aSerial) override; + virtual void CancelWaitForRecycle(uint64_t aTextureId) override; + + PVRLayerChild* CreateVRLayer(uint32_t aDisplayID, const Rect& aLeftEyeRect, const Rect& aRightEyeRect); + + static void IdentifyTextureHost(const layers::TextureFactoryIdentifier& aIdentifier); + layers::LayersBackend GetBackendType() const; + layers::SyncObject* GetSyncObject() { return mSyncObject; } + + virtual MessageLoop* GetMessageLoop() const override { return mMessageLoop; } + virtual base::ProcessId GetParentPid() const override { return OtherPid(); } + + nsresult ScheduleFrameRequestCallback(mozilla::dom::FrameRequestCallback& aCallback, + int32_t *aHandle); + void CancelFrameRequestCallback(int32_t aHandle); + void RunFrameRequestCallbacks(); + + void UpdateDisplayInfo(nsTArray<VRDisplayInfo>& aDisplayUpdates); + void FireDOMVRDisplayConnectEvent(); + void FireDOMVRDisplayDisconnectEvent(); + void FireDOMVRDisplayPresentChangeEvent(); + + virtual void HandleFatalError(const char* aName, const char* aMsg) const override; + +protected: + explicit VRManagerChild(); + ~VRManagerChild(); + void Destroy(); + static void DeferredDestroy(RefPtr<VRManagerChild> aVRManagerChild); + + virtual PTextureChild* AllocPTextureChild(const SurfaceDescriptor& aSharedData, + const layers::LayersBackend& aLayersBackend, + const TextureFlags& aFlags, + const uint64_t& aSerial) override; + virtual bool DeallocPTextureChild(PTextureChild* actor) override; + + virtual PVRLayerChild* AllocPVRLayerChild(const uint32_t& aDisplayID, + const float& aLeftEyeX, + const float& aLeftEyeY, + const float& aLeftEyeWidth, + const float& 
aLeftEyeHeight, + const float& aRightEyeX, + const float& aRightEyeY, + const float& aRightEyeWidth, + const float& aRightEyeHeight) override; + virtual bool DeallocPVRLayerChild(PVRLayerChild* actor) override; + + virtual bool RecvUpdateDisplayInfo(nsTArray<VRDisplayInfo>&& aDisplayUpdates) override; + + virtual bool RecvParentAsyncMessages(InfallibleTArray<AsyncParentMessageData>&& aMessages) override; + + virtual bool RecvNotifyVSync() override; + virtual bool RecvNotifyVRVSync(const uint32_t& aDisplayID) override; + virtual bool RecvGamepadUpdate(const GamepadChangeEvent& aGamepadEvent) override; + + // ShmemAllocator + + virtual bool AllocShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) override; + + virtual bool AllocUnsafeShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) override; + + virtual bool DeallocShmem(ipc::Shmem& aShmem) override; + + virtual bool IsSameProcess() const override + { + return OtherPid() == base::GetCurrentProcId(); + } + + friend class layers::CompositorBridgeChild; + +private: + + void FireDOMVRDisplayConnectEventInternal(); + void FireDOMVRDisplayDisconnectEventInternal(); + void FireDOMVRDisplayPresentChangeEventInternal(); + /** + * Notify id of Texture When host side end its use. Transaction id is used to + * make sure if there is no newer usage. + */ + void NotifyNotUsed(uint64_t aTextureId, uint64_t aFwdTransactionId); + + nsTArray<RefPtr<VRDisplayClient> > mDisplays; + bool mDisplaysInitialized; + nsTArray<uint64_t> mNavigatorCallbacks; + + int32_t mInputFrameID; + + MessageLoop* mMessageLoop; + + struct FrameRequest; + + nsTArray<FrameRequest> mFrameRequestCallbacks; + /** + * The current frame request callback handle + */ + int32_t mFrameRequestCallbackCounter; + mozilla::TimeStamp mStartTimeStamp; + + // Array of Weak pointers, instance is owned by nsGlobalWindow::mVREventObserver. + nsTArray<dom::VREventObserver*> mListeners; + + /** + * Hold TextureClients refs until end of their usages on host side. + * It defer calling of TextureClient recycle callback. + */ + nsDataHashtable<nsUint64HashKey, RefPtr<layers::TextureClient> > mTexturesWaitingRecycled; + + layers::LayersBackend mBackend; + RefPtr<layers::SyncObject> mSyncObject; + + DISALLOW_COPY_AND_ASSIGN(VRManagerChild); +}; + +} // namespace mozilla +} // namespace gfx + +#endif // MOZILLA_GFX_VR_VRMANAGERCHILD_H diff --git a/gfx/vr/ipc/VRManagerParent.cpp b/gfx/vr/ipc/VRManagerParent.cpp new file mode 100644 index 000000000..725d7dd1d --- /dev/null +++ b/gfx/vr/ipc/VRManagerParent.cpp @@ -0,0 +1,332 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * vim: sw=2 ts=8 et : + */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#include "VRManagerParent.h" +#include "ipc/VRLayerParent.h" +#include "mozilla/gfx/PVRManagerParent.h" +#include "mozilla/ipc/ProtocolTypes.h" +#include "mozilla/ipc/ProtocolUtils.h" // for IToplevelProtocol +#include "mozilla/TimeStamp.h" // for TimeStamp +#include "mozilla/layers/CompositorThread.h" +#include "mozilla/Unused.h" +#include "VRManager.h" + +namespace mozilla { +using namespace layers; +namespace gfx { + +VRManagerParent::VRManagerParent(ProcessId aChildProcessId, bool aIsContentChild) + : HostIPCAllocator() + , mHaveEventListener(false) + , mIsContentChild(aIsContentChild) +{ + MOZ_COUNT_CTOR(VRManagerParent); + MOZ_ASSERT(NS_IsMainThread()); + + SetOtherProcessId(aChildProcessId); +} + +VRManagerParent::~VRManagerParent() +{ + MOZ_ASSERT(!mVRManagerHolder); + + MOZ_COUNT_DTOR(VRManagerParent); +} + +PTextureParent* +VRManagerParent::AllocPTextureParent(const SurfaceDescriptor& aSharedData, + const LayersBackend& aLayersBackend, + const TextureFlags& aFlags, + const uint64_t& aSerial) +{ + return layers::TextureHost::CreateIPDLActor(this, aSharedData, aLayersBackend, aFlags, aSerial); +} + +bool +VRManagerParent::DeallocPTextureParent(PTextureParent* actor) +{ + return layers::TextureHost::DestroyIPDLActor(actor); +} + +PVRLayerParent* +VRManagerParent::AllocPVRLayerParent(const uint32_t& aDisplayID, + const float& aLeftEyeX, + const float& aLeftEyeY, + const float& aLeftEyeWidth, + const float& aLeftEyeHeight, + const float& aRightEyeX, + const float& aRightEyeY, + const float& aRightEyeWidth, + const float& aRightEyeHeight) +{ + RefPtr<VRLayerParent> layer; + layer = new VRLayerParent(aDisplayID, + Rect(aLeftEyeX, aLeftEyeY, aLeftEyeWidth, aLeftEyeHeight), + Rect(aRightEyeX, aRightEyeY, aRightEyeWidth, aRightEyeHeight)); + VRManager* vm = VRManager::Get(); + RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID); + if (display) { + display->AddLayer(layer); + } + return layer.forget().take(); +} + +bool +VRManagerParent::DeallocPVRLayerParent(PVRLayerParent* actor) +{ + gfx::VRLayerParent* layer = static_cast<gfx::VRLayerParent*>(actor); + + VRManager* vm = VRManager::Get(); + RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(layer->GetDisplayID()); + if (display) { + display->RemoveLayer(layer); + } + + delete actor; + return true; +} + +bool +VRManagerParent::AllocShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) +{ + return PVRManagerParent::AllocShmem(aSize, aType, aShmem); +} + +bool +VRManagerParent::AllocUnsafeShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) +{ + return PVRManagerParent::AllocUnsafeShmem(aSize, aType, aShmem); +} + +void +VRManagerParent::DeallocShmem(ipc::Shmem& aShmem) +{ + PVRManagerParent::DeallocShmem(aShmem); +} + +bool +VRManagerParent::IsSameProcess() const +{ + return OtherPid() == base::GetCurrentProcId(); +} + +void +VRManagerParent::NotifyNotUsed(PTextureParent* aTexture, uint64_t aTransactionId) +{ + MOZ_ASSERT_UNREACHABLE("unexpected to be called"); +} + +void +VRManagerParent::SendAsyncMessage(const InfallibleTArray<AsyncParentMessageData>& aMessage) +{ + MOZ_ASSERT_UNREACHABLE("unexpected to be called"); +} + +base::ProcessId +VRManagerParent::GetChildProcessId() +{ + return OtherPid(); +} + +void +VRManagerParent::RegisterWithManager() +{ + VRManager* vm = VRManager::Get(); + vm->AddVRManagerParent(this); + mVRManagerHolder = vm; +} + +void +VRManagerParent::UnregisterFromManager() +{ + VRManager* vm = VRManager::Get(); + 
vm->RemoveVRManagerParent(this); + mVRManagerHolder = nullptr; +} + +/* static */ bool +VRManagerParent::CreateForContent(Endpoint<PVRManagerParent>&& aEndpoint) +{ + MessageLoop* loop = layers::CompositorThreadHolder::Loop(); + + RefPtr<VRManagerParent> vmp = new VRManagerParent(aEndpoint.OtherPid(), true); + loop->PostTask(NewRunnableMethod<Endpoint<PVRManagerParent>&&>( + vmp, &VRManagerParent::Bind, Move(aEndpoint))); + + return true; +} + +void +VRManagerParent::Bind(Endpoint<PVRManagerParent>&& aEndpoint) +{ + if (!aEndpoint.Bind(this)) { + return; + } + mSelfRef = this; + + RegisterWithManager(); +} + +/*static*/ void +VRManagerParent::RegisterVRManagerInCompositorThread(VRManagerParent* aVRManager) +{ + aVRManager->RegisterWithManager(); +} + +/*static*/ VRManagerParent* +VRManagerParent::CreateSameProcess() +{ + MessageLoop* loop = mozilla::layers::CompositorThreadHolder::Loop(); + RefPtr<VRManagerParent> vmp = new VRManagerParent(base::GetCurrentProcId(), false); + vmp->mCompositorThreadHolder = layers::CompositorThreadHolder::GetSingleton(); + vmp->mSelfRef = vmp; + loop->PostTask(NewRunnableFunction(RegisterVRManagerInCompositorThread, vmp.get())); + return vmp.get(); +} + +bool +VRManagerParent::CreateForGPUProcess(Endpoint<PVRManagerParent>&& aEndpoint) +{ + MessageLoop* loop = mozilla::layers::CompositorThreadHolder::Loop(); + + RefPtr<VRManagerParent> vmp = new VRManagerParent(aEndpoint.OtherPid(), false); + vmp->mCompositorThreadHolder = layers::CompositorThreadHolder::GetSingleton(); + loop->PostTask(NewRunnableMethod<Endpoint<PVRManagerParent>&&>( + vmp, &VRManagerParent::Bind, Move(aEndpoint))); + return true; +} + +void +VRManagerParent::DeferredDestroy() +{ + mCompositorThreadHolder = nullptr; + mSelfRef = nullptr; +} + +void +VRManagerParent::ActorDestroy(ActorDestroyReason why) +{ + UnregisterFromManager(); + MessageLoop::current()->PostTask(NewRunnableMethod(this, &VRManagerParent::DeferredDestroy)); +} + +void +VRManagerParent::OnChannelConnected(int32_t aPid) +{ + mCompositorThreadHolder = layers::CompositorThreadHolder::GetSingleton(); +} + +bool +VRManagerParent::RecvRefreshDisplays() +{ + // This is called to refresh the VR Displays for Navigator.GetVRDevices(). + // We must pass "true" to VRManager::RefreshVRDisplays() + // to ensure that the promise returned by Navigator.GetVRDevices + // can resolve even if there are no changes to the VR Displays. 
+ VRManager* vm = VRManager::Get(); + vm->RefreshVRDisplays(true); + + return true; +} + +bool +VRManagerParent::RecvGetDisplays(nsTArray<VRDisplayInfo> *aDisplays) +{ + VRManager* vm = VRManager::Get(); + vm->GetVRDisplayInfo(*aDisplays); + return true; +} + +bool +VRManagerParent::RecvResetSensor(const uint32_t& aDisplayID) +{ + VRManager* vm = VRManager::Get(); + RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID); + if (display != nullptr) { + display->ZeroSensor(); + } + + return true; +} + +bool +VRManagerParent::RecvGetSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState) +{ + VRManager* vm = VRManager::Get(); + RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID); + if (display != nullptr) { + *aState = display->GetSensorState(); + } + return true; +} + +bool +VRManagerParent::RecvGetImmediateSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState) +{ + VRManager* vm = VRManager::Get(); + RefPtr<gfx::VRDisplayHost> display = vm->GetDisplay(aDisplayID); + if (display != nullptr) { + *aState = display->GetImmediateSensorState(); + } + return true; +} + +bool +VRManagerParent::HaveEventListener() +{ + return mHaveEventListener; +} + +bool +VRManagerParent::RecvSetHaveEventListener(const bool& aHaveEventListener) +{ + mHaveEventListener = aHaveEventListener; + return true; +} + +bool +VRManagerParent::RecvControllerListenerAdded() +{ + VRManager* vm = VRManager::Get(); + // Ask the connected gamepads to be added to GamepadManager + vm->ScanForDevices(); + + return true; +} + +bool +VRManagerParent::RecvControllerListenerRemoved() +{ + return true; +} + +bool +VRManagerParent::RecvGetControllers(nsTArray<VRControllerInfo> *aControllers) +{ + VRManager* vm = VRManager::Get(); + vm->GetVRControllerInfo(*aControllers); + return true; +} + +bool +VRManagerParent::SendGamepadUpdate(const GamepadChangeEvent& aGamepadEvent) +{ + // GamepadManager only exists at the content process + // or the same process in non-e10s mode. + if (mIsContentChild || IsSameProcess()) { + return PVRManagerParent::SendGamepadUpdate(aGamepadEvent); + } else { + return true; + } +} + +} // namespace gfx +} // namespace mozilla diff --git a/gfx/vr/ipc/VRManagerParent.h b/gfx/vr/ipc/VRManagerParent.h new file mode 100644 index 000000000..d4611c187 --- /dev/null +++ b/gfx/vr/ipc/VRManagerParent.h @@ -0,0 +1,118 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- + * vim: sw=2 ts=8 et : + */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. 
*/ + +#ifndef MOZILLA_GFX_VR_VRMANAGERPARENT_H +#define MOZILLA_GFX_VR_VRMANAGERPARENT_H + +#include "mozilla/layers/CompositableTransactionParent.h" +#include "mozilla/layers/CompositorThread.h" // for CompositorThreadHolder +#include "mozilla/gfx/PVRManagerParent.h" // for PVRManagerParent +#include "mozilla/gfx/PVRLayerParent.h" // for PVRLayerParent +#include "mozilla/ipc/ProtocolUtils.h" // for IToplevelProtocol +#include "mozilla/TimeStamp.h" // for TimeStamp +#include "gfxVR.h" // for VRFieldOfView + +namespace mozilla { +using namespace layers; +namespace gfx { + +class VRManager; + +class VRManagerParent final : public PVRManagerParent + , public HostIPCAllocator + , public ShmemAllocator +{ +public: + explicit VRManagerParent(ProcessId aChildProcessId, bool aIsContentChild); + + static VRManagerParent* CreateSameProcess(); + static bool CreateForGPUProcess(Endpoint<PVRManagerParent>&& aEndpoint); + static bool CreateForContent(Endpoint<PVRManagerParent>&& aEndpoint); + + virtual base::ProcessId GetChildProcessId() override; + + // ShmemAllocator + + virtual ShmemAllocator* AsShmemAllocator() override { return this; } + + virtual bool AllocShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) override; + + virtual bool AllocUnsafeShmem(size_t aSize, + ipc::SharedMemory::SharedMemoryType aType, + ipc::Shmem* aShmem) override; + + virtual void DeallocShmem(ipc::Shmem& aShmem) override; + + virtual bool IsSameProcess() const override; + bool HaveEventListener(); + + virtual void NotifyNotUsed(PTextureParent* aTexture, uint64_t aTransactionId) override; + virtual void SendAsyncMessage(const InfallibleTArray<AsyncParentMessageData>& aMessage) override; + bool SendGamepadUpdate(const GamepadChangeEvent& aGamepadEvent); + +protected: + ~VRManagerParent(); + + virtual PTextureParent* AllocPTextureParent(const SurfaceDescriptor& aSharedData, + const LayersBackend& aLayersBackend, + const TextureFlags& aFlags, + const uint64_t& aSerial) override; + virtual bool DeallocPTextureParent(PTextureParent* actor) override; + + virtual PVRLayerParent* AllocPVRLayerParent(const uint32_t& aDisplayID, + const float& aLeftEyeX, + const float& aLeftEyeY, + const float& aLeftEyeWidth, + const float& aLeftEyeHeight, + const float& aRightEyeX, + const float& aRightEyeY, + const float& aRightEyeWidth, + const float& aRightEyeHeight) override; + virtual bool DeallocPVRLayerParent(PVRLayerParent* actor) override; + + virtual void ActorDestroy(ActorDestroyReason why) override; + void OnChannelConnected(int32_t pid) override; + + virtual bool RecvRefreshDisplays() override; + virtual bool RecvGetDisplays(nsTArray<VRDisplayInfo> *aDisplays) override; + virtual bool RecvResetSensor(const uint32_t& aDisplayID) override; + virtual bool RecvGetSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState) override; + virtual bool RecvGetImmediateSensorState(const uint32_t& aDisplayID, VRHMDSensorState* aState) override; + virtual bool RecvSetHaveEventListener(const bool& aHaveEventListener) override; + virtual bool RecvControllerListenerAdded() override; + virtual bool RecvControllerListenerRemoved() override; + virtual bool RecvGetControllers(nsTArray<VRControllerInfo> *aControllers) override; + +private: + void RegisterWithManager(); + void UnregisterFromManager(); + + void Bind(Endpoint<PVRManagerParent>&& aEndpoint); + + static void RegisterVRManagerInCompositorThread(VRManagerParent* aVRManager); + + void DeferredDestroy(); + + // This keeps us alive until ActorDestroy(), at 
which point we do a + // deferred destruction of ourselves. + RefPtr<VRManagerParent> mSelfRef; + + // Keep the compositor thread alive, until we have destroyed ourselves. + RefPtr<layers::CompositorThreadHolder> mCompositorThreadHolder; + + // Keep the VRManager alive, until we have destroyed ourselves. + RefPtr<VRManager> mVRManagerHolder; + bool mHaveEventListener; + bool mIsContentChild; +}; + +} // namespace mozilla +} // namespace gfx + +#endif // MOZILLA_GFX_VR_VRMANAGERPARENT_H diff --git a/gfx/vr/ipc/VRMessageUtils.h b/gfx/vr/ipc/VRMessageUtils.h new file mode 100644 index 000000000..c066047db --- /dev/null +++ b/gfx/vr/ipc/VRMessageUtils.h @@ -0,0 +1,193 @@ +/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ +/* vim: set sw=2 ts=8 et tw=80 : */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this + * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ + +#ifndef mozilla_gfx_vr_VRMessageUtils_h +#define mozilla_gfx_vr_VRMessageUtils_h + +#include "ipc/IPCMessageUtils.h" +#include "mozilla/GfxMessageUtils.h" +#include "VRManager.h" + +#include "gfxVR.h" + +namespace IPC { + +template<> +struct ParamTraits<mozilla::gfx::VRDeviceType> : + public ContiguousEnumSerializer<mozilla::gfx::VRDeviceType, + mozilla::gfx::VRDeviceType(0), + mozilla::gfx::VRDeviceType(mozilla::gfx::VRDeviceType::NumVRDeviceTypes)> {}; + +template<> +struct ParamTraits<mozilla::gfx::VRDisplayCapabilityFlags> : + public BitFlagsEnumSerializer<mozilla::gfx::VRDisplayCapabilityFlags, + mozilla::gfx::VRDisplayCapabilityFlags::Cap_All> {}; + +template <> +struct ParamTraits<mozilla::gfx::VRDisplayInfo> +{ + typedef mozilla::gfx::VRDisplayInfo paramType; + + static void Write(Message* aMsg, const paramType& aParam) + { + WriteParam(aMsg, aParam.mType); + WriteParam(aMsg, aParam.mDisplayID); + WriteParam(aMsg, aParam.mDisplayName); + WriteParam(aMsg, aParam.mCapabilityFlags); + WriteParam(aMsg, aParam.mEyeResolution); + WriteParam(aMsg, aParam.mIsConnected); + WriteParam(aMsg, aParam.mIsPresenting); + WriteParam(aMsg, aParam.mStageSize); + WriteParam(aMsg, aParam.mSittingToStandingTransform); + for (int i = 0; i < mozilla::gfx::VRDisplayInfo::NumEyes; i++) { + WriteParam(aMsg, aParam.mEyeFOV[i]); + WriteParam(aMsg, aParam.mEyeTranslation[i]); + } + } + + static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult) + { + if (!ReadParam(aMsg, aIter, &(aResult->mType)) || + !ReadParam(aMsg, aIter, &(aResult->mDisplayID)) || + !ReadParam(aMsg, aIter, &(aResult->mDisplayName)) || + !ReadParam(aMsg, aIter, &(aResult->mCapabilityFlags)) || + !ReadParam(aMsg, aIter, &(aResult->mEyeResolution)) || + !ReadParam(aMsg, aIter, &(aResult->mIsConnected)) || + !ReadParam(aMsg, aIter, &(aResult->mIsPresenting)) || + !ReadParam(aMsg, aIter, &(aResult->mStageSize)) || + !ReadParam(aMsg, aIter, &(aResult->mSittingToStandingTransform))) { + return false; + } + for (int i = 0; i < mozilla::gfx::VRDisplayInfo::NumEyes; i++) { + if (!ReadParam(aMsg, aIter, &(aResult->mEyeFOV[i])) || + !ReadParam(aMsg, aIter, &(aResult->mEyeTranslation[i]))) { + return false; + } + } + + return true; + } +}; + +template <> +struct ParamTraits<mozilla::gfx::VRHMDSensorState> +{ + typedef mozilla::gfx::VRHMDSensorState paramType; + + static void Write(Message* aMsg, const paramType& aParam) + { + WriteParam(aMsg, aParam.timestamp); + WriteParam(aMsg, aParam.inputFrameID); + WriteParam(aMsg, aParam.flags); + 
WriteParam(aMsg, aParam.orientation[0]); + WriteParam(aMsg, aParam.orientation[1]); + WriteParam(aMsg, aParam.orientation[2]); + WriteParam(aMsg, aParam.orientation[3]); + WriteParam(aMsg, aParam.position[0]); + WriteParam(aMsg, aParam.position[1]); + WriteParam(aMsg, aParam.position[2]); + WriteParam(aMsg, aParam.angularVelocity[0]); + WriteParam(aMsg, aParam.angularVelocity[1]); + WriteParam(aMsg, aParam.angularVelocity[2]); + WriteParam(aMsg, aParam.angularAcceleration[0]); + WriteParam(aMsg, aParam.angularAcceleration[1]); + WriteParam(aMsg, aParam.angularAcceleration[2]); + WriteParam(aMsg, aParam.linearVelocity[0]); + WriteParam(aMsg, aParam.linearVelocity[1]); + WriteParam(aMsg, aParam.linearVelocity[2]); + WriteParam(aMsg, aParam.linearAcceleration[0]); + WriteParam(aMsg, aParam.linearAcceleration[1]); + WriteParam(aMsg, aParam.linearAcceleration[2]); + } + + static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult) + { + if (!ReadParam(aMsg, aIter, &(aResult->timestamp)) || + !ReadParam(aMsg, aIter, &(aResult->inputFrameID)) || + !ReadParam(aMsg, aIter, &(aResult->flags)) || + !ReadParam(aMsg, aIter, &(aResult->orientation[0])) || + !ReadParam(aMsg, aIter, &(aResult->orientation[1])) || + !ReadParam(aMsg, aIter, &(aResult->orientation[2])) || + !ReadParam(aMsg, aIter, &(aResult->orientation[3])) || + !ReadParam(aMsg, aIter, &(aResult->position[0])) || + !ReadParam(aMsg, aIter, &(aResult->position[1])) || + !ReadParam(aMsg, aIter, &(aResult->position[2])) || + !ReadParam(aMsg, aIter, &(aResult->angularVelocity[0])) || + !ReadParam(aMsg, aIter, &(aResult->angularVelocity[1])) || + !ReadParam(aMsg, aIter, &(aResult->angularVelocity[2])) || + !ReadParam(aMsg, aIter, &(aResult->angularAcceleration[0])) || + !ReadParam(aMsg, aIter, &(aResult->angularAcceleration[1])) || + !ReadParam(aMsg, aIter, &(aResult->angularAcceleration[2])) || + !ReadParam(aMsg, aIter, &(aResult->linearVelocity[0])) || + !ReadParam(aMsg, aIter, &(aResult->linearVelocity[1])) || + !ReadParam(aMsg, aIter, &(aResult->linearVelocity[2])) || + !ReadParam(aMsg, aIter, &(aResult->linearAcceleration[0])) || + !ReadParam(aMsg, aIter, &(aResult->linearAcceleration[1])) || + !ReadParam(aMsg, aIter, &(aResult->linearAcceleration[2]))) { + return false; + } + return true; + } +}; + +template <> +struct ParamTraits<mozilla::gfx::VRFieldOfView> +{ + typedef mozilla::gfx::VRFieldOfView paramType; + + static void Write(Message* aMsg, const paramType& aParam) + { + WriteParam(aMsg, aParam.upDegrees); + WriteParam(aMsg, aParam.rightDegrees); + WriteParam(aMsg, aParam.downDegrees); + WriteParam(aMsg, aParam.leftDegrees); + } + + static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult) + { + if (!ReadParam(aMsg, aIter, &(aResult->upDegrees)) || + !ReadParam(aMsg, aIter, &(aResult->rightDegrees)) || + !ReadParam(aMsg, aIter, &(aResult->downDegrees)) || + !ReadParam(aMsg, aIter, &(aResult->leftDegrees))) { + return false; + } + + return true; + } +}; + +template <> +struct ParamTraits<mozilla::gfx::VRControllerInfo> +{ + typedef mozilla::gfx::VRControllerInfo paramType; + + static void Write(Message* aMsg, const paramType& aParam) + { + WriteParam(aMsg, aParam.mType); + WriteParam(aMsg, aParam.mControllerID); + WriteParam(aMsg, aParam.mControllerName); + WriteParam(aMsg, aParam.mMappingType); + WriteParam(aMsg, aParam.mNumButtons); + WriteParam(aMsg, aParam.mNumAxes); + } + + static bool Read(const Message* aMsg, PickleIterator* aIter, paramType* aResult) + { + if (!ReadParam(aMsg, aIter, 
&(aResult->mType)) || + !ReadParam(aMsg, aIter, &(aResult->mControllerID)) || + !ReadParam(aMsg, aIter, &(aResult->mControllerName)) || + !ReadParam(aMsg, aIter, &(aResult->mMappingType)) || + !ReadParam(aMsg, aIter, &(aResult->mNumButtons)) || + !ReadParam(aMsg, aIter, &(aResult->mNumAxes))) { + return false; + } + + return true; + } +}; +} // namespace IPC + +#endif // mozilla_gfx_vr_VRMessageUtils_h diff --git a/gfx/vr/moz.build b/gfx/vr/moz.build new file mode 100644 index 000000000..5ab724d4a --- /dev/null +++ b/gfx/vr/moz.build @@ -0,0 +1,71 @@ +# -*- Mode: python; indent-tabs-mode: nil; tab-width: 40 -*- +# vim: set filetype=python: +# This Source Code Form is subject to the terms of the Mozilla Public +# License, v. 2.0. If a copy of the MPL was not distributed with this +# file, You can obtain one at http://mozilla.org/MPL/2.0/. + +EXPORTS += [ + 'gfxVR.h', + 'ipc/VRLayerChild.h', + 'ipc/VRManagerChild.h', + 'ipc/VRManagerParent.h', + 'ipc/VRMessageUtils.h', + 'VRDisplayClient.h', + 'VRDisplayPresentation.h', + 'VRManager.h', +] + +LOCAL_INCLUDES += [ + '/dom/base', + '/gfx/layers/d3d11', + '/gfx/thebes', +] + +UNIFIED_SOURCES += [ + 'gfxVR.cpp', + 'gfxVROpenVR.cpp', + 'gfxVROSVR.cpp', + 'ipc/VRLayerChild.cpp', + 'ipc/VRLayerParent.cpp', + 'ipc/VRManagerChild.cpp', + 'ipc/VRManagerParent.cpp', + 'VRDisplayClient.cpp', + 'VRDisplayHost.cpp', + 'VRDisplayPresentation.cpp', + 'VRManager.cpp', +] + +if CONFIG['MOZ_WIDGET_TOOLKIT'] == 'windows': + SOURCES += [ + 'gfxVROculus.cpp', + ] + +IPDL_SOURCES = [ + 'ipc/PVRLayer.ipdl', + 'ipc/PVRManager.ipdl', +] + +# For building with the real SDK instead of our local hack +#SOURCES += [ +# 'OVR_CAPI_Util.cpp', +# 'OVR_CAPIShim.c', +# 'OVR_StereoProjection.cpp', +#] +# +#CXXFLAGS += ["-Ic:/proj/ovr/OculusSDK-0.6.0-beta/LibOVR/Include"] +#CFLAGS += ["-Ic:/proj/ovr/OculusSDK-0.6.0-beta/LibOVR/Include"] + +CXXFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CXXFLAGS += CONFIG['TK_CFLAGS'] +CFLAGS += CONFIG['MOZ_CAIRO_CFLAGS'] +CFLAGS += CONFIG['TK_CFLAGS'] + +include('/ipc/chromium/chromium-config.mozbuild') + +FINAL_LIBRARY = 'xul' + +# This is intended as a temporary hack to enable VS2015 builds. +if CONFIG['_MSC_VER']: + # ovr_capi_dynamic.h '<unnamed-tag>': Alignment specifier is less than + # actual alignment (8), and will be ignored + CXXFLAGS += ['-wd4359'] diff --git a/gfx/vr/openvr/LICENSE b/gfx/vr/openvr/LICENSE new file mode 100644 index 000000000..ee83337d7 --- /dev/null +++ b/gfx/vr/openvr/LICENSE @@ -0,0 +1,27 @@ +Copyright (c) 2015, Valve Corporation +All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this +list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, +this list of conditions and the following disclaimer in the documentation and/or +other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors +may be used to endorse or promote products derived from this software without +specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON +ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/gfx/vr/openvr/README b/gfx/vr/openvr/README new file mode 100644 index 000000000..5e67e5a3a --- /dev/null +++ b/gfx/vr/openvr/README @@ -0,0 +1,2 @@ +See https://github.com/ValveSoftware/openvr/ + diff --git a/gfx/vr/openvr/openvr.h b/gfx/vr/openvr/openvr.h new file mode 100644 index 000000000..deb3142fd --- /dev/null +++ b/gfx/vr/openvr/openvr.h @@ -0,0 +1,3352 @@ +#pragma once + +// openvr.h +//========= Copyright Valve Corporation ============// +// Dynamically generated file. Do not modify this file directly. + +#ifndef _OPENVR_API +#define _OPENVR_API + +#include <stdint.h> + + + +// vrtypes.h +#ifndef _INCLUDE_VRTYPES_H +#define _INCLUDE_VRTYPES_H + +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +typedef void* glSharedTextureHandle_t; +typedef int32_t glInt_t; +typedef uint32_t glUInt_t; + +// right-handed system +// +y is up +// +x is to the right +// -z is going away from you +// Distance unit is meters +struct HmdMatrix34_t +{ + float m[3][4]; +}; + +struct HmdMatrix44_t +{ + float m[4][4]; +}; + +struct HmdVector3_t +{ + float v[3]; +}; + +struct HmdVector4_t +{ + float v[4]; +}; + +struct HmdVector3d_t +{ + double v[3]; +}; + +struct HmdVector2_t +{ + float v[2]; +}; + +struct HmdQuaternion_t +{ + double w, x, y, z; +}; + +struct HmdColor_t +{ + float r, g, b, a; +}; + +struct HmdQuad_t +{ + HmdVector3_t vCorners[ 4 ]; +}; + +struct HmdRect2_t +{ + HmdVector2_t vTopLeft; + HmdVector2_t vBottomRight; +}; + +/** Used to return the post-distortion UVs for each color channel. +* UVs range from 0 to 1 with 0,0 in the upper left corner of the +* source render target. The 0,0 to 1,1 range covers a single eye. */ +struct DistortionCoordinates_t +{ + float rfRed[2]; + float rfGreen[2]; + float rfBlue[2]; +}; + +enum EVREye +{ + Eye_Left = 0, + Eye_Right = 1 +}; + +enum EGraphicsAPIConvention +{ + API_DirectX = 0, // Normalized Z goes from 0 at the viewer to 1 at the far clip plane + API_OpenGL = 1, // Normalized Z goes from 1 at the viewer to -1 at the far clip plane +}; + +enum EColorSpace +{ + ColorSpace_Auto = 0, // Assumes 'gamma' for 8-bit per component formats, otherwise 'linear'. This mirrors the DXGI formats which have _SRGB variants. + ColorSpace_Gamma = 1, // Texture data can be displayed directly on the display without any conversion (a.k.a. display native format). + ColorSpace_Linear = 2, // Same as gamma but has been converted to a linear representation using DXGI's sRGB conversion algorithm. 
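  // A minimal illustrative sketch of how these color-space values are typically picked when
  // filling the vr::Texture_t defined just below; pD3DTexture is an assumed ID3D11Texture2D
  // pointer, and the choice of values here is an example only:
  //
  //   vr::Texture_t tex;
  //   tex.handle = pD3DTexture;                 // native texture pointer, assumed D3D11
  //   tex.eType = vr::API_DirectX;
  //   tex.eColorSpace = vr::ColorSpace_Gamma;   // typical for 8-bit sRGB content
  //   // ColorSpace_Linear would instead suit float render targets already in linear space;
  //   // ColorSpace_Auto lets the runtime infer the treatment from the texture format.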
+}; + +struct Texture_t +{ + void* handle; // Native d3d texture pointer or GL texture id. + EGraphicsAPIConvention eType; + EColorSpace eColorSpace; +}; + +enum ETrackingResult +{ + TrackingResult_Uninitialized = 1, + + TrackingResult_Calibrating_InProgress = 100, + TrackingResult_Calibrating_OutOfRange = 101, + + TrackingResult_Running_OK = 200, + TrackingResult_Running_OutOfRange = 201, +}; + +static const uint32_t k_unTrackingStringSize = 32; +static const uint32_t k_unMaxDriverDebugResponseSize = 32768; + +/** Used to pass device IDs to API calls */ +typedef uint32_t TrackedDeviceIndex_t; +static const uint32_t k_unTrackedDeviceIndex_Hmd = 0; +static const uint32_t k_unMaxTrackedDeviceCount = 16; +static const uint32_t k_unTrackedDeviceIndexOther = 0xFFFFFFFE; +static const uint32_t k_unTrackedDeviceIndexInvalid = 0xFFFFFFFF; + +/** Describes what kind of object is being tracked at a given ID */ +enum ETrackedDeviceClass +{ + TrackedDeviceClass_Invalid = 0, // the ID was not valid. + TrackedDeviceClass_HMD = 1, // Head-Mounted Displays + TrackedDeviceClass_Controller = 2, // Tracked controllers + TrackedDeviceClass_TrackingReference = 4, // Camera and base stations that serve as tracking reference points + + TrackedDeviceClass_Other = 1000, +}; + + +/** Describes what specific role associated with a tracked device */ +enum ETrackedControllerRole +{ + TrackedControllerRole_Invalid = 0, // Invalid value for controller type + TrackedControllerRole_LeftHand = 1, // Tracked device associated with the left hand + TrackedControllerRole_RightHand = 2, // Tracked device associated with the right hand +}; + + +/** describes a single pose for a tracked object */ +struct TrackedDevicePose_t +{ + HmdMatrix34_t mDeviceToAbsoluteTracking; + HmdVector3_t vVelocity; // velocity in tracker space in m/s + HmdVector3_t vAngularVelocity; // angular velocity in radians/s (?) + ETrackingResult eTrackingResult; + bool bPoseIsValid; + + // This indicates that there is a device connected for this spot in the pose array. + // It could go from true to false if the user unplugs the device. + bool bDeviceIsConnected; +}; + +/** Identifies which style of tracking origin the application wants to use +* for the poses it is requesting */ +enum ETrackingUniverseOrigin +{ + TrackingUniverseSeated = 0, // Poses are provided relative to the seated zero pose + TrackingUniverseStanding = 1, // Poses are provided relative to the safe bounds configured by the user + TrackingUniverseRawAndUncalibrated = 2, // Poses are provided in the coordinate system defined by the driver. You probably don't want this one. +}; + + +/** Each entry in this enum represents a property that can be retrieved about a +* tracked device. Many fields are only valid for one ETrackedDeviceClass. 
*/ +enum ETrackedDeviceProperty +{ + // general properties that apply to all device classes + Prop_TrackingSystemName_String = 1000, + Prop_ModelNumber_String = 1001, + Prop_SerialNumber_String = 1002, + Prop_RenderModelName_String = 1003, + Prop_WillDriftInYaw_Bool = 1004, + Prop_ManufacturerName_String = 1005, + Prop_TrackingFirmwareVersion_String = 1006, + Prop_HardwareRevision_String = 1007, + Prop_AllWirelessDongleDescriptions_String = 1008, + Prop_ConnectedWirelessDongle_String = 1009, + Prop_DeviceIsWireless_Bool = 1010, + Prop_DeviceIsCharging_Bool = 1011, + Prop_DeviceBatteryPercentage_Float = 1012, // 0 is empty, 1 is full + Prop_StatusDisplayTransform_Matrix34 = 1013, + Prop_Firmware_UpdateAvailable_Bool = 1014, + Prop_Firmware_ManualUpdate_Bool = 1015, + Prop_Firmware_ManualUpdateURL_String = 1016, + Prop_HardwareRevision_Uint64 = 1017, + Prop_FirmwareVersion_Uint64 = 1018, + Prop_FPGAVersion_Uint64 = 1019, + Prop_VRCVersion_Uint64 = 1020, + Prop_RadioVersion_Uint64 = 1021, + Prop_DongleVersion_Uint64 = 1022, + Prop_BlockServerShutdown_Bool = 1023, + Prop_CanUnifyCoordinateSystemWithHmd_Bool = 1024, + Prop_ContainsProximitySensor_Bool = 1025, + Prop_DeviceProvidesBatteryStatus_Bool = 1026, + Prop_DeviceCanPowerOff_Bool = 1027, + Prop_Firmware_ProgrammingTarget_String = 1028, + Prop_DeviceClass_Int32 = 1029, + Prop_HasCamera_Bool = 1030, + Prop_DriverVersion_String = 1031, + Prop_Firmware_ForceUpdateRequired_Bool = 1032, + + // Properties that are unique to TrackedDeviceClass_HMD + Prop_ReportsTimeSinceVSync_Bool = 2000, + Prop_SecondsFromVsyncToPhotons_Float = 2001, + Prop_DisplayFrequency_Float = 2002, + Prop_UserIpdMeters_Float = 2003, + Prop_CurrentUniverseId_Uint64 = 2004, + Prop_PreviousUniverseId_Uint64 = 2005, + Prop_DisplayFirmwareVersion_Uint64 = 2006, + Prop_IsOnDesktop_Bool = 2007, + Prop_DisplayMCType_Int32 = 2008, + Prop_DisplayMCOffset_Float = 2009, + Prop_DisplayMCScale_Float = 2010, + Prop_EdidVendorID_Int32 = 2011, + Prop_DisplayMCImageLeft_String = 2012, + Prop_DisplayMCImageRight_String = 2013, + Prop_DisplayGCBlackClamp_Float = 2014, + Prop_EdidProductID_Int32 = 2015, + Prop_CameraToHeadTransform_Matrix34 = 2016, + Prop_DisplayGCType_Int32 = 2017, + Prop_DisplayGCOffset_Float = 2018, + Prop_DisplayGCScale_Float = 2019, + Prop_DisplayGCPrescale_Float = 2020, + Prop_DisplayGCImage_String = 2021, + Prop_LensCenterLeftU_Float = 2022, + Prop_LensCenterLeftV_Float = 2023, + Prop_LensCenterRightU_Float = 2024, + Prop_LensCenterRightV_Float = 2025, + Prop_UserHeadToEyeDepthMeters_Float = 2026, + Prop_CameraFirmwareVersion_Uint64 = 2027, + Prop_CameraFirmwareDescription_String = 2028, + Prop_DisplayFPGAVersion_Uint64 = 2029, + Prop_DisplayBootloaderVersion_Uint64 = 2030, + Prop_DisplayHardwareVersion_Uint64 = 2031, + Prop_AudioFirmwareVersion_Uint64 = 2032, + Prop_CameraCompatibilityMode_Int32 = 2033, + Prop_ScreenshotHorizontalFieldOfViewDegrees_Float = 2034, + Prop_ScreenshotVerticalFieldOfViewDegrees_Float = 2035, + Prop_DisplaySuppressed_Bool = 2036, + + // Properties that are unique to TrackedDeviceClass_Controller + Prop_AttachedDeviceId_String = 3000, + Prop_SupportedButtons_Uint64 = 3001, + Prop_Axis0Type_Int32 = 3002, // Return value is of type EVRControllerAxisType + Prop_Axis1Type_Int32 = 3003, // Return value is of type EVRControllerAxisType + Prop_Axis2Type_Int32 = 3004, // Return value is of type EVRControllerAxisType + Prop_Axis3Type_Int32 = 3005, // Return value is of type EVRControllerAxisType + Prop_Axis4Type_Int32 = 3006, // Return value is of type 
EVRControllerAxisType + Prop_ControllerRoleHint_Int32 = 3007, // Return value is of type ETrackedControllerRole + + // Properties that are unique to TrackedDeviceClass_TrackingReference + Prop_FieldOfViewLeftDegrees_Float = 4000, + Prop_FieldOfViewRightDegrees_Float = 4001, + Prop_FieldOfViewTopDegrees_Float = 4002, + Prop_FieldOfViewBottomDegrees_Float = 4003, + Prop_TrackingRangeMinimumMeters_Float = 4004, + Prop_TrackingRangeMaximumMeters_Float = 4005, + Prop_ModeLabel_String = 4006, + + // Vendors are free to expose private debug data in this reserved region + Prop_VendorSpecific_Reserved_Start = 10000, + Prop_VendorSpecific_Reserved_End = 10999, +}; + +/** No string property will ever be longer than this length */ +static const uint32_t k_unMaxPropertyStringSize = 32 * 1024; + +/** Used to return errors that occur when reading properties. */ +enum ETrackedPropertyError +{ + TrackedProp_Success = 0, + TrackedProp_WrongDataType = 1, + TrackedProp_WrongDeviceClass = 2, + TrackedProp_BufferTooSmall = 3, + TrackedProp_UnknownProperty = 4, + TrackedProp_InvalidDevice = 5, + TrackedProp_CouldNotContactServer = 6, + TrackedProp_ValueNotProvidedByDevice = 7, + TrackedProp_StringExceedsMaximumLength = 8, + TrackedProp_NotYetAvailable = 9, // The property value isn't known yet, but is expected soon. Call again later. +}; + +/** Allows the application to control what part of the provided texture will be used in the +* frame buffer. */ +struct VRTextureBounds_t +{ + float uMin, vMin; + float uMax, vMax; +}; + + +/** Allows the application to control how scene textures are used by the compositor when calling Submit. */ +enum EVRSubmitFlags +{ + // Simple render path. App submits rendered left and right eye images with no lens distortion correction applied. + Submit_Default = 0x00, + + // App submits final left and right eye images with lens distortion already applied (lens distortion makes the images appear + // barrel distorted with chromatic aberration correction applied). The app would have used the data returned by + // vr::IVRSystem::ComputeDistortion() to apply the correct distortion to the rendered images before calling Submit(). + Submit_LensDistortionAlreadyApplied = 0x01, + + // If the texture pointer passed in is actually a renderbuffer (e.g. for MSAA in OpenGL) then set this flag. 
+ Submit_GlRenderBuffer = 0x02, +}; + + +/** Status of the overall system or tracked objects */ +enum EVRState +{ + VRState_Undefined = -1, + VRState_Off = 0, + VRState_Searching = 1, + VRState_Searching_Alert = 2, + VRState_Ready = 3, + VRState_Ready_Alert = 4, + VRState_NotReady = 5, + VRState_Standby = 6, + VRState_Ready_Alert_Low = 7, +}; + +/** The types of events that could be posted (and what the parameters mean for each event type) */ +enum EVREventType +{ + VREvent_None = 0, + + VREvent_TrackedDeviceActivated = 100, + VREvent_TrackedDeviceDeactivated = 101, + VREvent_TrackedDeviceUpdated = 102, + VREvent_TrackedDeviceUserInteractionStarted = 103, + VREvent_TrackedDeviceUserInteractionEnded = 104, + VREvent_IpdChanged = 105, + VREvent_EnterStandbyMode = 106, + VREvent_LeaveStandbyMode = 107, + VREvent_TrackedDeviceRoleChanged = 108, + VREvent_WatchdogWakeUpRequested = 109, + + VREvent_ButtonPress = 200, // data is controller + VREvent_ButtonUnpress = 201, // data is controller + VREvent_ButtonTouch = 202, // data is controller + VREvent_ButtonUntouch = 203, // data is controller + + VREvent_MouseMove = 300, // data is mouse + VREvent_MouseButtonDown = 301, // data is mouse + VREvent_MouseButtonUp = 302, // data is mouse + VREvent_FocusEnter = 303, // data is overlay + VREvent_FocusLeave = 304, // data is overlay + VREvent_Scroll = 305, // data is mouse + VREvent_TouchPadMove = 306, // data is mouse + VREvent_OverlayFocusChanged = 307, // data is overlay, global event + + VREvent_InputFocusCaptured = 400, // data is process DEPRECATED + VREvent_InputFocusReleased = 401, // data is process DEPRECATED + VREvent_SceneFocusLost = 402, // data is process + VREvent_SceneFocusGained = 403, // data is process + VREvent_SceneApplicationChanged = 404, // data is process - The App actually drawing the scene changed (usually to or from the compositor) + VREvent_SceneFocusChanged = 405, // data is process - New app got access to draw the scene + VREvent_InputFocusChanged = 406, // data is process + VREvent_SceneApplicationSecondaryRenderingStarted = 407, // data is process + + VREvent_HideRenderModels = 410, // Sent to the scene application to request hiding render models temporarily + VREvent_ShowRenderModels = 411, // Sent to the scene application to request restoring render model visibility + + VREvent_OverlayShown = 500, + VREvent_OverlayHidden = 501, + VREvent_DashboardActivated = 502, + VREvent_DashboardDeactivated = 503, + VREvent_DashboardThumbSelected = 504, // Sent to the overlay manager - data is overlay + VREvent_DashboardRequested = 505, // Sent to the overlay manager - data is overlay + VREvent_ResetDashboard = 506, // Send to the overlay manager + VREvent_RenderToast = 507, // Send to the dashboard to render a toast - data is the notification ID + VREvent_ImageLoaded = 508, // Sent to overlays when a SetOverlayRaw or SetOverlayFromFile call finishes loading + VREvent_ShowKeyboard = 509, // Sent to keyboard renderer in the dashboard to invoke it + VREvent_HideKeyboard = 510, // Sent to keyboard renderer in the dashboard to hide it + VREvent_OverlayGamepadFocusGained = 511, // Sent to an overlay when IVROverlay::SetFocusOverlay is called on it + VREvent_OverlayGamepadFocusLost = 512, // Send to an overlay when it previously had focus and IVROverlay::SetFocusOverlay is called on something else + VREvent_OverlaySharedTextureChanged = 513, + VREvent_DashboardGuideButtonDown = 514, + VREvent_DashboardGuideButtonUp = 515, + VREvent_ScreenshotTriggered = 516, // Screenshot button combo 
was pressed, Dashboard should request a screenshot + VREvent_ImageFailed = 517, // Sent to overlays when a SetOverlayRaw or SetOverlayfromFail fails to load + + // Screenshot API + VREvent_RequestScreenshot = 520, // Sent by vrclient application to compositor to take a screenshot + VREvent_ScreenshotTaken = 521, // Sent by compositor to the application that the screenshot has been taken + VREvent_ScreenshotFailed = 522, // Sent by compositor to the application that the screenshot failed to be taken + VREvent_SubmitScreenshotToDashboard = 523, // Sent by compositor to the dashboard that a completed screenshot was submitted + VREvent_ScreenshotProgressToDashboard = 524, // Sent by compositor to the dashboard that a completed screenshot was submitted + + VREvent_Notification_Shown = 600, + VREvent_Notification_Hidden = 601, + VREvent_Notification_BeginInteraction = 602, + VREvent_Notification_Destroyed = 603, + + VREvent_Quit = 700, // data is process + VREvent_ProcessQuit = 701, // data is process + VREvent_QuitAborted_UserPrompt = 702, // data is process + VREvent_QuitAcknowledged = 703, // data is process + VREvent_DriverRequestedQuit = 704, // The driver has requested that SteamVR shut down + + VREvent_ChaperoneDataHasChanged = 800, + VREvent_ChaperoneUniverseHasChanged = 801, + VREvent_ChaperoneTempDataHasChanged = 802, + VREvent_ChaperoneSettingsHaveChanged = 803, + VREvent_SeatedZeroPoseReset = 804, + + VREvent_AudioSettingsHaveChanged = 820, + + VREvent_BackgroundSettingHasChanged = 850, + VREvent_CameraSettingsHaveChanged = 851, + VREvent_ReprojectionSettingHasChanged = 852, + VREvent_ModelSkinSettingsHaveChanged = 853, + VREvent_EnvironmentSettingsHaveChanged = 854, + + VREvent_StatusUpdate = 900, + + VREvent_MCImageUpdated = 1000, + + VREvent_FirmwareUpdateStarted = 1100, + VREvent_FirmwareUpdateFinished = 1101, + + VREvent_KeyboardClosed = 1200, + VREvent_KeyboardCharInput = 1201, + VREvent_KeyboardDone = 1202, // Sent when DONE button clicked on keyboard + + VREvent_ApplicationTransitionStarted = 1300, + VREvent_ApplicationTransitionAborted = 1301, + VREvent_ApplicationTransitionNewAppStarted = 1302, + VREvent_ApplicationListUpdated = 1303, + VREvent_ApplicationMimeTypeLoad = 1304, + + VREvent_Compositor_MirrorWindowShown = 1400, + VREvent_Compositor_MirrorWindowHidden = 1401, + VREvent_Compositor_ChaperoneBoundsShown = 1410, + VREvent_Compositor_ChaperoneBoundsHidden = 1411, + + VREvent_TrackedCamera_StartVideoStream = 1500, + VREvent_TrackedCamera_StopVideoStream = 1501, + VREvent_TrackedCamera_PauseVideoStream = 1502, + VREvent_TrackedCamera_ResumeVideoStream = 1503, + + VREvent_PerformanceTest_EnableCapture = 1600, + VREvent_PerformanceTest_DisableCapture = 1601, + VREvent_PerformanceTest_FidelityLevel = 1602, + + // Vendors are free to expose private events in this reserved region + VREvent_VendorSpecific_Reserved_Start = 10000, + VREvent_VendorSpecific_Reserved_End = 19999, +}; + + +/** Level of Hmd activity */ +enum EDeviceActivityLevel +{ + k_EDeviceActivityLevel_Unknown = -1, + k_EDeviceActivityLevel_Idle = 0, + k_EDeviceActivityLevel_UserInteraction = 1, + k_EDeviceActivityLevel_UserInteraction_Timeout = 2, + k_EDeviceActivityLevel_Standby = 3, +}; + + +/** VR controller button and axis IDs */ +enum EVRButtonId +{ + k_EButton_System = 0, + k_EButton_ApplicationMenu = 1, + k_EButton_Grip = 2, + k_EButton_DPad_Left = 3, + k_EButton_DPad_Up = 4, + k_EButton_DPad_Right = 5, + k_EButton_DPad_Down = 6, + k_EButton_A = 7, + + k_EButton_Axis0 = 32, + k_EButton_Axis1 = 33, + 
k_EButton_Axis2 = 34, + k_EButton_Axis3 = 35, + k_EButton_Axis4 = 36, + + // aliases for well known controllers + k_EButton_SteamVR_Touchpad = k_EButton_Axis0, + k_EButton_SteamVR_Trigger = k_EButton_Axis1, + + k_EButton_Dashboard_Back = k_EButton_Grip, + + k_EButton_Max = 64 +}; + +inline uint64_t ButtonMaskFromId( EVRButtonId id ) { return 1ull << id; } + +/** used for controller button events */ +struct VREvent_Controller_t +{ + uint32_t button; // EVRButtonId enum +}; + + +/** used for simulated mouse events in overlay space */ +enum EVRMouseButton +{ + VRMouseButton_Left = 0x0001, + VRMouseButton_Right = 0x0002, + VRMouseButton_Middle = 0x0004, +}; + + +/** used for simulated mouse events in overlay space */ +struct VREvent_Mouse_t +{ + float x, y; // co-ords are in GL space, bottom left of the texture is 0,0 + uint32_t button; // EVRMouseButton enum +}; + +/** used for simulated mouse wheel scroll in overlay space */ +struct VREvent_Scroll_t +{ + float xdelta, ydelta; // movement in fraction of the pad traversed since last delta, 1.0 for a full swipe + uint32_t repeatCount; +}; + +/** when in mouse input mode you can receive data from the touchpad, these events are only sent if the users finger + is on the touchpad (or just released from it) +**/ +struct VREvent_TouchPadMove_t +{ + // true if the users finger is detected on the touch pad + bool bFingerDown; + + // How long the finger has been down in seconds + float flSecondsFingerDown; + + // These values indicate the starting finger position (so you can do some basic swipe stuff) + float fValueXFirst; + float fValueYFirst; + + // This is the raw sampled coordinate without deadzoning + float fValueXRaw; + float fValueYRaw; +}; + +/** notification related events. Details will still change at this point */ +struct VREvent_Notification_t +{ + uint64_t ulUserValue; + uint32_t notificationId; +}; + +/** Used for events about processes */ +struct VREvent_Process_t +{ + uint32_t pid; + uint32_t oldPid; + bool bForced; +}; + + +/** Used for a few events about overlays */ +struct VREvent_Overlay_t +{ + uint64_t overlayHandle; +}; + + +/** Used for a few events about overlays */ +struct VREvent_Status_t +{ + uint32_t statusState; // EVRState enum +}; + +/** Used for keyboard events **/ +struct VREvent_Keyboard_t +{ + char cNewInput[8]; // Up to 11 bytes of new input + uint64_t uUserValue; // Possible flags about the new input +}; + +struct VREvent_Ipd_t +{ + float ipdMeters; +}; + +struct VREvent_Chaperone_t +{ + uint64_t m_nPreviousUniverse; + uint64_t m_nCurrentUniverse; +}; + +/** Not actually used for any events */ +struct VREvent_Reserved_t +{ + uint64_t reserved0; + uint64_t reserved1; +}; + +struct VREvent_PerformanceTest_t +{ + uint32_t m_nFidelityLevel; +}; + +struct VREvent_SeatedZeroPoseReset_t +{ + bool bResetBySystemMenu; +}; + +struct VREvent_Screenshot_t +{ + uint32_t handle; + uint32_t type; +}; + +struct VREvent_ScreenshotProgress_t +{ + float progress; +}; + +struct VREvent_ApplicationLaunch_t +{ + uint32_t pid; + uint32_t unArgsHandle; +}; + +/** If you change this you must manually update openvr_interop.cs.py */ +typedef union +{ + VREvent_Reserved_t reserved; + VREvent_Controller_t controller; + VREvent_Mouse_t mouse; + VREvent_Scroll_t scroll; + VREvent_Process_t process; + VREvent_Notification_t notification; + VREvent_Overlay_t overlay; + VREvent_Status_t status; + VREvent_Keyboard_t keyboard; + VREvent_Ipd_t ipd; + VREvent_Chaperone_t chaperone; + VREvent_PerformanceTest_t performanceTest; + VREvent_TouchPadMove_t 
touchPadMove; + VREvent_SeatedZeroPoseReset_t seatedZeroPoseReset; + VREvent_Screenshot_t screenshot; + VREvent_ScreenshotProgress_t screenshotProgress; + VREvent_ApplicationLaunch_t applicationLaunch; +} VREvent_Data_t; + +/** An event posted by the server to all running applications */ +struct VREvent_t +{ + uint32_t eventType; // EVREventType enum + TrackedDeviceIndex_t trackedDeviceIndex; + float eventAgeSeconds; + // event data must be the end of the struct as its size is variable + VREvent_Data_t data; +}; + + +/** The mesh to draw into the stencil (or depth) buffer to perform +* early stencil (or depth) kills of pixels that will never appear on the HMD. +* This mesh draws on all the pixels that will be hidden after distortion. +* +* If the HMD does not provide a visible area mesh pVertexData will be +* NULL and unTriangleCount will be 0. */ +struct HiddenAreaMesh_t +{ + const HmdVector2_t *pVertexData; + uint32_t unTriangleCount; +}; + + +/** Identifies what kind of axis is on the controller at index n. Read this type +* with pVRSystem->Get( nControllerDeviceIndex, Prop_Axis0Type_Int32 + n ); +*/ +enum EVRControllerAxisType +{ + k_eControllerAxis_None = 0, + k_eControllerAxis_TrackPad = 1, + k_eControllerAxis_Joystick = 2, + k_eControllerAxis_Trigger = 3, // Analog trigger data is in the X axis +}; + + +/** contains information about one axis on the controller */ +struct VRControllerAxis_t +{ + float x; // Ranges from -1.0 to 1.0 for joysticks and track pads. Ranges from 0.0 to 1.0 for triggers were 0 is fully released. + float y; // Ranges from -1.0 to 1.0 for joysticks and track pads. Is always 0.0 for triggers. +}; + + +/** the number of axes in the controller state */ +static const uint32_t k_unControllerStateAxisCount = 5; + + +/** Holds all the state of a controller at one moment in time. */ +struct VRControllerState001_t +{ + // If packet num matches that on your prior call, then the controller state hasn't been changed since + // your last call and there is no need to process it + uint32_t unPacketNum; + + // bit flags for each of the buttons. Use ButtonMaskFromId to turn an ID into a mask + uint64_t ulButtonPressed; + uint64_t ulButtonTouched; + + // Axis data for the controller's analog inputs + VRControllerAxis_t rAxis[ k_unControllerStateAxisCount ]; +}; + + +typedef VRControllerState001_t VRControllerState_t; + + +/** determines how to provide output to the application of various event processing functions. 
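 A minimal illustrative sketch of consuming the VRControllerState_t defined above together
 with ButtonMaskFromId; pSystem is an assumed IVRSystem pointer and nControllerIndex an
 assumed valid tracked device index:

    vr::VRControllerState_t state;
    if ( pSystem->GetControllerState( nControllerIndex, &state ) )
    {
        uint64_t ulTriggerMask = vr::ButtonMaskFromId( vr::k_EButton_SteamVR_Trigger );
        bool bTriggerPressed = ( state.ulButtonPressed & ulTriggerMask ) != 0;
        // bTriggerPressed now reflects the trigger bit in the latest state snapshot
    }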
*/ +enum EVRControllerEventOutputType +{ + ControllerEventOutput_OSEvents = 0, + ControllerEventOutput_VREvents = 1, +}; + + + +/** Collision Bounds Style */ +enum ECollisionBoundsStyle +{ + COLLISION_BOUNDS_STYLE_BEGINNER = 0, + COLLISION_BOUNDS_STYLE_INTERMEDIATE, + COLLISION_BOUNDS_STYLE_SQUARES, + COLLISION_BOUNDS_STYLE_ADVANCED, + COLLISION_BOUNDS_STYLE_NONE, + + COLLISION_BOUNDS_STYLE_COUNT +}; + +/** Allows the application to customize how the overlay appears in the compositor */ +struct Compositor_OverlaySettings +{ + uint32_t size; // sizeof(Compositor_OverlaySettings) + bool curved, antialias; + float scale, distance, alpha; + float uOffset, vOffset, uScale, vScale; + float gridDivs, gridWidth, gridScale; + HmdMatrix44_t transform; +}; + +/** used to refer to a single VR overlay */ +typedef uint64_t VROverlayHandle_t; + +static const VROverlayHandle_t k_ulOverlayHandleInvalid = 0; + +/** Errors that can occur around VR overlays */ +enum EVROverlayError +{ + VROverlayError_None = 0, + + VROverlayError_UnknownOverlay = 10, + VROverlayError_InvalidHandle = 11, + VROverlayError_PermissionDenied = 12, + VROverlayError_OverlayLimitExceeded = 13, // No more overlays could be created because the maximum number already exist + VROverlayError_WrongVisibilityType = 14, + VROverlayError_KeyTooLong = 15, + VROverlayError_NameTooLong = 16, + VROverlayError_KeyInUse = 17, + VROverlayError_WrongTransformType = 18, + VROverlayError_InvalidTrackedDevice = 19, + VROverlayError_InvalidParameter = 20, + VROverlayError_ThumbnailCantBeDestroyed = 21, + VROverlayError_ArrayTooSmall = 22, + VROverlayError_RequestFailed = 23, + VROverlayError_InvalidTexture = 24, + VROverlayError_UnableToLoadFile = 25, + VROVerlayError_KeyboardAlreadyInUse = 26, + VROverlayError_NoNeighbor = 27, +}; + +/** enum values to pass in to VR_Init to identify whether the application will +* draw a 3D scene. */ +enum EVRApplicationType +{ + VRApplication_Other = 0, // Some other kind of application that isn't covered by the other entries + VRApplication_Scene = 1, // Application will submit 3D frames + VRApplication_Overlay = 2, // Application only interacts with overlays + VRApplication_Background = 3, // Application should not start SteamVR if it's not already running, and should not + // keep it running if everything else quits. + VRApplication_Utility = 4, // Init should not try to load any drivers. The application needs access to utility + // interfaces (like IVRSettings and IVRApplications) but not hardware. 
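  // A minimal illustrative sketch of how these values are passed to the VR_Init entry point
  // that accompanies this header; shown for a scene application, with error handling elided:
  //
  //   vr::EVRInitError eError = vr::VRInitError_None;
  //   vr::IVRSystem *pSystem = vr::VR_Init( &eError, vr::VRApplication_Scene );
  //   if ( eError != vr::VRInitError_None )
  //   {
  //       // pSystem is null; see EVRInitError below for the failure reason
  //   }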
+ VRApplication_VRMonitor = 5, // Reserved for vrmonitor + VRApplication_SteamWatchdog = 6,// Reserved for Steam + + VRApplication_Max +}; + + +/** error codes for firmware */ +enum EVRFirmwareError +{ + VRFirmwareError_None = 0, + VRFirmwareError_Success = 1, + VRFirmwareError_Fail = 2, +}; + + +/** error codes for notifications */ +enum EVRNotificationError +{ + VRNotificationError_OK = 0, + VRNotificationError_InvalidNotificationId = 100, + VRNotificationError_NotificationQueueFull = 101, + VRNotificationError_InvalidOverlayHandle = 102, + VRNotificationError_SystemWithUserValueAlreadyExists = 103, +}; + + +/** error codes returned by Vr_Init */ + +// Please add adequate error description to https://developer.valvesoftware.com/w/index.php?title=Category:SteamVRHelp +enum EVRInitError +{ + VRInitError_None = 0, + VRInitError_Unknown = 1, + + VRInitError_Init_InstallationNotFound = 100, + VRInitError_Init_InstallationCorrupt = 101, + VRInitError_Init_VRClientDLLNotFound = 102, + VRInitError_Init_FileNotFound = 103, + VRInitError_Init_FactoryNotFound = 104, + VRInitError_Init_InterfaceNotFound = 105, + VRInitError_Init_InvalidInterface = 106, + VRInitError_Init_UserConfigDirectoryInvalid = 107, + VRInitError_Init_HmdNotFound = 108, + VRInitError_Init_NotInitialized = 109, + VRInitError_Init_PathRegistryNotFound = 110, + VRInitError_Init_NoConfigPath = 111, + VRInitError_Init_NoLogPath = 112, + VRInitError_Init_PathRegistryNotWritable = 113, + VRInitError_Init_AppInfoInitFailed = 114, + VRInitError_Init_Retry = 115, // Used internally to cause retries to vrserver + VRInitError_Init_InitCanceledByUser = 116, // The calling application should silently exit. The user canceled app startup + VRInitError_Init_AnotherAppLaunching = 117, + VRInitError_Init_SettingsInitFailed = 118, + VRInitError_Init_ShuttingDown = 119, + VRInitError_Init_TooManyObjects = 120, + VRInitError_Init_NoServerForBackgroundApp = 121, + VRInitError_Init_NotSupportedWithCompositor = 122, + VRInitError_Init_NotAvailableToUtilityApps = 123, + VRInitError_Init_Internal = 124, + VRInitError_Init_HmdDriverIdIsNone = 125, + VRInitError_Init_HmdNotFoundPresenceFailed = 126, + VRInitError_Init_VRMonitorNotFound = 127, + VRInitError_Init_VRMonitorStartupFailed = 128, + VRInitError_Init_LowPowerWatchdogNotSupported = 129, + VRInitError_Init_InvalidApplicationType = 130, + VRInitError_Init_NotAvailableToWatchdogApps = 131, + VRInitError_Init_WatchdogDisabledInSettings = 132, + + VRInitError_Driver_Failed = 200, + VRInitError_Driver_Unknown = 201, + VRInitError_Driver_HmdUnknown = 202, + VRInitError_Driver_NotLoaded = 203, + VRInitError_Driver_RuntimeOutOfDate = 204, + VRInitError_Driver_HmdInUse = 205, + VRInitError_Driver_NotCalibrated = 206, + VRInitError_Driver_CalibrationInvalid = 207, + VRInitError_Driver_HmdDisplayNotFound = 208, + VRInitError_Driver_TrackedDeviceInterfaceUnknown = 209, + // VRInitError_Driver_HmdDisplayNotFoundAfterFix = 210, // not needed: here for historic reasons + VRInitError_Driver_HmdDriverIdOutOfBounds = 211, + VRInitError_Driver_HmdDisplayMirrored = 212, + + VRInitError_IPC_ServerInitFailed = 300, + VRInitError_IPC_ConnectFailed = 301, + VRInitError_IPC_SharedStateInitFailed = 302, + VRInitError_IPC_CompositorInitFailed = 303, + VRInitError_IPC_MutexInitFailed = 304, + VRInitError_IPC_Failed = 305, + VRInitError_IPC_CompositorConnectFailed = 306, + VRInitError_IPC_CompositorInvalidConnectResponse = 307, + VRInitError_IPC_ConnectFailedAfterMultipleAttempts = 308, + + VRInitError_Compositor_Failed = 400, 
+ VRInitError_Compositor_D3D11HardwareRequired = 401, + VRInitError_Compositor_FirmwareRequiresUpdate = 402, + VRInitError_Compositor_OverlayInitFailed = 403, + VRInitError_Compositor_ScreenshotsInitFailed = 404, + + VRInitError_VendorSpecific_UnableToConnectToOculusRuntime = 1000, + + VRInitError_VendorSpecific_HmdFound_CantOpenDevice = 1101, + VRInitError_VendorSpecific_HmdFound_UnableToRequestConfigStart = 1102, + VRInitError_VendorSpecific_HmdFound_NoStoredConfig = 1103, + VRInitError_VendorSpecific_HmdFound_ConfigTooBig = 1104, + VRInitError_VendorSpecific_HmdFound_ConfigTooSmall = 1105, + VRInitError_VendorSpecific_HmdFound_UnableToInitZLib = 1106, + VRInitError_VendorSpecific_HmdFound_CantReadFirmwareVersion = 1107, + VRInitError_VendorSpecific_HmdFound_UnableToSendUserDataStart = 1108, + VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataStart = 1109, + VRInitError_VendorSpecific_HmdFound_UnableToGetUserDataNext = 1110, + VRInitError_VendorSpecific_HmdFound_UserDataAddressRange = 1111, + VRInitError_VendorSpecific_HmdFound_UserDataError = 1112, + VRInitError_VendorSpecific_HmdFound_ConfigFailedSanityCheck = 1113, + + VRInitError_Steam_SteamInstallationNotFound = 2000, +}; + +enum EVRScreenshotType +{ + VRScreenshotType_None = 0, + VRScreenshotType_Mono = 1, // left eye only + VRScreenshotType_Stereo = 2, + VRScreenshotType_Cubemap = 3, + VRScreenshotType_MonoPanorama = 4, + VRScreenshotType_StereoPanorama = 5 +}; + +enum EVRScreenshotPropertyFilenames +{ + VRScreenshotPropertyFilenames_Preview = 0, + VRScreenshotPropertyFilenames_VR = 1, +}; + +enum EVRTrackedCameraError +{ + VRTrackedCameraError_None = 0, + VRTrackedCameraError_OperationFailed = 100, + VRTrackedCameraError_InvalidHandle = 101, + VRTrackedCameraError_InvalidFrameHeaderVersion = 102, + VRTrackedCameraError_OutOfHandles = 103, + VRTrackedCameraError_IPCFailure = 104, + VRTrackedCameraError_NotSupportedForThisDevice = 105, + VRTrackedCameraError_SharedMemoryFailure = 106, + VRTrackedCameraError_FrameBufferingFailure = 107, + VRTrackedCameraError_StreamSetupFailure = 108, + VRTrackedCameraError_InvalidGLTextureId = 109, + VRTrackedCameraError_InvalidSharedTextureHandle = 110, + VRTrackedCameraError_FailedToGetGLTextureId = 111, + VRTrackedCameraError_SharedTextureFailure = 112, + VRTrackedCameraError_NoFrameAvailable = 113, + VRTrackedCameraError_InvalidArgument = 114, + VRTrackedCameraError_InvalidFrameBufferSize = 115, +}; + +enum EVRTrackedCameraFrameType +{ + VRTrackedCameraFrameType_Distorted = 0, // This is the camera video frame size in pixels, still distorted. + VRTrackedCameraFrameType_Undistorted, // In pixels, an undistorted inscribed rectangle region without invalid regions. This size is subject to changes shortly. + VRTrackedCameraFrameType_MaximumUndistorted, // In pixels, maximum undistorted with invalid regions. Non zero alpha component identifies valid regions. 
+ MAX_CAMERA_FRAME_TYPES +}; + +typedef uint64_t TrackedCameraHandle_t; +#define INVALID_TRACKED_CAMERA_HANDLE ((vr::TrackedCameraHandle_t)0) + +struct CameraVideoStreamFrameHeader_t +{ + EVRTrackedCameraFrameType eFrameType; + + uint32_t nWidth; + uint32_t nHeight; + uint32_t nBytesPerPixel; + + uint32_t nFrameSequence; + + TrackedDevicePose_t standingTrackedDevicePose; +}; + +// Screenshot types +typedef uint32_t ScreenshotHandle_t; + +static const uint32_t k_unScreenshotHandleInvalid = 0; + +#pragma pack( pop ) + +// figure out how to import from the VR API dll +#if defined(_WIN32) + +#ifdef VR_API_EXPORT +#define VR_INTERFACE extern "C" __declspec( dllexport ) +#else +#define VR_INTERFACE extern "C" __declspec( dllimport ) +#endif + +#elif defined(__GNUC__) || defined(COMPILER_GCC) || defined(__APPLE__) + +#ifdef VR_API_EXPORT +#define VR_INTERFACE extern "C" __attribute__((visibility("default"))) +#else +#define VR_INTERFACE extern "C" +#endif + +#else +#error "Unsupported Platform." +#endif + + +#if defined( _WIN32 ) +#define VR_CALLTYPE __cdecl +#else +#define VR_CALLTYPE +#endif + +} // namespace vr + +#endif // _INCLUDE_VRTYPES_H + + +// vrannotation.h +#ifdef API_GEN +# define VR_CLANG_ATTR(ATTR) __attribute__((annotate( ATTR ))) +#else +# define VR_CLANG_ATTR(ATTR) +#endif + +#define VR_METHOD_DESC(DESC) VR_CLANG_ATTR( "desc:" #DESC ";" ) +#define VR_IGNOREATTR() VR_CLANG_ATTR( "ignore" ) +#define VR_OUT_STRUCT() VR_CLANG_ATTR( "out_struct: ;" ) +#define VR_OUT_STRING() VR_CLANG_ATTR( "out_string: ;" ) +#define VR_OUT_ARRAY_CALL(COUNTER,FUNCTION,PARAMS) VR_CLANG_ATTR( "out_array_call:" #COUNTER "," #FUNCTION "," #PARAMS ";" ) +#define VR_OUT_ARRAY_COUNT(COUNTER) VR_CLANG_ATTR( "out_array_count:" #COUNTER ";" ) +#define VR_ARRAY_COUNT(COUNTER) VR_CLANG_ATTR( "array_count:" #COUNTER ";" ) +#define VR_ARRAY_COUNT_D(COUNTER, DESC) VR_CLANG_ATTR( "array_count:" #COUNTER ";desc:" #DESC ) +#define VR_BUFFER_COUNT(COUNTER) VR_CLANG_ATTR( "buffer_count:" #COUNTER ";" ) +#define VR_OUT_BUFFER_COUNT(COUNTER) VR_CLANG_ATTR( "out_buffer_count:" #COUNTER ";" ) +#define VR_OUT_STRING_COUNT(COUNTER) VR_CLANG_ATTR( "out_string_count:" #COUNTER ";" ) + +// ivrsystem.h +namespace vr +{ + +class IVRSystem +{ +public: + + + // ------------------------------------ + // Display Methods + // ------------------------------------ + + /** Suggested size for the intermediate render target that the distortion pulls from. */ + virtual void GetRecommendedRenderTargetSize( uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** The projection matrix for the specified eye */ + virtual HmdMatrix44_t GetProjectionMatrix( EVREye eEye, float fNearZ, float fFarZ, EGraphicsAPIConvention eProjType ) = 0; + + /** The components necessary to build your own projection matrix in case your + * application is doing something fancy like infinite Z */ + virtual void GetProjectionRaw( EVREye eEye, float *pfLeft, float *pfRight, float *pfTop, float *pfBottom ) = 0; + + /** Returns the result of the distortion function for the specified eye and input UVs. UVs go from 0,0 in + * the upper left of that eye's viewport and 1,1 in the lower right of that eye's viewport. */ + virtual DistortionCoordinates_t ComputeDistortion( EVREye eEye, float fU, float fV ) = 0; + + /** Returns the transform from eye space to the head space. Eye space is the per-eye flavor of head + * space that provides stereo disparity. Instead of Model * View * Projection the sequence is Model * View * Eye^-1 * Projection. 
+ * Normally View and Eye^-1 will be multiplied together and treated as View in your application. + */ + virtual HmdMatrix34_t GetEyeToHeadTransform( EVREye eEye ) = 0; + + /** Returns the number of elapsed seconds since the last recorded vsync event. This + * will come from a vsync timer event in the timer if possible or from the application-reported + * time if that is not available. If no vsync times are available the function will + * return zero for vsync time and frame counter and return false from the method. */ + virtual bool GetTimeSinceLastVsync( float *pfSecondsSinceLastVsync, uint64_t *pulFrameCounter ) = 0; + + /** [D3D9 Only] + * Returns the adapter index that the user should pass into CreateDevice to set up D3D9 in such + * a way that it can go full screen exclusive on the HMD. Returns -1 if there was an error. + */ + virtual int32_t GetD3D9AdapterIndex() = 0; + + /** [D3D10/11 Only] + * Returns the adapter index and output index that the user should pass into EnumAdapters and EnumOutputs + * to create the device and swap chain in DX10 and DX11. If an error occurs both indices will be set to -1. + */ + virtual void GetDXGIOutputInfo( int32_t *pnAdapterIndex ) = 0; + + // ------------------------------------ + // Display Mode methods + // ------------------------------------ + + /** Use to determine if the headset display is part of the desktop (i.e. extended) or hidden (i.e. direct mode). */ + virtual bool IsDisplayOnDesktop() = 0; + + /** Set the display visibility (true = extended, false = direct mode). Return value of true indicates that the change was successful. */ + virtual bool SetDisplayVisibility( bool bIsVisibleOnDesktop ) = 0; + + // ------------------------------------ + // Tracking Methods + // ------------------------------------ + + /** The pose that the tracker thinks that the HMD will be in at the specified number of seconds into the + * future. Pass 0 to get the state at the instant the method is called. Most of the time the application should + * calculate the time until the photons will be emitted from the display and pass that time into the method. + * + * This is roughly analogous to the inverse of the view matrix in most applications, though + * many games will need to do some additional rotation or translation on top of the rotation + * and translation provided by the head pose. + * + * For devices where bPoseIsValid is true the application can use the pose to position the device + * in question. The provided array can be any size up to k_unMaxTrackedDeviceCount. + * + * Seated experiences should call this method with TrackingUniverseSeated and receive poses relative + * to the seated zero pose. Standing experiences should call this method with TrackingUniverseStanding + * and receive poses relative to the Chaperone Play Area. TrackingUniverseRawAndUncalibrated should + * probably not be used unless the application is the Chaperone calibration tool itself, but will provide + * poses relative to the hardware-specific coordinate system in the driver. + */ + virtual void GetDeviceToAbsoluteTrackingPose( ETrackingUniverseOrigin eOrigin, float fPredictedSecondsToPhotonsFromNow, VR_ARRAY_COUNT(unTrackedDevicePoseArrayCount) TrackedDevicePose_t *pTrackedDevicePoseArray, uint32_t unTrackedDevicePoseArrayCount ) = 0; + + /** Sets the zero pose for the seated tracker coordinate system to the current position and yaw of the HMD. 
After + * ResetSeatedZeroPose all GetDeviceToAbsoluteTrackingPose calls that pass TrackingUniverseSeated as the origin + * will be relative to this new zero pose. The new zero coordinate system will not change the fact that the Y axis + * is up in the real world, so the next pose returned from GetDeviceToAbsoluteTrackingPose after a call to + * ResetSeatedZeroPose may not be exactly an identity matrix. + * + * NOTE: This function overrides the user's previously saved seated zero pose and should only be called as the result of a user action. + * Users are also able to set their seated zero pose via the OpenVR Dashboard. + **/ + virtual void ResetSeatedZeroPose() = 0; + + /** Returns the transform from the seated zero pose to the standing absolute tracking system. This allows + * applications to represent the seated origin to used or transform object positions from one coordinate + * system to the other. + * + * The seated origin may or may not be inside the Play Area or Collision Bounds returned by IVRChaperone. Its position + * depends on what the user has set from the Dashboard settings and previous calls to ResetSeatedZeroPose. */ + virtual HmdMatrix34_t GetSeatedZeroPoseToStandingAbsoluteTrackingPose() = 0; + + /** Returns the transform from the tracking origin to the standing absolute tracking system. This allows + * applications to convert from raw tracking space to the calibrated standing coordinate system. */ + virtual HmdMatrix34_t GetRawZeroPoseToStandingAbsoluteTrackingPose() = 0; + + /** Get a sorted array of device indices of a given class of tracked devices (e.g. controllers). Devices are sorted right to left + * relative to the specified tracked device (default: hmd -- pass in -1 for absolute tracking space). Returns the number of devices + * in the list, or the size of the array needed if not large enough. */ + virtual uint32_t GetSortedTrackedDeviceIndicesOfClass( ETrackedDeviceClass eTrackedDeviceClass, VR_ARRAY_COUNT(unTrackedDeviceIndexArrayCount) vr::TrackedDeviceIndex_t *punTrackedDeviceIndexArray, uint32_t unTrackedDeviceIndexArrayCount, vr::TrackedDeviceIndex_t unRelativeToTrackedDeviceIndex = k_unTrackedDeviceIndex_Hmd ) = 0; + + /** Returns the level of activity on the device. */ + virtual EDeviceActivityLevel GetTrackedDeviceActivityLevel( vr::TrackedDeviceIndex_t unDeviceId ) = 0; + + /** Convenience utility to apply the specified transform to the specified pose. + * This properly transforms all pose components, including velocity and angular velocity + */ + virtual void ApplyTransform( TrackedDevicePose_t *pOutputPose, const TrackedDevicePose_t *pTrackedDevicePose, const HmdMatrix34_t *pTransform ) = 0; + + /** Returns the device index associated with a specific role, for example the left hand or the right hand. */ + virtual vr::TrackedDeviceIndex_t GetTrackedDeviceIndexForControllerRole( vr::ETrackedControllerRole unDeviceType ) = 0; + + /** Returns the controller type associated with a device index. */ + virtual vr::ETrackedControllerRole GetControllerRoleForTrackedDeviceIndex( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + // ------------------------------------ + // Property methods + // ------------------------------------ + + /** Returns the device class of a tracked device. If there has not been a device connected in this slot + * since the application started this function will return TrackedDevice_Invalid. For previous detected + * devices the function will return the previously observed device class. 
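 A minimal illustrative sketch of scanning every slot this way, assuming pSystem is an
 IVRSystem pointer:

    for ( vr::TrackedDeviceIndex_t i = 0; i < vr::k_unMaxTrackedDeviceCount; ++i )
    {
        vr::ETrackedDeviceClass eClass = pSystem->GetTrackedDeviceClass( i );
        if ( eClass != vr::TrackedDeviceClass_Invalid )
        {
            // slot i has (or previously had) a real device of class eClass
        }
    }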
+ * + * To determine which devices exist on the system, just loop from 0 to k_unMaxTrackedDeviceCount and check + * the device class. Every device with something other than TrackedDevice_Invalid is associated with an + * actual tracked device. */ + virtual ETrackedDeviceClass GetTrackedDeviceClass( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + /** Returns true if there is a device connected in this slot. */ + virtual bool IsTrackedDeviceConnected( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + /** Returns a bool property. If the device index is not valid or the property is not a bool type this function will return false. */ + virtual bool GetBoolTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a float property. If the device index is not valid or the property is not a float type this function will return 0. */ + virtual float GetFloatTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns an int property. If the device index is not valid or the property is not a int type this function will return 0. */ + virtual int32_t GetInt32TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a uint64 property. If the device index is not valid or the property is not a uint64 type this function will return 0. */ + virtual uint64_t GetUint64TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a matrix property. If the device index is not valid or the property is not a matrix type, this function will return identity. */ + virtual HmdMatrix34_t GetMatrix34TrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, ETrackedPropertyError *pError = 0L ) = 0; + + /** Returns a string property. If the device index is not valid or the property is not a string type this function will + * return 0. Otherwise it returns the length of the number of bytes necessary to hold this string including the trailing + * null. Strings will generally fit in buffers of k_unTrackingStringSize characters. */ + virtual uint32_t GetStringTrackedDeviceProperty( vr::TrackedDeviceIndex_t unDeviceIndex, ETrackedDeviceProperty prop, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, ETrackedPropertyError *pError = 0L ) = 0; + + /** returns a string that corresponds with the specified property error. The string will be the name + * of the error enum value for all valid error codes */ + virtual const char *GetPropErrorNameFromEnum( ETrackedPropertyError error ) = 0; + + // ------------------------------------ + // Event methods + // ------------------------------------ + + /** Returns true and fills the event with the next event on the queue if there is one. If there are no events + * this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct */ + virtual bool PollNextEvent( VREvent_t *pEvent, uint32_t uncbVREvent ) = 0; + + /** Returns true and fills the event with the next event on the queue if there is one. If there are no events + * this method returns false. Fills in the pose of the associated tracked device in the provided pose struct. + * This pose will always be older than the call to this function and should not be used to render the device. 
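 A minimal illustrative sketch of a per-frame event pump using the simpler PollNextEvent
 declared above; pSystem is an assumed IVRSystem pointer:

    vr::VREvent_t event;
    while ( pSystem->PollNextEvent( &event, sizeof( event ) ) )
    {
        switch ( event.eventType )
        {
        case vr::VREvent_TrackedDeviceActivated:
            // a device appeared; event.trackedDeviceIndex identifies it
            break;
        case vr::VREvent_Quit:
            // begin the shutdown flow (see the application life cycle methods below)
            break;
        default:
            break;
        }
    }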
+ uncbVREvent should be the size in bytes of the VREvent_t struct */ + virtual bool PollNextEventWithPose( ETrackingUniverseOrigin eOrigin, VREvent_t *pEvent, uint32_t uncbVREvent, vr::TrackedDevicePose_t *pTrackedDevicePose ) = 0; + + /** returns the name of an EVREvent enum value */ + virtual const char *GetEventTypeNameFromEnum( EVREventType eType ) = 0; + + // ------------------------------------ + // Rendering helper methods + // ------------------------------------ + + /** Returns the stencil mesh information for the current HMD. If this HMD does not have a stencil mesh the vertex data and count will be + * NULL and 0 respectively. This mesh is meant to be rendered into the stencil buffer (or into the depth buffer setting nearz) before rendering + * each eye's view. The pixels covered by this mesh will never be seen by the user after the lens distortion is applied and based on visibility to the panels. + * This will improve perf by letting the GPU early-reject pixels the user will never see before running the pixel shader. + * NOTE: Render this mesh with backface culling disabled since the winding order of the vertices can be different per-HMD or per-eye. + */ + virtual HiddenAreaMesh_t GetHiddenAreaMesh( EVREye eEye ) = 0; + + + // ------------------------------------ + // Controller methods + // ------------------------------------ + + /** Fills the supplied struct with the current state of the controller. Returns false if the controller index + * is invalid. */ + virtual bool GetControllerState( vr::TrackedDeviceIndex_t unControllerDeviceIndex, vr::VRControllerState_t *pControllerState ) = 0; + + /** fills the supplied struct with the current state of the controller and the provided pose with the pose of + * the controller when the controller state was updated most recently. Use this form if you need a precise controller + * pose as input to your application when the user presses or releases a button. */ + virtual bool GetControllerStateWithPose( ETrackingUniverseOrigin eOrigin, vr::TrackedDeviceIndex_t unControllerDeviceIndex, vr::VRControllerState_t *pControllerState, TrackedDevicePose_t *pTrackedDevicePose ) = 0; + + /** Trigger a single haptic pulse on a controller. After this call the application may not trigger another haptic pulse on this controller + * and axis combination for 5ms. */ + virtual void TriggerHapticPulse( vr::TrackedDeviceIndex_t unControllerDeviceIndex, uint32_t unAxisId, unsigned short usDurationMicroSec ) = 0; + + /** returns the name of an EVRButtonId enum value */ + virtual const char *GetButtonIdNameFromEnum( EVRButtonId eButtonId ) = 0; + + /** returns the name of an EVRControllerAxisType enum value */ + virtual const char *GetControllerAxisTypeNameFromEnum( EVRControllerAxisType eAxisType ) = 0; + + /** Tells OpenVR that this process wants exclusive access to controller button states and button events. Other apps will be notified that + * they have lost input focus with a VREvent_InputFocusCaptured event. Returns false if input focus could not be captured for + * some reason. */ + virtual bool CaptureInputFocus() = 0; + + /** Tells OpenVR that this process no longer wants exclusive access to button states and button events. Other apps will be notified + * that input focus has been released with a VREvent_InputFocusReleased event. */ + virtual void ReleaseInputFocus() = 0; + + /** Returns true if input focus is captured by another process. 
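 Together with CaptureInputFocus and ReleaseInputFocus above, this lets an application skip
 acting on controller input while another process owns it. A minimal illustrative sketch,
 assuming pSystem is an IVRSystem pointer:

    if ( !pSystem->IsInputFocusCapturedByAnotherProcess() )
    {
        // controller button state can be treated as directed at this application
    }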
*/ + virtual bool IsInputFocusCapturedByAnotherProcess() = 0; + + // ------------------------------------ + // Debug Methods + // ------------------------------------ + + /** Sends a request to the driver for the specified device and returns the response. The maximum response size is 32k, + * but this method can be called with a smaller buffer. If the response exceeds the size of the buffer, it is truncated. + * The size of the response including its terminating null is returned. */ + virtual uint32_t DriverDebugRequest( vr::TrackedDeviceIndex_t unDeviceIndex, const char *pchRequest, char *pchResponseBuffer, uint32_t unResponseBufferSize ) = 0; + + + // ------------------------------------ + // Firmware methods + // ------------------------------------ + + /** Performs the actual firmware update if applicable. + * The following events will be sent, if VRFirmwareError_None was returned: VREvent_FirmwareUpdateStarted, VREvent_FirmwareUpdateFinished + * Use the properties Prop_Firmware_UpdateAvailable_Bool, Prop_Firmware_ManualUpdate_Bool, and Prop_Firmware_ManualUpdateURL_String + * to figure our whether a firmware update is available, and to figure out whether its a manual update + * Prop_Firmware_ManualUpdateURL_String should point to an URL describing the manual update process */ + virtual vr::EVRFirmwareError PerformFirmwareUpdate( vr::TrackedDeviceIndex_t unDeviceIndex ) = 0; + + + // ------------------------------------ + // Application life cycle methods + // ------------------------------------ + + /** Call this to acknowledge to the system that VREvent_Quit has been received and that the process is exiting. + * This extends the timeout until the process is killed. */ + virtual void AcknowledgeQuit_Exiting() = 0; + + /** Call this to tell the system that the user is being prompted to save data. This + * halts the timeout and dismisses the dashboard (if it was up). Applications should be sure to actually + * prompt the user to save and then exit afterward, otherwise the user will be left in a confusing state. 
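 A minimal illustrative sketch of the quit flow described above, as it might appear inside an
 event handler; PromptUserToSaveWork is an assumed application routine and pSystem an assumed
 IVRSystem pointer:

    case vr::VREvent_Quit:
        pSystem->AcknowledgeQuit_UserPrompt();   // halt the kill timeout, dismiss the dashboard
        PromptUserToSaveWork();
        pSystem->AcknowledgeQuit_Exiting();      // the process is now actually exiting
        break;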
*/ + virtual void AcknowledgeQuit_UserPrompt() = 0; + +}; + +static const char * const IVRSystem_Version = "IVRSystem_012"; + +} + + +// ivrapplications.h +namespace vr +{ + + /** Used for all errors reported by the IVRApplications interface */ + enum EVRApplicationError + { + VRApplicationError_None = 0, + + VRApplicationError_AppKeyAlreadyExists = 100, // Only one application can use any given key + VRApplicationError_NoManifest = 101, // the running application does not have a manifest + VRApplicationError_NoApplication = 102, // No application is running + VRApplicationError_InvalidIndex = 103, + VRApplicationError_UnknownApplication = 104, // the application could not be found + VRApplicationError_IPCFailed = 105, // An IPC failure caused the request to fail + VRApplicationError_ApplicationAlreadyRunning = 106, + VRApplicationError_InvalidManifest = 107, + VRApplicationError_InvalidApplication = 108, + VRApplicationError_LaunchFailed = 109, // the process didn't start + VRApplicationError_ApplicationAlreadyStarting = 110, // the system was already starting the same application + VRApplicationError_LaunchInProgress = 111, // The system was already starting a different application + VRApplicationError_OldApplicationQuitting = 112, + VRApplicationError_TransitionAborted = 113, + VRApplicationError_IsTemplate = 114, // error when you try to call LaunchApplication() on a template type app (use LaunchTemplateApplication) + + VRApplicationError_BufferTooSmall = 200, // The provided buffer was too small to fit the requested data + VRApplicationError_PropertyNotSet = 201, // The requested property was not set + VRApplicationError_UnknownProperty = 202, + VRApplicationError_InvalidParameter = 203, + }; + + /** The maximum length of an application key */ + static const uint32_t k_unMaxApplicationKeyLength = 128; + + /** these are the properties available on applications. */ + enum EVRApplicationProperty + { + VRApplicationProperty_Name_String = 0, + + VRApplicationProperty_LaunchType_String = 11, + VRApplicationProperty_WorkingDirectory_String = 12, + VRApplicationProperty_BinaryPath_String = 13, + VRApplicationProperty_Arguments_String = 14, + VRApplicationProperty_URL_String = 15, + + VRApplicationProperty_Description_String = 50, + VRApplicationProperty_NewsURL_String = 51, + VRApplicationProperty_ImagePath_String = 52, + VRApplicationProperty_Source_String = 53, + + VRApplicationProperty_IsDashboardOverlay_Bool = 60, + VRApplicationProperty_IsTemplate_Bool = 61, + VRApplicationProperty_IsInstanced_Bool = 62, + + VRApplicationProperty_LastLaunchTime_Uint64 = 70, + }; + + /** These are states the scene application startup process will go through. */ + enum EVRApplicationTransitionState + { + VRApplicationTransition_None = 0, + + VRApplicationTransition_OldAppQuitSent = 10, + VRApplicationTransition_WaitingForExternalLaunch = 11, + + VRApplicationTransition_NewAppLaunched = 20, + }; + + struct AppOverrideKeys_t + { + const char *pchKey; + const char *pchValue; + }; + + class IVRApplications + { + public: + + // --------------- Application management --------------- // + + /** Adds an application manifest to the list to load when building the list of installed applications. + * Temporary manifests are not automatically loaded */ + virtual EVRApplicationError AddApplicationManifest( const char *pchApplicationManifestFullPath, bool bTemporary = false ) = 0; + + /** Removes an application manifest from the list to load when building the list of installed applications. 
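 A minimal illustrative sketch of pairing this call with AddApplicationManifest above, as a
 launcher might do for a temporary manifest; pApplications and pchManifestPath are assumed:

    pApplications->AddApplicationManifest( pchManifestPath, true );   // bTemporary
    // ... while the launcher runs ...
    pApplications->RemoveApplicationManifest( pchManifestPath );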
*/ + virtual EVRApplicationError RemoveApplicationManifest( const char *pchApplicationManifestFullPath ) = 0; + + /** Returns true if an application is installed */ + virtual bool IsApplicationInstalled( const char *pchAppKey ) = 0; + + /** Returns the number of applications available in the list */ + virtual uint32_t GetApplicationCount() = 0; + + /** Returns the key of the specified application. The index is at least 0 and is less than the return + * value of GetApplicationCount(). The buffer should be at least k_unMaxApplicationKeyLength in order to + * fit the key. */ + virtual EVRApplicationError GetApplicationKeyByIndex( uint32_t unApplicationIndex, char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Returns the key of the application for the specified Process Id. The buffer should be at least + * k_unMaxApplicationKeyLength in order to fit the key. */ + virtual EVRApplicationError GetApplicationKeyByProcessId( uint32_t unProcessId, char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Launches the application. The existing scene application will exit and then the new application will start. + * This call is not valid for dashboard overlay applications. */ + virtual EVRApplicationError LaunchApplication( const char *pchAppKey ) = 0; + + /** Launches an instance of an application of type template, with its app key being pchNewAppKey (which must be unique) and optionally override sections + * from the manifest file via AppOverrideKeys_t + */ + virtual EVRApplicationError LaunchTemplateApplication( const char *pchTemplateAppKey, const char *pchNewAppKey, VR_ARRAY_COUNT( unKeys ) const AppOverrideKeys_t *pKeys, uint32_t unKeys ) = 0; + + /** launches the application currently associated with this mime type and passes it the option args, typically the filename or object name of the item being launched */ + virtual vr::EVRApplicationError LaunchApplicationFromMimeType( const char *pchMimeType, const char *pchArgs ) = 0; + + /** Launches the dashboard overlay application if it is not already running. This call is only valid for + * dashboard overlay applications. */ + virtual EVRApplicationError LaunchDashboardOverlay( const char *pchAppKey ) = 0; + + /** Cancel a pending launch for an application */ + virtual bool CancelApplicationLaunch( const char *pchAppKey ) = 0; + + /** Identifies a running application. OpenVR can't always tell which process started in response + * to a URL. This function allows a URL handler (or the process itself) to identify the app key + * for the now running application. Passing a process ID of 0 identifies the calling process. + * The application must be one that's known to the system via a call to AddApplicationManifest. */ + virtual EVRApplicationError IdentifyApplication( uint32_t unProcessId, const char *pchAppKey ) = 0; + + /** Returns the process ID for an application. Return 0 if the application was not found or is not running. */ + virtual uint32_t GetApplicationProcessId( const char *pchAppKey ) = 0; + + /** Returns a string for an applications error */ + virtual const char *GetApplicationsErrorNameFromEnum( EVRApplicationError error ) = 0; + + // --------------- Application properties --------------- // + + /** Returns a value for an application property. The required buffer size to fit this value will be returned. 
*/ + virtual uint32_t GetApplicationPropertyString( const char *pchAppKey, EVRApplicationProperty eProperty, char *pchPropertyValueBuffer, uint32_t unPropertyValueBufferLen, EVRApplicationError *peError = nullptr ) = 0; + + /** Returns a bool value for an application property. Returns false in all error cases. */ + virtual bool GetApplicationPropertyBool( const char *pchAppKey, EVRApplicationProperty eProperty, EVRApplicationError *peError = nullptr ) = 0; + + /** Returns a uint64 value for an application property. Returns 0 in all error cases. */ + virtual uint64_t GetApplicationPropertyUint64( const char *pchAppKey, EVRApplicationProperty eProperty, EVRApplicationError *peError = nullptr ) = 0; + + /** Sets the application auto-launch flag. This is only valid for applications which return true for VRApplicationProperty_IsDashboardOverlay_Bool. */ + virtual EVRApplicationError SetApplicationAutoLaunch( const char *pchAppKey, bool bAutoLaunch ) = 0; + + /** Gets the application auto-launch flag. This is only valid for applications which return true for VRApplicationProperty_IsDashboardOverlay_Bool. */ + virtual bool GetApplicationAutoLaunch( const char *pchAppKey ) = 0; + + /** Adds this mime-type to the list of supported mime types for this application*/ + virtual EVRApplicationError SetDefaultApplicationForMimeType( const char *pchAppKey, const char *pchMimeType ) = 0; + + /** return the app key that will open this mime type */ + virtual bool GetDefaultApplicationForMimeType( const char *pchMimeType, char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Get the list of supported mime types for this application, comma-delimited */ + virtual bool GetApplicationSupportedMimeTypes( const char *pchAppKey, char *pchMimeTypesBuffer, uint32_t unMimeTypesBuffer ) = 0; + + /** Get the list of app-keys that support this mime type, comma-delimited, the return value is number of bytes you need to return the full string */ + virtual uint32_t GetApplicationsThatSupportMimeType( const char *pchMimeType, char *pchAppKeysThatSupportBuffer, uint32_t unAppKeysThatSupportBuffer ) = 0; + + /** Get the args list from an app launch that had the process already running, you call this when you get a VREvent_ApplicationMimeTypeLoad */ + virtual uint32_t GetApplicationLaunchArguments( uint32_t unHandle, char *pchArgs, uint32_t unArgs ) = 0; + + // --------------- Transition methods --------------- // + + /** Returns the app key for the application that is starting up */ + virtual EVRApplicationError GetStartingApplication( char *pchAppKeyBuffer, uint32_t unAppKeyBufferLen ) = 0; + + /** Returns the application transition state */ + virtual EVRApplicationTransitionState GetTransitionState() = 0; + + /** Returns errors that would prevent the specified application from launching immediately. Calling this function will + * cause the current scene application to quit, so only call it when you are actually about to launch something else. + * What the caller should do about these failures depends on the failure: + * VRApplicationError_OldApplicationQuitting - An existing application has been told to quit. Wait for a VREvent_ProcessQuit + * and try again. + * VRApplicationError_ApplicationAlreadyStarting - This application is already starting. This is a permanent failure. + * VRApplicationError_LaunchInProgress - A different application is already starting. This is a permanent failure. + * VRApplicationError_None - Go ahead and launch. Everything is clear. 
+ */ + virtual EVRApplicationError PerformApplicationPrelaunchCheck( const char *pchAppKey ) = 0; + + /** Returns a string for an application transition state */ + virtual const char *GetApplicationsTransitionStateNameFromEnum( EVRApplicationTransitionState state ) = 0; + + /** Returns true if the outgoing scene app has requested a save prompt before exiting */ + virtual bool IsQuitUserPromptRequested() = 0; + + /** Starts a subprocess within the calling application. This + * suppresses all application transition UI and automatically identifies the new executable + * as part of the same application. On success the calling process should exit immediately. + * If working directory is NULL or "" the directory portion of the binary path will be + * the working directory. */ + virtual EVRApplicationError LaunchInternalProcess( const char *pchBinaryPath, const char *pchArguments, const char *pchWorkingDirectory ) = 0; + }; + + static const char * const IVRApplications_Version = "IVRApplications_006"; + +} // namespace vr + +// ivrsettings.h +namespace vr +{ + enum EVRSettingsError + { + VRSettingsError_None = 0, + VRSettingsError_IPCFailed = 1, + VRSettingsError_WriteFailed = 2, + VRSettingsError_ReadFailed = 3, + }; + + // The maximum length of a settings key + static const uint32_t k_unMaxSettingsKeyLength = 128; + + class IVRSettings + { + public: + virtual const char *GetSettingsErrorNameFromEnum( EVRSettingsError eError ) = 0; + + // Returns true if file sync occurred (force or settings dirty) + virtual bool Sync( bool bForce = false, EVRSettingsError *peError = nullptr ) = 0; + + virtual bool GetBool( const char *pchSection, const char *pchSettingsKey, bool bDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetBool( const char *pchSection, const char *pchSettingsKey, bool bValue, EVRSettingsError *peError = nullptr ) = 0; + virtual int32_t GetInt32( const char *pchSection, const char *pchSettingsKey, int32_t nDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetInt32( const char *pchSection, const char *pchSettingsKey, int32_t nValue, EVRSettingsError *peError = nullptr ) = 0; + virtual float GetFloat( const char *pchSection, const char *pchSettingsKey, float flDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetFloat( const char *pchSection, const char *pchSettingsKey, float flValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void GetString( const char *pchSection, const char *pchSettingsKey, VR_OUT_STRING() char *pchValue, uint32_t unValueLen, const char *pchDefaultValue, EVRSettingsError *peError = nullptr ) = 0; + virtual void SetString( const char *pchSection, const char *pchSettingsKey, const char *pchValue, EVRSettingsError *peError = nullptr ) = 0; + + virtual void RemoveSection( const char *pchSection, EVRSettingsError *peError = nullptr ) = 0; + virtual void RemoveKeyInSection( const char *pchSection, const char *pchSettingsKey, EVRSettingsError *peError = nullptr ) = 0; + }; + + //----------------------------------------------------------------------------- + static const char * const IVRSettings_Version = "IVRSettings_001"; + + //----------------------------------------------------------------------------- + // steamvr keys + + static const char * const k_pch_SteamVR_Section = "steamvr"; + static const char * const k_pch_SteamVR_RequireHmd_String = "requireHmd"; + static const char * const k_pch_SteamVR_ForcedDriverKey_String = "forcedDriver"; + static const char * const 
k_pch_SteamVR_ForcedHmdKey_String = "forcedHmd"; + static const char * const k_pch_SteamVR_DisplayDebug_Bool = "displayDebug"; + static const char * const k_pch_SteamVR_DebugProcessPipe_String = "debugProcessPipe"; + static const char * const k_pch_SteamVR_EnableDistortion_Bool = "enableDistortion"; + static const char * const k_pch_SteamVR_DisplayDebugX_Int32 = "displayDebugX"; + static const char * const k_pch_SteamVR_DisplayDebugY_Int32 = "displayDebugY"; + static const char * const k_pch_SteamVR_SendSystemButtonToAllApps_Bool= "sendSystemButtonToAllApps"; + static const char * const k_pch_SteamVR_LogLevel_Int32 = "loglevel"; + static const char * const k_pch_SteamVR_IPD_Float = "ipd"; + static const char * const k_pch_SteamVR_Background_String = "background"; + static const char * const k_pch_SteamVR_BackgroundCameraHeight_Float = "backgroundCameraHeight"; + static const char * const k_pch_SteamVR_BackgroundDomeRadius_Float = "backgroundDomeRadius"; + static const char * const k_pch_SteamVR_Environment_String = "environment"; + static const char * const k_pch_SteamVR_GridColor_String = "gridColor"; + static const char * const k_pch_SteamVR_PlayAreaColor_String = "playAreaColor"; + static const char * const k_pch_SteamVR_ShowStage_Bool = "showStage"; + static const char * const k_pch_SteamVR_ActivateMultipleDrivers_Bool = "activateMultipleDrivers"; + static const char * const k_pch_SteamVR_PowerOffOnExit_Bool = "powerOffOnExit"; + static const char * const k_pch_SteamVR_StandbyAppRunningTimeout_Float = "standbyAppRunningTimeout"; + static const char * const k_pch_SteamVR_StandbyNoAppTimeout_Float = "standbyNoAppTimeout"; + static const char * const k_pch_SteamVR_DirectMode_Bool = "directMode"; + static const char * const k_pch_SteamVR_DirectModeEdidVid_Int32 = "directModeEdidVid"; + static const char * const k_pch_SteamVR_DirectModeEdidPid_Int32 = "directModeEdidPid"; + static const char * const k_pch_SteamVR_UsingSpeakers_Bool = "usingSpeakers"; + static const char * const k_pch_SteamVR_SpeakersForwardYawOffsetDegrees_Float = "speakersForwardYawOffsetDegrees"; + static const char * const k_pch_SteamVR_BaseStationPowerManagement_Bool = "basestationPowerManagement"; + static const char * const k_pch_SteamVR_NeverKillProcesses_Bool = "neverKillProcesses"; + static const char * const k_pch_SteamVR_RenderTargetMultiplier_Float = "renderTargetMultiplier"; + static const char * const k_pch_SteamVR_AllowReprojection_Bool = "allowReprojection"; + static const char * const k_pch_SteamVR_ForceReprojection_Bool = "forceReprojection"; + static const char * const k_pch_SteamVR_ForceFadeOnBadTracking_Bool = "forceFadeOnBadTracking"; + static const char * const k_pch_SteamVR_DefaultMirrorView_Int32 = "defaultMirrorView"; + static const char * const k_pch_SteamVR_ShowMirrorView_Bool = "showMirrorView"; + static const char * const k_pch_SteamVR_StartMonitorFromAppLaunch = "startMonitorFromAppLaunch"; + static const char * const k_pch_SteamVR_AutoLaunchSteamVROnButtonPress = "autoLaunchSteamVROnButtonPress"; + static const char * const k_pch_SteamVR_UseGenericGraphcisDevice_Bool = "useGenericGraphicsDevice"; + + + //----------------------------------------------------------------------------- + // lighthouse keys + + static const char * const k_pch_Lighthouse_Section = "driver_lighthouse"; + static const char * const k_pch_Lighthouse_DisableIMU_Bool = "disableimu"; + static const char * const k_pch_Lighthouse_UseDisambiguation_String = "usedisambiguation"; + static const char * const 
k_pch_Lighthouse_DisambiguationDebug_Int32 = "disambiguationdebug"; + + static const char * const k_pch_Lighthouse_PrimaryBasestation_Int32 = "primarybasestation"; + static const char * const k_pch_Lighthouse_LighthouseName_String = "lighthousename"; + static const char * const k_pch_Lighthouse_MaxIncidenceAngleDegrees_Float = "maxincidenceangledegrees"; + static const char * const k_pch_Lighthouse_UseLighthouseDirect_Bool = "uselighthousedirect"; + static const char * const k_pch_Lighthouse_DBHistory_Bool = "dbhistory"; + + //----------------------------------------------------------------------------- + // null keys + + static const char * const k_pch_Null_Section = "driver_null"; + static const char * const k_pch_Null_EnableNullDriver_Bool = "enable"; + static const char * const k_pch_Null_SerialNumber_String = "serialNumber"; + static const char * const k_pch_Null_ModelNumber_String = "modelNumber"; + static const char * const k_pch_Null_WindowX_Int32 = "windowX"; + static const char * const k_pch_Null_WindowY_Int32 = "windowY"; + static const char * const k_pch_Null_WindowWidth_Int32 = "windowWidth"; + static const char * const k_pch_Null_WindowHeight_Int32 = "windowHeight"; + static const char * const k_pch_Null_RenderWidth_Int32 = "renderWidth"; + static const char * const k_pch_Null_RenderHeight_Int32 = "renderHeight"; + static const char * const k_pch_Null_SecondsFromVsyncToPhotons_Float = "secondsFromVsyncToPhotons"; + static const char * const k_pch_Null_DisplayFrequency_Float = "displayFrequency"; + + //----------------------------------------------------------------------------- + // user interface keys + static const char * const k_pch_UserInterface_Section = "userinterface"; + static const char * const k_pch_UserInterface_StatusAlwaysOnTop_Bool = "StatusAlwaysOnTop"; + static const char * const k_pch_UserInterface_Screenshots_Bool = "screenshots"; + static const char * const k_pch_UserInterface_ScreenshotType_Int = "screenshotType"; + + //----------------------------------------------------------------------------- + // notification keys + static const char * const k_pch_Notifications_Section = "notifications"; + static const char * const k_pch_Notifications_DoNotDisturb_Bool = "DoNotDisturb"; + + //----------------------------------------------------------------------------- + // keyboard keys + static const char * const k_pch_Keyboard_Section = "keyboard"; + static const char * const k_pch_Keyboard_TutorialCompletions = "TutorialCompletions"; + static const char * const k_pch_Keyboard_ScaleX = "ScaleX"; + static const char * const k_pch_Keyboard_ScaleY = "ScaleY"; + static const char * const k_pch_Keyboard_OffsetLeftX = "OffsetLeftX"; + static const char * const k_pch_Keyboard_OffsetRightX = "OffsetRightX"; + static const char * const k_pch_Keyboard_OffsetY = "OffsetY"; + static const char * const k_pch_Keyboard_Smoothing = "Smoothing"; + + //----------------------------------------------------------------------------- + // perf keys + static const char * const k_pch_Perf_Section = "perfcheck"; + static const char * const k_pch_Perf_HeuristicActive_Bool = "heuristicActive"; + static const char * const k_pch_Perf_NotifyInHMD_Bool = "warnInHMD"; + static const char * const k_pch_Perf_NotifyOnlyOnce_Bool = "warnOnlyOnce"; + static const char * const k_pch_Perf_AllowTimingStore_Bool = "allowTimingStore"; + static const char * const k_pch_Perf_SaveTimingsOnExit_Bool = "saveTimingsOnExit"; + static const char * const k_pch_Perf_TestData_Float = "perfTestData"; + + 
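Editor's note: the IVRSettings accessors declared earlier in this section all follow the same pattern — each getter takes a section name, a key, a default value returned on failure, and an optional EVRSettingsError out-parameter. The fragment below is an illustrative sketch only (it is not part of the header added by this commit); it assumes openvr.h has been included and that a valid vr::IVRSettings pointer was already obtained elsewhere, for example via VR_GetGenericInterface with IVRSettings_Version.

// Illustrative only -- not part of openvr.h. Reads and writes a few of the
// SteamVR keys defined in this section through an already-acquired interface.
#include <cstdio>

static void TouchSteamVRSettings( vr::IVRSettings *pSettings )
{
  if ( !pSettings )
    return;

  vr::EVRSettingsError err = vr::VRSettingsError_None;

  // Getters return the supplied default on any error; err reports the reason
  // for the most recent call.
  float fIpd = pSettings->GetFloat( vr::k_pch_SteamVR_Section,
                                    vr::k_pch_SteamVR_IPD_Float, 0.063f, &err );
  bool bDirect = pSettings->GetBool( vr::k_pch_SteamVR_Section,
                                     vr::k_pch_SteamVR_DirectMode_Bool, true, &err );
  printf( "ipd=%.4f directMode=%d (%s)\n", fIpd, bDirect,
          pSettings->GetSettingsErrorNameFromEnum( err ) );

  // Setters mirror the getters; Sync( true ) forces the settings file to be written.
  pSettings->SetInt32( vr::k_pch_SteamVR_Section, vr::k_pch_SteamVR_LogLevel_Int32, 3, &err );
  pSettings->Sync( true, &err );
}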
//----------------------------------------------------------------------------- + // collision bounds keys + static const char * const k_pch_CollisionBounds_Section = "collisionBounds"; + static const char * const k_pch_CollisionBounds_Style_Int32 = "CollisionBoundsStyle"; + static const char * const k_pch_CollisionBounds_GroundPerimeterOn_Bool = "CollisionBoundsGroundPerimeterOn"; + static const char * const k_pch_CollisionBounds_CenterMarkerOn_Bool = "CollisionBoundsCenterMarkerOn"; + static const char * const k_pch_CollisionBounds_PlaySpaceOn_Bool = "CollisionBoundsPlaySpaceOn"; + static const char * const k_pch_CollisionBounds_FadeDistance_Float = "CollisionBoundsFadeDistance"; + static const char * const k_pch_CollisionBounds_ColorGammaR_Int32 = "CollisionBoundsColorGammaR"; + static const char * const k_pch_CollisionBounds_ColorGammaG_Int32 = "CollisionBoundsColorGammaG"; + static const char * const k_pch_CollisionBounds_ColorGammaB_Int32 = "CollisionBoundsColorGammaB"; + static const char * const k_pch_CollisionBounds_ColorGammaA_Int32 = "CollisionBoundsColorGammaA"; + + //----------------------------------------------------------------------------- + // camera keys + static const char * const k_pch_Camera_Section = "camera"; + static const char * const k_pch_Camera_EnableCamera_Bool = "enableCamera"; + static const char * const k_pch_Camera_EnableCameraInDashboard_Bool = "enableCameraInDashboard"; + static const char * const k_pch_Camera_EnableCameraForCollisionBounds_Bool = "enableCameraForCollisionBounds"; + static const char * const k_pch_Camera_EnableCameraForRoomView_Bool = "enableCameraForRoomView"; + static const char * const k_pch_Camera_BoundsColorGammaR_Int32 = "cameraBoundsColorGammaR"; + static const char * const k_pch_Camera_BoundsColorGammaG_Int32 = "cameraBoundsColorGammaG"; + static const char * const k_pch_Camera_BoundsColorGammaB_Int32 = "cameraBoundsColorGammaB"; + static const char * const k_pch_Camera_BoundsColorGammaA_Int32 = "cameraBoundsColorGammaA"; + + //----------------------------------------------------------------------------- + // audio keys + static const char * const k_pch_audio_Section = "audio"; + static const char * const k_pch_audio_OnPlaybackDevice_String = "onPlaybackDevice"; + static const char * const k_pch_audio_OnRecordDevice_String = "onRecordDevice"; + static const char * const k_pch_audio_OnPlaybackMirrorDevice_String = "onPlaybackMirrorDevice"; + static const char * const k_pch_audio_OffPlaybackDevice_String = "offPlaybackDevice"; + static const char * const k_pch_audio_OffRecordDevice_String = "offRecordDevice"; + static const char * const k_pch_audio_VIVEHDMIGain = "viveHDMIGain"; + + //----------------------------------------------------------------------------- + // model skin keys + static const char * const k_pch_modelskin_Section = "modelskins"; + +} // namespace vr + +// ivrchaperone.h +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +enum ChaperoneCalibrationState +{ + // OK! 
+ ChaperoneCalibrationState_OK = 1, // Chaperone is fully calibrated and working correctly + + // Warnings + ChaperoneCalibrationState_Warning = 100, + ChaperoneCalibrationState_Warning_BaseStationMayHaveMoved = 101, // A base station thinks that it might have moved + ChaperoneCalibrationState_Warning_BaseStationRemoved = 102, // There are fewer base stations than when calibrated + ChaperoneCalibrationState_Warning_SeatedBoundsInvalid = 103, // Seated bounds haven't been calibrated for the current tracking center + + // Errors + ChaperoneCalibrationState_Error = 200, // The UniverseID is invalid + ChaperoneCalibrationState_Error_BaseStationUninitalized = 201, // Tracking center hasn't been calibrated for at least one of the base stations + ChaperoneCalibrationState_Error_BaseStationConflict = 202, // Tracking center is calibrated, but base stations disagree on the tracking space + ChaperoneCalibrationState_Error_PlayAreaInvalid = 203, // Play Area hasn't been calibrated for the current tracking center + ChaperoneCalibrationState_Error_CollisionBoundsInvalid = 204, // Collision Bounds haven't been calibrated for the current tracking center +}; + + +/** HIGH LEVEL TRACKING SPACE ASSUMPTIONS: +* 0,0,0 is the preferred standing area center. +* 0Y is the floor height. +* -Z is the preferred forward facing direction. */ +class IVRChaperone +{ +public: + + /** Get the current state of Chaperone calibration. This state can change at any time during a session due to physical base station changes. **/ + virtual ChaperoneCalibrationState GetCalibrationState() = 0; + + /** Returns the width and depth of the Play Area (formerly named Soft Bounds) in X and Z. + * Tracking space center (0,0,0) is the center of the Play Area. **/ + virtual bool GetPlayAreaSize( float *pSizeX, float *pSizeZ ) = 0; + + /** Returns the 4 corner positions of the Play Area (formerly named Soft Bounds). + * Corners are in counter-clockwise order. + * Standing center (0,0,0) is the center of the Play Area. + * It's a rectangle. + * 2 sides are parallel to the X axis and 2 sides are parallel to the Z axis. + * Height of every corner is 0Y (on the floor). **/ + virtual bool GetPlayAreaRect( HmdQuad_t *rect ) = 0; + + /** Reload Chaperone data from the .vrchap file on disk. */ + virtual void ReloadInfo( void ) = 0; + + /** Optionally give the chaperone system a hint about the color and brightness in the scene **/ + virtual void SetSceneColor( HmdColor_t color ) = 0; + + /** Get the current chaperone bounds draw color and brightness **/ + virtual void GetBoundsColor( HmdColor_t *pOutputColorArray, int nNumOutputColors, float flCollisionBoundsFadeDistance, HmdColor_t *pOutputCameraColor ) = 0; + + /** Determine whether the bounds are showing right now **/ + virtual bool AreBoundsVisible() = 0; + + /** Force the bounds to show, mostly for utilities **/ + virtual void ForceBoundsVisible( bool bForce ) = 0; +}; + +static const char * const IVRChaperone_Version = "IVRChaperone_003"; + +#pragma pack( pop ) + +} +
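Editor's note: IVRChaperone is primarily a query interface — it reports calibration state and describes the user's Play Area. As an illustrative sketch only (not part of the committed header), a caller holding an IVRChaperone pointer could use it as follows; obtaining the pointer itself (e.g. via VR_GetGenericInterface with IVRChaperone_Version) is assumed to have happened elsewhere.

// Illustrative only -- not part of openvr.h. Logs the Play Area dimensions
// once the chaperone system reports a fully calibrated state.
#include <cstdio>

static void LogPlayArea( vr::IVRChaperone *pChaperone )
{
  if ( !pChaperone )
    return;

  vr::ChaperoneCalibrationState state = pChaperone->GetCalibrationState();
  if ( state != vr::ChaperoneCalibrationState_OK )
  {
    printf( "Chaperone not fully calibrated (state=%d)\n", (int)state );
    return;
  }

  float fSizeX = 0.0f, fSizeZ = 0.0f;  // width (X) and depth (Z), in meters
  if ( pChaperone->GetPlayAreaSize( &fSizeX, &fSizeZ ) )
    printf( "Play Area: %.2fm x %.2fm\n", fSizeX, fSizeZ );
}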
+// ivrchaperonesetup.h +namespace vr +{ + +enum EChaperoneConfigFile +{ + EChaperoneConfigFile_Live = 1, // The live chaperone config, used by most applications and games + EChaperoneConfigFile_Temp = 2, // The temporary chaperone config, used to live-preview collision bounds in room setup +}; + +enum EChaperoneImportFlags +{ + EChaperoneImport_BoundsOnly = 0x0001, +}; + +/** Manages the working copy of the chaperone info. By default this will be the same as the +* live copy. Any changes made with this interface will stay in the working copy until +* CommitWorkingCopy() is called, at which point the working copy and the live copy will be +* the same again. */ +class IVRChaperoneSetup +{ +public: + + /** Saves the current working copy to disk */ + virtual bool CommitWorkingCopy( EChaperoneConfigFile configFile ) = 0; + + /** Reverts the working copy to match the live chaperone calibration. + * To modify existing data this MUST be done WHILE getting a non-error ChaperoneCalibrationStatus. + * Only after this should you do gets and sets on the existing data. */ + virtual void RevertWorkingCopy() = 0; + + /** Returns the width and depth of the Play Area (formerly named Soft Bounds) in X and Z from the working copy. + * Tracking space center (0,0,0) is the center of the Play Area. */ + virtual bool GetWorkingPlayAreaSize( float *pSizeX, float *pSizeZ ) = 0; + + /** Returns the 4 corner positions of the Play Area (formerly named Soft Bounds) from the working copy. + * Corners are in clockwise order. + * Tracking space center (0,0,0) is the center of the Play Area. + * It's a rectangle. + * 2 sides are parallel to the X axis and 2 sides are parallel to the Z axis. + * Height of every corner is 0Y (on the floor). **/ + virtual bool GetWorkingPlayAreaRect( HmdQuad_t *rect ) = 0; + + /** Returns the number of Quads if the buffer points to null. Otherwise it returns Quads + * into the buffer up to the max specified from the working copy. */ + virtual bool GetWorkingCollisionBoundsInfo( VR_OUT_ARRAY_COUNT(punQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t* punQuadsCount ) = 0; + + /** Returns the number of Quads if the buffer points to null. Otherwise it returns Quads + * into the buffer up to the max specified. */ + virtual bool GetLiveCollisionBoundsInfo( VR_OUT_ARRAY_COUNT(punQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t* punQuadsCount ) = 0; + + /** Returns the preferred seated position from the working copy. */ + virtual bool GetWorkingSeatedZeroPoseToRawTrackingPose( HmdMatrix34_t *pmatSeatedZeroPoseToRawTrackingPose ) = 0; + + /** Returns the standing origin from the working copy. */ + virtual bool GetWorkingStandingZeroPoseToRawTrackingPose( HmdMatrix34_t *pmatStandingZeroPoseToRawTrackingPose ) = 0; + + /** Sets the Play Area in the working copy. */ + virtual void SetWorkingPlayAreaSize( float sizeX, float sizeZ ) = 0; + + /** Sets the Collision Bounds in the working copy. */ + virtual void SetWorkingCollisionBoundsInfo( VR_ARRAY_COUNT(unQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t unQuadsCount ) = 0; + + /** Sets the preferred seated position in the working copy. */ + virtual void SetWorkingSeatedZeroPoseToRawTrackingPose( const HmdMatrix34_t *pMatSeatedZeroPoseToRawTrackingPose ) = 0; + + /** Sets the preferred standing position in the working copy. */ + virtual void SetWorkingStandingZeroPoseToRawTrackingPose( const HmdMatrix34_t *pMatStandingZeroPoseToRawTrackingPose ) = 0; + + /** Tear everything down and reload it from the file on disk */ + virtual void ReloadFromDisk( EChaperoneConfigFile configFile ) = 0; + + /** Returns the preferred seated position. */ + virtual bool GetLiveSeatedZeroPoseToRawTrackingPose( HmdMatrix34_t *pmatSeatedZeroPoseToRawTrackingPose ) = 0; + + virtual void SetWorkingCollisionBoundsTagsInfo( VR_ARRAY_COUNT(unTagCount) uint8_t *pTagsBuffer, uint32_t unTagCount ) = 0; + virtual bool GetLiveCollisionBoundsTagsInfo( VR_OUT_ARRAY_COUNT(punTagCount) uint8_t *pTagsBuffer, uint32_t *punTagCount ) = 0; + + virtual bool SetWorkingPhysicalBoundsInfo( VR_ARRAY_COUNT(unQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t unQuadsCount ) = 0; + virtual bool GetLivePhysicalBoundsInfo( VR_OUT_ARRAY_COUNT(punQuadsCount) HmdQuad_t *pQuadsBuffer, uint32_t* punQuadsCount ) = 0; + + virtual bool ExportLiveToBuffer( VR_OUT_STRING() char *pBuffer, uint32_t *pnBufferLength ) = 0; + virtual bool ImportFromBufferToWorking( const char *pBuffer, uint32_t nImportFlags ) = 0; +}; + +static const char * const IVRChaperoneSetup_Version = "IVRChaperoneSetup_005"; + + +} +
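Editor's note: the IVRChaperoneSetup working-copy methods above are intended to be used as a revert/edit/commit sequence. The sketch below is illustrative only (not part of this commit); it assumes the IVRChaperoneSetup pointer was obtained elsewhere and that the caller has already confirmed a non-error calibration state, as the RevertWorkingCopy() comment requires.

// Illustrative only -- not part of openvr.h. Resizes the Play Area by editing
// the working copy and committing it to the live chaperone configuration.
static bool ResizePlayArea( vr::IVRChaperoneSetup *pSetup, float fSizeX, float fSizeZ )
{
  if ( !pSetup )
    return false;

  // Start from the live calibration, change only the working copy, then commit.
  pSetup->RevertWorkingCopy();
  pSetup->SetWorkingPlayAreaSize( fSizeX, fSizeZ );
  return pSetup->CommitWorkingCopy( vr::EChaperoneConfigFile_Live );
}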
+// ivrcompositor.h +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +/** Errors that can occur with the VR compositor */ +enum EVRCompositorError +{ + VRCompositorError_None = 0, + VRCompositorError_RequestFailed = 1, + VRCompositorError_IncompatibleVersion = 100, + VRCompositorError_DoNotHaveFocus = 101, + VRCompositorError_InvalidTexture = 102, + VRCompositorError_IsNotSceneApplication = 103, + VRCompositorError_TextureIsOnWrongDevice = 104, + VRCompositorError_TextureUsesUnsupportedFormat = 105, + VRCompositorError_SharedTexturesNotSupported = 106, + VRCompositorError_IndexOutOfRange = 107, +}; + +const uint32_t VRCompositor_ReprojectionReason_Cpu = 0x01; +const uint32_t VRCompositor_ReprojectionReason_Gpu = 0x02; + +/** Provides a single frame's timing information to the app */ +struct Compositor_FrameTiming +{ + uint32_t m_nSize; // Set to sizeof( Compositor_FrameTiming ) + uint32_t m_nFrameIndex; + uint32_t m_nNumFramePresents; // number of times this frame was presented + uint32_t m_nNumDroppedFrames; // number of additional times previous frame was scanned out + uint32_t m_nReprojectionFlags; + + /** Absolute time reference for comparing frames. This aligns with the vsync that running start is relative to. */ + double m_flSystemTimeInSeconds; + + /** These times may include work from other processes due to OS scheduling. + * The fewer packets of work these are broken up into, the less likely this will happen. + * GPU work can be broken up by calling Flush. This can sometimes be useful to get the GPU started + * processing that work earlier in the frame. */ + float m_flPreSubmitGpuMs; // time spent rendering the scene (gpu work submitted between WaitGetPoses and second Submit) + float m_flPostSubmitGpuMs; // additional time spent rendering by application (e.g. companion window) + float m_flTotalRenderGpuMs; // time between work submitted immediately after present (ideally vsync) until the end of compositor submitted work + float m_flCompositorRenderGpuMs; // time spent performing distortion correction, rendering chaperone, overlays, etc. 
+ float m_flCompositorRenderCpuMs; // time spent on cpu submitting the above work for this frame + float m_flCompositorIdleCpuMs; // time spent waiting for running start (application could have used this much more time) + + /** Miscellaneous measured intervals. */ + float m_flClientFrameIntervalMs; // time between calls to WaitGetPoses + float m_flPresentCallCpuMs; // time blocked on call to present (usually 0.0, but can go long) + float m_flWaitForPresentCpuMs; // time spent spin-waiting for frame index to change (not near-zero indicates wait object failure) + float m_flSubmitFrameMs; // time spent in IVRCompositor::Submit (not near-zero indicates driver issue) + + /** The following are all relative to this frame's SystemTimeInSeconds */ + float m_flWaitGetPosesCalledMs; + float m_flNewPosesReadyMs; + float m_flNewFrameReadyMs; // second call to IVRCompositor::Submit + float m_flCompositorUpdateStartMs; + float m_flCompositorUpdateEndMs; + float m_flCompositorRenderStartMs; + + vr::TrackedDevicePose_t m_HmdPose; // pose used by app to render this frame +}; + +/** Cumulative stats for current application. These are not cleared until a new app connects, +* but they do stop accumulating once the associated app disconnects. */ +struct Compositor_CumulativeStats +{ + uint32_t m_nPid; // Process id associated with these stats (may no longer be running). + uint32_t m_nNumFramePresents; // total number of times we called present (includes reprojected frames) + uint32_t m_nNumDroppedFrames; // total number of times an old frame was re-scanned out (without reprojection) + uint32_t m_nNumReprojectedFrames; // total number of times a frame was scanned out a second time (with reprojection) + + /** Values recorded at startup before application has fully faded in the first time. */ + uint32_t m_nNumFramePresentsOnStartup; + uint32_t m_nNumDroppedFramesOnStartup; + uint32_t m_nNumReprojectedFramesOnStartup; + + /** Applications may explicitly fade to the compositor. This is usually to handle level transitions, and loading often causes + * system wide hitches. The following stats are collected during this period. Does not include values recorded during startup. */ + uint32_t m_nNumLoading; + uint32_t m_nNumFramePresentsLoading; + uint32_t m_nNumDroppedFramesLoading; + uint32_t m_nNumReprojectedFramesLoading; + + /** If we don't get a new frame from the app in less than 2.5 frames, then we assume the app has hung and start + * fading back to the compositor. The following stats are a result of this, and are a subset of those recorded above. + * Does not include values recorded during start up or loading. */ + uint32_t m_nNumTimedOut; + uint32_t m_nNumFramePresentsTimedOut; + uint32_t m_nNumDroppedFramesTimedOut; + uint32_t m_nNumReprojectedFramesTimedOut; +}; + +#pragma pack( pop ) + +/** Allows the application to interact with the compositor */ +class IVRCompositor +{ +public: + /** Sets tracking space returned by WaitGetPoses */ + virtual void SetTrackingSpace( ETrackingUniverseOrigin eOrigin ) = 0; + + /** Gets current tracking space returned by WaitGetPoses */ + virtual ETrackingUniverseOrigin GetTrackingSpace() = 0; + + /** Returns pose(s) to use to render scene (and optionally poses predicted two frames out for gameplay). 
*/ + virtual EVRCompositorError WaitGetPoses( VR_ARRAY_COUNT(unRenderPoseArrayCount) TrackedDevicePose_t* pRenderPoseArray, uint32_t unRenderPoseArrayCount, + VR_ARRAY_COUNT(unGamePoseArrayCount) TrackedDevicePose_t* pGamePoseArray, uint32_t unGamePoseArrayCount ) = 0; + + /** Get the last set of poses returned by WaitGetPoses. */ + virtual EVRCompositorError GetLastPoses( VR_ARRAY_COUNT( unRenderPoseArrayCount ) TrackedDevicePose_t* pRenderPoseArray, uint32_t unRenderPoseArrayCount, + VR_ARRAY_COUNT( unGamePoseArrayCount ) TrackedDevicePose_t* pGamePoseArray, uint32_t unGamePoseArrayCount ) = 0; + + /** Interface for accessing last set of poses returned by WaitGetPoses one at a time. + * Returns VRCompositorError_IndexOutOfRange if unDeviceIndex is not less than k_unMaxTrackedDeviceCount, otherwise VRCompositorError_None. + * It is okay to pass NULL for either pose if you only want one of the values. */ + virtual EVRCompositorError GetLastPoseForTrackedDeviceIndex( TrackedDeviceIndex_t unDeviceIndex, TrackedDevicePose_t *pOutputPose, TrackedDevicePose_t *pOutputGamePose ) = 0; + + /** Updates the scene texture to display. If pBounds is NULL the entire texture will be used. If called from an OpenGL app, consider adding a glFlush after + * Submitting both frames to signal the driver to start processing, otherwise it may wait until the command buffer fills up, causing the app to miss frames. + * + * OpenGL dirty state: + * glBindTexture + */ + virtual EVRCompositorError Submit( EVREye eEye, const Texture_t *pTexture, const VRTextureBounds_t* pBounds = 0, EVRSubmitFlags nSubmitFlags = Submit_Default ) = 0; + + /** Clears the frame that was sent with the last call to Submit. This will cause the + * compositor to show the grid until Submit is called again. */ + virtual void ClearLastSubmittedFrame() = 0; + + /** Call immediately after presenting your app's window (i.e. companion window) to unblock the compositor. + * This is an optional call, which only needs to be used if you can't instead call WaitGetPoses immediately after Present. + * For example, if your engine's render and game loop are not on separate threads, or blocking the render thread until 3ms before the next vsync would + * introduce a deadlock of some sort. This function tells the compositor that you have finished all rendering after having Submitted buffers for both + * eyes, and it is free to start its rendering work. This should only be called from the same thread you are rendering on. */ + virtual void PostPresentHandoff() = 0; + + /** Returns true if timing data is filled in. Sets oldest timing info if nFramesAgo is larger than the stored history. + * Be sure to set timing.size = sizeof(Compositor_FrameTiming) on struct passed in before calling this function. */ + virtual bool GetFrameTiming( Compositor_FrameTiming *pTiming, uint32_t unFramesAgo = 0 ) = 0; + + /** Returns the time in seconds left in the current (as identified by FrameTiming's frameIndex) frame. + * Due to "running start", this value may roll over to the next frame before ever reaching 0.0. */ + virtual float GetFrameTimeRemaining() = 0; + + /** Fills out stats accumulated for the last connected application. Pass in sizeof( Compositor_CumulativeStats ) as second parameter. */ + virtual void GetCumulativeStats( Compositor_CumulativeStats *pStats, uint32_t nStatsSizeInBytes ) = 0; + + /** Fades the view on the HMD to the specified color. The fade will take fSeconds, and the color values are between + * 0.0 and 1.0. 
This color is faded on top of the scene based on the alpha parameter. Removing the fade color instantly + * would be FadeToColor( 0.0, 0.0, 0.0, 0.0, 0.0 ). Values are in un-premultiplied alpha space. */ + virtual void FadeToColor( float fSeconds, float fRed, float fGreen, float fBlue, float fAlpha, bool bBackground = false ) = 0; + + /** Fading the Grid in or out in fSeconds */ + virtual void FadeGrid( float fSeconds, bool bFadeIn ) = 0; + + /** Override the skybox used in the compositor (e.g. for during level loads when the app can't feed scene images fast enough) + * Order is Front, Back, Left, Right, Top, Bottom. If only a single texture is passed, it is assumed in lat-long format. + * If two are passed, it is assumed a lat-long stereo pair. */ + virtual EVRCompositorError SetSkyboxOverride( VR_ARRAY_COUNT( unTextureCount ) const Texture_t *pTextures, uint32_t unTextureCount ) = 0; + + /** Resets compositor skybox back to defaults. */ + virtual void ClearSkyboxOverride() = 0; + + /** Brings the compositor window to the front. This is useful for covering any other window that may be on the HMD + * and is obscuring the compositor window. */ + virtual void CompositorBringToFront() = 0; + + /** Pushes the compositor window to the back. This is useful for allowing other applications to draw directly to the HMD. */ + virtual void CompositorGoToBack() = 0; + + /** Tells the compositor process to clean up and exit. You do not need to call this function at shutdown. Under normal + * circumstances the compositor will manage its own life cycle based on what applications are running. */ + virtual void CompositorQuit() = 0; + + /** Return whether the compositor is fullscreen */ + virtual bool IsFullscreen() = 0; + + /** Returns the process ID of the process that is currently rendering the scene */ + virtual uint32_t GetCurrentSceneFocusProcess() = 0; + + /** Returns the process ID of the process that rendered the last frame (or 0 if the compositor itself rendered the frame.) + * Returns 0 when fading out from an app and the app's process Id when fading into an app. */ + virtual uint32_t GetLastFrameRenderer() = 0; + + /** Returns true if the current process has the scene focus */ + virtual bool CanRenderScene() = 0; + + /** Creates a window on the primary monitor to display what is being shown in the headset. */ + virtual void ShowMirrorWindow() = 0; + + /** Closes the mirror window. */ + virtual void HideMirrorWindow() = 0; + + /** Returns true if the mirror window is shown. */ + virtual bool IsMirrorWindowVisible() = 0; + + /** Writes all images that the compositor knows about (including overlays) to a 'screenshots' folder in the SteamVR runtime root. */ + virtual void CompositorDumpImages() = 0; + + /** Let an app know it should be rendering with low resources. */ + virtual bool ShouldAppRenderWithLowResources() = 0; + + /** Override interleaved reprojection logic to force on. */ + virtual void ForceInterleavedReprojectionOn( bool bOverride ) = 0; + + /** Force reconnecting to the compositor process. */ + virtual void ForceReconnectProcess() = 0; + + /** Temporarily suspends rendering (useful for finer control over scene transitions). */ + virtual void SuspendRendering( bool bSuspend ) = 0; + + /** Opens a shared D3D11 texture with the undistorted composited image for each eye. */ + virtual vr::EVRCompositorError GetMirrorTextureD3D11( vr::EVREye eEye, void *pD3D11DeviceOrResource, void **ppD3D11ShaderResourceView ) = 0; + + /** Access to mirror textures from OpenGL. 
*/ + virtual vr::EVRCompositorError GetMirrorTextureGL( vr::EVREye eEye, vr::glUInt_t *pglTextureId, vr::glSharedTextureHandle_t *pglSharedTextureHandle ) = 0; + virtual bool ReleaseSharedGLTexture( vr::glUInt_t glTextureId, vr::glSharedTextureHandle_t glSharedTextureHandle ) = 0; + virtual void LockGLSharedTextureForAccess( vr::glSharedTextureHandle_t glSharedTextureHandle ) = 0; + virtual void UnlockGLSharedTextureForAccess( vr::glSharedTextureHandle_t glSharedTextureHandle ) = 0; +}; + +static const char * const IVRCompositor_Version = "IVRCompositor_016"; + +} // namespace vr + + + +// ivrnotifications.h +namespace vr +{ + +#if defined(__linux__) || defined(__APPLE__) + // The 32-bit version of gcc has the alignment requirement for uint64 and double set to + // 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. + // The 64-bit version of gcc has the alignment requirement for these types set to + // 8 meaning that unless we use #pragma pack(4) our structures will get bigger. + // The 64-bit structure packing has to match the 32-bit structure packing for each platform. + #pragma pack( push, 4 ) +#else + #pragma pack( push, 8 ) +#endif + +// Used for passing graphic data +struct NotificationBitmap_t +{ + NotificationBitmap_t() + : m_pImageData( nullptr ) + , m_nWidth( 0 ) + , m_nHeight( 0 ) + , m_nBytesPerPixel( 0 ) + { + }; + + void *m_pImageData; + int32_t m_nWidth; + int32_t m_nHeight; + int32_t m_nBytesPerPixel; +}; + + +/** Be aware that the notification type is used as 'priority' to pick the next notification */ +enum EVRNotificationType +{ + /** Transient notifications are automatically hidden after a period of time set by the user. + * They are used for things like information and chat messages that do not require user interaction. */ + EVRNotificationType_Transient = 0, + + /** Persistent notifications are shown to the user until they are hidden by calling RemoveNotification(). + * They are used for things like phone calls and alarms that require user interaction. */ + EVRNotificationType_Persistent = 1, + + /** System notifications are shown no matter what. It is expected, that the ulUserValue is used as ID. + * If there is already a system notification in the queue with that ID it is not accepted into the queue + * to prevent spamming with system notification */ + EVRNotificationType_Transient_SystemWithUserValue = 2, +}; + +enum EVRNotificationStyle +{ + /** Creates a notification with minimal external styling. */ + EVRNotificationStyle_None = 0, + + /** Used for notifications about overlay-level status. In Steam this is used for events like downloads completing. */ + EVRNotificationStyle_Application = 100, + + /** Used for notifications about contacts that are unknown or not available. In Steam this is used for friend invitations and offline friends. */ + EVRNotificationStyle_Contact_Disabled = 200, + + /** Used for notifications about contacts that are available but inactive. In Steam this is used for friends that are online but not playing a game. */ + EVRNotificationStyle_Contact_Enabled = 201, + + /** Used for notifications about contacts that are available and active. In Steam this is used for friends that are online and currently running a game. */ + EVRNotificationStyle_Contact_Active = 202, +}; + +static const uint32_t k_unNotificationTextMaxSize = 256; + +typedef uint32_t VRNotificationId; + + + +#pragma pack( pop ) + +/** Allows notification sources to interact with the VR system + This current interface is not yet implemented. 
Do not use yet. */ +class IVRNotifications +{ +public: + /** Create a notification and enqueue it to be shown to the user. + * An overlay handle is required to create a notification, as otherwise it would be impossible for a user to act on it. + * To create a two-line notification, use a line break ('\n') to split the text into two lines. + * The pImage argument may be NULL, in which case the specified overlay's icon will be used instead. */ + virtual EVRNotificationError CreateNotification( VROverlayHandle_t ulOverlayHandle, uint64_t ulUserValue, EVRNotificationType type, const char *pchText, EVRNotificationStyle style, const NotificationBitmap_t *pImage, /* out */ VRNotificationId *pNotificationId ) = 0; + + /** Destroy a notification, hiding it first if it currently shown to the user. */ + virtual EVRNotificationError RemoveNotification( VRNotificationId notificationId ) = 0; + +}; + +static const char * const IVRNotifications_Version = "IVRNotifications_002"; + +} // namespace vr + + + +// ivroverlay.h +namespace vr +{ + + /** The maximum length of an overlay key in bytes, counting the terminating null character. */ + static const uint32_t k_unVROverlayMaxKeyLength = 128; + + /** The maximum length of an overlay name in bytes, counting the terminating null character. */ + static const uint32_t k_unVROverlayMaxNameLength = 128; + + /** The maximum number of overlays that can exist in the system at one time. */ + static const uint32_t k_unMaxOverlayCount = 64; + + /** Types of input supported by VR Overlays */ + enum VROverlayInputMethod + { + VROverlayInputMethod_None = 0, // No input events will be generated automatically for this overlay + VROverlayInputMethod_Mouse = 1, // Tracked controllers will get mouse events automatically + }; + + /** Allows the caller to figure out which overlay transform getter to call. */ + enum VROverlayTransformType + { + VROverlayTransform_Absolute = 0, + VROverlayTransform_TrackedDeviceRelative = 1, + VROverlayTransform_SystemOverlay = 2, + VROverlayTransform_TrackedComponent = 3, + }; + + /** Overlay control settings */ + enum VROverlayFlags + { + VROverlayFlags_None = 0, + + // The following only take effect when rendered using the high quality render path (see SetHighQualityOverlay). 
+ VROverlayFlags_Curved = 1, + VROverlayFlags_RGSS4X = 2, + + // Set this flag on a dashboard overlay to prevent a tab from showing up for that overlay + VROverlayFlags_NoDashboardTab = 3, + + // Set this flag on a dashboard that is able to deal with gamepad focus events + VROverlayFlags_AcceptsGamepadEvents = 4, + + // Indicates that the overlay should dim/brighten to show gamepad focus + VROverlayFlags_ShowGamepadFocus = 5, + + // When in VROverlayInputMethod_Mouse you can optionally enable sending VRScroll_t + VROverlayFlags_SendVRScrollEvents = 6, + VROverlayFlags_SendVRTouchpadEvents = 7, + + // If set this will render a vertical scroll wheel on the primary controller, + // only needed if not using VROverlayFlags_SendVRScrollEvents but you still want to represent a scroll wheel + VROverlayFlags_ShowTouchPadScrollWheel = 8, + + // If this is set ownership and render access to the overlay are transferred + // to the new scene process on a call to IVRApplications::LaunchInternalProcess + VROverlayFlags_TransferOwnershipToInternalProcess = 9, + + // If set, renders 50% of the texture in each eye, side by side + VROverlayFlags_SideBySide_Parallel = 10, // Texture is left/right + VROverlayFlags_SideBySide_Crossed = 11, // Texture is crossed and right/left + + VROverlayFlags_Panorama = 12, // Texture is a panorama + VROverlayFlags_StereoPanorama = 13, // Texture is a stereo panorama + + // If this is set on an overlay owned by the scene application that overlay + // will be sorted with the "Other" overlays on top of all other scene overlays + VROverlayFlags_SortWithNonSceneOverlays = 14, + }; + + struct VROverlayIntersectionParams_t + { + HmdVector3_t vSource; + HmdVector3_t vDirection; + ETrackingUniverseOrigin eOrigin; + }; + + struct VROverlayIntersectionResults_t + { + HmdVector3_t vPoint; + HmdVector3_t vNormal; + HmdVector2_t vUVs; + float fDistance; + }; + + // Input modes for the Big Picture gamepad text entry + enum EGamepadTextInputMode + { + k_EGamepadTextInputModeNormal = 0, + k_EGamepadTextInputModePassword = 1, + k_EGamepadTextInputModeSubmit = 2, + }; + + // Controls number of allowed lines for the Big Picture gamepad text entry + enum EGamepadTextInputLineMode + { + k_EGamepadTextInputLineModeSingleLine = 0, + k_EGamepadTextInputLineModeMultipleLines = 1 + }; + + /** Directions for changing focus between overlays with the gamepad */ + enum EOverlayDirection + { + OverlayDirection_Up = 0, + OverlayDirection_Down = 1, + OverlayDirection_Left = 2, + OverlayDirection_Right = 3, + + OverlayDirection_Count = 4, + }; + + class IVROverlay + { + public: + + // --------------------------------------------- + // Overlay management methods + // --------------------------------------------- + + /** Finds an existing overlay with the specified key. */ + virtual EVROverlayError FindOverlay( const char *pchOverlayKey, VROverlayHandle_t * pOverlayHandle ) = 0; + + /** Creates a new named overlay. All overlays start hidden and with default settings. */ + virtual EVROverlayError CreateOverlay( const char *pchOverlayKey, const char *pchOverlayFriendlyName, VROverlayHandle_t * pOverlayHandle ) = 0; + + /** Destroys the specified overlay. When an application calls VR_Shutdown all overlays created by that app are + * automatically destroyed. */ + virtual EVROverlayError DestroyOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Specify which overlay to use the high quality render path. 
This overlay will be composited in during the distortion pass which + * results in it drawing on top of everything else, but also at a higher quality as it samples the source texture directly rather than + * rasterizing into each eye's render texture first. Because of this, only one of these is supported at any given time. It is most useful + * for overlays that are expected to take up most of the user's view (e.g. streaming video). + * This mode does not support mouse input to your overlay. */ + virtual EVROverlayError SetHighQualityOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Returns the overlay handle of the current overlay being rendered using the single high quality overlay render path. + * Otherwise it will return k_ulOverlayHandleInvalid. */ + virtual vr::VROverlayHandle_t GetHighQualityOverlay() = 0; + + /** Fills the provided buffer with the string key of the overlay. Returns the size of buffer required to store the key, including + * the terminating null character. k_unVROverlayMaxKeyLength will be enough bytes to fit the string. */ + virtual uint32_t GetOverlayKey( VROverlayHandle_t ulOverlayHandle, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, EVROverlayError *pError = 0L ) = 0; + + /** Fills the provided buffer with the friendly name of the overlay. Returns the size of buffer required to store the key, including + * the terminating null character. k_unVROverlayMaxNameLength will be enough bytes to fit the string. */ + virtual uint32_t GetOverlayName( VROverlayHandle_t ulOverlayHandle, VR_OUT_STRING() char *pchValue, uint32_t unBufferSize, EVROverlayError *pError = 0L ) = 0; + + /** Gets the raw image data from an overlay. Overlay image data is always returned as RGBA data, 4 bytes per pixel. If the buffer is not large enough, width and height + * will be set and VROverlayError_ArrayTooSmall is returned. */ + virtual EVROverlayError GetOverlayImageData( VROverlayHandle_t ulOverlayHandle, void *pvBuffer, uint32_t unBufferSize, uint32_t *punWidth, uint32_t *punHeight ) = 0; + + /** returns a string that corresponds with the specified overlay error. The string will be the name + * of the error enum value for all valid error codes */ + virtual const char *GetOverlayErrorNameFromEnum( EVROverlayError error ) = 0; + + + // --------------------------------------------- + // Overlay rendering methods + // --------------------------------------------- + + /** Sets the pid that is allowed to render to this overlay (the creator pid is always allowed to render), + * by default this is the pid of the process that made the overlay */ + virtual EVROverlayError SetOverlayRenderingPid( VROverlayHandle_t ulOverlayHandle, uint32_t unPID ) = 0; + + /** Gets the pid that is allowed to render to this overlay */ + virtual uint32_t GetOverlayRenderingPid( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Specify flag setting for a given overlay */ + virtual EVROverlayError SetOverlayFlag( VROverlayHandle_t ulOverlayHandle, VROverlayFlags eOverlayFlag, bool bEnabled ) = 0; + + /** Gets flag setting for a given overlay */ + virtual EVROverlayError GetOverlayFlag( VROverlayHandle_t ulOverlayHandle, VROverlayFlags eOverlayFlag, bool *pbEnabled ) = 0; + + /** Sets the color tint of the overlay quad. Use 0.0 to 1.0 per channel. */ + virtual EVROverlayError SetOverlayColor( VROverlayHandle_t ulOverlayHandle, float fRed, float fGreen, float fBlue ) = 0; + + /** Gets the color tint of the overlay quad. 
*/ + virtual EVROverlayError GetOverlayColor( VROverlayHandle_t ulOverlayHandle, float *pfRed, float *pfGreen, float *pfBlue ) = 0; + + /** Sets the alpha of the overlay quad. Use 1.0 for 100 percent opacity to 0.0 for 0 percent opacity. */ + virtual EVROverlayError SetOverlayAlpha( VROverlayHandle_t ulOverlayHandle, float fAlpha ) = 0; + + /** Gets the alpha of the overlay quad. By default overlays are rendered at 100 percent alpha (1.0). */ + virtual EVROverlayError GetOverlayAlpha( VROverlayHandle_t ulOverlayHandle, float *pfAlpha ) = 0; + + /** Sets the aspect ratio of the texels in the overlay. 1.0 means the texels are square. 2.0 means the texels + * are twice as wide as they are tall. Defaults to 1.0. */ + virtual EVROverlayError SetOverlayTexelAspect( VROverlayHandle_t ulOverlayHandle, float fTexelAspect ) = 0; + + /** Gets the aspect ratio of the texels in the overlay. Defaults to 1.0 */ + virtual EVROverlayError GetOverlayTexelAspect( VROverlayHandle_t ulOverlayHandle, float *pfTexelAspect ) = 0; + + /** Sets the rendering sort order for the overlay. Overlays are rendered in this order: + * Overlays owned by the scene application + * Overlays owned by some other application + * + * Within a category overlays are rendered lowest sort order to highest sort order. Overlays with the same + * sort order are rendered back to front based on distance from the HMD. + * + * Sort order defaults to 0. */ + virtual EVROverlayError SetOverlaySortOrder( VROverlayHandle_t ulOverlayHandle, uint32_t unSortOrder ) = 0; + + /** Gets the sort order of the overlay. See SetOverlaySortOrder for how this works. */ + virtual EVROverlayError GetOverlaySortOrder( VROverlayHandle_t ulOverlayHandle, uint32_t *punSortOrder ) = 0; + + /** Sets the width of the overlay quad in meters. By default overlays are rendered on a quad that is 1 meter across */ + virtual EVROverlayError SetOverlayWidthInMeters( VROverlayHandle_t ulOverlayHandle, float fWidthInMeters ) = 0; + + /** Returns the width of the overlay quad in meters. By default overlays are rendered on a quad that is 1 meter across */ + virtual EVROverlayError GetOverlayWidthInMeters( VROverlayHandle_t ulOverlayHandle, float *pfWidthInMeters ) = 0; + + /** For high-quality curved overlays only, sets the distance range in meters from the overlay used to automatically curve + * the surface around the viewer. Min distance is when the surface will be most curved. Max is when least curved. */ + virtual EVROverlayError SetOverlayAutoCurveDistanceRangeInMeters( VROverlayHandle_t ulOverlayHandle, float fMinDistanceInMeters, float fMaxDistanceInMeters ) = 0; + + /** For high-quality curved overlays only, gets the distance range in meters from the overlay used to automatically curve + * the surface around the viewer. Min distance is when the surface will be most curved. Max is when least curved. */ + virtual EVROverlayError GetOverlayAutoCurveDistanceRangeInMeters( VROverlayHandle_t ulOverlayHandle, float *pfMinDistanceInMeters, float *pfMaxDistanceInMeters ) = 0; + + /** Sets the colorspace the overlay texture's data is in. Defaults to 'auto'. + * If the texture needs to be resolved, you should call SetOverlayTexture with the appropriate colorspace instead. */ + virtual EVROverlayError SetOverlayTextureColorSpace( VROverlayHandle_t ulOverlayHandle, EColorSpace eTextureColorSpace ) = 0; + + /** Gets the overlay's current colorspace setting. 
*/ + virtual EVROverlayError GetOverlayTextureColorSpace( VROverlayHandle_t ulOverlayHandle, EColorSpace *peTextureColorSpace ) = 0; + + /** Sets the part of the texture to use for the overlay. UV Min is the upper left corner and UV Max is the lower right corner. */ + virtual EVROverlayError SetOverlayTextureBounds( VROverlayHandle_t ulOverlayHandle, const VRTextureBounds_t *pOverlayTextureBounds ) = 0; + + /** Gets the part of the texture to use for the overlay. UV Min is the upper left corner and UV Max is the lower right corner. */ + virtual EVROverlayError GetOverlayTextureBounds( VROverlayHandle_t ulOverlayHandle, VRTextureBounds_t *pOverlayTextureBounds ) = 0; + + /** Returns the transform type of this overlay. */ + virtual EVROverlayError GetOverlayTransformType( VROverlayHandle_t ulOverlayHandle, VROverlayTransformType *peTransformType ) = 0; + + /** Sets the transform to absolute tracking origin. */ + virtual EVROverlayError SetOverlayTransformAbsolute( VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, const HmdMatrix34_t *pmatTrackingOriginToOverlayTransform ) = 0; + + /** Gets the transform if it is absolute. Returns an error if the transform is some other type. */ + virtual EVROverlayError GetOverlayTransformAbsolute( VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin *peTrackingOrigin, HmdMatrix34_t *pmatTrackingOriginToOverlayTransform ) = 0; + + /** Sets the transform to relative to the transform of the specified tracked device. */ + virtual EVROverlayError SetOverlayTransformTrackedDeviceRelative( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unTrackedDevice, const HmdMatrix34_t *pmatTrackedDeviceToOverlayTransform ) = 0; + + /** Gets the transform if it is relative to a tracked device. Returns an error if the transform is some other type. */ + virtual EVROverlayError GetOverlayTransformTrackedDeviceRelative( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t *punTrackedDevice, HmdMatrix34_t *pmatTrackedDeviceToOverlayTransform ) = 0; + + /** Sets the transform to draw the overlay on a rendermodel component mesh instead of a quad. This will only draw when the system is + * drawing the device. Overlays with this transform type cannot receive mouse events. */ + virtual EVROverlayError SetOverlayTransformTrackedDeviceComponent( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unDeviceIndex, const char *pchComponentName ) = 0; + + /** Gets the transform information when the overlay is rendering on a component. */ + virtual EVROverlayError GetOverlayTransformTrackedDeviceComponent( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t *punDeviceIndex, char *pchComponentName, uint32_t unComponentNameSize ) = 0; + + /** Shows the VR overlay. For dashboard overlays, only the Dashboard Manager is allowed to call this. */ + virtual EVROverlayError ShowOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Hides the VR overlay. For dashboard overlays, only the Dashboard Manager is allowed to call this. */ + virtual EVROverlayError HideOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Returns true if the overlay is visible. */ + virtual bool IsOverlayVisible( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Get the transform in 3d space associated with a specific 2d point in the overlay's coordinate space (where 0,0 is the lower left). 
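 *
 * Editorial sketch (not part of the original header), assuming ulHandle is a valid overlay
 * handle: query the tracking-space transform of the overlay's lower-left corner.
 *
 *   vr::HmdVector2_t vCorner = { { 0.0f, 0.0f } };
 *   vr::HmdMatrix34_t matCorner;
 *   vr::VROverlay()->GetTransformForOverlayCoordinates( ulHandle, vr::TrackingUniverseStanding, vCorner, &matCorner );
 *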
-Z points out of the overlay */ + virtual EVROverlayError GetTransformForOverlayCoordinates( VROverlayHandle_t ulOverlayHandle, ETrackingUniverseOrigin eTrackingOrigin, HmdVector2_t coordinatesInOverlay, HmdMatrix34_t *pmatTransform ) = 0; + + // --------------------------------------------- + // Overlay input methods + // --------------------------------------------- + + /** Returns true and fills the event with the next event on the overlay's event queue, if there is one. + * If there are no events this method returns false. uncbVREvent should be the size in bytes of the VREvent_t struct */ + virtual bool PollNextOverlayEvent( VROverlayHandle_t ulOverlayHandle, VREvent_t *pEvent, uint32_t uncbVREvent ) = 0; + + /** Returns the current input settings for the specified overlay. */ + virtual EVROverlayError GetOverlayInputMethod( VROverlayHandle_t ulOverlayHandle, VROverlayInputMethod *peInputMethod ) = 0; + + /** Sets the input settings for the specified overlay. */ + virtual EVROverlayError SetOverlayInputMethod( VROverlayHandle_t ulOverlayHandle, VROverlayInputMethod eInputMethod ) = 0; + + /** Gets the mouse scaling factor that is used for mouse events. The actual texture may be a different size, but this is + * typically the size of the underlying UI in pixels. */ + virtual EVROverlayError GetOverlayMouseScale( VROverlayHandle_t ulOverlayHandle, HmdVector2_t *pvecMouseScale ) = 0; + + /** Sets the mouse scaling factor that is used for mouse events. The actual texture may be a different size, but this is + * typically the size of the underlying UI in pixels (not in world space). */ + virtual EVROverlayError SetOverlayMouseScale( VROverlayHandle_t ulOverlayHandle, const HmdVector2_t *pvecMouseScale ) = 0; + + /** Computes the overlay-space pixel coordinates of where the ray intersects the overlay with the + * specified settings. Returns false if there is no intersection. */ + virtual bool ComputeOverlayIntersection( VROverlayHandle_t ulOverlayHandle, const VROverlayIntersectionParams_t *pParams, VROverlayIntersectionResults_t *pResults ) = 0; + + /** Processes mouse input from the specified controller as though it were a mouse pointed at a compositor overlay with the + * specified settings. The controller is treated like a laser pointer on the -z axis. The point where the laser pointer would + * intersect with the overlay is the mouse position, the trigger is left mouse, and the track pad is right mouse. + * + * Return true if the controller is pointed at the overlay and an event was generated. */ + virtual bool HandleControllerOverlayInteractionAsMouse( VROverlayHandle_t ulOverlayHandle, TrackedDeviceIndex_t unControllerDeviceIndex ) = 0; + + /** Returns true if the specified overlay is the hover target. An overlay is the hover target when it is the last overlay "moused over" + * by the virtual mouse pointer */ + virtual bool IsHoverTargetOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Returns the current Gamepad focus overlay */ + virtual vr::VROverlayHandle_t GetGamepadFocusOverlay() = 0; + + /** Sets the current Gamepad focus overlay */ + virtual EVROverlayError SetGamepadFocusOverlay( VROverlayHandle_t ulNewFocusOverlay ) = 0; + + /** Sets an overlay's neighbor. This will also set the neighbor of the "to" overlay + * to point back to the "from" overlay. 
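 * (Editorial sketch, not part of the original header: wiring two overlays left/right and then
 *  moving gamepad focus between them, assuming valid handles ulLeft and ulRight;
 *  OverlayDirection_Right is assumed here to be one of the EOverlayDirection values.)
 *
 *   vr::VROverlay()->SetGamepadFocusOverlay( ulLeft );
 *   vr::VROverlay()->SetOverlayNeighbor( vr::OverlayDirection_Right, ulLeft, ulRight );
 *   vr::VROverlay()->MoveGamepadFocusToNeighbor( vr::OverlayDirection_Right, ulLeft );  // focus moves to ulRight
 *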
If an overlay's neighbor is set to invalid both + * ends will be cleared */ + virtual EVROverlayError SetOverlayNeighbor( EOverlayDirection eDirection, VROverlayHandle_t ulFrom, VROverlayHandle_t ulTo ) = 0; + + /** Changes the Gamepad focus from one overlay to one of its neighbors. Returns VROverlayError_NoNeighbor if there is no + * neighbor in that direction */ + virtual EVROverlayError MoveGamepadFocusToNeighbor( EOverlayDirection eDirection, VROverlayHandle_t ulFrom ) = 0; + + // --------------------------------------------- + // Overlay texture methods + // --------------------------------------------- + + /** Texture to draw for the overlay. This function can only be called by the overlay's creator or renderer process (see SetOverlayRenderingPid) . + * + * OpenGL dirty state: + * glBindTexture + */ + virtual EVROverlayError SetOverlayTexture( VROverlayHandle_t ulOverlayHandle, const Texture_t *pTexture ) = 0; + + /** Use this to tell the overlay system to release the texture set for this overlay. */ + virtual EVROverlayError ClearOverlayTexture( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Separate interface for providing the data as a stream of bytes, but there is an upper bound on data + * that can be sent. This function can only be called by the overlay's renderer process. */ + virtual EVROverlayError SetOverlayRaw( VROverlayHandle_t ulOverlayHandle, void *pvBuffer, uint32_t unWidth, uint32_t unHeight, uint32_t unDepth ) = 0; + + /** Separate interface for providing the image through a filename: can be png or jpg, and should not be bigger than 1920x1080. + * This function can only be called by the overlay's renderer process */ + virtual EVROverlayError SetOverlayFromFile( VROverlayHandle_t ulOverlayHandle, const char *pchFilePath ) = 0; + + /** Get the native texture handle/device for an overlay you have created. + * On windows this handle will be a ID3D11ShaderResourceView with a ID3D11Texture2D bound. + * + * The texture will always be sized to match the backing texture you supplied in SetOverlayTexture above. + * + * You MUST call ReleaseNativeOverlayHandle() with pNativeTextureHandle once you are done with this texture. + * + * pNativeTextureHandle is an OUTPUT, it will be a pointer to a ID3D11ShaderResourceView *. + * pNativeTextureRef is an INPUT and should be a ID3D11Resource *. The device used by pNativeTextureRef will be used to bind pNativeTextureHandle. + */ + virtual EVROverlayError GetOverlayTexture( VROverlayHandle_t ulOverlayHandle, void **pNativeTextureHandle, void *pNativeTextureRef, uint32_t *pWidth, uint32_t *pHeight, uint32_t *pNativeFormat, EGraphicsAPIConvention *pAPI, EColorSpace *pColorSpace ) = 0; + + /** Release the pNativeTextureHandle provided from the GetOverlayTexture call, this allows the system to free the underlying GPU resources for this object, + * so only do it once you stop rendering this texture. 
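 *
 * Editorial sketch (not part of the original header), assuming a valid handle ulHandle and a
 * D3D11 resource pointer pD3D11Resource as described above; error handling omitted:
 *
 *   void *pNativeHandle = nullptr;
 *   uint32_t unWidth, unHeight, unFormat;
 *   vr::EGraphicsAPIConvention eAPI;
 *   vr::EColorSpace eSpace;
 *   vr::VROverlay()->GetOverlayTexture( ulHandle, &pNativeHandle, pD3D11Resource,
 *                                       &unWidth, &unHeight, &unFormat, &eAPI, &eSpace );
 *   // ... sample from the ID3D11ShaderResourceView in pNativeHandle ...
 *   vr::VROverlay()->ReleaseNativeOverlayHandle( ulHandle, pNativeHandle );
 *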
+ */ + virtual EVROverlayError ReleaseNativeOverlayHandle( VROverlayHandle_t ulOverlayHandle, void *pNativeTextureHandle ) = 0; + + /** Get the size of the overlay texture */ + virtual EVROverlayError GetOverlayTextureSize( VROverlayHandle_t ulOverlayHandle, uint32_t *pWidth, uint32_t *pHeight ) = 0; + + // ---------------------------------------------- + // Dashboard Overlay Methods + // ---------------------------------------------- + + /** Creates a dashboard overlay and returns its handle */ + virtual EVROverlayError CreateDashboardOverlay( const char *pchOverlayKey, const char *pchOverlayFriendlyName, VROverlayHandle_t * pMainHandle, VROverlayHandle_t *pThumbnailHandle ) = 0; + + /** Returns true if the dashboard is visible */ + virtual bool IsDashboardVisible() = 0; + + /** returns true if the dashboard is visible and the specified overlay is the active system Overlay */ + virtual bool IsActiveDashboardOverlay( VROverlayHandle_t ulOverlayHandle ) = 0; + + /** Sets the dashboard overlay to only appear when the specified process ID has scene focus */ + virtual EVROverlayError SetDashboardOverlaySceneProcess( VROverlayHandle_t ulOverlayHandle, uint32_t unProcessId ) = 0; + + /** Gets the process ID that this dashboard overlay requires to have scene focus */ + virtual EVROverlayError GetDashboardOverlaySceneProcess( VROverlayHandle_t ulOverlayHandle, uint32_t *punProcessId ) = 0; + + /** Shows the dashboard. */ + virtual void ShowDashboard( const char *pchOverlayToShow ) = 0; + + /** Returns the tracked device that has the laser pointer in the dashboard */ + virtual vr::TrackedDeviceIndex_t GetPrimaryDashboardDevice() = 0; + + // --------------------------------------------- + // Keyboard methods + // --------------------------------------------- + + /** Show the virtual keyboard to accept input **/ + virtual EVROverlayError ShowKeyboard( EGamepadTextInputMode eInputMode, EGamepadTextInputLineMode eLineInputMode, const char *pchDescription, uint32_t unCharMax, const char *pchExistingText, bool bUseMinimalMode, uint64_t uUserValue ) = 0; + + virtual EVROverlayError ShowKeyboardForOverlay( VROverlayHandle_t ulOverlayHandle, EGamepadTextInputMode eInputMode, EGamepadTextInputLineMode eLineInputMode, const char *pchDescription, uint32_t unCharMax, const char *pchExistingText, bool bUseMinimalMode, uint64_t uUserValue ) = 0; + + /** Get the text that was entered into the text input **/ + virtual uint32_t GetKeyboardText( VR_OUT_STRING() char *pchText, uint32_t cchText ) = 0; + + /** Hide the virtual keyboard **/ + virtual void HideKeyboard() = 0; + + /** Set the position of the keyboard in world space **/ + virtual void SetKeyboardTransformAbsolute( ETrackingUniverseOrigin eTrackingOrigin, const HmdMatrix34_t *pmatTrackingOriginToKeyboardTransform ) = 0; + + /** Set the position of the keyboard in overlay space by telling it to avoid a rectangle in the overlay. Rectangle coords have (0,0) in the bottom left **/ + virtual void SetKeyboardPositionForOverlay( VROverlayHandle_t ulOverlayHandle, HmdRect2_t avoidRect ) = 0; + + }; + + static const char * const IVROverlay_Version = "IVROverlay_013"; + +} // namespace vr + +// ivrrendermodels.h +namespace vr +{ + +static const char * const k_pch_Controller_Component_GDC2015 = "gdc2015"; // Canonical coordinate system of the gdc 2015 wired controller, provided for backwards compatibility +static const char * const k_pch_Controller_Component_Base = "base"; // For controllers with an unambiguous 'base'. 
+static const char * const k_pch_Controller_Component_Tip = "tip"; // For controllers with an unambiguous 'tip' (used for 'laser-pointing') +static const char * const k_pch_Controller_Component_HandGrip = "handgrip"; // Neutral, ambidextrous hand-pose when holding controller. On plane between neutrally posed index finger and thumb +static const char * const k_pch_Controller_Component_Status = "status"; // 1:1 aspect ratio status area, with canonical [0,1] uv mapping + +#if defined(__linux__) || defined(__APPLE__) +// The 32-bit version of gcc has the alignment requirement for uint64 and double set to +// 4 meaning that even with #pragma pack(8) these types will only be four-byte aligned. +// The 64-bit version of gcc has the alignment requirement for these types set to +// 8 meaning that unless we use #pragma pack(4) our structures will get bigger. +// The 64-bit structure packing has to match the 32-bit structure packing for each platform. +#pragma pack( push, 4 ) +#else +#pragma pack( push, 8 ) +#endif + +/** Errors that can occur with the VR compositor */ +enum EVRRenderModelError +{ + VRRenderModelError_None = 0, + VRRenderModelError_Loading = 100, + VRRenderModelError_NotSupported = 200, + VRRenderModelError_InvalidArg = 300, + VRRenderModelError_InvalidModel = 301, + VRRenderModelError_NoShapes = 302, + VRRenderModelError_MultipleShapes = 303, + VRRenderModelError_TooManyVertices = 304, + VRRenderModelError_MultipleTextures = 305, + VRRenderModelError_BufferTooSmall = 306, + VRRenderModelError_NotEnoughNormals = 307, + VRRenderModelError_NotEnoughTexCoords = 308, + + VRRenderModelError_InvalidTexture = 400, +}; + +typedef uint32_t VRComponentProperties; + +enum EVRComponentProperty +{ + VRComponentProperty_IsStatic = (1 << 0), + VRComponentProperty_IsVisible = (1 << 1), + VRComponentProperty_IsTouched = (1 << 2), + VRComponentProperty_IsPressed = (1 << 3), + VRComponentProperty_IsScrolled = (1 << 4), +}; + +/** Describes state information about a render-model component, including transforms and other dynamic properties */ +struct RenderModel_ComponentState_t +{ + HmdMatrix34_t mTrackingToComponentRenderModel; // Transform required when drawing the component render model + HmdMatrix34_t mTrackingToComponentLocal; // Transform available for attaching to a local component coordinate system (-Z out from surface ) + VRComponentProperties uProperties; +}; + +/** A single vertex in a render model */ +struct RenderModel_Vertex_t +{ + HmdVector3_t vPosition; // position in meters in device space + HmdVector3_t vNormal; + float rfTextureCoord[2]; +}; + +/** A texture map for use on a render model */ +struct RenderModel_TextureMap_t +{ + uint16_t unWidth, unHeight; // width and height of the texture map in pixels + const uint8_t *rubTextureMapData; // Map texture data. All textures are RGBA with 8 bits per channel per pixel. Data size is width * height * 4ub +}; + +/** Session unique texture identifier. Rendermodels which share the same texture will have the same id. +IDs <0 denote the texture is not present */ + +typedef int32_t TextureID_t; + +const TextureID_t INVALID_TEXTURE_ID = -1; + +struct RenderModel_t +{ + const RenderModel_Vertex_t *rVertexData; // Vertex data for the mesh + uint32_t unVertexCount; // Number of vertices in the vertex data + const uint16_t *rIndexData; // Indices into the vertex data for each triangle + uint32_t unTriangleCount; // Number of triangles in the mesh. Index count is 3 * TriangleCount + TextureID_t diffuseTextureId; // Session unique texture identifier. 
Rendermodels which share the same texture will have the same id. <0 == texture not present +}; + +struct RenderModel_ControllerMode_State_t +{ + bool bScrollWheelVisible; // is this controller currently set to be in a scroll wheel mode +}; + +#pragma pack( pop ) + +class IVRRenderModels +{ +public: + + /** Loads and returns a render model for use in the application. pchRenderModelName should be a render model name + * from the Prop_RenderModelName_String property or an absolute path name to a render model on disk. + * + * The resulting render model is valid until VR_Shutdown() is called or until FreeRenderModel() is called. When the + * application is finished with the render model it should call FreeRenderModel() to free the memory associated + * with the model. + * + * The method returns VRRenderModelError_Loading while the render model is still being loaded. + * The method returns VRRenderModelError_None once loaded successfully, otherwise will return an error. */ + virtual EVRRenderModelError LoadRenderModel_Async( const char *pchRenderModelName, RenderModel_t **ppRenderModel ) = 0; + + /** Frees a previously returned render model + * It is safe to call this on a null ptr. */ + virtual void FreeRenderModel( RenderModel_t *pRenderModel ) = 0; + + /** Loads and returns a texture for use in the application. */ + virtual EVRRenderModelError LoadTexture_Async( TextureID_t textureId, RenderModel_TextureMap_t **ppTexture ) = 0; + + /** Frees a previously returned texture + * It is safe to call this on a null ptr. */ + virtual void FreeTexture( RenderModel_TextureMap_t *pTexture ) = 0; + + /** Creates a D3D11 texture and loads data into it. */ + virtual EVRRenderModelError LoadTextureD3D11_Async( TextureID_t textureId, void *pD3D11Device, void **ppD3D11Texture2D ) = 0; + + /** Helper function to copy the bits into an existing texture. */ + virtual EVRRenderModelError LoadIntoTextureD3D11_Async( TextureID_t textureId, void *pDstTexture ) = 0; + + /** Use this to free textures created with LoadTextureD3D11_Async instead of calling Release on them. */ + virtual void FreeTextureD3D11( void *pD3D11Texture2D ) = 0; + + /** Use this to get the names of available render models. Index does not correlate to a tracked device index, but + * is only used for iterating over all available render models. If the index is out of range, this function will return 0. + * Otherwise, it will return the size of the buffer required for the name. */ + virtual uint32_t GetRenderModelName( uint32_t unRenderModelIndex, VR_OUT_STRING() char *pchRenderModelName, uint32_t unRenderModelNameLen ) = 0; + + /** Returns the number of available render models. */ + virtual uint32_t GetRenderModelCount() = 0; + + + /** Returns the number of components of the specified render model. + * Components are useful when client application wish to draw, label, or otherwise interact with components of tracked objects. + * Examples controller components: + * renderable things such as triggers, buttons + * non-renderable things which include coordinate systems such as 'tip', 'base', a neutral controller agnostic hand-pose + * If all controller components are enumerated and rendered, it will be equivalent to drawing the traditional render model + * Returns 0 if components not supported, >0 otherwise */ + virtual uint32_t GetComponentCount( const char *pchRenderModelName ) = 0; + + /** Use this to get the names of available components. 
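 * (Editorial sketch, not part of the original header: enumerating the components of a render
 *  model, assuming pchRenderModelName is a valid model name and using a fixed-size name buffer.)
 *
 *   uint32_t unCount = vr::VRRenderModels()->GetComponentCount( pchRenderModelName );
 *   for ( uint32_t i = 0; i < unCount; i++ )
 *   {
 *     char szName[ 256 ];
 *     vr::VRRenderModels()->GetComponentName( pchRenderModelName, i, szName, sizeof( szName ) );
 *     // szName now holds e.g. "tip", "handgrip", or "status"
 *   }
 *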
Index does not correlate to a tracked device index, but + * is only used for iterating over all available components. If the index is out of range, this function will return 0. + * Otherwise, it will return the size of the buffer required for the name. */ + virtual uint32_t GetComponentName( const char *pchRenderModelName, uint32_t unComponentIndex, VR_OUT_STRING( ) char *pchComponentName, uint32_t unComponentNameLen ) = 0; + + /** Get the button mask for all buttons associated with this component + * If no buttons (or axes) are associated with this component, return 0 + * Note: multiple components may be associated with the same button. Ex: two grip buttons on a single controller. + * Note: A single component may be associated with multiple buttons. Ex: A trackpad which also provides "D-pad" functionality */ + virtual uint64_t GetComponentButtonMask( const char *pchRenderModelName, const char *pchComponentName ) = 0; + + /** Use this to get the render model name for the specified rendermode/component combination, to be passed to LoadRenderModel. + * If the component name is out of range, this function will return 0. + * Otherwise, it will return the size of the buffer required for the name. */ + virtual uint32_t GetComponentRenderModelName( const char *pchRenderModelName, const char *pchComponentName, VR_OUT_STRING( ) char *pchComponentRenderModelName, uint32_t unComponentRenderModelNameLen ) = 0; + + /** Use this to query information about the component, as a function of the controller state. + * + * For dynamic controller components (ex: trigger) values will reflect component motions + * For static components this will return a consistent value independent of the VRControllerState_t + * + * If the pchRenderModelName or pchComponentName is invalid, this will return false (and transforms will be set to identity). + * Otherwise, return true + * Note: For dynamic objects, visibility may be dynamic. (I.e., true/false will be returned based on controller state and controller mode state ) */ + virtual bool GetComponentState( const char *pchRenderModelName, const char *pchComponentName, const vr::VRControllerState_t *pControllerState, const RenderModel_ControllerMode_State_t *pState, RenderModel_ComponentState_t *pComponentState ) = 0; + + /** Returns true if the render model has a component with the specified name */ + virtual bool RenderModelHasComponent( const char *pchRenderModelName, const char *pchComponentName ) = 0; + + /** Returns the URL of the thumbnail image for this rendermodel */ + virtual uint32_t GetRenderModelThumbnailURL( const char *pchRenderModelName, VR_OUT_STRING() char *pchThumbnailURL, uint32_t unThumbnailURLLen, vr::EVRRenderModelError *peError ) = 0; + + /** Provides a render model path that will load the unskinned model if the model name provided has been replace by the user. If the model + * hasn't been replaced the path value will still be a valid path to load the model. Pass this to LoadRenderModel_Async, etc. to load the + * model. 
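 *
 * Editorial sketch (not part of the original header) of the async loading pattern referred to
 * above, assuming pchName holds a render model name obtained from GetRenderModelName();
 * a real application would poll once per frame rather than spinning in place:
 *
 *   vr::RenderModel_t *pModel = nullptr;
 *   vr::EVRRenderModelError err;
 *   do {
 *     err = vr::VRRenderModels()->LoadRenderModel_Async( pchName, &pModel );
 *   } while ( err == vr::VRRenderModelError_Loading );
 *   if ( err == vr::VRRenderModelError_None ) {
 *     // ... upload pModel->rVertexData / pModel->rIndexData to the GPU ...
 *     vr::VRRenderModels()->FreeRenderModel( pModel );
 *   }
 *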
*/ + virtual uint32_t GetRenderModelOriginalPath( const char *pchRenderModelName, VR_OUT_STRING() char *pchOriginalPath, uint32_t unOriginalPathLen, vr::EVRRenderModelError *peError ) = 0; + + /** Returns a string for a render model error */ + virtual const char *GetRenderModelErrorNameFromEnum( vr::EVRRenderModelError error ) = 0; +}; + +static const char * const IVRRenderModels_Version = "IVRRenderModels_005"; + +} + + +// ivrextendeddisplay.h +namespace vr +{ + + /** NOTE: Use of this interface is not recommended in production applications. It will not work for displays which use + * direct-to-display mode. Creating our own window is also incompatible with the VR compositor and is not available when the compositor is running. */ + class IVRExtendedDisplay + { + public: + + /** Size and position that the window needs to be on the VR display. */ + virtual void GetWindowBounds( int32_t *pnX, int32_t *pnY, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** Gets the viewport in the frame buffer to draw the output of the distortion into */ + virtual void GetEyeOutputViewport( EVREye eEye, uint32_t *pnX, uint32_t *pnY, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** [D3D10/11 Only] + * Returns the adapter index and output index that the user should pass into EnumAdapters and EnumOutputs + * to create the device and swap chain in DX10 and DX11. If an error occurs both indices will be set to -1. + */ + virtual void GetDXGIOutputInfo( int32_t *pnAdapterIndex, int32_t *pnAdapterOutputIndex ) = 0; + + }; + + static const char * const IVRExtendedDisplay_Version = "IVRExtendedDisplay_001"; + +} + + +// ivrtrackedcamera.h +namespace vr +{ + +class IVRTrackedCamera +{ +public: + /** Returns a string for an error */ + virtual const char *GetCameraErrorNameFromEnum( vr::EVRTrackedCameraError eCameraError ) = 0; + + /** For convenience, same as tracked property request Prop_HasCamera_Bool */ + virtual vr::EVRTrackedCameraError HasCamera( vr::TrackedDeviceIndex_t nDeviceIndex, bool *pHasCamera ) = 0; + + /** Gets size of the image frame. */ + virtual vr::EVRTrackedCameraError GetCameraFrameSize( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, uint32_t *pnWidth, uint32_t *pnHeight, uint32_t *pnFrameBufferSize ) = 0; + + virtual vr::EVRTrackedCameraError GetCameraIntrinisics( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, vr::HmdVector2_t *pFocalLength, vr::HmdVector2_t *pCenter ) = 0; + + virtual vr::EVRTrackedCameraError GetCameraProjection( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, float flZNear, float flZFar, vr::HmdMatrix44_t *pProjection ) = 0; + + /** Acquiring streaming service permits video streaming for the caller. Releasing hints the system that video services do not need to be maintained for this client. + * If the camera has not already been activated, a one time spin up may incur some auto exposure as well as initial streaming frame delays. + * The camera should be considered a global resource accessible for shared consumption but not exclusive to any caller. + * The camera may go inactive due to lack of active consumers or headset idleness. */ + virtual vr::EVRTrackedCameraError AcquireVideoStreamingService( vr::TrackedDeviceIndex_t nDeviceIndex, vr::TrackedCameraHandle_t *pHandle ) = 0; + virtual vr::EVRTrackedCameraError ReleaseVideoStreamingService( vr::TrackedCameraHandle_t hTrackedCamera ) = 0; + + /** Copies the image frame into a caller's provided buffer. 
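 * (Editorial sketch, not part of the original header: a typical acquire/poll/release sequence,
 *  assuming the HMD camera at device index vr::k_unTrackedDeviceIndex_Hmd and the
 *  VRTrackedCameraFrameType_Undistorted frame type; both names are assumptions here.)
 *
 *   vr::TrackedCameraHandle_t hCamera;
 *   uint32_t unWidth, unHeight, unSize;
 *   vr::VRTrackedCamera()->GetCameraFrameSize( vr::k_unTrackedDeviceIndex_Hmd,
 *       vr::VRTrackedCameraFrameType_Undistorted, &unWidth, &unHeight, &unSize );
 *   vr::VRTrackedCamera()->AcquireVideoStreamingService( vr::k_unTrackedDeviceIndex_Hmd, &hCamera );
 *   std::vector<uint8_t> buffer( unSize );
 *   vr::CameraVideoStreamFrameHeader_t header;
 *   vr::VRTrackedCamera()->GetVideoStreamFrameBuffer( hCamera, vr::VRTrackedCameraFrameType_Undistorted,
 *       buffer.data(), unSize, &header, sizeof( header ) );
 *   vr::VRTrackedCamera()->ReleaseVideoStreamingService( hCamera );
 *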
The image data is currently provided as RGBA data, 4 bytes per pixel. + * A caller can provide null for the framebuffer or frameheader if not desired. Requesting the frame header first, followed by the frame buffer allows + * the caller to determine if the frame as advanced per the frame header sequence. + * If there is no frame available yet, due to initial camera spinup or re-activation, the error will be VRTrackedCameraError_NoFrameAvailable. + * Ideally a caller should be polling at ~16ms intervals */ + virtual vr::EVRTrackedCameraError GetVideoStreamFrameBuffer( vr::TrackedCameraHandle_t hTrackedCamera, vr::EVRTrackedCameraFrameType eFrameType, void *pFrameBuffer, uint32_t nFrameBufferSize, vr::CameraVideoStreamFrameHeader_t *pFrameHeader, uint32_t nFrameHeaderSize ) = 0; + + /** Gets size of the image frame. */ + virtual vr::EVRTrackedCameraError GetVideoStreamTextureSize( vr::TrackedDeviceIndex_t nDeviceIndex, vr::EVRTrackedCameraFrameType eFrameType, vr::VRTextureBounds_t *pTextureBounds, uint32_t *pnWidth, uint32_t *pnHeight ) = 0; + + /** Access a shared D3D11 texture for the specified tracked camera stream */ + virtual vr::EVRTrackedCameraError GetVideoStreamTextureD3D11( vr::TrackedCameraHandle_t hTrackedCamera, vr::EVRTrackedCameraFrameType eFrameType, void *pD3D11DeviceOrResource, void **ppD3D11ShaderResourceView, vr::CameraVideoStreamFrameHeader_t *pFrameHeader, uint32_t nFrameHeaderSize ) = 0; + + /** Access a shared GL texture for the specified tracked camera stream */ + virtual vr::EVRTrackedCameraError GetVideoStreamTextureGL( vr::TrackedCameraHandle_t hTrackedCamera, vr::EVRTrackedCameraFrameType eFrameType, vr::glUInt_t *pglTextureId, vr::CameraVideoStreamFrameHeader_t *pFrameHeader, uint32_t nFrameHeaderSize ) = 0; + virtual vr::EVRTrackedCameraError ReleaseVideoStreamTextureGL( vr::TrackedCameraHandle_t hTrackedCamera, vr::glUInt_t glTextureId ) = 0; +}; + +static const char * const IVRTrackedCamera_Version = "IVRTrackedCamera_003"; + +} // namespace vr + + +// ivrscreenshots.h +namespace vr +{ + +/** Errors that can occur with the VR compositor */ +enum EVRScreenshotError +{ + VRScreenshotError_None = 0, + VRScreenshotError_RequestFailed = 1, + VRScreenshotError_IncompatibleVersion = 100, + VRScreenshotError_NotFound = 101, + VRScreenshotError_BufferTooSmall = 102, + VRScreenshotError_ScreenshotAlreadyInProgress = 108, +}; + +/** Allows the application to generate screenshots */ +class IVRScreenshots +{ +public: + /** Request a screenshot of the requested type. + * A request of the VRScreenshotType_Stereo type will always + * work. Other types will depend on the underlying application + * support. + * The first file name is for the preview image and should be a + * regular screenshot (ideally from the left eye). The second + * is the VR screenshot in the correct format. They should be + * in the same aspect ratio. Formats per type: + * VRScreenshotType_Mono: the VR filename is ignored (can be + * nullptr), this is a normal flat single shot. + * VRScreenshotType_Stereo: The VR image should be a + * side-by-side with the left eye image on the left. + * VRScreenshotType_Cubemap: The VR image should be six square + * images composited horizontally. + * VRScreenshotType_StereoPanorama: above/below with left eye + * panorama being the above image. Image is typically square + * with the panorama being 2x horizontal. + * + * Note that the VR dashboard will call this function when + * the user presses the screenshot binding (currently System + * Button + Trigger). 
If Steam is running, the destination
+ * file names will be in %TEMP% and will be copied into
+ * Steam's screenshot library for the running application
+ * once SubmitScreenshot() is called.
+ * If Steam is not running, the paths will be in the user's
+ * documents folder under Documents\SteamVR\Screenshots.
+ * Other VR applications can call this to initiate a
+ * screenshot outside of user control.
+ * The destination file names do not need an extension;
+ * the correct one for the format, currently .png, will be
+ * appended. */
+ virtual vr::EVRScreenshotError RequestScreenshot( vr::ScreenshotHandle_t *pOutScreenshotHandle, vr::EVRScreenshotType type, const char *pchPreviewFilename, const char *pchVRFilename ) = 0;
+
+ /** Called by the running VR application to indicate that it
+ * wishes to be in charge of screenshots. If the
+ * application does not call this, the Compositor will only
+ * support VRScreenshotType_Stereo screenshots that will be
+ * captured without notification to the running app.
+ * Once hooked your application will receive a
+ * VREvent_RequestScreenshot event when the user presses the
+ * buttons to take a screenshot. */
+ virtual vr::EVRScreenshotError HookScreenshot( VR_ARRAY_COUNT( numTypes ) const vr::EVRScreenshotType *pSupportedTypes, int numTypes ) = 0;
+
+ /** When your application receives a
+ * VREvent_RequestScreenshot event, call these functions to get
+ * the details of the screenshot request. */
+ virtual vr::EVRScreenshotType GetScreenshotPropertyType( vr::ScreenshotHandle_t screenshotHandle, vr::EVRScreenshotError *pError ) = 0;
+
+ /** Get the filename for the preview or vr image (see
+ * vr::EScreenshotPropertyFilenames). The return value is
+ * the size of the string. */
+ virtual uint32_t GetScreenshotPropertyFilename( vr::ScreenshotHandle_t screenshotHandle, vr::EVRScreenshotPropertyFilenames filenameType, VR_OUT_STRING() char *pchFilename, uint32_t cchFilename, vr::EVRScreenshotError *pError ) = 0;
+
+ /** Call this if producing the screenshot will take more than
+ * a few ms of processing. This will result
+ * in an overlay being presented that shows a completion
+ * bar. */
+ virtual vr::EVRScreenshotError UpdateScreenshotProgress( vr::ScreenshotHandle_t screenshotHandle, float flProgress ) = 0;
+
+ /** Tells the compositor to take an internal screenshot of
+ * type VRScreenshotType_Stereo. It will take the current
+ * submitted scene textures of the running application and
+ * write them into the preview image and a side-by-side file
+ * for the VR image.
+ * This is similar to RequestScreenshot, but doesn't ever
+ * talk to the application, just takes the shot and submits. */
+ virtual vr::EVRScreenshotError TakeStereoScreenshot( vr::ScreenshotHandle_t *pOutScreenshotHandle, const char *pchPreviewFilename, const char *pchVRFilename ) = 0;
+
+ /** Submit the completed screenshot. If Steam is running
+ * this will call into the Steam client and upload the
+ * screenshot to the screenshots section of the library for
+ * the running application. If Steam is not running, this
+ * function will display a notification to the user that the
+ * screenshot was taken. The paths should be full, absolute
+ * paths including extensions.
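 *
 * Editorial sketch (not part of the original header): an application-driven capture using
 * TakeStereoScreenshot() followed by SubmitScreenshot(); the file names are illustrative
 * placeholders (use absolute paths as noted above) and error handling is omitted.
 *
 *   vr::ScreenshotHandle_t hShot;
 *   vr::VRScreenshots()->TakeStereoScreenshot( &hShot, "shot_preview.png", "shot_vr.png" );
 *   vr::VRScreenshots()->SubmitScreenshot( hShot, vr::VRScreenshotType_Stereo,
 *                                          "shot_preview.png", "shot_vr.png" );
 *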
+ * screenshotHandle can be k_unScreenshotHandleInvalid if this + * was a new shot taking by the app to be saved and not + * initiated by a user (achievement earned or something) */ + virtual vr::EVRScreenshotError SubmitScreenshot( vr::ScreenshotHandle_t screenshotHandle, vr::EVRScreenshotType type, const char *pchSourcePreviewFilename, const char *pchSourceVRFilename ) = 0; +}; + +static const char * const IVRScreenshots_Version = "IVRScreenshots_001"; + +} // namespace vr + + + +// ivrresources.h +namespace vr +{ + +class IVRResources +{ +public: + + // ------------------------------------ + // Shared Resource Methods + // ------------------------------------ + + /** Loads the specified resource into the provided buffer if large enough. + * Returns the size in bytes of the buffer required to hold the specified resource. */ + virtual uint32_t LoadSharedResource( const char *pchResourceName, char *pchBuffer, uint32_t unBufferLen ) = 0; + + /** Provides the full path to the specified resource. Resource names can include named directories for + * drivers and other things, and this resolves all of those and returns the actual physical path. + * pchResourceTypeDirectory is the subdirectory of resources to look in. */ + virtual uint32_t GetResourceFullPath( const char *pchResourceName, const char *pchResourceTypeDirectory, char *pchPathBuffer, uint32_t unBufferLen ) = 0; +}; + +static const char * const IVRResources_Version = "IVRResources_001"; + + +}// End + +#endif // _OPENVR_API + + +namespace vr +{ + /** Finds the active installation of the VR API and initializes it. The provided path must be absolute + * or relative to the current working directory. These are the local install versions of the equivalent + * functions in steamvr.h and will work without a local Steam install. + * + * This path is to the "root" of the VR API install. That's the directory with + * the "drivers" directory and a platform (i.e. "win32") directory in it, not the directory with the DLL itself. + */ + inline IVRSystem *VR_Init( EVRInitError *peError, EVRApplicationType eApplicationType ); + + /** unloads vrclient.dll. Any interface pointers from the interface are + * invalid after this point */ + inline void VR_Shutdown(); + + /** Returns true if there is an HMD attached. This check is as lightweight as possible and + * can be called outside of VR_Init/VR_Shutdown. It should be used when an application wants + * to know if initializing VR is a possibility but isn't ready to take that step yet. + */ + VR_INTERFACE bool VR_CALLTYPE VR_IsHmdPresent(); + + /** Returns true if the OpenVR runtime is installed. */ + VR_INTERFACE bool VR_CALLTYPE VR_IsRuntimeInstalled(); + + /** Returns where the OpenVR runtime is installed. */ + VR_INTERFACE const char *VR_CALLTYPE VR_RuntimePath(); + + /** Returns the name of the enum value for an EVRInitError. This function may be called outside of VR_Init()/VR_Shutdown(). */ + VR_INTERFACE const char *VR_CALLTYPE VR_GetVRInitErrorAsSymbol( EVRInitError error ); + + /** Returns an english string for an EVRInitError. Applications should call VR_GetVRInitErrorAsSymbol instead and + * use that as a key to look up their own localized error message. This function may be called outside of VR_Init()/VR_Shutdown(). */ + VR_INTERFACE const char *VR_CALLTYPE VR_GetVRInitErrorAsEnglishDescription( EVRInitError error ); + + /** Returns the interface of the specified version. This method must be called after VR_Init. The + * pointer returned is valid until VR_Shutdown is called. 
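 *
 * Editorial sketch (not part of the original header): fetching an interface directly by its
 * version string after VR_Init() has succeeded; error handling is minimal.
 *
 *   vr::EVRInitError eError = vr::VRInitError_None;
 *   vr::IVRCompositor *pCompositor = static_cast<vr::IVRCompositor *>(
 *       vr::VR_GetGenericInterface( vr::IVRCompositor_Version, &eError ) );
 *   if ( eError != vr::VRInitError_None )
 *       pCompositor = nullptr;
 *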
+ */ + VR_INTERFACE void *VR_CALLTYPE VR_GetGenericInterface( const char *pchInterfaceVersion, EVRInitError *peError ); + + /** Returns whether the interface of the specified version exists. + */ + VR_INTERFACE bool VR_CALLTYPE VR_IsInterfaceVersionValid( const char *pchInterfaceVersion ); + + /** Returns a token that represents whether the VR interface handles need to be reloaded */ + VR_INTERFACE uint32_t VR_CALLTYPE VR_GetInitToken(); + + // These typedefs allow old enum names from SDK 0.9.11 to be used in applications. + // They will go away in the future. + typedef EVRInitError HmdError; + typedef EVREye Hmd_Eye; + typedef EGraphicsAPIConvention GraphicsAPIConvention; + typedef EColorSpace ColorSpace; + typedef ETrackingResult HmdTrackingResult; + typedef ETrackedDeviceClass TrackedDeviceClass; + typedef ETrackingUniverseOrigin TrackingUniverseOrigin; + typedef ETrackedDeviceProperty TrackedDeviceProperty; + typedef ETrackedPropertyError TrackedPropertyError; + typedef EVRSubmitFlags VRSubmitFlags_t; + typedef EVRState VRState_t; + typedef ECollisionBoundsStyle CollisionBoundsStyle_t; + typedef EVROverlayError VROverlayError; + typedef EVRFirmwareError VRFirmwareError; + typedef EVRCompositorError VRCompositorError; + typedef EVRScreenshotError VRScreenshotsError; + + inline uint32_t &VRToken() + { + static uint32_t token; + return token; + } + + class COpenVRContext + { + public: + COpenVRContext() { Clear(); } + void Clear(); + + inline void CheckClear() + { + if ( VRToken() != VR_GetInitToken() ) + { + Clear(); + VRToken() = VR_GetInitToken(); + } + } + + IVRSystem *VRSystem() + { + CheckClear(); + if ( m_pVRSystem == nullptr ) + { + EVRInitError eError; + m_pVRSystem = ( IVRSystem * )VR_GetGenericInterface( IVRSystem_Version, &eError ); + } + return m_pVRSystem; + } + IVRChaperone *VRChaperone() + { + CheckClear(); + if ( m_pVRChaperone == nullptr ) + { + EVRInitError eError; + m_pVRChaperone = ( IVRChaperone * )VR_GetGenericInterface( IVRChaperone_Version, &eError ); + } + return m_pVRChaperone; + } + + IVRChaperoneSetup *VRChaperoneSetup() + { + CheckClear(); + if ( m_pVRChaperoneSetup == nullptr ) + { + EVRInitError eError; + m_pVRChaperoneSetup = ( IVRChaperoneSetup * )VR_GetGenericInterface( IVRChaperoneSetup_Version, &eError ); + } + return m_pVRChaperoneSetup; + } + + IVRCompositor *VRCompositor() + { + CheckClear(); + if ( m_pVRCompositor == nullptr ) + { + EVRInitError eError; + m_pVRCompositor = ( IVRCompositor * )VR_GetGenericInterface( IVRCompositor_Version, &eError ); + } + return m_pVRCompositor; + } + + IVROverlay *VROverlay() + { + CheckClear(); + if ( m_pVROverlay == nullptr ) + { + EVRInitError eError; + m_pVROverlay = ( IVROverlay * )VR_GetGenericInterface( IVROverlay_Version, &eError ); + } + return m_pVROverlay; + } + + IVRResources *VRResources() + { + CheckClear(); + if ( m_pVRResources == nullptr ) + { + EVRInitError eError; + m_pVRResources = (IVRResources *)VR_GetGenericInterface( IVRResources_Version, &eError ); + } + return m_pVRResources; + } + + IVRScreenshots *VRScreenshots() + { + CheckClear(); + if ( m_pVRScreenshots == nullptr ) + { + EVRInitError eError; + m_pVRScreenshots = ( IVRScreenshots * )VR_GetGenericInterface( IVRScreenshots_Version, &eError ); + } + return m_pVRScreenshots; + } + + IVRRenderModels *VRRenderModels() + { + CheckClear(); + if ( m_pVRRenderModels == nullptr ) + { + EVRInitError eError; + m_pVRRenderModels = ( IVRRenderModels * )VR_GetGenericInterface( IVRRenderModels_Version, &eError ); + } + return m_pVRRenderModels; + } + 
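	/* Editorial note (not part of the original header): the accessors above lazily resolve each
	   interface through VR_GetGenericInterface() and cache the pointer until the init token
	   changes. A minimal usage sketch, assuming a scene application (VRApplication_Scene is an
	   assumption here; error handling trimmed):

	     vr::EVRInitError eError = vr::VRInitError_None;
	     vr::IVRSystem *pSystem = vr::VR_Init( &eError, vr::VRApplication_Scene );
	     if ( eError == vr::VRInitError_None )
	     {
	         vr::IVROverlay *pOverlay = vr::VROverlay();   // resolved and cached on first use
	         // ... use pOverlay and the other accessors ...
	         vr::VR_Shutdown();
	     }
	*/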
+ IVRExtendedDisplay *VRExtendedDisplay() + { + CheckClear(); + if ( m_pVRExtendedDisplay == nullptr ) + { + EVRInitError eError; + m_pVRExtendedDisplay = ( IVRExtendedDisplay * )VR_GetGenericInterface( IVRExtendedDisplay_Version, &eError ); + } + return m_pVRExtendedDisplay; + } + + IVRSettings *VRSettings() + { + CheckClear(); + if ( m_pVRSettings == nullptr ) + { + EVRInitError eError; + m_pVRSettings = ( IVRSettings * )VR_GetGenericInterface( IVRSettings_Version, &eError ); + } + return m_pVRSettings; + } + + IVRApplications *VRApplications() + { + CheckClear(); + if ( m_pVRApplications == nullptr ) + { + EVRInitError eError; + m_pVRApplications = ( IVRApplications * )VR_GetGenericInterface( IVRApplications_Version, &eError ); + } + return m_pVRApplications; + } + + IVRTrackedCamera *VRTrackedCamera() + { + CheckClear(); + if ( m_pVRTrackedCamera == nullptr ) + { + EVRInitError eError; + m_pVRTrackedCamera = ( IVRTrackedCamera * )VR_GetGenericInterface( IVRTrackedCamera_Version, &eError ); + } + return m_pVRTrackedCamera; + } + + private: + IVRSystem *m_pVRSystem; + IVRChaperone *m_pVRChaperone; + IVRChaperoneSetup *m_pVRChaperoneSetup; + IVRCompositor *m_pVRCompositor; + IVROverlay *m_pVROverlay; + IVRResources *m_pVRResources; + IVRRenderModels *m_pVRRenderModels; + IVRExtendedDisplay *m_pVRExtendedDisplay; + IVRSettings *m_pVRSettings; + IVRApplications *m_pVRApplications; + IVRTrackedCamera *m_pVRTrackedCamera; + IVRScreenshots *m_pVRScreenshots; + }; + + inline COpenVRContext &OpenVRInternal_ModuleContext() + { + static void *ctx[ sizeof( COpenVRContext ) / sizeof( void * ) ]; + return *( COpenVRContext * )ctx; // bypass zero-init constructor + } + + inline IVRSystem *VR_CALLTYPE VRSystem() { return OpenVRInternal_ModuleContext().VRSystem(); } + inline IVRChaperone *VR_CALLTYPE VRChaperone() { return OpenVRInternal_ModuleContext().VRChaperone(); } + inline IVRChaperoneSetup *VR_CALLTYPE VRChaperoneSetup() { return OpenVRInternal_ModuleContext().VRChaperoneSetup(); } + inline IVRCompositor *VR_CALLTYPE VRCompositor() { return OpenVRInternal_ModuleContext().VRCompositor(); } + inline IVROverlay *VR_CALLTYPE VROverlay() { return OpenVRInternal_ModuleContext().VROverlay(); } + inline IVRScreenshots *VR_CALLTYPE VRScreenshots() { return OpenVRInternal_ModuleContext().VRScreenshots(); } + inline IVRRenderModels *VR_CALLTYPE VRRenderModels() { return OpenVRInternal_ModuleContext().VRRenderModels(); } + inline IVRApplications *VR_CALLTYPE VRApplications() { return OpenVRInternal_ModuleContext().VRApplications(); } + inline IVRSettings *VR_CALLTYPE VRSettings() { return OpenVRInternal_ModuleContext().VRSettings(); } + inline IVRResources *VR_CALLTYPE VRResources() { return OpenVRInternal_ModuleContext().VRResources(); } + inline IVRExtendedDisplay *VR_CALLTYPE VRExtendedDisplay() { return OpenVRInternal_ModuleContext().VRExtendedDisplay(); } + inline IVRTrackedCamera *VR_CALLTYPE VRTrackedCamera() { return OpenVRInternal_ModuleContext().VRTrackedCamera(); } + + inline void COpenVRContext::Clear() + { + m_pVRSystem = nullptr; + m_pVRChaperone = nullptr; + m_pVRChaperoneSetup = nullptr; + m_pVRCompositor = nullptr; + m_pVROverlay = nullptr; + m_pVRRenderModels = nullptr; + m_pVRExtendedDisplay = nullptr; + m_pVRSettings = nullptr; + m_pVRApplications = nullptr; + m_pVRTrackedCamera = nullptr; + m_pVRResources = nullptr; + m_pVRScreenshots = nullptr; + } + + VR_INTERFACE uint32_t VR_CALLTYPE VR_InitInternal( EVRInitError *peError, EVRApplicationType eApplicationType ); + VR_INTERFACE void 
VR_CALLTYPE VR_ShutdownInternal(); + + /** Finds the active installation of vrclient.dll and initializes it */ + inline IVRSystem *VR_Init( EVRInitError *peError, EVRApplicationType eApplicationType ) + { + IVRSystem *pVRSystem = nullptr; + + EVRInitError eError; + VRToken() = VR_InitInternal( &eError, eApplicationType ); + COpenVRContext &ctx = OpenVRInternal_ModuleContext(); + ctx.Clear(); + + if ( eError == VRInitError_None ) + { + if ( VR_IsInterfaceVersionValid( IVRSystem_Version ) ) + { + pVRSystem = VRSystem(); + } + else + { + VR_ShutdownInternal(); + eError = VRInitError_Init_InterfaceNotFound; + } + } + + if ( peError ) + *peError = eError; + return pVRSystem; + } + + /** unloads vrclient.dll. Any interface pointers from the interface are + * invalid after this point */ + inline void VR_Shutdown() + { + VR_ShutdownInternal(); + } +} diff --git a/gfx/vr/osvr/ClientKit/ClientKitC.h b/gfx/vr/osvr/ClientKit/ClientKitC.h new file mode 100644 index 000000000..8309e890d --- /dev/null +++ b/gfx/vr/osvr/ClientKit/ClientKitC.h @@ -0,0 +1,37 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_ClientKitC_h_GUID_8D7DF104_892D_4CB5_2302_7C6BB5BC985C +#define INCLUDED_ClientKitC_h_GUID_8D7DF104_892D_4CB5_2302_7C6BB5BC985C + +#include <osvr/ClientKit/ContextC.h> +#include <osvr/ClientKit/InterfaceC.h> +#include <osvr/ClientKit/InterfaceCallbackC.h> +#include <osvr/ClientKit/SystemCallbackC.h> + +#endif diff --git a/gfx/vr/osvr/ClientKit/ContextC.h b/gfx/vr/osvr/ClientKit/ContextC.h new file mode 100644 index 000000000..e07e1b4a7 --- /dev/null +++ b/gfx/vr/osvr/ClientKit/ContextC.h @@ -0,0 +1,96 @@ +/** @file + @brief Header + + Must be c-safe! + + @todo Apply annotation macros + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_ContextC_h_GUID_3790F330_2425_4486_4C9F_20C300D7DED3 +#define INCLUDED_ContextC_h_GUID_3790F330_2425_4486_4C9F_20C300D7DED3 + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/AnnotationMacrosC.h> +#include <osvr/Util/StdInt.h> +#include <osvr/Util/ClientOpaqueTypesC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup ClientKit + @{ +*/ + +/** @brief Initialize the library. + + @param applicationIdentifier A null terminated string identifying your + application. Reverse DNS format strongly suggested. + @param flags initialization options (reserved) - pass 0 for now. + + @returns Client context - will be needed for subsequent calls +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ClientContext osvrClientInit( + const char applicationIdentifier[], uint32_t flags OSVR_CPP_ONLY(= 0)); + +/** @brief Updates the state of the context - call regularly in your mainloop. + + @param ctx Client context +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientUpdate(OSVR_ClientContext ctx); + +/** @brief Checks to see if the client context is fully started up and connected + properly to a server. + + If this reports that the client context is not OK, there may not be a server + running, or you may just have to call osvrClientUpdate() a few times to + permit startup to finish. The return value of this call will not change from + failure to success without calling osvrClientUpdate(). + + @param ctx Client context + + @return OSVR_RETURN_FAILURE if not yet fully connected/initialized, or if + some other error (null context) occurs. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientCheckStatus(OSVR_ClientContext ctx); + +/** @brief Shutdown the library. + @param ctx Client context +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientShutdown(OSVR_ClientContext ctx); + +/** @} */ +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/ClientKit/DisplayC.h b/gfx/vr/osvr/ClientKit/DisplayC.h new file mode 100644 index 000000000..75155e6b3 --- /dev/null +++ b/gfx/vr/osvr/ClientKit/DisplayC.h @@ -0,0 +1,506 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_DisplayC_h_GUID_8658EDC9_32A2_49A2_5F5C_10F67852AE74 +#define INCLUDED_DisplayC_h_GUID_8658EDC9_32A2_49A2_5F5C_10F67852AE74 + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/ClientOpaqueTypesC.h> +#include <osvr/Util/RenderingTypesC.h> +#include <osvr/Util/MatrixConventionsC.h> +#include <osvr/Util/Pose3C.h> +#include <osvr/Util/BoolC.h> +#include <osvr/Util/RadialDistortionParametersC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN +/** @addtogroup ClientKit + @{ + @name Display API + @{ +*/ + +/** @brief Opaque type of a display configuration. */ +typedef struct OSVR_DisplayConfigObject *OSVR_DisplayConfig; + +/** @brief Allocates a display configuration object populated with data from the + OSVR system. + + Before this call will succeed, your application will need to be correctly + and fully connected to an OSVR server. You may consider putting this call in + a loop alternating with osvrClientUpdate() until this call succeeds. + + Data provided by a display configuration object: + + - The logical display topology (number and relationship of viewers, eyes, + and surfaces), which remains constant throughout the life of the + configuration object. (A method of notification of change here is TBD). + - Pose data for viewers (not required for rendering) and pose/view data for + eyes (used for rendering) which is based on tracker data: if used, these + should be queried every frame. + - Projection matrix data for surfaces, which while in current practice may + be relatively unchanging, we are not guaranteeing them to be constant: + these should be queried every frame. + - Video-input-relative viewport size/location for a surface: would like this + to be variable, but probably not feasible. If you have input, please + comment on the dev mailing list. + - Per-surface distortion strategy priorities/availabilities: constant. Note + the following, though... + - Per-surface distortion strategy parameters: variable, request each frame. + (Could make constant with a notification if needed?) + + Important note: While most of this data is immediately available if you are + successful in getting a display config object, the pose-based data (viewer + pose, eye pose, eye view matrix) needs tracker state, so at least one (and in + practice, typically more) osvrClientUpdate() must be performed before a new + tracker report is available to populate that state. See + osvrClientCheckDisplayStartup() to query if all startup data is available. + + @todo Decide if relative viewport should be constant in a display config, + and update docs accordingly. + + @todo Decide if distortion params should be constant in a display config, + and update docs accordingly. + + @return OSVR_RETURN_FAILURE if invalid parameters were passed or some other + error occurred, in which case the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetDisplay(OSVR_ClientContext ctx, OSVR_DisplayConfig *disp); + +/** @brief Frees a display configuration object. The corresponding context must + still be open. + + If you fail to call this, it will be automatically called as part of + clean-up when the corresponding context is closed. + + @return OSVR_RETURN_FAILURE if a null config was passed, or if the given + display object was already freed. 
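
    Editorial illustration (not part of the original header): a minimal client lifecycle sketch.
    The application identifier string is illustrative only; a real application would bound the
    startup loops and interleave them with its frame loop.

        OSVR_ClientContext ctx = osvrClientInit("com.example.vrapp", 0);
        OSVR_DisplayConfig disp = NULL;
        while (osvrClientGetDisplay(ctx, &disp) == OSVR_RETURN_FAILURE) {
            osvrClientUpdate(ctx);
        }
        while (osvrClientCheckDisplayStartup(disp) == OSVR_RETURN_FAILURE) {
            osvrClientUpdate(ctx);
        }
        ... render using the display config ...
        osvrClientFreeDisplay(disp);
        osvrClientShutdown(ctx);
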
+*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientFreeDisplay(OSVR_DisplayConfig disp); + +/** @brief Checks to see if a display is fully configured and ready, including + having received its first pose update. + + Once this first succeeds, it will continue to succeed for the lifetime of + the display config object, so it is not necessary to keep calling once you + get a successful result. + + @return OSVR_RETURN_FAILURE if a null config was passed, or if the given + display config object was otherwise not ready for full use. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientCheckDisplayStartup(OSVR_DisplayConfig disp); + +/** @brief A display config can have one or more display inputs to pass pixels + over (HDMI/DVI connections, etc): retrieve the number of display inputs in + the current configuration. + + @param disp Display config object. + @param[out] numDisplayInputs Number of display inputs in the logical display + topology, **constant** throughout the active, valid lifetime of a display + config object. + + @sa OSVR_DisplayInputCount + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in + which case the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetNumDisplayInputs( + OSVR_DisplayConfig disp, OSVR_DisplayInputCount *numDisplayInputs); + +/** @brief Retrieve the pixel dimensions of a given display input for a display + config + + @param disp Display config object. + @param displayInputIndex The zero-based index of the display input. + @param[out] width Width (in pixels) of the display input. + @param[out] height Height (in pixels) of the display input. + + The out parameters are **constant** throughout the active, valid lifetime of + a display config object. + + @sa OSVR_DisplayDimension + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in + which case the output arguments are unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetDisplayDimensions( + OSVR_DisplayConfig disp, OSVR_DisplayInputCount displayInputIndex, + OSVR_DisplayDimension *width, OSVR_DisplayDimension *height); + +/** @brief A display config can have one (or theoretically more) viewers: + retrieve the viewer count. + + @param disp Display config object. + @param[out] viewers Number of viewers in the logical display topology, + **constant** throughout the active, valid lifetime of a display config + object. + + @sa OSVR_ViewerCount + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetNumViewers(OSVR_DisplayConfig disp, OSVR_ViewerCount *viewers); + +/** @brief Get the pose of a viewer in a display config. + + Note that there may not necessarily be any surfaces rendered from this pose + (it's the unused "center" eye in a stereo configuration, for instance) so + only use this if it makes integration into your engine or existing + applications (not originally designed for stereo) easier. + + Will only succeed if osvrClientCheckDisplayStartup() succeeds. + + @return OSVR_RETURN_FAILURE if invalid parameters were passed or no pose was + yet available, in which case the pose argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetViewerPose( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_Pose3 *pose); + +/** @brief Each viewer in a display config can have one or more "eyes" which + have a substantially similar pose: get the count. + + @param disp Display config object. 
+ @param viewer Viewer ID + @param[out] eyes Number of eyes for this viewer in the logical display + topology, **constant** throughout the active, valid lifetime of a display + config object + + @sa OSVR_EyeCount + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetNumEyesForViewer( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount *eyes); + +/** @brief Get the "viewpoint" for the given eye of a viewer in a display + config. + + Will only succeed if osvrClientCheckDisplayStartup() succeeds. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param[out] pose Room-space pose (not relative to pose of the viewer) + + @return OSVR_RETURN_FAILURE if invalid parameters were passed or no pose was + yet available, in which case the pose argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyePose(OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, + OSVR_EyeCount eye, OSVR_Pose3 *pose); + +/** @brief Get the view matrix (inverse of pose) for the given eye of a + viewer in a display config - matrix of **doubles**. + + Will only succeed if osvrClientCheckDisplayStartup() succeeds. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param flags Bitwise OR of matrix convention flags (see @ref MatrixFlags) + @param[out] mat Pass a double[::OSVR_MATRIX_SIZE] to get the transformation + matrix from room space to eye space (not relative to pose of the viewer) + + @return OSVR_RETURN_FAILURE if invalid parameters were passed or no pose was + yet available, in which case the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetViewerEyeViewMatrixd( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_MatrixConventions flags, double *mat); + +/** @brief Get the view matrix (inverse of pose) for the given eye of a + viewer in a display config - matrix of **floats**. + + Will only succeed if osvrClientCheckDisplayStartup() succeeds. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param flags Bitwise OR of matrix convention flags (see @ref MatrixFlags) + @param[out] mat Pass a float[::OSVR_MATRIX_SIZE] to get the transformation + matrix from room space to eye space (not relative to pose of the viewer) + + @return OSVR_RETURN_FAILURE if invalid parameters were passed or no pose was + yet available, in which case the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetViewerEyeViewMatrixf( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_MatrixConventions flags, float *mat); + +/** @brief Each eye of each viewer in a display config has one or more surfaces + (aka "screens") on which content should be rendered. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param[out] surfaces Number of surfaces (numbered [0, surfaces - 1]) for the + given viewer and eye. **Constant** throughout the active, valid lifetime of + a display config object. + + @sa OSVR_SurfaceCount + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. 
+*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrClientGetNumSurfacesForViewerEye( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount *surfaces); + +/** @brief Get the dimensions/location of the viewport **within the display + input** for a surface seen by an eye of a viewer in a display config. (This + does not include other video inputs that may be on a single virtual desktop, + etc. or explicitly account for display configurations that use multiple + video inputs. It does not necessarily indicate that a viewport in the sense + of glViewport must be created with these parameters, though the parameter + order matches for convenience.) + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param[out] left Output: Distance in pixels from the left of the video input + to the left of the viewport. + @param[out] bottom Output: Distance in pixels from the bottom of the video + input to the bottom of the viewport. + @param[out] width Output: Width of viewport in pixels. + @param[out] height Output: Height of viewport in pixels. + + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output arguments are unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetRelativeViewportForViewerEyeSurface( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, OSVR_ViewportDimension *left, + OSVR_ViewportDimension *bottom, OSVR_ViewportDimension *width, + OSVR_ViewportDimension *height); + +/** @brief Get the index of the display input for a surface seen by an eye of a + viewer in a display config. + + This is the OSVR-assigned display input: it may not (and in practice, + usually will not) match any platform-specific display indices. This function + exists to associate surfaces with video inputs as enumerated by + osvrClientGetNumDisplayInputs(). + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param[out] displayInput Zero-based index of the display input pixels for + this surface are tranmitted over. + + This association is **constant** throughout the active, valid lifetime of a + display config object. + + @sa osvrClientGetNumDisplayInputs(), + osvrClientGetRelativeViewportForViewerEyeSurface() + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which + case the output argument is unmodified. + */ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyeSurfaceDisplayInputIndex( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, OSVR_DisplayInputCount *displayInput); + +/** @brief Get the projection matrix for a surface seen by an eye of a viewer + in a display config. (double version) + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param near Distance from viewpoint to near clipping plane - must be + positive. + @param far Distance from viewpoint to far clipping plane - must be positive + and not equal to near, typically greater than near. + @param flags Bitwise OR of matrix convention flags (see @ref MatrixFlags) + @param[out] matrix Output projection matrix: supply an array of 16 + (::OSVR_MATRIX_SIZE) doubles. + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. 
+*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyeSurfaceProjectionMatrixd( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, double near, double far, + OSVR_MatrixConventions flags, double *matrix); + +/** @brief Get the projection matrix for a surface seen by an eye of a viewer + in a display config. (float version) + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param near Distance to near clipping plane - must be nonzero, typically + positive. + @param far Distance to far clipping plane - must be nonzero, typically + positive and greater than near. + @param flags Bitwise OR of matrix convention flags (see @ref MatrixFlags) + @param[out] matrix Output projection matrix: supply an array of 16 + (::OSVR_MATRIX_SIZE) floats. + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyeSurfaceProjectionMatrixf( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, float near, float far, + OSVR_MatrixConventions flags, float *matrix); + +/** @brief Get the clipping planes (positions at unit distance) for a surface + seen by an eye of a viewer + in a display config. + + This is only for use in integrations that cannot accept a fully-formulated + projection matrix as returned by + osvrClientGetViewerEyeSurfaceProjectionMatrixf() or + osvrClientGetViewerEyeSurfaceProjectionMatrixd(), and may not necessarily + provide the same optimizations. + + As all the planes are given at unit (1) distance, before passing these + planes to a consuming function in your application/engine, you will typically + divide them by your near clipping plane distance. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param[out] left Distance to left clipping plane + @param[out] right Distance to right clipping plane + @param[out] bottom Distance to bottom clipping plane + @param[out] top Distance to top clipping plane + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output arguments are unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyeSurfaceProjectionClippingPlanes( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, double *left, double *right, double *bottom, + double *top); + +/** @brief Determines if a surface seen by an eye of a viewer in a display + config requests some distortion to be performed. + + This simply reports true or false, and does not specify which kind of + distortion implementations have been parameterized for this display. For + each distortion implementation your application supports, you'll want to + call the corresponding priority function to find out if it is available. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param[out] distortionRequested Output parameter: whether distortion is + requested. **Constant** throughout the active, valid lifetime of a display + config object. + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. 
+*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientDoesViewerEyeSurfaceWantDistortion(OSVR_DisplayConfig disp, + OSVR_ViewerCount viewer, + OSVR_EyeCount eye, + OSVR_SurfaceCount surface, + OSVR_CBool *distortionRequested); + +/** @brief Returns the priority/availability of radial distortion parameters for + a surface seen by an eye of a viewer in a display config. + + If osvrClientDoesViewerEyeSurfaceWantDistortion() reports false, then the + display does not request distortion of any sort, and thus neither this nor + any other distortion strategy priority function will report an "available" + priority. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param[out] priority Output: the priority level. Negative values + (canonically OSVR_DISTORTION_PRIORITY_UNAVAILABLE) indicate this technique + not available, higher values indicate higher preference for the given + technique based on the device's description. **Constant** throughout the + active, valid lifetime of a display config object. + + @return OSVR_RETURN_FAILURE if invalid parameters were passed, in which case + the output argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyeSurfaceRadialDistortionPriority( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, OSVR_DistortionPriority *priority); + +/** @brief Returns the radial distortion parameters, if known/requested, for a + surface seen by an eye of a viewer in a display config. + + Will only succeed if osvrClientGetViewerEyeSurfaceRadialDistortionPriority() + reports a non-negative priority. + + @param disp Display config object + @param viewer Viewer ID + @param eye Eye ID + @param surface Surface ID + @param[out] params Output: the parameters for radial distortion + + @return OSVR_RETURN_FAILURE if this surface does not have these parameters + described, or if invalid parameters were passed, in which case the output + argument is unmodified. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetViewerEyeSurfaceRadialDistortion( + OSVR_DisplayConfig disp, OSVR_ViewerCount viewer, OSVR_EyeCount eye, + OSVR_SurfaceCount surface, OSVR_RadialDistortionParameters *params); + +/** @} + @} +*/ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/ClientKit/Export.h b/gfx/vr/osvr/ClientKit/Export.h new file mode 100644 index 000000000..94d5f44f4 --- /dev/null +++ b/gfx/vr/osvr/ClientKit/Export.h @@ -0,0 +1,138 @@ +/** @file + @brief Automatically-generated export header - do not edit! + + @date 2016 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +// Copyright 2016 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +#ifndef OSVR_CLIENTKIT_EXPORT_H +#define OSVR_CLIENTKIT_EXPORT_H + +#ifdef OSVR_CLIENTKIT_STATIC_DEFINE +# define OSVR_CLIENTKIT_EXPORT +# define OSVR_CLIENTKIT_NO_EXPORT +#endif + +/* Per-compiler advance preventative definition */ +#if defined(__BORLANDC__) || defined(__CODEGEARC__) || defined(__HP_aCC) || \ + defined(__PGI) || defined(__WATCOMC__) +/* Compilers that don't support deprecated, according to CMake. */ +# ifndef OSVR_CLIENTKIT_DEPRECATED +# define OSVR_CLIENTKIT_DEPRECATED +# endif +#endif + +/* Check for attribute support */ +#if defined(__INTEL_COMPILER) +/* Checking before GNUC because Intel implements GNU extensions, + * so it chooses to define __GNUC__ as well. */ +# if __INTEL_COMPILER >= 1200 +/* Intel compiler 12.0 or newer can handle these attributes per CMake */ +# define OSVR_CLIENTKIT_EXPORT_HEADER_SUPPORTS_ATTRIBUTES +# endif + +#elif defined(__GNUC__) +# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2)) +/* GCC 4.2+ */ +# define OSVR_CLIENTKIT_EXPORT_HEADER_SUPPORTS_ATTRIBUTES +# endif +#endif + +/* Per-platform defines */ +#if defined(_MSC_VER) +/* MSVC on Windows */ + +#ifndef OSVR_CLIENTKIT_EXPORT +# ifdef osvrClientKit_EXPORTS + /* We are building this library */ +# define OSVR_CLIENTKIT_EXPORT __declspec(dllexport) +# else + /* We are using this library */ +# define OSVR_CLIENTKIT_EXPORT __declspec(dllimport) +# endif +#endif + +#ifndef OSVR_CLIENTKIT_DEPRECATED +# define OSVR_CLIENTKIT_DEPRECATED __declspec(deprecated) +#endif + +#elif defined(_WIN32) && defined(__GNUC__) +/* GCC-compatible on Windows */ + +#ifndef OSVR_CLIENTKIT_EXPORT +# ifdef osvrClientKit_EXPORTS + /* We are building this library */ +# define OSVR_CLIENTKIT_EXPORT __attribute__((dllexport)) +# else + /* We are using this library */ +# define OSVR_CLIENTKIT_EXPORT __attribute__((dllimport)) +# endif +#endif + +#ifndef OSVR_CLIENTKIT_DEPRECATED +# define OSVR_CLIENTKIT_DEPRECATED __attribute__((__deprecated__)) +#endif + +#elif defined(OSVR_CLIENTKIT_EXPORT_HEADER_SUPPORTS_ATTRIBUTES) || \ + (defined(__APPLE__) && defined(__MACH__)) +/* GCC4.2+ compatible (assuming something *nix-like) and Mac OS X */ +/* (The first macro is defined at the top of the file, if applicable) */ +/* see https://gcc.gnu.org/wiki/Visibility */ + +#ifndef OSVR_CLIENTKIT_EXPORT + /* We are building/using this library */ +# define OSVR_CLIENTKIT_EXPORT __attribute__((visibility("default"))) +#endif + +#ifndef OSVR_CLIENTKIT_NO_EXPORT +# define OSVR_CLIENTKIT_NO_EXPORT __attribute__((visibility("hidden"))) +#endif + +#ifndef OSVR_CLIENTKIT_DEPRECATED +# define OSVR_CLIENTKIT_DEPRECATED __attribute__((__deprecated__)) +#endif + +#endif +/* End of platform ifdefs */ + +/* fallback def */ +#ifndef OSVR_CLIENTKIT_EXPORT +# define OSVR_CLIENTKIT_EXPORT +#endif + +/* fallback def */ +#ifndef OSVR_CLIENTKIT_NO_EXPORT +# define OSVR_CLIENTKIT_NO_EXPORT +#endif + +/* fallback def */ +#ifndef OSVR_CLIENTKIT_DEPRECATED_EXPORT +# define OSVR_CLIENTKIT_DEPRECATED_EXPORT OSVR_CLIENTKIT_EXPORT OSVR_CLIENTKIT_DEPRECATED +#endif + +/* fallback def */ +#ifndef OSVR_CLIENTKIT_DEPRECATED_NO_EXPORT +# define OSVR_CLIENTKIT_DEPRECATED_NO_EXPORT OSVR_CLIENTKIT_NO_EXPORT OSVR_CLIENTKIT_DEPRECATED +#endif + +/* Clean up after ourselves */ +#undef OSVR_CLIENTKIT_EXPORT_HEADER_SUPPORTS_ATTRIBUTES + +#endif diff --git a/gfx/vr/osvr/ClientKit/InterfaceC.h b/gfx/vr/osvr/ClientKit/InterfaceC.h new file mode 100644 index 000000000..728350536 --- /dev/null +++ b/gfx/vr/osvr/ClientKit/InterfaceC.h @@ -0,0 +1,75 @@ +/** @file + @brief 
Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_InterfaceC_h_GUID_D90BBAA6_AD62_499D_C023_2F6ED8987C17 +#define INCLUDED_InterfaceC_h_GUID_D90BBAA6_AD62_499D_C023_2F6ED8987C17 + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/AnnotationMacrosC.h> +#include <osvr/Util/ClientOpaqueTypesC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN +/** @addtogroup ClientKit +@{ +*/ + +/** @brief Get the interface associated with the given path. + @param ctx Client context + @param path A resource path (null-terminated string) + @param[out] iface The interface object. May be freed when no longer needed, + otherwise it will be freed when the context is closed. +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientGetInterface(OSVR_ClientContext ctx, const char path[], + OSVR_ClientInterface *iface); + +/** @brief Free an interface object before context closure. + + @param ctx Client context + @param iface The interface object + + @returns OSVR_RETURN_SUCCESS unless a null context or interface was passed + or the given interface was not found in the context (i.e. had already been + freed) +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientFreeInterface(OSVR_ClientContext ctx, OSVR_ClientInterface iface); + +/** @} */ +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/ClientKit/InterfaceCallbackC.h b/gfx/vr/osvr/ClientKit/InterfaceCallbackC.h new file mode 100644 index 000000000..dde1cef97 --- /dev/null +++ b/gfx/vr/osvr/ClientKit/InterfaceCallbackC.h @@ -0,0 +1,77 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_InterfaceCallbacksC_h_GUID_8F16E6CB_F998_4ABC_5B6B_4FC1E4B71BC9 +#define INCLUDED_InterfaceCallbacksC_h_GUID_8F16E6CB_F998_4ABC_5B6B_4FC1E4B71BC9 + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/AnnotationMacrosC.h> +#include <osvr/Util/ClientOpaqueTypesC.h> +#include <osvr/Util/ClientCallbackTypesC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +#define OSVR_INTERFACE_CALLBACK_METHOD(TYPE) \ + /** @brief Register a callback for TYPE reports on an interface */ \ + OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrRegister##TYPE##Callback( \ + OSVR_ClientInterface iface, OSVR_##TYPE##Callback cb, void *userdata); + +OSVR_INTERFACE_CALLBACK_METHOD(Pose) +OSVR_INTERFACE_CALLBACK_METHOD(Position) +OSVR_INTERFACE_CALLBACK_METHOD(Orientation) +OSVR_INTERFACE_CALLBACK_METHOD(Velocity) +OSVR_INTERFACE_CALLBACK_METHOD(LinearVelocity) +OSVR_INTERFACE_CALLBACK_METHOD(AngularVelocity) +OSVR_INTERFACE_CALLBACK_METHOD(Acceleration) +OSVR_INTERFACE_CALLBACK_METHOD(LinearAcceleration) +OSVR_INTERFACE_CALLBACK_METHOD(AngularAcceleration) +OSVR_INTERFACE_CALLBACK_METHOD(Button) +OSVR_INTERFACE_CALLBACK_METHOD(Analog) +OSVR_INTERFACE_CALLBACK_METHOD(Imaging) +OSVR_INTERFACE_CALLBACK_METHOD(Location2D) +OSVR_INTERFACE_CALLBACK_METHOD(Direction) +OSVR_INTERFACE_CALLBACK_METHOD(EyeTracker2D) +OSVR_INTERFACE_CALLBACK_METHOD(EyeTracker3D) +OSVR_INTERFACE_CALLBACK_METHOD(EyeTrackerBlink) +OSVR_INTERFACE_CALLBACK_METHOD(NaviVelocity) +OSVR_INTERFACE_CALLBACK_METHOD(NaviPosition) + +#undef OSVR_INTERFACE_CALLBACK_METHOD + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/ClientKit/InterfaceStateC.h b/gfx/vr/osvr/ClientKit/InterfaceStateC.h new file mode 100644 index 000000000..edf9f085c --- /dev/null +++ b/gfx/vr/osvr/ClientKit/InterfaceStateC.h @@ -0,0 +1,79 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_InterfaceStateC_h_GUID_8F85D178_74B9_4AA9_4E9E_243089411408 +#define INCLUDED_InterfaceStateC_h_GUID_8F85D178_74B9_4AA9_4E9E_243089411408 + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/AnnotationMacrosC.h> +#include <osvr/Util/ClientOpaqueTypesC.h> +#include <osvr/Util/ClientReportTypesC.h> +#include <osvr/Util/TimeValueC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +#define OSVR_CALLBACK_METHODS(TYPE) \ + /** @brief Get TYPE state from an interface, returning failure if none \ + * exists */ \ + OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode osvrGet##TYPE##State( \ + OSVR_ClientInterface iface, struct OSVR_TimeValue *timestamp, \ + OSVR_##TYPE##State *state); + +OSVR_CALLBACK_METHODS(Pose) +OSVR_CALLBACK_METHODS(Position) +OSVR_CALLBACK_METHODS(Orientation) +OSVR_CALLBACK_METHODS(Velocity) +OSVR_CALLBACK_METHODS(LinearVelocity) +OSVR_CALLBACK_METHODS(AngularVelocity) +OSVR_CALLBACK_METHODS(Acceleration) +OSVR_CALLBACK_METHODS(LinearAcceleration) +OSVR_CALLBACK_METHODS(AngularAcceleration) +OSVR_CALLBACK_METHODS(Button) +OSVR_CALLBACK_METHODS(Analog) +OSVR_CALLBACK_METHODS(Location2D) +OSVR_CALLBACK_METHODS(Direction) +OSVR_CALLBACK_METHODS(EyeTracker2D) +OSVR_CALLBACK_METHODS(EyeTracker3D) +OSVR_CALLBACK_METHODS(EyeTrackerBlink) +OSVR_CALLBACK_METHODS(NaviVelocity) +OSVR_CALLBACK_METHODS(NaviPosition) + +#undef OSVR_CALLBACK_METHODS + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/ClientKit/SystemCallbackC.h b/gfx/vr/osvr/ClientKit/SystemCallbackC.h new file mode 100644 index 000000000..2476d5f21 --- /dev/null +++ b/gfx/vr/osvr/ClientKit/SystemCallbackC.h @@ -0,0 +1,47 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_SystemCallbackC_h_GUID_543F3F04_343E_4389_08A0_DEA988EC23F7 +#define INCLUDED_SystemCallbackC_h_GUID_543F3F04_343E_4389_08A0_DEA988EC23F7 + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/AnnotationMacrosC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/ClientKit/TransformsC.h b/gfx/vr/osvr/ClientKit/TransformsC.h new file mode 100644 index 000000000..183497dfd --- /dev/null +++ b/gfx/vr/osvr/ClientKit/TransformsC.h @@ -0,0 +1,75 @@ +/** @file + @brief Header controlling the OSVR transformation hierarchy + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_TransformsC_h_GUID_5B5B7438_42D4_4095_E48A_90E2CC13498E +#define INCLUDED_TransformsC_h_GUID_5B5B7438_42D4_4095_E48A_90E2CC13498E + +/* Internal Includes */ +#include <osvr/ClientKit/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/ReturnCodesC.h> +#include <osvr/Util/ClientOpaqueTypesC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup ClientKit + @{ +*/ + +/** @brief Updates the internal "room to world" transformation (applied to all + tracker data for this client context instance) based on the user's head + orientation, so that the direction the user is facing becomes -Z to your + application. Only rotates about the Y axis (yaw). + + Note that this method internally calls osvrClientUpdate() to get a head pose + so your callbacks may be called during its execution! + + @param ctx Client context +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientSetRoomRotationUsingHead(OSVR_ClientContext ctx); + +/** @brief Clears/resets the internal "room to world" transformation back to an + identity transformation - that is, clears the effect of any other + manipulation of the room to world transform. + + @param ctx Client context +*/ +OSVR_CLIENTKIT_EXPORT OSVR_ReturnCode +osvrClientClearRoomToWorldTransform(OSVR_ClientContext ctx); + +/** @} */ +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/APIBaseC.h b/gfx/vr/osvr/Util/APIBaseC.h new file mode 100644 index 000000000..4abe38550 --- /dev/null +++ b/gfx/vr/osvr/Util/APIBaseC.h @@ -0,0 +1,50 @@ +/** @file + @brief Header providing basic C macros for defining API headers. + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_APIBaseC_h_GUID_C5A2E769_2ADC_429E_D250_DF0883E6E5DB +#define INCLUDED_APIBaseC_h_GUID_C5A2E769_2ADC_429E_D250_DF0883E6E5DB + +#ifdef __cplusplus +#define OSVR_C_ONLY(X) +#define OSVR_CPP_ONLY(X) X +#define OSVR_EXTERN_C_BEGIN extern "C" { +#define OSVR_EXTERN_C_END } +#define OSVR_INLINE inline +#else +#define OSVR_C_ONLY(X) X +#define OSVR_CPP_ONLY(X) +#define OSVR_EXTERN_C_BEGIN +#define OSVR_EXTERN_C_END +#ifdef _MSC_VER +#define OSVR_INLINE static __inline +#else +#define OSVR_INLINE static inline +#endif +#endif + +#endif diff --git a/gfx/vr/osvr/Util/AnnotationMacrosC.h b/gfx/vr/osvr/Util/AnnotationMacrosC.h new file mode 100644 index 000000000..e086608c1 --- /dev/null +++ b/gfx/vr/osvr/Util/AnnotationMacrosC.h @@ -0,0 +1,232 @@ +/** @file + @brief Header containing macros for source-level annotation. + + In theory, supporting MSVC SAL, as well as compatible GCC and + Clang attributes. In practice, expanded as time allows and requires. + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_AnnotationMacrosC_h_GUID_48538D9B_35E3_4E9A_D2B0_D83D51DD5900 +#define INCLUDED_AnnotationMacrosC_h_GUID_48538D9B_35E3_4E9A_D2B0_D83D51DD5900 + +#ifndef OSVR_DISABLE_ANALYSIS + +#if defined(_MSC_VER) && (_MSC_VER >= 1700) +/* Visual C++ (2012 and newer) */ +/* Using SAL attribute format: + * http://msdn.microsoft.com/en-us/library/ms182032(v=vs.120).aspx */ + +#include <sal.h> + +#define OSVR_IN _In_ +#define OSVR_IN_PTR _In_ +#define OSVR_IN_OPT _In_opt_ +#define OSVR_IN_STRZ _In_z_ +#define OSVR_IN_READS(NUM_ELEMENTS) _In_reads_(NUM_ELEMENTS) + +#define OSVR_OUT _Out_ +#define OSVR_OUT_PTR _Outptr_ +#define OSVR_OUT_OPT _Out_opt_ + +#define OSVR_INOUT _Inout_ +#define OSVR_INOUT_PTR _Inout_ + +#define OSVR_RETURN_WARN_UNUSED _Must_inspect_result_ +#define OSVR_RETURN_SUCCESS_CONDITION(X) _Return_type_success_(X) + +/* end of msvc section */ +#elif defined(__GNUC__) && (__GNUC__ >= 4) +/* section for GCC and GCC-alikes */ + +#if defined(__clang__) +/* clang-specific section */ +#endif + +#define OSVR_FUNC_NONNULL(X) __attribute__((__nonnull__ X)) +#define OSVR_RETURN_WARN_UNUSED __attribute__((warn_unused_result)) + +/* end of gcc section and compiler detection */ +#endif + +/* end of ndef disable analysis */ +#endif + +/* Fallback declarations */ +/** +@defgroup annotation_macros Static analysis annotation macros +@brief Wrappers for Microsoft's SAL annotations and others +@ingroup Util + +Use of these is optional, but recommended particularly for C APIs, +as well as any methods handling a buffer with a length. +@{ +*/ +/** @name Parameter annotations + + These indicate the role and valid values for parameters to functions. + + At most one of these should be placed before a parameter's type name in the + function parameter list, in both the declaration and definition. (They must + match!) 
+ @{ +*/ +/** @def OSVR_IN + @brief Indicates a required function parameter that serves only as input. +*/ +#ifndef OSVR_IN +#define OSVR_IN +#endif + +/** @def OSVR_IN_PTR + @brief Indicates a required pointer (non-null) function parameter that + serves only as input. +*/ +#ifndef OSVR_IN_PTR +#define OSVR_IN_PTR +#endif + +/** @def OSVR_IN_OPT + @brief Indicates a function parameter (pointer) that serves only as input, + but is optional and might be NULL. +*/ +#ifndef OSVR_IN_OPT +#define OSVR_IN_OPT +#endif + +/** @def OSVR_IN_STRZ + @brief Indicates a null-terminated string function parameter that serves + only as input. +*/ +#ifndef OSVR_IN_STRZ +#define OSVR_IN_STRZ +#endif + +/** @def OSVR_IN_READS(NUM_ELEMENTS) + @brief Indicates a buffer containing input with the specified number of + elements. + + The specified number of elements is typically the name of another parameter. +*/ +#ifndef OSVR_IN_READS +#define OSVR_IN_READS(NUM_ELEMENTS) +#endif + +/** @def OSVR_OUT + @brief Indicates a required function parameter that serves only as output. + In C code, since this usually means "pointer", you probably want + OSVR_OUT_PTR instead. +*/ +#ifndef OSVR_OUT +#define OSVR_OUT +#endif + +/** @def OSVR_OUT_PTR + @brief Indicates a required pointer (non-null) function parameter that + serves only as output. +*/ +#ifndef OSVR_OUT_PTR +#define OSVR_OUT_PTR +#endif + +/** @def OSVR_OUT_OPT + @brief Indicates a function parameter (pointer) that serves only as output, + but is optional and might be NULL +*/ +#ifndef OSVR_OUT_OPT +#define OSVR_OUT_OPT +#endif + +/** @def OSVR_INOUT + @brief Indicates a required function parameter that is both read and written + to. + + In C code, since this usually means "pointer", you probably want + OSVR_INOUT_PTR instead. +*/ +#ifndef OSVR_INOUT +#define OSVR_INOUT +#endif + +/** @def OSVR_INOUT_PTR + @brief Indicates a required pointer (non-null) function parameter that is + both read and written to. +*/ +#ifndef OSVR_INOUT_PTR +#define OSVR_INOUT_PTR +#endif + +/* End of parameter annotations. */ +/** @} */ + +/** @name Function annotations + + These indicate particular relevant aspects about a function. Some + duplicate the effective meaning of parameter annotations: applying both + allows the fullest extent of static analysis tools to analyze the code, + and in some compilers, generate warnings. + + @{ +*/ +/** @def OSVR_FUNC_NONNULL(X) + @brief Indicates the parameter(s) that must be non-null. + + @param X A parenthesized list of parameters by number (1-based index) + + Should be placed after a function declaration (but before the + semicolon). Repeating in the definition is not needed. +*/ +#ifndef OSVR_FUNC_NONNULL +#define OSVR_FUNC_NONNULL(X) +#endif + +/** @def OSVR_RETURN_WARN_UNUSED + @brief Indicates the function has a return value that must be used (either a + security problem or an obvious bug if not). + + Should be placed before the return value (and virtual keyword, if + applicable) in both declaration and definition. +*/ +#ifndef OSVR_RETURN_WARN_UNUSED +#define OSVR_RETURN_WARN_UNUSED +#endif +/* End of function annotations. */ +/** @} */ + +/** @def OSVR_RETURN_SUCCESS_CONDITION + @brief Applied to a typedef, indicates the condition for `return` under + which a function returning it should be considered to have succeeded (thus + holding certain specifications). + + Should be placed before the typename in a typedef, with the parameter + including the keyword `return` to substitute for the return value. 
+*/ +#ifndef OSVR_RETURN_SUCCESS_CONDITION +#define OSVR_RETURN_SUCCESS_CONDITION(X) +#endif + +/* End of annotation group. */ +/** @} */ +#endif diff --git a/gfx/vr/osvr/Util/BoolC.h b/gfx/vr/osvr/Util/BoolC.h new file mode 100644 index 000000000..b50ec7cfd --- /dev/null +++ b/gfx/vr/osvr/Util/BoolC.h @@ -0,0 +1,59 @@ +/** @file + @brief Header providing a C-safe "bool" type, because we can't depend on + Visual Studio providing proper C99 support in external-facing APIs. + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_BoolC_h_GUID_4F97BE90_2758_4BA5_B0FC_0CA92DEBA210 +#define INCLUDED_BoolC_h_GUID_4F97BE90_2758_4BA5_B0FC_0CA92DEBA210 + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/StdInt.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN +/** @addtogroup Util +@{ +*/ + +/** @brief A pre-C99-safe bool type. Canonical values for true and false are + * provided. Interpretation of other values is not defined. */ +typedef uint8_t OSVR_CBool; +/** @brief Canonical "true" value for OSVR_CBool */ +#define OSVR_TRUE (1) +/** @brief Canonical "false" value for OSVR_CBool */ +#define OSVR_FALSE (0) + +/** @} */ +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/ChannelCountC.h b/gfx/vr/osvr/Util/ChannelCountC.h new file mode 100644 index 000000000..dc49b3b17 --- /dev/null +++ b/gfx/vr/osvr/Util/ChannelCountC.h @@ -0,0 +1,57 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_ChannelCountC_h_GUID_CF7E5EE7_28B0_4B99_E823_DD701904B5D1 +#define INCLUDED_ChannelCountC_h_GUID_CF7E5EE7_28B0_4B99_E823_DD701904B5D1 + +/* Internal Includes */ +#include <osvr/Util/StdInt.h> +#include <osvr/Util/APIBaseC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup PluginKit +@{ +*/ + +/** @brief The integer type specifying a number of channels/sensors or a +channel/sensor index. 
+*/ +typedef uint32_t OSVR_ChannelCount; + +/** @} */ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/ClientCallbackTypesC.h b/gfx/vr/osvr/Util/ClientCallbackTypesC.h new file mode 100644 index 000000000..ae17381dc --- /dev/null +++ b/gfx/vr/osvr/Util/ClientCallbackTypesC.h @@ -0,0 +1,140 @@ +/** @file
+ @brief Header
+
+ Must be c-safe!
+
+ GENERATED - do not edit by hand!
+
+ @date 2014
+
+ @author
+ Sensics, Inc.
+ <http://sensics.com/osvr>
+*/
+
+/*
+// Copyright 2014 Sensics, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+*/
+
+#ifndef INCLUDED_ClientCallbackTypesC_h_GUID_4D43A675_C8A4_4BBF_516F_59E6C785E4EF
+#define INCLUDED_ClientCallbackTypesC_h_GUID_4D43A675_C8A4_4BBF_516F_59E6C785E4EF
+
+/* Internal Includes */
+#include <osvr/Util/ClientReportTypesC.h>
+#include <osvr/Util/ImagingReportTypesC.h>
+#include <osvr/Util/ReturnCodesC.h>
+#include <osvr/Util/TimeValueC.h>
+
+/* Library/third-party includes */
+/* none */
+
+/* Standard includes */
+/* none */
+
+OSVR_EXTERN_C_BEGIN
+
+/** @addtogroup ClientKit
+ @{
+*/
+
+/** @name Report callback types
+ @{
+*/
+
+/* generated file - do not edit! */
+/** @brief C function type for a Pose callback */
+typedef void (*OSVR_PoseCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_PoseReport *report);
+/** @brief C function type for a Position callback */
+typedef void (*OSVR_PositionCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_PositionReport *report);
+/** @brief C function type for an Orientation callback */
+typedef void (*OSVR_OrientationCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_OrientationReport *report);
+/** @brief C function type for a Velocity callback */
+typedef void (*OSVR_VelocityCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_VelocityReport *report);
+/** @brief C function type for a LinearVelocity callback */
+typedef void (*OSVR_LinearVelocityCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_LinearVelocityReport *report);
+/** @brief C function type for an AngularVelocity callback */
+typedef void (*OSVR_AngularVelocityCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_AngularVelocityReport *report);
+/** @brief C function type for an Acceleration callback */
+typedef void (*OSVR_AccelerationCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_AccelerationReport *report);
+/** @brief C function type for a LinearAcceleration callback */
+typedef void (*OSVR_LinearAccelerationCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_LinearAccelerationReport *report);
+/** @brief C function type for an AngularAcceleration callback */
+typedef void (*OSVR_AngularAccelerationCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_AngularAccelerationReport *report);
+/** @brief C function type for a Button callback */
+typedef void (*OSVR_ButtonCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_ButtonReport *report);
+/** @brief C function type for an Analog callback */
+typedef void (*OSVR_AnalogCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_AnalogReport *report);
+/** @brief C function type for an Imaging callback */
+typedef void (*OSVR_ImagingCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_ImagingReport *report);
+/** @brief C function type for a Location2D callback */
+typedef void (*OSVR_Location2DCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_Location2DReport *report);
+/** @brief C function type for a Direction callback */
+typedef void (*OSVR_DirectionCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_DirectionReport *report);
+/** @brief C function type for an EyeTracker2D callback */
+typedef void (*OSVR_EyeTracker2DCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_EyeTracker2DReport *report);
+/** @brief C function type for an EyeTracker3D callback */
+typedef void (*OSVR_EyeTracker3DCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_EyeTracker3DReport *report);
+/** @brief C function type for an EyeTrackerBlink callback */
+typedef void (*OSVR_EyeTrackerBlinkCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_EyeTrackerBlinkReport *report);
+/** @brief C function type for a NaviVelocity callback */
+typedef void (*OSVR_NaviVelocityCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_NaviVelocityReport *report);
+/** @brief C function type for a NaviPosition callback */
+typedef void (*OSVR_NaviPositionCallback)(void *userdata,
+ const struct OSVR_TimeValue *timestamp,
+ const struct OSVR_NaviPositionReport *report);
+
+/** @} */
+
+/** @} */
+
+OSVR_EXTERN_C_END
+
+#endif
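The callback types declared above pair with the registration functions from ClientKit/InterfaceCallbackC.h and the opaque context/interface handles from Util/ClientOpaqueTypesC.h. The following minimal sketch (not part of the imported sources) shows how they fit together. It assumes a running OSVR server, that the path "/me/head" resolves to a tracker, and it uses osvrClientInit(), osvrClientUpdate() and osvrClientShutdown() from osvr/ClientKit/ContextC.h, which is not shown in this excerpt; the OSVR_Vec3 layout (a double data[3] member) is taken from Util/Vec3C.h, likewise not shown here. The application identifier string is a placeholder.

    #include <stdio.h>
    #include <osvr/ClientKit/ContextC.h>            /* context init/update/shutdown (assumed available) */
    #include <osvr/ClientKit/InterfaceC.h>           /* osvrClientGetInterface */
    #include <osvr/ClientKit/InterfaceCallbackC.h>   /* osvrRegisterPoseCallback */

    /* Signature matches OSVR_PoseCallback from ClientCallbackTypesC.h. */
    static void onHeadPose(void *userdata, const struct OSVR_TimeValue *timestamp,
                           const struct OSVR_PoseReport *report) {
        (void)userdata;
        (void)timestamp;
        /* OSVR_Pose3 carries a translation vector and a rotation quaternion. */
        printf("sensor %d: x = %f\n", (int)report->sensor,
               report->pose.translation.data[0]);
    }

    int main(void) {
        OSVR_ClientContext ctx = osvrClientInit("org.example.PoseCallbackSketch", 0);
        OSVR_ClientInterface head = NULL;
        int i;
        if (osvrClientGetInterface(ctx, "/me/head", &head) != OSVR_RETURN_SUCCESS) {
            osvrClientShutdown(ctx);
            return 1;
        }
        osvrRegisterPoseCallback(head, &onHeadPose, NULL);
        for (i = 0; i < 1000; ++i) {
            osvrClientUpdate(ctx); /* registered callbacks fire from inside update */
        }
        return osvrClientShutdown(ctx) == OSVR_RETURN_SUCCESS ? 0 : 1;
    }
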
diff --git a/gfx/vr/osvr/Util/ClientOpaqueTypesC.h b/gfx/vr/osvr/Util/ClientOpaqueTypesC.h new file mode 100644 index 000000000..64eba6d61 --- /dev/null +++ b/gfx/vr/osvr/Util/ClientOpaqueTypesC.h @@ -0,0 +1,69 @@ +/** @file + @brief Header declaring opaque types used by @ref Client and @ref ClientKit + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_ClientOpaqueTypesC_h_GUID_24B79ED2_5751_4BA2_1690_BBD250EBC0C1 +#define INCLUDED_ClientOpaqueTypesC_h_GUID_24B79ED2_5751_4BA2_1690_BBD250EBC0C1 + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup ClientKit + @{ +*/ +/** @brief Opaque handle that should be retained by your application. You need + only and exactly one. + + Created by osvrClientInit() at application start. + + You are required to clean up this handle with osvrClientShutdown(). +*/ +typedef struct OSVR_ClientContextObject *OSVR_ClientContext; + +/** @brief Opaque handle to an interface used for registering callbacks and + getting status. + + You are not required to clean up this handle (it will be automatically + cleaned up when the context is), but you can if you are no longer using it, + using osvrClientFreeInterface() to inform the context that you no longer need + this interface. +*/ +typedef struct OSVR_ClientInterfaceObject *OSVR_ClientInterface; + +/** @} */ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/ClientReportTypesC.h b/gfx/vr/osvr/Util/ClientReportTypesC.h new file mode 100644 index 000000000..85fa5a5a1 --- /dev/null +++ b/gfx/vr/osvr/Util/ClientReportTypesC.h @@ -0,0 +1,348 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_ClientReportTypesC_h_GUID_E79DAB07_78B7_4795_1EB9_CA6EEB274AEE +#define INCLUDED_ClientReportTypesC_h_GUID_E79DAB07_78B7_4795_1EB9_CA6EEB274AEE + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/Pose3C.h> +#include <osvr/Util/StdInt.h> + +#include <osvr/Util/Vec2C.h> +#include <osvr/Util/Vec3C.h> +#include <osvr/Util/ChannelCountC.h> +#include <osvr/Util/BoolC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup ClientKit + @{ +*/ + +/** @name State types +@{ +*/ +/** @brief Type of position state */ +typedef OSVR_Vec3 OSVR_PositionState; + +/** @brief Type of orientation state */ +typedef OSVR_Quaternion OSVR_OrientationState; + +/** @brief Type of pose state */ +typedef OSVR_Pose3 OSVR_PoseState; + +/** @brief Type of linear velocity state */ +typedef OSVR_Vec3 OSVR_LinearVelocityState; + +/** @brief The quaternion represents the incremental rotation taking place over + a period of dt seconds. Use of dt (which does not necessarily + have to be 1, as other velocity/acceleration representations imply) and an + incremental quaternion allows device reports to be scaled to avoid aliasing +*/ +typedef struct OSVR_IncrementalQuaternion { + OSVR_Quaternion incrementalRotation; + double dt; +} OSVR_IncrementalQuaternion; + +/** @brief Type of angular velocity state: an incremental quaternion, providing + the incremental rotation taking place due to velocity over a period of dt + seconds. +*/ +typedef OSVR_IncrementalQuaternion OSVR_AngularVelocityState; + +/** @brief Struct for combined velocity state */ +typedef struct OSVR_VelocityState { + OSVR_LinearVelocityState linearVelocity; + /** @brief Whether the data source reports valid data for + #OSVR_VelocityState::linearVelocity */ + OSVR_CBool linearVelocityValid; + + OSVR_AngularVelocityState angularVelocity; + /** @brief Whether the data source reports valid data for + #OSVR_VelocityState::angularVelocity */ + OSVR_CBool angularVelocityValid; +} OSVR_VelocityState; + +/** @brief Type of linear acceleration state */ +typedef OSVR_Vec3 OSVR_LinearAccelerationState; + +/** @brief Type of angular acceleration state +*/ +typedef OSVR_IncrementalQuaternion OSVR_AngularAccelerationState; + +/** @brief Struct for combined acceleration state */ +typedef struct OSVR_AccelerationState { + OSVR_LinearAccelerationState linearAcceleration; + /** @brief Whether the data source reports valid data for + #OSVR_AccelerationState::linearAcceleration */ + OSVR_CBool linearAccelerationValid; + + OSVR_AngularAccelerationState angularAcceleration; + /** @brief Whether the data source reports valid data for + #OSVR_AccelerationState::angularAcceleration */ + OSVR_CBool angularAccelerationValid; +} OSVR_AccelerationState; + +/** @brief Type of button state */ +typedef uint8_t OSVR_ButtonState; + +/** @brief OSVR_ButtonState value indicating "button down" */ +#define OSVR_BUTTON_PRESSED (1) + +/** @brief OSVR_ButtonState value indicating "button up" */ +#define OSVR_BUTTON_NOT_PRESSED (0) + +/** @brief Type of analog channel state */ +typedef double OSVR_AnalogState; + +/** @} */ + +/** @name Report types + @{ +*/ +/** @brief Report type for a position callback on a tracker interface */ +typedef struct OSVR_PositionReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The position vector */ + OSVR_PositionState xyz; +} OSVR_PositionReport; + +/** @brief Report type for an orientation 
callback on a tracker interface */ +typedef struct OSVR_OrientationReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The rotation unit quaternion */ + OSVR_OrientationState rotation; +} OSVR_OrientationReport; + +/** @brief Report type for a pose (position and orientation) callback on a + tracker interface +*/ +typedef struct OSVR_PoseReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The pose structure, containing a position vector and a rotation + quaternion + */ + OSVR_PoseState pose; +} OSVR_PoseReport; + +/** @brief Report type for a velocity (linear and angular) callback on a + tracker interface +*/ +typedef struct OSVR_VelocityReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The data state - note that not all fields are neccesarily valid, + use the `Valid` members to check the status of the other fields. + */ + OSVR_VelocityState state; +} OSVR_VelocityReport; + +/** @brief Report type for a linear velocity callback on a tracker interface +*/ +typedef struct OSVR_LinearVelocityReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The state itself */ + OSVR_LinearVelocityState state; +} OSVR_LinearVelocityReport; + +/** @brief Report type for an angular velocity callback on a tracker interface +*/ +typedef struct OSVR_AngularVelocityReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The state itself */ + OSVR_AngularVelocityState state; +} OSVR_AngularVelocityReport; + +/** @brief Report type for an acceleration (linear and angular) callback on a + tracker interface +*/ +typedef struct OSVR_AccelerationReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The data state - note that not all fields are neccesarily valid, + use the `Valid` members to check the status of the other fields. + */ + OSVR_AccelerationState state; +} OSVR_AccelerationReport; + +/** @brief Report type for a linear acceleration callback on a tracker interface +*/ +typedef struct OSVR_LinearAccelerationReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The state itself */ + OSVR_LinearAccelerationState state; +} OSVR_LinearAccelerationReport; + +/** @brief Report type for an angular acceleration callback on a tracker + interface +*/ +typedef struct OSVR_AngularAccelerationReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The state itself */ + OSVR_AngularAccelerationState state; +} OSVR_AngularAccelerationReport; + +/** @brief Report type for a callback on a button interface */ +typedef struct OSVR_ButtonReport { + /** @brief Identifies the sensor that the report comes from */ + int32_t sensor; + /** @brief The button state: 1 is pressed, 0 is not pressed. */ + OSVR_ButtonState state; +} OSVR_ButtonReport; + +/** @brief Report type for a callback on an analog interface */ +typedef struct OSVR_AnalogReport { + /** @brief Identifies the sensor/channel that the report comes from */ + int32_t sensor; + /** @brief The analog state. 
*/ + OSVR_AnalogState state; +} OSVR_AnalogReport; + +/** @brief Type of location within a 2D region/surface, in normalized + coordinates (in range [0, 1] in standard OSVR coordinate system) +*/ +typedef OSVR_Vec2 OSVR_Location2DState; + +/** @brief Report type for 2D location */ +typedef struct OSVR_Location2DReport { + OSVR_ChannelCount sensor; + OSVR_Location2DState location; +} OSVR_Location2DReport; + +/** @brief Type of unit directional vector in 3D with no particular origin */ +typedef OSVR_Vec3 OSVR_DirectionState; + +/** @brief Report type for 3D Direction vector */ +typedef struct OSVR_DirectionReport { + OSVR_ChannelCount sensor; + OSVR_DirectionState direction; +} OSVR_DirectionReport; + +/** @brief Type of eye gaze direction in 3D which contains 3D vector (position) + containing gaze base point of the user's respective eye in 3D device + coordinates. +*/ +typedef OSVR_PositionState OSVR_EyeGazeBasePoint3DState; + +/** @brief Type of eye gaze position in 2D which contains users's gaze/point of + regard in normalized display coordinates (in range [0, 1] in standard OSVR + coordinate system) +*/ +typedef OSVR_Location2DState OSVR_EyeGazePosition2DState; + +// typedef OSVR_DirectionState OSVR_EyeGazeBasePoint3DState; + +/** @brief Type of 3D vector (direction vector) containing the normalized gaze + direction of user's respective eye */ +typedef OSVR_DirectionState OSVR_EyeGazeDirectionState; + +/** @brief State for 3D gaze report */ +typedef struct OSVR_EyeTracker3DState { + OSVR_CBool directionValid; + OSVR_DirectionState direction; + OSVR_CBool basePointValid; + OSVR_PositionState basePoint; +} OSVR_EyeTracker3DState; + +/** @brief Report type for 3D gaze report */ +typedef struct OSVR_EyeTracker3DReport { + OSVR_ChannelCount sensor; + OSVR_EyeTracker3DState state; +} OSVR_EyeTracker3DReport; + +/** @brief State for 2D location report */ +typedef OSVR_Location2DState OSVR_EyeTracker2DState; + +/** @brief Report type for 2D location report */ +typedef struct OSVR_EyeTracker2DReport { + OSVR_ChannelCount sensor; + OSVR_EyeTracker2DState state; +} OSVR_EyeTracker2DReport; + +/** @brief State for a blink event */ +typedef OSVR_ButtonState OSVR_EyeTrackerBlinkState; + +/** @brief OSVR_EyeTrackerBlinkState value indicating an eyes blink had occurred + */ +#define OSVR_EYE_BLINK (1) + +/** @brief OSVR_EyeTrackerBlinkState value indicating eyes are not blinking */ +#define OSVR_EYE_NO_BLINK (0) + +/** @brief Report type for a blink event */ +typedef struct OSVR_EyeTrackerBlinkReport { + OSVR_ChannelCount sensor; + OSVR_EyeTrackerBlinkState state; +} OSVR_EyeTrackerBlinkReport; + +/** @brief Report type for an Imaging callback (forward declaration) */ +struct OSVR_ImagingReport; + +/** @brief Type of Navigation Velocity state */ +typedef OSVR_Vec2 OSVR_NaviVelocityState; + +/** @brief Type of Navigation Position state */ +typedef OSVR_Vec2 OSVR_NaviPositionState; + +/** @brief Report type for an navigation velocity callback on a tracker + * interface */ +typedef struct OSVR_NaviVelocityReport { + OSVR_ChannelCount sensor; + /** @brief The 2D vector in world coordinate system, in meters/second */ + OSVR_NaviVelocityState state; +} OSVR_NaviVelocityReport; + +/** @brief Report type for an navigation position callback on a tracker + * interface */ +typedef struct OSVR_NaviPositionReport { + OSVR_ChannelCount sensor; + /** @brief The 2D vector in world coordinate system, in meters, relative to + * starting position */ + OSVR_NaviPositionState state; +} OSVR_NaviPositionReport; + +/** @} */ 
+ +/** @} */ +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/Export.h b/gfx/vr/osvr/Util/Export.h new file mode 100644 index 000000000..f3e26b89f --- /dev/null +++ b/gfx/vr/osvr/Util/Export.h @@ -0,0 +1,138 @@ +/** @file + @brief Automatically-generated export header - do not edit! + + @date 2016 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +// Copyright 2016 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +#ifndef OSVR_UTIL_EXPORT_H +#define OSVR_UTIL_EXPORT_H + +#ifdef OSVR_UTIL_STATIC_DEFINE +# define OSVR_UTIL_EXPORT +# define OSVR_UTIL_NO_EXPORT +#endif + +/* Per-compiler advance preventative definition */ +#if defined(__BORLANDC__) || defined(__CODEGEARC__) || defined(__HP_aCC) || \ + defined(__PGI) || defined(__WATCOMC__) +/* Compilers that don't support deprecated, according to CMake. */ +# ifndef OSVR_UTIL_DEPRECATED +# define OSVR_UTIL_DEPRECATED +# endif +#endif + +/* Check for attribute support */ +#if defined(__INTEL_COMPILER) +/* Checking before GNUC because Intel implements GNU extensions, + * so it chooses to define __GNUC__ as well. */ +# if __INTEL_COMPILER >= 1200 +/* Intel compiler 12.0 or newer can handle these attributes per CMake */ +# define OSVR_UTIL_EXPORT_HEADER_SUPPORTS_ATTRIBUTES +# endif + +#elif defined(__GNUC__) +# if (__GNUC__ > 4 || (__GNUC__ == 4 && __GNUC_MINOR__ >= 2)) +/* GCC 4.2+ */ +# define OSVR_UTIL_EXPORT_HEADER_SUPPORTS_ATTRIBUTES +# endif +#endif + +/* Per-platform defines */ +#if defined(_MSC_VER) +/* MSVC on Windows */ + +#ifndef OSVR_UTIL_EXPORT +# ifdef osvrUtil_EXPORTS + /* We are building this library */ +# define OSVR_UTIL_EXPORT __declspec(dllexport) +# else + /* We are using this library */ +# define OSVR_UTIL_EXPORT __declspec(dllimport) +# endif +#endif + +#ifndef OSVR_UTIL_DEPRECATED +# define OSVR_UTIL_DEPRECATED __declspec(deprecated) +#endif + +#elif defined(_WIN32) && defined(__GNUC__) +/* GCC-compatible on Windows */ + +#ifndef OSVR_UTIL_EXPORT +# ifdef osvrUtil_EXPORTS + /* We are building this library */ +# define OSVR_UTIL_EXPORT __attribute__((dllexport)) +# else + /* We are using this library */ +# define OSVR_UTIL_EXPORT __attribute__((dllimport)) +# endif +#endif + +#ifndef OSVR_UTIL_DEPRECATED +# define OSVR_UTIL_DEPRECATED __attribute__((__deprecated__)) +#endif + +#elif defined(OSVR_UTIL_EXPORT_HEADER_SUPPORTS_ATTRIBUTES) || \ + (defined(__APPLE__) && defined(__MACH__)) +/* GCC4.2+ compatible (assuming something *nix-like) and Mac OS X */ +/* (The first macro is defined at the top of the file, if applicable) */ +/* see https://gcc.gnu.org/wiki/Visibility */ + +#ifndef OSVR_UTIL_EXPORT + /* We are building/using this library */ +# define OSVR_UTIL_EXPORT __attribute__((visibility("default"))) +#endif + +#ifndef OSVR_UTIL_NO_EXPORT +# define OSVR_UTIL_NO_EXPORT __attribute__((visibility("hidden"))) +#endif + +#ifndef OSVR_UTIL_DEPRECATED +# define OSVR_UTIL_DEPRECATED __attribute__((__deprecated__)) +#endif + +#endif +/* End of platform ifdefs */ + +/* fallback def */ 
+#ifndef OSVR_UTIL_EXPORT +# define OSVR_UTIL_EXPORT +#endif + +/* fallback def */ +#ifndef OSVR_UTIL_NO_EXPORT +# define OSVR_UTIL_NO_EXPORT +#endif + +/* fallback def */ +#ifndef OSVR_UTIL_DEPRECATED_EXPORT +# define OSVR_UTIL_DEPRECATED_EXPORT OSVR_UTIL_EXPORT OSVR_UTIL_DEPRECATED +#endif + +/* fallback def */ +#ifndef OSVR_UTIL_DEPRECATED_NO_EXPORT +# define OSVR_UTIL_DEPRECATED_NO_EXPORT OSVR_UTIL_NO_EXPORT OSVR_UTIL_DEPRECATED +#endif + +/* Clean up after ourselves */ +#undef OSVR_UTIL_EXPORT_HEADER_SUPPORTS_ATTRIBUTES + +#endif diff --git a/gfx/vr/osvr/Util/ImagingReportTypesC.h b/gfx/vr/osvr/Util/ImagingReportTypesC.h new file mode 100644 index 000000000..1ce8b60d6 --- /dev/null +++ b/gfx/vr/osvr/Util/ImagingReportTypesC.h @@ -0,0 +1,91 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_ImagingReportTypesC_h_GUID_746A7BF8_B92D_4585_CA72_DC5391DEDF24 +#define INCLUDED_ImagingReportTypesC_h_GUID_746A7BF8_B92D_4585_CA72_DC5391DEDF24 + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/StdInt.h> +#include <osvr/Util/ChannelCountC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup ClientKit + @{ +*/ +typedef uint32_t OSVR_ImageDimension; +typedef uint8_t OSVR_ImageChannels; +typedef uint8_t OSVR_ImageDepth; + +/** @brief Type for raw buffer access to image data */ +typedef unsigned char OSVR_ImageBufferElement; + +typedef enum OSVR_ImagingValueType { + OSVR_IVT_UNSIGNED_INT = 0, + OSVR_IVT_SIGNED_INT = 1, + OSVR_IVT_FLOATING_POINT = 2 +} OSVR_ImagingValueType; + +typedef struct OSVR_ImagingMetadata { + /** @brief height in pixels */ + OSVR_ImageDimension height; + /** @brief width in pixels */ + OSVR_ImageDimension width; + /** @brief number of channels of data for each pixel */ + OSVR_ImageChannels channels; + /** @brief the depth (size) in bytes of each channel - valid values are 1, + * 2, 4, and 8 */ + OSVR_ImageDepth depth; + /** @brief Whether values are unsigned ints, signed ints, or floating point + */ + OSVR_ImagingValueType type; + +} OSVR_ImagingMetadata; + +typedef struct OSVR_ImagingState { + OSVR_ImagingMetadata metadata; + OSVR_ImageBufferElement *data; +} OSVR_ImagingState; + +typedef struct OSVR_ImagingReport { + OSVR_ChannelCount sensor; + OSVR_ImagingState state; +} OSVR_ImagingReport; + +/** @} */ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/MatrixConventionsC.h b/gfx/vr/osvr/Util/MatrixConventionsC.h new file mode 100644 index 000000000..b1f2c2797 --- /dev/null +++ b/gfx/vr/osvr/Util/MatrixConventionsC.h @@ -0,0 +1,190 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_MatrixConventionsC_h_GUID_6FC7A4C6_E6C5_4A96_1C28_C3D21B909681 +#define INCLUDED_MatrixConventionsC_h_GUID_6FC7A4C6_E6C5_4A96_1C28_C3D21B909681 + +/* Internal Includes */ +#include <osvr/Util/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/StdInt.h> +#include <osvr/Util/Pose3C.h> +#include <osvr/Util/ReturnCodesC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @defgroup MatrixConvention Matrix conventions and bit flags + @ingroup UtilMath +*/ + +/** @brief Type for passing matrix convention flags. + @ingroup MatrixConvention +*/ +typedef uint16_t OSVR_MatrixConventions; + +#ifndef OSVR_DOXYGEN_EXTERNAL +/** @brief Bitmasks for testing matrix conventions. + @ingroup MatrixConvention +*/ +typedef enum OSVR_MatrixMasks { + OSVR_MATRIX_MASK_ROWMAJOR = 0x1, + OSVR_MATRIX_MASK_ROWVECTORS = 0x2, + OSVR_MATRIX_MASK_LHINPUT = 0x4, + OSVR_MATRIX_MASK_UNSIGNEDZ = 0x8 +} OSVR_MatrixMasks; +#endif + +/** @defgroup MatrixFlags Matrix flags + @ingroup MatrixConvention + + Bit flags for specifying matrix options. Only one option may be specified + per enum, with all the specified options combined with bitwise-or `|`. + + Most methods that take matrix flags only obey ::OSVR_MatrixOrderingFlags and + ::OSVR_MatrixVectorFlags - the flags that affect memory order. The remaining + flags are for use with projection matrix generation API methods. + + @{ +*/ +/** @brief Flag bit controlling output memory order */ +typedef enum OSVR_MatrixOrderingFlags { + /** @brief Column-major memory order (default) */ + OSVR_MATRIX_COLMAJOR = 0x0, + /** @brief Row-major memory order */ + OSVR_MATRIX_ROWMAJOR = OSVR_MATRIX_MASK_ROWMAJOR +} OSVR_MatrixOrderingFlags; + +/** @brief Flag bit controlling expected input to matrices. + (Related to ::OSVR_MatrixOrderingFlags - setting one to non-default results + in an output change, but setting both to non-default results in effectively + no change in the output. If this blows your mind, just ignore this aside and + carry on.) 
+*/ +typedef enum OSVR_MatrixVectorFlags { + /** @brief Matrix transforms column vectors (default) */ + OSVR_MATRIX_COLVECTORS = 0x0, + /** @brief Matrix transforms row vectors */ + OSVR_MATRIX_ROWVECTORS = OSVR_MATRIX_MASK_ROWVECTORS +} OSVR_MatrixVectorFlags; + +/** @brief Flag bit to indicate coordinate system input to projection matrix */ +typedef enum OSVR_ProjectionMatrixInputFlags { + /** @brief Matrix takes vectors from a right-handed coordinate system + (default) */ + OSVR_MATRIX_RHINPUT = 0x0, + /** @brief Matrix takes vectors from a left-handed coordinate system */ + OSVR_MATRIX_LHINPUT = OSVR_MATRIX_MASK_LHINPUT + +} OSVR_ProjectionMatrixInputFlags; + +/** @brief Flag bit to indicate the desired post-projection Z value convention + */ +typedef enum OSVR_ProjectionMatrixZFlags { + /** @brief Matrix maps the near and far planes to signed Z values (in the + range [-1, 1]) (default)*/ + OSVR_MATRIX_SIGNEDZ = 0x0, + /** @brief Matrix maps the near and far planes to unsigned Z values (in the + range [0, 1]) */ + OSVR_MATRIX_UNSIGNEDZ = OSVR_MATRIX_MASK_UNSIGNEDZ +} OSVR_ProjectionMatrixZFlags; +/** @} */ /* end of matrix flags group */ + +enum { + /** @brief Constant for the number of elements in the matrices we use - 4x4. + @ingroup MatrixConvention + */ + OSVR_MATRIX_SIZE = 16 +}; + +/** @addtogroup UtilMath + @{ +*/ +/** @brief Set a matrix of doubles based on a Pose3. + @param pose The Pose3 to convert + @param flags Memory ordering flag - see @ref MatrixFlags + @param[out] mat an array of 16 doubles +*/ +OSVR_UTIL_EXPORT OSVR_ReturnCode osvrPose3ToMatrixd( + OSVR_Pose3 const *pose, OSVR_MatrixConventions flags, double *mat); + +/** @brief Set a matrix of floats based on a Pose3. + @param pose The Pose3 to convert + @param flags Memory ordering flag - see @ref MatrixFlags + @param[out] mat an array of 16 floats +*/ +OSVR_UTIL_EXPORT OSVR_ReturnCode osvrPose3ToMatrixf( + OSVR_Pose3 const *pose, OSVR_MatrixConventions flags, float *mat); +/** @} */ + +OSVR_EXTERN_C_END + +#ifdef __cplusplus +/** @brief Set a matrix based on a Pose3. (C++-only overload - detecting scalar + * type) */ +inline OSVR_ReturnCode osvrPose3ToMatrix(OSVR_Pose3 const *pose, + OSVR_MatrixConventions flags, + double *mat) { + return osvrPose3ToMatrixd(pose, flags, mat); +} + +/** @brief Set a matrix based on a Pose3. (C++-only overload - detecting scalar + * type) */ +inline OSVR_ReturnCode osvrPose3ToMatrix(OSVR_Pose3 const *pose, + OSVR_MatrixConventions flags, + float *mat) { + return osvrPose3ToMatrixf(pose, flags, mat); +} + +/** @brief Set a matrix based on a Pose3. (C++-only overload - detects scalar + * and takes array rather than pointer) */ +template <typename Scalar> +inline OSVR_ReturnCode osvrPose3ToMatrix(OSVR_Pose3 const *pose, + OSVR_MatrixConventions flags, + Scalar mat[OSVR_MATRIX_SIZE]) { + return osvrPose3ToMatrix(pose, flags, &(mat[0])); +} +/** @brief Set a matrix based on a Pose3. (C++-only overload - detects scalar, + * takes array, takes pose by reference) */ +template <typename Scalar> +inline OSVR_ReturnCode osvrPose3ToMatrix(OSVR_Pose3 const &pose, + OSVR_MatrixConventions flags, + Scalar mat[OSVR_MATRIX_SIZE]) { + return osvrPose3ToMatrix(&pose, flags, &(mat[0])); +} + +#endif + +/** @} */ + +#endif diff --git a/gfx/vr/osvr/Util/PlatformConfig.h b/gfx/vr/osvr/Util/PlatformConfig.h new file mode 100644 index 000000000..8342e4f8f --- /dev/null +++ b/gfx/vr/osvr/Util/PlatformConfig.h @@ -0,0 +1,88 @@ +/** @file
+ @brief Auto-configured header
+
+ If this filename ends in `.h`, don't edit it: your edits will
+ be lost next time this file is regenerated!
+
+ Must be c-safe!
+
+ @date 2014
+
+ @author
+ Sensics, Inc.
+ <http://sensics.com/osvr>
+*/
+
+/*
+// Copyright 2014 Sensics, Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+*/
+
+#ifndef INCLUDED_PlatformConfig_h_GUID_0D10E644_8114_4294_A839_699F39E1F0E0
+#define INCLUDED_PlatformConfig_h_GUID_0D10E644_8114_4294_A839_699F39E1F0E0
+
+/** @def OSVR_HAVE_STRUCT_TIMEVAL_IN_WINSOCK2_H
+ @brief Does the system have struct timeval in <winsock2.h>?
+*/
+#define OSVR_HAVE_STRUCT_TIMEVAL_IN_WINSOCK2_H
+
+/** @def OSVR_HAVE_STRUCT_TIMEVAL_IN_SYS_TIME_H
+ @brief Does the system have struct timeval in <sys/time.h>?
+*/
+
+/*
+ MinGW and similar environments have both winsock and sys/time.h, so
+ we hide this define for disambiguation at the top level.
+*/
+#ifndef OSVR_HAVE_STRUCT_TIMEVAL_IN_WINSOCK2_H
+/* #undef OSVR_HAVE_STRUCT_TIMEVAL_IN_SYS_TIME_H */
+#endif
+
+#if defined(OSVR_HAVE_STRUCT_TIMEVAL_IN_SYS_TIME_H) || \
+ defined(OSVR_HAVE_STRUCT_TIMEVAL_IN_WINSOCK2_H)
+#define OSVR_HAVE_STRUCT_TIMEVAL
+#endif
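/* [Editorial sketch, not part of this generated header] Downstream code can
 * key off these macros to obtain struct timeval portably; TimeValueC.h, later
 * in this patch, guards its struct timeval conversion declarations with
 * OSVR_HAVE_STRUCT_TIMEVAL. Roughly: */
#include <osvr/Util/PlatformConfig.h>
#if defined(OSVR_HAVE_STRUCT_TIMEVAL_IN_WINSOCK2_H)
#include <winsock2.h>   /* struct timeval lives in winsock2.h on Windows */
#elif defined(OSVR_HAVE_STRUCT_TIMEVAL_IN_SYS_TIME_H)
#include <sys/time.h>   /* struct timeval lives in sys/time.h on POSIX */
#endif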
+
+/**
+ * Platform-specific variables.
+ *
+ * Prefer testing for specific compiler or platform features instead of relying
+ * on these variables.
+ *
+ */
+//@{
+/* #undef OSVR_AIX */
+/* #undef OSVR_ANDROID */
+/* #undef OSVR_BSDOS */
+/* #undef OSVR_FREEBSD */
+/* #undef OSVR_HPUX */
+/* #undef OSVR_IRIX */
+/* #undef OSVR_LINUX */
+/* #undef OSVR_KFREEBSD */
+/* #undef OSVR_NETBSD */
+/* #undef OSVR_OPENBSD */
+/* #undef OSVR_OFS1 */
+/* #undef OSVR_SCO_SV */
+/* #undef OSVR_UNIXWARE */
+/* #undef OSVR_XENIX */
+/* #undef OSVR_SUNOS */
+/* #undef OSVR_TRU64 */
+/* #undef OSVR_ULTRIX */
+/* #undef OSVR_CYGWIN */
+/* #undef OSVR_MACOSX */
+#define OSVR_WINDOWS
+//@}
+
+#endif
+
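A minimal sketch (editorial, not part of the patch) tying the conversion API declared in MatrixConventionsC.h above to the pose types added in the hunks that follow: set an identity pose and convert it to a row-major float matrix. The function and variable names are illustrative; the bitwise-or combination of flags follows the MatrixFlags documentation, and actually calling osvrPose3ToMatrixf requires linking against the OSVR util library.

#include <osvr/Util/Pose3C.h>
#include <osvr/Util/MatrixConventionsC.h>

static int identityPoseToRowMajorMatrix(void) {
    OSVR_Pose3 pose;
    float mat[OSVR_MATRIX_SIZE];   /* 16 floats, a 4x4 matrix */

    /* Zero translation, identity rotation (from Pose3C.h below) */
    osvrPose3SetIdentity(&pose);

    /* Row-major memory order, transforming row vectors */
    OSVR_ReturnCode ret = osvrPose3ToMatrixf(
        &pose, OSVR_MATRIX_ROWMAJOR | OSVR_MATRIX_ROWVECTORS, mat);
    return (ret == OSVR_RETURN_SUCCESS) ? 0 : 1;
}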
diff --git a/gfx/vr/osvr/Util/Pose3C.h b/gfx/vr/osvr/Util/Pose3C.h new file mode 100644 index 000000000..173428cfc --- /dev/null +++ b/gfx/vr/osvr/Util/Pose3C.h @@ -0,0 +1,70 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_Pose3C_h_GUID_066CFCE2_229C_4194_5D2B_2602CCD5C439 +#define INCLUDED_Pose3C_h_GUID_066CFCE2_229C_4194_5D2B_2602CCD5C439 + +/* Internal Includes */ + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/Vec3C.h> +#include <osvr/Util/QuaternionC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup UtilMath + @{ +*/ + +/** @brief A structure defining a 3D (6DOF) rigid body pose: translation and + rotation. +*/ +typedef struct OSVR_Pose3 { + /** @brief Position vector */ + OSVR_Vec3 translation; + /** @brief Orientation as a unit quaternion */ + OSVR_Quaternion rotation; +} OSVR_Pose3; + +/** @brief Set a pose to identity */ +OSVR_INLINE void osvrPose3SetIdentity(OSVR_Pose3 *pose) { + osvrQuatSetIdentity(&(pose->rotation)); + osvrVec3Zero(&(pose->translation)); +} +/** @} */ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/QuaternionC.h b/gfx/vr/osvr/Util/QuaternionC.h new file mode 100644 index 000000000..8056c89a0 --- /dev/null +++ b/gfx/vr/osvr/Util/QuaternionC.h @@ -0,0 +1,92 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_QuaternionC_h_GUID_1470A5FE_8209_41A6_C19E_46077FDF9C66 +#define INCLUDED_QuaternionC_h_GUID_1470A5FE_8209_41A6_C19E_46077FDF9C66 + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup UtilMath + @{ +*/ +/** @brief A structure defining a quaternion, often a unit quaternion + * representing 3D rotation. 
+*/ +typedef struct OSVR_Quaternion { + /** @brief Internal data - direct access not recommended */ + double data[4]; +} OSVR_Quaternion; + +#define OSVR_QUAT_MEMBER(COMPONENT, INDEX) \ + /** @brief Accessor for quaternion component COMPONENT */ \ + OSVR_INLINE double osvrQuatGet##COMPONENT(OSVR_Quaternion const *q) { \ + return q->data[INDEX]; \ + } \ + /** @brief Setter for quaternion component COMPONENT */ \ + OSVR_INLINE void osvrQuatSet##COMPONENT(OSVR_Quaternion *q, double val) { \ + q->data[INDEX] = val; \ + } + +OSVR_QUAT_MEMBER(W, 0) +OSVR_QUAT_MEMBER(X, 1) +OSVR_QUAT_MEMBER(Y, 2) +OSVR_QUAT_MEMBER(Z, 3) + +#undef OSVR_QUAT_MEMBER + +/** @brief Set a quaternion to the identity rotation */ +OSVR_INLINE void osvrQuatSetIdentity(OSVR_Quaternion *q) { + osvrQuatSetW(q, 1); + osvrQuatSetX(q, 0); + osvrQuatSetY(q, 0); + osvrQuatSetZ(q, 0); +} + +/** @} */ + +OSVR_EXTERN_C_END + +#ifdef __cplusplus +template <typename StreamType> +inline StreamType &operator<<(StreamType &os, OSVR_Quaternion const &quat) { + os << "(" << osvrQuatGetW(&quat) << ", (" << osvrQuatGetX(&quat) << ", " + << osvrQuatGetY(&quat) << ", " << osvrQuatGetZ(&quat) << "))"; + return os; +} +#endif + +#endif diff --git a/gfx/vr/osvr/Util/QuatlibInteropC.h b/gfx/vr/osvr/Util/QuatlibInteropC.h new file mode 100644 index 000000000..ef2a7fe12 --- /dev/null +++ b/gfx/vr/osvr/Util/QuatlibInteropC.h @@ -0,0 +1,84 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+*/ + +#ifndef INCLUDED_QuatlibInteropC_h_GUID_85D92019_F0CC_419C_5F6D_F5A3134AA5D4 +#define INCLUDED_QuatlibInteropC_h_GUID_85D92019_F0CC_419C_5F6D_F5A3134AA5D4 + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/Pose3C.h> + +/* Library/third-party includes */ +#include <quat.h> + +/* Standard includes */ +#include <string.h> + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup UtilMath + @{ +*/ +OSVR_INLINE void osvrQuatToQuatlib(q_type dest, OSVR_Quaternion const *src) { + dest[Q_W] = osvrQuatGetW(src); + dest[Q_X] = osvrQuatGetX(src); + dest[Q_Y] = osvrQuatGetY(src); + dest[Q_Z] = osvrQuatGetZ(src); +} + +OSVR_INLINE void osvrQuatFromQuatlib(OSVR_Quaternion *dest, q_type const src) { + osvrQuatSetW(dest, src[Q_W]); + osvrQuatSetX(dest, src[Q_X]); + osvrQuatSetY(dest, src[Q_Y]); + osvrQuatSetZ(dest, src[Q_Z]); +} + +OSVR_INLINE void osvrVec3ToQuatlib(q_vec_type dest, OSVR_Vec3 const *src) { + memcpy((void *)(dest), (void const *)(src->data), sizeof(double) * 3); +} + +OSVR_INLINE void osvrVec3FromQuatlib(OSVR_Vec3 *dest, q_vec_type const src) { + memcpy((void *)(dest->data), (void const *)(src), sizeof(double) * 3); +} + +OSVR_INLINE void osvrPose3ToQuatlib(q_xyz_quat_type *dest, + OSVR_Pose3 const *src) { + osvrVec3ToQuatlib(dest->xyz, &(src->translation)); + osvrQuatToQuatlib(dest->quat, &(src->rotation)); +} + +OSVR_INLINE void osvrPose3FromQuatlib(OSVR_Pose3 *dest, + q_xyz_quat_type const *src) { + osvrVec3FromQuatlib(&(dest->translation), src->xyz); + osvrQuatFromQuatlib(&(dest->rotation), src->quat); +} + +/** @} */ + +OSVR_EXTERN_C_END +#endif diff --git a/gfx/vr/osvr/Util/RadialDistortionParametersC.h b/gfx/vr/osvr/Util/RadialDistortionParametersC.h new file mode 100644 index 000000000..ac85a680e --- /dev/null +++ b/gfx/vr/osvr/Util/RadialDistortionParametersC.h @@ -0,0 +1,62 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_RadialDistortionParametersC_h_GUID_925BCEB1_BACA_4DA7_5133_FFF560C72EBD +#define INCLUDED_RadialDistortionParametersC_h_GUID_925BCEB1_BACA_4DA7_5133_FFF560C72EBD + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/Vec2C.h> +#include <osvr/Util/Vec3C.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup UtilMath +@{ +*/ + +/** @brief Parameters for a per-color-component radial distortion shader +*/ +typedef struct OSVR_RadialDistortionParameters { + /** @brief Vector of K1 coefficients for the R, G, B channels*/ + OSVR_Vec3 k1; + /** @brief Center of projection for the radial distortion, relative to the + bounds of this surface. 
+ */ + OSVR_Vec2 centerOfProjection; +} OSVR_RadialDistortionParameters; + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/RenderingTypesC.h b/gfx/vr/osvr/Util/RenderingTypesC.h new file mode 100644 index 000000000..51ec0cc09 --- /dev/null +++ b/gfx/vr/osvr/Util/RenderingTypesC.h @@ -0,0 +1,134 @@ +/** @file + @brief Header with integer types for Viewer, Eye, and Surface + counts/indices, as well as viewport information. + + Must be c-safe! + + @date 2015 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2015 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_RenderingTypesC_h_GUID_6689A6CA_76AC_48AC_A0D0_2902BC95AC35 +#define INCLUDED_RenderingTypesC_h_GUID_6689A6CA_76AC_48AC_A0D0_2902BC95AC35 + +/* Internal Includes */ +#include <osvr/Util/StdInt.h> +#include <osvr/Util/APIBaseC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup PluginKit +@{ +*/ + +/** @brief A count or index for a display input in a display config. +*/ +typedef uint8_t OSVR_DisplayInputCount; + +/** @brief The integer type used in specification of size or location of a + display input, in pixels. +*/ +typedef int32_t OSVR_DisplayDimension; + +/** @brief The integer type specifying a number of viewers in a system. + + A "head" is a viewer (though not all viewers are necessarily heads). + + The count is output from osvrClientGetNumViewers(). + + When used as an ID/index, it is zero-based, so values range from 0 to (count + - 1) inclusive. + + The most frequent count is 1, though higher values are theoretically + possible. If you do not handle higher values, do still check and alert the + user if their system reports a higher number, as your application may not + behave as the user expects. +*/ +typedef uint32_t OSVR_ViewerCount; + +/** @brief The integer type specifying the number of eyes (viewpoints) of a + viewer. + + The count for a given viewer is output from osvrClientGetNumEyesForViewer(). + + When used as an ID/index, it is zero-based,so values range from 0 to (count + - 1) inclusive, for a given viewer. + + Use as an ID/index is not meaningful except in conjunction with the ID of + the corresponding viewer. (that is, there is no overall "eye 0", but "viewer + 0, eye 0" is meaningful.) + + In practice, the most frequent counts are 1 (e.g. mono) and 2 (e.g. stereo), + and for example the latter results in eyes with ID 0 and 1 for the viewer. + There is no innate or consistent semantics/meaning ("left" or "right") to + indices guaranteed at this time, and applications should not try to infer + any. +*/ +typedef uint8_t OSVR_EyeCount; + +/** @brief The integer type specifying the number of surfaces seen by a viewer's + eye. + + The count for a given viewer and eye is output from + osvrClientGetNumSurfacesForViewerEye(). Note that the count is not + necessarily equal between eyes of a viewer. 
+ + When used as an ID/index, it is zero-based, so values range from 0 to (count + - 1) inclusive, for a given viewer and eye. + + Use as an ID/index is not meaningful except in conjunction with the IDs of + the corresponding viewer and eye. (that is, there is no overall "surface 0", + but "viewer 0, eye 0, surface 0" is meaningful.) +*/ +typedef uint32_t OSVR_SurfaceCount; + +/** @brief The integer type used in specification of size or location of a + viewport. +*/ +typedef int32_t OSVR_ViewportDimension; + +/** @brief The integer type used to indicate relative priorities of a display + distortion strategy. Negative values are defined to mean that strategy is + unavailable. + + @sa OSVR_DISTORTION_PRIORITY_UNAVAILABLE +*/ +typedef int32_t OSVR_DistortionPriority; + +/** @brief The constant to return as an OSVR_DistortionPriority if a given + strategy is not available for a surface. + + @sa OSVR_DistortionPriority +*/ +#define OSVR_DISTORTION_PRIORITY_UNAVAILABLE (-1) + +/** @} */ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/ReturnCodesC.h b/gfx/vr/osvr/Util/ReturnCodesC.h new file mode 100644 index 000000000..971798ea4 --- /dev/null +++ b/gfx/vr/osvr/Util/ReturnCodesC.h @@ -0,0 +1,57 @@ +/** @file + @brief Header declaring a type and values for simple C return codes. + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_ReturnCodesC_h_GUID_C81A2FDE_E5BB_4AAA_70A4_C616DD7C141A +#define INCLUDED_ReturnCodesC_h_GUID_C81A2FDE_E5BB_4AAA_70A4_C616DD7C141A + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/AnnotationMacrosC.h> + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup PluginKit + @{ +*/ +/** @name Return Codes + @{ +*/ +/** @brief The "success" value for an OSVR_ReturnCode */ +#define OSVR_RETURN_SUCCESS (0) +/** @brief The "failure" value for an OSVR_ReturnCode */ +#define OSVR_RETURN_FAILURE (1) +/** @brief Return type from C API OSVR functions. */ +typedef OSVR_RETURN_SUCCESS_CONDITION( + return == OSVR_RETURN_SUCCESS) char OSVR_ReturnCode; +/** @} */ + +/** @} */ /* end of group */ + +OSVR_EXTERN_C_END + +#endif diff --git a/gfx/vr/osvr/Util/StdInt.h b/gfx/vr/osvr/Util/StdInt.h new file mode 100644 index 000000000..c9462b62c --- /dev/null +++ b/gfx/vr/osvr/Util/StdInt.h @@ -0,0 +1,42 @@ +/** @file + @brief Header wrapping the C99 standard `stdint` header. + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_StdInt_h_GUID_C1AAF35C_C704_4DB7_14AC_615730C4619B +#define INCLUDED_StdInt_h_GUID_C1AAF35C_C704_4DB7_14AC_615730C4619B + +/* IWYU pragma: begin_exports */ + +#if !defined(_MSC_VER) || (defined(_MSC_VER) && _MSC_VER >= 1600) +#include <stdint.h> +#else +#include "MSStdIntC.h" +#endif + +/* IWYU pragma: end_exports */ + +#endif diff --git a/gfx/vr/osvr/Util/TimeValueC.h b/gfx/vr/osvr/Util/TimeValueC.h new file mode 100644 index 000000000..7dcead654 --- /dev/null +++ b/gfx/vr/osvr/Util/TimeValueC.h @@ -0,0 +1,271 @@ +/** @file + @brief Header defining a dependency-free, cross-platform substitute for + struct timeval + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_TimeValueC_h_GUID_A02C6917_124D_4CB3_E63E_07F2DA7144E9 +#define INCLUDED_TimeValueC_h_GUID_A02C6917_124D_4CB3_E63E_07F2DA7144E9 + +/* Internal Includes */ +#include <osvr/Util/Export.h> +#include <osvr/Util/APIBaseC.h> +#include <osvr/Util/AnnotationMacrosC.h> +#include <osvr/Util/PlatformConfig.h> +#include <osvr/Util/StdInt.h> +#include <osvr/Util/BoolC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @defgroup UtilTime Timestamp interaction + @ingroup Util + + This provides a level of interoperability with struct timeval on systems + with that facility. It provides a neutral representation with sufficiently + large types. + + For C++ code, use of std::chrono or boost::chrono instead is recommended. + + Note that these time values may not necessarily correlate between processes + so should not be used to estimate or measure latency, etc. + + @{ +*/ + +/** @brief The signed integer type storing the seconds in a struct + OSVR_TimeValue */ +typedef int64_t OSVR_TimeValue_Seconds; +/** @brief The signed integer type storing the microseconds in a struct + OSVR_TimeValue */ +typedef int32_t OSVR_TimeValue_Microseconds; + +/** @brief Standardized, portable parallel to struct timeval for representing + both absolute times and time intervals. + + Where interpreted as an absolute time, its meaning is to be considered the + same as that of the POSIX struct timeval: + time since 00:00 Coordinated Universal Time (UTC), January 1, 1970. + + For best results, please keep normalized. Output of all functions here + is normalized. + */ +typedef struct OSVR_TimeValue { + /** @brief Seconds portion of the time value. 
*/ + OSVR_TimeValue_Seconds seconds; + /** @brief Microseconds portion of the time value. */ + OSVR_TimeValue_Microseconds microseconds; +} OSVR_TimeValue; + +#ifdef OSVR_HAVE_STRUCT_TIMEVAL +/** @brief Gets the current time in the TimeValue. Parallel to gettimeofday. */ +OSVR_UTIL_EXPORT void osvrTimeValueGetNow(OSVR_OUT OSVR_TimeValue *dest) + OSVR_FUNC_NONNULL((1)); + +struct timeval; /* forward declaration */ + +/** @brief Converts from a TimeValue struct to your system's struct timeval. + + @param dest Pointer to an empty struct timeval for your platform. + @param src A pointer to an OSVR_TimeValue you'd like to convert from. + + If either parameter is NULL, the function will return without doing + anything. +*/ +OSVR_UTIL_EXPORT void +osvrTimeValueToStructTimeval(OSVR_OUT struct timeval *dest, + OSVR_IN_PTR const OSVR_TimeValue *src) + OSVR_FUNC_NONNULL((1, 2)); + +/** @brief Converts from a TimeValue struct to your system's struct timeval. + @param dest An OSVR_TimeValue destination pointer. + @param src Pointer to a struct timeval you'd like to convert from. + + The result is normalized. + + If either parameter is NULL, the function will return without doing + anything. +*/ +OSVR_UTIL_EXPORT void +osvrStructTimevalToTimeValue(OSVR_OUT OSVR_TimeValue *dest, + OSVR_IN_PTR const struct timeval *src) + OSVR_FUNC_NONNULL((1, 2)); +#endif + +/** @brief "Normalizes" a time value so that the absolute number of microseconds + is less than 1,000,000, and that the sign of both components is the same. + + @param tv Address of a struct TimeValue to normalize in place. + + If the given pointer is NULL, this function returns without doing anything. +*/ +OSVR_UTIL_EXPORT void osvrTimeValueNormalize(OSVR_INOUT_PTR OSVR_TimeValue *tv) + OSVR_FUNC_NONNULL((1)); + +/** @brief Sums two time values, replacing the first with the result. + + @param tvA Destination and first source. + @param tvB second source + + If a given pointer is NULL, this function returns without doing anything. + + Both parameters are expected to be in normalized form. +*/ +OSVR_UTIL_EXPORT void osvrTimeValueSum(OSVR_INOUT_PTR OSVR_TimeValue *tvA, + OSVR_IN_PTR const OSVR_TimeValue *tvB) + OSVR_FUNC_NONNULL((1, 2)); + +/** @brief Computes the difference between two time values, replacing the first + with the result. + + Effectively, `*tvA = *tvA - *tvB` + + @param tvA Destination and first source. + @param tvB second source + + If a given pointer is NULL, this function returns without doing anything. + + Both parameters are expected to be in normalized form. +*/ +OSVR_UTIL_EXPORT void +osvrTimeValueDifference(OSVR_INOUT_PTR OSVR_TimeValue *tvA, + OSVR_IN_PTR const OSVR_TimeValue *tvB) + OSVR_FUNC_NONNULL((1, 2)); + +/** @brief Compares two time values (assumed to be normalized), returning + the same values as strcmp + + @return <0 if A is earlier than B, 0 if they are the same, and >0 if A + is later than B. +*/ +OSVR_UTIL_EXPORT int osvrTimeValueCmp(OSVR_IN_PTR const OSVR_TimeValue *tvA, + OSVR_IN_PTR const OSVR_TimeValue *tvB) + OSVR_FUNC_NONNULL((1, 2)); + +OSVR_EXTERN_C_END + +/** @brief Compute the difference between the two time values, returning the + duration as a double-precision floating-point number of seconds. + + Effectively, `ret = *tvA - *tvB` + + @param tvA first source. 
+ @param tvB second source + @return Duration of timespan in seconds (floating-point) +*/ +OSVR_INLINE double +osvrTimeValueDurationSeconds(OSVR_IN_PTR const OSVR_TimeValue *tvA, + OSVR_IN_PTR const OSVR_TimeValue *tvB) { + OSVR_TimeValue A = *tvA; + osvrTimeValueDifference(&A, tvB); + double dt = A.seconds + A.microseconds / 1000000.0; + return dt; +} + +/** @brief True if A is later than B */ +OSVR_INLINE OSVR_CBool +osvrTimeValueGreater(OSVR_IN_PTR const OSVR_TimeValue *tvA, + OSVR_IN_PTR const OSVR_TimeValue *tvB) { + if (!tvA || !tvB) { + return OSVR_FALSE; + } + return ((tvA->seconds > tvB->seconds) || + (tvA->seconds == tvB->seconds && + tvA->microseconds > tvB->microseconds)) + ? OSVR_TRUE + : OSVR_FALSE; +} + +#ifdef __cplusplus + +#include <cmath> +#include <cassert> + +/// Returns true if the time value is normalized. Typically used in assertions. +inline bool osvrTimeValueIsNormalized(const OSVR_TimeValue &tv) { +#ifdef __APPLE__ + // apparently standard library used on mac only has floating-point abs? + return std::abs(double(tv.microseconds)) < 1000000 && +#else + return std::abs(tv.microseconds) < 1000000 && +#endif + ((tv.seconds > 0) == (tv.microseconds > 0)); +} + +/// True if A is later than B +inline bool osvrTimeValueGreater(const OSVR_TimeValue &tvA, + const OSVR_TimeValue &tvB) { + assert(osvrTimeValueIsNormalized(tvA) && + "First timevalue argument to comparison was not normalized!"); + assert(osvrTimeValueIsNormalized(tvB) && + "Second timevalue argument to comparison was not normalized!"); + return (tvA.seconds > tvB.seconds) || + (tvA.seconds == tvB.seconds && tvA.microseconds > tvB.microseconds); +} + +/// Operator > overload for time values +inline bool operator>(const OSVR_TimeValue &tvA, const OSVR_TimeValue &tvB) { + return osvrTimeValueGreater(tvA, tvB); +} + +/// Operator < overload for time values +inline bool operator<(const OSVR_TimeValue &tvA, const OSVR_TimeValue &tvB) { + // Change the order of arguments before forwarding. + return osvrTimeValueGreater(tvB, tvA); +} + +/// Operator == overload for time values +inline bool operator==(const OSVR_TimeValue &tvA, const OSVR_TimeValue &tvB) { + assert( + osvrTimeValueIsNormalized(tvA) && + "First timevalue argument to equality comparison was not normalized!"); + assert( + osvrTimeValueIsNormalized(tvB) && + "Second timevalue argument to equality comparison was not normalized!"); + return (tvA.seconds == tvB.seconds) && + (tvA.microseconds == tvB.microseconds); +} +/// Operator == overload for time values +inline bool operator!=(const OSVR_TimeValue &tvA, const OSVR_TimeValue &tvB) { + assert(osvrTimeValueIsNormalized(tvA) && "First timevalue argument to " + "inequality comparison was not " + "normalized!"); + assert(osvrTimeValueIsNormalized(tvB) && "Second timevalue argument to " + "inequality comparison was not " + "normalized!"); + return (tvA.seconds != tvB.seconds) || + (tvA.microseconds != tvB.microseconds); +} +#endif + +/** @} */ + +#endif diff --git a/gfx/vr/osvr/Util/Vec2C.h b/gfx/vr/osvr/Util/Vec2C.h new file mode 100644 index 000000000..0432a32e7 --- /dev/null +++ b/gfx/vr/osvr/Util/Vec2C.h @@ -0,0 +1,86 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. 
+// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_Vec2C_h_GUID_F9715DE4_2649_4182_0F4C_D62121235D5F +#define INCLUDED_Vec2C_h_GUID_F9715DE4_2649_4182_0F4C_D62121235D5F + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup UtilMath + @{ +*/ +/** @brief A structure defining a 2D vector, which represents position +*/ +typedef struct OSVR_Vec2 { + /** @brief Internal array data. */ + double data[2]; +} OSVR_Vec2; + +#define OSVR_VEC_MEMBER(COMPONENT, INDEX) \ + /** @brief Accessor for Vec2 component COMPONENT */ \ + OSVR_INLINE double osvrVec2Get##COMPONENT(OSVR_Vec2 const *v) { \ + return v->data[INDEX]; \ + } \ + /** @brief Setter for Vec2 component COMPONENT */ \ + OSVR_INLINE void osvrVec2Set##COMPONENT(OSVR_Vec2 *v, double val) { \ + v->data[INDEX] = val; \ + } + +OSVR_VEC_MEMBER(X, 0) +OSVR_VEC_MEMBER(Y, 1) + +#undef OSVR_VEC_MEMBER + +/** @brief Set a Vec2 to the zero vector */ +OSVR_INLINE void osvrVec2Zero(OSVR_Vec2 *v) { + osvrVec2SetX(v, 0); + osvrVec2SetY(v, 0); +} + +/** @} */ + +OSVR_EXTERN_C_END + +#ifdef __cplusplus +template <typename StreamType> +inline StreamType &operator<<(StreamType &os, OSVR_Vec2 const &vec) { + os << "(" << vec.data[0] << ", " << vec.data[1] << ")"; + return os; +} +#endif + +#endif // INCLUDED_Vec2C_h_GUID_F9715DE4_2649_4182_0F4C_D62121235D5F diff --git a/gfx/vr/osvr/Util/Vec3C.h b/gfx/vr/osvr/Util/Vec3C.h new file mode 100644 index 000000000..666861174 --- /dev/null +++ b/gfx/vr/osvr/Util/Vec3C.h @@ -0,0 +1,89 @@ +/** @file + @brief Header + + Must be c-safe! + + @date 2014 + + @author + Sensics, Inc. + <http://sensics.com/osvr> +*/ + +/* +// Copyright 2014 Sensics, Inc. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +*/ + +#ifndef INCLUDED_Vec3C_h_GUID_BF4E98ED_74CF_4785_DB61_109A00BA74DE +#define INCLUDED_Vec3C_h_GUID_BF4E98ED_74CF_4785_DB61_109A00BA74DE + +/* Internal Includes */ +#include <osvr/Util/APIBaseC.h> + +/* Library/third-party includes */ +/* none */ + +/* Standard includes */ +/* none */ + +OSVR_EXTERN_C_BEGIN + +/** @addtogroup UtilMath + @{ +*/ +/** @brief A structure defining a 3D vector, often a position/translation. +*/ +typedef struct OSVR_Vec3 { + /** @brief Internal array data. 
*/ + double data[3]; +} OSVR_Vec3; + +#define OSVR_VEC_MEMBER(COMPONENT, INDEX) \ + /** @brief Accessor for Vec3 component COMPONENT */ \ + OSVR_INLINE double osvrVec3Get##COMPONENT(OSVR_Vec3 const *v) { \ + return v->data[INDEX]; \ + } \ + /** @brief Setter for Vec3 component COMPONENT */ \ + OSVR_INLINE void osvrVec3Set##COMPONENT(OSVR_Vec3 *v, double val) { \ + v->data[INDEX] = val; \ + } + +OSVR_VEC_MEMBER(X, 0) +OSVR_VEC_MEMBER(Y, 1) +OSVR_VEC_MEMBER(Z, 2) + +#undef OSVR_VEC_MEMBER + +/** @brief Set a Vec3 to the zero vector */ +OSVR_INLINE void osvrVec3Zero(OSVR_Vec3 *v) { + osvrVec3SetX(v, 0); + osvrVec3SetY(v, 0); + osvrVec3SetZ(v, 0); +} + +/** @} */ + +OSVR_EXTERN_C_END + +#ifdef __cplusplus +template <typename StreamType> +inline StreamType &operator<<(StreamType &os, OSVR_Vec3 const &vec) { + os << "(" << vec.data[0] << ", " << vec.data[1] << ", " << vec.data[2] + << ")"; + return os; +} +#endif + +#endif diff --git a/gfx/vr/ovr_capi_dynamic.h b/gfx/vr/ovr_capi_dynamic.h new file mode 100644 index 000000000..41e313dca --- /dev/null +++ b/gfx/vr/ovr_capi_dynamic.h @@ -0,0 +1,676 @@ +/* -*- Mode: c++; c-basic-offset: 2; indent-tabs-mode: nil; tab-width: 40 -*- */ +/* This Source Code Form is subject to the terms of the Mozilla Public + * License, v. 2.0. If a copy of the MPL was not distributed with this file, + * You can obtain one at http://mozilla.org/MPL/2.0/. */ + +/* This file contains just the needed struct definitions for + * interacting with the Oculus VR C API, without needing to #include + * OVR_CAPI.h directly. Note that it uses the same type names as the + * CAPI, and cannot be #included at the same time as OVR_CAPI.h. It + * does not include the entire C API, just want's needed. + */ + +#ifdef OVR_CAPI_h +#ifdef _MSC_VER +#pragma message("ovr_capi_dyanmic.h: OVR_CAPI.h included before ovr_capi_dynamic.h, skipping this") +#else +#warning OVR_CAPI.h included before ovr_capi_dynamic.h, skipping this +#endif +#define mozilla_ovr_capi_dynamic_h_ + +#else + +#ifndef mozilla_ovr_capi_dynamic_h_ +#define mozilla_ovr_capi_dynamic_h_ + +#define OVR_CAPI_LIMITED_MOZILLA 1 + +#ifdef HAVE_64BIT_BUILD +#define OVR_PTR_SIZE 8 +#define OVR_ON64(x) x +#else +#define OVR_PTR_SIZE 4 +#define OVR_ON64(x) /**/ +#endif + +#if defined(_WIN32) +#define OVR_PFN __cdecl +#else +#define OVR_PFN +#endif + +#if !defined(OVR_ALIGNAS) +#if defined(__GNUC__) || defined(__clang__) +#define OVR_ALIGNAS(n) __attribute__((aligned(n))) +#elif defined(_MSC_VER) || defined(__INTEL_COMPILER) +#define OVR_ALIGNAS(n) __declspec(align(n)) +#elif defined(__CC_ARM) +#define OVR_ALIGNAS(n) __align(n) +#else +#error Need to define OVR_ALIGNAS +#endif +#endif + +#if !defined(OVR_UNUSED_STRUCT_PAD) +#define OVR_UNUSED_STRUCT_PAD(padName, size) char padName[size]; +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +typedef int32_t ovrResult; + +typedef enum { + ovrSuccess = 0, + ovrSuccess_NotVisible = 1000, + ovrSuccess_HMDFirmwareMismatch = 4100, + ovrSuccess_TrackerFirmwareMismatch = 4101, + ovrSuccess_ControllerFirmwareMismatch = 4104, +} ovrSuccessType; + +typedef char ovrBool; +typedef struct OVR_ALIGNAS(4) { int x, y; } ovrVector2i; +typedef struct OVR_ALIGNAS(4) { int w, h; } ovrSizei; +typedef struct OVR_ALIGNAS(4) { ovrVector2i Pos; ovrSizei Size; } ovrRecti; +typedef struct OVR_ALIGNAS(4) { float x, y, z, w; } ovrQuatf; +typedef struct OVR_ALIGNAS(4) { float x, y; } ovrVector2f; +typedef struct OVR_ALIGNAS(4) { float x, y, z; } ovrVector3f; +typedef struct OVR_ALIGNAS(4) { float M[4][4]; } 
ovrMatrix4f; + +typedef struct OVR_ALIGNAS(4) { + ovrQuatf Orientation; + ovrVector3f Position; +} ovrPosef; + +typedef struct OVR_ALIGNAS(8) { + ovrPosef ThePose; + ovrVector3f AngularVelocity; + ovrVector3f LinearVelocity; + ovrVector3f AngularAcceleration; + ovrVector3f LinearAcceleration; + OVR_UNUSED_STRUCT_PAD(pad0, 4) + double TimeInSeconds; +} ovrPoseStatef; + +typedef struct { + float UpTan; + float DownTan; + float LeftTan; + float RightTan; +} ovrFovPort; + +typedef enum { + ovrHmd_None = 0, + ovrHmd_DK1 = 3, + ovrHmd_DKHD = 4, + ovrHmd_DK2 = 6, + ovrHmd_CB = 8, + ovrHmd_Other = 9, + ovrHmd_E3_2015 = 10, + ovrHmd_ES06 = 11, + ovrHmd_ES09 = 12, + ovrHmd_ES11 = 13, + ovrHmd_CV1 = 14, + ovrHmd_EnumSize = 0x7fffffff +} ovrHmdType; + +typedef enum { + ovrHmdCap_DebugDevice = 0x0010, + ovrHmdCap_EnumSize = 0x7fffffff +} ovrHmdCaps; + +typedef enum +{ + ovrTrackingCap_Orientation = 0x0010, + ovrTrackingCap_MagYawCorrection = 0x0020, + ovrTrackingCap_Position = 0x0040, + ovrTrackingCap_EnumSize = 0x7fffffff +} ovrTrackingCaps; + +typedef enum { + ovrEye_Left = 0, + ovrEye_Right = 1, + ovrEye_Count = 2, + ovrEye_EnumSize = 0x7fffffff +} ovrEyeType; + +typedef enum { + ovrTrackingOrigin_EyeLevel = 0, + ovrTrackingOrigin_FloorLevel = 1, + ovrTrackingOrigin_Count = 2, ///< \internal Count of enumerated elements. + ovrTrackingOrigin_EnumSize = 0x7fffffff ///< \internal Force type int32_t. +} ovrTrackingOrigin; + +typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) { + char Reserved[8]; +} ovrGraphicsLuid; + +typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) { + ovrHmdType Type; + OVR_ON64(OVR_UNUSED_STRUCT_PAD(pad0, 4)) + char ProductName[64]; + char Manufacturer[64]; + short VendorId; + short ProductId; + char SerialNumber[24]; + short FirmwareMajor; + short FirmwareMinor; + unsigned int AvailableHmdCaps; + unsigned int DefaultHmdCaps; + unsigned int AvailableTrackingCaps; + unsigned int DefaultTrackingCaps; + ovrFovPort DefaultEyeFov[ovrEye_Count]; + ovrFovPort MaxEyeFov[ovrEye_Count]; + ovrSizei Resolution; + float DisplayRefreshRate; + OVR_ON64(OVR_UNUSED_STRUCT_PAD(pad1, 4)) +} ovrHmdDesc; + +typedef struct ovrHmdStruct* ovrSession; + +typedef enum { + ovrStatus_OrientationTracked = 0x0001, + ovrStatus_PositionTracked = 0x0002, + ovrStatus_EnumSize = 0x7fffffff +} ovrStatusBits; + +typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) { + float FrustumHFovInRadians; + float FrustumVFovInRadians; + float FrustumNearZInMeters; + float FrustumFarZInMeters; +} ovrTrackerDesc; + +typedef enum { + ovrTracker_Connected = 0x0020, + ovrTracker_PoseTracked = 0x0004 +} ovrTrackerFlags; + +typedef struct OVR_ALIGNAS(8) { + unsigned int TrackerFlags; + ovrPosef Pose; + ovrPosef LeveledPose; + OVR_UNUSED_STRUCT_PAD(pad0, 4) +} ovrTrackerPose; + +typedef struct OVR_ALIGNAS(8) { + ovrPoseStatef HeadPose; + unsigned int StatusFlags; + ovrPoseStatef HandPoses[2]; + unsigned int HandStatusFlags[2]; + ovrPosef CalibratedOrigin; +} ovrTrackingState; + +typedef struct OVR_ALIGNAS(4) { + ovrEyeType Eye; + ovrFovPort Fov; + ovrRecti DistortedViewport; + ovrVector2f PixelsPerTanAngleAtCenter; + ovrVector3f HmdToEyeOffset; +} ovrEyeRenderDesc; + +typedef struct OVR_ALIGNAS(4) { + float Projection22; + float Projection23; + float Projection32; +} ovrTimewarpProjectionDesc; + +typedef struct OVR_ALIGNAS(4) { + ovrVector3f HmdToEyeViewOffset[ovrEye_Count]; + float HmdSpaceToWorldScaleInMeters; +} ovrViewScaleDesc; + +typedef enum { + ovrTexture_2D, + ovrTexture_2D_External, + ovrTexture_Cube, + ovrTexture_Count, + ovrTexture_EnumSize = 0x7fffffff 
+} ovrTextureType; + +typedef enum { + ovrTextureBind_None, + ovrTextureBind_DX_RenderTarget = 0x0001, + ovrTextureBind_DX_UnorderedAccess = 0x0002, + ovrTextureBind_DX_DepthStencil = 0x0004, + ovrTextureBind_EnumSize = 0x7fffffff +} ovrTextureBindFlags; + +typedef enum { + OVR_FORMAT_UNKNOWN, + OVR_FORMAT_B5G6R5_UNORM, + OVR_FORMAT_B5G5R5A1_UNORM, + OVR_FORMAT_B4G4R4A4_UNORM, + OVR_FORMAT_R8G8B8A8_UNORM, + OVR_FORMAT_R8G8B8A8_UNORM_SRGB, + OVR_FORMAT_B8G8R8A8_UNORM, + OVR_FORMAT_B8G8R8A8_UNORM_SRGB, + OVR_FORMAT_B8G8R8X8_UNORM, + OVR_FORMAT_B8G8R8X8_UNORM_SRGB, + OVR_FORMAT_R16G16B16A16_FLOAT, + OVR_FORMAT_D16_UNORM, + OVR_FORMAT_D24_UNORM_S8_UINT, + OVR_FORMAT_D32_FLOAT, + OVR_FORMAT_D32_FLOAT_S8X24_UINT, + OVR_FORMAT_ENUMSIZE = 0x7fffffff +} ovrTextureFormat; + +typedef enum { + ovrTextureMisc_None, + ovrTextureMisc_DX_Typeless = 0x0001, + ovrTextureMisc_AllowGenerateMips = 0x0002, + ovrTextureMisc_EnumSize = 0x7fffffff +} ovrTextureFlags; + +typedef struct { + ovrTextureType Type; + ovrTextureFormat Format; + int ArraySize; + int Width; + int Height; + int MipLevels; + int SampleCount; + ovrBool StaticImage; + unsigned int MiscFlags; + unsigned int BindFlags; +} ovrTextureSwapChainDesc; + +typedef struct +{ + ovrTextureFormat Format; + int Width; + int Height; + unsigned int MiscFlags; +} ovrMirrorTextureDesc; + +typedef void* ovrTextureSwapChain; +typedef struct ovrMirrorTextureData* ovrMirrorTexture; + + + +typedef enum { + ovrButton_A = 0x00000001, + ovrButton_B = 0x00000002, + ovrButton_RThumb = 0x00000004, + ovrButton_RShoulder = 0x00000008, + ovrButton_RMask = ovrButton_A | ovrButton_B | ovrButton_RThumb | ovrButton_RShoulder, + ovrButton_X = 0x00000100, + ovrButton_Y = 0x00000200, + ovrButton_LThumb = 0x00000400, + ovrButton_LShoulder = 0x00000800, + ovrButton_LMask = ovrButton_X | ovrButton_Y | ovrButton_LThumb | ovrButton_LShoulder, + ovrButton_Up = 0x00010000, + ovrButton_Down = 0x00020000, + ovrButton_Left = 0x00040000, + ovrButton_Right = 0x00080000, + ovrButton_Enter = 0x00100000, + ovrButton_Back = 0x00200000, + ovrButton_VolUp = 0x00400000, + ovrButton_VolDown = 0x00800000, + ovrButton_Home = 0x01000000, + ovrButton_Private = ovrButton_VolUp | ovrButton_VolDown | ovrButton_Home, + ovrButton_EnumSize = 0x7fffffff +} ovrButton; + +typedef enum { + ovrTouch_A = ovrButton_A, + ovrTouch_B = ovrButton_B, + ovrTouch_RThumb = ovrButton_RThumb, + ovrTouch_RIndexTrigger = 0x00000010, + ovrTouch_RButtonMask = ovrTouch_A | ovrTouch_B | ovrTouch_RThumb | ovrTouch_RIndexTrigger, + ovrTouch_X = ovrButton_X, + ovrTouch_Y = ovrButton_Y, + ovrTouch_LThumb = ovrButton_LThumb, + ovrTouch_LIndexTrigger = 0x00001000, + ovrTouch_LButtonMask = ovrTouch_X | ovrTouch_Y | ovrTouch_LThumb | ovrTouch_LIndexTrigger, + ovrTouch_RIndexPointing = 0x00000020, + ovrTouch_RThumbUp = 0x00000040, + ovrTouch_RPoseMask = ovrTouch_RIndexPointing | ovrTouch_RThumbUp, + ovrTouch_LIndexPointing = 0x00002000, + ovrTouch_LThumbUp = 0x00004000, + ovrTouch_LPoseMask = ovrTouch_LIndexPointing | ovrTouch_LThumbUp, + ovrTouch_EnumSize = 0x7fffffff +} ovrTouch; + +typedef enum { + ovrControllerType_None = 0x00, + ovrControllerType_LTouch = 0x01, + ovrControllerType_RTouch = 0x02, + ovrControllerType_Touch = 0x03, + ovrControllerType_Remote = 0x04, + ovrControllerType_XBox = 0x10, + ovrControllerType_Active = 0xff, + ovrControllerType_EnumSize = 0x7fffffff +} ovrControllerType; + +typedef enum { + ovrHand_Left = 0, + ovrHand_Right = 1, + ovrHand_Count = 2, + ovrHand_EnumSize = 0x7fffffff +} ovrHandType; + +typedef struct { 
+ double TimeInSeconds; + unsigned int Buttons; + unsigned int Touches; + float IndexTrigger[ovrHand_Count]; + float HandTrigger[ovrHand_Count]; + ovrVector2f Thumbstick[ovrHand_Count]; + ovrControllerType ControllerType; +} ovrInputState; + +typedef enum { + ovrInit_Debug = 0x00000001, + ovrInit_RequestVersion = 0x00000004, + ovrinit_WritableBits = 0x00ffffff, + ovrInit_EnumSize = 0x7fffffff +} ovrInitFlags; + +typedef enum { + ovrLogLevel_Debug = 0, + ovrLogLevel_Info = 1, + ovrLogLevel_Error = 2, + ovrLogLevel_EnumSize = 0x7fffffff +} ovrLogLevel; + +typedef void (OVR_PFN* ovrLogCallback)(uintptr_t userData, int level, const char* message); + +typedef struct OVR_ALIGNAS(8) { + uint32_t Flags; + uint32_t RequestedMinorVersion; + ovrLogCallback LogCallback; + uintptr_t UserData; + uint32_t ConnectionTimeoutMS; + OVR_ON64(OVR_UNUSED_STRUCT_PAD(pad0, 4)) +} ovrInitParams; + +typedef ovrResult(OVR_PFN* pfn_ovr_Initialize)(const ovrInitParams* params); +typedef void (OVR_PFN* pfn_ovr_Shutdown)(); + +typedef struct { + ovrResult Result; + char ErrorString[512]; +} ovrErrorInfo; + +typedef void (OVR_PFN* pfn_ovr_GetLastErrorInfo)(ovrErrorInfo* errorInfo); +typedef const char* (OVR_PFN* pfn_ovr_GetVersionString)(); +typedef int (OVR_PFN* pfn_ovr_TraceMessage)(int level, const char* message); +typedef ovrHmdDesc (OVR_PFN* pfn_ovr_GetHmdDesc)(ovrSession session); +typedef unsigned int (OVR_PFN* pfn_ovr_GetTrackerCount)(ovrSession session); +typedef ovrTrackerDesc* (OVR_PFN* pfn_ovr_GetTrackerDesc)(ovrSession session, unsigned int trackerDescIndex); +typedef ovrResult (OVR_PFN* pfn_ovr_Create)(ovrSession* pSession, ovrGraphicsLuid* pLuid); +typedef void (OVR_PFN* pfn_ovr_Destroy)(ovrSession session); + +typedef struct { + ovrBool IsVisible; + ovrBool HmdPresent; + ovrBool HmdMounted; + ovrBool DisplayLost; + ovrBool ShouldQuit; + ovrBool ShouldRecenter; +} ovrSessionStatus; + +typedef ovrResult (OVR_PFN* pfn_ovr_GetSessionStatus)(ovrSession session, ovrSessionStatus* sessionStatus); + +typedef ovrResult (OVR_PFN* pfn_ovr_SetTrackingOriginType)(ovrSession session, ovrTrackingOrigin origin); +typedef ovrTrackingOrigin (OVR_PFN* pfn_ovr_GetTrackingOriginType)(ovrSession session); +typedef ovrResult (OVR_PFN* pfn_ovr_RecenterTrackingOrigin)(ovrSession session); +typedef void (OVR_PFN* pfn_ovr_ClearShouldRecenterFlag)(ovrSession session); +typedef ovrTrackingState (OVR_PFN* pfn_ovr_GetTrackingState)(ovrSession session, double absTime, ovrBool latencyMarker); +typedef ovrTrackerPose (OVR_PFN* pfn_ovr_GetTrackerPose)(ovrSession session, unsigned int trackerPoseIndex); +typedef ovrResult (OVR_PFN* pfn_ovr_GetInputState)(ovrSession session, ovrControllerType controllerType, ovrInputState* inputState); +typedef unsigned int (OVR_PFN* pfn_ovr_GetConnectedControllerTypes)(ovrSession session); +typedef ovrResult (OVR_PFN* pfn_ovr_SetControllerVibration)(ovrSession session, ovrControllerType controllerType, float frequency, float amplitude); + +enum { + ovrMaxLayerCount = 16 +}; + +typedef enum { + ovrLayerType_Disabled = 0, + ovrLayerType_EyeFov = 1, + ovrLayerType_Quad = 3, + ovrLayerType_EyeMatrix = 5, + ovrLayerType_EnumSize = 0x7fffffff +} ovrLayerType; + +typedef enum { + ovrLayerFlag_HighQuality = 0x01, + ovrLayerFlag_TextureOriginAtBottomLeft = 0x02, + ovrLayerFlag_HeadLocked = 0x04 +} ovrLayerFlags; + +typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) { + ovrLayerType Type; + unsigned Flags; +} ovrLayerHeader; + +typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) { + ovrLayerHeader Header; + ovrTextureSwapChain 
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrLayerHeader Header;
+  ovrTextureSwapChain ColorTexture[ovrEye_Count];
+  ovrRecti Viewport[ovrEye_Count];
+  ovrFovPort Fov[ovrEye_Count];
+  ovrPosef RenderPose[ovrEye_Count];
+  double SensorSampleTime;
+} ovrLayerEyeFov;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrLayerHeader Header;
+  ovrTextureSwapChain ColorTexture[ovrEye_Count];
+  ovrRecti Viewport[ovrEye_Count];
+  ovrPosef RenderPose[ovrEye_Count];
+  ovrMatrix4f Matrix[ovrEye_Count];
+  double SensorSampleTime;
+} ovrLayerEyeMatrix;
+
+typedef struct OVR_ALIGNAS(OVR_PTR_SIZE) {
+  ovrLayerHeader Header;
+  ovrTextureSwapChain ColorTexture;
+  ovrRecti Viewport;
+  ovrPosef QuadPoseCenter;
+  ovrVector2f QuadSize;
+} ovrLayerQuad;
+
+typedef union {
+  ovrLayerHeader Header;
+  ovrLayerEyeFov EyeFov;
+  ovrLayerQuad Quad;
+} ovrLayer_Union;
+
+
+typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainLength)(ovrSession session, ovrTextureSwapChain chain, int* out_Length);
+typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainCurrentIndex)(ovrSession session, ovrTextureSwapChain chain, int* out_Index);
+typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainDesc)(ovrSession session, ovrTextureSwapChain chain, ovrTextureSwapChainDesc* out_Desc);
+typedef ovrResult (OVR_PFN* pfn_ovr_CommitTextureSwapChain)(ovrSession session, ovrTextureSwapChain chain);
+typedef void (OVR_PFN* pfn_ovr_DestroyTextureSwapChain)(ovrSession session, ovrTextureSwapChain chain);
+typedef void (OVR_PFN* pfn_ovr_DestroyMirrorTexture)(ovrSession session, ovrMirrorTexture mirrorTexture);
+typedef ovrSizei(OVR_PFN* pfn_ovr_GetFovTextureSize)(ovrSession session, ovrEyeType eye, ovrFovPort fov, float pixelsPerDisplayPixel);
+typedef ovrEyeRenderDesc(OVR_PFN* pfn_ovr_GetRenderDesc)(ovrSession session, ovrEyeType eyeType, ovrFovPort fov);
+typedef ovrResult(OVR_PFN* pfn_ovr_SubmitFrame)(ovrSession session, unsigned int frameIndex,
+                                                const ovrViewScaleDesc* viewScaleDesc,
+                                                ovrLayerHeader const * const * layerPtrList, unsigned int layerCount);
+typedef double (OVR_PFN* pfn_ovr_GetPredictedDisplayTime)(ovrSession session, long long frameIndex);
+typedef double (OVR_PFN* pfn_ovr_GetTimeInSeconds)();
+
+
+typedef enum {
+  ovrPerfHud_Off = 0,
+  ovrPerfHud_PerfSummary = 1,
+  ovrPerfHud_LatencyTiming = 2,
+  ovrPerfHud_AppRenderTiming = 3,
+  ovrPerfHud_CompRenderTiming = 4,
+  ovrPerfHud_VersionInfo = 5,
+  ovrPerfHud_Count = 6,
+  ovrPerfHud_EnumSize = 0x7fffffff
+} ovrPerfHudMode;
+
+typedef enum {
+  ovrLayerHud_Off = 0,
+  ovrLayerHud_Info = 1,
+  ovrLayerHud_EnumSize = 0x7fffffff
+} ovrLayerHudMode;
+
+typedef enum {
+  ovrDebugHudStereo_Off = 0,
+  ovrDebugHudStereo_Quad = 1,
+  ovrDebugHudStereo_QuadWithCrosshair = 2,
+  ovrDebugHudStereo_CrosshairAtInfinity = 3,
+  ovrDebugHudStereo_Count,
+  ovrDebugHudStereo_EnumSize = 0x7fffffff
+} ovrDebugHudStereoMode;
+
+typedef ovrBool(OVR_PFN* pfn_ovr_GetBool)(ovrSession session, const char* propertyName, ovrBool defaultVal);
+typedef ovrBool(OVR_PFN* pfn_ovr_SetBool)(ovrSession session, const char* propertyName, ovrBool value);
+typedef int (OVR_PFN* pfn_ovr_GetInt)(ovrSession session, const char* propertyName, int defaultVal);
+typedef ovrBool (OVR_PFN* pfn_ovr_SetInt)(ovrSession session, const char* propertyName, int value);
+typedef float (OVR_PFN* pfn_ovr_GetFloat)(ovrSession session, const char* propertyName, float defaultVal);
+typedef ovrBool (OVR_PFN* pfn_ovr_SetFloat)(ovrSession session, const char* propertyName, float value);
+typedef unsigned int (OVR_PFN* pfn_ovr_GetFloatArray)(ovrSession session, const char* propertyName,
+                                                      float values[], unsigned int valuesCapacity);
+typedef ovrBool (OVR_PFN* pfn_ovr_SetFloatArray)(ovrSession session, const char* propertyName,
+                                                 const float values[], unsigned int valuesSize);
+typedef const char* (OVR_PFN* pfn_ovr_GetString)(ovrSession session, const char* propertyName,
+                                                 const char* defaultVal);
+typedef ovrBool (OVR_PFN* pfn_ovr_SetString)(ovrSession session, const char* propertyName,
+                                             const char* value);
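Taken together, the layer structures and the swap-chain/SubmitFrame entry points above describe the per-frame handshake with the Oculus compositor: commit the eye textures, describe them in an ovrLayerEyeFov, and hand a list of layer headers to ovr_SubmitFrame. A condensed sketch under the assumption that the function pointers are already resolved and that the per-eye viewports, FOV ports and poses are supplied by the caller; the helper name and its signature are illustrative:

// Both eyes are rendered into one shared swap chain and selected by viewport,
// which mirrors a common single-wide-texture layout.
static bool SubmitStereoFrame(pfn_ovr_CommitTextureSwapChain aCommit,
                              pfn_ovr_SubmitFrame aSubmit,
                              ovrSession aSession,
                              ovrTextureSwapChain aChain,
                              const ovrRecti aViewport[ovrEye_Count],
                              const ovrFovPort aFov[ovrEye_Count],
                              const ovrPosef aRenderPose[ovrEye_Count],
                              double aSensorSampleTime,
                              unsigned int aFrameIndex)
{
  if (aCommit(aSession, aChain) < 0) {
    return false;
  }

  ovrLayerEyeFov layer = {};
  layer.Header.Type = ovrLayerType_EyeFov;
  layer.Header.Flags = 0;   // e.g. ovrLayerFlag_TextureOriginAtBottomLeft for GL sources
  for (int eye = 0; eye < ovrEye_Count; ++eye) {
    layer.ColorTexture[eye] = aChain;
    layer.Viewport[eye] = aViewport[eye];
    layer.Fov[eye] = aFov[eye];
    layer.RenderPose[eye] = aRenderPose[eye];
  }
  layer.SensorSampleTime = aSensorSampleTime;

  const ovrLayerHeader* layers[1] = { &layer.Header };
  return aSubmit(aSession, aFrameIndex, nullptr, layers, 1) >= 0;
}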
+
+
+
+typedef enum {
+  ovrError_MemoryAllocationFailure = -1000,
+  ovrError_SocketCreationFailure = -1001,
+  ovrError_InvalidSession = -1002,
+  ovrError_Timeout = -1003,
+  ovrError_NotInitialized = -1004,
+  ovrError_InvalidParameter = -1005,
+  ovrError_ServiceError = -1006,
+  ovrError_NoHmd = -1007,
+  ovrError_Unsupported = -1009,
+  ovrError_DeviceUnavailable = -1010,
+  ovrError_InvalidHeadsetOrientation = -1011,
+  ovrError_ClientSkippedDestroy = -1012,
+  ovrError_ClientSkippedShutdown = -1013,
+  ovrError_AudioReservedBegin = -2000,
+  ovrError_AudioDeviceNotFound = -2001,
+  ovrError_AudioComError = -2002,
+  ovrError_AudioReservedEnd = -2999,
+  ovrError_Initialize = -3000,
+  ovrError_LibLoad = -3001,
+  ovrError_LibVersion = -3002,
+  ovrError_ServiceConnection = -3003,
+  ovrError_ServiceVersion = -3004,
+  ovrError_IncompatibleOS = -3005,
+  ovrError_DisplayInit = -3006,
+  ovrError_ServerStart = -3007,
+  ovrError_Reinitialization = -3008,
+  ovrError_MismatchedAdapters = -3009,
+  ovrError_LeakingResources = -3010,
+  ovrError_ClientVersion = -3011,
+  ovrError_OutOfDateOS = -3012,
+  ovrError_OutOfDateGfxDriver = -3013,
+  ovrError_IncompatibleGPU = -3014,
+  ovrError_NoValidVRDisplaySystem = -3015,
+  ovrError_Obsolete = -3016,
+  ovrError_DisabledOrDefaultAdapter = -3017,
+  ovrError_HybridGraphicsNotSupported = -3018,
+  ovrError_DisplayManagerInit = -3019,
+  ovrError_TrackerDriverInit = -3020,
+  ovrError_InvalidBundleAdjustment = -4000,
+  ovrError_USBBandwidth = -4001,
+  ovrError_USBEnumeratedSpeed = -4002,
+  ovrError_ImageSensorCommError = -4003,
+  ovrError_GeneralTrackerFailure = -4004,
+  ovrError_ExcessiveFrameTruncation = -4005,
+  ovrError_ExcessiveFrameSkipping = -4006,
+  ovrError_SyncDisconnected = -4007,
+  ovrError_TrackerMemoryReadFailure = -4008,
+  ovrError_TrackerMemoryWriteFailure = -4009,
+  ovrError_TrackerFrameTimeout = -4010,
+  ovrError_TrackerTruncatedFrame = -4011,
+  ovrError_TrackerDriverFailure = -4012,
+  ovrError_TrackerNRFFailure = -4013,
+  ovrError_HardwareGone = -4014,
+  ovrError_NordicEnabledNoSync = -4015,
+  ovrError_NordicSyncNoFrames = -4016,
+  ovrError_CatastrophicFailure = -4017,
+  ovrError_HMDFirmwareMismatch = -4100,
+  ovrError_TrackerFirmwareMismatch = -4101,
+  ovrError_BootloaderDeviceDetected = -4102,
+  ovrError_TrackerCalibrationError = -4103,
+  ovrError_ControllerFirmwareMismatch = -4104,
+  ovrError_IMUTooManyLostSamples = -4200,
+  ovrError_IMURateError = -4201,
+  ovrError_FeatureReportFailure = -4202,
+  ovrError_Incomplete = -5000,
+  ovrError_Abandoned = -5001,
+  ovrError_DisplayLost = -6000,
+  ovrError_TextureSwapChainFull = -6001,
+  ovrError_TextureSwapChainInvalid = -6002,
+  ovrError_RuntimeException = -7000,
+  ovrError_MetricsUnknownApp = -90000,
+  ovrError_MetricsDuplicateApp = -90001,
+  ovrError_MetricsNoEvents = -90002,
+  ovrError_MetricsRuntime = -90003,
+  ovrError_MetricsFile = -90004,
+  ovrError_MetricsNoClientInfo = -90005,
+  ovrError_MetricsNoAppMetaData = -90006,
+  ovrError_MetricsNoApp = -90007,
+  ovrError_MetricsOafFailure = -90008,
+  ovrError_MetricsSessionAlreadyActive = -90009,
+  ovrError_MetricsSessionNotActive = -90010,
+} ovrErrorType;
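The ovrErrorType enumeration above lists the negative ovrResult codes the runtime can return, and pfn_ovr_GetLastErrorInfo (declared earlier) retrieves a matching ovrErrorInfo with a readable message. A small sketch of how a caller might classify failures; the helper, its enum, and the recovery policy are illustrative assumptions, not part of this header:

// Illustrative failure classification; only types and error codes declared
// in this header are assumed.
enum class OculusFailure { None, RecreateSession, Fatal };

static OculusFailure ClassifyResult(ovrResult aResult,
                                    pfn_ovr_GetLastErrorInfo aGetLastErrorInfo)
{
  if (aResult >= 0) {           // non-negative ovrResult values are successes
    return OculusFailure::None;
  }

  ovrErrorInfo info = {};
  aGetLastErrorInfo(&info);     // info.ErrorString carries a description

  switch (aResult) {
    case ovrError_DisplayLost:              // compositor lost the HMD; rebuild state
    case ovrError_TextureSwapChainInvalid:  // swap chains must be recreated
      return OculusFailure::RecreateSession;
    default:
      return OculusFailure::Fatal;
  }
}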
+
+
+#ifdef XP_WIN
+
+struct IUnknown;
+
+typedef ovrResult (OVR_PFN* pfn_ovr_CreateTextureSwapChainDX)(ovrSession session,
+                                                              IUnknown* d3dPtr,
+                                                              const ovrTextureSwapChainDesc* desc,
+                                                              ovrTextureSwapChain* out_TextureSwapChain);
+
+typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainBufferDX)(ovrSession session,
+                                                                 ovrTextureSwapChain chain,
+                                                                 int index,
+                                                                 IID iid,
+                                                                 void** out_Buffer);
+
+typedef ovrResult (OVR_PFN* pfn_ovr_CreateMirrorTextureDX)(ovrSession session,
+                                                           IUnknown* d3dPtr,
+                                                           const ovrMirrorTextureDesc* desc,
+                                                           ovrMirrorTexture* out_MirrorTexture);
+
+typedef ovrResult (OVR_PFN* pfn_ovr_GetMirrorTextureBufferDX)(ovrSession session,
+                                                              ovrMirrorTexture mirrorTexture,
+                                                              IID iid,
+                                                              void** out_Buffer);
+
+#endif
+
+
+typedef ovrResult (OVR_PFN* pfn_ovr_CreateTextureSwapChainGL)(ovrSession session,
+                                                              const ovrTextureSwapChainDesc* desc,
+                                                              ovrTextureSwapChain* out_TextureSwapChain);
+
+typedef ovrResult (OVR_PFN* pfn_ovr_GetTextureSwapChainBufferGL)(ovrSession session,
+                                                                 ovrTextureSwapChain chain,
+                                                                 int index,
+                                                                 unsigned int* out_TexId);
+
+typedef ovrResult (OVR_PFN* pfn_ovr_CreateMirrorTextureGL)(ovrSession session,
+                                                           const ovrMirrorTextureDesc* desc,
+                                                           ovrMirrorTexture* out_MirrorTexture);
+
+typedef ovrResult (OVR_PFN* pfn_ovr_GetMirrorTextureBufferGL)(ovrSession session,
+                                                              ovrMirrorTexture mirrorTexture,
+                                                              unsigned int* out_TexId);
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif /* mozilla_ovr_capi_dynamic_h_ */
+#endif /* OVR_CAPI_h */
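The platform-specific entry points above create the texture swap chains that back ovrLayerEyeFov::ColorTexture, either from a D3D device or from the current GL context. A sketch of the OpenGL path, assuming resolved function pointers and a 2D texture-type enumerator (ovrTexture_2D) from the ovrTextureType enum earlier in the header; the helper name and the chosen format are illustrative:

#include <vector>

// Creates a swap chain and collects the GL texture names backing it.
// aCreate/aGetLength/aGetBuffer are the resolved pfn_* entry points above.
static bool CreateGLSwapChain(pfn_ovr_CreateTextureSwapChainGL aCreate,
                              pfn_ovr_GetTextureSwapChainLength aGetLength,
                              pfn_ovr_GetTextureSwapChainBufferGL aGetBuffer,
                              ovrSession aSession, int aWidth, int aHeight,
                              ovrTextureSwapChain* aChain,
                              std::vector<unsigned int>* aTexIds)
{
  ovrTextureSwapChainDesc desc = {};
  desc.Type = ovrTexture_2D;                  // assumed enumerator from earlier in this header
  desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
  desc.ArraySize = 1;
  desc.Width = aWidth;
  desc.Height = aHeight;
  desc.MipLevels = 1;
  desc.SampleCount = 1;
  desc.StaticImage = false;

  if (aCreate(aSession, &desc, aChain) < 0) {
    return false;
  }

  int length = 0;
  if (aGetLength(aSession, *aChain, &length) < 0) {
    return false;
  }
  for (int i = 0; i < length; ++i) {
    unsigned int texId = 0;
    if (aGetBuffer(aSession, *aChain, i, &texId) < 0) {
      return false;
    }
    aTexIds->push_back(texId);
  }
  return true;
}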