Merge pull request #1325 from ada-tv/openxr-input
Some checks are pending
Master API-docs CI Build and Deploy / Build and deploy API-docs (push) Waiting to run
Master Doxygen CI Build and Deploy / Build and deploy Doxygen documentation (push) Waiting to run

MVP OpenXR Support
This commit is contained in:
ksuprynowicz 2025-04-05 15:41:07 +02:00 committed by GitHub
commit 18b6e744a3
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
29 changed files with 2459 additions and 55 deletions

View file

@ -64,7 +64,7 @@ jobs:
runner: [self_hosted, type-cx52, image-x86-app-docker-ce]
arch: amd64
build_type: full
# apt-dependencies: # add missing dependencies to docker image when convenient
apt-dependencies: libxcb-glx0-dev # add missing dependencies to docker image when convenient
image: docker.io/overte/overte-full-build:0.1.6-ubuntu-20.04-amd64
# Android builds are currently failing
#- os: ubuntu-18.04
@ -75,8 +75,8 @@ jobs:
runner: [self_hosted, type-cax41, image-arm-app-docker-ce]
arch: aarch64
build_type: full
apt-dependencies: libxcb-glx0-dev # add missing dependencies to docker image when convenient
image: docker.io/overte/overte-full-build:0.1.6-ubuntu-22.04-aarch64
# apt-dependencies: # add missing dependencies to docker image when convenient
fail-fast: false
runs-on: ${{matrix.runner}}
container: ${{matrix.image}}
@ -190,6 +190,16 @@ jobs:
echo "Installing apt packages"
sudo apt install -y ${{ matrix.apt-dependencies }} || exit 1
echo "Adding Toolchain test PPA"
apt install -y software-properties-common
add-apt-repository ppa:ubuntu-toolchain-r/test
echo "Installing gcc-13"
apt install -y gcc-13 g++-13 || exit 1
# Set GCC 13 as default
update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-13 100 --slave /usr/bin/g++ g++ /usr/bin/g++-13 --slave /usr/bin/gcov gcov /usr/bin/gcov-13
else # macOS
echo "Downloading MacOSX10.12 SDK.."
curl --progress-bar -L -o macOS_SDK10.12.4.tar.xz "https://data.moto9000.moe/overte_packages/macOS_SDK10.12.4.tar.xz" || exit 1

View file

@ -1,4 +1,4 @@
Source: hifi-client-deps
Version: 0.1
Description: Collected dependencies for High Fidelity applications
Build-Depends: hifi-deps, aristo (windows), glslang, liblo (windows), nlohmann-json, openvr ((linux&!arm)|windows), quazip (!android), sdl2 (!android), spirv-cross (!android), spirv-tools (!android), sranipal (windows), vulkanmemoryallocator, discord-rpc (!android)
Build-Depends: hifi-deps, aristo (windows), glslang, liblo (windows), nlohmann-json, openvr ((linux&!arm)|windows), openxr-loader, quazip (!android), sdl2 (!android), spirv-cross (!android), spirv-tools (!android), sranipal (windows), vulkanmemoryallocator, discord-rpc (!android)

View file

@ -0,0 +1,34 @@
vcpkg_from_github(
OUT_SOURCE_PATH SOURCE_PATH
REPO open-source-parsers/jsoncpp
REF "${VERSION}"
SHA512 1d06e044759b1e1a4cc4960189dd7e001a0a4389d7239a6d59295af995a553518e4e0337b4b4b817e70da5d9731a4c98655af90791b6287870b5ff8d73ad8873
HEAD_REF master
)
string(COMPARE EQUAL "${VCPKG_LIBRARY_LINKAGE}" "static" JSONCPP_STATIC)
string(COMPARE EQUAL "${VCPKG_CRT_LINKAGE}" "static" STATIC_CRT)
vcpkg_cmake_configure(
SOURCE_PATH "${SOURCE_PATH}"
OPTIONS
-DJSONCPP_WITH_CMAKE_PACKAGE=ON
-DBUILD_STATIC_LIBS=${JSONCPP_STATIC}
-DJSONCPP_STATIC_WINDOWS_RUNTIME=${STATIC_CRT}
-DJSONCPP_WITH_PKGCONFIG_SUPPORT=ON
-DJSONCPP_WITH_POST_BUILD_UNITTEST=OFF
-DJSONCPP_WITH_TESTS=OFF
-DJSONCPP_WITH_EXAMPLE=OFF
-DBUILD_OBJECT_LIBS=OFF
)
vcpkg_cmake_install()
vcpkg_cmake_config_fixup(CONFIG_PATH lib/cmake/jsoncpp)
file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug/include")
vcpkg_copy_pdbs()
vcpkg_fixup_pkgconfig()
vcpkg_install_copyright(FILE_LIST "${SOURCE_PATH}/LICENSE")

View file

@ -0,0 +1,18 @@
{
"name": "jsoncpp",
"version": "1.9.5",
"port-version": 4,
"description": "JsonCpp is a C++ library that allows manipulating JSON values, including serialization and deserialization to and from strings. It can also preserve existing comment in unserialization/serialization steps, making it a convenient format to store user input files.",
"homepage": "https://github.com/open-source-parsers/jsoncpp",
"license": "MIT",
"dependencies": [
{
"name": "vcpkg-cmake",
"host": true
},
{
"name": "vcpkg-cmake-config",
"host": true
}
]
}

View file

@ -0,0 +1,30 @@
diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt
index c2e53cf..2c195de 100644
--- a/src/CMakeLists.txt
+++ b/src/CMakeLists.txt
@@ -122,7 +122,7 @@ if(NOT METAL_INCOMPATIBLE)
endif()
find_package(Threads REQUIRED)
-find_package(JsonCpp)
+find_package(jsoncpp CONFIG REQUIRED)
### All options defined here
option(BUILD_LOADER "Build loader" ON)
diff --git a/src/loader/CMakeLists.txt b/src/loader/CMakeLists.txt
index 28aff53..6ee58f4 100644
--- a/src/loader/CMakeLists.txt
+++ b/src/loader/CMakeLists.txt
@@ -101,7 +101,11 @@ endif()
# Get jsoncpp externally or internally
if(BUILD_WITH_SYSTEM_JSONCPP)
- target_link_libraries(openxr_loader PRIVATE JsonCpp::JsonCpp)
+ if(BUILD_SHARED_LIBS)
+ target_link_libraries(openxr_loader PRIVATE jsoncpp_lib)
+ else()
+ target_link_libraries(openxr_loader PRIVATE jsoncpp_static)
+ endif()
else()
if(NOT BUILD_LOADER_WITH_EXCEPTION_HANDLING)
target_compile_definitions(openxr_loader PRIVATE JSON_USE_EXCEPTION=0)

View file

@ -0,0 +1,60 @@
vcpkg_from_github(
OUT_SOURCE_PATH SOURCE_PATH
REPO KhronosGroup/OpenXR-SDK
REF "release-${VERSION}"
SHA512 f5f02857036d14c3894bee979bf108c4066ff5551393bc9bdde85dced5c5007148880c6174174dfe3b844e00baeb66106afbf18be069958128404d6a9bdc96ce
HEAD_REF master
PATCHES
fix-openxr-sdk-jsoncpp.patch
)
vcpkg_from_github(
OUT_SOURCE_PATH SDK_SOURCE_PATH
REPO KhronosGroup/OpenXR-SDK-Source
REF "release-${VERSION}"
SHA512 29155f5cd6104a479ce25ea090020001a01652ce42823ddad3e2569d7d2d513a0339c084d90acd3a00b220f7ba1cf68af1ac4b4c01f0a949aa9d919a1914d6c9
HEAD_REF master
PATCHES
fix-openxr-sdk-jsoncpp.patch
)
# Weird behavior inside the OpenXR loader. On Windows they force shared libraries to use static crt, and
# vice-versa. Might be better in future iterations to patch the CMakeLists.txt for OpenXR
if (VCPKG_TARGET_IS_UWP OR VCPKG_TARGET_IS_WINDOWS)
if(VCPKG_LIBRARY_LINKAGE STREQUAL static)
set(DYNAMIC_LOADER OFF)
set(VCPKG_CRT_LINKAGE dynamic)
else()
set(DYNAMIC_LOADER ON)
set(VCPKG_CRT_LINKAGE static)
endif()
endif()
vcpkg_find_acquire_program(PYTHON3)
vcpkg_cmake_configure(
SOURCE_PATH "${SOURCE_PATH}"
OPTIONS
-DBUILD_API_LAYERS=OFF
-DBUILD_TESTS=OFF
-DBUILD_CONFORMANCE_TESTS=OFF
-DDYNAMIC_LOADER=${DYNAMIC_LOADER}
-DPYTHON_EXECUTABLE="${PYTHON3}"
-DBUILD_WITH_SYSTEM_JSONCPP=ON
)
vcpkg_cmake_install()
if(VCPKG_TARGET_IS_WINDOWS)
vcpkg_cmake_config_fixup(PACKAGE_NAME OpenXR CONFIG_PATH cmake)
else()
vcpkg_cmake_config_fixup(PACKAGE_NAME OpenXR CONFIG_PATH lib/cmake/openxr)
endif()
file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug/include")
file(REMOVE_RECURSE "${CURRENT_PACKAGES_DIR}/debug/share")
vcpkg_fixup_pkgconfig()
vcpkg_copy_pdbs()
file(INSTALL "${SOURCE_PATH}/LICENSE" DESTINATION "${CURRENT_PACKAGES_DIR}/share/${PORT}" RENAME copyright)

View file

@ -0,0 +1,27 @@
{
"name": "openxr-loader",
"version": "1.1.46",
"description": "A royalty-free, open standard that provides high-performance access to Augmented Reality (AR) and Virtual Reality (VR)—collectively known as XR—platforms and devices",
"homepage": "https://github.com/KhronosGroup/OpenXR-SDK",
"license": "Apache-2.0",
"supports": "!uwp & !osx",
"dependencies": [
"jsoncpp",
{
"name": "vcpkg-cmake",
"host": true
},
{
"name": "vcpkg-cmake-config",
"host": true
}
],
"features": {
"vulkan": {
"description": "Vulkan functionality for OpenXR",
"dependencies": [
"vulkan"
]
}
}
}

View file

@ -0,0 +1,35 @@
{
"name": "OpenXR to Standard",
"channels": [
{ "from": "OpenXR.LeftHand", "to": "Standard.LeftHand" },
{ "from": "OpenXR.RightHand", "to": "Standard.RightHand" },
{ "from": "OpenXR.Head", "to" : "Standard.Head", "when" : [ "Application.InHMD"] },
{ "from": "OpenXR.LT", "to": "Standard.LT", "filters": [{"type": "deadZone", "min": 0.05}]},
{ "from": "OpenXR.RT", "to": "Standard.RT", "filters": [{"type": "deadZone", "min": 0.05}]},
{ "from": "OpenXR.LTClick", "to": "Standard.LTClick" },
{ "from": "OpenXR.RTClick", "to": "Standard.RTClick" },
{ "from": "OpenXR.LeftGrip", "to": "Standard.LeftGrip", "filters": [{ "type": "deadZone", "min": 0.05 }] },
{ "from": "OpenXR.RightGrip", "to": "Standard.RightGrip", "filters": [{ "type": "deadZone", "min": 0.05 }] },
{ "from": "OpenXR.LX", "to": "Standard.LX", "filters": [{ "type": "deadZone", "min": 0.05 }] },
{ "from": "OpenXR.LY", "to": "Standard.LY", "filters": [{ "type": "deadZone", "min": 0.05 }] },
{ "from": "OpenXR.LX", "to": "Actions.TranslateX", "peek": true, "filters": [{ "type": "deadZone", "min": 0.05 }] },
{ "from": "OpenXR.LY", "to": "Actions.TranslateZ", "peek": true, "filters": [{ "type": "deadZone", "min": 0.05 }] },
{ "from": "OpenXR.RX", "to": "Standard.RX"},
{ "from": "OpenXR.RY", "to": "Standard.RY" },
{ "from": "OpenXR.LS", "to": "Standard.LS" },
{ "from": "OpenXR.RS", "to": "Standard.RS" },
{ "from": "OpenXR.LSTouch", "to": "Standard.LSTouch" },
{ "from": "OpenXR.RSTouch", "to": "Standard.RSTouch" },
{ "from": "OpenXR.LeftPrimaryThumb", "to": "Standard.LeftPrimaryThumb" },
{ "from": "OpenXR.RightPrimaryThumb", "to": "Standard.RightPrimaryThumb" },
{ "from": "OpenXR.LeftSecondaryThumb", "to": "Standard.LeftSecondaryThumb" },
{ "from": "OpenXR.RightSecondaryThumb", "to": "Standard.RightSecondaryThumb" }
]
}

View file

@ -2373,7 +2373,6 @@ void Application::update(float deltaTime) {
AnimDebugDraw::getInstance().update();
}
{ // Game loop is done, mark the end of the frame for the scene transactions and the render loop to take over
PerformanceTimer perfTimer("enqueueFrame");
getMain3DScene()->enqueueFrame();

View file

@ -522,51 +522,41 @@ void Application::updateRenderArgs(float deltaTime) {
appRenderArgs._eyeToWorld = _myCamera.getTransform();
appRenderArgs._isStereo = false;
{
if (getActiveDisplayPlugin()->isStereo()) {
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
float ipdScale = hmdInterface->getIPDScale();
// scale IPD by sensorToWorldScale, to make the world seem larger or smaller accordingly.
ipdScale *= sensorToWorldScale;
float ipdScale = hmdInterface->getIPDScale() * sensorToWorldScale;
auto baseProjection = appRenderArgs._renderArgs.getViewFrustum().getProjection();
if (getActiveDisplayPlugin()->isStereo()) {
// Stereo modes will typically have a larger projection matrix overall,
// so we ask for the 'mono' projection matrix, which for stereo and HMD
// plugins will imply the combined projection for both eyes.
//
// This is properly implemented for the Oculus plugins, but for OpenVR
// and Stereo displays I'm not sure how to get / calculate it, so we're
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(getActiveDisplayPlugin()->getCullingProjection(baseProjection));
appRenderArgs._isStereo = true;
// Stereo modes will typically have a larger projection matrix overall,
// so we ask for the 'mono' projection matrix, which for stereo and HMD
// plugins will imply the combined projection for both eyes.
//
// This is properly implemented for the Oculus plugins, but for OpenVR
// and Stereo displays I'm not sure how to get / calculate it, so we're
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(getActiveDisplayPlugin()->getCullingProjection(baseProjection));
appRenderArgs._isStereo = true;
auto& eyeOffsets = appRenderArgs._eyeOffsets;
auto& eyeProjections = appRenderArgs._eyeProjections;
auto& eyeOffsets = appRenderArgs._eyeOffsets;
auto& eyeProjections = appRenderArgs._eyeProjections;
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can.
for_each_eye([&](Eye eye) {
// For providing the stereo eye views, the HMD head pose has already been
// applied to the avatar, so we need to get the difference between the head
// pose applied to the avatar and the per eye pose, and use THAT as
// the per-eye stereo matrix adjustment.
mat4 eyeToHead = getActiveDisplayPlugin()->getEyeToHeadTransform(eye);
// Grab the translation
vec3 eyeOffset = glm::vec3(eyeToHead[3]);
// Apply IPD scaling
mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * ipdScale);
eyeOffsets[eye] = eyeOffsetTransform;
eyeProjections[eye] = getActiveDisplayPlugin()->getEyeProjection(eye, baseProjection);
});
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can.
for_each_eye([&](Eye eye) {
// Grab the translation
eyeOffsets[eye] = getActiveDisplayPlugin()->getEyeToHeadTransform(eye);
// Apply IPD scaling
eyeOffsets[eye][3][0] *= ipdScale;
eyeProjections[eye] = getActiveDisplayPlugin()->getEyeProjection(eye, baseProjection);
});
// Configure the type of display / stereo
appRenderArgs._renderArgs._displayMode = (isHMDMode() ? RenderArgs::STEREO_HMD : RenderArgs::STEREO_MONITOR);
}
// Configure the type of display / stereo
appRenderArgs._renderArgs._displayMode = (isHMDMode() ? RenderArgs::STEREO_HMD : RenderArgs::STEREO_MONITOR);
}
appRenderArgs._renderArgs._stencilMaskMode = getActiveDisplayPlugin()->getStencilMaskMode();

View file

@ -73,6 +73,18 @@ void Application::initializePluginManager(const QCommandLineParser& parser) {
qInfo() << "Disabling following input plugins:" << disabledInputs;
PluginManager::getInstance()->disableInputs(disabledInputs);
}
if (parser.isSet("useExperimentalXR")) {
auto pluginNames = QStringList();
pluginNames.push_back("OpenVR (Vive)");
PluginManager::getInstance()->disableDisplays(pluginNames);
PluginManager::getInstance()->disableInputs(pluginNames);
} else {
auto pluginNames = QStringList();
pluginNames.push_back("OpenXR");
PluginManager::getInstance()->disableDisplays(pluginNames);
PluginManager::getInstance()->disableInputs(pluginNames);
}
}
void Application::shutdownPlugins() {}

View file

@ -286,6 +286,10 @@ int main(int argc, const char* argv[]) {
"getProtocolVersionData",
"Debug option. Returns the network protocol detailed data in JSON."
);
QCommandLineOption useExperimentalXR(
"useExperimentalXR",
"Enables the experimental OpenXR plugin and disables the OpenVR plugin. Some features available in OpenVR aren't yet available in OpenXR."
);
// "--qmljsdebugger", which appears in output from "--help-all".
// Those below don't seem to be optional.
@ -335,6 +339,7 @@ int main(int argc, const char* argv[]) {
parser.addOption(getPluginsOption);
parser.addOption(getProtocolVersionHashOption);
parser.addOption(getProtocolVersionDataOption);
parser.addOption(useExperimentalXR);
QString applicationPath;

View file

@ -1195,9 +1195,9 @@ Mapping::Pointer UserInputMapper::parseMapping(const QString& json) {
QJsonDocument doc = QJsonDocument::fromJson(json.toUtf8(), &error);
// check validity of the document
if (doc.isNull()) {
qCDebug(controllers) << "Invalid JSON...\n";
qCDebug(controllers) << error.errorString();
qCDebug(controllers) << "JSON was:\n" << json << Qt::endl;
qCCritical(controllers) << "Invalid JSON...\n";
qCCritical(controllers) << error.errorString();
qCCritical(controllers) << "JSON was:\n" << json << Qt::endl;
return Mapping::Pointer();
}

View file

@ -745,6 +745,13 @@ void OpenGLDisplayPlugin::present(const std::shared_ptr<RefreshRateController>&
}
gpu::Backend::freeGPUMemSize.set(gpu::gl::getFreeDedicatedMemory());
// Drop current frame after presenting it once.
// This is required for the OpenXR frame cycle, since we call xrEndFrame after presenting.
// xrEndFrame must not be called multiple times.
if (_presentOnlyOnce) {
_currentFrame.reset();
}
} else if (alwaysPresent()) {
refreshRateController->clockEndTime();
internalPresent();

View file

@ -205,6 +205,8 @@ protected:
QImage getScreenshot(float aspectRatio);
QImage getSecondaryCameraScreenshot();
bool _presentOnlyOnce = false;
private:
static Setting::Handle<bool> _extraLinearToSRGBConversionSetting;
};

View file

@ -56,16 +56,16 @@ QRect HmdDisplayPlugin::getRecommendedHUDRect() const {
return CompositorHelper::VIRTUAL_SCREEN_RECOMMENDED_OVERLAY_RECT;
}
glm::mat4 HmdDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return _eyeOffsets[eye];
glm::mat4 HmdDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return _eyeOffsets[eye];
}
glm::mat4 HmdDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
glm::mat4 HmdDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
glm::mat4 HmdDisplayPlugin::getCullingProjection(const glm::mat4& baseProjection) const {
return _cullingProjection;
glm::mat4 HmdDisplayPlugin::getCullingProjection(const glm::mat4& baseProjection) const {
return _cullingProjection;
}
glm::ivec4 HmdDisplayPlugin::eyeViewport(Eye eye) const {

View file

@ -50,12 +50,19 @@ Backend::TransformCamera Backend::TransformCamera::getEyeCamera(int eye,
TransformCamera result = *this;
Transform eyeView = view;
Transform eyePreviousView = previousView;
glm::vec3 eyePosition = extractTranslation(stereo._eyeViews[eye]);
glm::quat eyeOrientation = glmExtractRotation(stereo._eyeViews[eye]);
glm::vec3 eyePreviousPosition = extractTranslation(prevStereo._eyeViews[eye]);
glm::quat eyePreviousOrientation = glmExtractRotation(prevStereo._eyeViews[eye]);
if (!stereo._skybox) {
eyeView.postTranslate(-Vec3(stereo._eyeViews[eye][3]));
eyePreviousView.postTranslate(-Vec3(prevStereo._eyeViews[eye][3]));
eyeView.postRotate(eyeOrientation).postTranslate(eyePosition);
eyePreviousView.postRotate(eyePreviousOrientation).postTranslate(eyePreviousPosition);
} else {
// FIXME: If "skybox" the ipd is set to 0 for now, let s try to propose a better solution for this in the future
eyePreviousView.setTranslation(vec3());
// XRTODO: maybe this is responsible for reprojection-like stutters?
//eyePreviousView.setTranslation(vec3());
eyeView.postRotate(eyeOrientation);
eyePreviousView.postRotate(eyePreviousOrientation);
}
result._projection = stereo._eyeProjections[eye];
Mat4 previousProjection = prevStereo._eyeProjections[eye];

View file

@ -212,9 +212,11 @@ T toNormalizedDeviceScale(const T& value, const T& size) {
#define ROLL(euler) euler.z
// float - linear interpolate
#if !defined(DONT_REDEFINE_LERP)
inline float lerp(float x, float y, float a) {
return x * (1.0f - a) + (y * a);
}
#endif
// vec2 lerp - linear interpolate
template<typename T, glm::precision P>

View file

@ -267,11 +267,11 @@ private:
CameraMode _mode{ CAMERA_MODE_LOOK_AT };
glm::mat4 _transform;
glm::mat4 _projection;
glm::mat4 _projection = glm::mat4(1.f);
// derived
glm::vec3 _position { 0.0f, 0.0f, 0.0f };
glm::quat _orientation;
glm::quat _orientation { 1.f, 0.f, 0.f, 0.f };
bool _isKeepLookingAt{ false };
glm::vec3 _lookingAt;

View file

@ -26,6 +26,9 @@ if (NOT SERVER_ONLY AND NOT ANDROID)
add_subdirectory(${DIR})
endif()
set(DIR "openxr")
add_subdirectory(${DIR})
set(DIR "hifiSdl2")
add_subdirectory(${DIR})

View file

@ -0,0 +1,31 @@
#
# Copyright 2024 Lubosz Sarnecki
# Copyright 2024 Overte e.V.
#
# SPDX-License-Identifier: Apache-2.0
#
find_package(OpenXR REQUIRED)
if (NOT OpenXR_FOUND)
MESSAGE(FATAL_ERROR "OpenXR not found!")
endif()
set(TARGET_NAME openxr)
setup_hifi_plugin(Gui Qml Multimedia)
link_hifi_libraries(shared task gl qml networking controllers ui
plugins display-plugins ui-plugins input-plugins
audio-client render-utils graphics shaders gpu render
material-networking model-networking model-baker hfm
model-serializers ktx image procedural ${PLATFORM_GL_BACKEND} OpenXR::openxr_loader)
include_hifi_library_headers(octree)
include_hifi_library_headers(script-engine)
if(CMAKE_CXX_COMPILER_ID MATCHES "GNU")
# Silence GCC warnings
target_compile_options(openxr PRIVATE -Wno-missing-field-initializers)
# Fix build issue where lerp is already defined on C++20 / GCC
target_compile_definitions(openxr PRIVATE -DDONT_REDEFINE_LERP)
endif()
set_property(TARGET openxr PROPERTY CXX_STANDARD 20)

View file

@ -0,0 +1,446 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#include "OpenXrContext.h"
#include <qloggingcategory.h>
#include <QString>
#include <QGuiApplication>
#include <sstream>
Q_DECLARE_LOGGING_CATEGORY(xr_context_cat)
Q_LOGGING_CATEGORY(xr_context_cat, "openxr.context")
// Checks XrResult, returns false on errors and logs the error as qCritical.
// Checks an XrResult: returns true on success; otherwise logs the failure as
// qCritical (with a human-readable result name when an instance is available)
// and returns false.
bool xrCheck(XrInstance instance, XrResult result, const char* message) {
    if (XR_SUCCEEDED(result))
        return true;

    // xrResultToString needs a valid instance; before one exists we can only
    // print the raw numeric result code.
    char errorName[XR_MAX_RESULT_STRING_SIZE];
    if (instance != XR_NULL_HANDLE) {
        xrResultToString(instance, result, errorName);
    } else {
        // snprintf instead of sprintf: bounds the write to the buffer size.
        snprintf(errorName, sizeof(errorName), "%d", result);
    }

    qCCritical(xr_context_cat, "%s: %s", errorName, message);

    return false;
}
// Extension functions must be loaded with xrGetInstanceProcAddr
static PFN_xrGetOpenGLGraphicsRequirementsKHR pfnGetOpenGLGraphicsRequirementsKHR = nullptr;
// Resolves an OpenXR extension function by name via xrGetInstanceProcAddr,
// logging a critical error when the runtime does not provide it.
static bool loadXrFunction(XrInstance instance, const char* name, PFN_xrVoidFunction* out) {
    XrResult loadResult = xrGetInstanceProcAddr(instance, name, out);
    if (loadResult == XR_SUCCESS) {
        return true;
    }
    qCCritical(xr_context_cat) << "Failed to load OpenXR function '" << name << "'";
    return false;
}
// Runs the pre-graphics OpenXR setup (instance + system discovery) and records
// whether an OpenXR runtime is usable on this machine.
OpenXrContext::OpenXrContext() {
    bool supported = initPreGraphics();
    _isSupported = supported;
    if (!supported) {
        qCWarning(xr_context_cat, "OpenXR is not supported.");
    }
}
// Destroys the XrInstance if one was ever created.
OpenXrContext::~OpenXrContext() {
    // Nothing to tear down if instance creation never succeeded.
    if (_instance == XR_NULL_HANDLE) {
        return;
    }
    if (xrDestroyInstance(_instance) != XR_SUCCESS) {
        qCCritical(xr_context_cat, "Failed to destroy OpenXR instance");
    }
    qCDebug(xr_context_cat, "Destroyed instance.");
}
// Creates the OpenXR instance: enumerates runtime extensions, requires the
// OpenGL enable extension, opportunistically enables the user-presence
// extension, loads the OpenGL-requirements entry point, and caches
// frequently-used semantic paths. Returns false (with logging) on failure.
bool OpenXrContext::initInstance() {
    // The graphics binding below is GLX/Xlib; Wayland sessions must be forced
    // to XCB before this plugin can work.
    if (static_cast<QGuiApplication*>(qApp)->platformName() == "wayland") {
        qCCritical(xr_context_cat, "The OpenXR plugin does not support Wayland yet! Use the QT_QPA_PLATFORM=xcb environment variable to force Overte to launch with X11.");
        return false;
    }
    uint32_t count = 0;
    XrResult result = xrEnumerateInstanceExtensionProperties(nullptr, 0, &count, nullptr);
    // Since this is the first OpenXR call we do, check here if RUNTIME_UNAVAILABLE is returned.
    if (result == XR_ERROR_RUNTIME_UNAVAILABLE) {
        qCCritical(xr_context_cat, "XR_ERROR_RUNTIME_UNAVAILABLE: Is XR_RUNTIME_JSON set correctly?");
        return false;
    }
    if (!xrCheck(XR_NULL_HANDLE, result, "Failed to enumerate number of extensions."))
        return false;
    // Two-call enumeration: each element's .type must be set before the
    // second call fills the array.
    std::vector<XrExtensionProperties> properties;
    for (uint32_t i = 0; i < count; i++) {
        XrExtensionProperties props = { .type = XR_TYPE_EXTENSION_PROPERTIES };
        properties.push_back(props);
    }
    result = xrEnumerateInstanceExtensionProperties(nullptr, count, &count, properties.data());
    if (!xrCheck(XR_NULL_HANDLE, result, "Failed to enumerate extensions."))
        return false;
    bool openglSupported = false;
    bool userPresenceSupported = false;
    qCInfo(xr_context_cat, "Runtime supports %d extensions:", count);
    for (uint32_t i = 0; i < count; i++) {
        qCInfo(xr_context_cat, "%s v%d", properties[i].extensionName, properties[i].extensionVersion);
        if (strcmp(XR_KHR_OPENGL_ENABLE_EXTENSION_NAME, properties[i].extensionName) == 0) {
            openglSupported = true;
        } else if (strcmp(XR_EXT_USER_PRESENCE_EXTENSION_NAME, properties[i].extensionName) == 0) {
            userPresenceSupported = true;
        }
    }
    if (!openglSupported) {
        qCCritical(xr_context_cat, "Runtime does not support OpenGL!");
        return false;
    }
    // OpenGL support is mandatory; user presence is enabled only if offered.
    std::vector<const char*> enabled = {XR_KHR_OPENGL_ENABLE_EXTENSION_NAME};
    if (userPresenceSupported) {
        enabled.push_back(XR_EXT_USER_PRESENCE_EXTENSION_NAME);
    }
    XrInstanceCreateInfo info = {
        .type = XR_TYPE_INSTANCE_CREATE_INFO,
        .applicationInfo = {
            .applicationName = "Overte",
            .applicationVersion = 1,
            .engineName = "Overte",
            .engineVersion = 0,
            .apiVersion = XR_API_VERSION_1_0,
        },
        .enabledExtensionCount = (uint32_t)enabled.size(),
        .enabledExtensionNames = enabled.data(),
    };
    result = xrCreateInstance(&info, &_instance);
    if (result == XR_ERROR_RUNTIME_FAILURE) {
        qCCritical(xr_context_cat, "XR_ERROR_RUNTIME_FAILURE: Is the OpenXR runtime up and running?");
        return false;
    }
    if (!xrCheck(XR_NULL_HANDLE, result, "Failed to create OpenXR instance."))
        return false;
    if (!loadXrFunction(_instance, "xrGetOpenGLGraphicsRequirementsKHR", (PFN_xrVoidFunction*)&pfnGetOpenGLGraphicsRequirementsKHR)) {
        qCCritical(xr_context_cat) << "Failed to get OpenGL graphics requirements function!";
        return false;
    }
    // Cache semantic paths used by input handling and profile detection.
    xrStringToPath(_instance, "/user/hand/left", &_handPaths[0]);
    xrStringToPath(_instance, "/user/hand/right", &_handPaths[1]);
    xrStringToPath(_instance, "/interaction_profiles/htc/vive_controller", &_viveControllerPath);
    if (userPresenceSupported) {
        XrSystemUserPresencePropertiesEXT presenceProps = {XR_TYPE_SYSTEM_USER_PRESENCE_PROPERTIES_EXT};
        XrSystemProperties sysProps = {XR_TYPE_SYSTEM_PROPERTIES, &presenceProps};
        // NOTE(review): _systemId is assigned in initSystem(), which
        // initPreGraphics() calls AFTER this function — this query appears to
        // run with an unset system id. Confirm the intended ordering.
        result = xrGetSystemProperties(_instance, _systemId, &sysProps);
        if (xrCheck(XR_NULL_HANDLE, result, "Couldn't get system properties")) {
            _userPresenceAvailable = presenceProps.supportsUserPresence;
        }
    }
    return true;
}
bool OpenXrContext::initSystem() {
XrSystemGetInfo info = {
.type = XR_TYPE_SYSTEM_GET_INFO,
.formFactor = XR_FORM_FACTOR_HEAD_MOUNTED_DISPLAY,
};
XrResult result = xrGetSystem(_instance, &info, &_systemId);
if (!xrCheck(_instance, result, "Failed to get system for HMD form factor."))
return false;
XrSystemProperties props = {
.type = XR_TYPE_SYSTEM_PROPERTIES,
};
result = xrGetSystemProperties(_instance, _systemId, &props);
if (!xrCheck(_instance, result, "Failed to get System properties"))
return false;
_systemName = QString::fromUtf8(props.systemName);
qCInfo(xr_context_cat, "System name : %s", props.systemName);
qCInfo(xr_context_cat, "Max layers : %d", props.graphicsProperties.maxLayerCount);
qCInfo(xr_context_cat, "Max swapchain size : %dx%d", props.graphicsProperties.maxSwapchainImageHeight,
props.graphicsProperties.maxSwapchainImageWidth);
qCInfo(xr_context_cat, "Orientation Tracking: %d", props.trackingProperties.orientationTracking);
qCInfo(xr_context_cat, "Position Tracking : %d", props.trackingProperties.positionTracking);
return true;
}
bool OpenXrContext::initGraphics() {
XrGraphicsRequirementsOpenGLKHR requirements = { .type = XR_TYPE_GRAPHICS_REQUIREMENTS_OPENGL_KHR };
XrResult result = pfnGetOpenGLGraphicsRequirementsKHR(_instance, _systemId, &requirements);
return xrCheck(_instance, result, "Failed to get OpenGL graphics requirements!");
}
bool OpenXrContext::requestExitSession() {
XrResult result = xrRequestExitSession(_session);
return xrCheck(_instance, result, "Failed to request exit session!");
}
// Creates the XrSession with a platform-specific OpenGL graphics binding.
// Must be called with the GL context current on this thread, after
// initGraphics() has queried the runtime's OpenGL requirements.
bool OpenXrContext::initSession() {
#if defined(Q_OS_LINUX)
    // GLX binding: reuse the currently-bound drawable and context.
    // NOTE(review): XOpenDisplay(nullptr) opens a fresh X connection that is
    // never XCloseDisplay'd here — confirm its intended lifetime.
    XrGraphicsBindingOpenGLXlibKHR binding = {
        .type = XR_TYPE_GRAPHICS_BINDING_OPENGL_XLIB_KHR,
        .xDisplay = XOpenDisplay(nullptr),
        .glxDrawable = glXGetCurrentDrawable(),
        .glxContext = glXGetCurrentContext(),
    };
#elif defined(Q_OS_WIN)
    // WGL binding: reuse the current device and render contexts.
    XrGraphicsBindingOpenGLWin32KHR binding = {
        .type = XR_TYPE_GRAPHICS_BINDING_OPENGL_WIN32_KHR,
        .hDC = wglGetCurrentDC(),
        .hGLRC = wglGetCurrentContext(),
    };
#else
#error "Unsupported platform"
#endif
    XrSessionCreateInfo info = {
        .type = XR_TYPE_SESSION_CREATE_INFO,
        .next = &binding,
        .systemId = _systemId,
    };
    XrResult result = xrCreateSession(_instance, &info, &_session);
    return xrCheck(_instance, result, "Failed to create session");
}
bool OpenXrContext::initSpaces() {
// TODO: Do xrEnumerateReferenceSpaces before assuming stage space is available.
XrReferenceSpaceCreateInfo stageSpaceInfo = {
.type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO,
.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_STAGE,
.poseInReferenceSpace = XR_INDENTITY_POSE,
};
XrResult result = xrCreateReferenceSpace(_session, &stageSpaceInfo, &_stageSpace);
if (!xrCheck(_instance, result, "Failed to create stage space!"))
return false;
XrReferenceSpaceCreateInfo viewSpaceInfo = {
.type = XR_TYPE_REFERENCE_SPACE_CREATE_INFO,
.referenceSpaceType = XR_REFERENCE_SPACE_TYPE_VIEW,
.poseInReferenceSpace = XR_INDENTITY_POSE,
};
result = xrCreateReferenceSpace(_session, &viewSpaceInfo, &_viewSpace);
return xrCheck(_instance, result, "Failed to create view space!");
}
// Expands to a switch case that returns the enumerator's name as a string.
#define ENUM_TO_STR(r) \
    case r:            \
        return #r

// Returns a human-readable name for an XrSessionState, or
// "UNKNOWN STATE <n>" for values this build does not know about.
static std::string xrSessionStateStr(XrSessionState state) {
    switch (state) {
        ENUM_TO_STR(XR_SESSION_STATE_UNKNOWN);
        ENUM_TO_STR(XR_SESSION_STATE_IDLE);
        ENUM_TO_STR(XR_SESSION_STATE_READY);
        ENUM_TO_STR(XR_SESSION_STATE_SYNCHRONIZED);
        ENUM_TO_STR(XR_SESSION_STATE_VISIBLE);
        ENUM_TO_STR(XR_SESSION_STATE_FOCUSED);
        ENUM_TO_STR(XR_SESSION_STATE_STOPPING);
        ENUM_TO_STR(XR_SESSION_STATE_LOSS_PENDING);
        ENUM_TO_STR(XR_SESSION_STATE_EXITING);
        default: {
            std::ostringstream ss;
            ss << "UNKNOWN STATE " << state;
            return ss.str();
        }
    }
}
// Called before restarting a new session: clears the quit flag and forgets
// the last observed session state.
void OpenXrContext::reset() {
    _lastSessionState = XR_SESSION_STATE_UNKNOWN;
    _shouldQuit = false;
}
// Reacts to an XrSessionState transition: begins, ends, or destroys the
// session and updates the frame-cycle / quit flags accordingly. Returns
// false only when a required runtime call fails.
bool OpenXrContext::updateSessionState(XrSessionState newState) {
    qCDebug(xr_context_cat, "Session state changed %s -> %s", xrSessionStateStr(_lastSessionState).c_str(),
            xrSessionStateStr(newState).c_str());
    _lastSessionState = newState;
    switch (newState) {
        // Don't run frame cycle but keep polling events
        case XR_SESSION_STATE_IDLE:
        case XR_SESSION_STATE_UNKNOWN: {
            _shouldRunFrameCycle = false;
            break;
        }
        // Run frame cycle and poll events
        case XR_SESSION_STATE_FOCUSED:
        case XR_SESSION_STATE_SYNCHRONIZED:
        case XR_SESSION_STATE_VISIBLE: {
            _shouldRunFrameCycle = true;
            break;
        }
        // Begin the session
        case XR_SESSION_STATE_READY: {
            if (!_isSessionRunning) {
                XrSessionBeginInfo session_begin_info = {
                    .type = XR_TYPE_SESSION_BEGIN_INFO,
                    .primaryViewConfigurationType = XR_VIEW_CONFIG_TYPE,
                };
                XrResult result = xrBeginSession(_session, &session_begin_info);
                if (!xrCheck(_instance, result, "Failed to begin session!"))
                    return false;
                qCDebug(xr_context_cat, "Session started!");
                _isSessionRunning = true;
            }
            _shouldRunFrameCycle = true;
            break;
        }
        // End the session, don't render, but keep polling for events
        case XR_SESSION_STATE_STOPPING: {
            if (_isSessionRunning) {
                XrResult result = xrEndSession(_session);
                if (!xrCheck(_instance, result, "Failed to end session!"))
                    return false;
                _isSessionRunning = false;
            }
            _shouldRunFrameCycle = false;
            break;
        }
        // Destroy session, skip run frame cycle, quit
        case XR_SESSION_STATE_LOSS_PENDING:
        case XR_SESSION_STATE_EXITING: {
            // NOTE(review): _isSessionRunning is left unchanged even though the
            // session handle is destroyed here — confirm reset() covers restart.
            XrResult result = xrDestroySession(_session);
            if (!xrCheck(_instance, result, "Failed to destroy session!"))
                return false;
            _shouldQuit = true;
            _shouldRunFrameCycle = false;
            qCDebug(xr_context_cat, "Destroyed session");
            break;
        }
        default:
            qCWarning(xr_context_cat, "Unhandled session state: %d", newState);
    }
    return true;
}
// Drains all pending runtime events and dispatches them: instance loss,
// session-state changes, interaction-profile changes, and (when the
// extension is enabled) user-presence changes.
// Returns false if polling itself fails or a state transition fails.
bool OpenXrContext::pollEvents() {
    XrEventDataBuffer event = { .type = XR_TYPE_EVENT_DATA_BUFFER };
    XrResult result = xrPollEvent(_instance, &event);
    while (result == XR_SUCCESS) {
        switch (event.type) {
            case XR_TYPE_EVENT_DATA_INSTANCE_LOSS_PENDING: {
                const auto& instanceLossPending = *reinterpret_cast<XrEventDataInstanceLossPending*>(&event);
                // XrTime is int64_t — cast to long long and use %lld so the
                // format is correct where long is 32 bits (e.g. Windows).
                qCCritical(xr_context_cat, "Instance loss pending at %lld! Destroying instance.",
                           static_cast<long long>(instanceLossPending.lossTime));
                _shouldQuit = true;
                // 'break' (not 'continue') so we reach the re-poll at the
                // bottom of the loop; 'continue' would re-enter the loop with
                // the same stale event buffer and spin forever.
                break;
            }
            case XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED: {
                const auto& sessionStateChanged = *reinterpret_cast<XrEventDataSessionStateChanged*>(&event);
                if (!updateSessionState(sessionStateChanged.state)) {
                    return false;
                }
                break;
            }
            case XR_TYPE_EVENT_DATA_INTERACTION_PROFILE_CHANGED: {
                for (int i = 0; i < HAND_COUNT; i++) {
                    XrInteractionProfileState state = { .type = XR_TYPE_INTERACTION_PROFILE_STATE };
                    XrResult res = xrGetCurrentInteractionProfile(_session, _handPaths[i], &state);
                    if (!xrCheck(_instance, res, "Failed to get interaction profile"))
                        continue;
                    // Vive wands report the trackpad profile; enable thumbstick emulation for them.
                    _stickEmulation = false;
                    if (_viveControllerPath != XR_NULL_PATH && state.interactionProfile == _viveControllerPath) {
                        _stickEmulation = true;
                    }
                    uint32_t bufferCountOutput;
                    char profilePath[XR_MAX_PATH_LENGTH];
                    res = xrPathToString(_instance, state.interactionProfile, XR_MAX_PATH_LENGTH, &bufferCountOutput,
                                         profilePath);
                    if (!xrCheck(_instance, res, "Failed to get interaction profile path."))
                        continue;
                    qCInfo(xr_context_cat, "Controller %d: Interaction profile changed to '%s'", i, profilePath);
                }
                break;
            }
            case XR_TYPE_EVENT_DATA_USER_PRESENCE_CHANGED_EXT: {
                const auto& eventdata = *reinterpret_cast<XrEventDataUserPresenceChangedEXT*>(&event);
                _hmdMounted = eventdata.isUserPresent;
                break;
            }
            default:
                qCWarning(xr_context_cat, "Unhandled event type %d", event.type);
        }
        // Reset the buffer header before polling for the next event.
        event.type = XR_TYPE_EVENT_DATA_BUFFER;
        result = xrPollEvent(_instance, &event);
    }
    // XR_EVENT_UNAVAILABLE is the normal "queue drained" result.
    if (result != XR_EVENT_UNAVAILABLE) {
        qCCritical(xr_context_cat, "Failed to poll events!");
        return false;
    }
    return true;
}
bool OpenXrContext::beginFrame() {
XrFrameBeginInfo info = { .type = XR_TYPE_FRAME_BEGIN_INFO };
XrResult result = xrBeginFrame(_session, &info);
return xrCheck(_instance, result, "failed to begin frame!");
}
// Context setup that must happen before a GL context exists:
// instance creation followed by system (HMD) selection.
bool OpenXrContext::initPreGraphics() {
    return initInstance() && initSystem();
}
// Context setup that requires a live GL context:
// graphics binding, then session creation, then reference spaces.
bool OpenXrContext::initPostGraphics() {
    return initGraphics() && initSession() && initSpaces();
}

View file

@ -0,0 +1,116 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <optional>
#include <openxr/openxr.h>
#include "gpu/gl/GLBackend.h"
#if defined(Q_OS_LINUX)
#define XR_USE_PLATFORM_XLIB
#include <GL/glx.h>
// Unsorted from glx.h conflicts with qdir.h
#undef Unsorted
// MappingPointer from X11 conflicts with one from controllers/Forward.h
#undef MappingPointer
#elif defined(Q_OS_WIN)
#define XR_USE_PLATFORM_WIN32
#include <Unknwn.h>
#include <Windows.h>
#else
#error "Unimplemented platform"
#endif
#define XR_USE_GRAPHICS_API_OPENGL
#include <openxr/openxr_platform.h>
#include <glm/glm.hpp>
#include <glm/gtx/quaternion.hpp>
#include "controllers/Pose.h"
#define HAND_COUNT 2
// Identity pose: unit quaternion (no rotation) at the origin.
// NOTE(review): "INDENTITY" is a typo for "IDENTITY", but the name is
// referenced by other translation units, so it is kept for compatibility.
constexpr XrPosef XR_INDENTITY_POSE = {
    .orientation = { .x = 0, .y = 0, .z = 0, .w = 1.0 },
    .position = { .x = 0, .y = 0, .z = 0 },
};

// This plugin only targets two-eye stereo rendering.
constexpr XrViewConfigurationType XR_VIEW_CONFIG_TYPE = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO;
// Shared OpenXR state: owns the instance/session lifecycle, reference
// spaces and flags used by both the display and the input plugin.
class OpenXrContext {
public:
    XrInstance _instance = XR_NULL_HANDLE;
    XrSession _session = XR_NULL_HANDLE;
    XrSystemId _systemId = XR_NULL_SYSTEM_ID;
    // World-locked (stage) and head-locked (view) reference spaces.
    XrSpace _stageSpace = XR_NULL_HANDLE;
    XrSpace _viewSpace = XR_NULL_HANDLE;
    // Per-hand subaction paths (left, right) used for actions and
    // interaction-profile queries.
    XrPath _handPaths[HAND_COUNT];
    // Head pose from the most recent frame, in stage space.
    controller::Pose _lastHeadPose;
    // Predicted display time of the most recent xrWaitFrame; empty until
    // the first frame has run.
    std::optional<XrTime> _lastPredictedDisplayTime;
    // Set when the runtime asks us to exit (session loss / exit events).
    bool _shouldQuit = false;
    // Whether xrWaitFrame/xrBeginFrame/xrEndFrame should currently run.
    bool _shouldRunFrameCycle = false;
    // Whether an OpenXR runtime was detected at all.
    bool _isSupported = false;
    QString _systemName;
    bool _isSessionRunning = false;
    // hack for vive controllers
    bool _stickEmulation = false;
    // only supported by a few runtimes, but lets us
    // emulate OpenVR's headset proximity sensor system
    bool _userPresenceAvailable = false;
    // whether the headset is on, using XR_EXT_user_presence
    bool _hmdMounted = true;

private:
    XrSessionState _lastSessionState = XR_SESSION_STATE_UNKNOWN;
    // Cached path for the HTC Vive wand profile (drives _stickEmulation).
    XrPath _viveControllerPath = XR_NULL_PATH;

public:
    OpenXrContext();
    ~OpenXrContext();
    // Graphics-dependent setup: graphics binding, session, spaces.
    bool initPostGraphics();
    // Wraps xrBeginFrame on the current session.
    bool beginFrame();
    // Drains and dispatches the OpenXR event queue.
    bool pollEvents();
    bool requestExitSession();
    void reset();

private:
    bool initPreGraphics();
    bool initInstance();
    bool initSystem();
    bool initGraphics();
    bool initSession();
    bool initSpaces();
    // Handles XR_TYPE_EVENT_DATA_SESSION_STATE_CHANGED transitions.
    bool updateSessionState(XrSessionState newState);
};
// Convert an OpenXR 3-vector into its glm equivalent.
inline static glm::vec3 xrVecToGlm(const XrVector3f& src) {
    return { src.x, src.y, src.z };
}
// Convert an OpenXR quaternion into glm form (glm's constructor takes w first).
inline static glm::quat xrQuatToGlm(const XrQuaternionf& src) {
    return glm::quat(src.w, src.x, src.y, src.z);
}
bool xrCheck(XrInstance instance, XrResult result, const char* message);

View file

@ -0,0 +1,576 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#include "OpenXrDisplayPlugin.h"
#include <qloggingcategory.h>
#include "ViewFrustum.h"
#include <chrono>
#include <glm/gtx/string_cast.hpp>
#include <glm/gtx/transform.hpp>
#include <thread>
#include <format>
#if defined(Q_OS_WIN)
#undef near
#undef far
#endif
// Logging category for the OpenXR display plugin.
Q_DECLARE_LOGGING_CATEGORY(xr_display_cat)
Q_LOGGING_CATEGORY(xr_display_cat, "openxr.display")

// Preferred swapchain color format; the runtime may not offer it
// (see chooseSwapChainFormat below for the fallback).
constexpr GLint XR_PREFERRED_COLOR_FORMAT = GL_SRGB8_ALPHA8;
// The context is shared with OpenXrInputPlugin.
OpenXrDisplayPlugin::OpenXrDisplayPlugin(std::shared_ptr<OpenXrContext> c) {
    _context = c;
    _presentOnlyOnce = true;
    // Seed frame timing with a typical 90 Hz HMD refresh; beginFrameRender()
    // refines both values from actual measurements.
    _lastFrameTime = 1.0f / 90.0f;
    _estimatedTargetFramerate = 90.0f;
}
// True when an OpenXR runtime was detected at context creation.
bool OpenXrDisplayPlugin::isSupported() const {
    return _context->_isSupported;
}
// Slightly differs from glm::ortho
// Builds an asymmetric-frustum perspective projection from the per-eye
// field-of-view angles reported by the runtime. The angles are signed
// half-angles in radians, so tan() of each gives the frustum extent on
// the near plane at distance 1.
inline static glm::mat4 fovToProjection(const XrFovf fov, const float near, const float far) {
    const float left = tanf(fov.angleLeft);
    const float right = tanf(fov.angleRight);
    const float down = tanf(fov.angleDown);
    const float up = tanf(fov.angleUp);

    const float width = right - left;
    const float height = up - down;

    const float m11 = 2 / width;
    const float m22 = 2 / height;
    // Depth mapping to [-1, 1] (GL clip conventions).
    const float m33 = -(far + near) / (far - near);
    // Off-center terms shift the frustum when left/right (or up/down)
    // half-angles are asymmetric.
    const float m31 = (right + left) / width;
    const float m32 = (up + down) / height;
    const float m43 = -(far * (near + near)) / (far - near);

    // Column-major storage: each row below is one column of the matrix.
    // clang-format off
    const float mat[16] = {
        m11, 0  , 0  ,  0,
        0  , m22, 0  ,  0,
        m31, m32, m33, -1,
        0  , 0  , m43,  0,
    };
    // clang-format on

    return glm::make_mat4(mat);
}
// Build the projection for one eye from the runtime-reported FOV, reusing
// the near/far planes encoded in baseProjection.
glm::mat4 OpenXrDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
    if (!_views.has_value()) {
        // No view data yet (e.g. before the first frame): keep the app's matrix.
        return baseProjection;
    }
    ViewFrustum frustum;
    frustum.setProjection(baseProjection);
    const auto viewIndex = (eye == Left) ? 0 : 1;
    const auto& fov = _views.value()[viewIndex].fov;
    return fovToProjection(fov, frustum.getNearClip(), frustum.getFarClip());
}
// TODO: interface/src/Application_Graphics.cpp:535
// Uses the left eye's projection for culling; a combined frustum covering
// both eyes would cull less aggressively.
glm::mat4 OpenXrDisplayPlugin::getCullingProjection(const glm::mat4& baseProjection) const {
    return getEyeProjection(Left, baseProjection);
}
// OpenXR doesn't give us a target framerate,
// but it does do vsync on its own,
// so just push out frames as vsync allows
float OpenXrDisplayPlugin::getTargetFrameRate() const {
    // "Unlimited": the runtime's frame pacing (xrWaitFrame) throttles us.
    return std::numeric_limits<float>::max();
}
bool OpenXrDisplayPlugin::initViews() {
XrInstance instance = _context->_instance;
XrSystemId systemId = _context->_systemId;
XrResult result = xrEnumerateViewConfigurationViews(instance, systemId, XR_VIEW_CONFIG_TYPE, 0, &_viewCount, nullptr);
if (!xrCheck(instance, result, "Failed to get view configuration view count!")) {
qCCritical(xr_display_cat, "Failed to get view configuration view count!");
return false;
}
assert(_viewCount != 0);
_views = std::vector<XrView>();
for (uint32_t i = 0; i < _viewCount; i++) {
XrView view = { .type = XR_TYPE_VIEW };
_views.value().push_back(view);
XrViewConfigurationView viewConfig = { .type = XR_TYPE_VIEW_CONFIGURATION_VIEW };
_viewConfigs.push_back(viewConfig);
}
_swapChains.resize(_viewCount);
_swapChainLengths.resize(_viewCount);
_swapChainIndices.resize(_viewCount);
_images.resize(_viewCount);
result = xrEnumerateViewConfigurationViews(instance, systemId, XR_VIEW_CONFIG_TYPE, _viewCount, &_viewCount,
_viewConfigs.data());
if (!xrCheck(instance, result, "Failed to enumerate view configuration views!")) {
qCCritical(xr_display_cat, "Failed to enumerate view configuration views!");
return false;
}
return true;
}
// Expands to a switch case returning the enum's name as a string literal.
#define ENUM_TO_STR(r) \
    case r:            \
        return #r

// Human-readable name for the GL internal formats we care about;
// anything else is rendered as its hex value.
static std::string glFormatStr(GLenum source) {
    switch (source) {
        ENUM_TO_STR(GL_RGBA16);
        ENUM_TO_STR(GL_RGBA16F);
        ENUM_TO_STR(GL_SRGB8_ALPHA8);
        ENUM_TO_STR(GL_RGB10_A2UI);
        default:
            return std::format("0x{:X}", source);
    }
}
// Pick a swapchain color format: 'preferred' when the runtime offers it,
// otherwise the runtime's first advertised format. Returns -1 on failure.
static int64_t chooseSwapChainFormat(XrInstance instance, XrSession session, int64_t preferred) {
    uint32_t formatCount;
    XrResult result = xrEnumerateSwapchainFormats(session, 0, &formatCount, nullptr);
    if (!xrCheck(instance, result, "Failed to get number of supported swapchain formats"))
        return -1;

    qCInfo(xr_display_cat, "Runtime supports %d swapchain formats", formatCount);

    // BUGFIX: guard against a runtime reporting zero formats — the
    // formats[0] access below was previously unconditional and would
    // index an empty vector (undefined behavior).
    if (formatCount == 0) {
        qCCritical(xr_display_cat, "Runtime reported no swapchain formats");
        return -1;
    }

    std::vector<int64_t> formats(formatCount);
    result = xrEnumerateSwapchainFormats(session, formatCount, &formatCount, formats.data());
    if (!xrCheck(instance, result, "Failed to enumerate swapchain formats"))
        return -1;

    // Default to the runtime's first (most-preferred) format.
    int64_t chosen = formats[0];
    for (uint32_t i = 0; i < formatCount; i++) {
        qCInfo(xr_display_cat, "Supported GL format: %s", glFormatStr(formats[i]).c_str());
        if (formats[i] == preferred) {
            chosen = formats[i];
            qCInfo(xr_display_cat, "Using preferred swapchain format %s", glFormatStr(chosen).c_str());
            break;
        }
    }
    if (chosen != preferred) {
        qCWarning(xr_display_cat, "Falling back to non preferred swapchain format %s", glFormatStr(chosen).c_str());
    }
    return chosen;
}
bool OpenXrDisplayPlugin::initSwapChains() {
XrInstance instance = _context->_instance;
XrSession session = _context->_session;
int64_t format = chooseSwapChainFormat(instance, session, XR_PREFERRED_COLOR_FORMAT);
for (uint32_t i = 0; i < _viewCount; i++) {
_images[i].clear();
XrSwapchainCreateInfo info = {
.type = XR_TYPE_SWAPCHAIN_CREATE_INFO,
.createFlags = 0,
.usageFlags = XR_SWAPCHAIN_USAGE_SAMPLED_BIT | XR_SWAPCHAIN_USAGE_COLOR_ATTACHMENT_BIT,
.format = format,
.sampleCount = _viewConfigs[i].recommendedSwapchainSampleCount,
.width = _viewConfigs[i].recommendedImageRectWidth,
.height = _viewConfigs[i].recommendedImageRectHeight,
.faceCount = 1,
.arraySize = 1,
.mipCount = 1,
};
XrResult result = xrCreateSwapchain(session, &info, &_swapChains[i]);
if (!xrCheck(instance, result, "Failed to create swapchain!"))
return false;
result = xrEnumerateSwapchainImages(_swapChains[i], 0, &_swapChainLengths[i], nullptr);
if (!xrCheck(instance, result, "Failed to enumerate swapchains"))
return false;
for (uint32_t j = 0; j < _swapChainLengths[i]; j++) {
XrSwapchainImageOpenGLKHR image = { .type = XR_TYPE_SWAPCHAIN_IMAGE_OPENGL_KHR };
_images[i].push_back(image);
}
result = xrEnumerateSwapchainImages(_swapChains[i], _swapChainLengths[i], &_swapChainLengths[i],
(XrSwapchainImageBaseHeader*)_images[i].data());
if (!xrCheck(instance, result, "Failed to enumerate swapchain images"))
return false;
}
return true;
}
bool OpenXrDisplayPlugin::initLayers() {
for (uint32_t i = 0; i < _viewCount; i++) {
XrCompositionLayerProjectionView layer = {
.type = XR_TYPE_COMPOSITION_LAYER_PROJECTION_VIEW,
.subImage = {
.swapchain = _swapChains[i],
.imageRect = {
.offset = {
.x = 0,
.y = 0,
},
.extent = {
.width = (int32_t)_viewConfigs[i].recommendedImageRectWidth,
.height = (int32_t)_viewConfigs[i].recommendedImageRectHeight,
},
},
.imageArrayIndex = 0,
},
};
_projectionLayerViews.push_back(layer);
};
return true;
}
// Plugin init: query the view configuration and derive the combined
// side-by-side render target size, then announce the device.
void OpenXrDisplayPlugin::init() {
    Plugin::init();

    if (!initViews()) {
        qCCritical(xr_display_cat, "View init failed.");
        return;
    }

    // Each iteration overwrites the size; with symmetric stereo views the
    // last (right) view determines the final value.
    for (const XrViewConfigurationView& view : _viewConfigs) {
        assert(view.recommendedImageRectWidth != 0);
        qCDebug(xr_display_cat, "Swapchain dimensions: %dx%d", view.recommendedImageRectWidth, view.recommendedImageRectHeight);
        // TODO: Don't render side-by-side but use multiview (texture arrays). This probably won't work with GL.
        _renderTargetSize.x = view.recommendedImageRectWidth * 2;
        _renderTargetSize.y = view.recommendedImageRectHeight;
    }

    emit deviceConnected(getName());
}
// FIXME: For some reason, OpenVR and OVR don't need this,
// and the game tick counter works as expected. In XR, it
// doesn't behave properly, so we have to emulate vsync delay manually.
void OpenXrDisplayPlugin::idle() {
    // NOTE(review): this sleeps for (lastFrameTime - targetPeriod), i.e. it
    // only waits when the previous frame took LONGER than the estimated
    // period. Emulating vsync would normally wait (targetPeriod -
    // lastFrameTime); confirm the intended formula before changing it.
    float remainingUntilFrame = std::max(0.0f, _lastFrameTime - (1.0f / _estimatedTargetFramerate));
    std::chrono::duration<float, std::ratio<1>> duration(remainingUntilFrame);
    std::this_thread::sleep_for(duration);
}
// Display name shown in the UI, e.g. "OpenXR: SteamVR/OpenXR".
const QString OpenXrDisplayPlugin::getName() const {
    return QString("OpenXR: %1").arg(_context->_systemName);
}
// Reset the shared context before the base class activates the HMD path,
// so a previous (possibly exited) session doesn't leak into this one.
bool OpenXrDisplayPlugin::internalActivate() {
    _context->reset();
    return HmdDisplayPlugin::internalActivate();
}
// Shut down the OpenXR session cleanly before the base class deactivates.
void OpenXrDisplayPlugin::internalDeactivate() {
    // We can get into a state where activate -> deactivate -> activate is called in a chain.
    // We are probably gonna have a bad time then. At least check if the session is already running.
    // This happens when the application decides to switch display plugins back and forth. This should
    // probably be fixed there.
    if (_context->_isSessionRunning) {
        if (!_context->requestExitSession()) {
            qCCritical(xr_display_cat, "Failed to request exit session");
        } else {
            // Poll events until runtime wants to quit
            // NOTE(review): this busy-loop has no timeout; if the runtime never
            // delivers the exit events, deactivation blocks indefinitely.
            while (!_context->_shouldQuit) {
                _context->pollEvents();
            }
        }
    }
    HmdDisplayPlugin::internalDeactivate();
}
// Runs on the GL context thread: finish OpenXR graphics setup (binding,
// session, spaces, swapchains, layers) and allocate composite textures.
// Order matters — the session needs the GL context, the swapchains need
// the session, and the layers need the swapchains.
void OpenXrDisplayPlugin::customizeContext() {
    gl::initModuleGl();
    HmdDisplayPlugin::customizeContext();

    if (!_context->initPostGraphics()) {
        qCCritical(xr_display_cat, "Post graphics init failed.");
        return;
    }

    if (!initSwapChains()) {
        qCCritical(xr_display_cat, "Swap chain init failed.");
        return;
    }

    if (!initLayers()) {
        qCCritical(xr_display_cat, "Layer init failed.");
        return;
    }

    // Create swap chain images for _compositeFramebuffer
    for (size_t i = 0; i < _swapChainLengths[0]; ++i) {
        gpu::TexturePointer texture =
            gpu::Texture::createRenderBuffer(gpu::Element::COLOR_SRGBA_32, _renderTargetSize.x, _renderTargetSize.y,
                                             gpu::Texture::SINGLE_MIP, gpu::Sampler(gpu::Sampler::FILTER_MIN_MAG_POINT));
        _compositeSwapChain.push_back(texture);
    }
}
void OpenXrDisplayPlugin::uncustomizeContext() {
_compositeSwapChain.clear();
_projectionLayerViews.clear();
for (uint32_t i = 0; i < _viewCount; i++) {
_images[i].clear();
}
HmdDisplayPlugin::uncustomizeContext();
}
// Intentionally empty: OpenXR reference spaces handle recentering.
void OpenXrDisplayPlugin::resetSensors() {
}
// Per-frame CPU work: pump events, wait until the present thread has
// consumed the previous frame, run xrWaitFrame/xrBeginFrame, locate the
// per-eye and head poses, and record frame timing.
// Logic is unchanged; the misspelled warning text ("Shoudln't…renderin")
// is corrected.
bool OpenXrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    std::chrono::time_point measureStart = std::chrono::high_resolution_clock::now();

    _context->pollEvents();

    if (_context->_shouldQuit) {
        QMetaObject::invokeMethod(qApp, "quit");
        return false;
    }
    if (!_context->_shouldRunFrameCycle) {
        qCWarning(xr_display_cat, "beginFrameRender: Shouldn't run frame cycle. Skipping rendering frame %d", frameIndex);
        return true;
    }

    // Wait for present thread
    // Actually wait for xrEndFrame to happen.
    bool haveFrameToSubmit = true;
    {
        std::unique_lock<std::mutex> lock(_haveFrameMutex);
        haveFrameToSubmit = _haveFrameToSubmit;
    }
    while (haveFrameToSubmit) {
        std::this_thread::sleep_for(std::chrono::microseconds(10));
        {
            std::unique_lock<std::mutex> lock(_haveFrameMutex);
            haveFrameToSubmit = _haveFrameToSubmit;
        }
    }

    _lastFrameState = { .type = XR_TYPE_FRAME_STATE };
    XrResult result = xrWaitFrame(_context->_session, nullptr, &_lastFrameState);
    if (!xrCheck(_context->_instance, result, "xrWaitFrame failed"))
        return false;

    if (!_context->beginFrame())
        return false;

    _context->_lastPredictedDisplayTime = _lastFrameState.predictedDisplayTime;

    std::vector<XrView> eye_views(_viewCount);
    for (uint32_t i = 0; i < _viewCount; i++) {
        eye_views[i].type = XR_TYPE_VIEW;
    }

    // TODO: Probably shouldn't call xrLocateViews twice. Use only view space views?
    // First pass: views relative to the head (view space) → per-eye offsets.
    XrViewLocateInfo eyeViewLocateInfo = {
        .type = XR_TYPE_VIEW_LOCATE_INFO,
        .viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
        .displayTime = _lastFrameState.predictedDisplayTime,
        .space = _context->_viewSpace,
    };
    XrViewState eyeViewState = { .type = XR_TYPE_VIEW_STATE };
    result = xrLocateViews(_context->_session, &eyeViewLocateInfo, &eyeViewState, _viewCount, &_viewCount, eye_views.data());
    if (!xrCheck(_context->_instance, result, "Could not locate views"))
        return false;

    for (uint32_t i = 0; i < 2; i++) {
        vec3 eyePosition = xrVecToGlm(eye_views[i].pose.position);
        quat eyeOrientation = xrQuatToGlm(eye_views[i].pose.orientation);
        _eyeOffsets[i] = controller::Pose(eyePosition, eyeOrientation).getMatrix();
    }

    // Second pass: views in stage space → poses for the composition layer.
    _lastViewState = { .type = XR_TYPE_VIEW_STATE };
    XrViewLocateInfo viewLocateInfo = {
        .type = XR_TYPE_VIEW_LOCATE_INFO,
        .viewConfigurationType = XR_VIEW_CONFIGURATION_TYPE_PRIMARY_STEREO,
        .displayTime = _lastFrameState.predictedDisplayTime,
        .space = _context->_stageSpace,
    };
    result = xrLocateViews(_context->_session, &viewLocateInfo, &_lastViewState, _viewCount, &_viewCount, _views.value().data());
    if (!xrCheck(_context->_instance, result, "Could not locate views"))
        return false;

    for (uint32_t i = 0; i < _viewCount; i++) {
        _projectionLayerViews[i].pose = _views.value()[i].pose;
        _projectionLayerViews[i].fov = _views.value()[i].fov;
    }

    // Head pose in stage space, shared with the input plugin via the context.
    XrSpaceLocation headLocation = {
        .type = XR_TYPE_SPACE_LOCATION,
        .pose = XR_INDENTITY_POSE,
    };
    xrLocateSpace(_context->_viewSpace, _context->_stageSpace, _lastFrameState.predictedDisplayTime, &headLocation);

    glm::vec3 headPosition = xrVecToGlm(headLocation.pose.position);
    glm::quat headOrientation = xrQuatToGlm(headLocation.pose.orientation);

    _context->_lastHeadPose = controller::Pose(headPosition, headOrientation);

    _currentRenderFrameInfo = FrameInfo();
    _currentRenderFrameInfo.renderPose = _context->_lastHeadPose.getMatrix();
    _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;
    _frameInfos[frameIndex] = _currentRenderFrameInfo;

    // Update the frame-time measurement and raise the framerate estimate
    // whenever we observe a faster frame (used by idle()).
    std::chrono::time_point measureEnd = std::chrono::high_resolution_clock::now();
    std::chrono::duration<double, std::ratio<1>> delta = measureEnd - measureStart;
    _lastFrameTime = delta.count();
    auto newEstimatedFramerate = (1.0f / _lastFrameTime);
    if (_estimatedTargetFramerate < newEstimatedFramerate) {
        _estimatedTargetFramerate = newEstimatedFramerate;
    }

    return HmdDisplayPlugin::beginFrameRender(frameIndex);
}
// Hand the frame to the base class, then flag the present thread that a
// frame is pending (cleared again in hmdPresent()).
void OpenXrDisplayPlugin::submitFrame(const gpu::FramePointer& newFrame) {
    OpenGLDisplayPlugin::submitFrame(newFrame);
    std::lock_guard<std::mutex> guard(_haveFrameMutex);
    _haveFrameToSubmit = true;
}
// Composite UI/overlay layers into the current swapchain-backed buffer,
// but only while the session is running frames and the runtime wants one.
void OpenXrDisplayPlugin::compositeLayers() {
    if (!_context->_shouldRunFrameCycle || !_lastFrameState.shouldRender) {
        return;
    }
    _compositeFramebuffer->setRenderBuffer(0, _compositeSwapChain[_swapChainIndices[0]]);
    HmdDisplayPlugin::compositeLayers();
}
// Present thread: acquire both eye swapchain images, copy each half of the
// side-by-side composite framebuffer into them, release, then end the frame.
// Logic is unchanged; the misspelled warning text ("Shoudln't…renderin")
// is corrected.
void OpenXrDisplayPlugin::hmdPresent() {
    if (!_context->_shouldRunFrameCycle) {
        qCWarning(xr_display_cat, "hmdPresent: Shouldn't run frame cycle. Skipping rendering frame %d",
                  _currentFrame->frameIndex);
        return;
    }

    if (_lastFrameState.shouldRender) {
        // TODO: Use multiview swapchain
        for (uint32_t i = 0; i < 2; i++) {
            XrSwapchainImageAcquireInfo acquireInfo = { .type = XR_TYPE_SWAPCHAIN_IMAGE_ACQUIRE_INFO };
            XrResult result = xrAcquireSwapchainImage(_swapChains[i], &acquireInfo, &_swapChainIndices[i]);
            if (!xrCheck(_context->_instance, result, "failed to acquire swapchain image!"))
                return;

            // NOTE(review): XrDuration is in nanoseconds, so this is a 1 µs
            // wait — confirm whether a longer timeout was intended.
            XrSwapchainImageWaitInfo waitInfo = { .type = XR_TYPE_SWAPCHAIN_IMAGE_WAIT_INFO, .timeout = 1000 };
            result = xrWaitSwapchainImage(_swapChains[i], &waitInfo);
            if (!xrCheck(_context->_instance, result, "failed to wait for swapchain image!"))
                return;
        }

        // Left half → left eye swapchain image, right half → right eye.
        GLuint glTexId = getGLBackend()->getTextureID(_compositeFramebuffer->getRenderBuffer(0));
        glCopyImageSubData(glTexId, GL_TEXTURE_2D, 0, 0, 0, 0, _images[0][_swapChainIndices[0]].image, GL_TEXTURE_2D, 0, 0, 0,
                           0, _renderTargetSize.x / 2, _renderTargetSize.y, 1);
        glCopyImageSubData(glTexId, GL_TEXTURE_2D, 0, _renderTargetSize.x / 2, 0, 0, _images[1][_swapChainIndices[1]].image,
                           GL_TEXTURE_2D, 0, 0, 0, 0, _renderTargetSize.x / 2, _renderTargetSize.y, 1);

        for (uint32_t i = 0; i < 2; i++) {
            XrSwapchainImageReleaseInfo releaseInfo = { .type = XR_TYPE_SWAPCHAIN_IMAGE_RELEASE_INFO };
            XrResult result = xrReleaseSwapchainImage(_swapChains[i], &releaseInfo);
            if (!xrCheck(_context->_instance, result, "failed to release swapchain image!")) {
                assert(false);
                return;
            }
        }
    }

    endFrame();
    _presentRate.increment();

    // Unblock beginFrameRender(), which waits for this flag to clear.
    {
        std::unique_lock<std::mutex> lock(_haveFrameMutex);
        _haveFrameToSubmit = false;
    }
}
// Submit this frame's projection layer to the runtime. When the runtime
// asked us not to render, or tracking orientation is invalid, an empty
// layer list is submitted instead.
bool OpenXrDisplayPlugin::endFrame() {
    XrCompositionLayerProjection projectionLayer = {
        .type = XR_TYPE_COMPOSITION_LAYER_PROJECTION,
        .layerFlags = 0,
        .space = _context->_stageSpace,
        .viewCount = _viewCount,
        .views = _projectionLayerViews.data(),
    };

    std::vector<const XrCompositionLayerBaseHeader*> layers = {
        (const XrCompositionLayerBaseHeader*)&projectionLayer,
    };

    const bool orientationValid = (_lastViewState.viewStateFlags & XR_VIEW_STATE_ORIENTATION_VALID_BIT) != 0;
    const bool submitLayers = orientationValid && _lastFrameState.shouldRender;

    XrFrameEndInfo info = {
        .type = XR_TYPE_FRAME_END_INFO,
        .displayTime = _lastFrameState.predictedDisplayTime,
        .environmentBlendMode = XR_ENVIRONMENT_BLEND_MODE_OPAQUE,
        .layerCount = submitLayers ? (uint32_t)layers.size() : 0,
        .layers = layers.data(),
    };

    XrResult result = xrEndFrame(_context->_session, &info);
    return xrCheck(_context->_instance, result, "failed to end frame!");
}
// Intentionally empty: no mirror-window post step is needed for OpenXR.
void OpenXrDisplayPlugin::postPreview() {
}
// Driven by XR_EXT_user_presence events (see OpenXrContext::pollEvents);
// defaults to true on runtimes without that extension.
bool OpenXrDisplayPlugin::isHmdMounted() const {
    return _context->_hmdMounted;
}
// Intentionally empty: presentPose is set in beginFrameRender().
void OpenXrDisplayPlugin::updatePresentPose() {
}
// No extra threads beyond what the HMD base plugin needs.
int OpenXrDisplayPlugin::getRequiredThreadCount() const {
    return HmdDisplayPlugin::getRequiredThreadCount();
}
// Placeholder: returns a fixed 10x10 area instead of querying the
// runtime's stage bounds (xrGetReferenceSpaceBoundsRect).
QRectF OpenXrDisplayPlugin::getPlayAreaRect() {
    return QRectF(0, 0, 10, 10);
}
// No hidden-area mesh yet (XR_KHR_visibility_mask is not implemented),
// so no stencil mask operator is provided.
DisplayPlugin::StencilMaskMeshOperator OpenXrDisplayPlugin::getStencilMaskMeshOperator() {
    return nullptr;
}

View file

@ -0,0 +1,92 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include <graphics/Geometry.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include "OpenXrContext.h"
// HMD display plugin backed by an OpenXrContext that is shared with the
// OpenXR input plugin.
class OpenXrDisplayPlugin : public HmdDisplayPlugin {
public:
    OpenXrDisplayPlugin(std::shared_ptr<OpenXrContext> c);
    bool isSupported() const override;
    const QString getName() const override;
    bool getSupportsAutoSwitch() override final { return true; }

    glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override;
    glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const override;

    void init() override;
    // Emulates vsync delay between frames (see the FIXME in the .cpp).
    void idle() override;

    float getTargetFrameRate() const override;
    bool hasAsyncReprojection() const override { return true; }

    void customizeContext() override;
    void uncustomizeContext() override;

    void resetSensors() override;
    bool beginFrameRender(uint32_t frameIndex) override;

    void submitFrame(const gpu::FramePointer& newFrame) override;
    void cycleDebugOutput() override { _lockCurrentTexture = !_lockCurrentTexture; }

    int getRequiredThreadCount() const override;

    QRectF getPlayAreaRect() override;

    virtual StencilMaskMode getStencilMaskMode() const override { return StencilMaskMode::MESH; }
    virtual StencilMaskMeshOperator getStencilMaskMeshOperator() override;

    // OpenXR recentering is handled by the runtime: always identity.
    glm::mat4 getSensorResetMatrix() const { return glm::mat4(1.0f); }

protected:
    bool internalActivate() override;
    void internalDeactivate() override;
    void updatePresentPose() override;

    void compositeLayers() override;
    void hmdPresent() override;
    bool isHmdMounted() const override;
    void postPreview() override;

private:
    // GL textures backing _compositeFramebuffer, one per swapchain image.
    std::vector<gpu::TexturePointer> _compositeSwapChain;
    XrViewState _lastViewState;
    std::shared_ptr<OpenXrContext> _context;

    // Number of views (eyes) reported by the runtime; 2 for stereo.
    uint32_t _viewCount = 0;
    std::vector<XrCompositionLayerProjectionView> _projectionLayerViews;
    // Stage-space views from the latest xrLocateViews; empty before init.
    std::optional<std::vector<XrView>> _views;
    std::vector<XrViewConfigurationView> _viewConfigs;

    // Per-view swapchain handles, lengths, current indices and GL images.
    std::vector<XrSwapchain> _swapChains;
    std::vector<uint32_t> _swapChainLengths;
    std::vector<uint32_t> _swapChainIndices;
    std::vector<std::vector<XrSwapchainImageOpenGLKHR>> _images;

    XrFrameState _lastFrameState;

    bool initViews();
    bool initSwapChains();
    bool initLayers();
    bool endFrame();

    // Handshake between the render thread (sets) and present thread (clears).
    bool _haveFrameToSubmit = false;
    std::mutex _haveFrameMutex;

    // Measured frame time (seconds) and running framerate estimate used by idle().
    float _lastFrameTime;
    float _estimatedTargetFramerate;
};

View file

@ -0,0 +1,728 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#include <glm/ext.hpp>
#include "OpenXrInputPlugin.h"
#include "AvatarConstants.h"
#include "PathUtils.h"
#include "controllers/UserInputMapper.h"
// Logging category for the OpenXR input plugin.
Q_DECLARE_LOGGING_CATEGORY(xr_input_cat)
Q_LOGGING_CATEGORY(xr_input_cat, "openxr.input")
// The context is shared with OpenXrDisplayPlugin.
OpenXrInputPlugin::OpenXrInputPlugin(std::shared_ptr<OpenXrContext> c) {
    _context = c;
    _inputDevice = std::make_shared<InputDevice>(_context);
}
// TODO: Config options
// Empty: this plugin exposes no configuration UI yet.
static const QString XR_CONFIGURATION_LAYOUT = QString("");
// TODO: full-body-tracking
// No-op until tracker calibration is implemented.
void OpenXrInputPlugin::calibrate() {
}
// TODO: full-body-tracking
// Always reports success since there is no calibration state to undo yet.
bool OpenXrInputPlugin::uncalibrate() {
    return true;
}
// True when an OpenXR runtime was detected at context creation.
bool OpenXrInputPlugin::isSupported() const {
    return _context->_isSupported;
}
// TODO: Config options
// No-op: settings are ignored until a configuration UI exists.
void OpenXrInputPlugin::setConfigurationSettings(const QJsonObject configurationSettings) {
}
// TODO: Config options
// Empty object: no persisted configuration yet.
QJsonObject OpenXrInputPlugin::configurationSettings() {
    return QJsonObject();
}
// Returns the (currently empty) configuration UI layout.
QString OpenXrInputPlugin::configurationLayout() {
    return XR_CONFIGURATION_LAYOUT;
}
// Activate the plugin: load settings and register our input device with
// the user input mapper.
bool OpenXrInputPlugin::activate() {
    InputPlugin::activate();
    loadSettings();
    auto mapper = DependencyManager::get<controller::UserInputMapper>();
    mapper->registerDevice(_inputDevice);
    _registeredWithInputMapper = true;
    return true;
}
// Deactivate: drop pose state, unregister from the input mapper and
// persist settings.
void OpenXrInputPlugin::deactivate() {
    InputPlugin::deactivate();

    _inputDevice->_poseStateMap.clear();

    // unregister with UserInputMapper
    auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
    userInputMapper->removeDevice(_inputDevice->_deviceID);
    _registeredWithInputMapper = false;

    saveSettings();
}
// Per-frame update: tick the input device under the mapper's lock and
// (un)register the device as controllers come and go.
void OpenXrInputPlugin::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
    if (_context->_shouldQuit) {
        deactivate();
        return;
    }

    auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
    userInputMapper->withLock([&, this]() { _inputDevice->update(deltaTime, inputCalibrationData); });

    // Remove the device while no controllers are tracked...
    if (_inputDevice->_trackedControllers == 0 && _registeredWithInputMapper) {
        userInputMapper->removeDevice(_inputDevice->_deviceID);
        _registeredWithInputMapper = false;
        _inputDevice->_poseStateMap.clear();
    }

    // ...and re-add it as soon as one shows up again.
    if (!_registeredWithInputMapper && _inputDevice->_trackedControllers > 0) {
        userInputMapper->registerDevice(_inputDevice);
        _registeredWithInputMapper = true;
    }
}
// TODO: Config options
// No-op: nothing is persisted yet.
void OpenXrInputPlugin::loadSettings() {
}
// TODO: Config options
// No-op: nothing is persisted yet.
void OpenXrInputPlugin::saveSettings() const {
}
// "OpenXR" is the device name shown by the controller system.
OpenXrInputPlugin::InputDevice::InputDevice(std::shared_ptr<OpenXrContext> c) : controller::InputDevice("OpenXR") {
    _context = c;
}
// Drop any latched button/axis state when the application loses focus.
void OpenXrInputPlugin::InputDevice::focusOutEvent() {
    _buttonPressedMap.clear();
    _axisStateMap.clear();
}
// Fire a haptic pulse on the controller selected by 'index'.
// NOTE(review): indices 0/1 are left/right; the guard admits index == 2,
// which currently also maps to right_haptic — confirm whether the check
// should be `index >= 2` or whether 2 means "both" upstream.
bool OpenXrInputPlugin::InputDevice::triggerHapticPulse(float strength, float duration, uint16_t index) {
    if (index > 2) {
        return false;
    }

    std::unique_lock<std::recursive_mutex> locker(_lock);

    // TODO: Haptic values in overte are always strengh 1.0 and duration only 13.0 or 16.0. So it's not really used.
    // The duration does not seem to map to a time unit. 16ms seems quite short for a haptic vibration.
    // Let's assume the duration is in 10 milliseconds.
    // Let's also assume strength 1.0 is the middle value, which is 0.5 in OpenXR.
    using namespace std::chrono;
    nanoseconds durationNs = duration_cast<nanoseconds>(milliseconds(static_cast<int>(duration * 10.0f)));
    XrDuration xrDuration = durationNs.count();

    auto path = (index == 0) ? "left_haptic" : "right_haptic";

    if (!_actions.at(path)->applyHaptic(xrDuration, XR_FREQUENCY_UNSPECIFIED, 0.5f * strength)) {
        qCCritical(xr_input_cat) << "Failed to apply haptic feedback!";
    }

    return true;
}
// Create the XrAction for this wrapper inside 'actionSet', with left/right
// hand subaction paths; pose actions additionally get an action space.
bool OpenXrInputPlugin::Action::init(XrActionSet actionSet) {
    XrInstance instance = _context->_instance;
    XrActionCreateInfo info = {
        .type = XR_TYPE_ACTION_CREATE_INFO,
        .actionType = _type,
        .countSubactionPaths = HAND_COUNT,
        .subactionPaths = _context->_handPaths,
    };

    // The designated initializer above zero-fills both name arrays, so the
    // size-limited strncpy below always leaves them null-terminated.
    strncpy(info.actionName, _id.c_str(), XR_MAX_ACTION_NAME_SIZE - 1);
    strncpy(info.localizedActionName, _friendlyName.c_str(), XR_MAX_LOCALIZED_ACTION_NAME_SIZE - 1);

    XrResult result = xrCreateAction(actionSet, &info, &_action);
    if (!xrCheck(instance, result, "Failed to create action"))
        return false;

    // Pose actions need spaces
    if (_type == XR_ACTION_TYPE_POSE_INPUT) {
        if (!createPoseSpaces()) {
            return false;
        }
    }

    return true;
}
// NOTE(review): this converts the action id (_id) itself to an XrPath and
// does so once per hand, producing HAND_COUNT identical bindings rather
// than per-hand input paths. The real binding suggestions are made in
// InputDevice::initBindings(); verify whether this helper is still used
// before relying on it.
std::vector<XrActionSuggestedBinding> OpenXrInputPlugin::Action::getBindings() {
    assert(_action != XR_NULL_HANDLE);

    std::vector<XrActionSuggestedBinding> bindings;
    for (uint32_t i = 0; i < HAND_COUNT; i++) {
        XrPath path;
        xrStringToPath(_context->_instance, _id.c_str(), &path);
        XrActionSuggestedBinding binding = { .action = _action, .binding = path };
        bindings.push_back(binding);
    }
    return bindings;
}
// Read the current float state (e.g. trigger value) of this action.
XrActionStateFloat OpenXrInputPlugin::Action::getFloat() {
    XrActionStateGetInfo getInfo = {
        .type = XR_TYPE_ACTION_STATE_GET_INFO,
        .action = _action,
    };
    XrActionStateFloat state = { .type = XR_TYPE_ACTION_STATE_FLOAT };
    XrResult result = xrGetActionStateFloat(_context->_session, &getInfo, &state);
    xrCheck(_context->_instance, result, "Failed to get float state!");
    return state;
}
// Read the current 2D state (e.g. thumbstick) of this action.
XrActionStateVector2f OpenXrInputPlugin::Action::getVector2f() {
    XrActionStateGetInfo getInfo = {
        .type = XR_TYPE_ACTION_STATE_GET_INFO,
        .action = _action,
    };
    XrActionStateVector2f state = { .type = XR_TYPE_ACTION_STATE_VECTOR2F };
    XrResult result = xrGetActionStateVector2f(_context->_session, &getInfo, &state);
    xrCheck(_context->_instance, result, "Failed to get vector2 state!");
    return state;
}
// Read the current boolean state (e.g. button press) of this action.
XrActionStateBoolean OpenXrInputPlugin::Action::getBool() {
    XrActionStateBoolean state = {
        .type = XR_TYPE_ACTION_STATE_BOOLEAN,
    };

    XrActionStateGetInfo info = {
        .type = XR_TYPE_ACTION_STATE_GET_INFO,
        .action = _action,
    };

    XrResult result = xrGetActionStateBoolean(_context->_session, &info, &state);
    // BUGFIX: message previously said "float state" (copy-paste from getFloat()).
    xrCheck(_context->_instance, result, "Failed to get boolean state!");

    return state;
}
// Locate this pose action's space in stage space at the last predicted
// display time. Before the first frame (no predicted time yet), the
// returned location is zero-initialized with no valid flags.
XrSpaceLocation OpenXrInputPlugin::Action::getPose() {
    XrActionStatePose state = {
        .type = XR_TYPE_ACTION_STATE_POSE,
    };

    XrActionStateGetInfo info = {
        .type = XR_TYPE_ACTION_STATE_GET_INFO,
        .action = _action,
    };

    XrResult result = xrGetActionStatePose(_context->_session, &info, &state);
    xrCheck(_context->_instance, result, "failed to get pose value!");

    XrSpaceLocation location = {
        .type = XR_TYPE_SPACE_LOCATION,
    };

    if (_context->_lastPredictedDisplayTime.has_value()) {
        result = xrLocateSpace(_poseSpace, _context->_stageSpace, _context->_lastPredictedDisplayTime.value(), &location);
        xrCheck(_context->_instance, result, "Failed to locate hand space!");
    }

    return location;
}
// Fire a haptic vibration on this (output) action; true on success.
bool OpenXrInputPlugin::Action::applyHaptic(XrDuration duration, float frequency, float amplitude) {
    XrHapticActionInfo actionInfo = {
        .type = XR_TYPE_HAPTIC_ACTION_INFO,
        .action = _action,
    };
    XrHapticVibration vibration = {
        .type = XR_TYPE_HAPTIC_VIBRATION,
        .duration = duration,
        .frequency = frequency,
        .amplitude = amplitude,
    };
    XrResult result = xrApplyHapticFeedback(_context->_session, &actionInfo, (const XrHapticBaseHeader*)&vibration);
    return xrCheck(_context->_instance, result, "Failed to apply haptic feedback!");
}
// Create the XrSpace used to locate this pose action each frame.
bool OpenXrInputPlugin::Action::createPoseSpaces() {
    assert(_action != XR_NULL_HANDLE);

    XrActionSpaceCreateInfo createInfo = {
        .type = XR_TYPE_ACTION_SPACE_CREATE_INFO,
        .action = _action,
        .poseInActionSpace = XR_INDENTITY_POSE,
    };
    XrResult result = xrCreateActionSpace(_context->_session, &createInfo, &_poseSpace);
    return xrCheck(_context->_instance, result, "Failed to create hand pose space");
}
bool OpenXrInputPlugin::InputDevice::initBindings(const std::string& profileName,
const std::map<std::string, std::string>& actionsToBind) {
XrPath profilePath;
XrResult result = xrStringToPath(_context->_instance, profileName.c_str(), &profilePath);
if (!xrCheck(_context->_instance, result, "Failed to get interaction profile"))
return false;
std::vector<XrActionSuggestedBinding> suggestions;
for (const auto& [actionName, inputPathRaw] : actionsToBind) {
XrActionSuggestedBinding bind = {
.action = _actions[actionName]->_action,
};
xrStringToPath(_context->_instance, inputPathRaw.c_str(), &bind.binding);
suggestions.emplace(suggestions.end(), bind);
}
const XrInteractionProfileSuggestedBinding suggestedBinding = {
.type = XR_TYPE_INTERACTION_PROFILE_SUGGESTED_BINDING,
.interactionProfile = profilePath,
.countSuggestedBindings = (uint32_t)suggestions.size(),
.suggestedBindings = suggestions.data(),
};
result = xrSuggestInteractionProfileBindings(_context->_instance, &suggestedBinding);
return xrCheck(_context->_instance, result, "Failed to suggest bindings");
}
// Enumerate the controller channels this device publishes: head and
// per-hand poses, sticks (LS/RS), axes (LX/LY/RX/RY), triggers (LT/RT),
// grips and thumb buttons.
controller::Input::NamedVector OpenXrInputPlugin::InputDevice::getAvailableInputs() const {
    using namespace controller;

    QVector<Input::NamedPair> availableInputs{
        makePair(HEAD, "Head"),
        makePair(LEFT_HAND, "LeftHand"),
        makePair(LS, "LS"),
        makePair(LS_TOUCH, "LSTouch"),
        makePair(LX, "LX"),
        makePair(LY, "LY"),
        makePair(LT, "LT"),
        makePair(LT_CLICK, "LTClick"),
        makePair(LEFT_GRIP, "LeftGrip"),
        makePair(LEFT_PRIMARY_THUMB, "LeftPrimaryThumb"),
        makePair(LEFT_SECONDARY_THUMB, "LeftSecondaryThumb"),
        makePair(RIGHT_HAND, "RightHand"),
        makePair(RS, "RS"),
        makePair(RS_TOUCH, "RSTouch"),
        makePair(RX, "RX"),
        makePair(RY, "RY"),
        makePair(RT, "RT"),
        makePair(RT_CLICK, "RTClick"),
        makePair(RIGHT_GRIP, "RightGrip"),
        makePair(RIGHT_PRIMARY_THUMB, "RightPrimaryThumb"),
        makePair(RIGHT_SECONDARY_THUMB, "RightSecondaryThumb"),
    };

    return availableInputs;
}
// Returns the controller mapping JSON shipped with the application resources.
QString OpenXrInputPlugin::InputDevice::getDefaultMappingConfig() const {
    static const QString MAPPING_JSON = "/controllers/openxr.json";
    return PathUtils::resourcesPath() + MAPPING_JSON;
}
// Creates the "overte" action set, every controller action we expose, and
// suggested bindings for each interaction profile we know about, then attaches
// the action set to the session. Runs once per session; subsequent calls are
// no-ops via _actionsInitialized. Returns false on unrecoverable XR errors.
bool OpenXrInputPlugin::InputDevice::initActions() {
    if (_actionsInitialized)
        return true;

    // Actions can only be created and attached once a session exists.
    assert(_context->_session != XR_NULL_HANDLE);

    XrInstance instance = _context->_instance;

    XrActionSetCreateInfo actionSetInfo = {
        .type = XR_TYPE_ACTION_SET_CREATE_INFO,
        .actionSetName = "overte",
        .localizedActionSetName = "Overte",
        .priority = 0,
    };
    XrResult result = xrCreateActionSet(instance, &actionSetInfo, &_actionSet);
    if (!xrCheck(instance, result, "Failed to create action set."))
        return false;

    // action id -> (localized display name, OpenXR action type)
    std::map<std::string, std::pair<std::string, XrActionType>> actionTypes = {
        {"left_primary_click", {"Left Primary", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"left_secondary_click", {"Left Secondary (Tablet)", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"left_squeeze_value", {"Left Squeeze", XR_ACTION_TYPE_FLOAT_INPUT}},
        {"left_trigger_value", {"Left Trigger", XR_ACTION_TYPE_FLOAT_INPUT}},
        {"left_trigger_click", {"Left Trigger Click", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"left_thumbstick", {"Left Thumbstick", XR_ACTION_TYPE_VECTOR2F_INPUT}},
        {"left_thumbstick_click", {"Left Thumbstick Click", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"left_thumbstick_touch", {"Left Thumbstick Touch", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"left_pose", {"Left Hand Pose", XR_ACTION_TYPE_POSE_INPUT}},
        {"left_haptic", {"Left Hand Haptic", XR_ACTION_TYPE_VIBRATION_OUTPUT}},
        {"right_primary_click", {"Right Primary", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"right_secondary_click", {"Right Secondary (Jump)", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"right_squeeze_value", {"Right Squeeze", XR_ACTION_TYPE_FLOAT_INPUT}},
        {"right_trigger_value", {"Right Trigger", XR_ACTION_TYPE_FLOAT_INPUT}},
        {"right_trigger_click", {"Right Trigger Click", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"right_thumbstick", {"Right Thumbstick", XR_ACTION_TYPE_VECTOR2F_INPUT}},
        {"right_thumbstick_click", {"Right Thumbstick Click", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"right_thumbstick_touch", {"Right Thumbstick Touch", XR_ACTION_TYPE_BOOLEAN_INPUT}},
        {"right_pose", {"Right Hand Pose", XR_ACTION_TYPE_POSE_INPUT}},
        {"right_haptic", {"Right Hand Haptic", XR_ACTION_TYPE_VIBRATION_OUTPUT}},
    };

    // Shared path prefixes for the per-hand input subtrees.
    std::string hand_left = "/user/hand/left/input";
    std::string hand_right = "/user/hand/right/input";

    // interaction profile path -> (action id -> runtime input/output path).
    // Trackpad-based controllers map their trackpad onto the thumbstick
    // channels; stick emulation in update() compensates for that.
    std::map<std::string, std::map<std::string, std::string>> actionSuggestions = {
        // not really usable, bare minimum
        {"/interaction_profiles/khr/simple_controller", {
            {"left_secondary_click", hand_left + "/menu/click"},
            {"left_trigger_click", hand_left + "/select/click"},
            {"left_pose", hand_left + "/grip/pose"},
            {"left_haptic", "/user/hand/left/output/haptic"},
            {"right_secondary_click", hand_right + "/menu/click"},
            {"right_trigger_click", hand_right + "/select/click"},
            {"right_pose", hand_right + "/grip/pose"},
            {"right_haptic", "/user/hand/right/output/haptic"},
        }},
        {"/interaction_profiles/htc/vive_controller", {
            {"left_secondary_click", hand_left + "/menu/click"},
            {"left_squeeze_value", hand_left + "/squeeze/click"},
            {"left_trigger_value", hand_left + "/trigger/value"},
            {"left_trigger_click", hand_left + "/trigger/click"},
            {"left_thumbstick", hand_left + "/trackpad"},
            {"left_thumbstick_click", hand_left + "/trackpad/click"},
            {"left_thumbstick_touch", hand_left + "/trackpad/touch"},
            {"left_pose", hand_left + "/grip/pose"},
            {"left_haptic", "/user/hand/left/output/haptic"},
            {"right_secondary_click", hand_right + "/menu/click"},
            {"right_squeeze_value", hand_right + "/squeeze/click"},
            {"right_trigger_value", hand_right + "/trigger/value"},
            {"right_trigger_click", hand_right + "/trigger/click"},
            {"right_thumbstick", hand_right + "/trackpad"},
            {"right_thumbstick_click", hand_right + "/trackpad/click"},
            {"right_thumbstick_touch", hand_right + "/trackpad/touch"},
            {"right_pose", hand_right + "/grip/pose"},
            {"right_haptic", "/user/hand/right/output/haptic"},
        }},
        {"/interaction_profiles/oculus/touch_controller", {
            {"left_primary_click", hand_left + "/x/click"},
            {"left_secondary_click", hand_left + "/y/click"},
            {"left_squeeze_value", hand_left + "/squeeze/value"},
            {"left_trigger_value", hand_left + "/trigger/value"},
            {"left_trigger_click", hand_left + "/trigger/click"},
            {"left_thumbstick", hand_left + "/thumbstick"},
            {"left_thumbstick_click", hand_left + "/thumbstick/click"},
            {"left_thumbstick_touch", hand_left + "/thumbstick/touch"},
            {"left_pose", hand_left + "/grip/pose"},
            {"left_haptic", "/user/hand/left/output/haptic"},
            {"right_primary_click", hand_right + "/a/click"},
            {"right_secondary_click", hand_right + "/b/click"},
            {"right_squeeze_value", hand_right + "/squeeze/value"},
            {"right_trigger_value", hand_right + "/trigger/value"},
            {"right_trigger_click", hand_right + "/trigger/click"},
            {"right_thumbstick", hand_right + "/thumbstick"},
            {"right_thumbstick_click", hand_right + "/thumbstick/click"},
            {"right_thumbstick_touch", hand_right + "/thumbstick/touch"},
            {"right_pose", hand_right + "/grip/pose"},
            {"right_haptic", "/user/hand/right/output/haptic"},
        }},
        {"/interaction_profiles/microsoft/motion_controller", {
            {"left_secondary_click", hand_left + "/menu/click"},
            {"left_squeeze_value", hand_left + "/squeeze/click"},
            {"left_trigger_value", hand_left + "/trigger/value"},
            {"left_trigger_click", hand_left + "/trigger/click"},
            {"left_thumbstick", hand_left + "/thumbstick"},
            {"left_thumbstick_click", hand_left + "/trackpad/click"},
            {"left_thumbstick_touch", hand_left + "/trackpad/touch"},
            {"left_pose", hand_left + "/grip/pose"},
            {"left_haptic", "/user/hand/left/output/haptic"},
            {"right_secondary_click", hand_right + "/menu/click"},
            {"right_squeeze_value", hand_right + "/squeeze/click"},
            {"right_trigger_value", hand_right + "/trigger/value"},
            {"right_trigger_click", hand_right + "/trigger/click"},
            {"right_thumbstick", hand_right + "/thumbstick"},
            {"right_thumbstick_click", hand_right + "/trackpad/click"},
            {"right_thumbstick_touch", hand_right + "/trackpad/touch"},
            {"right_pose", hand_right + "/grip/pose"},
            {"right_haptic", "/user/hand/right/output/haptic"},
        }},
        {"/interaction_profiles/valve/index_controller", {
            {"left_primary_click", hand_left + "/a/click"},
            {"left_secondary_click", hand_left + "/b/click"},
            {"left_squeeze_value", hand_left + "/squeeze/value"},
            {"left_trigger_value", hand_left + "/trigger/value"},
            {"left_trigger_click", hand_left + "/trigger/click"},
            {"left_thumbstick", hand_left + "/thumbstick"},
            {"left_thumbstick_click", hand_left + "/thumbstick/click"},
            {"left_thumbstick_touch", hand_left + "/thumbstick/touch"},
            {"left_pose", hand_left + "/grip/pose"},
            {"left_haptic", "/user/hand/left/output/haptic"},
            {"right_primary_click", hand_right + "/a/click"},
            {"right_secondary_click", hand_right + "/b/click"},
            {"right_squeeze_value", hand_right + "/squeeze/value"},
            {"right_trigger_value", hand_right + "/trigger/value"},
            {"right_trigger_click", hand_right + "/trigger/click"},
            {"right_thumbstick", hand_right + "/thumbstick"},
            {"right_thumbstick_click", hand_right + "/thumbstick/click"},
            {"right_thumbstick_touch", hand_right + "/thumbstick/touch"},
            {"right_pose", hand_right + "/grip/pose"},
            {"right_haptic", "/user/hand/right/output/haptic"},
        }},
    };

    // Create every action; failed actions are logged and left out of _actions,
    // so later lookups must tolerate missing entries.
    for (const auto& [id, args] : actionTypes) {
        auto friendlyName = args.first;
        auto xr_type = args.second;
        std::shared_ptr<Action> action = std::make_shared<Action>(_context, id, friendlyName, xr_type);
        if (!action->init(_actionSet)) {
            qCCritical(xr_input_cat) << "Creating action " << id.c_str() << " failed!";
        } else {
            _actions.emplace(id, action);
        }
    }

    // Binding suggestions are best-effort: a runtime that rejects one profile
    // can still use the others.
    for (const auto& [profile, input] : actionSuggestions) {
        if (!initBindings(profile, input)) {
            qCWarning(xr_input_cat) << "Failed to suggest actions for " << profile.c_str();
        }
    }

    // Attaching is a one-shot operation per session; after this the action set
    // is immutable, hence the _actionsInitialized latch.
    XrSessionActionSetsAttachInfo attachInfo = {
        .type = XR_TYPE_SESSION_ACTION_SETS_ATTACH_INFO,
        .countActionSets = 1,
        .actionSets = &_actionSet,
    };
    result = xrAttachSessionActionSets(_context->_session, &attachInfo);
    if (!xrCheck(_context->_instance, result, "Failed to attach action set"))
        return false;

    _actionsInitialized = true;
    return true;
}
// Per-frame poll: syncs OpenXR action state and translates it into Overte's
// pose/axis/button maps, in avatar-relative coordinates.
void OpenXrInputPlugin::InputDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
    // Poses and buttons are rebuilt from scratch each frame; axes persist
    // unless explicitly overwritten below.
    _poseStateMap.clear();
    _buttonPressedMap.clear();
    _trackedControllers = 2;

    if (_context->_session == XR_NULL_HANDLE) {
        return;
    }

    // Actions can only be created once a session exists, so do it lazily here.
    if (!_actionsInitialized && !initActions()) {
        qCCritical(xr_input_cat) << "Could not initialize actions!";
        return;
    }

    const XrActiveActionSet active_actionset = {
        .actionSet = _actionSet,
    };

    XrActionsSyncInfo syncInfo = {
        .type = XR_TYPE_ACTIONS_SYNC_INFO,
        .countActiveActionSets = 1,
        .activeActionSets = &active_actionset,
    };

    XrInstance instance = _context->_instance;
    XrSession session = _context->_session;

    // Pull the latest input state from the runtime; a failure here is logged
    // but the frame continues with whatever state the actions last held.
    XrResult result = xrSyncActions(session, &syncInfo);
    xrCheck(instance, result, "failed to sync actions!");

    // Sensor-space -> avatar-space transform for all poses this frame.
    glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;

    // Rotation offsets mapping the runtime's grip pose onto the avatar's hand
    // orientation. NOTE(review): these angles appear hand-tuned — confirm
    // against the grip pose conventions of the targeted runtimes.
    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
    static const glm::quat touchToHand = yFlip * quarterX;

    static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
    static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);

    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);

    static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
    static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;

    // i == 0 is the left hand, i == 1 the right hand.
    for (int i = 0; i < HAND_COUNT; i++) {
        auto hand_path = (i == 0) ? "left_pose" : "right_pose";
        XrSpaceLocation handLocation = _actions.at(hand_path)->getPose();
        bool locationValid = (handLocation.locationFlags & XR_SPACE_LOCATION_ORIENTATION_VALID_BIT) != 0;
        if (locationValid) {
            vec3 translation = xrVecToGlm(handLocation.pose.position);
            quat rotation = xrQuatToGlm(handLocation.pose.orientation);
            auto pose = controller::Pose(translation, rotation);

            glm::mat4 handOffset = i == 0 ? glm::toMat4(leftRotationOffset) : glm::toMat4(rightRotationOffset);

            // Positional offset from the grip pose toward the palm, expressed
            // along the rotated hand axes; X is mirrored between hands.
            glm::mat4 posOffset(1.0f);
            posOffset *= glm::translate(glm::vec3(handOffset[0]) * (i == 0 ? 0.1f : -0.1f));
            posOffset *= glm::translate(glm::vec3(handOffset[1]) * -0.16f);
            posOffset *= glm::translate(glm::vec3(handOffset[2]) * -0.02f);

            _poseStateMap[i == 0 ? controller::LEFT_HAND : controller::RIGHT_HAND] =
                pose.postTransform(posOffset).postTransform(handOffset).transform(sensorToAvatar);
        }
    }

    glm::mat4 defaultHeadOffset;
    float eyeZOffset = 0.16f;
    if (inputCalibrationData.hmdAvatarAlignmentType == controller::HmdAvatarAlignmentType::Eyes) {
        // align the eyes of the user with the eyes of the avatar
        defaultHeadOffset = Matrices::Y_180 * (glm::inverse(inputCalibrationData.defaultCenterEyeMat) * inputCalibrationData.defaultHeadMat);
        // don't double up on eye offset
        eyeZOffset = 0.0f;
    } else {
        defaultHeadOffset = createMatFromQuatAndPos(-DEFAULT_AVATAR_HEAD_ROT, -DEFAULT_AVATAR_HEAD_TO_MIDDLE_EYE_OFFSET);
    }

    // try to account for weirdness with HMD view being inside the root of the head bone
    auto headCorrectionA = glm::translate(glm::vec3(0.0f, 0.16f, eyeZOffset));
    auto headCorrectionB = glm::translate(glm::vec3(0.0f, -0.2f, 0.0f));
    _poseStateMap[controller::HEAD] = _context->_lastHeadPose.postTransform(headCorrectionA).postTransform(defaultHeadOffset).postTransform(headCorrectionB).transform(sensorToAvatar);

    // Analog (float) channels: triggers and grips.
    std::vector<std::pair<std::string, controller::StandardAxisChannel>> floatsToUpdate = {
        {"left_trigger_value", controller::LT},
        {"left_squeeze_value", controller::LEFT_GRIP},
        {"right_trigger_value", controller::RT},
        {"right_squeeze_value", controller::RIGHT_GRIP},
    };

    for (const auto& [name, channel] : floatsToUpdate) {
        auto action = _actions.at(name)->getFloat();
        if (action.isActive) {
            _axisStateMap[channel].value = action.currentState;
        }
    }

    // 2D stick/trackpad channels, split into separate X/Y axis channels.
    std::vector<std::tuple<std::string, controller::StandardAxisChannel, controller::StandardAxisChannel>> axesToUpdate = {
        {"left_thumbstick", controller::LX, controller::LY},
        {"right_thumbstick", controller::RX, controller::RY},
    };

    for (const auto& [name, x_channel, y_channel] : axesToUpdate) {
        auto action = _actions.at(name)->getVector2f();
        if (action.isActive) {
            _axisStateMap[x_channel].value = action.currentState.x;
            // Note the Y sign flip between OpenXR and the controller system.
            _axisStateMap[y_channel].value = -action.currentState.y;
        }
    }

    // Digital (button) channels.
    std::vector<std::pair<std::string, controller::StandardButtonChannel>> buttonsToUpdate = {
        {"left_primary_click", controller::LEFT_PRIMARY_THUMB},
        {"left_secondary_click", controller::LEFT_SECONDARY_THUMB},
        {"left_trigger_click", controller::LT_CLICK},
        {"left_thumbstick_click", controller::LS},
        {"left_thumbstick_touch", controller::LS_TOUCH},
        {"right_primary_click", controller::RIGHT_PRIMARY_THUMB},
        {"right_secondary_click", controller::RIGHT_SECONDARY_THUMB},
        {"right_trigger_click", controller::RT_CLICK},
        {"right_thumbstick_click", controller::RS},
        {"right_thumbstick_touch", controller::RS_TOUCH},
    };

    for (const auto& [name, channel] : buttonsToUpdate) {
        auto action = _actions.at(name)->getBool();
        if (action.isActive && action.currentState) {
            _buttonPressedMap.insert(channel);
        }
    }

    // Post-processing hacks for script compatibility; these overwrite some of
    // the axis values written above.
    awfulRightStickHackForBrokenScripts();

    if (_context->_stickEmulation) {
        emulateStickFromTrackpad();
    }
}
// Makes a clicked trackpad behave like a thumbstick: the pad position only
// counts as stick input while physically clicked in, and a click in the
// center zone registers as the "primary" button.
void OpenXrInputPlugin::InputDevice::emulateStickFromTrackpad() {
    constexpr float CENTER_ZONE = 0.3f;

    auto leftPad = _actions.at("left_thumbstick")->getVector2f().currentState;
    auto rightPad = _actions.at("right_thumbstick")->getVector2f().currentState;
    bool leftPressed = _actions.at("left_thumbstick_click")->getBool().currentState;
    bool rightPressed = _actions.at("right_thumbstick_click")->getBool().currentState;

    // Trackpads report a position as soon as they are touched; zero the axes
    // unless the pad is actually clicked in.
    if (!leftPressed) {
        _axisStateMap[controller::LX].value = 0.0f;
        _axisStateMap[controller::LY].value = 0.0f;
    }
    if (!rightPressed) {
        _axisStateMap[controller::RX].value = 0.0f;
        _axisStateMap[controller::RY].value = 0.0f;
    }

    // A click near the middle of the pad acts as the primary thumb button.
    auto inCenter = [](const auto& pad) {
        return pad.x > -CENTER_ZONE && pad.x < CENTER_ZONE &&
               pad.y > -CENTER_ZONE && pad.y < CENTER_ZONE;
    };
    if (leftPressed && inCenter(leftPad)) {
        _buttonPressedMap.insert(controller::LEFT_PRIMARY_THUMB);
    }
    if (rightPressed && inCenter(rightPad)) {
        _buttonPressedMap.insert(controller::RIGHT_PRIMARY_THUMB);
    }
}
// FIXME: the vr controller scripts are horribly broken and don't work properly,
// this emulates a segmented vive trackpad to get teleport and snap turning behaving
void OpenXrInputPlugin::InputDevice::awfulRightStickHackForBrokenScripts() {
    constexpr float PUSH = 0.6f;   // how far the stick must be pushed to register
    constexpr float BAND = 0.4f;   // cross-axis tolerance for a segment

    auto pad = _actions.at("right_thumbstick")->getVector2f().currentState;

    // Default to centered; only a firm push into one of four segments counts.
    _axisStateMap[controller::RX].value = 0.0f;
    _axisStateMap[controller::RY].value = 0.0f;

    bool withinHorizontalBand = pad.y > -BAND && pad.y < BAND;
    bool withinVerticalBand = pad.x > -BAND && pad.x < BAND;

    if (withinHorizontalBand) {
        if (pad.x < -PUSH) {
            _axisStateMap[controller::RX].value = -1.0f;
        } else if (pad.x > PUSH) {
            _axisStateMap[controller::RX].value = 1.0f;
        }
    }
    if (withinVerticalBand) {
        // Y output is sign-flipped, matching the inversion applied in update().
        if (pad.y > PUSH) {
            _axisStateMap[controller::RY].value = -1.0f;
        } else if (pad.y < -PUSH) {
            _axisStateMap[controller::RY].value = 1.0f;
        }
    }
}

View file

@ -0,0 +1,110 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#pragma once
#include "plugins/InputPlugin.h"
#include "controllers/InputDevice.h"
#include "OpenXrContext.h"
#define HAND_COUNT 2
class OpenXrInputPlugin : public InputPlugin {
Q_OBJECT
public:
OpenXrInputPlugin(std::shared_ptr<OpenXrContext> c);
bool isSupported() const override;
const QString getName() const override { return "OpenXR"; }
bool isHandController() const override { return true; }
bool configurable() override { return true; }
QString configurationLayout() override;
void setConfigurationSettings(const QJsonObject configurationSettings) override;
QJsonObject configurationSettings() override;
void calibrate() override;
bool uncalibrate() override;
bool isHeadController() const override { return true; }
bool activate() override;
void deactivate() override;
QString getDeviceName() override { return _context.get()->_systemName; }
void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;
virtual void saveSettings() const override;
virtual void loadSettings() override;
private:
class Action {
public:
Action(std::shared_ptr<OpenXrContext> c, const std::string& id, const std::string &friendlyName, XrActionType type) {
_context = c;
_id = id;
_friendlyName = friendlyName;
_type = type;
}
bool init(XrActionSet actionSet);
std::vector<XrActionSuggestedBinding> getBindings();
XrActionStateFloat getFloat();
XrActionStateVector2f getVector2f();
XrActionStateBoolean getBool();
XrSpaceLocation getPose();
bool applyHaptic(XrDuration duration, float frequency, float amplitude);
XrAction _action = XR_NULL_HANDLE;
private:
bool createPoseSpaces();
std::shared_ptr<OpenXrContext> _context;
std::string _id, _friendlyName;
XrActionType _type;
XrSpace _poseSpace = XR_NULL_HANDLE;
};
class InputDevice : public controller::InputDevice {
public:
InputDevice(std::shared_ptr<OpenXrContext> c);
private:
controller::Input::NamedVector getAvailableInputs() const override;
QString getDefaultMappingConfig() const override;
void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;
void focusOutEvent() override;
bool triggerHapticPulse(float strength, float duration, uint16_t index) override;
void emulateStickFromTrackpad();
void awfulRightStickHackForBrokenScripts();
mutable std::recursive_mutex _lock;
template <typename F>
void withLock(F&& f) {
std::unique_lock<std::recursive_mutex> locker(_lock);
f();
}
friend class OpenXrInputPlugin;
uint32_t _trackedControllers = 0;
XrActionSet _actionSet;
std::map<std::string, std::shared_ptr<Action>> _actions;
std::shared_ptr<OpenXrContext> _context;
bool _actionsInitialized = false;
bool initActions();
bool initBindings(const std::string& profileName, const std::map<std::string, std::string>& actionsToBind);
};
bool _registeredWithInputMapper = false;
std::shared_ptr<OpenXrContext> _context;
std::shared_ptr<InputDevice> _inputDevice;
};

View file

@ -0,0 +1,60 @@
//
// Overte OpenXR Plugin
//
// Copyright 2024 Lubosz Sarnecki
// Copyright 2024 Overte e.V.
//
// SPDX-License-Identifier: Apache-2.0
//
#include "plugins/RuntimePlugin.h"
#include "OpenXrDisplayPlugin.h"
#include "OpenXrInputPlugin.h"
// Qt plugin entry point exposing both the OpenXR display plugin and the
// OpenXR input plugin; the two share a single OpenXrContext instance.
// NOTE(review): InputProvider is inherited without an access specifier, i.e.
// privately for a class — confirm this is intended alongside Q_INTERFACES.
class OpenXrProvider : public QObject, public DisplayProvider, InputProvider {
    Q_OBJECT
    Q_PLUGIN_METADATA(IID DisplayProvider_iid FILE "plugin.json")
    Q_INTERFACES(DisplayProvider)
    // NOTE(review): Qt documents a single Q_PLUGIN_METADATA per plugin class;
    // verify that moc accepts this second declaration.
    Q_PLUGIN_METADATA(IID InputProvider_iid FILE "plugin.json")
    Q_INTERFACES(InputProvider)

public:
    OpenXrProvider(QObject* parent = nullptr) : QObject(parent) {}
    virtual ~OpenXrProvider() {}

    // Shared by the display and input plugins created below.
    std::shared_ptr<OpenXrContext> context = std::make_shared<OpenXrContext>();

    // Lazily builds the display plugin list once; only supported plugins are kept.
    virtual DisplayPluginList getDisplayPlugins() override {
        static std::once_flag once;
        std::call_once(once, [&] {
            DisplayPluginPointer plugin(std::make_shared<OpenXrDisplayPlugin>(context));
            if (plugin->isSupported()) {
                _displayPlugins.push_back(plugin);
            }
        });

        return _displayPlugins;
    }

    // Lazily builds the input plugin list once; only supported plugins are kept.
    virtual InputPluginList getInputPlugins() override {
        static std::once_flag once;
        std::call_once(once, [&] {
            InputPluginPointer plugin(std::make_shared<OpenXrInputPlugin>(context));
            if (plugin->isSupported()) {
                _inputPlugins.push_back(plugin);
            }
        });

        return _inputPlugins;
    }

    virtual void destroyInputPlugins() override { _inputPlugins.clear(); }

    virtual void destroyDisplayPlugins() override { _displayPlugins.clear(); }

private:
    DisplayPluginList _displayPlugins;
    InputPluginList _inputPlugins;
};

// moc output for the Q_OBJECT/Q_PLUGIN_METADATA machinery above.
#include "OpenXrProvider.moc"

View file

@ -0,0 +1,4 @@
{
"name":"OpenXR",
"version":1
}