Mirror of https://thingvellir.net/git/overte, synced 2025-03-27 23:52:03 +01:00

Merge pull request #7491 from jherico/oculus_release
Update to latest Oculus SDK, add input device support

Commit: f0cc8c0dd3
22 changed files with 488 additions and 95 deletions
cmake/externals/LibOVR/CMakeLists.txt (vendored): 23 changes
@@ -12,19 +12,16 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
# 0.5 public
# URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
# URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
# 0.6 public
# URL http://static.oculus.com/sdk-downloads/0.6.0.1/Public/1435190862/ovr_sdk_win_0.6.0.1.zip
# URL_MD5 4b3ef825f9a1d6d3035c9f6820687da9
# 0.8 public
# URL http://static.oculus.com/sdk-downloads/0.8.0.0/Public/1445451746/ovr_sdk_win_0.8.0.0.zip
# URL_MD5 54944b03b95149d6010f84eb701b9647
# 1.3 public
# URL http://hifi-public.s3.amazonaws.com/dependencies/ovr_sdk_win_1.3.0_public.zip
# URL_MD5 4d26faba0c1f35ff80bf674c96ed9259

if (WIN32)

ExternalProject_Add(
${EXTERNAL_NAME}
URL http://static.oculus.com/sdk-downloads/0.8.0.0/Public/1445451746/ovr_sdk_win_0.8.0.0.zip
URL_MD5 54944b03b95149d6010f84eb701b9647
URL http://hifi-public.s3.amazonaws.com/dependencies/ovr_sdk_win_1.3.0_public.zip
URL_MD5 a2dcf695e0f03a70fdd1ed7480585e82
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

@@ -33,14 +30,16 @@ if (WIN32)

ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR)

# FIXME need to account for different architectures
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL)
set(LIBOVR_DIR ${SOURCE_DIR}/OculusSDK/LibOVR)
if ("${CMAKE_SIZEOF_VOID_P}" EQUAL "8")
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/x64/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)
set(LIBOVR_LIB_DIR ${LIBOVR_DIR}/Lib/Windows/x64/Release/VS2013 CACHE TYPE INTERNAL)
else()
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/Win32/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL)
set(LIBOVR_LIB_DIR ${LIBOVR_DIR}/Lib/Windows/Win32/Release/VS2013 CACHE TYPE INTERNAL)
endif()

set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${LIBOVR_DIR}/Include CACHE TYPE INTERNAL)
set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${LIBOVR_LIB_DIR}/LibOVR.lib CACHE TYPE INTERNAL)

elseif(APPLE)

ExternalProject_Add(
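
The CMake change above swaps the downloaded external from the 0.8 SDK to the 1.3 public SDK and adjusts the include/library paths to the new archive layout. A minimal, hypothetical C++ check (not part of this PR) for confirming which LibOVR runtime a rebuilt plugin actually initializes against:

```cpp
// Hypothetical sanity check, not part of this PR: print the LibOVR runtime
// version that the freshly built binaries link and initialize against.
#include <OVR_CAPI.h>
#include <cstdio>

int main() {
    if (OVR_FAILURE(ovr_Initialize(nullptr))) {
        std::printf("Oculus runtime not available\n");
        return 1;
    }
    // Reports the linked LibOVR / runtime version string, expected to be 1.3.x
    // after this SDK bump.
    std::printf("LibOVR version: %s\n", ovr_GetVersionString());
    ovr_Shutdown();
    return 0;
}
```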
@@ -1489,11 +1489,15 @@ void Application::paintGL() {
// FIXME not needed anymore?
_offscreenContext->makeCurrent();

displayPlugin->updateHeadPose(_frameCount);
displayPlugin->beginFrameRender(_frameCount);

// update the avatar with a fresh HMD pose
getMyAvatar()->updateFromHMDSensorMatrix(getHMDSensorPose());

// update sensorToWorldMatrix for camera and hand controllers
getMyAvatar()->updateSensorToWorldMatrix();


auto lodManager = DependencyManager::get<LODManager>();

@@ -2006,6 +2010,12 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;

case Qt::Key_Y:
if (isShifted && isMeta) {
getActiveDisplayPlugin()->cycleDebugOutput();
}
break;

case Qt::Key_B:
if (isMeta) {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
@@ -2571,11 +2581,6 @@ void Application::idle(uint64_t now) {
return; // bail early, nothing to do here.
}

checkChangeCursor();

Stats::getInstance()->updateStats();
AvatarInputs::getInstance()->update();

// These tasks need to be done on our first idle, because we don't want the showing of
// overlay subwindows to do a showDesktop() until after the first time through
static bool firstIdle = true;
@@ -2624,6 +2629,11 @@ void Application::idle(uint64_t now) {
// We're going to execute idle processing, so restart the last idle timer
_lastTimeUpdated.start();

checkChangeCursor();

Stats::getInstance()->updateStats();
AvatarInputs::getInstance()->update();

{
static uint64_t lastIdleStart{ now };
uint64_t idleStartToStartDuration = now - lastIdleStart;
@@ -3388,9 +3398,6 @@ void Application::update(float deltaTime) {

qApp->updateMyAvatarLookAtPosition();

// update sensorToWorldMatrix for camera and hand controllers
myAvatar->updateSensorToWorldMatrix();

{
PROFILE_RANGE_EX("MyAvatar", 0xffff00ff, (uint64_t)getActiveDisplayPlugin()->presentCount());
avatarManager->updateMyAvatar(deltaTime);
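
In paintGL() the old displayPlugin->updateHeadPose(_frameCount) call gives way to beginFrameRender(_frameCount), which samples the HMD pose and records it per frame index before the avatar consumes it. A rough, self-contained sketch of that ordering, with stand-in stub types in place of the real Application and DisplayPlugin classes:

```cpp
// Illustrative only: PluginStub and AvatarStub are stand-ins, not the real classes.
#include <cstdint>
#include <cstdio>

struct PluginStub {
    // Replaces the old updateHeadPose(): samples tracking and stores it per frame index.
    void beginFrameRender(uint32_t frameIndex) { std::printf("sample HMD pose for frame %u\n", frameIndex); }
};

struct AvatarStub {
    void updateFromHMDSensorMatrix() { std::printf("avatar consumes fresh HMD pose\n"); }
    void updateSensorToWorldMatrix() { std::printf("refresh sensor-to-world for camera/hands\n"); }
};

int main() {
    PluginStub displayPlugin;
    AvatarStub myAvatar;
    for (uint32_t frameCount = 0; frameCount < 3; ++frameCount) {
        displayPlugin.beginFrameRender(frameCount);  // 1. sample and tag the pose with the frame index
        myAvatar.updateFromHMDSensorMatrix();        // 2. avatar uses that pose
        myAvatar.updateSensorToWorldMatrix();        // 3. camera and hand controllers follow
    }
    return 0;
}
```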
@@ -81,6 +81,9 @@ namespace controller {
// Triggers
LT,
RT,
// Grips (Oculus touch squeeze)
LG,
RG,
NUM_STANDARD_AXES,
LZ = LT,
RZ = RT
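
The standard axis enum gains LG/RG for the Touch grip (squeeze) triggers, added just before NUM_STANDARD_AXES so earlier axis values are unchanged and LZ/RZ stay aliases of LT/RT. A small standalone sketch of that layout (abridged, illustrative names rather than the real header):

```cpp
// Standalone illustration of the enum layout; the real enum lives in
// controllers/StandardControls.h and contains more entries.
#include <cstdio>

enum StandardAxisSketch {
    LX, LY, RX, RY,   // thumbstick axes (abridged)
    LT, RT,           // index triggers
    LG, RG,           // new: grip (squeeze) triggers for Oculus Touch
    NUM_STANDARD_AXES,
    LZ = LT,          // legacy aliases keep old names working
    RZ = RT
};

int main() {
    std::printf("axes: %d, LZ aliases LT: %d\n", NUM_STANDARD_AXES, LZ == LT);
    return 0;
}
```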
@@ -552,9 +552,9 @@ float OpenGLDisplayPlugin::presentRate() {
{
Lock lock(_mutex);
result = _usecsPerFrame.getAverage();
result = 1.0f / result;
result *= USECS_PER_SECOND;
}
result = 1.0f / result;
result *= USECS_PER_SECOND;
return result;
}
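
The presentRate() change moves the conversion from average microseconds-per-frame to frames-per-second outside the locked block, so the mutex only guards the read of the shared moving average. A sketch of the resulting shape, with stand-in types:

```cpp
// Sketch of the pattern after the change: hold the lock only while reading the
// shared average, do the arithmetic afterwards. Types and values are stand-ins.
#include <mutex>

static constexpr float USECS_PER_SECOND = 1000000.0f;

struct RateTracker {
    std::mutex _mutex;
    float _avgUsecsPerFrame { 11111.0f };   // e.g. ~90 Hz

    float presentRate() {
        float result;
        {
            std::lock_guard<std::mutex> lock(_mutex);   // lock only for the shared read
            result = _avgUsecsPerFrame;
        }
        result = 1.0f / result;            // frames per microsecond
        result *= USECS_PER_SECOND;        // frames per second
        return result;
    }
};
```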
@@ -17,9 +17,9 @@
#include <GLMHelpers.h>
#include <SimpleMovingAverage.h>
#include <gl/OglplusHelpers.h>
#include <gl/GLEscrow.h>

#define THREADED_PRESENT 1
#include <gl/GLEscrow.h>

class OpenGLDisplayPlugin : public DisplayPlugin {
protected:
@@ -69,10 +69,11 @@ void HmdDisplayPlugin::compositeOverlay() {
// set the alpha
Uniform<float>(*_program, _alphaUniform).Set(overlayAlpha);

auto eyePoses = _currentPresentFrameInfo.eyePoses;
_sphereSection->Use();
for_each_eye([&](Eye eye) {
eyeViewport(eye);
auto modelView = glm::inverse(_currentRenderEyePoses[eye]); // *glm::translate(mat4(), vec3(0, 0, -1));
auto modelView = glm::inverse(eyePoses[eye]); // *glm::translate(mat4(), vec3(0, 0, -1));
auto mvp = _eyeProjections[eye] * modelView;
Uniform<glm::mat4>(*_program, _mvpUniform).Set(mvp);
_sphereSection->Draw();
@@ -95,10 +96,10 @@ void HmdDisplayPlugin::compositePointer() {
// Mouse pointer
_plane->Use();
// Reconstruct the headpose from the eye poses
auto headPosition = (vec3(_currentRenderEyePoses[Left][3]) + vec3(_currentRenderEyePoses[Right][3])) / 2.0f;
auto headPosition = vec3(_currentPresentFrameInfo.headPose[3]);
for_each_eye([&](Eye eye) {
eyeViewport(eye);
auto reticleTransform = compositorHelper->getReticleTransform(_currentRenderEyePoses[eye], headPosition);
auto reticleTransform = compositorHelper->getReticleTransform(_currentPresentFrameInfo.eyePoses[eye], headPosition);
auto mvp = _eyeProjections[eye] * reticleTransform;
Uniform<glm::mat4>(*_program, _mvpUniform).Set(mvp);
_plane->Draw();
@@ -160,15 +161,28 @@ void HmdDisplayPlugin::internalPresent() {

void HmdDisplayPlugin::setEyeRenderPose(uint32_t frameIndex, Eye eye, const glm::mat4& pose) {
Lock lock(_mutex);
_renderEyePoses[frameIndex][eye] = pose;
FrameInfo& frame = _frameInfos[frameIndex];
frame.eyePoses[eye] = pose;
}

void HmdDisplayPlugin::updateFrameData() {
// Check if we have old frame data to discard
{
Lock lock(_mutex);
auto itr = _frameInfos.find(_currentRenderFrameIndex);
if (itr != _frameInfos.end()) {
_frameInfos.erase(itr);
}
}

Parent::updateFrameData();
Lock lock(_mutex);
_currentRenderEyePoses = _renderEyePoses[_currentRenderFrameIndex];

{
Lock lock(_mutex);
_currentPresentFrameInfo = _frameInfos[_currentRenderFrameIndex];
}
}

glm::mat4 HmdDisplayPlugin::getHeadPose() const {
return _headPoseCache.get();
return _currentRenderFrameInfo.get().headPose;
}
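
compositePointer() now derives the head position directly from the present-frame head pose instead of averaging the two eye poses; column 3 of a 4x4 rigid transform is its translation, which is what headPose[3] extracts. A tiny standalone GLM example of that extraction, with hypothetical values:

```cpp
// Standalone GLM example: the translation of a rigid transform is its 4th column.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <cstdio>

int main() {
    glm::mat4 headPose = glm::translate(glm::mat4(1.0f), glm::vec3(0.1f, 1.7f, -0.3f));
    glm::vec3 headPosition = glm::vec3(headPose[3]);   // same trick as headPose[3] in the diff
    std::printf("head position: %.1f %.1f %.1f\n", headPosition.x, headPosition.y, headPosition.z);
    return 0;
}
```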
@@ -28,6 +28,16 @@ public:

virtual glm::mat4 getHeadPose() const override;

using EyePoses = std::array<glm::mat4, 2>;

struct FrameInfo {
EyePoses eyePoses;
glm::mat4 headPose;
double sensorSampleTime { 0 };
double predictedDisplayTime { 0 };
};


protected:
virtual void hmdPresent() = 0;
virtual bool isHmdMounted() const = 0;
@@ -46,10 +56,10 @@ protected:
glm::mat4 _cullingProjection;
glm::uvec2 _renderTargetSize;
float _ipd { 0.064f };
using EyePoses = std::array<glm::mat4, 2>;
QMap<uint32_t, EyePoses> _renderEyePoses;
EyePoses _currentRenderEyePoses;
ThreadSafeValueCache<glm::mat4> _headPoseCache { glm::mat4() };

QMap<uint32_t, FrameInfo> _frameInfos;
FrameInfo _currentPresentFrameInfo;
ThreadSafeValueCache<FrameInfo> _currentRenderFrameInfo;

private:
bool _enablePreview { false };
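
The header consolidates the scattered per-frame state (eye poses, head pose, timing) into one FrameInfo record kept in a QMap keyed by frame index, with the latest render-side copy held in a ThreadSafeValueCache. A self-contained sketch of that bookkeeping pattern, using std::map in place of QMap and fabricated names for the wrapper class:

```cpp
// Sketch of the FrameInfo bookkeeping; std::map stands in for QMap and FrameBook
// is a made-up wrapper. Mirrors setEyeRenderPose()/updateFrameData() above.
#include <array>
#include <cstdint>
#include <map>
#include <mutex>
#include <glm/glm.hpp>

struct FrameInfo {
    std::array<glm::mat4, 2> eyePoses;
    glm::mat4 headPose;
    double sensorSampleTime { 0 };
    double predictedDisplayTime { 0 };
};

class FrameBook {
public:
    // Render thread: record the pose for a given frame index.
    void setEyeRenderPose(uint32_t frameIndex, int eye, const glm::mat4& pose) {
        std::lock_guard<std::mutex> lock(_mutex);
        _frameInfos[frameIndex].eyePoses[eye] = pose;
    }
    // Present thread: pull (and retire) the record for the frame being presented.
    FrameInfo takeFrame(uint32_t frameIndex) {
        std::lock_guard<std::mutex> lock(_mutex);
        FrameInfo info = _frameInfos[frameIndex];
        _frameInfos.erase(frameIndex);
        return info;
    }
private:
    std::mutex _mutex;
    std::map<uint32_t, FrameInfo> _frameInfos;
};
```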
@@ -122,7 +122,7 @@ public:
}

// will query the underlying hmd api to compute the most recent head pose
virtual void updateHeadPose(uint32_t frameIndex) {}
virtual void beginFrameRender(uint32_t frameIndex) {}

// returns a copy of the most recent head pose, computed via updateHeadPose
virtual glm::mat4 getHeadPose() const {
@@ -142,6 +142,8 @@ public:
virtual float presentRate() { return -1.0f; }
uint32_t presentCount() const { return _presentedFrameIndex; }

virtual void cycleDebugOutput() {}

static const QString& MENU_PATH();

signals:
@@ -23,6 +23,7 @@
template <typename T>
class ThreadSafeValueCache {
public:
ThreadSafeValueCache() {}
ThreadSafeValueCache(const T& v) : _value { v } {}

// returns atomic copy of the cached value.
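
ThreadSafeValueCache gains a default constructor so it can hold aggregate types such as FrameInfo without an initial value. For context, a minimal mutex-based sketch of what such a cache looks like; the real class lives elsewhere in the repository and may differ in detail:

```cpp
// Minimal sketch of a mutex-guarded value cache with get()/set(); the real
// ThreadSafeValueCache in the repository may differ in detail.
#include <mutex>

template <typename T>
class ThreadSafeValueCacheSketch {
public:
    ThreadSafeValueCacheSketch() {}
    ThreadSafeValueCacheSketch(const T& v) : _value(v) {}

    // returns an atomic copy of the cached value
    T get() const {
        std::lock_guard<std::mutex> lock(_mutex);
        return _value;
    }
    // atomically replaces the cached value
    void set(const T& v) {
        std::lock_guard<std::mutex> lock(_mutex);
        _value = v;
    }
private:
    mutable std::mutex _mutex;
    T _value;
};
```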
@@ -12,14 +12,19 @@
#include "OculusHelpers.h"

void OculusBaseDisplayPlugin::resetSensors() {
ovr_RecenterPose(_session);
ovr_RecenterTrackingOrigin(_session);
}

void OculusBaseDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
auto displayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
auto trackingState = ovr_GetTrackingState(_session, displayTime, true);
mat4 headPose = toGlm(trackingState.HeadPose.ThePose);
_headPoseCache.set(headPose);
void OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
FrameInfo frame;
frame.sensorSampleTime = ovr_GetTimeInSeconds();;
frame.predictedDisplayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
auto trackingState = ovr_GetTrackingState(_session, frame.predictedDisplayTime, ovrTrue);
frame.headPose = toGlm(trackingState.HeadPose.ThePose);

_currentRenderFrameInfo.set(frame);
Lock lock(_mutex);
_frameInfos[frameIndex] = frame;
}

bool OculusBaseDisplayPlugin::isSupported() const {
@@ -42,36 +47,30 @@ bool OculusBaseDisplayPlugin::internalActivate() {

_hmdDesc = ovr_GetHmdDesc(_session);

_ipd = ovr_GetFloat(_session, OVR_KEY_IPD, _ipd);

glm::uvec2 eyeSizes[2];
_viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;

_ipd = 0;
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_ClipRangeOpenGL);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
_eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeOffset));
eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
_viewScaleDesc.HmdToEyeViewOffset[eye] = erd.HmdToEyeViewOffset;
_viewScaleDesc.HmdToEyeOffset[eye] = erd.HmdToEyeOffset;
_ipd += glm::abs(glm::length(toGlm(erd.HmdToEyeOffset)));
});

auto combinedFov = _eyeFovs[0];
combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
_cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_ClipRangeOpenGL));

_renderTargetSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));

if (!OVR_SUCCESS(ovr_ConfigureTracking(_session,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
logWarning("Failed to attach to sensor device");
}

// Parent class relies on our _session intialization, so it must come after that.
memset(&_sceneLayer, 0, sizeof(ovrLayerEyeFov));
_sceneLayer.Header.Type = ovrLayerType_EyeFov;
_sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
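
internalActivate() shows most of the 0.8-to-1.3 renames in one place: ovr_RecenterPose becomes ovr_RecenterTrackingOrigin, HmdToEyeViewOffset becomes HmdToEyeOffset, the projection flag switches to ovrProjection_ClipRangeOpenGL, and the IPD is now accumulated from the per-eye offsets instead of being read from OVR_KEY_IPD. A condensed sketch of that per-eye setup against the 1.3 CAPI (the near/far clip values and the simple IPD accumulation here are placeholders, include paths follow the 1.3 SDK layout):

```cpp
// Condensed per-eye setup against the Oculus 1.3 CAPI, following the diff above.
// DEFAULT_NEAR_CLIP / DEFAULT_FAR_CLIP are placeholder values.
#include <OVR_CAPI.h>
#include <Extras/OVR_CAPI_Util.h>   // ovrMatrix4f_Projection
#include <cmath>

static void setupEyes(ovrSession session, const ovrHmdDesc& hmdDesc,
                      ovrMatrix4f eyeProjections[2], ovrVector3f eyeOffsets[2], float& ipd) {
    const float DEFAULT_NEAR_CLIP = 0.08f;
    const float DEFAULT_FAR_CLIP = 16384.0f;
    ipd = 0.0f;
    for (int eye = 0; eye < 2; ++eye) {
        ovrEyeRenderDesc erd = ovr_GetRenderDesc(session, (ovrEyeType)eye, hmdDesc.DefaultEyeFov[eye]);
        // 1.3: ask for an OpenGL clip range instead of the old RightHanded flag
        eyeProjections[eye] = ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP,
                                                     ovrProjection_ClipRangeOpenGL);
        // 1.3: HmdToEyeOffset replaces HmdToEyeViewOffset
        eyeOffsets[eye] = erd.HmdToEyeOffset;
        ipd += std::abs(eyeOffsets[eye].x);   // rough IPD from the per-eye offsets
    }
}
```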
@@ -20,7 +20,8 @@ public:

// Stereo specific methods
virtual void resetSensors() override final;
virtual void updateHeadPose(uint32_t frameIndex) override;
virtual void beginFrameRender(uint32_t frameIndex) override;


protected:
void customizeContext() override;
@@ -28,9 +29,8 @@ protected:
void internalDeactivate() override;

protected:
ovrSession _session;
ovrSession _session { nullptr };
ovrGraphicsLuid _luid;
float _ipd{ OVR_DEFAULT_IPD };
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrFovPort _eyeFovs[2];
ovrHmdDesc _hmdDesc;
plugins/oculus/src/OculusControllerManager.cpp (new file): 219 lines

@@ -0,0 +1,219 @@
//
// OculusControllerManager.cpp
// input-plugins/src/input-plugins
//
// Created by Bradley Austin Davis 2016/03/04.
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "OculusControllerManager.h"

#include <QtCore/QLoggingCategory>

#include <plugins/PluginContainer.h>
#include <controllers/UserInputMapper.h>
#include <controllers/StandardControls.h>

#include <PerfStat.h>
#include <PathUtils.h>

#include "OculusHelpers.h"

Q_DECLARE_LOGGING_CATEGORY(oculus)

static const QString MENU_PARENT = "Avatar";
static const QString MENU_NAME = "Oculus Touch Controllers";
static const QString MENU_PATH = MENU_PARENT + ">" + MENU_NAME;

const QString OculusControllerManager::NAME = "Oculus";

bool OculusControllerManager::isSupported() const {
return oculusAvailable();
}

bool OculusControllerManager::activate() {
InputPlugin::activate();
if (!_session) {
_session = acquireOculusSession();
}
Q_ASSERT(_session);

// register with UserInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (_remote) {
userInputMapper->registerDevice(_remote);
}
if (_touch) {
userInputMapper->registerDevice(_touch);
}
return true;
}

void OculusControllerManager::deactivate() {
InputPlugin::deactivate();

if (_session) {
releaseOculusSession();
_session = nullptr;
}

// unregister with UserInputMapper
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
if (_touch) {
userInputMapper->removeDevice(_touch->getDeviceID());
}
if (_remote) {
userInputMapper->removeDevice(_remote->getDeviceID());
}
}

void OculusControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) {
PerformanceTimer perfTimer("OculusControllerManager::TouchDevice::update");

if (!OVR_SUCCESS(ovr_GetInputState(_session, ovrControllerType_Touch, &_inputState))) {
qCWarning(oculus) << "Unable to read oculus input state";
return;
}

if (_touch) {
_touch->update(deltaTime, inputCalibrationData, jointsCaptured);
}
if (_remote) {
_remote->update(deltaTime, inputCalibrationData, jointsCaptured);
}
}

void OculusControllerManager::pluginFocusOutEvent() {
if (_touch) {
_touch->focusOutEvent();
}
if (_remote) {
_remote->focusOutEvent();
}
}

using namespace controller;

static const std::vector<std::pair<ovrButton, StandardButtonChannel>> BUTTON_MAP { {
{ ovrButton_X, X },
{ ovrButton_Y, Y },
{ ovrButton_A, A },
{ ovrButton_B, B },
{ ovrButton_LThumb, LS },
{ ovrButton_RThumb, RS },
{ ovrButton_LShoulder, LB },
{ ovrButton_RShoulder, RB },
} };

static const std::vector<std::pair<ovrTouch, StandardButtonChannel>> TOUCH_MAP { {
{ ovrTouch_X, LEFT_SECONDARY_THUMB_TOUCH },
{ ovrTouch_Y, LEFT_SECONDARY_THUMB_TOUCH },
{ ovrTouch_A, RIGHT_SECONDARY_THUMB_TOUCH },
{ ovrTouch_B, RIGHT_SECONDARY_THUMB_TOUCH },
{ ovrTouch_LIndexTrigger, LEFT_PRIMARY_INDEX_TOUCH },
{ ovrTouch_RIndexTrigger, RIGHT_PRIMARY_INDEX_TOUCH },
{ ovrTouch_LThumb, LS_TOUCH },
{ ovrTouch_RThumb, RS_TOUCH },
{ ovrTouch_LThumbUp, LEFT_THUMB_UP },
{ ovrTouch_RThumbUp, RIGHT_THUMB_UP },
{ ovrTouch_LIndexPointing, LEFT_INDEX_POINT },
{ ovrTouch_RIndexPointing, RIGHT_INDEX_POINT },
} };

void OculusControllerManager::TouchDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) {
_poseStateMap.clear();
_buttonPressedMap.clear();

if (!jointsCaptured) {
int numTrackedControllers = 0;
static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked & ovrStatus_PositionTracked;
auto tracking = ovr_GetTrackingState(_parent._session, 0, false);
ovr_for_each_hand([&](ovrHandType hand) {
++numTrackedControllers;
if (REQUIRED_HAND_STATUS == (tracking.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
handlePose(deltaTime, inputCalibrationData, hand, tracking.HandPoses[hand]);
}
});
}
using namespace controller;
// Axes
const auto& inputState = _parent._inputState;
_axisStateMap[LX] = inputState.Thumbstick[ovrHand_Left].x;
_axisStateMap[LY] = inputState.Thumbstick[ovrHand_Left].y;
_axisStateMap[LT] = inputState.IndexTrigger[ovrHand_Left];
_axisStateMap[LG] = inputState.HandTrigger[ovrHand_Left];

_axisStateMap[RX] = inputState.Thumbstick[ovrHand_Right].x;
_axisStateMap[RY] = inputState.Thumbstick[ovrHand_Right].y;
_axisStateMap[RT] = inputState.IndexTrigger[ovrHand_Right];
_axisStateMap[RG] = inputState.HandTrigger[ovrHand_Right];

// Buttons
for (const auto& pair : BUTTON_MAP) {
if (inputState.Buttons & pair.first) {
_buttonPressedMap.insert(pair.second);
}
}
// Touches
for (const auto& pair : TOUCH_MAP) {
if (inputState.Touches & pair.first) {
_buttonPressedMap.insert(pair.second);
}
}
}

void OculusControllerManager::TouchDevice::focusOutEvent() {
_axisStateMap.clear();
_buttonPressedMap.clear();
};

void OculusControllerManager::TouchDevice::handlePose(float deltaTime,
const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand,
const ovrPoseStatef& handPose) {
auto poseId = hand == ovrHand_Left ? controller::LEFT_HAND : controller::RIGHT_HAND;
auto& pose = _poseStateMap[poseId];
pose.translation = toGlm(handPose.ThePose.Position);
pose.rotation = toGlm(handPose.ThePose.Orientation);
pose.angularVelocity = toGlm(handPose.AngularVelocity);
pose.velocity = toGlm(handPose.LinearVelocity);
}

controller::Input::NamedVector OculusControllerManager::TouchDevice::getAvailableInputs() const {
using namespace controller;
QVector<Input::NamedPair> availableInputs{
// Trackpad analogs
makePair(LX, "LX"),
makePair(LY, "LY"),
makePair(RX, "RX"),
makePair(RY, "RY"),
// trigger analogs
makePair(LT, "LT"),
makePair(RT, "RT"),

makePair(LB, "LB"),
makePair(RB, "RB"),

makePair(LS, "LS"),
makePair(RS, "RS"),
makePair(LEFT_HAND, "LeftHand"),
makePair(RIGHT_HAND, "RightHand"),

makePair(LEFT_PRIMARY_THUMB, "LeftPrimaryThumb"),
makePair(LEFT_SECONDARY_THUMB, "LeftSecondaryThumb"),
makePair(RIGHT_PRIMARY_THUMB, "RightPrimaryThumb"),
makePair(RIGHT_SECONDARY_THUMB, "RightSecondaryThumb"),
};
return availableInputs;
}

QString OculusControllerManager::TouchDevice::getDefaultMappingConfig() const {
static const QString MAPPING_JSON = PathUtils::resourcesPath() + "/controllers/touch.json";
return MAPPING_JSON;
}
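
The new input plugin polls ovr_GetInputState once per update and then translates bitmask fields into standard controller channels through two lookup tables, BUTTON_MAP for presses and TOUCH_MAP for capacitive touches. A trimmed, self-contained sketch of that table-driven translation (the channel enum and button bits here are stand-ins, not controller::StandardButtonChannel or the real ovrButton values):

```cpp
// Trimmed illustration of the table-driven translation; ChannelSketch stands in
// for controller::StandardButtonChannel and the button bits are hypothetical.
#include <cstdint>
#include <unordered_set>
#include <utility>
#include <vector>

enum ChannelSketch { A_CH, B_CH, X_CH, Y_CH };

static const std::vector<std::pair<uint32_t, ChannelSketch>> BUTTON_MAP_SKETCH {
    { 1u << 0, X_CH },
    { 1u << 1, Y_CH },
    { 1u << 2, A_CH },
    { 1u << 3, B_CH },
};

// Equivalent of the loop in TouchDevice::update(): any set bit marks its channel pressed.
std::unordered_set<int> translateButtons(uint32_t buttonBits) {
    std::unordered_set<int> pressed;
    for (const auto& pair : BUTTON_MAP_SKETCH) {
        if (buttonBits & pair.first) {
            pressed.insert(pair.second);
        }
    }
    return pressed;
}
```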
plugins/oculus/src/OculusControllerManager.h (new file): 81 lines

@@ -0,0 +1,81 @@
//
// Created by Bradley Austin Davis on 2016/03/04
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi__OculusControllerManager
#define hifi__OculusControllerManager

#include <QObject>
#include <unordered_set>

#include <GLMHelpers.h>

#include <controllers/InputDevice.h>
#include <plugins/InputPlugin.h>

#include <OVR_CAPI.h>

class OculusControllerManager : public InputPlugin {
Q_OBJECT
public:
// Plugin functions
bool isSupported() const override;
bool isJointController() const override { return true; }
const QString& getName() const override { return NAME; }

bool activate() override;
void deactivate() override;

void pluginFocusOutEvent() override;
void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) override;

private:
class OculusInputDevice : public controller::InputDevice {
public:
OculusInputDevice(OculusControllerManager& parent, const QString& name) : controller::InputDevice(name), _parent(parent) {}

OculusControllerManager& _parent;
friend class OculusControllerManager;
};

class RemoteDevice : public OculusInputDevice {
public:
using Pointer = std::shared_ptr<RemoteDevice>;
RemoteDevice(OculusControllerManager& parent) : OculusInputDevice(parent, "Oculus Remote") {}

controller::Input::NamedVector getAvailableInputs() const override;
QString getDefaultMappingConfig() const override;
void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) override;
void focusOutEvent() override;

friend class OculusControllerManager;
};

class TouchDevice : public OculusInputDevice {
public:
using Pointer = std::shared_ptr<TouchDevice>;
TouchDevice(OculusControllerManager& parent) : OculusInputDevice(parent, "Oculus Touch") {}

controller::Input::NamedVector getAvailableInputs() const override;
QString getDefaultMappingConfig() const override;
void update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, bool jointsCaptured) override;
void focusOutEvent() override;

private:
void handlePose(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand, const ovrPoseStatef& handPose);
int _trackedControllers { 0 };
friend class OculusControllerManager;
};

ovrSession _session { nullptr };
ovrInputState _inputState {};
RemoteDevice::Pointer _remote;
TouchDevice::Pointer _touch;
static const QString NAME;
};

#endif // hifi__OculusControllerManager
@@ -10,6 +10,23 @@
#include "OculusHelpers.h"

const QString OculusDisplayPlugin::NAME("Oculus Rift");
static ovrPerfHudMode currentDebugMode = ovrPerfHud_Off;

bool OculusDisplayPlugin::internalActivate() {
bool result = Parent::internalActivate();
currentDebugMode = ovrPerfHud_Off;
if (result && _session) {
ovr_SetInt(_session, OVR_PERF_HUD_MODE, currentDebugMode);
}
return result;
}

void OculusDisplayPlugin::cycleDebugOutput() {
if (_session) {
currentDebugMode = static_cast<ovrPerfHudMode>((currentDebugMode + 1) % ovrPerfHud_Count);
ovr_SetInt(_session, OVR_PERF_HUD_MODE, currentDebugMode);
}
}

void OculusDisplayPlugin::customizeContext() {
Parent::customizeContext();
@@ -48,12 +65,6 @@ void blit(const SrcFbo& srcFbo, const DstFbo& dstFbo) {
});
}

void OculusDisplayPlugin::updateFrameData() {
Parent::updateFrameData();
_sceneLayer.RenderPose[ovrEyeType::ovrEye_Left] = ovrPoseFromGlm(_currentRenderEyePoses[Left]);
_sceneLayer.RenderPose[ovrEyeType::ovrEye_Right] = ovrPoseFromGlm(_currentRenderEyePoses[Right]);
}

void OculusDisplayPlugin::hmdPresent() {

PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentRenderFrameIndex)
@@ -63,12 +74,15 @@ void OculusDisplayPlugin::hmdPresent() {
}

blit(_compositeFramebuffer, _sceneFbo);
_sceneFbo->Commit();
{
_sceneLayer.SensorSampleTime = _currentPresentFrameInfo.sensorSampleTime;
_sceneLayer.RenderPose[ovrEyeType::ovrEye_Left] = ovrPoseFromGlm(_currentPresentFrameInfo.headPose);
_sceneLayer.RenderPose[ovrEyeType::ovrEye_Right] = ovrPoseFromGlm(_currentPresentFrameInfo.headPose);
ovrLayerHeader* layers = &_sceneLayer.Header;
ovrResult result = ovr_SubmitFrame(_session, _currentRenderFrameIndex, &_viewScaleDesc, &layers, 1);
if (!OVR_SUCCESS(result)) {
logWarning("Failed to present");
}
}
_sceneFbo->Increment();
}
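
cycleDebugOutput() steps through the SDK's built-in performance HUD modes by writing OVR_PERF_HUD_MODE via ovr_SetInt, wired to Shift+Meta+Y in Application::keyPressEvent earlier in the diff. A minimal standalone sketch of that cycling against the 1.3 CAPI, with session acquisition elided:

```cpp
// Standalone sketch of perf-HUD cycling with the 1.3 CAPI; obtaining a valid
// ovrSession is elided here.
#include <OVR_CAPI.h>

static ovrPerfHudMode g_perfHudMode = ovrPerfHud_Off;

void cyclePerfHud(ovrSession session) {
    if (!session) {
        return;
    }
    // Step to the next HUD mode, wrapping back to Off after the last one.
    g_perfHudMode = static_cast<ovrPerfHudMode>((g_perfHudMode + 1) % ovrPerfHud_Count);
    ovr_SetInt(session, OVR_PERF_HUD_MODE, g_perfHudMode);
}
```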
@@ -22,12 +22,13 @@ public:
float getTargetFrameRate() override { return TARGET_RATE_Oculus; }

protected:
bool internalActivate() override;
void hmdPresent() override;
// FIXME update with Oculus API call once it's available in the SDK
bool isHmdMounted() const override { return true; }
void customizeContext() override;
void uncustomizeContext() override;
void updateFrameData() override;
void cycleDebugOutput() override;

private:
static const QString NAME;
@@ -9,7 +9,10 @@
#include "OculusHelpers.h"

#include <atomic>

#include <QtCore/QLoggingCategory>
#include <QtCore/QFile>
#include <QtCore/QDir>

using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
@@ -38,9 +41,23 @@ void logFatal(const char* what) {
qFatal(error.c_str());
}

static const QString OCULUS_RUNTIME_PATH { "C:\\Program Files (x86)\\Oculus\\Support\\oculus-runtime" };
static const QString GOOD_OCULUS_RUNTIME_FILE { OCULUS_RUNTIME_PATH + "\\LibOVRRT64_1.dll" };

bool oculusAvailable() {
ovrDetectResult detect = ovr_Detect(0);
return (detect.IsOculusServiceRunning && detect.IsOculusHMDConnected);
if (!detect.IsOculusServiceRunning || !detect.IsOculusHMDConnected) {
return false;
}

// HACK Explicitly check for the presence of the 1.0 runtime DLL, and fail if it
// doesn't exist
if (!QFile(GOOD_OCULUS_RUNTIME_FILE).exists()) {
qCWarning(oculus) << "Oculus Runtime detected, but no 1.x DLL present: \"" + GOOD_OCULUS_RUNTIME_FILE + "\"";
return false;
}

return true;
}

ovrSession acquireOculusSession() {
@@ -98,9 +115,9 @@ SwapFramebufferWrapper::~SwapFramebufferWrapper() {
destroyColor();
}

void SwapFramebufferWrapper::Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
void SwapFramebufferWrapper::Commit() {
auto result = ovr_CommitTextureSwapChain(_session, color);
Q_ASSERT(OVR_SUCCESS(result));
}

void SwapFramebufferWrapper::Resize(const uvec2 & size) {
@@ -114,7 +131,7 @@ void SwapFramebufferWrapper::Resize(const uvec2 & size) {

void SwapFramebufferWrapper::destroyColor() {
if (color) {
ovr_DestroySwapTextureSet(_session, color);
ovr_DestroyTextureSwapChain(_session, color);
color = nullptr;
}
}
@@ -122,13 +139,30 @@ void SwapFramebufferWrapper::destroyColor() {
void SwapFramebufferWrapper::initColor() {
destroyColor();

if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(_session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
ovrTextureSwapChainDesc desc = {};
desc.Type = ovrTexture_2D;
desc.ArraySize = 1;
desc.Width = size.x;
desc.Height = size.y;
desc.MipLevels = 1;
desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
desc.SampleCount = 1;
desc.StaticImage = ovrFalse;

ovrResult result = ovr_CreateTextureSwapChainGL(_session, &desc, &color);
if (!OVR_SUCCESS(result)) {
logFatal("Failed to create swap textures");
}

for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
int length = 0;
result = ovr_GetTextureSwapChainLength(_session, color, &length);
if (!OVR_SUCCESS(result) || !length) {
qFatal("Unable to count swap chain textures");
}
for (int i = 0; i < length; ++i) {
GLuint chainTexId;
ovr_GetTextureSwapChainBufferGL(_session, color, i, &chainTexId);
glBindTexture(GL_TEXTURE_2D, chainTexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
@@ -141,8 +175,11 @@ void SwapFramebufferWrapper::initDone() {
}

void SwapFramebufferWrapper::onBind(oglplus::Framebuffer::Target target) {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
int curIndex;
ovr_GetTextureSwapChainCurrentIndex(_session, color, &curIndex);
GLuint curTexId;
ovr_GetTextureSwapChainBufferGL(_session, color, curIndex, &curTexId);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, curTexId, 0);
}

void SwapFramebufferWrapper::onUnbind(oglplus::Framebuffer::Target target) {
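
The helper's Increment()-based ovrSwapTextureSet handling is replaced by the 1.3 texture swap chain API: describe and create the chain once, query its length and per-buffer GL texture ids for setup, look up the current index when binding, and commit after rendering. A condensed sketch of that lifecycle (error handling and the framebuffer plumbing are omitted, and the GL loader choice is an assumption):

```cpp
// Condensed 1.3 swap-chain lifecycle, following the diff above; GL framebuffer
// plumbing and most error handling are omitted.
#include <GL/glew.h>      // any GL loader that declares glBindTexture/glTexParameteri works
#include <OVR_CAPI.h>
#include <OVR_CAPI_GL.h>

ovrTextureSwapChain createChain(ovrSession session, int width, int height) {
    ovrTextureSwapChainDesc desc = {};
    desc.Type = ovrTexture_2D;
    desc.ArraySize = 1;
    desc.Width = width;
    desc.Height = height;
    desc.MipLevels = 1;
    desc.Format = OVR_FORMAT_R8G8B8A8_UNORM_SRGB;
    desc.SampleCount = 1;
    desc.StaticImage = ovrFalse;

    ovrTextureSwapChain chain = nullptr;
    ovr_CreateTextureSwapChainGL(session, &desc, &chain);

    // Configure each GL texture backing the chain once, up front.
    int length = 0;
    ovr_GetTextureSwapChainLength(session, chain, &length);
    for (int i = 0; i < length; ++i) {
        GLuint texId = 0;
        ovr_GetTextureSwapChainBufferGL(session, chain, i, &texId);
        glBindTexture(GL_TEXTURE_2D, texId);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
    }
    return chain;
}

void perFrame(ovrSession session, ovrTextureSwapChain chain) {
    int curIndex = 0;
    ovr_GetTextureSwapChainCurrentIndex(session, chain, &curIndex);
    GLuint curTexId = 0;
    ovr_GetTextureSwapChainBufferGL(session, chain, curIndex, &curTexId);
    // ... attach curTexId to the FBO and render the frame into it ...
    ovr_CommitTextureSwapChain(session, chain);   // hand the buffer to the compositor
}
```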
@@ -111,10 +111,10 @@ inline ovrPosef ovrPoseFromGlm(const glm::mat4 & m) {
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public FramebufferWrapper<ovrSwapTextureSet*, void*> {
struct SwapFramebufferWrapper : public FramebufferWrapper<ovrTextureSwapChain, void*> {
SwapFramebufferWrapper(const ovrSession& session);
~SwapFramebufferWrapper();
void Increment();
void Commit();
void Resize(const uvec2 & size);
protected:
void initColor() override final;
@@ -18,6 +18,7 @@

#include "OculusDisplayPlugin.h"
#include "OculusDebugDisplayPlugin.h"
#include "OculusControllerManager.h"

class OculusProvider : public QObject, public DisplayProvider, InputProvider
{
@@ -51,8 +52,6 @@ public:
}

virtual InputPluginList getInputPlugins() override {
// FIXME pending full oculus input API and hardware
#if 0
static std::once_flag once;
std::call_once(once, [&] {
InputPluginPointer plugin(new OculusControllerManager());
@@ -60,7 +59,6 @@ public:
_inputPlugins.push_back(plugin);
}
});
#endif
return _inputPlugins;
}
@@ -35,10 +35,14 @@ void OculusLegacyDisplayPlugin::resetSensors() {
ovrHmd_RecenterPose(_hmd);
}

void OculusLegacyDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
void OculusLegacyDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
FrameInfo frame;
frame.predictedDisplayTime = frame.sensorSampleTime = ovr_GetTimeInSeconds();
_trackingState = ovrHmd_GetTrackingState(_hmd, frame.predictedDisplayTime);
frame.headPose = toGlm(_trackingState.HeadPose.ThePose);
_currentRenderFrameInfo.set(frame);
Lock lock(_mutex);
_trackingState = ovrHmd_GetTrackingState(_hmd, ovr_GetTimeInSeconds());
_headPoseCache.set(toGlm(_trackingState.HeadPose.ThePose));
_frameInfos[frameIndex] = frame;
}

bool OculusLegacyDisplayPlugin::isSupported() const {
@@ -26,7 +26,7 @@ public:

// Stereo specific methods
virtual void resetSensors() override;
virtual void updateHeadPose(uint32_t frameIndex) override;
virtual void beginFrameRender(uint32_t frameIndex) override;

virtual float getTargetFrameRate() override;
@@ -121,22 +121,23 @@ void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
}

void OpenVrDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
void OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {

float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
float frameDuration = 1.f / displayFrequency;
float vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
double displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
double frameDuration = 1.f / displayFrequency;
double vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);

FrameInfo frame;
#if THREADED_PRESENT
// 3 frames of prediction + vsyncToPhotons = 44ms total
const float NUM_PREDICTION_FRAMES = 3.0f;
float predictedSecondsFromNow = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
const double NUM_PREDICTION_FRAMES = 3.0f;
frame.predictedDisplayTime = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
float predictedSecondsFromNow = frameDuration + vsyncToPhotons;
frame.predictedDisplayTime = frameDuration + vsyncToPhotons;
#endif

vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, predictedSecondsFromNow, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, frame.predictedDisplayTime, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);

// copy and process predictedTrackedDevicePoses
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
@@ -145,8 +146,11 @@ void OpenVrDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
_trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
_trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
}
frame.headPose = _trackedDevicePoseMat4[0];
_currentRenderFrameInfo.set(frame);

_headPoseCache.set(_trackedDevicePoseMat4[0]);
Lock lock(_mutex);
_frameInfos[frameIndex] = frame;
}

void OpenVrDisplayPlugin::hmdPresent() {
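
The OpenVR path folds its prediction math into frame.predictedDisplayTime: with threaded present it predicts three frame durations ahead plus the vsync-to-photons latency reported by the runtime. A small worked example of that arithmetic, using an assumed 90 Hz panel and an assumed vsync-to-photons value (the real figures come from GetFloatTrackedDeviceProperty at runtime):

```cpp
// Worked example of the prediction-window arithmetic; the 90 Hz refresh and the
// vsync-to-photons figure are assumptions, the real values are queried from the
// OpenVR runtime.
#include <cstdio>

int main() {
    const double displayFrequency = 90.0;                 // assumed panel refresh
    const double frameDuration = 1.0 / displayFrequency;  // ~0.0111 s
    const double vsyncToPhotons = 0.011;                  // assumed runtime-reported latency

    const double NUM_PREDICTION_FRAMES = 3.0;             // threaded-present path
    double predictedDisplayTime = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;

    // roughly 3 * 11.1 ms + 11 ms, in line with the "44ms total" comment in the diff
    std::printf("predicted display time: %.1f ms\n", predictedDisplayTime * 1000.0);
    return 0;
}
```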
@@ -27,7 +27,7 @@ public:

// Stereo specific methods
virtual void resetSensors() override;
virtual void updateHeadPose(uint32_t frameIndex) override;
virtual void beginFrameRender(uint32_t frameIndex) override;

protected:
bool internalActivate() override;