openvr: fix for crash on exit

Before this fix, a script could call HMD.getHUDLookAtPosition2D() while the app was shutting down, which in turn would call getHeadPose() on the currently active display plugin. That call could crash inside the openvr plugin, because the SDK had either already been shut down or was still shutting down on the main thread.

This commit fixes the crash by splitting the previous DisplayPlugin::getHeadPose(int) into two parts:

* updateHeadPose(int), which is called only once per frame and only from the main thread.
* getHeadPose(), which is thread-safe and returns a cached copy of the HMD pose sampled by the last updateHeadPose call (see the sketch below).
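
The cached copy lives in a ThreadSafeValueCache<glm::mat4> member added to each HMD plugin (see the header diffs below). As a rough illustration only, here is a minimal sketch of the kind of mutex-guarded holder this assumes, matching the get()/set() calls used in the diff; the repo's actual ThreadSafeValueCache.h may differ:

// Hypothetical sketch of a mutex-guarded value cache with the get()/set()
// interface used in this commit; not the repo's actual ThreadSafeValueCache.h.
#include <mutex>

template <typename T>
class ThreadSafeValueCache {
public:
    ThreadSafeValueCache(const T& initialValue) : _value(initialValue) {}

    // called by updateHeadPose() on the main thread, once per frame
    void set(const T& value) {
        std::lock_guard<std::mutex> guard(_mutex);
        _value = value;
    }

    // safe to call from any thread; returns a copy and never touches the HMD SDK
    T get() const {
        std::lock_guard<std::mutex> guard(_mutex);
        return _value;
    }

private:
    mutable std::mutex _mutex; // mutable so get() can remain const
    T _value;
};

Because get() returns a copy under a lock and never calls into the HMD SDK, a script thread can keep calling getHeadPose() even while the SDK is being torn down on the main thread.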
Anthony J. Thibault 2016-03-18 12:26:11 -07:00
parent 312635c1db
commit d218ca4960
9 changed files with 40 additions and 15 deletions

View file

@@ -1603,7 +1603,7 @@ void Application::paintGL() {
     // unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
     // for rotational timewarp. If we move to support positonal timewarp, we need to
     // ensure this contains the full pose composed with the eye offsets.
-    mat4 headPose = displayPlugin->getHeadPose(_frameCount);
+    mat4 headPose = displayPlugin->updateHeadPose(_frameCount);

     // FIXME we probably don't need to set the projection matrix every frame,
     // only when the display plugin changes (or in non-HMD modes when the user
@@ -2975,7 +2975,7 @@ void Application::updateMyAvatarLookAtPosition() {
             lookAtPosition.x = -lookAtPosition.x;
         }
         if (isHMD) {
-            glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose(_frameCount);
+            glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
             glm::quat hmdRotation = glm::quat_cast(headPose);
             lookAtSpot = _myCamera.getPosition() + myAvatar->getOrientation() * (hmdRotation * lookAtPosition);
         } else {
@@ -4927,7 +4927,7 @@ mat4 Application::getEyeOffset(int eye) const {

 mat4 Application::getHMDSensorPose() const {
     if (isHMDMode()) {
-        return getActiveDisplayPlugin()->getHeadPose(_frameCount);
+        return getActiveDisplayPlugin()->getHeadPose();
     }
     return mat4();
 }

View file

@@ -38,7 +38,7 @@ void AvatarUpdate::synchronousProcess() {

     // transform the head pose from the displayPlugin into avatar coordinates.
     glm::mat4 invAvatarMat = glm::inverse(createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()));
-    _headPose = invAvatarMat * (myAvatar->getSensorToWorldMatrix() * qApp->getActiveDisplayPlugin()->getHeadPose(frameCount));
+    _headPose = invAvatarMat * (myAvatar->getSensorToWorldMatrix() * qApp->getActiveDisplayPlugin()->getHeadPose());

     if (!isThreaded()) {
         process();

View file

@@ -1258,7 +1258,7 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl

     if (qApp->isHMDMode()) {
         glm::vec3 cameraPosition = qApp->getCamera()->getPosition();
-        glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose(qApp->getFrameCount());
+        glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose();
         glm::mat4 leftEyePose = qApp->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Left);
         leftEyePose = leftEyePose * headPose;
         glm::vec3 leftEyePosition = extractTranslation(leftEyePose);

View file

@@ -342,7 +342,7 @@ void CompositorHelper::computeHmdPickRay(const glm::vec2& cursorPos, glm::vec3&
 }

 glm::mat4 CompositorHelper::getUiTransform() const {
-    return _currentCamera * glm::inverse(_currentDisplayPlugin->getHeadPose(_currentFrame));
+    return _currentCamera * glm::inverse(_currentDisplayPlugin->getHeadPose());
 }

 //Finds the collision point of a world space ray

View file

@@ -121,8 +121,14 @@ public:
         static const glm::mat4 transform; return transform;
     }

-    virtual glm::mat4 getHeadPose(uint32_t frameIndex) const {
-        static const glm::mat4 pose; return pose;
+    // will query the underlying hmd api to compute the most recent head pose
+    virtual glm::mat4 updateHeadPose(uint32_t frameIndex) {
+        return glm::mat4();
+    }
+
+    // returns a copy of the most recent head pose, computed via updateHeadPose
+    virtual glm::mat4 getHeadPose() const {
+        return glm::mat4();
     }

     // Needed for timewarp style features

View file

@@ -15,14 +15,20 @@ void OculusBaseDisplayPlugin::resetSensors() {
     ovr_RecenterPose(_session);
 }

-glm::mat4 OculusBaseDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
+glm::mat4 OculusBaseDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
     static uint32_t lastFrameSeen = 0;
     auto displayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
     auto trackingState = ovr_GetTrackingState(_session, displayTime, frameIndex > lastFrameSeen);
     if (frameIndex > lastFrameSeen) {
         lastFrameSeen = frameIndex;
     }
-    return toGlm(trackingState.HeadPose.ThePose);
+    mat4 headPose = toGlm(trackingState.HeadPose.ThePose);
+    _headPoseCache.set(headPose);
+    return headPose;
+}
+
+glm::mat4 OculusBaseDisplayPlugin::getHeadPose() const {
+    return _headPoseCache.get();
 }

 bool OculusBaseDisplayPlugin::isSupported() const {

View file

@@ -8,6 +8,7 @@
 #pragma once

 #include <display-plugins/hmd/HmdDisplayPlugin.h>
+#include <ThreadSafeValueCache.h>

 #include <QTimer>

@@ -20,7 +21,8 @@ public:

     // Stereo specific methods
     virtual void resetSensors() override final;
-    virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
+    virtual glm::mat4 updateHeadPose(uint32_t frameIndex) override;
+    virtual glm::mat4 getHeadPose() const override;

 protected:
     void customizeContext() override;
@@ -36,4 +38,5 @@ protected:
     ovrHmdDesc _hmdDesc;
     ovrLayerEyeFov _sceneLayer;
     ovrViewScaleDesc _viewScaleDesc;
+    ThreadSafeValueCache<glm::mat4> _headPoseCache { glm::mat4() };
 };

View file

@@ -38,7 +38,7 @@ static mat4 _sensorResetMat;
 static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };

 bool OpenVrDisplayPlugin::isSupported() const {
-    return !isOculusPresent() && vr::VR_IsHmdPresent();
+    return /*!isOculusPresent() &&*/ vr::VR_IsHmdPresent();
 }

 void OpenVrDisplayPlugin::internalActivate() {
@@ -112,7 +112,7 @@ void OpenVrDisplayPlugin::resetSensors() {
     _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
 }

-glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
+glm::mat4 OpenVrDisplayPlugin::updateHeadPose(uint32_t frameIndex) {
     float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
     float frameDuration = 1.f / displayFrequency;

@@ -139,14 +139,21 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) {
         _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
         _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
     }

+    _headPoseCache.set(_trackedDevicePoseMat4[0]);
+
     return _trackedDevicePoseMat4[0];
 }

+glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
+    return _headPoseCache.get();
+}
+
 void OpenVrDisplayPlugin::hmdPresent() {
     // Flip y-axis since GL UV coords are backwards.
     static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
     static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };

     vr::Texture_t texture { (void*)oglplus::GetName(_compositeFramebuffer->color), vr::API_OpenGL, vr::ColorSpace_Auto };
     _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);

View file

@@ -12,6 +12,7 @@

 #include <openvr.h>

 #include <display-plugins/hmd/HmdDisplayPlugin.h>
+#include <ThreadSafeValueCache.h>

 const float TARGET_RATE_OpenVr = 90.0f; // FIXME: get from sdk tracked device property? This number is vive-only.
@@ -27,7 +28,8 @@ public:

     // Stereo specific methods
     virtual void resetSensors() override;
-    virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
+    virtual glm::mat4 updateHeadPose(uint32_t frameIndex) override;
+    virtual glm::mat4 getHeadPose() const override;

 protected:
     void internalActivate() override;
@@ -41,4 +43,5 @@ private:
     std::atomic<vr::EDeviceActivityLevel> _hmdActivityLevel { vr::k_EDeviceActivityLevel_Unknown };
     static const QString NAME;
     mutable Mutex _poseMutex;
+    ThreadSafeValueCache<glm::mat4> _headPoseCache { glm::mat4() };
 };