OpenVR: reduce thread contention, fix sensor reset

Anthony J. Thibault 2016-02-10 11:14:26 -08:00
parent b9a16cec25
commit 90f0821c2b
2 changed files with 35 additions and 24 deletions

OpenVrDisplayPlugin.cpp

@@ -32,12 +32,15 @@ const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
 const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here

 static vr::IVRCompositor* _compositor{ nullptr };
+static vr::TrackedDevicePose_t _presentThreadTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
 static mat4 _sensorResetMat;
 static uvec2 _windowSize;
 static uvec2 _renderTargetSize;

 struct PerEyeData {
     //uvec2 _viewportOrigin;
     //uvec2 _viewportSize;
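The new _presentThreadTrackedDevicePose array gives the present thread a private buffer to receive poses into, so the blocking WaitGetPoses() call no longer has to run under a lock shared with the main thread (see internalPresent() below). A minimal standalone sketch of the pattern, assuming Mutex/Lock alias std::mutex/std::unique_lock as the rest of the plugin suggests (the names here are illustrative, not from the commit):

    #include <algorithm>
    #include <iterator>
    #include <mutex>
    #include <openvr.h>

    using Mutex = std::mutex;               // assumed alias
    using Lock = std::unique_lock<Mutex>;   // assumed alias

    static vr::TrackedDevicePose_t presentThreadPoses[vr::k_unMaxTrackedDeviceCount]; // present thread only
    static vr::TrackedDevicePose_t sharedPoses[vr::k_unMaxTrackedDeviceCount];        // guarded by poseMutex
    static Mutex poseMutex;

    void pumpPoses(vr::IVRCompositor* compositor) {
        // Block for the next set of poses without holding any lock...
        compositor->WaitGetPoses(presentThreadPoses, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
        // ...then publish them to readers in a short critical section.
        Lock lock(poseMutex);
        std::copy(std::begin(presentThreadPoses), std::end(presentThreadPoses), std::begin(sharedPoses));
    }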
@@ -87,11 +90,16 @@ void OpenVrDisplayPlugin::activate() {
     // Recommended render target size is per-eye, so double the X size for
     // left + right eyes
     _renderTargetSize.x *= 2;

-    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
-        PerEyeData& eyeData = _eyesData[eye];
-        eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
-        eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
-    });
+    {
+        Lock lock(_poseMutex);
+        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
+            PerEyeData& eyeData = _eyesData[eye];
+            eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
+            eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
+        });
+    }

     _compositor = vr::VRCompositor();
     Q_ASSERT(_compositor);
     WindowOpenGLDisplayPlugin::activate();
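activate() now fills in the per-eye projection and eye offset under _poseMutex, matching the new locks in the getters below, so another thread cannot observe half-initialized eye data.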
@@ -130,25 +138,24 @@ mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const {
     if (eye == Mono) {
         eye = Left;
     }
+    Lock lock(_poseMutex);
     return _eyesData[eye]._projectionMatrix;
 }

 void OpenVrDisplayPlugin::resetSensors() {
-    _sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
+    Lock lock(_poseMutex);
+    glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
+    _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
 }
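This is the sensor-reset fix named in the commit title. internalPresent() (below) maintains _trackedDevicePoseMat4[0] = _sensorResetMat * toGlm(rawPose), so the old code fed a matrix that already contained the previous reset back into the reset computation, and every reset compounded the one before it. Sketching the algebra with shortened names:

    // per-frame update (see internalPresent):
    //     poseMat4[0] = sensorResetMat * rawHeadPose
    // old reset, derived from the already-adjusted matrix:
    //     newReset = inverse(cancelOutRollAndPitch(oldReset * rawHeadPose))   // oldReset leaks in
    // fixed reset, derived from the raw device pose:
    //     newReset = inverse(cancelOutRollAndPitch(rawHeadPose))              // depends only on the HMD pose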
 glm::mat4 OpenVrDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
+    Lock lock(_poseMutex);
     return _eyesData[eye]._eyeOffset;
 }

 glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
-    glm::mat4 result;
-    {
-        Lock lock(_mutex);
-        result = _trackedDevicePoseMat4[0];
-    }
-    return result;
+    Lock lock(_poseMutex);
+    return _trackedDevicePoseMat4[0];
 }
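getHeadPose() drops the temporary-plus-inner-scope dance: returning directly while the lock is held is equivalent, because the returned mat4 is copy-constructed before the Lock destructor releases _poseMutex. The getters also move from the broader _mutex to the dedicated _poseMutex.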
@@ -177,16 +184,23 @@ void OpenVrDisplayPlugin::internalPresent() {
     }

     vr::Texture_t texture{ (void*)_currentSceneTexture, vr::API_OpenGL, vr::ColorSpace_Auto };

-    {
-        Lock lock(_mutex);
-        _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
-        _compositor->Submit(vr::Eye_Right, &texture, &rightBounds);
-    }
+    _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
+    _compositor->Submit(vr::Eye_Right, &texture, &rightBounds);

     glFinish();

+    if (_enablePreview) {
+        swapBuffers();
+    }
+
+    _compositor->WaitGetPoses(_presentThreadTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
+
     {
-        Lock lock(_mutex);
-        _compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
+        // copy and process _presentThreadTrackedDevicePoses
+        Lock lock(_poseMutex);
         for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
+            _trackedDevicePose[i] = _presentThreadTrackedDevicePose[i];
             _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
         }
         openvr_for_each_eye([&](vr::Hmd_Eye eye) {
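This is the main contention fix: WaitGetPoses(), which blocks until the compositor is ready, now runs with no lock held and writes into the present-thread-only array; _poseMutex is held just long enough to copy the poses out and refresh the derived matrices, so main-thread callers of getHeadPose() no longer stall behind the blocking compositor call.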
@@ -194,9 +208,5 @@ void OpenVrDisplayPlugin::internalPresent() {
         });
     }

-    if (_enablePreview) {
-        swapBuffers();
-    }
-
     //WindowOpenGLDisplayPlugin::internalPresent();
 }
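The preview swapBuffers() moves from the end of the function (removed here) to just after glFinish() and before WaitGetPoses() (added above), presumably so pose acquisition is the last step of present and the poses stay as fresh as possible for the next frame.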

OpenVrDisplayPlugin.h

@@ -47,5 +47,6 @@ private:
     static const QString NAME;

     bool _enablePreview { false };
     bool _monoPreview { true };
+    mutable Mutex _poseMutex;
 };
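mutable is what lets the const getters above (getProjection(), getEyeToHeadTransform(), getHeadPose()) take the lock. A sketch of the shape, again assuming the Mutex/Lock aliases (the class and member names here are illustrative):

    #include <mutex>
    #include <glm/glm.hpp>

    using Mutex = std::mutex;
    using Lock = std::unique_lock<Mutex>;

    class PoseHolder {
    public:
        glm::mat4 headPose() const {
            Lock lock(_poseMutex);  // legal in a const method because the mutex is mutable
            return _headPose;       // the return value is copied before the lock releases
        }
    private:
        mutable Mutex _poseMutex;
        glm::mat4 _headPose { 1.0f };
    };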