Mirror of https://github.com/JulianGro/overte.git, synced 2025-04-13 22:27:13 +02:00

Commit 90f0821c2b (parent b9a16cec25)
OpenVR: reduce thread contention, fix sensor reset

2 changed files with 35 additions and 24 deletions
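In short: the present thread now fills a private _presentThreadTrackedDevicePose array from the compositor's WaitGetPoses() and copies the results into the shared _trackedDevicePose / _trackedDevicePoseMat4 arrays under a new, dedicated _poseMutex, so the blocking compositor calls (Submit, WaitGetPoses) no longer run while holding the plugin's general _mutex. resetSensors() is reworked to derive the reset matrix from the raw device-to-absolute pose, and the preview swap is moved ahead of the pose wait.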
@@ -32,12 +32,15 @@ const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
 const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here

 static vr::IVRCompositor* _compositor{ nullptr };
+static vr::TrackedDevicePose_t _presentThreadTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
 static mat4 _sensorResetMat;
 static uvec2 _windowSize;
 static uvec2 _renderTargetSize;

 struct PerEyeData {
     //uvec2 _viewportOrigin;
     //uvec2 _viewportSize;
@@ -87,11 +90,16 @@ void OpenVrDisplayPlugin::activate() {
     // Recommended render target size is per-eye, so double the X size for
     // left + right eyes
     _renderTargetSize.x *= 2;
-    openvr_for_each_eye([&](vr::Hmd_Eye eye) {
-        PerEyeData& eyeData = _eyesData[eye];
-        eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
-        eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
-    });
+
+    {
+        Lock lock(_poseMutex);
+        openvr_for_each_eye([&](vr::Hmd_Eye eye) {
+            PerEyeData& eyeData = _eyesData[eye];
+            eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
+            eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
+        });
+    }

     _compositor = vr::VRCompositor();
     Q_ASSERT(_compositor);
     WindowOpenGLDisplayPlugin::activate();
@@ -130,25 +138,24 @@ mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const
     if (eye == Mono) {
         eye = Left;
     }
+    Lock lock(_poseMutex);
     return _eyesData[eye]._projectionMatrix;
 }

 void OpenVrDisplayPlugin::resetSensors() {
-    _sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
+    Lock lock(_poseMutex);
+    glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
+    _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
 }

 glm::mat4 OpenVrDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
+    Lock lock(_poseMutex);
     return _eyesData[eye]._eyeOffset;
 }

 glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
-    glm::mat4 result;
-    {
-        Lock lock(_mutex);
-        result = _trackedDevicePoseMat4[0];
-
-    }
-    return result;
+    Lock lock(_poseMutex);
+    return _trackedDevicePoseMat4[0];
 }

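A note on the resetSensors() change above: after this commit, _trackedDevicePoseMat4[0] is published with _sensorResetMat already folded in (see the internalPresent() hunk below), so the reset matrix has to come from the raw mDeviceToAbsoluteTracking pose; if it were derived from the already adjusted matrix, each reset would partly undo the previous one. A small illustrative sketch with glm, assuming a pure-yaw head pose so that cancelOutRollAndPitch() would pass it through unchanged:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <cstdio>

int main() {
    // Raw HMD pose: yawed 30 degrees in the tracking volume.
    const glm::mat4 rawPose = glm::rotate(glm::mat4(1.0f), glm::radians(30.0f), glm::vec3(0.0f, 1.0f, 0.0f));

    // First reset, taken from the raw pose: cancels the 30-degree yaw.
    const glm::mat4 sensorResetMat = glm::inverse(rawPose);

    // What the present thread publishes afterwards (reset already applied).
    const glm::mat4 adjustedPose = sensorResetMat * rawPose;    // ~identity

    // Old behaviour: the next reset derived from the adjusted pose is ~identity,
    // so the 30-degree correction is effectively discarded.
    const glm::mat4 oldNextReset = glm::inverse(adjustedPose);

    // New behaviour: the reset keeps being derived from the raw pose.
    const glm::mat4 newNextReset = glm::inverse(rawPose);

    std::printf("old[0][0]=%.3f (identity, correction lost)  new[0][0]=%.3f (still cancels the yaw)\n",
                oldNextReset[0][0], newNextReset[0][0]);
    return 0;
}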
@@ -177,16 +184,23 @@ void OpenVrDisplayPlugin::internalPresent() {
     }

     vr::Texture_t texture{ (void*)_currentSceneTexture, vr::API_OpenGL, vr::ColorSpace_Auto };
-    {
-        Lock lock(_mutex);
-        _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
-        _compositor->Submit(vr::Eye_Right, &texture, &rightBounds);
-    }
+
+    _compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
+    _compositor->Submit(vr::Eye_Right, &texture, &rightBounds);

     glFinish();

+    if (_enablePreview) {
+        swapBuffers();
+    }
+
+    _compositor->WaitGetPoses(_presentThreadTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
+
     {
-        Lock lock(_mutex);
-        _compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
+        // copy and process _presentThreadTrackedDevicePoses
+        Lock lock(_poseMutex);
+        for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
+            _trackedDevicePose[i] = _presentThreadTrackedDevicePose[i];
+            _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
+        }
         openvr_for_each_eye([&](vr::Hmd_Eye eye) {

@@ -194,9 +208,5 @@ void OpenVrDisplayPlugin::internalPresent() {
         });
     }

-    if (_enablePreview) {
-        swapBuffers();
-    }
-
     //WindowOpenGLDisplayPlugin::internalPresent();
 }
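The internalPresent() rework above follows a simple double-buffering pattern: the blocking compositor calls run with no lock held and write into a present-thread-only array, and only a quick copy into the shared pose arrays takes the lightweight _poseMutex that readers such as getHeadPose() use. A minimal standalone sketch of that pattern using std::mutex (the names and the waitGetPoses() stand-in are illustrative, not the plugin's actual API):

#include <array>
#include <cstddef>
#include <cstdio>
#include <mutex>

struct Pose { float yaw = 0.0f; };        // stand-in for vr::TrackedDevicePose_t
constexpr std::size_t kMaxDevices = 16;   // stand-in for vr::k_unMaxTrackedDeviceCount

std::array<Pose, kMaxDevices> presentThreadPoses;  // written only by the present thread, no lock
std::array<Pose, kMaxDevices> sharedPoses;         // shared with readers, guarded by posesMutex
std::mutex posesMutex;

// Stand-in for the blocking vr::IVRCompositor::WaitGetPoses() call.
void waitGetPoses(std::array<Pose, kMaxDevices>& out) { out[0].yaw += 0.1f; }

// Present thread: block *outside* the lock, then copy quickly under it.
void presentOnce() {
    waitGetPoses(presentThreadPoses);                // may block for up to a full vsync
    std::lock_guard<std::mutex> lock(posesMutex);
    sharedPoses = presentThreadPoses;                // short critical section
}

// Reader thread (a getHeadPose() equivalent): pays only for the copy, never the wait.
Pose headPose() {
    std::lock_guard<std::mutex> lock(posesMutex);
    return sharedPoses[0];
}

int main() {
    presentOnce();
    std::printf("head yaw: %.1f\n", headPose().yaw);
    return 0;
}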
@@ -47,5 +47,6 @@ private:
     static const QString NAME;
     bool _enablePreview { false };
     bool _monoPreview { true };
+    mutable Mutex _poseMutex;
 };
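The header-side change declares the new mutex as mutable so the const accessors seen above (getProjection(), getEyeToHeadTransform(), getHeadPose()) can still lock it; taking a lock is a physical rather than logical modification of the object. A tiny sketch of the same idiom with std::mutex (class and member names are illustrative):

#include <mutex>

class PoseSource {
public:
    // A const accessor can still lock because the mutex is declared mutable.
    float headYaw() const {
        std::lock_guard<std::mutex> lock(_poseMutex);
        return _yaw;
    }

    void setHeadYaw(float yaw) {
        std::lock_guard<std::mutex> lock(_poseMutex);
        _yaw = yaw;
    }

private:
    mutable std::mutex _poseMutex;  // mutable: lockable from const methods too
    float _yaw { 0.0f };
};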