OpenVRDisplayPlugin: predict poses for better tracking
Extrapolate the next set of poses for the HMD and hand controllers. We currently predict 44 ms into the future, which seems too high, but inspection showed it to be the value that works best. Evidently there is a source of latency we still need to track down; even so, the result is a much improved experience. (A rough breakdown of where the 44 ms comes from follows the change summary below.)
parent e3ae2baa7b
commit 93f61fce7a

2 changed files with 28 additions and 22 deletions
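Where the 44 ms comes from: the new getHeadPose() asks OpenVR for poses NUM_PREDICTION_FRAMES (3) frame durations plus the vsync-to-photons interval into the future. A minimal standalone sketch of that arithmetic, assuming a 90 Hz panel and roughly 11 ms from vsync to photons (both assumed values for illustration; the real code reads them from the headset):

#include <cstdio>

int main() {
    // Assumed values for illustration only; the plugin queries both from the HMD.
    const float displayFrequency = 90.0f;           // Hz, typical Vive-class panel
    const float vsyncToPhotons = 0.011f;            // seconds, assumed ~one refresh
    const float NUM_PREDICTION_FRAMES = 3.0f;       // matches the constant in the diff

    float frameDuration = 1.0f / displayFrequency;  // ~0.0111 s
    float predictedSecondsFromNow = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;

    std::printf("predicting %.1f ms ahead\n", predictedSecondsFromNow * 1000.0f); // ~44.3 ms
    return 0;
}

With a single frame of prediction the same arithmetic gives roughly 22 ms, which is consistent with the commit message's suspicion that an unexplained source of latency remains.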
@@ -30,7 +30,6 @@ const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
 const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here

 static vr::IVRCompositor* _compositor{ nullptr };
-static vr::TrackedDevicePose_t _presentThreadTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
 static mat4 _sensorResetMat;
@@ -43,12 +42,12 @@ bool OpenVrDisplayPlugin::isSupported() const {
 void OpenVrDisplayPlugin::activate() {
     _container->setIsOptionChecked(StandingHMDSensorMode, true);

-    if (!_hmd) {
-        _hmd = acquireOpenVrSystem();
+    if (!_system) {
+        _system = acquireOpenVrSystem();
     }
-    Q_ASSERT(_hmd);
+    Q_ASSERT(_system);

-    _hmd->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
+    _system->GetRecommendedRenderTargetSize(&_renderTargetSize.x, &_renderTargetSize.y);
     // Recommended render target size is per-eye, so double the X size for
     // left + right eyes
     _renderTargetSize.x *= 2;
@@ -56,8 +55,8 @@ void OpenVrDisplayPlugin::activate() {
     {
         Lock lock(_poseMutex);
         openvr_for_each_eye([&](vr::Hmd_Eye eye) {
-            _eyeOffsets[eye] = toGlm(_hmd->GetEyeToHeadTransform(eye));
-            _eyeProjections[eye] = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
+            _eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
+            _eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
         });
         // FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
         _cullingProjection = _eyeProjections[0];
@@ -71,9 +70,9 @@ void OpenVrDisplayPlugin::activate() {

 void OpenVrDisplayPlugin::deactivate() {
     _container->setIsOptionChecked(StandingHMDSensorMode, false);
-    if (_hmd) {
+    if (_system) {
         releaseOpenVrSystem();
-        _hmd = nullptr;
+        _system = nullptr;
     }
     _compositor = nullptr;
     HmdDisplayPlugin::deactivate();
@@ -96,9 +95,24 @@ void OpenVrDisplayPlugin::resetSensors() {
     _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
 }


 glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
     Lock lock(_poseMutex);
+
+    float displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
+    float frameDuration = 1.f / displayFrequency;
+    float vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
+
+    // TODO: this seems awfuly long, 44ms total, but it produced the best results.
+    const float NUM_PREDICTION_FRAMES = 3.0f;
+    float predictedSecondsFromNow = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
+
+    vr::TrackedDevicePose_t predictedTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
+    _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated, predictedSecondsFromNow, predictedTrackedDevicePose, vr::k_unMaxTrackedDeviceCount);
+
+    // copy and process predictedTrackedDevicePoses
+    for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
+        _trackedDevicePose[i] = predictedTrackedDevicePose[i];
+        _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
+    }
     return _trackedDevicePoseMat4[0];
 }

@@ -114,16 +128,8 @@ void OpenVrDisplayPlugin::internalPresent() {

     glFinish();

-    _compositor->WaitGetPoses(_presentThreadTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
-
-    {
-        // copy and process _presentThreadTrackedDevicePoses
-        Lock lock(_poseMutex);
-        for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
-            _trackedDevicePose[i] = _presentThreadTrackedDevicePose[i];
-            _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
-        }
-    }
+    vr::TrackedDevicePose_t currentTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
+    _compositor->WaitGetPoses(currentTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);

     // Handle the mirroring in the base class
     HmdDisplayPlugin::internalPresent();
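For context on the internalPresent() change above: WaitGetPoses blocks until the compositor is ready for the next frame and returns poses for that frame, whereas GetDeviceToAbsoluteTrackingPose (now used in getHeadPose()) is a non-blocking query for poses at a caller-chosen time in the future. A minimal sketch contrasting the two calls, assuming OpenVR is already initialized and `system`/`compositor` are the usual interface pointers (names chosen for illustration):

#include <openvr.h>

void pollPoses(vr::IVRSystem* system, vr::IVRCompositor* compositor) {
    vr::TrackedDevicePose_t poses[vr::k_unMaxTrackedDeviceCount];

    // Blocks until the compositor's "running start" and fills in poses for the
    // frame about to be presented. After this commit the plugin discards these
    // results, so the call appears to remain only for its blocking/pacing behavior.
    compositor->WaitGetPoses(poses, vr::k_unMaxTrackedDeviceCount, nullptr, 0);

    // Non-blocking: asks the runtime for poses a caller-chosen number of seconds
    // in the future, which is what the new getHeadPose() relies on.
    float predictedSecondsFromNow = 0.044f; // ~44 ms, per the commit message
    system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseSeated,
                                            predictedSecondsFromNow,
                                            poses,
                                            vr::k_unMaxTrackedDeviceCount);
}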
@@ -35,7 +35,7 @@ protected:
     virtual void internalPresent() override;

 private:
-    vr::IVRSystem* _hmd { nullptr };
+    vr::IVRSystem* _system { nullptr };
     static const QString NAME;
     mutable Mutex _poseMutex;
 };