Fixing wobble

This commit is contained in:
Brad Davis 2016-04-12 14:11:36 -07:00
parent 9d29d6c0e9
commit 1a02220886
4 changed files with 172 additions and 48 deletions

View file

@ -0,0 +1,71 @@
#version 450 core
// Reprojection ("timewarp") shader: re-samples a side-by-side stereo composite
// texture using the rotation delta between render-time and present-time head
// poses, so the displayed image tracks late head movement.
uniform sampler2D sampler;
// Rotation from the render-time head orientation to the present-time one.
layout (location = 0) uniform mat3 reprojection = mat3(1);
// Per-eye projections and their inverses; [0] = left half, [1] = right half.
layout (location = 4) uniform mat4 inverseProjections[2];
layout (location = 12) uniform mat4 projections[2];
in vec2 vTexCoord;
in vec3 vPosition;
out vec4 FragColor;
void main() {
vec2 uv = vTexCoord;
mat4 eyeInverseProjection;
mat4 eyeProjection;
// Horizontal shift that maps the side-by-side NDC into a single eye's NDC.
float xoffset = 1.0;
// UV bounds of the current eye's half of the texture, used to clamp below.
vec2 uvmin = vec2(0.0);
vec2 uvmax = vec2(1.0);
// determine the correct projection and inverse projection to use.
if (vTexCoord.x < 0.5) {
// Left eye: occupies the [0.0, 0.5) horizontal range.
uvmax.x = 0.5;
eyeInverseProjection = inverseProjections[0];
eyeProjection = projections[0];
} else {
// Right eye: occupies the [0.5, 1.0] horizontal range.
xoffset = -1.0;
uvmin.x = 0.5;
uvmax.x = 1.0;
eyeInverseProjection = inverseProjections[1];
eyeProjection = projections[1];
}
// Account for stereo in calculating the per-eye NDC coordinates
vec4 ndcSpace = vec4(vPosition, 1.0);
ndcSpace.x *= 2.0;
ndcSpace.x += xoffset;
// Convert from NDC to eyespace
vec4 eyeSpace = eyeInverseProjection * ndcSpace;
eyeSpace /= eyeSpace.w;
// Convert to a normalized ray
vec3 ray = eyeSpace.xyz;
ray = normalize(ray);
// Adjust the ray by the rotation
ray = reprojection * ray;
// Project back on to the texture plane (rescale so depth matches eyeSpace.z)
ray /= ray.z;
ray *= eyeSpace.z;
// Update the eyespace vector
eyeSpace.xyz = ray;
// Reproject back into NDC
ndcSpace = eyeProjection * eyeSpace;
ndcSpace /= ndcSpace.w;
// Undo the per-eye shift to return to side-by-side NDC.
ndcSpace.x -= xoffset;
ndcSpace.x /= 2.0;
// Calculate the new UV coordinates
uv = (ndcSpace.xy / 2.0) + 0.5;
// Samples that fall outside this eye's half of the texture render as black.
if (any(greaterThan(uv, uvmax)) || any(lessThan(uv, uvmin))) {
FragColor = vec4(0.0, 0.0, 0.0, 1.0);
} else {
FragColor = texture(sampler, uv);
}
}

View file

@ -25,10 +25,11 @@
#include "../CompositorHelper.h"
static const QString MONO_PREVIEW = "Mono Preview";
static const QString REPROJECTION = "Allow Reprojection";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
static const QString DEVELOPER_MENU_PATH = "Developer>" + DisplayPlugin::MENU_PATH();
static const bool DEFAULT_MONO_VIEW = true;
// Size in pixels of the virtual screen surface the UI is composited onto
// inside the HMD.
glm::uvec2 HmdDisplayPlugin::getRecommendedUiSize() const {
return CompositorHelper::VIRTUAL_SCREEN_SIZE;
}
@ -42,7 +43,13 @@ bool HmdDisplayPlugin::internalActivate() {
_container->setBoolSetting("monoPreview", _monoPreview);
}, true, _monoPreview);
_container->removeMenu(FRAMERATE);
_container->addMenu(DEVELOPER_MENU_PATH);
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, DEVELOPER_MENU_PATH, REPROJECTION,
[this](bool clicked) {
_enableReprojection = clicked;
_container->setBoolSetting("enableReprojection", _enableReprojection);
}, true, _enableReprojection);
for_each_eye([&](Eye eye) {
_eyeInverseProjections[eye] = glm::inverse(_eyeProjections[eye]);
});
@ -70,6 +77,7 @@ static const GLint REPROJECTION_MATRIX_LOCATION = 0;
static const GLint INVERSE_PROJECTION_MATRIX_LOCATION = 4;
static const GLint PROJECTION_MATRIX_LOCATION = 12;
static const char * REPROJECTION_FS = R"FS(#version 450 core
uniform sampler2D sampler;
layout (location = 0) uniform mat3 reprojection = mat3(1);
layout (location = 4) uniform mat4 inverseProjections[2];
@ -81,20 +89,10 @@ in vec3 vPosition;
out vec4 FragColor;
void main() {
vec2 uv = vTexCoord;
vec3 Z_AXIS = vec3(0.0, 0.0, -1.0);
vec3 rotated = reprojection * Z_AXIS;
float angle = acos(dot(Z_AXIS, rotated));
if (angle < 0.001) {
FragColor = texture(sampler, uv);
return;
}
mat4 eyeInverseProjection;
mat4 eyeProjection;
float xoffset = 1.0;
vec2 uvmin = vec2(0.0);
@ -127,10 +125,12 @@ void main() {
// Adjust the ray by the rotation
ray = reprojection * ray;
// Project back on to the texture plane
eyeSpace.xyz = ray * eyeSpace.z;
//eyeSpace.xyz = ray;
ray *= eyeSpace.z / ray.z;
// Update the eyespace vector
eyeSpace.xyz = ray;
// Reproject back into NDC
ndcSpace = eyeProjection * eyeSpace;
@ -148,6 +148,50 @@ void main() {
}
)FS";
#ifdef DEBUG_REPROJECTION_SHADER
#include <QtCore/QFile>
#include <QtCore/QFileInfo>
#include <QtCore/QDateTime>
#include <PathUtils.h>
static const QString REPROJECTION_FS_FILE = "c:/Users/bdavis/Git/hifi/interface/resources/shaders/reproject.frag";
// Debug helper: hot-reloads the reprojection fragment shader from disk so it
// can be edited without restarting the application.  Returns the most recently
// compiled program; falls back to the default shader if loading or compilation
// fails, so callers always receive a usable program.
static ProgramPtr getReprojectionProgram() {
static ProgramPtr _currentProgram;
uint64_t now = usecTimestampNow();
static uint64_t _lastFileCheck = now;
bool modified = false;
// Poll the file timestamp at most once per 100ms rather than on every call.
if ((now - _lastFileCheck) > USECS_PER_MSEC * 100) {
// FIX: _lastFileCheck was never advanced, so after the first interval the
// file was stat()ed on every single invocation.
_lastFileCheck = now;
QFileInfo info(REPROJECTION_FS_FILE);
QDateTime lastModified = info.lastModified();
static QDateTime _lastModified = lastModified;
if (lastModified > _lastModified) {
_lastModified = lastModified;
modified = true;
}
}
if (!_currentProgram || modified) {
_currentProgram.reset();
try {
QFile shaderFile(REPROJECTION_FS_FILE);
// FIX: report open failures instead of silently compiling empty source.
if (!shaderFile.open(QIODevice::ReadOnly)) {
qDebug() << "Failed to open shader file: " << REPROJECTION_FS_FILE;
} else {
QString fragment = shaderFile.readAll();
compileProgram(_currentProgram, REPROJECTION_VS, fragment.toLocal8Bit().data());
}
} catch (const std::runtime_error& error) {
qDebug() << "Failed to build: " << error.what();
}
// Always leave a valid program in place for the caller.
if (!_currentProgram) {
_currentProgram = loadDefaultShader();
}
}
return _currentProgram;
}
#endif
void HmdDisplayPlugin::customizeContext() {
Parent::customizeContext();
@ -161,6 +205,7 @@ void HmdDisplayPlugin::customizeContext() {
// Releases the GL resources owned by this plugin; they are reset before
// delegating to the parent so teardown happens while the context is current.
void HmdDisplayPlugin::uncustomizeContext() {
_sphereSection.reset();
_compositeFramebuffer.reset();
_reprojectionProgram.reset();
Parent::uncustomizeContext();
}
@ -169,31 +214,27 @@ void HmdDisplayPlugin::updatePresentPose() {
_currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
}
// Rotation-only delta between the pose the frame was rendered with and the
// pose at present time, as a mat3 for the reprojection shader uniform.
glm::mat3 HmdDisplayPlugin::FrameInfo::presentRotation() const {
if (renderPose == presentPose) {
// Identity means "no reprojection needed"; callers compare against mat3().
return glm::mat3();
}
quat renderRotation = glm::quat_cast(renderPose);
quat presentRotation = glm::quat_cast(presentPose);
// inverse(render) * present carries the render orientation onto the present one.
quat reprojection = glm::inverse(renderRotation) * presentRotation;
return glm::mat3_cast(reprojection);
}
void HmdDisplayPlugin::compositeScene() {
updatePresentPose();
glm::mat3 reprojection = _currentPresentFrameInfo.presentRotation();
if (glm::mat3() == reprojection) {
if (!_enableReprojection || glm::mat3() == _currentPresentFrameInfo.presentReprojection) {
// No reprojection required
Parent::compositeScene();
return;
}
#ifdef DEBUG_REPROJECTION_SHADER
_reprojectionProgram = getReprojectionProgram();
#endif
useProgram(_reprojectionProgram);
using namespace oglplus;
Uniform<glm::mat3>(*_reprojectionProgram, REPROJECTION_MATRIX_LOCATION).Set(reprojection);
// FIXME what's the right oglplus mechanism to do this?
Texture::MinFilter(TextureTarget::_2D, TextureMinFilter::Linear);
Texture::MagFilter(TextureTarget::_2D, TextureMagFilter::Linear);
Uniform<glm::mat3>(*_reprojectionProgram, REPROJECTION_MATRIX_LOCATION).Set(_currentPresentFrameInfo.presentReprojection);
//Uniform<glm::mat4>(*_reprojectionProgram, PROJECTION_MATRIX_LOCATION).Set(_eyeProjections);
//Uniform<glm::mat4>(*_reprojectionProgram, INVERSE_PROJECTION_MATRIX_LOCATION).Set(_eyeInverseProjections);
// FIXME what's the right oglplus mechanism to do this? It's not that ^^^ ... better yet, switch to a uniform buffer
glUniformMatrix4fv(INVERSE_PROJECTION_MATRIX_LOCATION, 2, GL_FALSE, &(_eyeInverseProjections[0][0][0]));
glUniformMatrix4fv(PROJECTION_MATRIX_LOCATION, 2, GL_FALSE, &(_eyeProjections[0][0][0]));
_plane->UseInProgram(*_reprojectionProgram);

View file

@ -28,13 +28,6 @@ public:
virtual glm::mat4 getHeadPose() const override;
// Per-frame pose and timing data captured at render time and consumed at
// present time (pre-change version of the struct shown by this diff).
struct FrameInfo {
glm::mat4 renderPose; // head pose the frame was rendered with
glm::mat4 presentPose; // head pose at present time
double sensorSampleTime { 0 }; // when the tracking sample was taken
double predictedDisplayTime { 0 }; // predicted display time for the frame
glm::mat3 presentRotation() const; // rotation delta: render -> present
};
protected:
@ -60,6 +53,16 @@ protected:
glm::uvec2 _renderTargetSize;
float _ipd { 0.064f };
// Per-frame pose and timing data.  The "raw" poses are in tracking space;
// the non-raw poses have _sensorResetMat applied (see beginFrameRender /
// updatePresentPose in the OpenVR plugin).
struct FrameInfo {
glm::mat4 rawRenderPose; // render-time pose, tracking space (no sensor reset)
glm::mat4 renderPose; // render-time pose with the sensor-reset transform applied
glm::mat4 rawPresentPose; // present-time pose, tracking space
glm::mat4 presentPose; // present-time pose with the sensor-reset transform applied
double sensorSampleTime { 0 }; // when the tracking sample was taken
double predictedDisplayTime { 0 }; // predicted display time for the frame
glm::mat3 presentReprojection; // rotation delta render -> present, built from raw poses
};
QMap<uint32_t, FrameInfo> _frameInfos;
FrameInfo _currentPresentFrameInfo;
FrameInfo _currentRenderFrameInfo;
@ -67,6 +70,7 @@ protected:
private:
bool _enablePreview { false };
bool _monoPreview { true };
bool _enableReprojection { true };
ShapeWrapperPtr _sphereSection;
ProgramPtr _reprojectionProgram;
};

View file

@ -125,8 +125,8 @@ void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
}
void OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
void OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
double displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
double frameDuration = 1.f / displayFrequency;
double vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
@ -146,11 +146,12 @@ void OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
// copy and process predictedTrackedDevicePoses
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePose[i] = predictedTrackedDevicePose[i];
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
_trackedDevicePoseMat4[i] = toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
_trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
_trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
}
_currentRenderFrameInfo.renderPose = _trackedDevicePoseMat4[0];
_currentRenderFrameInfo.rawRenderPose = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd];
_currentRenderFrameInfo.renderPose = _sensorResetMat * _currentRenderFrameInfo.rawRenderPose;
Lock lock(_mutex);
_frameInfos[frameIndex] = _currentRenderFrameInfo;
@ -183,13 +184,20 @@ bool OpenVrDisplayPlugin::isHmdMounted() const {
}
// Samples the HMD pose predicted for the moment this frame's photons reach the
// display, and derives the render->present rotation delta used for reprojection.
void OpenVrDisplayPlugin::updatePresentPose() {
// NOTE(review): the stanza below duplicates the scoped computation that
// follows, and its presentPose result is overwritten at the end of the
// function — this looks like diff-rendering residue of the pre-change version
// of this function; verify against version control before relying on it.
float fSecondsSinceLastVsync;
_system->GetTimeSinceLastVsync(&fSecondsSinceLastVsync, nullptr);
float fDisplayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
float fFrameDuration = 1.f / fDisplayFrequency;
float fVsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
float fPredictedSecondsFromNow = fFrameDuration - fSecondsSinceLastVsync + fVsyncToPhotons;
vr::TrackedDevicePose_t presentPoseOpenVR;
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, fPredictedSecondsFromNow, &presentPoseOpenVR, 1);
_currentPresentFrameInfo.presentPose = _sensorResetMat * toGlm(presentPoseOpenVR.mDeviceToAbsoluteTracking);
{
// Predict how far in the future photons are emitted: the remainder of the
// current frame plus the panel's vsync-to-photons latency.
float fSecondsSinceLastVsync;
_system->GetTimeSinceLastVsync(&fSecondsSinceLastVsync, nullptr);
float fDisplayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
float fFrameDuration = 1.f / fDisplayFrequency;
float fVsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
float fPredictedSecondsFromNow = fFrameDuration - fSecondsSinceLastVsync + fVsyncToPhotons;
vr::TrackedDevicePose_t pose;
// Query the pose predicted for that time, in standing tracking space.
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, fPredictedSecondsFromNow, &pose, 1);
_currentPresentFrameInfo.rawPresentPose = toGlm(pose.mDeviceToAbsoluteTracking);
}
// Apply the sensor-reset transform for the pose seen by the rest of the app.
_currentPresentFrameInfo.presentPose = _sensorResetMat * _currentPresentFrameInfo.rawPresentPose;
// The reprojection delta is built from the raw tracking-space poses, so the
// sensor-reset transform cancels out of the rotation difference.
mat3 renderRotation(_currentPresentFrameInfo.rawRenderPose);
mat3 presentRotation(_currentPresentFrameInfo.rawPresentPose);
_currentPresentFrameInfo.presentReprojection = glm::mat3(glm::inverse(renderRotation) * presentRotation);
}