Working on reprojection

This commit is contained in:
Brad Davis 2016-07-31 18:19:28 -07:00
parent 66cc9136eb
commit 7e93747acf
30 changed files with 868 additions and 260 deletions

View file

@ -6,73 +6,12 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
uniform sampler2D sampler;
uniform mat3 reprojection = mat3(1);
uniform mat4 inverseProjections[2];
uniform mat4 projections[2];
in vec2 vTexCoord;
in vec3 vPosition;
in vec2 varTexCoord0;
out vec4 FragColor;
void main() {
vec2 uv = vTexCoord;
mat4 eyeInverseProjection;
mat4 eyeProjection;
float xoffset = 1.0;
vec2 uvmin = vec2(0.0);
vec2 uvmax = vec2(1.0);
// determine the correct projection and inverse projection to use.
if (vTexCoord.x < 0.5) {
uvmax.x = 0.5;
eyeInverseProjection = inverseProjections[0];
eyeProjection = projections[0];
} else {
xoffset = -1.0;
uvmin.x = 0.5;
uvmax.x = 1.0;
eyeInverseProjection = inverseProjections[1];
eyeProjection = projections[1];
}
// Account for stereo in calculating the per-eye NDC coordinates
vec4 ndcSpace = vec4(vPosition, 1.0);
ndcSpace.x *= 2.0;
ndcSpace.x += xoffset;
// Convert from NDC to eyespace
vec4 eyeSpace = eyeInverseProjection * ndcSpace;
eyeSpace /= eyeSpace.w;
// Convert to a normalized ray
vec3 ray = eyeSpace.xyz;
ray = normalize(ray);
// Adjust the ray by the rotation
ray = reprojection * ray;
// Project back on to the texture plane
ray *= eyeSpace.z / ray.z;
// Update the eyespace vector
eyeSpace.xyz = ray;
// Reproject back into NDC
ndcSpace = eyeProjection * eyeSpace;
ndcSpace /= ndcSpace.w;
ndcSpace.x -= xoffset;
ndcSpace.x /= 2.0;
// Calculate the new UV coordinates
uv = (ndcSpace.xy / 2.0) + 0.5;
if (any(greaterThan(uv, uvmax)) || any(lessThan(uv, uvmin))) {
FragColor = vec4(0.0, 0.0, 0.0, 1.0);
} else {
FragColor = texture(sampler, uv);
}
FragColor = texture(sampler, varTexCoord0);
}

View file

@ -6,15 +6,85 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
in vec3 Position;
in vec2 TexCoord;
precision highp float;
out vec3 vPosition;
out vec2 vTexCoord;
struct TransformCamera {
mat4 _view;
mat4 _viewInverse;
mat4 _projectionViewUntranslated;
mat4 _projection;
mat4 _projectionInverse;
vec4 _viewport;
vec4 _stereoInfo;
};
void main() {
gl_Position = vec4(Position, 1);
vTexCoord = TexCoord;
vPosition = Position;
layout(std140) uniform transformCameraBuffer {
TransformCamera _camera;
};
TransformCamera getTransformCamera() {
return _camera;
}
vec3 getEyeWorldPos() {
return _camera._viewInverse[3].xyz;
}
bool cam_isStereo() {
return _camera._stereoInfo.x > 0.0;
}
float cam_getStereoSide() {
return _camera._stereoInfo.y;
}
struct Reprojection {
mat4 rotation;
};
layout(std140) uniform reprojectionBuffer {
Reprojection reprojection;
};
layout(location = 0) in vec4 inPosition;
noperspective out vec2 varTexCoord0;
void main(void) {
// standard transform
TransformCamera cam = getTransformCamera();
vec2 uv = inPosition.xy;
uv.x /= 2.0;
vec4 pos = inPosition;
pos *= 2.0;
pos -= 1.0;
if (cam_getStereoSide() > 0.0) {
uv.x += 0.5;
}
if (reprojection.rotation != mat4(1)) {
vec4 eyeSpace = _camera._projectionInverse * pos;
eyeSpace /= eyeSpace.w;
// Convert to a normalized ray
vec3 ray = eyeSpace.xyz;
ray = normalize(ray);
// Adjust the ray by the rotation
ray = mat3(inverse(reprojection.rotation)) * ray;
// Project back on to the texture plane
ray *= eyeSpace.z / ray.z;
eyeSpace.xyz = ray;
// Move back into NDC space
eyeSpace = _camera._projection * eyeSpace;
eyeSpace /= eyeSpace.w;
eyeSpace.z = 0.0;
pos = eyeSpace;
}
gl_Position = pos;
varTexCoord0 = uv;
}

View file

@ -1,4 +1,5 @@
set(TARGET_NAME display-plugins)
AUTOSCRIBE_SHADER_LIB(gpu display-plugins)
setup_hifi_library(OpenGL)
link_hifi_libraries(shared plugins ui-plugins gl gpu-gl ui)

View file

@ -12,6 +12,7 @@
#include "NullDisplayPlugin.h"
#include "stereo/SideBySideStereoDisplayPlugin.h"
#include "stereo/InterleavedStereoDisplayPlugin.h"
#include "hmd/DebugHmdDisplayPlugin.h"
#include "Basic2DWindowOpenGLDisplayPlugin.h"
const QString& DisplayPlugin::MENU_PATH() {
@ -23,6 +24,7 @@ const QString& DisplayPlugin::MENU_PATH() {
DisplayPluginList getDisplayPlugins() {
DisplayPlugin* PLUGIN_POOL[] = {
new Basic2DWindowOpenGLDisplayPlugin(),
new DebugHmdDisplayPlugin(),
#ifdef DEBUG
new NullDisplayPlugin(),
#endif

View file

@ -33,6 +33,7 @@
#include <gpu/Texture.h>
#include <gpu/StandardShaderLib.h>
#include <gpu/gl/GLShared.h>
#include <gpu/gl/GLBackend.h>
#include <GeometryCache.h>
#include <FramebufferCache.h>
@ -213,9 +214,6 @@ private:
QGLContext* _context { nullptr };
};
// Returns true when called from the presentation thread, which this plugin
// treats as its render thread (created/started in activate()).
bool OpenGLDisplayPlugin::isRenderThread() const {
return QThread::currentThread() == DependencyManager::get<PresentThread>()->thread();
}
OpenGLDisplayPlugin::OpenGLDisplayPlugin() {
}
@ -253,6 +251,9 @@ bool OpenGLDisplayPlugin::activate() {
presentThread->start();
}
_presentThread = presentThread.data();
if (!RENDER_THREAD) {
RENDER_THREAD = _presentThread;
}
// Child classes may override this in order to do things like initialize
// libraries, etc
@ -677,3 +678,10 @@ ivec4 OpenGLDisplayPlugin::eyeViewport(Eye eye) const {
}
return ivec4(vpPos, vpSize);
}
// Returns the backend downcast to its GL implementation, or nullptr when no
// backend exists yet (or when the active backend is not a GL backend).
gpu::gl::GLBackend* OpenGLDisplayPlugin::getGLBackend() {
    return _backend ? dynamic_cast<gpu::gl::GLBackend*>(_backend.get()) : nullptr;
}

View file

@ -21,6 +21,12 @@
#include <gl/GLEscrow.h>
#include <shared/RateCounter.h>
namespace gpu {
namespace gl {
class GLBackend;
}
}
class OpenGLDisplayPlugin : public DisplayPlugin {
Q_OBJECT
Q_PROPERTY(float overlayAlpha MEMBER _overlayAlpha)
@ -37,8 +43,6 @@ public:
// between the main thread and the presentation thread
bool activate() override final;
void deactivate() override final;
bool isRenderThread() const override final;
bool eventFilter(QObject* receiver, QEvent* event) override;
bool isDisplayVisible() const override { return true; }
@ -139,6 +143,7 @@ protected:
f();
}
gpu::gl::GLBackend* getGLBackend();
private:
// Any resource shared by the main thread and the presentation thread must
// be serialized through this mutex

View file

@ -0,0 +1,76 @@
//
// Created by Bradley Austin Davis on 2016/07/31
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "DebugHmdDisplayPlugin.h"
#include <ViewFrustum.h>
#include <controllers/Pose.h>
#include <gpu/Frame.h>
const QString DebugHmdDisplayPlugin::NAME("HMD Simulator");
static const QString DEBUG_FLAG("HIFI_DEBUG_HMD");
// Always reports the simulated HMD as available.
// NOTE(review): DEBUG_FLAG ("HIFI_DEBUG_HMD") above is presumably meant to
// gate this via an environment variable — see the FIXME below; confirm intent.
bool DebugHmdDisplayPlugin::isSupported() const {
// FIXME use the env variable
return true;
}
// Resets the simulated head tracking to the identity pose.
void DebugHmdDisplayPlugin::resetSensors() {
_currentRenderFrameInfo.renderPose = glm::mat4(); // identity
}
// Begins a simulated frame: stamps the frame info with "now" for both the
// sensor sample and the predicted display time, then records it under
// frameIndex while holding the non-present-thread lock.
bool DebugHmdDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
    FrameInfo frameInfo;
    frameInfo.sensorSampleTime = secTimestampNow();
    // No real sensor: predict display at the sample time itself.
    frameInfo.predictedDisplayTime = frameInfo.sensorSampleTime;
    _currentRenderFrameInfo = frameInfo;
    // FIXME simulate head movement
    //_currentRenderFrameInfo.renderPose = ;
    //_currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;
    withNonPresentThreadLock([&] {
        _uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
        _frameInfos[frameIndex] = _currentRenderFrameInfo;
    });
    return Parent::beginFrameRender(frameIndex);
}
// DLL based display plugins MUST initialize GLEW inside the DLL code.
// Initializes GLEW for this context before deferring to the parent.
void DebugHmdDisplayPlugin::customizeContext() {
glewExperimental = true;
GLenum err = glewInit();
glGetError(); // clear the potential error from glewExperimental
// Parent sets up the shared display-plugin GL state on top of a working GLEW.
Parent::customizeContext();
}
// Activates the simulated HMD using hard-coded per-eye projection matrices,
// IPD/eye offsets and render-target size captured from real HMD hardware.
bool DebugHmdDisplayPlugin::internalActivate() {
    _ipd = 0.0327499993f * 2.0f;
    // Left eye projection (column-major).
    _eyeProjections[0][0] = vec4{ 0.759056330, 0.000000000, 0.000000000, 0.000000000 };
    _eyeProjections[0][1] = vec4{ 0.000000000, 0.682773232, 0.000000000, 0.000000000 };
    _eyeProjections[0][2] = vec4{ -0.0580431037, -0.00619550655, -1.00000489, -1.00000000 };
    _eyeProjections[0][3] = vec4{ 0.000000000, 0.000000000, -0.0800003856, 0.000000000 };
    // Right eye projection.
    _eyeProjections[1][0] = vec4{ 0.752847493, 0.000000000, 0.000000000, 0.000000000 };
    _eyeProjections[1][1] = vec4{ 0.000000000, 0.678060353, 0.000000000, 0.000000000 };
    _eyeProjections[1][2] = vec4{ 0.0578232110, -0.00669418881, -1.00000489, -1.000000000 };
    _eyeProjections[1][3] = vec4{ 0.000000000, 0.000000000, -0.0800003856, 0.000000000 };
    _eyeInverseProjections[0] = glm::inverse(_eyeProjections[0]);
    _eyeInverseProjections[1] = glm::inverse(_eyeProjections[1]);
    // Per-eye offsets: +/- half the IPD on X.
    _eyeOffsets[0][3] = vec4{ -0.0327499993, 0.0, 0.0149999997, 1.0 };
    // BUG FIX: the right-eye offset was previously written to _eyeOffsets[0]
    // a second time, leaving _eyeOffsets[1] as identity; index eye 1 here.
    _eyeOffsets[1][3] = vec4{ 0.0327499993, 0.0, 0.0149999997, 1.0 };
    _renderTargetSize = { 3024, 1680 };
    _cullingProjection = _eyeProjections[0];
    // This must come after the initialization, so that the values calculated
    // above are available during the customizeContext call (when not running
    // in threaded present mode)
    return Parent::internalActivate();
}
// Simulates a present pose: a fixed 0.5 radian rotation about the up axis,
// which exercises the reprojection path even without head movement.
void DebugHmdDisplayPlugin::updatePresentPose() {
// if (usecTimestampNow() % 4000000 > 2000000) {
_currentPresentFrameInfo.presentPose = glm::mat4_cast(glm::angleAxis(0.5f, Vectors::UP));
// }
}

View file

@ -0,0 +1,33 @@
//
// Created by Bradley Austin Davis on 2016/07/31
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "HmdDisplayPlugin.h"
// Simulated HMD display plugin ("HMD Simulator"): exercises the HMD render
// and reprojection paths without physical hardware attached.
class DebugHmdDisplayPlugin : public HmdDisplayPlugin {
using Parent = HmdDisplayPlugin;
public:
const QString& getName() const override { return NAME; }
// Shown under the developer plugin grouping.
grouping getGrouping() const override { return DEVELOPER; }
bool isSupported() const override;
void resetSensors() override final;
bool beginFrameRender(uint32_t frameIndex) override;
// Matches typical HMD refresh (90 Hz).
float getTargetFrameRate() const override { return 90; }
protected:
void updatePresentPose() override;
// No real device to present to.
void hmdPresent() override {}
// Always report the simulated headset as worn.
bool isHmdMounted() const override { return true; }
void customizeContext() override;
bool internalActivate() override;
private:
static const QString NAME;
};

View file

@ -5,6 +5,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HmdDisplayPlugin.h"
#include <memory>
@ -25,13 +26,16 @@
#include <GeometryCache.h>
#include <gpu/Context.h>
#include <gpu/StandardShaderLib.h>
#include <gpu/gl/GLBackend.h>
#include <PathUtils.h>
#include "hmd_reproject_vert.h"
#include "hmd_reproject_frag.h"
#include "../Logging.h"
#include "../CompositorHelper.h"
static const QString MONO_PREVIEW = "Mono Preview";
static const QString REPROJECTION = "Allow Reprojection";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
@ -41,6 +45,7 @@ static const int NUMBER_OF_HANDS = 2;
static const glm::mat4 IDENTITY_MATRIX;
//#define LIVE_SHADER_RELOAD 1
extern glm::vec3 getPoint(float yaw, float pitch);
static QString readFile(const QString& filename) {
QFile file(filename);
@ -103,10 +108,36 @@ void HmdDisplayPlugin::internalDeactivate() {
Parent::internalDeactivate();
}
extern glm::vec3 getPoint(float yaw, float pitch);
void HmdDisplayPlugin::customizeContext() {
Parent::customizeContext();
// Only enable mirroring if we know vsync is disabled
// On Mac, this won't work due to how the contexts are handled, so don't try
#if !defined(Q_OS_MAC)
enableVsync(false);
#endif
_enablePreview = !isVsyncEnabled();
_overlay.build();
#if 0
updateReprojectionProgram();
updateLaserProgram();
_laserGeometry = loadLaser(_laserProgram);
#endif
void flushBuffer(const gpu::BufferPointer& buffer) {
buffer->applyUpdate(buffer->getUpdate());
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create(gpu::Element::COLOR_SRGBA_32, _renderTargetSize.x, _renderTargetSize.y));
_compositeTexture = _compositeFramebuffer->getRenderBuffer(0);
}
void HmdDisplayPlugin::uncustomizeContext() {
#if 0
_overlayProgram.reset();
_sphereSection.reset();
_compositeFramebuffer.reset();
_previewProgram.reset();
_reprojectionProgram.reset();
_laserProgram.reset();
_laserGeometry.reset();
#endif
Parent::uncustomizeContext();
}
void HmdDisplayPlugin::OverlayRender::build() {
@ -137,7 +168,7 @@ void HmdDisplayPlugin::OverlayRender::build() {
vertices->append(sizeof(Vertex), (gpu::Byte*)&vertex);
}
}
flushBuffer(vertices);
vertices->flush();
// Compute number of indices needed
static const int VERTEX_PER_TRANGLE = 3;
@ -164,7 +195,7 @@ void HmdDisplayPlugin::OverlayRender::build() {
}
}
this->indices->append(indices);
flushBuffer(this->indices);
this->indices->flush();
format = std::make_shared<gpu::Stream::Format>(); // 1 for everyone
format->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
format->setAttribute(gpu::Stream::TEXCOORD, gpu::Stream::TEXCOORD, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV));
@ -206,13 +237,13 @@ void HmdDisplayPlugin::OverlayRender::updatePipeline() {
pipeline = gpu::Pipeline::create(program, state);
}
}
}
void HmdDisplayPlugin::OverlayRender::render() {
void HmdDisplayPlugin::OverlayRender::render(HmdDisplayPlugin& plugin) {
for_each_eye([&](Eye eye){
uniforms.mvp = mvps[eye];
uniformBuffers[eye]->setSubData(0, uniforms);
flushBuffer(uniformBuffers[eye]);
uniformBuffers[eye]->flush();
});
gpu::Batch batch;
batch.enableStereo(false);
@ -233,22 +264,6 @@ void HmdDisplayPlugin::OverlayRender::render() {
plugin._backend->render(batch);
}
void HmdDisplayPlugin::customizeContext() {
Parent::customizeContext();
// Only enable mirroring if we know vsync is disabled
// On Mac, this won't work due to how the contexts are handled, so don't try
#if !defined(Q_OS_MAC)
enableVsync(false);
#endif
_enablePreview = !isVsyncEnabled();
_overlay.build();
#if 0
updateReprojectionProgram();
updateLaserProgram();
_laserGeometry = loadLaser(_laserProgram);
#endif
}
#if 0
void HmdDisplayPlugin::updateReprojectionProgram() {
static const QString vsFile = PathUtils::resourcesPath() + "/shaders/hmd_reproject.vert";
@ -328,54 +343,145 @@ void HmdDisplayPlugin::updateLaserProgram() {
#endif
}
void HmdDisplayPlugin::uncustomizeContext() {
#if 0
_overlayProgram.reset();
_sphereSection.reset();
_compositeFramebuffer.reset();
_previewProgram.reset();
_reprojectionProgram.reset();
_laserProgram.reset();
_laserGeometry.reset();
#endif
Parent::uncustomizeContext();
}
// By default assume we'll present with the same pose as the render
void HmdDisplayPlugin::updatePresentPose() {
_currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
}
void HmdDisplayPlugin::compositeScene() {
updatePresentPose();
//static const std::string HMD_REPROJECT_FRAG = R"SHADER(
//
//in vec2 varTexCoord0;
//
//out vec4 outFragColor;
//
//uniform sampler2D sampler;
//
//void main() {
// vec2 uv = varTexCoord0;
// outFragColor = texture(sampler, uv); // vec4(varTexCoord0, 0.0, 1.0);
//}
//
//)SHADER";
void HmdDisplayPlugin::SceneRenderer::build() {
static const QString vsFile = "C:/Users/bdavis/Git/hifi/interface/resources/shaders/hmd_reproject.vert";
static const QString fsFile = "C:/Users/bdavis/Git/hifi/interface/resources/shaders/hmd_reproject.frag";
if (!_enableReprojection || glm::mat3() == _currentPresentFrameInfo.presentReprojection) {
// No reprojection required
Parent::compositeScene();
return;
#if 1 //LIVE_SHADER_RELOAD
static qint64 vsBuiltAge = 0;
static qint64 fsBuiltAge = 0;
QFileInfo vsInfo(vsFile);
QFileInfo fsInfo(fsFile);
auto vsAge = vsInfo.lastModified().toMSecsSinceEpoch();
auto fsAge = fsInfo.lastModified().toMSecsSinceEpoch();
if (!pipeline || vsAge > vsBuiltAge || fsAge > fsBuiltAge) {
vsBuiltAge = vsAge;
fsBuiltAge = fsAge;
#else
if (!pipeline) {
#endif
QString vsSource = readFile(vsFile);
QString fsSource = readFile(fsFile);
auto vs = gpu::Shader::createVertex(vsSource.toLocal8Bit().toStdString());
auto ps = gpu::Shader::createPixel(fsSource.toLocal8Bit().toStdString());
gpu::ShaderPointer program = gpu::Shader::createProgram(vs, ps);
gpu::gl::GLBackend::makeProgram(*program);
uniformsLocation = program->getBuffers().findLocation("reprojectionBuffer");
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
pipeline = gpu::Pipeline::create(program, state);
}
#ifdef DEBUG_REPROJECTION_SHADER
_reprojectionProgram = getReprojectionProgram();
#endif
#if 0
useProgram(_reprojectionProgram);
if (!uniformBuffer) {
uniformBuffer = std::make_shared<gpu::Buffer>(sizeof(Uniforms), nullptr);
}
using namespace oglplus;
Texture::MinFilter(TextureTarget::_2D, TextureMinFilter::Linear);
Texture::MagFilter(TextureTarget::_2D, TextureMagFilter::Linear);
Uniform<glm::mat3>(*_reprojectionProgram, _reprojectionUniforms.reprojectionMatrix).Set(_currentPresentFrameInfo.presentReprojection);
//Uniform<glm::mat4>(*_reprojectionProgram, PROJECTION_MATRIX_LOCATION).Set(_eyeProjections);
//Uniform<glm::mat4>(*_reprojectionProgram, INVERSE_PROJECTION_MATRIX_LOCATION).Set(_eyeInverseProjections);
// FIXME what's the right oglplus mechanism to do this? It's not that ^^^ ... better yet, switch to a uniform buffer
glUniformMatrix4fv(_reprojectionUniforms.inverseProjectionMatrix, 2, GL_FALSE, &(_eyeInverseProjections[0][0][0]));
glUniformMatrix4fv(_reprojectionUniforms.projectionMatrix, 2, GL_FALSE, &(_eyeProjections[0][0][0]));
_plane->UseInProgram(*_reprojectionProgram);
_plane->Draw();
#endif
if (!vertices) {
static const uint16_t stacks = 128;
static const uint16_t slices = 64;
static const vec3 increment = vec3(1) / vec3(slices, stacks, 1);
std::vector<vec3> vertexBuffer;
vertexCount = stacks * slices * 3 * 2;
for (size_t x = 0; x < slices; ++x) {
for (size_t y = 0; y < stacks; ++y) {
vertexBuffer.push_back(vec3(x, y + 1, 0) * increment);
vertexBuffer.push_back(vec3(x, y, 0) * increment);
vertexBuffer.push_back(vec3(x + 1, y + 1, 0) * increment);
vertexBuffer.push_back(vec3(x + 1, y + 1, 0) * increment);
vertexBuffer.push_back(vec3(x, y, 0) * increment);
vertexBuffer.push_back(vec3(x + 1, y, 0) * increment);
}
}
vertices = std::make_shared<gpu::Buffer>();
vertices->setData(sizeof(vec3) * vertexBuffer.size(), (gpu::Byte*)vertexBuffer.data());
vertices->flush();
format = std::make_shared<gpu::Stream::Format>();
format->setAttribute(gpu::Stream::POSITION, gpu::Stream::POSITION, gpu::Element(gpu::VEC3, gpu::FLOAT, gpu::XYZ), 0);
}
}
void HmdDisplayPlugin::SceneRenderer::update(const glm::mat4& rotation) {
build();
{
uniforms.rotation = mat4();
float correctionMagnitude = glm::angle(glm::quat_cast(rotation));
if (correctionMagnitude > 0.001f) {
uniforms.rotation = rotation;
}
static size_t i = 0;
if (0 == (++i % 10)) {
qDebug() << "Correction angle size " << correctionMagnitude;
}
}
uniformBuffer->setSubData(0, uniforms);
uniformBuffer->flush();
}
void HmdDisplayPlugin::SceneRenderer::render(gpu::Batch& batch) {
if (pipeline) {
batch.setPipeline(pipeline);
batch.setInputFormat(format);
batch.setInputBuffer(gpu::Stream::POSITION,
gpu::BufferView(vertices, 0, vertices->getSize(), sizeof(vec3), format->getAttributes().at(gpu::Stream::POSITION)._element));
batch.draw(gpu::TRIANGLES, vertexCount);
}
}
void HmdDisplayPlugin::compositeScene() {
{
auto batchPose = glm::dmat3(glm::mat3(_currentFrame->pose));
auto currentPose = glm::dmat3(glm::mat3(_currentPresentFrameInfo.presentPose));
auto correction = glm::inverse(batchPose) * currentPose;
_sceneRenderer.update(glm::mat4(glm::dmat4(correction)));
}
{
gpu::Batch batch;
batch.enableStereo(false);
batch.setViewportTransform(ivec4(uvec2(), _renderTargetSize));
batch.setFramebuffer(_compositeFramebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(1, 1, 0, 1));
_backend->render(batch);
}
{
gpu::Batch batch;
if (_sceneRenderer.uniformsLocation >= 0) {
batch.setUniformBuffer(_sceneRenderer.uniformsLocation, _sceneRenderer.uniformBuffer);
}
batch.setViewportTransform(ivec4(uvec2(), _renderTargetSize));
batch.setViewTransform(Transform());
batch.setProjectionTransform(mat4());
batch.setFramebuffer(_compositeFramebuffer);
batch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
_sceneRenderer.render(batch);
_backend->render(batch);
}
}
void HmdDisplayPlugin::compositeOverlay() {
#if 0
if (!_currentFrame) {
return;
}
@ -455,9 +561,11 @@ void HmdDisplayPlugin::compositeOverlay() {
_overlay.uniforms.glowColors[1] = _presentHandLasers[1].color;
}
_overlay.render();
#endif
}
void HmdDisplayPlugin::compositePointer() {
#if 0
auto& cursorManager = Cursor::Manager::instance();
const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
auto compositorHelper = DependencyManager::get<CompositorHelper>();
@ -469,7 +577,7 @@ void HmdDisplayPlugin::compositePointer() {
batch.setFramebuffer(_currentFrame->framebuffer);
batch.setPipeline(_cursorPipeline);
batch.setResourceTexture(0, cursorData.texture);
batch.setViewTransform(Transform());
batch.clearViewTransform();
for_each_eye([&](Eye eye) {
auto eyePose = _currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye);
auto reticleTransform = compositorHelper->getReticleTransform(eyePose, headPosition);
@ -479,15 +587,16 @@ void HmdDisplayPlugin::compositePointer() {
batch.draw(gpu::TRIANGLE_STRIP, 4);
});
_backend->render(batch);
#endif
}
void HmdDisplayPlugin::internalPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
// Composite together the scene, overlay and mouse cursor
hmdPresent();
/*
// screen preview mirroring
auto window = _container->getPrimaryWidget();
auto devicePixelRatio = window->devicePixelRatio();
@ -513,20 +622,19 @@ void HmdDisplayPlugin::internalPresent() {
} else if (targetViewportSize.y < windowSize.y) {
targetViewportPosition.y = (windowSize.y - targetViewportSize.y) / 2;
}
*/
if (_enablePreview) {
Parent::internalPresent();
//gpu::Batch presentBatch;
//presentBatch.enableStereo(false);
//presentBatch.setViewTransform(Transform());
//presentBatch.setFramebuffer(gpu::FramebufferPointer());
//presentBatch.setViewportTransform(ivec4(targetViewportPosition, targetViewportSize));
//presentBatch.setResourceTexture(0, _currentFrame->framebuffer->getRenderBuffer(0));
//presentBatch.setPipeline(_presentPipeline);
//presentBatch.draw(gpu::TRIANGLE_STRIP, 4);
//_backend->render(presentBatch);
//swapBuffers();
gpu::Batch presentBatch;
presentBatch.enableStereo(false);
presentBatch.setViewTransform(Transform());
presentBatch.setFramebuffer(gpu::FramebufferPointer());
presentBatch.setViewportTransform(ivec4(uvec2(0), getSurfacePixels()));
presentBatch.setResourceTexture(0, _compositeTexture);
presentBatch.setPipeline(_presentPipeline);
presentBatch.draw(gpu::TRIANGLE_STRIP, 4);
_backend->render(presentBatch);
swapBuffers();
}
postPreview();
@ -553,6 +661,8 @@ void HmdDisplayPlugin::updateFrameData() {
}
});
}
updatePresentPose();
}
glm::mat4 HmdDisplayPlugin::getHeadPose() const {

View file

@ -23,7 +23,6 @@
class HmdDisplayPlugin : public OpenGLDisplayPlugin {
using Parent = OpenGLDisplayPlugin;
public:
HmdDisplayPlugin() : _overlay( *this ) {}
bool isHmd() const override final { return true; }
float getIPD() const override final { return _ipd; }
glm::mat4 getEyeToHeadTransform(Eye eye) const override final { return _eyeOffsets[eye]; }
@ -67,9 +66,6 @@ protected:
}
};
Transform _uiModelTransform;
std::array<HandLaserInfo, 2> _handLasers;
std::array<glm::mat4, 2> _handPoses;
@ -87,9 +83,7 @@ protected:
float _ipd { 0.064f };
struct FrameInfo {
glm::mat4 rawRenderPose;
glm::mat4 renderPose;
glm::mat4 rawPresentPose;
glm::mat4 presentPose;
double sensorSampleTime { 0 };
double predictedDisplayTime { 0 };
@ -99,6 +93,8 @@ protected:
QMap<uint32_t, FrameInfo> _frameInfos;
FrameInfo _currentPresentFrameInfo;
FrameInfo _currentRenderFrameInfo;
gpu::FramebufferPointer _compositeFramebuffer;
gpu::TexturePointer _compositeTexture;
private:
void updateLaserProgram();
@ -113,10 +109,24 @@ private:
glm::uvec2 _prevWindowSize { 0, 0 };
qreal _prevDevicePixelRatio { 0 };
struct SceneRenderer {
int32_t uniformsLocation{ -1 };
uint32_t vertexCount;
struct Uniforms {
mat4 rotation;
} uniforms;
gpu::Stream::FormatPointer format;
gpu::BufferPointer vertices;
gpu::PipelinePointer pipeline;
gpu::BufferPointer uniformBuffer;
void build();
void update(const glm::mat4& rotation);
void render(gpu::Batch& batch);
} _sceneRenderer;
struct OverlayRender {
OverlayRender(HmdDisplayPlugin& plugin) : plugin(plugin) {};
HmdDisplayPlugin& plugin;
gpu::Stream::FormatPointer format;
gpu::BufferPointer vertices;
gpu::BufferPointer indices;
@ -148,7 +158,7 @@ private:
void build();
void updatePipeline();
void render();
void render(HmdDisplayPlugin& plugin);
} _overlay;
#if 0
ProgramPtr _previewProgram;

View file

@ -0,0 +1,35 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core

// Laser line fragment shader: inLineDistance carries normalized distances
// from the line core (x along the line, y across it); alpha fades with
// distance to produce soft edges and endpoint caps.
uniform vec4 color = vec4(1.0, 1.0, 1.0, 1.0);

layout(location = 0) in vec3 inLineDistance;

out vec4 FragColor;

void main() {
    vec2 d = abs(inLineDistance.xy);
    // Beyond the endpoints (|x| > 1), remap the overshoot into the cap falloff.
    d.x = (d.x > 1.0) ? ((d.x - 1.0) / 0.02) : 0.0;
    float alpha = 1.0 - length(d);
    if (alpha <= 0.0) {
        discard;
    }
    // Sharpen the falloff, then drop nearly-transparent fragments.
    alpha = pow(alpha, 10.0);
    if (alpha < 0.05) {
        discard;
    }
    FragColor = vec4(color.rgb, alpha);
}

View file

@ -0,0 +1,70 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#version 410 core
#extension GL_EXT_geometry_shader4 : enable

// Geometry shader: expands each input line segment into a screen-space quad
// (triangle strip) so the laser can be drawn with width; the per-vertex
// outLineDistance feeds the fragment shader's distance-based alpha falloff.
layout(location = 0) out vec3 outLineDistance;

layout(lines) in;
layout(triangle_strip, max_vertices = 24) out;

// Returns two vectors orthogonal to n (and to each other), scaled by 'scale'.
// NOTE(review): not referenced by main() below — presumably kept for a future
// 3D-billboarded variant; confirm before removing.
vec3[2] getOrthogonals(in vec3 n, float scale) {
float yDot = abs(dot(n, vec3(0, 1, 0)));
vec3 result[2];
// Cross with whichever world axis is least parallel to n for stability.
if (yDot < 0.9) {
result[0] = normalize(cross(n, vec3(0, 1, 0)));
} else {
result[0] = normalize(cross(n, vec3(1, 0, 0)));
}
// The cross of result[0] and n is orthogonal to both, which are orthogonal to each other
result[1] = cross(result[0], n);
result[0] *= scale;
result[1] *= scale;
return result;
}

// 90-degree rotation of a 2D vector.
vec2 orthogonal(vec2 v) {
vec2 result = v.yx;
result.y *= -1.0;
return result;
}

void main() {
// Perspective-divide the two endpoints into NDC.
vec2 endpoints[2];
for (int i = 0; i < 2; ++i) {
endpoints[i] = gl_PositionIn[i].xy / gl_PositionIn[i].w;
}
vec2 lineNormal = normalize(endpoints[1] - endpoints[0]);
vec2 lineOrthogonal = orthogonal(lineNormal);
// 0.02 sets the half-width of the quad in NDC units.
lineNormal *= 0.02;
lineOrthogonal *= 0.02;
// Emit the four corners of the quad; outLineDistance.xy is the normalized
// (along, across) distance used by the fragment shader (1.02 leaves a
// small margin for the endpoint caps), .z carries depth.
gl_Position = gl_PositionIn[0];
gl_Position.xy -= lineOrthogonal;
outLineDistance = vec3(-1.02, -1, gl_Position.z);
EmitVertex();

gl_Position = gl_PositionIn[0];
gl_Position.xy += lineOrthogonal;
outLineDistance = vec3(-1.02, 1, gl_Position.z);
EmitVertex();

gl_Position = gl_PositionIn[1];
gl_Position.xy -= lineOrthogonal;
outLineDistance = vec3(1.02, -1, gl_Position.z);
EmitVertex();

gl_Position = gl_PositionIn[1];
gl_Position.xy += lineOrthogonal;
outLineDistance = vec3(1.02, 1, gl_Position.z);
EmitVertex();

EndPrimitive();
}

View file

@ -0,0 +1,13 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Transforms laser vertices into clip space.
// BUG FIX(review): 'mvp' was used without being declared anywhere in this
// file (no include or injected prelude is visible), which fails GLSL
// compilation; declare it with an identity default so the host can set it.
uniform mat4 mvp = mat4(1);

in vec3 Position;

void main() {
    gl_Position = mvp * vec4(Position, 1);
}

View file

@ -0,0 +1,109 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
struct ReprojectionData {
mat4 projections[2];
mat4 inverseProjections[2];
mat4 rotation;
};
layout(std140) uniform reprojectionBuffer {
ReprojectionData reprojection;
};
in vec2 varTexCoord0;
out vec4 outFragColor;
uniform sampler2D sampler;
// Maps a [0,1] UV coordinate onto [-1,1] NDC XY, with z = 0 and w = 1.
vec4 toNdcSpaceFromUv(vec2 uv) {
    return vec4(uv * 2.0 - 1.0, 0.0, 1.0);
}
// Maps a side-by-side stereo UV onto per-eye NDC: the right half of the
// shared texture is first shifted back onto [0, 0.5), then both halves are
// stretched to the full [0,1] range before the standard UV->NDC mapping.
vec4 toNdcSpaceFromStereoUv(vec2 uv) {
    float x = (uv.x >= 0.5) ? (uv.x - 0.5) : uv.x;
    return toNdcSpaceFromUv(vec2(x * 2.0, uv.y));
}
// Perspective-divides an NDC position and maps its XY from [-1,1] to [0,1].
vec2 toUvFromNdcSpace(vec4 ndc) {
    return (ndc.xy / ndc.w) * 0.5 + 0.5;
}
// Reprojects a side-by-side stereo frame by the rotation delta between the
// render pose and the present pose: unproject each fragment to an eye-space
// ray, rotate it, reproject, and sample the source texture at the new UV.
void main() {
vec2 uv = varTexCoord0;
mat4 eyeInverseProjection;
mat4 eyeProjection;
// Clamp region for the current eye's half of the shared texture, so the
// reprojected sample can never bleed into the other eye.
vec2 uvmin = vec2(0.0);
vec2 uvmax = vec2(1.0);
// determine the correct projection and inverse projection to use.
if (uv.x < 0.5) {
uvmax.x = 0.5;
eyeInverseProjection = reprojection.inverseProjections[0];
eyeProjection = reprojection.projections[0];
} else {
uvmin.x = 0.5;
uvmax.x = 1.0;
eyeInverseProjection = reprojection.inverseProjections[1];
eyeProjection = reprojection.projections[1];
}
// Account for stereo in calculating the per-eye NDC coordinates
vec4 ndcSpace = toNdcSpaceFromStereoUv(varTexCoord0);
// Convert from NDC to eyespace
vec4 eyeSpace = eyeInverseProjection * ndcSpace;
eyeSpace /= eyeSpace.w;
// Convert to a normalized ray
vec3 ray = eyeSpace.xyz;
ray = normalize(ray);
// Adjust the ray by the rotation
// NOTE(review): assumes reprojection.rotation is a pure rotation (w stays 1,
// so the divide below is a no-op) — confirm against the host-side uniform.
vec4 ray4 = reprojection.rotation * vec4(ray, 1.0);
ray4 /= ray4.w;
ray = ray4.xyz;
// Project back on to the texture plane
ray *= eyeSpace.z / ray.z;
// Update the eyespace vector
eyeSpace.xyz = ray;
// Reproject back into NDC
ndcSpace = eyeProjection * eyeSpace;
// Calculate the new UV coordinates, folding the per-eye [0,1] result back
// into this eye's half of the side-by-side texture.
if (uv.x >= 0.5) {
uv = toUvFromNdcSpace(ndcSpace);
uv.x += 1.0;
} else {
uv = toUvFromNdcSpace(ndcSpace);
}
uv.x /= 2.0;
// Samples that fall outside the eye's region render as black.
if (any(greaterThan(uv, uvmax)) || any(lessThan(uv, uvmin))) {
outFragColor = vec4(0.0, 0.0, 0.0, 1.0);
} else {
outFragColor = texture(sampler, uv);
}
}

View file

@ -0,0 +1,18 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
layout(location = 0) out vec3 outPosition;
layout(location = 1) out vec2 outTexCoord;
// Full-screen pass-through: Position is already in clip space; forward the
// raw position and texcoord to the fragment stage.
void main() {
    gl_Position = vec4(Position, 1);
    outPosition = Position;
    outTexCoord = TexCoord;
}

View file

@ -0,0 +1,75 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform sampler2D sampler;
struct OverlayData {
mat4 mvp;
vec4 glowPoints;
vec4 glowColors[2];
vec4 resolutionRadiusAlpha;
};
layout(std140) uniform overlayBuffer {
OverlayData overlay;
};
vec2 resolution = overlay.resolutionRadiusAlpha.xy;
float radius = overlay.resolutionRadiusAlpha.z;
float alpha = overlay.resolutionRadiusAlpha.w;
vec4 glowPoints = overlay.glowPoints;
vec4 glowColors[2] = overlay.glowColors;
in vec3 vPosition;
in vec2 vTexCoord;
out vec4 FragColor;
// Cubic ease-in-out over [0,1] (classic Penner easing with d=1, b=0, c=1):
// accelerates through the first half, decelerates through the second.
float easeInOutCubic(float f) {
    float t = f * 2.0;
    if (t < 1.0) {
        return 0.5 * t * t * t;
    }
    t -= 2.0;
    return 0.5 * (t * t * t + 2.0);
}
// Overlay fragment shader: samples the UI texture and blends in a glow halo
// around up to two points (the hand-laser intersections in glowPoints.xy/zw).
void main() {
FragColor = texture(sampler, vTexCoord);

// Normalize by width so the glow radius is circular regardless of aspect.
vec2 aspect = resolution;
aspect /= resolution.x;

// Distance to the nearer of the two glow points decides the glow color.
float glowIntensity = 0.0;
float dist1 = distance(vTexCoord * aspect, glowPoints.xy * aspect);
float dist2 = distance(vTexCoord * aspect, glowPoints.zw * aspect);
float dist = min(dist1, dist2);
vec3 glowColor = glowColors[0].rgb;
if (dist2 < dist1) {
glowColor = glowColors[1].rgb;
}
if (dist <= radius) {
// Inside the radius: brighten toward the center with an eased falloff.
glowIntensity = 1.0 - (dist / radius);
glowColor.rgb = pow(glowColor, vec3(1.0 - glowIntensity));
glowIntensity = easeInOutCubic(glowIntensity);
glowIntensity = pow(glowIntensity, 0.5);
}

// Fully transparent overlay: show only the glow (or nothing at all).
if (alpha <= 0.0) {
if (glowIntensity <= 0.0) {
discard;
}

FragColor = vec4(glowColor, glowIntensity);
return;
}

// Otherwise mix the glow into the sampled color and apply global alpha.
FragColor.rgb = mix(FragColor.rgb, glowColor.rgb, glowIntensity);
FragColor.a *= alpha;
}

View file

@ -0,0 +1,32 @@
//
// Created by Bradley Austin Davis on 2016/07/11
// Copyright 2013-2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// Per-overlay parameters shared between shader stages via a std140 UBO.
struct OverlayData {
    mat4 mvp;                    // model-view-projection for the overlay quad
    vec4 glowPoints;             // xy/zw = two glow point UVs (consumed by the fragment stage)
    vec4 glowColors[2];          // colors for the two glow points
    vec4 resolutionRadiusAlpha;  // xy = resolution, z = glow radius, w = overlay alpha
};
layout(std140) uniform overlayBuffer {
    OverlayData overlay;
};

// Convenience alias for the packed transform.
mat4 mvp = overlay.mvp;

// Vertex attributes at locations 0 (position) and 3 (UV).
layout(location = 0) in vec3 Position;
layout(location = 3) in vec2 TexCoord;

// Outputs interpolated to the fragment stage.
out vec3 vPosition;
out vec2 vTexCoord;
// Transforms the overlay quad by the UBO-supplied MVP while forwarding the
// untransformed position and UV to the fragment stage.
void main() {
    vec4 modelPos = vec4(Position, 1.0);
    vPosition = Position;
    vTexCoord = TexCoord;
    gl_Position = mvp * modelPos;
}

View file

@ -161,6 +161,7 @@ public:
virtual void do_setStateScissorRect(Batch& batch, size_t paramOffset) final;
virtual GLuint getFramebufferID(const FramebufferPointer& framebuffer) = 0;
virtual GLuint getTextureID(const TexturePointer& texture, bool needTransfer = true) = 0;
protected:
@ -169,7 +170,6 @@ protected:
virtual GLuint getBufferID(const Buffer& buffer) = 0;
virtual GLBuffer* syncGPUObject(const Buffer& buffer) = 0;
virtual GLuint getTextureID(const TexturePointer& texture, bool needTransfer = true) = 0;
virtual GLTexture* syncGPUObject(const TexturePointer& texture, bool sync = true) = 0;
virtual GLuint getQueryID(const QueryPointer& query) = 0;

View file

@ -92,11 +92,13 @@ public:
GLuint result = object->_id;
// Don't return textures that are in transfer state
if ((object->getSyncState() != GLSyncState::Idle) ||
// Don't return transferrable textures that have never completed transfer
(!object->_transferrable || 0 != object->_transferCount)) {
// Will be either 0 or the original texture being downsampled.
result = object->_downsampleSource._texture;
if (shouldSync) {
if ((object->getSyncState() != GLSyncState::Idle) ||
// Don't return transferrable textures that have never completed transfer
(!object->_transferrable || 0 != object->_transferCount)) {
// Will be either 0 or the original texture being downsampled.
result = object->_downsampleSource._texture;
}
}
return result;

View file

@ -128,7 +128,6 @@ public:
Present = QEvent::User + 1
};
virtual bool isRenderThread() const { return false; }
virtual bool isHmd() const { return false; }
virtual int getHmdScreen() const { return -1; }
/// By default, all HMDs are stereo

View file

@ -14,7 +14,11 @@
#include <QtCore/QCoreApplication>
#include <QtCore/QThread>
extern bool isRenderThread();
QThread* RENDER_THREAD = nullptr;
bool isRenderThread() {
return QThread::currentThread() == RENDER_THREAD;
}
ProfileRange::ProfileRange(const char *name) {
if (!isRenderThread()) {

View file

@ -12,6 +12,11 @@
#ifdef _WIN32
#include <stdint.h>
#include <QtCore/QThread>
extern QThread* RENDER_THREAD;
extern bool isRenderThread();
class ProfileRange {
public:
ProfileRange(const char *name);

View file

@ -161,10 +161,3 @@ void PluginContainer::setBoolSetting(const QString& settingName, bool value) {
Setting::Handle<bool> settingValue(settingName, value);
return settingValue.set(value);
}
bool isRenderThread() {
return QThread::currentThread() != qApp->thread();
// FIXME causes a deadlock on switching display plugins
auto displayPlugin = PluginContainer::getInstance().getActiveDisplayPlugin();
return displayPlugin && displayPlugin->isRenderThread();
}

View file

@ -11,7 +11,7 @@
const QString OculusDebugDisplayPlugin::NAME("Oculus Rift (Simulator)");
static const QString DEBUG_FLAG("HIFI_DEBUG_OCULUS");
static bool enableDebugOculus = QProcessEnvironment::systemEnvironment().contains("HIFI_DEBUG_OCULUS");
static bool enableDebugOculus = true || QProcessEnvironment::systemEnvironment().contains("HIFI_DEBUG_OCULUS");
bool OculusDebugDisplayPlugin::isSupported() const {
if (!enableDebugOculus) {

View file

@ -110,8 +110,7 @@ void OculusDisplayPlugin::hmdPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
// Manually bind the texture to the FBO
auto& glBackend = dynamic_cast<gpu::gl::GLBackend&>(*_backend);
auto fbo = glBackend.getFramebufferID(_outputFramebuffer);
auto fbo = getGLBackend()->getFramebufferID(_outputFramebuffer);
{
int curIndex;
ovr_GetTextureSwapChainCurrentIndex(_session, _textureSwapChain, &curIndex);

View file

@ -6,7 +6,6 @@
# See the accompanying file LICENSE or http:#www.apache.org/licenses/LICENSE-2.0.html
#
if (FALSE)
if (WIN32)
# we're using static GLEW, so define GLEW_STATIC
add_definitions(-DGLEW_STATIC)
@ -14,7 +13,7 @@ if (WIN32)
setup_hifi_plugin(OpenGL Script Qml Widgets)
link_hifi_libraries(shared gl networking controllers ui
plugins display-plugins ui-plugins input-plugins script-engine
render-utils model gpu render model-networking fbx)
render-utils model gpu gpu-gl render model-networking fbx)
include_hifi_library_headers(octree)
@ -23,4 +22,3 @@ if (WIN32)
target_include_directories(${TARGET_NAME} PRIVATE ${OPENVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${OPENVR_LIBRARIES})
endif()
endif()

View file

@ -18,6 +18,9 @@
#include <GLMHelpers.h>
#include <gl/GlWindow.h>
#include <gpu/Frame.h>
#include <gpu/gl/GLBackend.h>
#include <controllers/Pose.h>
#include <PerfStat.h>
#include <ui-plugins/PluginContainer.h>
@ -32,16 +35,15 @@ const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
static vr::IVRCompositor* _compositor { nullptr };
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
PoseData _nextRenderPoseData;
PoseData _nextSimPoseData;
static mat4 _sensorResetMat;
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
bool _openVrDisplayActive { false };
bool OpenVrDisplayPlugin::isSupported() const {
return openVrSupported();
}
@ -82,7 +84,7 @@ bool OpenVrDisplayPlugin::internalActivate() {
// left + right eyes
_renderTargetSize.x *= 2;
withRenderThreadLock([&] {
withNonPresentThreadLock([&] {
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
@ -124,9 +126,6 @@ void OpenVrDisplayPlugin::internalDeactivate() {
if (_system) {
// Invalidate poses. It's fine if someone else sets these shared values, but we're about to stop updating them, and
// we don't want ViveControllerManager to consider old values to be valid.
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePose[i].bPoseIsValid = false;
}
releaseOpenVrSystem();
_system = nullptr;
}
@ -146,10 +145,11 @@ void OpenVrDisplayPlugin::customizeContext() {
}
void OpenVrDisplayPlugin::resetSensors() {
withRenderThreadLock([&] {
glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
glm::mat4 m;
withNonPresentThreadLock([&] {
m = toGlm(_nextSimPoseData.vrPoses[0].mDeviceToAbsoluteTracking);
});
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
}
static bool isBadPose(vr::HmdMatrix34_t* mat) {
@ -165,30 +165,21 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
QMetaObject::invokeMethod(qApp, "quit");
return false;
}
double displayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
double frameDuration = 1.f / displayFrequency;
double vsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
_currentRenderFrameInfo = FrameInfo();
#if THREADED_PRESENT
// 3 frames of prediction + vsyncToPhotons = 44ms total
const double NUM_PREDICTION_FRAMES = 3.0f;
_currentRenderFrameInfo.predictedDisplayTime = NUM_PREDICTION_FRAMES * frameDuration + vsyncToPhotons;
#else
_currentRenderFrameInfo.predictedDisplayTime = frameDuration + vsyncToPhotons;
#endif
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, _currentRenderFrameInfo.predictedDisplayTime, _trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
withNonPresentThreadLock([&] {
_currentRenderFrameInfo.renderPose = _nextSimPoseData.poses[vr::k_unTrackedDeviceIndex_Hmd];
});
// HACK: when interface is launched and steam vr is NOT running, openvr will return bad HMD poses for a few frames
// To workaround this, filter out any hmd poses that are obviously bad, i.e. beneath the floor.
if (isBadPose(&_trackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking)) {
if (isBadPose(&_nextSimPoseData.vrPoses[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking)) {
qDebug() << "WARNING: ignoring bad hmd pose from openvr";
// use the last known good HMD pose
_trackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking = _lastGoodHMDPose;
_nextSimPoseData.vrPoses[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking = _lastGoodHMDPose;
} else {
_lastGoodHMDPose = _trackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking;
_lastGoodHMDPose = _nextSimPoseData.vrPoses[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking;
}
vr::TrackedDeviceIndex_t handIndices[2] { vr::k_unTrackedDeviceIndexInvalid, vr::k_unTrackedDeviceIndexInvalid };
@ -197,7 +188,7 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
auto trackedCount = _system->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_Controller, controllerIndices, 2);
// Find the left and right hand controllers, if they exist
for (uint32_t i = 0; i < std::min<uint32_t>(trackedCount, 2); ++i) {
if (_trackedDevicePose[i].bPoseIsValid) {
if (_nextSimPoseData.vrPoses[i].bPoseIsValid) {
auto role = _system->GetControllerRoleForTrackedDeviceIndex(controllerIndices[i]);
if (vr::TrackedControllerRole_LeftHand == role) {
handIndices[0] = controllerIndices[i];
@ -208,14 +199,7 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
}
}
// copy and process predictedTrackedDevicePoses
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
_trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
_trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
}
_currentRenderFrameInfo.rawRenderPose = toGlm(_trackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking);
_currentRenderFrameInfo.renderPose = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd];
_currentRenderFrameInfo.renderPose = _nextSimPoseData.poses[vr::k_unTrackedDeviceIndex_Hmd];
bool keyboardVisible = isOpenVrKeyboardShown();
@ -226,16 +210,16 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
continue;
}
auto deviceIndex = handIndices[i];
const mat4& mat = _trackedDevicePoseMat4[deviceIndex];
const vec3& linearVelocity = _trackedDeviceLinearVelocities[deviceIndex];
const vec3& angularVelocity = _trackedDeviceAngularVelocities[deviceIndex];
const mat4& mat = _nextSimPoseData.poses[deviceIndex];
const vec3& linearVelocity = _nextSimPoseData.linearVelocities[deviceIndex];
const vec3& angularVelocity = _nextSimPoseData.angularVelocities[deviceIndex];
auto correctedPose = openVrControllerPoseToHandPose(i == 0, mat, linearVelocity, angularVelocity);
static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
handPoses[i] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
}
}
withRenderThreadLock([&] {
withNonPresentThreadLock([&] {
_uiModelTransform = DependencyManager::get<CompositorHelper>()->getModelTransform();
// Make controller poses available to the presentation thread
_handPoses = handPoses;
@ -245,24 +229,44 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
}
void OpenVrDisplayPlugin::hmdPresent() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentPresentFrameIndex)
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
// Flip y-axis since GL UV coords are backwards.
static vr::VRTextureBounds_t leftBounds { 0, 0, 0.5f, 1 };
static vr::VRTextureBounds_t rightBounds { 0.5f, 0, 1, 1 };
auto glTexId = getGLBackend()->getTextureID(_compositeTexture, false);
vr::Texture_t vrTexture{ (void*)glTexId, vr::API_OpenGL, vr::ColorSpace_Auto };
vr::Texture_t texture { (void*)oglplus::GetName(_compositeFramebuffer->color), vr::API_OpenGL, vr::ColorSpace_Auto };
_compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
_compositor->Submit(vr::Eye_Right, &texture, &rightBounds);
_compositor->Submit(vr::Eye_Left, &vrTexture, &leftBounds);
_compositor->Submit(vr::Eye_Right, &vrTexture, &rightBounds);
}
void OpenVrDisplayPlugin::postPreview() {
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentPresentFrameIndex)
// Clear
{
// We want to make sure the glFinish waits for the entire present to complete, not just the submission
// of the command. So, we do a clear here right here so the glFinish will wait fully for the swap.
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
glFlush();
}
vr::TrackedDevicePose_t currentTrackedDevicePose[vr::k_unMaxTrackedDeviceCount];
_compositor->WaitGetPoses(currentTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
_hmdActivityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
// Flush and wait for swap.
PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentFrame->frameIndex)
PoseData nextRender;
nextRender.frameIndex = presentCount();
vr::VRCompositor()->WaitGetPoses(nextRender.vrPoses, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
glm::mat4 resetMat;
withPresentThreadLock([&] {
resetMat = _sensorResetMat;
});
nextRender.update(resetMat);
withPresentThreadLock([&] {
_nextSimPoseData = nextRender;
});
_nextRenderPoseData = nextRender;
_hmdActivityLevel = vr::k_EDeviceActivityLevel_UserInteraction; // _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
}
bool OpenVrDisplayPlugin::isHmdMounted() const {
@ -270,25 +274,8 @@ bool OpenVrDisplayPlugin::isHmdMounted() const {
}
void OpenVrDisplayPlugin::updatePresentPose() {
mat4 sensorResetMat;
withPresentThreadLock([&] {
sensorResetMat = _sensorResetMat;
});
{
float fSecondsSinceLastVsync;
_system->GetTimeSinceLastVsync(&fSecondsSinceLastVsync, nullptr);
float fDisplayFrequency = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_DisplayFrequency_Float);
float fFrameDuration = 1.f / fDisplayFrequency;
float fVsyncToPhotons = _system->GetFloatTrackedDeviceProperty(vr::k_unTrackedDeviceIndex_Hmd, vr::Prop_SecondsFromVsyncToPhotons_Float);
float fPredictedSecondsFromNow = fFrameDuration - fSecondsSinceLastVsync + fVsyncToPhotons;
vr::TrackedDevicePose_t pose;
_system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, fPredictedSecondsFromNow, &pose, 1);
_currentPresentFrameInfo.rawPresentPose = toGlm(pose.mDeviceToAbsoluteTracking);
}
_currentPresentFrameInfo.presentPose = sensorResetMat * _currentPresentFrameInfo.rawPresentPose;
mat3 renderRotation(_currentPresentFrameInfo.rawRenderPose);
mat3 presentRotation(_currentPresentFrameInfo.rawPresentPose);
_currentPresentFrameInfo.presentReprojection = glm::mat3(glm::inverse(renderRotation) * presentRotation);
_currentPresentFrameInfo.presentPose = _nextRenderPoseData.poses[vr::k_unTrackedDeviceIndex_Hmd];
//_currentPresentFrameInfo.presentPose = _currentPresentFrameInfo.renderPose;
}
bool OpenVrDisplayPlugin::suppressKeyboard() {

View file

@ -118,7 +118,7 @@ static vr::IVROverlay* _overlay { nullptr };
static QObject* _keyboardFocusObject { nullptr };
static QString _existingText;
static Qt::InputMethodHints _currentHints;
extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
extern PoseData _nextSimPoseData;
static bool _keyboardShown { false };
static bool _overlayRevealed { false };
static const uint32_t SHOW_KEYBOARD_DELAY_MS = 400;
@ -160,7 +160,7 @@ void showOpenVrKeyboard(bool show = true) {
if (vr::VROverlayError_None == showKeyboardResult) {
_keyboardShown = true;
// Try to position the keyboard slightly below where the user is looking.
mat4 headPose = cancelOutRollAndPitch(toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking));
mat4 headPose = cancelOutRollAndPitch(toGlm(_nextSimPoseData.vrPoses[0].mDeviceToAbsoluteTracking));
mat4 keyboardTransform = glm::translate(headPose, vec3(0, -0.5, -1));
keyboardTransform = keyboardTransform * glm::rotate(mat4(), 3.14159f / 4.0f, vec3(-1, 0, 0));
auto keyboardTransformVr = toOpenVr(keyboardTransform);

View file

@ -59,4 +59,21 @@ inline vr::HmdMatrix34_t toOpenVr(const mat4& m) {
return result;
}
// Snapshot of one frame's OpenVR tracking data, kept both in the raw SDK
// form (vrPoses) and converted to GLM types in the sensor-reset space
// (poses / linearVelocities / angularVelocities).
struct PoseData {
    uint32_t frameIndex{ 0 };
    // Raw per-device poses as filled in by the OpenVR compositor.
    vr::TrackedDevicePose_t vrPoses[vr::k_unMaxTrackedDeviceCount];
    // Per-device transforms and velocities after applying the reset matrix.
    mat4 poses[vr::k_unMaxTrackedDeviceCount];
    vec3 linearVelocities[vr::k_unMaxTrackedDeviceCount];
    vec3 angularVelocities[vr::k_unMaxTrackedDeviceCount];

    // Converts every raw pose and velocity to GLM types, pre-multiplying by
    // resetMat (the sensor-reset transform) so results are expressed in the
    // calibrated tracking space.
    void update(const glm::mat4& resetMat) {
        // k_unMaxTrackedDeviceCount is unsigned; use an unsigned index to
        // avoid a signed/unsigned comparison in the loop condition.
        for (uint32_t i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
            poses[i] = resetMat * toGlm(vrPoses[i].mDeviceToAbsoluteTracking);
            linearVelocities[i] = transformVectorFast(resetMat, toGlm(vrPoses[i].vVelocity));
            angularVelocities[i] = transformVectorFast(resetMat, toGlm(vrPoses[i].vAngularVelocity));
        }
    }
};
controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity);

View file

@ -29,10 +29,7 @@
#include "OpenVrHelpers.h"
extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
extern vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
extern vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
extern PoseData _nextSimPoseData;
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
@ -48,6 +45,7 @@ static const QString RENDER_CONTROLLERS = "Render Hand Controllers";
const QString ViveControllerManager::NAME = "OpenVR";
bool ViveControllerManager::isSupported() const {
return false;
return openVrSupported();
}
@ -279,12 +277,12 @@ void ViveControllerManager::InputDevice::handleHandController(float deltaTime, u
if (_system->IsTrackedDeviceConnected(deviceIndex) &&
_system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_Controller &&
_trackedDevicePose[deviceIndex].bPoseIsValid) {
_nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) {
// process pose
const mat4& mat = _trackedDevicePoseMat4[deviceIndex];
const vec3 linearVelocity = _trackedDeviceLinearVelocities[deviceIndex];
const vec3 angularVelocity = _trackedDeviceAngularVelocities[deviceIndex];
const mat4& mat = _nextSimPoseData.poses[deviceIndex];
const vec3 linearVelocity = _nextSimPoseData.linearVelocities[deviceIndex];
const vec3 angularVelocity = _nextSimPoseData.angularVelocities[deviceIndex];
handlePoseEvent(deltaTime, inputCalibrationData, mat, linearVelocity, angularVelocity, isLeftHand);
vr::VRControllerState_t controllerState = vr::VRControllerState_t();
@ -428,7 +426,7 @@ void ViveControllerManager::InputDevice::hapticsHelper(float deltaTime, bool lef
if (_system->IsTrackedDeviceConnected(deviceIndex) &&
_system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_Controller &&
_trackedDevicePose[deviceIndex].bPoseIsValid) {
_nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) {
float strength = leftHand ? _leftHapticStrength : _rightHapticStrength;
float duration = leftHand ? _leftHapticDuration : _rightHapticDuration;