Merge pull request #7118 from jherico/hmd_mono

Remove distortion on HMD previews, both mono and stereo
Anthony Thibault 2016-02-17 15:39:48 -08:00
commit e863da5a68
17 changed files with 294 additions and 421 deletions

View file

@ -1532,7 +1532,7 @@ void Application::paintGL() {
// just relying on the left FOV in each case and hoping that the
// overall culling margin of error doesn't cause popping in the
// right eye. There are FIXMEs in the relevant plugins
_myCamera.setProjection(displayPlugin->getProjection(Mono, _myCamera.getProjection()));
_myCamera.setProjection(displayPlugin->getCullingProjection(_myCamera.getProjection()));
renderArgs._context->enableStereo(true);
mat4 eyeOffsets[2];
mat4 eyeProjections[2];
@ -1563,7 +1563,7 @@ void Application::paintGL() {
displayPlugin->setEyeRenderPose(_frameCount, eye, headPose);
eyeProjections[eye] = displayPlugin->getProjection(eye, baseProjection);
eyeProjections[eye] = displayPlugin->getEyeProjection(eye, baseProjection);
});
renderArgs._context->setStereoProjections(eyeProjections);
renderArgs._context->setStereoViews(eyeOffsets);
@ -5005,7 +5005,7 @@ void Application::updateInputModes() {
mat4 Application::getEyeProjection(int eye) const {
if (isHMDMode()) {
return getActiveDisplayPlugin()->getProjection((Eye)eye, _viewFrustum.getProjection());
return getActiveDisplayPlugin()->getEyeProjection((Eye)eye, _viewFrustum.getProjection());
}
return _viewFrustum.getProjection();
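
The hunks above replace the old getProjection(Mono, ...) and getProjection(eye, ...) calls with the new getCullingProjection() / getEyeProjection() pair. As a rough illustration of how per-eye data like this is typically consumed downstream (a minimal GLM sketch, not the engine's code; every name and constant below is local to the example), the following snippet derives an eye view matrix from a head pose and an eye-to-head offset:

#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <cstdio>

int main() {
    const float ipd = 0.064f;  // default interpupillary distance in metres, as in the plugin header
    // A stand-in head pose: 1.7 m above the origin, looking down -Z.
    glm::mat4 headPose = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 1.7f, 0.0f));

    for (int eye = 0; eye < 2; ++eye) {            // 0 = left, 1 = right
        // Eye-to-head transform: in the simplest case a pure horizontal
        // translation of +/- ipd / 2, which is what getEyeToHeadTransform()
        // returns for the basic HMD plugin in this PR.
        float sign = (eye == 0) ? -1.0f : 1.0f;
        glm::mat4 eyeOffset = glm::translate(glm::mat4(1.0f),
                                             glm::vec3(sign * ipd * 0.5f, 0.0f, 0.0f));

        // World-space eye pose, and the view matrix used to render that eye.
        glm::mat4 eyePose = headPose * eyeOffset;
        glm::mat4 eyeView = glm::inverse(eyePose);

        // Placeholder per-eye projection; an HMD plugin substitutes its own
        // (usually asymmetric) matrix via getEyeProjection().
        glm::mat4 eyeProjection = glm::perspective(glm::radians(90.0f), 1.0f, 0.08f, 512.0f);
        (void)eyeProjection;

        std::printf("eye %d view-space x offset: %f\n", eye, eyeView[3][0]);
    }
    return 0;
}

The wider matrix from getCullingProjection() is only used for frustum culling, so a projection that over-approximates both eye frusta can only cost a little culling efficiency; it cannot cause the popping the FIXME above warns about.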

View file

@ -0,0 +1,87 @@
//
// Created by Bradley Austin Davis on 2016/02/15
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "HmdDisplayPlugin.h"
#include <memory>
#include <glm/gtc/matrix_transform.hpp>
#include <QtCore/QLoggingCategory>
#include <QtWidgets/QWidget>
#include <GLMHelpers.h>
#include <plugins/PluginContainer.h>
#include "../Logging.h"
static const QString MONO_PREVIEW = "Mono Preview";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
static const bool DEFAULT_MONO_VIEW = true;
void HmdDisplayPlugin::activate() {
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), MONO_PREVIEW,
[this](bool clicked) {
_monoPreview = clicked;
_container->setBoolSetting("monoPreview", _monoPreview);
}, true, _monoPreview);
_container->removeMenu(FRAMERATE);
WindowOpenGLDisplayPlugin::activate();
}
void HmdDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
}
void HmdDisplayPlugin::customizeContext() {
WindowOpenGLDisplayPlugin::customizeContext();
// Only enable mirroring if we know vsync is disabled
enableVsync(false);
_enablePreview = !isVsyncEnabled();
}
void HmdDisplayPlugin::internalPresent() {
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
float windowAspect = aspect(windowSize);
float sceneAspect = aspect(_renderTargetSize);
if (_monoPreview) {
sceneAspect /= 2.0f;
}
float aspectRatio = sceneAspect / windowAspect;
uvec2 targetViewportSize = windowSize;
if (aspectRatio < 1.0f) {
targetViewportSize.x *= aspectRatio;
} else {
targetViewportSize.y /= aspectRatio;
}
uvec2 targetViewportPosition;
if (targetViewportSize.x < windowSize.x) {
targetViewportPosition.x = (windowSize.x - targetViewportSize.x) / 2;
} else if (targetViewportSize.y < windowSize.y) {
targetViewportPosition.y = (windowSize.y - targetViewportSize.y) / 2;
}
glClear(GL_COLOR_BUFFER_BIT);
glViewport(
targetViewportPosition.x, targetViewportPosition.y,
targetViewportSize.x * (_monoPreview ? 2 : 1), targetViewportSize.y);
glEnable(GL_SCISSOR_TEST);
glScissor(
targetViewportPosition.x, targetViewportPosition.y,
targetViewportSize.x, targetViewportSize.y);
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
drawUnitQuad();
glDisable(GL_SCISSOR_TEST);
swapBuffers();
}
}
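
Because the aspect-ratio arithmetic in internalPresent() above is easy to get wrong, here is the same letterboxing math pulled out into a standalone, testable sketch (the aspect() helper, the Viewport struct, and the sample sizes are local to this example, not plugin API):

#include <glm/glm.hpp>
#include <cstdio>

struct Viewport { glm::uvec2 position; glm::uvec2 size; };

static float aspect(const glm::uvec2& v) { return (float)v.x / (float)v.y; }

Viewport computePreviewViewport(glm::uvec2 windowSize, glm::uvec2 renderTargetSize, bool monoPreview) {
    float windowAspect = aspect(windowSize);
    float sceneAspect = aspect(renderTargetSize);
    if (monoPreview) {
        sceneAspect /= 2.0f;                       // only one eye (half the target) is previewed
    }
    float aspectRatio = sceneAspect / windowAspect;

    Viewport result { glm::uvec2(0), windowSize };
    if (aspectRatio < 1.0f) {
        result.size.x = (unsigned)(result.size.x * aspectRatio);   // pillarbox
    } else {
        result.size.y = (unsigned)(result.size.y / aspectRatio);   // letterbox
    }
    if (result.size.x < windowSize.x) {
        result.position.x = (windowSize.x - result.size.x) / 2;
    } else if (result.size.y < windowSize.y) {
        result.position.y = (windowSize.y - result.size.y) / 2;
    }
    return result;
}

int main() {
    // Sample numbers only: a 1280x720 window previewing a 2664x1586 stereo target.
    Viewport v = computePreviewViewport(glm::uvec2(1280, 720), glm::uvec2(2664, 1586), true);
    std::printf("viewport at %u,%u size %ux%u\n", v.position.x, v.position.y, v.size.x, v.size.y);
    return 0;
}

Note that in the plugin code above the glViewport width is doubled for the mono preview while glScissor keeps the computed size, so only the left-eye half of the side-by-side scene texture remains visible in the preview.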

View file

@ -0,0 +1,39 @@
//
// Created by Bradley Austin Davis on 2016/02/15
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include <QtGlobal>
#include "../WindowOpenGLDisplayPlugin.h"
class HmdDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
bool isHmd() const override final { return true; }
float getIPD() const override final { return _ipd; }
glm::mat4 getEyeToHeadTransform(Eye eye) const override final { return _eyeOffsets[eye]; }
glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override final { return _eyeProjections[eye]; }
glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const override final { return _cullingProjection; }
glm::uvec2 getRecommendedUiSize() const override final { return uvec2(1920, 1080); }
glm::uvec2 getRecommendedRenderSize() const override final { return _renderTargetSize; }
void activate() override;
void deactivate() override;
protected:
void internalPresent() override;
void customizeContext() override;
std::array<glm::mat4, 2> _eyeOffsets;
std::array<glm::mat4, 2> _eyeProjections;
glm::mat4 _cullingProjection;
glm::uvec2 _renderTargetSize;
float _ipd { 0.064f };
private:
bool _enablePreview { false };
bool _monoPreview { true };
};

View file

@ -39,16 +39,11 @@ const float DEFAULT_SEPARATION = DEFAULT_IPD / DEFAULT_SCREEN_WIDTH;
// Default convergence depth: where is the screen plane in the virtual space (which depth)
const float DEFAULT_CONVERGENCE = 0.5f;
glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
glm::mat4 StereoDisplayPlugin::getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
// Refer to http://www.nvidia.com/content/gtc-2010/pdfs/2010_gtc2010.pdf on creating
// stereo projection matrices. Do NOT use "toe-in", use translation.
// Updated version: http://developer.download.nvidia.com/assets/gamedev/docs/Siggraph2011-Stereoscopy_From_XY_to_Z-SG.pdf
if (eye == Mono) {
// FIXME provide a combined matrix, needed for proper culling
return baseProjection;
}
float frustumshift = DEFAULT_SEPARATION;
if (eye == Right) {
frustumshift = -frustumshift;

View file

@ -20,8 +20,8 @@ public:
virtual void deactivate() override;
virtual float getRecommendedAspectRatio() const override;
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const override;
// NOTE, because Stereo displays don't include head tracking, and therefore
// can't include roll or pitch, the eye separation is embedded into the projection
// matrix. However, this eliminates the possibility of easily manipulating

View file

@ -24,8 +24,7 @@ class QImage;
enum Eye {
Left,
Right,
Mono
Right
};
/*
@ -95,10 +94,15 @@ public:
}
// Stereo specific methods
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const {
virtual glm::mat4 getEyeProjection(Eye eye, const glm::mat4& baseProjection) const {
return baseProjection;
}
virtual glm::mat4 getCullingProjection(const glm::mat4& baseProjection) const {
return baseProjection;
}
// Fetch the most recently displayed image as a QImage
virtual QImage getScreenshot() const = 0;

View file

@ -11,22 +11,10 @@
#include "OculusHelpers.h"
uvec2 OculusBaseDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
void OculusBaseDisplayPlugin::resetSensors() {
ovr_RecenterPose(_session);
}
glm::mat4 OculusBaseDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return glm::translate(mat4(), toGlm(_eyeOffsets[eye]));
}
glm::mat4 OculusBaseDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
static uint32_t lastFrameSeen = 0;
auto displayTime = ovr_GetPredictedDisplayTime(_session, frameIndex);
@ -70,7 +58,7 @@ void OculusBaseDisplayPlugin::customizeContext() {
glewExperimental = true;
GLenum err = glewInit();
glGetError();
WindowOpenGLDisplayPlugin::customizeContext();
HmdDisplayPlugin::customizeContext();
}
void OculusBaseDisplayPlugin::init() {
@ -88,37 +76,31 @@ void OculusBaseDisplayPlugin::activate() {
qFatal("Failed to acquire HMD");
}
WindowOpenGLDisplayPlugin::activate();
HmdDisplayPlugin::activate();
_hmdDesc = ovr_GetHmdDesc(_session);
_ipd = ovr_GetFloat(_session, OVR_KEY_IPD, _ipd);
glm::uvec2 eyeSizes[2];
_viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_session, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
_eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_session, eye, erd.Fov, 1.0f));
_viewScaleDesc.HmdToEyeViewOffset[eye] = erd.HmdToEyeViewOffset;
});
ovrFovPort combined = _eyeFovs[Left];
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
auto combinedFov = _eyeFovs[0];
combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
_cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_desiredFramebufferSize = uvec2(
_renderTargetSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
@ -144,20 +126,8 @@ void OculusBaseDisplayPlugin::activate() {
}
void OculusBaseDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
#if (OVR_MAJOR_VERSION >= 6)
HmdDisplayPlugin::deactivate();
ovr_Destroy(_session);
_session = nullptr;
ovr_Shutdown();
#endif
}
float OculusBaseDisplayPlugin::getIPD() const {
float result = OVR_DEFAULT_IPD;
#if (OVR_MAJOR_VERSION >= 6)
result = ovr_GetFloat(_session, OVR_KEY_IPD, result);
#endif
return result;
}
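
The activate() hunk above builds _cullingProjection by widening a single ovrFovPort to the larger of its horizontal tangents and passing it to ovrMatrix4f_Projection. The same idea can be written without the Oculus SDK; the sketch below uses plain half-angle tangents and glm::frustum and, like the removed code, conservatively considers both eyes (the FovPort struct and the sample tangents are local to this example):

#include <algorithm>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>

struct FovPort { float upTan, downTan, leftTan, rightTan; };

glm::mat4 cullingProjectionFrom(const FovPort& leftEye, const FovPort& rightEye,
                                float nearZ, float farZ) {
    // Widen a single symmetric frustum until it encloses both eye frusta.
    FovPort combined = leftEye;
    combined.leftTan  = std::max({ leftEye.leftTan,  rightEye.leftTan,
                                   leftEye.rightTan, rightEye.rightTan });
    combined.rightTan = combined.leftTan;
    combined.upTan    = std::max(leftEye.upTan,   rightEye.upTan);
    combined.downTan  = std::max(leftEye.downTan, rightEye.downTan);
    return glm::frustum(-combined.leftTan * nearZ, combined.rightTan * nearZ,
                        -combined.downTan * nearZ, combined.upTan * nearZ,
                        nearZ, farZ);
}

int main() {
    // Sample, roughly HMD-like tangents; not taken from any particular device.
    FovPort left  { 1.33f, 1.33f, 1.06f, 1.09f };
    FovPort right { 1.33f, 1.33f, 1.09f, 1.06f };
    glm::mat4 culling = cullingProjectionFrom(left, right, 0.08f, 512.0f);
    (void)culling;
    return 0;
}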

View file

@ -7,13 +7,13 @@
//
#pragma once
#include <display-plugins/WindowOpenGLDisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <QTimer>
#include <OVR_CAPI_GL.h>
class OculusBaseDisplayPlugin : public WindowOpenGLDisplayPlugin {
class OculusBaseDisplayPlugin : public HmdDisplayPlugin {
public:
virtual bool isSupported() const override;
@ -24,25 +24,13 @@ public:
virtual void deactivate() override;
// Stereo specific methods
virtual bool isHmd() const override final { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override final;
virtual glm::uvec2 getRecommendedUiSize() const override final { return uvec2(1920, 1080); }
virtual void resetSensors() override final;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override final;
virtual float getIPD() const override final;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
protected:
virtual void customizeContext() override;
protected:
ovrVector3f _eyeOffsets[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
ovrSession _session;
ovrGraphicsLuid _luid;
float _ipd{ OVR_DEFAULT_IPD };
@ -50,23 +38,5 @@ protected:
ovrFovPort _eyeFovs[2];
ovrHmdDesc _hmdDesc;
ovrLayerEyeFov _sceneLayer;
ovrViewScaleDesc _viewScaleDesc;
};
#if (OVR_MAJOR_VERSION == 6)
#define ovr_Create ovrHmd_Create
#define ovr_CreateSwapTextureSetGL ovrHmd_CreateSwapTextureSetGL
#define ovr_CreateMirrorTextureGL ovrHmd_CreateMirrorTextureGL
#define ovr_Destroy ovrHmd_Destroy
#define ovr_DestroySwapTextureSet ovrHmd_DestroySwapTextureSet
#define ovr_DestroyMirrorTexture ovrHmd_DestroyMirrorTexture
#define ovr_GetFloat ovrHmd_GetFloat
#define ovr_GetFovTextureSize ovrHmd_GetFovTextureSize
#define ovr_GetFrameTiming ovrHmd_GetFrameTiming
#define ovr_GetTrackingState ovrHmd_GetTrackingState
#define ovr_GetRenderDesc ovrHmd_GetRenderDesc
#define ovr_RecenterPose ovrHmd_RecenterPose
#define ovr_SubmitFrame ovrHmd_SubmitFrame
#define ovr_ConfigureTracking ovrHmd_ConfigureTracking
#define ovr_GetHmdDesc(X) *X
#endif

View file

@ -6,150 +6,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusDisplayPlugin.h"
#include <QtOpenGL/QGLWidget>
// FIXME get rid of this
#include <gl/Config.h>
#include <plugins/PluginContainer.h>
#include "OculusHelpers.h"
#if (OVR_MAJOR_VERSION >= 6)
// A base class for FBO wrappers that need to use the Oculus C
// API to manage textures via ovr_CreateSwapTextureSetGL,
// ovr_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
ovrSession session;
RiftFramebufferWrapper(const ovrSession& session) : session(session) {
color = 0;
depth = 0;
};
~RiftFramebufferWrapper() {
destroyColor();
}
void Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
protected:
virtual void destroyColor() {
}
virtual void initDepth() override final {
}
};
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
SwapFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
void Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
protected:
virtual void destroyColor() override {
if (color) {
ovr_DestroySwapTextureSet(session, color);
color = nullptr;
}
}
virtual void initColor() override {
destroyColor();
if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
}
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
virtual void initDone() override {
}
virtual void onBind(oglplus::Framebuffer::Target target) override {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}
};
// We use a FBO to wrap the mirror texture because it makes it easier to
// render to the screen via glBlitFramebuffer
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
MirrorFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) { }
private:
virtual void destroyColor() override {
if (color) {
ovr_DestroyMirrorTexture(session, (ovrTexture*)color);
color = nullptr;
}
}
void initColor() override {
destroyColor();
ovrResult result = ovr_CreateMirrorTextureGL(session, GL_SRGB8_ALPHA8, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
void initDone() override {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
};
#endif
const QString OculusDisplayPlugin::NAME("Oculus Rift");
static const QString MONO_PREVIEW = "Mono Preview";
static const QString FRAMERATE = DisplayPlugin::MENU_PATH() + ">Framerate";
static const bool DEFAULT_MONO_VIEW = true;
void OculusDisplayPlugin::activate() {
_monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), MONO_PREVIEW,
[this](bool clicked) {
_monoPreview = clicked;
_container->setBoolSetting("monoPreview", _monoPreview);
}, true, _monoPreview);
_container->removeMenu(FRAMERATE);
OculusBaseDisplayPlugin::activate();
}
@ -182,27 +43,8 @@ void OculusDisplayPlugin::internalPresent() {
}
using namespace oglplus;
// Need to make sure only the display plugin is responsible for
// controlling vsync
wglSwapIntervalEXT(0);
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
if (_monoPreview) {
Context::Viewport(windowSize.x * 2, windowSize.y);
Context::Scissor(0, windowSize.y, windowSize.x, windowSize.y);
} else {
Context::Viewport(windowSize.x, windowSize.y);
}
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
drawUnitQuad();
}
const auto& size = _sceneFbo->size;
_sceneFbo->Bound([&] {
auto size = _sceneFbo->size;
Context::Viewport(size.x, size.y);
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
//glEnable(GL_FRAMEBUFFER_SRGB);
@ -225,27 +67,17 @@ void OculusDisplayPlugin::internalPresent() {
_sceneLayer.RenderPose[ovrEyeType::ovrEye_Right] = eyePoses.second;
{
ovrViewScaleDesc viewScaleDesc;
viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
viewScaleDesc.HmdToEyeViewOffset[0] = _eyeOffsets[0];
viewScaleDesc.HmdToEyeViewOffset[1] = _eyeOffsets[1];
ovrLayerHeader* layers = &_sceneLayer.Header;
ovrResult result = ovr_SubmitFrame(_session, frameIndex, &viewScaleDesc, &layers, 1);
ovrResult result = ovr_SubmitFrame(_session, frameIndex, &_viewScaleDesc, &layers, 1);
if (!OVR_SUCCESS(result)) {
qDebug() << result;
}
}
_sceneFbo->Increment();
/*
The swapbuffer call here is only required if we want to mirror the content to the screen.
However, it should only be done if we can reliably disable v-sync on the mirror surface,
otherwise the swapbuffer delay will interfere with the framerate of the headset
*/
if (_enablePreview) {
swapBuffers();
}
// Handle mirroring to screen in base class
HmdDisplayPlugin::internalPresent();
}
void OculusDisplayPlugin::setEyeRenderPose(uint32_t frameIndex, Eye eye, const glm::mat4& pose) {

View file

@ -8,16 +8,69 @@
#include "OculusHelpers.h"
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
namespace Oculus {
ovrHmd _hmd;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[2];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
SwapFramebufferWrapper::SwapFramebufferWrapper(const ovrSession& session)
: _session(session) {
color = nullptr;
depth = nullptr;
}
SwapFramebufferWrapper::~SwapFramebufferWrapper() {
destroyColor();
}
void SwapFramebufferWrapper::Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
void SwapFramebufferWrapper::Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
void SwapFramebufferWrapper::destroyColor() {
if (color) {
ovr_DestroySwapTextureSet(_session, color);
color = nullptr;
}
}
void SwapFramebufferWrapper::initColor() {
destroyColor();
if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(_session, GL_SRGB8_ALPHA8, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
}
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
void SwapFramebufferWrapper::initDone() {
}
void SwapFramebufferWrapper::onBind(oglplus::Framebuffer::Target target) {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
void SwapFramebufferWrapper::onUnbind(oglplus::Framebuffer::Target target) {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}

View file

@ -12,9 +12,7 @@
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
#if (OVR_MAJOR_VERSION < 6)
#define OVR_SUCCESS(x) x
#endif
#include <gl/OglplusHelpers.h>
// Convenience method for looping over each eye with a lambda
template <typename Function>
@ -87,3 +85,26 @@ inline ovrPosef ovrPoseFromGlm(const glm::mat4 & m) {
result.Position = ovrFromGlm(translation);
return result;
}
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public FramebufferWrapper<ovrSwapTextureSet*, void*> {
SwapFramebufferWrapper(const ovrSession& session);
~SwapFramebufferWrapper();
void Increment();
void Resize(const uvec2 & size);
protected:
void initColor() override final;
void initDepth() override final {}
void initDone() override final;
void onBind(oglplus::Framebuffer::Target target) override final;
void onUnbind(oglplus::Framebuffer::Target target) override final;
void destroyColor();
private:
ovrSession _session;
};
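
For readers new to the swap-texture-set pattern the comments describe (render into the current texture, hand it to the compositor, then advance), here is a toy, SDK-free stand-in that shows only the cycling logic. SlotChain is invented for this sketch; the real wrapper holds an ovrSwapTextureSet and frames are submitted through ovr_SubmitFrame:

#include <array>
#include <cstdio>

struct SlotChain {
    std::array<int, 3> textures {{ 101, 102, 103 }};   // pretend GL texture names
    int currentIndex { 0 };
    int current() const { return textures[currentIndex]; }
    void increment() { currentIndex = (currentIndex + 1) % (int)textures.size(); }
};

int main() {
    SlotChain chain;
    for (int frame = 0; frame < 4; ++frame) {
        int target = chain.current();
        std::printf("frame %d: render into texture %d, then submit it\n", frame, target);
        chain.increment();   // the next frame writes the following texture in the set
    }
    return 0;
}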

View file

@ -31,29 +31,15 @@ const QString OculusLegacyDisplayPlugin::NAME("Oculus Rift (0.5) (Legacy)");
OculusLegacyDisplayPlugin::OculusLegacyDisplayPlugin() {
}
uvec2 OculusLegacyDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
glm::mat4 OculusLegacyDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
void OculusLegacyDisplayPlugin::resetSensors() {
ovrHmd_RecenterPose(_hmd);
}
glm::mat4 OculusLegacyDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
static uint32_t lastFrameSeen = 0;
if (frameIndex > lastFrameSeen) {
Lock lock(_mutex);
_trackingState = ovrHmd_GetTrackingState(_hmd, ovr_GetTimeInSeconds());
ovrHmd_GetEyePoses(_hmd, frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
lastFrameSeen = frameIndex;
}
return toGlm(_trackingState.HeadPose.ThePose);
@ -87,7 +73,7 @@ bool OculusLegacyDisplayPlugin::isSupported() const {
}
void OculusLegacyDisplayPlugin::activate() {
WindowOpenGLDisplayPlugin::activate();
HmdDisplayPlugin::activate();
if (!(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
@ -100,30 +86,26 @@ void OculusLegacyDisplayPlugin::activate() {
qFatal("Failed to acquire HMD");
}
_ipd = ovrHmd_GetFloat(_hmd, OVR_KEY_IPD, _ipd);
glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
_eyeOffsets[eye] = glm::translate(mat4(), toGlm(erd.HmdToEyeViewOffset));
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
ovrFovPort combined = _eyeFovs[Left];
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
_desiredFramebufferSize = uvec2(eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
auto combinedFov = _eyeFovs[0];
combinedFov.LeftTan = combinedFov.RightTan = std::max(combinedFov.LeftTan, combinedFov.RightTan);
_cullingProjection = toGlm(ovrMatrix4f_Projection(combinedFov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded));
_renderTargetSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
if (!ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
@ -132,13 +114,12 @@ void OculusLegacyDisplayPlugin::activate() {
}
void OculusLegacyDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
HmdDisplayPlugin::deactivate();
ovrHmd_Destroy(_hmd);
_hmd = nullptr;
ovr_Shutdown();
}
// DLL based display plugins MUST initialize GLEW inside the DLL code.
void OculusLegacyDisplayPlugin::customizeContext() {
static std::once_flag once;
@ -147,7 +128,7 @@ void OculusLegacyDisplayPlugin::customizeContext() {
glewInit();
glGetError();
});
WindowOpenGLDisplayPlugin::customizeContext();
HmdDisplayPlugin::customizeContext();
#if 0
ovrGLConfig config; memset(&config, 0, sizeof(ovrRenderAPIConfig));
auto& header = config.Config.Header;
@ -179,7 +160,7 @@ void OculusLegacyDisplayPlugin::customizeContext() {
#if 0
void OculusLegacyDisplayPlugin::uncustomizeContext() {
WindowOpenGLDisplayPlugin::uncustomizeContext();
HmdDisplayPlugin::uncustomizeContext();
}
void OculusLegacyDisplayPlugin::internalPresent() {
@ -200,3 +181,4 @@ float OculusLegacyDisplayPlugin::getTargetFrameRate() {
return TARGET_RATE_OculusLegacy;
}

View file

@ -7,7 +7,7 @@
//
#pragma once
#include <display-plugins/WindowOpenGLDisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <QTimer>
@ -15,7 +15,7 @@
const float TARGET_RATE_OculusLegacy = 75.0f;
class OculusLegacyDisplayPlugin : public WindowOpenGLDisplayPlugin {
class OculusLegacyDisplayPlugin : public HmdDisplayPlugin {
public:
OculusLegacyDisplayPlugin();
virtual bool isSupported() const override;
@ -27,12 +27,7 @@ public:
virtual int getHmdScreen() const override;
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
virtual float getTargetFrameRate() override;
@ -50,12 +45,7 @@ private:
ovrHmd _hmd;
mutable ovrTrackingState _trackingState;
ovrEyeRenderDesc _eyeRenderDescs[2];
mutable ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
//ovrTexture _eyeTextures[2]; // FIXME - not currently in use
mutable int _hmdScreen { -1 };
bool _hswDismissed { false };

View file

@ -25,10 +25,8 @@
#include "OpenVrHelpers.h"
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_LOGGING_CATEGORY(displayplugins, "hifi.displayplugins")
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
static vr::IVRCompositor* _compositor{ nullptr };
@ -36,40 +34,7 @@ static vr::TrackedDevicePose_t _presentThreadTrackedDevicePose[vr::k_unMaxTracke
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
static mat4 _sensorResetMat;
static uvec2 _windowSize;
static uvec2 _renderTargetSize;
struct PerEyeData {
//uvec2 _viewportOrigin;
//uvec2 _viewportSize;
mat4 _projectionMatrix;
mat4 _eyeOffset;
mat4 _pose;
};
static PerEyeData _eyesData[2];
template<typename F>
void openvr_for_each_eye(F f) {
f(vr::Hmd_Eye::Eye_Left);
f(vr::Hmd_Eye::Eye_Right);
}
mat4 toGlm(const vr::HmdMatrix44_t& m) {
return glm::transpose(glm::make_mat4(&m.m[0][0]));
}
mat4 toGlm(const vr::HmdMatrix34_t& m) {
mat4 result = mat4(
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
m.m[0][1], m.m[1][1], m.m[2][1], 0.0,
m.m[0][2], m.m[1][2], m.m[2][2], 0.0,
m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
return result;
}
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
bool OpenVrDisplayPlugin::isSupported() const {
return vr::VR_IsHmdPresent();
@ -91,15 +56,17 @@ void OpenVrDisplayPlugin::activate() {
{
Lock lock(_poseMutex);
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
PerEyeData& eyeData = _eyesData[eye];
eyeData._projectionMatrix = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
eyeData._eyeOffset = toGlm(_hmd->GetEyeToHeadTransform(eye));
_eyeOffsets[eye] = toGlm(_hmd->GetEyeToHeadTransform(eye));
_eyeProjections[eye] = toGlm(_hmd->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
});
// FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
_cullingProjection = _eyeProjections[0];
}
_compositor = vr::VRCompositor();
Q_ASSERT(_compositor);
WindowOpenGLDisplayPlugin::activate();
HmdDisplayPlugin::activate();
}
void OpenVrDisplayPlugin::deactivate() {
@ -109,34 +76,18 @@ void OpenVrDisplayPlugin::deactivate() {
_hmd = nullptr;
}
_compositor = nullptr;
WindowOpenGLDisplayPlugin::deactivate();
HmdDisplayPlugin::deactivate();
}
void OpenVrDisplayPlugin::customizeContext() {
// Display plugins in DLLs must initialize glew locally
static std::once_flag once;
std::call_once(once, []{
glewExperimental = true;
GLenum err = glewInit();
glGetError();
});
WindowOpenGLDisplayPlugin::customizeContext();
enableVsync(false);
// Only enable mirroring if we know vsync is disabled
_enablePreview = !isVsyncEnabled();
}
uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
return _renderTargetSize;
}
mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) const {
// FIXME hack to ensure that we don't crash trying to get the combined matrix
if (eye == Mono) {
eye = Left;
}
Lock lock(_poseMutex);
return _eyesData[eye]._projectionMatrix;
HmdDisplayPlugin::customizeContext();
}
void OpenVrDisplayPlugin::resetSensors() {
@ -145,41 +96,17 @@ void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
}
glm::mat4 OpenVrDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
Lock lock(_poseMutex);
return _eyesData[eye]._eyeOffset;
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
Lock lock(_poseMutex);
return _trackedDevicePoseMat4[0];
}
void OpenVrDisplayPlugin::submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) {
WindowOpenGLDisplayPlugin::submitSceneTexture(frameIndex, sceneTexture, sceneSize);
}
void OpenVrDisplayPlugin::internalPresent() {
// Flip y-axis since GL UV coords are backwards.
static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };
// screen preview mirroring
if (_enablePreview) {
auto windowSize = toGlm(_window->size());
if (_monoPreview) {
glViewport(0, 0, windowSize.x * 2, windowSize.y);
glScissor(0, windowSize.y, windowSize.x, windowSize.y);
} else {
glViewport(0, 0, windowSize.x, windowSize.y);
}
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
GLenum err = glGetError();
Q_ASSERT(0 == err);
drawUnitQuad();
}
vr::Texture_t texture{ (void*)_currentSceneTexture, vr::API_OpenGL, vr::ColorSpace_Auto };
_compositor->Submit(vr::Eye_Left, &texture, &leftBounds);
@ -187,10 +114,6 @@ void OpenVrDisplayPlugin::internalPresent() {
glFinish();
if (_enablePreview) {
swapBuffers();
}
_compositor->WaitGetPoses(_presentThreadTrackedDevicePose, vr::k_unMaxTrackedDeviceCount, nullptr, 0);
{
@ -200,10 +123,8 @@ void OpenVrDisplayPlugin::internalPresent() {
_trackedDevicePose[i] = _presentThreadTrackedDevicePose[i];
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
}
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
});
}
//WindowOpenGLDisplayPlugin::internalPresent();
// Handle the mirroring in the base class
HmdDisplayPlugin::internalPresent();
}

View file

@ -11,15 +11,14 @@
#include <openvr.h>
#include <display-plugins/WindowOpenGLDisplayPlugin.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
const float TARGET_RATE_OpenVr = 90.0f; // FIXME: get from sdk tracked device property? This number is vive-only.
class OpenVrDisplayPlugin : public WindowOpenGLDisplayPlugin {
class OpenVrDisplayPlugin : public HmdDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString& getName() const override { return NAME; }
virtual bool isHmd() const override { return true; }
virtual float getTargetFrameRate() override { return TARGET_RATE_OpenVr; }
@ -28,16 +27,9 @@ public:
virtual void customizeContext() override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
// Stereo specific methods
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
virtual void submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) override;
protected:
virtual void internalPresent() override;
@ -45,8 +37,6 @@ protected:
private:
vr::IVRSystem* _hmd { nullptr };
static const QString NAME;
bool _enablePreview { false };
bool _monoPreview { true };
mutable Mutex _poseMutex;
};

View file

@ -16,6 +16,7 @@
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;

View file

@ -15,3 +15,21 @@
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
template<typename F>
void openvr_for_each_eye(F f) {
f(vr::Hmd_Eye::Eye_Left);
f(vr::Hmd_Eye::Eye_Right);
}
inline mat4 toGlm(const vr::HmdMatrix44_t& m) {
return glm::transpose(glm::make_mat4(&m.m[0][0]));
}
inline mat4 toGlm(const vr::HmdMatrix34_t& m) {
mat4 result = mat4(
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
m.m[0][1], m.m[1][1], m.m[2][1], 0.0,
m.m[0][2], m.m[1][2], m.m[2][2], 0.0,
m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
return result;
}
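
A quick way to sanity-check the HmdMatrix34_t conversion above is to feed it a known pose. The snippet below does that with a locally defined struct that mirrors OpenVR's float m[3][4] layout, so it builds without the SDK headers:

#include <glm/glm.hpp>
#include <cstdio>

struct RowMajor34 { float m[3][4]; };

glm::mat4 toGlm34(const RowMajor34& m) {
    return glm::mat4(
        m.m[0][0], m.m[1][0], m.m[2][0], 0.0f,
        m.m[0][1], m.m[1][1], m.m[2][1], 0.0f,
        m.m[0][2], m.m[1][2], m.m[2][2], 0.0f,
        m.m[0][3], m.m[1][3], m.m[2][3], 1.0f);
}

int main() {
    // Identity rotation with a translation of (1, 2, 3) in the last column.
    RowMajor34 pose {{ {1, 0, 0, 1}, {0, 1, 0, 2}, {0, 0, 1, 3} }};
    glm::mat4 g = toGlm34(pose);
    // GLM is column-major, so the translation ends up in column 3.
    std::printf("translation: %g %g %g\n", g[3][0], g[3][1], g[3][2]);
    return 0;
}

Because GLM matrices are column-major while the OpenVR matrices are row-major, the translation stored in the last column of the 3x4 input ends up in column 3 of the resulting mat4.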