Cleanup plugin interface, break up oculus plugins

Brad Davis 2015-08-19 19:09:21 -07:00
parent 76f236adf6
commit 7fb491e48c
21 changed files with 465 additions and 381 deletions

View file

@ -5,6 +5,21 @@ set(EXTERNAL_NAME LibOVR)
string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
# These are all provided in order to allow easier testing of both
# the legacy display plugin and the new windows only plugin on
# various versions of the SDK, all on windows
#
# 0.5 public
# URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
# URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
# 0.6 public
# URL http://static.oculus.com/sdk-downloads/0.6.0.1/Public/1435190862/ovr_sdk_win_0.6.0.1.zip
# URL_MD5 4b3ef825f9a1d6d3035c9f6820687da9
# 0.7 alpha
# URL https://s3.amazonaws.com/static.oculus.com/sdk-downloads/0.7.0.0/Public/Alpha/ovr_sdk_win_0.7.0.0_RC1.zip
# URL_MD5 a562bb9d117087b2cf9d86653ea70fd8
if (WIN32)
ExternalProject_Add(

View file

@ -1058,18 +1058,29 @@ void Application::paintGL() {
// Using the latter will cause the camera to wobble with idle animations,
// or with changes from the face tracker
renderArgs._renderMode = RenderArgs::DEFAULT_RENDER_MODE;
- _myCamera.setPosition(_myAvatar->getDefaultEyePosition());
- _myCamera.setRotation(_myAvatar->getOrientation());
- } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
- _myCamera.setRotation(_myAvatar->getOrientation());
- // https://www.youtube.com/watch?v=pFriRcIwqNU
- vec3 boomStick = glm::vec3(0.0f, 0.0f, 1.0f) * _myAvatar->getBoomLength() * _myAvatar->getScale();
- quat boomRotation = _myAvatar->getOrientation();
- if (!isHMDMode() && Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
- boomRotation = _myCamera.getRotation();
- }
- _myCamera.setPosition(_myAvatar->getDefaultEyePosition() + boomRotation * boomStick);
+ if (!getActiveDisplayPlugin()->isHmd()) {
+ _myCamera.setPosition(_myAvatar->getDefaultEyePosition());
+ _myCamera.setRotation(_myAvatar->getHead()->getCameraOrientation());
+ } else {
+ mat4 camMat = _myAvatar->getSensorToWorldMatrix() * _myAvatar->getHMDSensorMatrix();
+ _myCamera.setPosition(extractTranslation(camMat));
+ _myCamera.setRotation(glm::quat_cast(camMat));
+ }
+ } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
+ if (isHMDMode()) {
+ _myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
+ } else {
+ _myCamera.setRotation(_myAvatar->getHead()->getOrientation());
+ }
+ if (Menu::getInstance()->isOptionChecked(MenuOption::CenterPlayerInView)) {
+ _myCamera.setPosition(_myAvatar->getDefaultEyePosition() +
+ _myCamera.getRotation() * glm::vec3(0.0f, 0.0f, 1.0f) * _myAvatar->getBoomLength() * _myAvatar->getScale());
+ } else {
+ _myCamera.setPosition(_myAvatar->getDefaultEyePosition() +
+ _myAvatar->getOrientation() * glm::vec3(0.0f, 0.0f, 1.0f) * _myAvatar->getBoomLength() * _myAvatar->getScale());
+ }
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setPosition(_myAvatar->getDefaultEyePosition() +
@ -1115,7 +1126,7 @@ void Application::paintGL() {
// FIXME we don't need to set these every frame,
// only when the display plugin changes
for_each_eye([&](Eye eye) {
- eyeViews[eye] = displayPlugin->getModelview(eye, mat4());
+ eyeViews[eye] = displayPlugin->getView(eye, mat4());
eyeProjections[eye] = displayPlugin->getProjection(eye, baseProjection);
});
renderArgs._context->setStereoProjections(eyeProjections);
@ -4952,7 +4963,7 @@ mat4 Application::getEyePose(int eye) const {
mat4 Application::getEyeOffset(int eye) const {
if (isHMDMode()) {
mat4 identity;
- return getActiveDisplayPlugin()->getModelview((Eye)eye, identity);
+ return getActiveDisplayPlugin()->getView((Eye)eye, identity);
}
return mat4();
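Note on the HMD branch above: the camera is now taken straight from the composed sensor-to-world and HMD-sensor matrices rather than from the avatar's head. A minimal GLM sketch of that decomposition, with placeholder rig matrices (extractTranslation() in the diff is the application's helper; its effect is written out inline here):

    // Sketch only: splitting a composed rig matrix into camera position/rotation with GLM.
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    struct CameraPose {
        glm::vec3 position;
        glm::quat rotation;
    };

    CameraPose cameraFromRig(const glm::mat4& sensorToWorld, const glm::mat4& hmdSensorPose) {
        glm::mat4 camMat = sensorToWorld * hmdSensorPose; // same composition as the diff above
        CameraPose pose;
        pose.position = glm::vec3(camMat[3]);             // translation column (what extractTranslation returns)
        pose.rotation = glm::quat_cast(camMat);           // rotation part of the upper-left 3x3
        return pose;
    }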

View file

@ -30,11 +30,11 @@ void Basic2DWindowOpenGLDisplayPlugin::activate() {
CONTAINER->unsetFullscreen();
}
}, true, false);
- MainWindowOpenGLDisplayPlugin::activate();
+ WindowOpenGLDisplayPlugin::activate();
}
void Basic2DWindowOpenGLDisplayPlugin::deactivate() {
- MainWindowOpenGLDisplayPlugin::deactivate();
+ WindowOpenGLDisplayPlugin::deactivate();
}
int Basic2DWindowOpenGLDisplayPlugin::getDesiredInterval(bool isThrottled) const {

View file

@ -7,10 +7,10 @@
//
#pragma once
- #include "MainWindowOpenGLDisplayPlugin.h"
+ #include "WindowOpenGLDisplayPlugin.h"
class QScreen;
- class Basic2DWindowOpenGLDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
+ class Basic2DWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
Q_OBJECT
public:

View file

@ -15,8 +15,8 @@
#include "Basic2DWindowOpenGLDisplayPlugin.h" #include "Basic2DWindowOpenGLDisplayPlugin.h"
#include "openvr/OpenVrDisplayPlugin.h" #include "openvr/OpenVrDisplayPlugin.h"
#include "oculus/Oculus_0_5_DisplayPlugin.h" #include "oculus/OculusDisplayPlugin.h"
#include "oculus/Oculus_0_6_DisplayPlugin.h" #include "oculus/OculusLegacyDisplayPlugin.h"
// TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class // TODO migrate to a DLL model where plugins are discovered and loaded at runtime by the PluginManager class
DisplayPluginList getDisplayPlugins() { DisplayPluginList getDisplayPlugins() {
@ -32,9 +32,13 @@ DisplayPluginList getDisplayPlugins() {
//new InterleavedStereoDisplayPlugin(),
// HMDs
- new Oculus_0_5_DisplayPlugin(),
- new Oculus_0_6_DisplayPlugin(),
+ // Windows Oculus SDK
+ new OculusDisplayPlugin(),
+ // Mac/Linux Oculus SDK (0.5)
+ new OculusLegacyDisplayPlugin(),
#ifdef Q_OS_WIN
+ // SteamVR SDK
new OpenVrDisplayPlugin(),
#endif
nullptr
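The trailing nullptr above suggests the plugin pool is a sentinel-terminated static array that getDisplayPlugins() walks to build the returned list. A rough sketch of that idiom with stand-in types (not the actual PluginManager code):

    // Hypothetical sketch of a nullptr-terminated plugin pool, for illustration only.
    #include <memory>
    #include <vector>

    struct StubDisplayPlugin { virtual ~StubDisplayPlugin() = default; };
    using StubDisplayPluginList = std::vector<std::shared_ptr<StubDisplayPlugin>>;

    StubDisplayPluginList buildPluginList() {
        StubDisplayPlugin* pool[] = {
            new StubDisplayPlugin(), // e.g. the 2D desktop plugin
            new StubDisplayPlugin(), // e.g. an HMD plugin, possibly behind an #ifdef
            nullptr                  // sentinel terminator, mirroring the list above
        };
        StubDisplayPluginList result;
        for (int i = 0; pool[i] != nullptr; ++i) {
            result.push_back(std::shared_ptr<StubDisplayPlugin>(pool[i]));
        }
        return result;
    }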

View file

@ -97,20 +97,16 @@ public:
return baseProjection;
}
- virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const {
- return glm::inverse(getEyePose(eye)) * baseModelview;
+ virtual glm::mat4 getView(Eye eye, const glm::mat4& baseView) const {
+ return glm::inverse(getEyePose(eye)) * baseView;
}
// HMD specific methods
- // TODO move these into another class
+ // TODO move these into another class?
virtual glm::mat4 getEyePose(Eye eye) const {
static const glm::mat4 pose; return pose;
}
- virtual glm::vec3 getEyeOffset(Eye eye) const {
- static const glm::vec3 offset; return offset;
- }
virtual glm::mat4 getHeadPose() const {
static const glm::mat4 pose; return pose;
}
@ -119,11 +115,6 @@ public:
virtual void resetSensors() {}
virtual float devicePixelRatio() { return 1.0; }
- //// The window for the surface, used for event interception. May be null.
- //virtual QWindow* getWindow() const = 0;
- //virtual void installEventFilter(QObject* filter) {}
- //virtual void removeEventFilter(QObject* filter) {}
signals:
void recommendedFramebufferSizeChanged(const QSize & size);
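With getModelview() and getEyeOffset() gone, a plugin only reports an eye pose and the base class folds it into the view matrix via inverse(eyePose) * baseView, as shown above. A small GLM sketch of that contract from the caller's side:

    // Sketch, assuming GLM: per-eye view from a base (camera) view and the plugin's eye pose.
    #include <glm/glm.hpp>

    glm::mat4 eyeViewFromPose(const glm::mat4& baseView, const glm::mat4& eyePose) {
        // Matches the default DisplayPlugin::getView() above: the eye pose moves the eye
        // away from the head origin, so the view transform applies its inverse.
        return glm::inverse(eyePose) * baseView;
    }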

View file

@ -1,9 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "MainWindowOpenGLDisplayPlugin.h"

View file

@ -1,13 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "WindowOpenGLDisplayPlugin.h"
class MainWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
};

View file

@ -1,82 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OculusBaseDisplayPlugin.h"
#include <ViewFrustum.h>
#include "OculusHelpers.h"
using namespace Oculus;
OculusBaseDisplayPlugin::OculusBaseDisplayPlugin() : _ipd(OVR_DEFAULT_IPD) {
}
void OculusBaseDisplayPlugin::activate() {
glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
// We handle the eye offsets slightly differently, using an _ipd in the base class
// _eyeOffsets[eye] = erd.HmdToEyeViewOffset;
_eyeOffsets[eye] = { 0, 0, 0 };
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
_ipd = ovrHmd_GetFloat(_hmd, OVR_KEY_IPD, _ipd);
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
MainWindowOpenGLDisplayPlugin::activate();
}
uvec2 OculusBaseDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
void OculusBaseDisplayPlugin::preRender() {
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, nullptr);
}
glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
glm::mat4 OculusBaseDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
return baseModelview * toGlm(_eyePoses[eye]);
}
void OculusBaseDisplayPlugin::resetSensors() {
ovrHmd_RecenterPose(_hmd);
}
glm::mat4 OculusBaseDisplayPlugin::getEyePose(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
// any use of head poses for rendering, ensuring you use the correct eye
glm::mat4 OculusBaseDisplayPlugin::getHeadPose() const {
ovrTrackingState state = ovrHmd_GetTrackingState(_hmd, 0.0f);
return toGlm(state.HeadPose.ThePose);
}
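The deleted base class above leans on toGlm() from OculusHelpers.h to turn SDK poses into GLM matrices. For reference, a standalone sketch of the equivalent conversion, written against a plain quaternion-plus-position pair rather than the real ovrPosef so it compiles without the SDK:

    // Sketch: rigid-transform matrix from an orientation quaternion and a position,
    // which is essentially what toGlm(ovrPosef) produces (ovrPosef carries those two fields).
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    glm::mat4 poseToMat4(const glm::quat& orientation, const glm::vec3& position) {
        return glm::translate(glm::mat4(1.0f), position) * glm::mat4_cast(orientation);
    }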

View file

@ -1,28 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "../MainWindowOpenGLDisplayPlugin.h"
class OculusBaseDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
public:
OculusBaseDisplayPlugin();
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
virtual void activate() override;
virtual void preRender() override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:
float _ipd;
};

View file

@ -5,7 +5,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
- #include "Oculus_0_6_DisplayPlugin.h"
+ #include "OculusDisplayPlugin.h"
#include <memory>
@ -15,9 +15,7 @@
#include <GlWindow.h>
#include <QEvent>
#include <QResizeEvent>
- #include <QThread>
- #include <OVR_CAPI_GL.h>
#include <OglplusHelpers.h>
#include <oglplus/opt/list_init.hpp>
@ -27,18 +25,34 @@
#include <PerfStat.h>
#include <plugins/PluginContainer.h>
+ #include <ViewFrustum.h>
#include "OculusHelpers.h"
- using namespace Oculus;
- #if (OVR_MAJOR_VERSION == 6)
- SwapFboPtr _sceneFbo;
- MirrorFboPtr _mirrorFbo;
- ovrLayerEyeFov _sceneLayer;
+ #if (OVR_MAJOR_VERSION == 6)
+ #define ovr_Create ovrHmd_Create
+ #define ovr_CreateSwapTextureSetGL ovrHmd_CreateSwapTextureSetGL
+ #define ovr_CreateMirrorTextureGL ovrHmd_CreateMirrorTextureGL
+ #define ovr_Destroy ovrHmd_Destroy
+ #define ovr_DestroySwapTextureSet ovrHmd_DestroySwapTextureSet
+ #define ovr_DestroyMirrorTexture ovrHmd_DestroyMirrorTexture
+ #define ovr_GetFloat ovrHmd_GetFloat
+ #define ovr_GetFovTextureSize ovrHmd_GetFovTextureSize
+ #define ovr_GetFrameTiming ovrHmd_GetFrameTiming
+ #define ovr_GetTrackingState ovrHmd_GetTrackingState
+ #define ovr_GetRenderDesc ovrHmd_GetRenderDesc
+ #define ovr_RecenterPose ovrHmd_RecenterPose
+ #define ovr_SubmitFrame ovrHmd_SubmitFrame
+ #define ovr_ConfigureTracking ovrHmd_ConfigureTracking
+ #define ovr_GetHmdDesc(X) *X
+ #endif
+ #if (OVR_MAJOR_VERSION >= 6)
// A base class for FBO wrappers that need to use the Oculus C
- // API to manage textures via ovrHmd_CreateSwapTextureSetGL,
- // ovrHmd_CreateMirrorTextureGL, etc
+ // API to manage textures via ovr_CreateSwapTextureSetGL,
+ // ovr_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
ovrHmd hmd;
@ -73,7 +87,7 @@ struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*
~SwapFramebufferWrapper() {
if (color) {
- ovrHmd_DestroySwapTextureSet(hmd, color);
+ ovr_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
}
@ -86,11 +100,11 @@ struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*
protected:
virtual void initColor() override {
if (color) {
- ovrHmd_DestroySwapTextureSet(hmd, color);
+ ovr_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
- if (!OVR_SUCCESS(ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color))) {
+ if (!OVR_SUCCESS(ovr_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color))) {
qFatal("Unable to create swap textures");
}
@ -127,7 +141,7 @@ struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
virtual ~MirrorFramebufferWrapper() {
if (color) {
- ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
+ ovr_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
}
@ -135,10 +149,10 @@ struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
private:
void initColor() override {
if (color) {
- ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
+ ovr_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
- ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
+ ovrResult result = ovr_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
@ -149,52 +163,128 @@ private:
}
};
#endif
+ const QString OculusDisplayPlugin::NAME("Oculus Rift");
- const QString Oculus_0_6_DisplayPlugin::NAME("Oculus Rift");
+ uvec2 OculusDisplayPlugin::getRecommendedRenderSize() const {
+ return _desiredFramebufferSize;
+ }
- const QString & Oculus_0_6_DisplayPlugin::getName() const {
+ void OculusDisplayPlugin::preRender() {
+ #if (OVR_MAJOR_VERSION >= 6)
+ ovrFrameTiming ftiming = ovr_GetFrameTiming(_hmd, _frameIndex);
+ _trackingState = ovr_GetTrackingState(_hmd, ftiming.DisplayMidpointSeconds);
+ ovr_CalcEyePoses(_trackingState.HeadPose.ThePose, _eyeOffsets, _eyePoses);
+ #endif
+ }
+ glm::mat4 OculusDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
+ return _eyeProjections[eye];
+ }
+ void OculusDisplayPlugin::resetSensors() {
+ #if (OVR_MAJOR_VERSION >= 6)
+ ovr_RecenterPose(_hmd);
+ #endif
+ }
+ glm::mat4 OculusDisplayPlugin::getEyePose(Eye eye) const {
+ return toGlm(_eyePoses[eye]);
+ }
+ glm::mat4 OculusDisplayPlugin::getHeadPose() const {
+ return toGlm(_trackingState.HeadPose.ThePose);
+ }
+ const QString & OculusDisplayPlugin::getName() const {
return NAME;
}
- bool Oculus_0_6_DisplayPlugin::isSupported() const {
- #if (OVR_MAJOR_VERSION == 6)
- if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
- return false;
- }
- bool result = false;
- if (ovrHmd_Detect() > 0) {
- result = true;
- }
- ovr_Shutdown();
- return result;
+ bool OculusDisplayPlugin::isSupported() const {
+ #if (OVR_MAJOR_VERSION >= 6)
+ return true;
#else
return false;
#endif
}
+ void OculusDisplayPlugin::init() {
+ if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
+ qFatal("Could not init OVR");
+ }
+ }
- #if (OVR_MAJOR_VERSION == 6)
- ovrLayerEyeFov& getSceneLayer() {
+ void OculusDisplayPlugin::deinit() {
+ ovr_Shutdown();
+ }
+ #if (OVR_MAJOR_VERSION >= 6)
+ ovrLayerEyeFov& OculusDisplayPlugin::getSceneLayer() {
return _sceneLayer;
}
#endif
//static gpu::TexturePointer _texture;
- void Oculus_0_6_DisplayPlugin::activate() {
- #if (OVR_MAJOR_VERSION == 6)
+ void OculusDisplayPlugin::activate() {
+ #if (OVR_MAJOR_VERSION >= 6)
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
qFatal("Failed to Initialize SDK");
}
- if (!OVR_SUCCESS(ovrHmd_Create(0, &_hmd))) {
+ // CONTAINER->getPrimarySurface()->makeCurrent();
+ #if (OVR_MAJOR_VERSION == 6)
+ if (!OVR_SUCCESS(ovr_Create(0, &_hmd))) {
+ #elif (OVR_MAJOR_VERSION == 7)
+ if (!OVR_SUCCESS(ovr_Create(&_hmd, &_luid))) {
+ #endif
Q_ASSERT(false);
qFatal("Failed to acquire HMD");
}
- OculusBaseDisplayPlugin::activate();
+ _hmdDesc = ovr_GetHmdDesc(_hmd);
+ _ipd = ovr_GetFloat(_hmd, OVR_KEY_IPD, _ipd);
+ glm::uvec2 eyeSizes[2];
+ ovr_for_each_eye([&](ovrEyeType eye) {
+ _eyeFovs[eye] = _hmdDesc.DefaultEyeFov[eye];
+ ovrEyeRenderDesc& erd = _eyeRenderDescs[eye] = ovr_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
+ ovrMatrix4f ovrPerspectiveProjection =
+ ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
+ _eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
+ ovrPerspectiveProjection =
+ ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
+ _compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
+ _eyeOffsets[eye] = erd.HmdToEyeViewOffset;
+ eyeSizes[eye] = toGlm(ovr_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
+ });
+ ovrFovPort combined = _eyeFovs[Left];
+ combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
+ combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
+ ovrMatrix4f ovrPerspectiveProjection =
+ ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
+ _eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
+ _desiredFramebufferSize = uvec2(
+ eyeSizes[0].x + eyeSizes[1].x,
+ std::max(eyeSizes[0].y, eyeSizes[1].y));
+ _frameIndex = 0;
+ if (!OVR_SUCCESS(ovr_ConfigureTracking(_hmd,
+ ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
+ qFatal("Could not attach to sensor device");
+ }
+ WindowOpenGLDisplayPlugin::activate();
// Parent class relies on our _hmd intialization, so it must come after that.
ovrLayerEyeFov& sceneLayer = getSceneLayer();
@ -203,7 +293,7 @@ void Oculus_0_6_DisplayPlugin::activate() {
sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
ovr_for_each_eye([&](ovrEyeType eye) {
ovrFovPort & fov = sceneLayer.Fov[eye] = _eyeRenderDescs[eye].Fov;
- ovrSizei & size = sceneLayer.Viewport[eye].Size = ovrHmd_GetFovTextureSize(_hmd, eye, fov, 1.0f);
+ ovrSizei & size = sceneLayer.Viewport[eye].Size = ovr_GetFovTextureSize(_hmd, eye, fov, 1.0f);
sceneLayer.Viewport[eye].Pos = { eye == ovrEye_Left ? 0 : size.w, 0 };
});
// We're rendering both eyes to the same texture, so only one of the
@ -214,17 +304,16 @@ void Oculus_0_6_DisplayPlugin::activate() {
PerformanceTimer::setActive(true);
- if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
+ if (!OVR_SUCCESS(ovr_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
#endif
}
- void Oculus_0_6_DisplayPlugin::customizeContext() {
- #if (OVR_MAJOR_VERSION == 6)
- OculusBaseDisplayPlugin::customizeContext();
+ void OculusDisplayPlugin::customizeContext() {
+ WindowOpenGLDisplayPlugin::customizeContext();
+ #if (OVR_MAJOR_VERSION >= 6)
//_texture = DependencyManager::get<TextureCache>()->
// getImageTexture(PathUtils::resourcesPath() + "/images/cube_texture.png");
uvec2 mirrorSize = toGlm(_window->geometry().size());
@ -236,24 +325,29 @@ void Oculus_0_6_DisplayPlugin::customizeContext() {
#endif
}
- void Oculus_0_6_DisplayPlugin::deactivate() {
- #if (OVR_MAJOR_VERSION == 6)
+ void OculusDisplayPlugin::deactivate() {
+ #if (OVR_MAJOR_VERSION >= 6)
makeCurrent();
_sceneFbo.reset();
_mirrorFbo.reset();
doneCurrent();
PerformanceTimer::setActive(false);
- OculusBaseDisplayPlugin::deactivate();
- ovrHmd_Destroy(_hmd);
+ WindowOpenGLDisplayPlugin::deactivate();
+ ovr_Destroy(_hmd);
_hmd = nullptr;
ovr_Shutdown();
#endif
}
- void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
- #if (OVR_MAJOR_VERSION == 6)
+ void OculusDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
+ static bool inDisplay = false;
+ if (inDisplay) {
+ return;
+ }
+ inDisplay = true;
+ #if (OVR_MAJOR_VERSION >= 6)
using namespace oglplus;
// Need to make sure only the display plugin is responsible for
// controlling vsync
@ -263,6 +357,7 @@ void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
auto size = _sceneFbo->size;
Context::Viewport(size.x, size.y);
glBindTexture(GL_TEXTURE_2D, finalTexture);
+ GLenum err = glGetError();
drawUnitQuad();
});
@ -280,17 +375,25 @@ void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
the UI visible in the output window (unlikely). This should be done before
_sceneFbo->Increment or we're be using the wrong texture
*/
- //_sceneFbo->Bound(GL_READ_FRAMEBUFFER, [&] {
- // glBlitFramebuffer(
- // 0, 0, _sceneFbo->size.x, _sceneFbo->size.y,
- // 0, 0, windowSize.x, _mirrorFbo.y,
- // GL_COLOR_BUFFER_BIT, GL_NEAREST);
- //});
+ _sceneFbo->Bound(Framebuffer::Target::Read, [&] {
+ glBlitFramebuffer(
+ 0, 0, _sceneFbo->size.x, _sceneFbo->size.y,
+ 0, 0, windowSize.x, windowSize.y,
+ GL_COLOR_BUFFER_BIT, GL_NEAREST);
+ });
{
PerformanceTimer("OculusSubmit");
+ ovrViewScaleDesc viewScaleDesc;
+ viewScaleDesc.HmdSpaceToWorldScaleInMeters = 1.0f;
+ viewScaleDesc.HmdToEyeViewOffset[0] = _eyeOffsets[0];
+ viewScaleDesc.HmdToEyeViewOffset[1] = _eyeOffsets[1];
ovrLayerHeader* layers = &sceneLayer.Header;
- ovrResult result = ovrHmd_SubmitFrame(_hmd, _frameIndex, nullptr, &layers, 1);
+ ovrResult result = ovr_SubmitFrame(_hmd, 0, &viewScaleDesc, &layers, 1);
+ if (!OVR_SUCCESS(result)) {
+ qDebug() << result;
+ }
}
_sceneFbo->Increment();
@ -299,21 +402,22 @@ void Oculus_0_6_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
will contain the post-distorted and fully composited scene regardless of how many layers
we send.
*/
- auto mirrorSize = _mirrorFbo->size;
- _mirrorFbo->Bound(Framebuffer::Target::Read, [&] {
- Context::BlitFramebuffer(
- 0, mirrorSize.y, mirrorSize.x, 0,
- 0, 0, windowSize.x, windowSize.y,
- BufferSelectBit::ColorBuffer, BlitFilter::Nearest);
- });
+ //auto mirrorSize = _mirrorFbo->size;
+ //_mirrorFbo->Bound(Framebuffer::Target::Read, [&] {
+ // Context::BlitFramebuffer(
+ // 0, mirrorSize.y, mirrorSize.x, 0,
+ // 0, 0, windowSize.x, windowSize.y,
+ // BufferSelectBit::ColorBuffer, BlitFilter::Nearest);
+ //});
++_frameIndex;
#endif
+ inDisplay = false;
}
// Pass input events on to the application
- bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
- #if (OVR_MAJOR_VERSION == 6)
+ bool OculusDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
+ #if (OVR_MAJOR_VERSION >= 6)
if (event->type() == QEvent::Resize) {
QResizeEvent* resizeEvent = static_cast<QResizeEvent*>(event);
qDebug() << resizeEvent->size().width() << " x " << resizeEvent->size().height();
@ -323,7 +427,7 @@ bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
doneCurrent();
}
#endif
- return OculusBaseDisplayPlugin::eventFilter(receiver, event);
+ return WindowOpenGLDisplayPlugin::eventFilter(receiver, event);
}
/*
@ -331,8 +435,8 @@ bool Oculus_0_6_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
However, it should only be done if we can reliably disable v-sync on the mirror surface,
otherwise the swapbuffer delay will interefere with the framerate of the headset
*/
- void Oculus_0_6_DisplayPlugin::finishFrame() {
- swapBuffers();
+ void OculusDisplayPlugin::finishFrame() {
+ //swapBuffers();
doneCurrent();
};
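The #define block near the top of this file aliases the 0.6 SDK's ovrHmd_-prefixed entry points to the 0.7-style ovr_ names, so the rest of the file is written once against the newer spelling. A stripped-down, self-contained illustration of the same aliasing trick, using made-up function names rather than the Oculus API:

    // Illustration of the alias-macro shim above, with hypothetical "SDK" functions.
    #include <cstdio>

    // Old-style spelling of an entry point, as an older SDK might export it.
    static float legacySdk_GetFloat(const char* /*key*/, float fallback) { return fallback; }

    #define BUILDING_AGAINST_LEGACY_SDK 1
    #if BUILDING_AGAINST_LEGACY_SDK
    // Map the new-style name onto the old entry point, exactly like the ovr_* defines above.
    #define unifiedSdk_GetFloat legacySdk_GetFloat
    #endif

    int main() {
        // Call sites use the new spelling everywhere; the macro rewrites them on the old SDK.
        float ipd = unifiedSdk_GetFloat("IPD", 0.064f);
        std::printf("ipd = %.3f\n", ipd);
        return 0;
    }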

View file

@ -0,0 +1,78 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "../WindowOpenGLDisplayPlugin.h"
#include <QTimer>
#include <OVR_CAPI.h>
class OffscreenGlCanvas;
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
using MirrorFboPtr = QSharedPointer<MirrorFramebufferWrapper>;
class OculusDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void init() override;
virtual void deinit() override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:
virtual void preRender() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
virtual void customizeContext() override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
ovrHmd _hmd;
float _ipd{ OVR_DEFAULT_IPD };
unsigned int _frameIndex;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[3];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
ovrTrackingState _trackingState;
#if (OVR_MAJOR_VERSION >= 6)
ovrLayerEyeFov& getSceneLayer();
ovrHmdDesc _hmdDesc;
SwapFboPtr _sceneFbo;
MirrorFboPtr _mirrorFbo;
ovrLayerEyeFov _sceneLayer;
#endif
#if (OVR_MAJOR_VERSION == 7)
ovrGraphicsLuid _luid;
#endif
};

View file

@ -7,7 +7,7 @@
//
#pragma once
- #include <OVR_CAPI.h>
+ #include <OVR_CAPI_GL.h>
#include <GLMHelpers.h>
#include <glm/gtc/type_ptr.hpp>
#include <glm/gtc/matrix_transform.hpp>
@ -79,14 +79,3 @@ inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
- namespace Oculus {
- extern ovrHmd _hmd;
- extern unsigned int _frameIndex;
- extern ovrEyeRenderDesc _eyeRenderDescs[2];
- extern ovrPosef _eyePoses[2];
- extern ovrVector3f _eyeOffsets[2];
- extern ovrFovPort _eyeFovs[2];
- extern mat4 _eyeProjections[2];
- extern mat4 _compositeEyeProjections[2];
- extern uvec2 _desiredFramebufferSize;
- }

View file

@ -5,7 +5,7 @@
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
- #include "Oculus_0_5_DisplayPlugin.h"
+ #include "OculusLegacyDisplayPlugin.h"
#include <memory>
@ -19,34 +19,66 @@
#include <QGuiApplication>
#include <QScreen>
- #include <OVR_CAPI_GL.h>
#include <PerfStat.h>
#include <OglplusHelpers.h>
+ #include <ViewFrustum.h>
#include "plugins/PluginContainer.h"
#include "OculusHelpers.h"
- using namespace Oculus;
- ovrTexture _eyeTextures[2];
- int _hmdScreen{ -1 };
- bool _hswDismissed{ false };
- DisplayPlugin* makeOculusDisplayPlugin() {
- return new Oculus_0_5_DisplayPlugin();
- }
using namespace oglplus;
- const QString Oculus_0_5_DisplayPlugin::NAME("Oculus Rift (0.5)");
+ const QString OculusLegacyDisplayPlugin::NAME("Oculus Rift (0.5)");
- const QString & Oculus_0_5_DisplayPlugin::getName() const {
+ const QString & OculusLegacyDisplayPlugin::getName() const {
return NAME;
}
+ OculusLegacyDisplayPlugin::OculusLegacyDisplayPlugin() : _ipd(OVR_DEFAULT_IPD) {
+ }
- bool Oculus_0_5_DisplayPlugin::isSupported() const {
+ uvec2 OculusLegacyDisplayPlugin::getRecommendedRenderSize() const {
+ return _desiredFramebufferSize;
+ }
+ void OculusLegacyDisplayPlugin::preRender() {
+ #if (OVR_MAJOR_VERSION == 5)
+ ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
+ ovrHmd_BeginFrame(_hmd, _frameIndex);
+ #endif
+ WindowOpenGLDisplayPlugin::preRender();
+ }
+ glm::mat4 OculusLegacyDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
+ return _eyeProjections[eye];
+ }
+ void OculusLegacyDisplayPlugin::resetSensors() {
+ #if (OVR_MAJOR_VERSION == 5)
+ ovrHmd_RecenterPose(_hmd);
+ #endif
+ }
+ glm::mat4 OculusLegacyDisplayPlugin::getEyePose(Eye eye) const {
+ #if (OVR_MAJOR_VERSION == 5)
+ return toGlm(_eyePoses[eye]);
+ #else
+ return WindowOpenGLDisplayPlugin::getEyePose(eye);
+ #endif
+ }
+ // Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
+ // any use of head poses for rendering, ensuring you use the correct eye
+ glm::mat4 OculusLegacyDisplayPlugin::getHeadPose() const {
+ #if (OVR_MAJOR_VERSION == 5)
+ return toGlm(_trackingState.HeadPose.ThePose);
+ #else
+ return WindowOpenGLDisplayPlugin::getHeadPose();
+ #endif
+ }
+ bool OculusLegacyDisplayPlugin::isSupported() const {
#if (OVR_MAJOR_VERSION == 5)
if (!ovr_Initialize(nullptr)) {
return false;
@ -77,7 +109,7 @@ bool Oculus_0_5_DisplayPlugin::isSupported() const {
#endif
}
- void Oculus_0_5_DisplayPlugin::activate() {
+ void OculusLegacyDisplayPlugin::activate() {
#if (OVR_MAJOR_VERSION == 5)
if (!OVR_SUCCESS(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
@ -89,7 +121,34 @@ void Oculus_0_5_DisplayPlugin::activate() {
qFatal("Failed to acquire HMD"); qFatal("Failed to acquire HMD");
} }
OculusBaseDisplayPlugin::activate(); glm::uvec2 eyeSizes[2];
ovr_for_each_eye([&](ovrEyeType eye) {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
if (!OVR_SUCCESS(ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0))) {
qFatal("Could not attach to sensor device");
}
WindowOpenGLDisplayPlugin::activate();
int screen = getHmdScreen(); int screen = getHmdScreen();
if (screen != -1) { if (screen != -1) {
CONTAINER->setFullscreen(qApp->screens()[screen]); CONTAINER->setFullscreen(qApp->screens()[screen]);
@ -118,17 +177,16 @@ void Oculus_0_5_DisplayPlugin::activate() {
}
});
- ovrEyeRenderDesc _eyeRenderDescs[ovrEye_Count];
ovrBool result = ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
Q_ASSERT(result);
#endif
}
- void Oculus_0_5_DisplayPlugin::deactivate() {
+ void OculusLegacyDisplayPlugin::deactivate() {
#if (OVR_MAJOR_VERSION == 5)
_window->removeEventFilter(this);
- OculusBaseDisplayPlugin::deactivate();
+ WindowOpenGLDisplayPlugin::deactivate();
QScreen* riftScreen = nullptr;
if (_hmdScreen >= 0) {
@ -142,18 +200,11 @@ void Oculus_0_5_DisplayPlugin::deactivate() {
#endif
}
- void Oculus_0_5_DisplayPlugin::preRender() {
- #if (OVR_MAJOR_VERSION == 5)
- OculusBaseDisplayPlugin::preRender();
- ovrHmd_BeginFrame(_hmd, _frameIndex);
- #endif
- }
- void Oculus_0_5_DisplayPlugin::preDisplay() {
+ void OculusLegacyDisplayPlugin::preDisplay() {
_window->makeCurrent();
}
- void Oculus_0_5_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
+ void OculusLegacyDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
++_frameIndex;
#if (OVR_MAJOR_VERSION == 5)
ovr_for_each_eye([&](ovrEyeType eye) {
@ -164,7 +215,7 @@ void Oculus_0_5_DisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sc
}
// Pass input events on to the application
- bool Oculus_0_5_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
+ bool OculusLegacyDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
#if (OVR_MAJOR_VERSION == 5)
if (!_hswDismissed && (event->type() == QEvent::KeyPress)) {
static ovrHSWDisplayState hswState;
@ -176,17 +227,19 @@ bool Oculus_0_5_DisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
}
}
#endif
- return OculusBaseDisplayPlugin::eventFilter(receiver, event);
+ return WindowOpenGLDisplayPlugin::eventFilter(receiver, event);
}
// FIXME mirroring tot he main window is diffucult on OSX because it requires that we
// trigger a swap, which causes the client to wait for the v-sync of the main screen running
// at 60 Hz. This would introduce judder. Perhaps we can push mirroring to a separate
// thread
- void Oculus_0_5_DisplayPlugin::finishFrame() {
+ // FIXME If we move to the 'batch rendering on a different thread' we can possibly do this.
+ // however, we need to make sure it doesn't block the event handling.
+ void OculusLegacyDisplayPlugin::finishFrame() {
_window->doneCurrent();
};
- int Oculus_0_5_DisplayPlugin::getHmdScreen() const {
+ int OculusLegacyDisplayPlugin::getHmdScreen() const {
return _hmdScreen;
}

View file

@ -0,0 +1,63 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "../WindowOpenGLDisplayPlugin.h"
#include <QTimer>
#include <OVR_CAPI.h>
class OculusLegacyDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
OculusLegacyDisplayPlugin();
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual int getHmdScreen() const override;
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
protected:
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
float _ipd{ OVR_DEFAULT_IPD };
ovrHmd _hmd;
unsigned int _frameIndex;
ovrTrackingState _trackingState;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[2];
mat4 _compositeEyeProjections[2];
uvec2 _desiredFramebufferSize;
ovrTexture _eyeTextures[2];
mutable int _hmdScreen{ -1 };
bool _hswDismissed{ false };
};

View file

@ -1,37 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "OculusBaseDisplayPlugin.h"
#include <QTimer>
class Oculus_0_5_DisplayPlugin : public OculusBaseDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual int getHmdScreen() const override;
protected:
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
};

View file

@ -1,41 +0,0 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "OculusBaseDisplayPlugin.h"
#include <QTimer>
class OffscreenGlCanvas;
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
using SwapFboPtr = QSharedPointer<SwapFramebufferWrapper>;
using MirrorFboPtr = QSharedPointer<MirrorFramebufferWrapper>;
class Oculus_0_6_DisplayPlugin : public OculusBaseDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
protected:
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
virtual void customizeContext() override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
static const QString NAME;
};

View file

@ -128,7 +128,7 @@ void OpenVrDisplayPlugin::activate() {
delete[] buffer;
}
Q_ASSERT(unSize <= 1);
- MainWindowOpenGLDisplayPlugin::activate();
+ WindowOpenGLDisplayPlugin::activate();
}
void OpenVrDisplayPlugin::deactivate() {
@ -141,7 +141,7 @@ void OpenVrDisplayPlugin::deactivate() {
_hmd = nullptr;
}
_compositor = nullptr;
- MainWindowOpenGLDisplayPlugin::deactivate();
+ WindowOpenGLDisplayPlugin::deactivate();
}
uvec2 OpenVrDisplayPlugin::getRecommendedRenderSize() const {
@ -152,16 +152,12 @@ mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) con
return _eyesData[eye]._projectionMatrix;
}
- glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
- return baseModelview * getEyePose(eye);
- }
void OpenVrDisplayPlugin::resetSensors() {
_sensorResetMat = glm::inverse(cancelOutRollAndPitch(_trackedDevicePoseMat4[0]));
}
glm::mat4 OpenVrDisplayPlugin::getEyePose(Eye eye) const {
- return getHeadPose() * _eyesData[eye]._eyeOffset;
+ return _eyesData[eye]._eyeOffset * getHeadPose();
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
@ -169,7 +165,7 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
}
void OpenVrDisplayPlugin::customizeContext() {
- MainWindowOpenGLDisplayPlugin::customizeContext();
+ WindowOpenGLDisplayPlugin::customizeContext();
}
void OpenVrDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {

View file

@ -11,9 +11,9 @@
#if defined(Q_OS_WIN)
- #include "../MainWindowOpenGLDisplayPlugin.h"
+ #include "../WindowOpenGLDisplayPlugin.h"
- class OpenVrDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
+ class OpenVrDisplayPlugin : public WindowOpenGLDisplayPlugin {
public:
virtual bool isSupported() const override;
virtual const QString & getName() const override;
@ -27,7 +27,6 @@ public:
// Stereo specific methods
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
- virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
virtual void resetSensors() override;
virtual glm::mat4 getEyePose(Eye eye) const override;

View file

@ -42,12 +42,12 @@ glm::mat4 StereoDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProje
return glm::translate(baseProjection, vec3(frustumshift, 0, 0));
}
- glm::mat4 StereoDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
+ glm::mat4 StereoDisplayPlugin::getEyePose(Eye eye) const {
float modelviewShift = HALF_DEFAULT_IPD;
if (eye == Left) {
modelviewShift = -modelviewShift;
}
- return baseModelview * glm::translate(mat4(), vec3(modelviewShift, 0, 0));
+ return glm::translate(mat4(), vec3(modelviewShift, 0, 0));
}
void StereoDisplayPlugin::activate() {
@ -57,11 +57,3 @@ void StereoDisplayPlugin::activate() {
//CONTAINER->setFullscreen(qApp->primaryScreen());
// FIXME Add menu items
}
- glm::vec3 StereoDisplayPlugin::getEyeOffset(Eye eye) const {
- glm::vec3 result(_ipd / 2.0f, 0, 0);
- if (eye == Eye::Right) {
- result *= -1.0f;
- }
- return result;
- }
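Under the new interface the stereo plugin expresses its half-IPD shift purely as an eye pose; the removed getEyeOffset()/getModelview() pair is redundant because the base class folds the pose into the view. A small numeric GLM sketch of the resulting left/right views for a 64 mm IPD (the constant here stands in for HALF_DEFAULT_IPD):

    // Sketch: half-IPD eye poses folded into view matrices the way the base class does.
    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    int main() {
        const float HALF_IPD = 0.064f / 2.0f;                 // stand-in for HALF_DEFAULT_IPD
        glm::mat4 baseView(1.0f);                             // identity camera for illustration
        for (int eye = 0; eye < 2; ++eye) {
            float shift = (eye == 0) ? -HALF_IPD : HALF_IPD;  // left eye gets the negative shift
            glm::mat4 eyePose = glm::translate(glm::mat4(1.0f), glm::vec3(shift, 0.0f, 0.0f));
            glm::mat4 eyeView = glm::inverse(eyePose) * baseView; // DisplayPlugin::getView() default
            std::printf("eye %d view x-translation: %+.3f\n", eye, eyeView[3].x);
        }
        return 0;
    }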

View file

@ -7,9 +7,9 @@
//
#pragma once
- #include "../MainWindowOpenGLDisplayPlugin.h"
+ #include "../WindowOpenGLDisplayPlugin.h"
- class StereoDisplayPlugin : public MainWindowOpenGLDisplayPlugin {
+ class StereoDisplayPlugin : public WindowOpenGLDisplayPlugin {
Q_OBJECT
public:
StereoDisplayPlugin();
@ -19,8 +19,7 @@ public:
virtual void activate() override;
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
- virtual glm::mat4 getModelview(Eye eye, const glm::mat4& baseModelview) const override;
+ virtual glm::mat4 getEyePose(Eye eye) const override;
- virtual glm::vec3 getEyeOffset(Eye eye) const override;
protected:
float _ipd{ 0.064f };