Merge pull request #3099 from Barnold1953/OculusSDK

Oculus SDK Upgrade
Brad Hefta-Gaub 2014-07-01 10:43:39 -07:00
commit a88c92ea17
15 changed files with 630 additions and 225 deletions


@@ -42,14 +42,11 @@ else (LIBOVR_LIBRARIES AND LIBOVR_INCLUDE_DIRS)
if (UDEV_LIBRARY AND XINERAMA_LIBRARY AND OVR_LIBRARY)
set(LIBOVR_LIBRARIES "${OVR_LIBRARY};${UDEV_LIBRARY};${XINERAMA_LIBRARY}" CACHE INTERNAL "Oculus libraries")
endif (UDEV_LIBRARY AND XINERAMA_LIBRARY AND OVR_LIBRARY)
elseif (WIN32)
-if (CMAKE_BUILD_TYPE MATCHES DEBUG)
-set(WINDOWS_LIBOVR_NAME "libovrd.lib")
-else()
-set(WINDOWS_LIBOVR_NAME "libovr.lib")
-endif()
-find_library(LIBOVR_LIBRARIES "Lib/Win32/${WINDOWS_LIBOVR_NAME}" HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_RELEASE_LIBRARIES "Lib/Win32/libovr.lib" HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_DEBUG_LIBRARIES "Lib/Win32/libovrd.lib" HINTS ${LIBOVR_SEARCH_DIRS})
set(LIBOVR_LIBRARIES "${LIBOVR_RELEASE_LIBRARIES} ${LIBOVR_DEBUG_LIBRARIES}")
endif ()
if (LIBOVR_INCLUDE_DIRS AND LIBOVR_LIBRARIES)


@@ -183,7 +183,11 @@ if (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${LIBOVR_INCLUDE_DIRS}")
endif ()
-target_link_libraries(${TARGET_NAME} "${LIBOVR_LIBRARIES}")
if (WIN32)
target_link_libraries(${TARGET_NAME} optimized "${LIBOVR_RELEASE_LIBRARIES}" debug "${LIBOVR_DEBUG_LIBRARIES}")
else()
target_link_libraries(${TARGET_NAME} "${LIBOVR_LIBRARIES}")
endif()
endif (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)
# and with PrioVR library


@@ -2,18 +2,12 @@
Instructions for adding the Oculus library (LibOVR) to Interface
Stephen Birarda, March 6, 2014

-You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.2.5.
You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.3.2.

1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/oculus folder.
This readme.txt should be there as well.
You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects).
If so our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above.

-NOTE: On OS X there is a linker error with version 0.2.5c of the Oculus SDK.
-It must be re-built (from the included LibOVR_With_Samples.xcodeproj) with RRTI support.
-In XCode Build Settings for the ovr target, set "Enable C++ Runtime Types" to yes.
-Then, Archive and use the organizer to save a copy of the built products.
-In the exported directory you will have a new libovr.a to copy into the oculus directory from above.

2. Clear your build directory, run cmake and build, and you should be all set.


@@ -4,12 +4,11 @@
// oculus.frag
// fragment shader
//
-// Created by Andrzej Kapolka on 11/26/13.
-// Copyright 2013 High Fidelity, Inc.
// Created by Ben Arnold on 6/24/14.
// Copyright 2014 High Fidelity, Inc.
//
-// this shader is an adaptation (HLSL -> GLSL, removed conditional) of the one in the Oculus sample
-// code (Samples/OculusRoomTiny/RenderTiny_D3D1X_Device.cpp), which is under the Apache license
-// (http://www.apache.org/licenses/LICENSE-2.0)
// this shader is an adaptation (HLSL -> GLSL) of the one in the
// Oculus_SDK_Overview.pdf for the 3.2 SDK.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html

@@ -17,23 +16,16 @@
uniform sampler2D texture;
-uniform vec2 lensCenter;
-uniform vec2 screenCenter;
-uniform vec2 scale;
-uniform vec2 scaleIn;
-uniform vec4 hmdWarpParam;
-vec2 hmdWarp(vec2 in01) {
-vec2 theta = (in01 - lensCenter) * scaleIn;
-float rSq = theta.x * theta.x + theta.y * theta.y;
-vec2 theta1 = theta * (hmdWarpParam.x + hmdWarpParam.y * rSq +
-hmdWarpParam.z * rSq * rSq + hmdWarpParam.w * rSq * rSq * rSq);
-return lensCenter + scale * theta1;
-}
varying float vFade;
varying vec2 oTexCoord0;
varying vec2 oTexCoord1;
varying vec2 oTexCoord2;
void main(void) {
-vec2 tc = hmdWarp(gl_TexCoord[0].st);
-vec2 below = step(screenCenter.st + vec2(-0.25, -0.5), tc.st);
-vec2 above = vec2(1.0, 1.0) - step(screenCenter.st + vec2(0.25, 0.5), tc.st);
-gl_FragColor = mix(vec4(0.0, 0.0, 0.0, 1.0), texture2D(texture, tc), above.s * above.t * below.s * below.t);
// 3 samples for fixing chromatic aberrations
float r = texture2D(texture, oTexCoord0.xy).r;
float g = texture2D(texture, oTexCoord1.xy).g;
float b = texture2D(texture, oTexCoord2.xy).b;
gl_FragColor = vec4(r * vFade, g * vFade, b * vFade, 1.0);
}


@@ -0,0 +1,63 @@
#version 120
//
// oculus.vert
// vertex shader
//
// Created by Ben Arnold on 6/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// this shader is an adaptation (HLSL -> GLSL) of the one in the
// Oculus_SDK_Overview.pdf for the 3.2 SDK.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
uniform vec2 EyeToSourceUVScale;
uniform vec2 EyeToSourceUVOffset;
uniform mat4 EyeRotationStart;
uniform mat4 EyeRotationEnd;
attribute vec2 position;
attribute vec4 color;
attribute vec2 texCoord0;
attribute vec2 texCoord1;
attribute vec2 texCoord2;
varying float vFade;
varying vec2 oTexCoord0;
varying vec2 oTexCoord1;
varying vec2 oTexCoord2;
vec2 TimewarpTexCoord(vec2 texCoord, mat4 rotMat)
{
// Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
// aberration and distortion). These are now "real world" vectors in direction (x,y,1)
// relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
vec3 transformed = vec3( rotMat * vec4(texCoord.xy, 1, 1) );
// Project them back onto the Z=1 plane of the rendered images.
vec2 flattened = (transformed.xy / transformed.z);
// Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
return (EyeToSourceUVScale * flattened + EyeToSourceUVOffset);
}
void main()
{
float timewarpMixFactor = color.a;
mat4 mixedEyeRot = EyeRotationStart * (1.0 - timewarpMixFactor) + EyeRotationEnd * (timewarpMixFactor);
oTexCoord0 = TimewarpTexCoord(texCoord0, mixedEyeRot);
oTexCoord1 = TimewarpTexCoord(texCoord1, mixedEyeRot);
oTexCoord2 = TimewarpTexCoord(texCoord2, mixedEyeRot);
//Flip y texture coordinates
oTexCoord0.y = 1.0 - oTexCoord0.y;
oTexCoord1.y = 1.0 - oTexCoord1.y;
oTexCoord2.y = 1.0 - oTexCoord2.y;
gl_Position = vec4(position.xy, 0.5, 1.0);
vFade = color.r; // For vignette fade
}
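For reference, the reprojection done by TimewarpTexCoord() above can be restated on the CPU side with glm. This is a hypothetical sketch for illustration only, not code from the commit; the function name and parameters are invented here.

#include <glm/glm.hpp>

// Hypothetical C++/glm restatement of the shader's TimewarpTexCoord(), for reference only.
glm::vec2 timewarpTexCoord(const glm::vec2& tanEyeAngle, const glm::mat4& eyeRotation,
                           const glm::vec2& uvScale, const glm::vec2& uvOffset) {
    // The per-channel input is a TanEyeAngle-space coordinate; treat it as a direction (x, y, 1)
    // and rotate it by the interpolated timewarp matrix, exactly as the vertex shader does.
    glm::vec3 transformed = glm::vec3(eyeRotation * glm::vec4(tanEyeAngle, 1.0f, 1.0f));
    // Project back onto the Z = 1 plane of the rendered image.
    glm::vec2 flattened = glm::vec2(transformed) / transformed.z;
    // Scale and offset into the per-eye UV region of the shared render target.
    return uvScale * flattened + uvOffset;
}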


@@ -565,6 +565,16 @@ void Application::paintGL() {
bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
PerformanceWarning warn(showWarnings, "Application::paintGL()");
const bool glowEnabled = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);
// Set the desired FBO texture size. If it hasn't changed, this does nothing.
// Otherwise, it must rebuild the FBOs
if (OculusManager::isConnected()) {
_textureCache.setFrameBufferSize(OculusManager::getRenderTargetSize());
} else {
_textureCache.setFrameBufferSize(_glWidget->size());
}
glEnable(GL_LINE_SMOOTH);

if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
@@ -573,28 +583,16 @@ void Application::paintGL() {
_myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
//Note, the camera distance is set in Camera::setMode() so we dont have to do it here.
_myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing
_myCamera.setTargetPosition(_myAvatar->getUprightHeadPosition());
-_myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation());
} else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
_myCamera.setTightness(0.0f);
-glm::vec3 eyePosition = _myAvatar->getHead()->calculateAverageEyePosition();
-float headHeight = eyePosition.y - _myAvatar->getPosition().y;
_myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
-_myCamera.setTargetPosition(_myAvatar->getPosition() + glm::vec3(0, headHeight + (_raiseMirror * _myAvatar->getScale()), 0));
_myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
_myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
}

-if (OculusManager::isConnected()) {
-// Oculus in third person causes nausea, so only allow it if option is checked in dev menu
-if (!Menu::getInstance()->isOptionChecked(MenuOption::AllowOculusCameraModeChange) || _myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
-_myCamera.setDistance(0.0f);
-_myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
-_myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());
-}
-_myCamera.setUpShift(0.0f);
-_myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing
-}

// Update camera position
@@ -629,16 +627,32 @@ void Application::paintGL() {
updateShadowMap();
}

//If we aren't using the glow shader, we have to clear the color and depth buffer
if (!glowEnabled) {
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}

if (OculusManager::isConnected()) {
-OculusManager::display(whichCamera);
//When in mirror mode, use camera rotation. Otherwise, use body rotation
if (whichCamera.getMode() == CAMERA_MODE_MIRROR) {
OculusManager::display(whichCamera.getRotation(), whichCamera.getPosition(), whichCamera);
} else {
OculusManager::display(_myAvatar->getWorldAlignedOrientation(), whichCamera.getPosition(), whichCamera);
}
} else if (TV3DManager::isConnected()) {
-_glowEffect.prepare();
if (glowEnabled) {
_glowEffect.prepare();
}
TV3DManager::display(whichCamera);
-_glowEffect.render();
if (glowEnabled) {
_glowEffect.render();
}
} else {
-_glowEffect.prepare();
if (glowEnabled) {
_glowEffect.prepare();
}
glMatrixMode(GL_MODELVIEW);
glPushMatrix();

@@ -646,7 +660,9 @@ void Application::paintGL() {
displaySide(whichCamera);
glPopMatrix();
-_glowEffect.render();
if (glowEnabled) {
_glowEffect.render();
}

if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
renderRearViewMirror(_mirrorViewRect);

@@ -3136,9 +3152,7 @@ void Application::resetSensors() {
_faceshift.reset();
_visage.reset();
-if (OculusManager::isConnected()) {
-OculusManager::reset();
-}
OculusManager::reset();
_prioVR.reset();


@@ -12,6 +12,7 @@
#include "Application.h"
#include "GLCanvas.h"
#include "devices/OculusManager.h"
#include <QMimeData>
#include <QUrl>
#include <QMainWindow>
@@ -41,8 +42,17 @@ void GLCanvas::initializeGL() {
void GLCanvas::paintGL() {
if (!_throttleRendering && !Application::getInstance()->getWindow()->isMinimized()) {
//Need accurate frame timing for the oculus rift
if (OculusManager::isConnected()) {
OculusManager::beginFrameTiming();
}
Application::getInstance()->paintGL();
swapBuffers();
if (OculusManager::isConnected()) {
OculusManager::endFrameTiming();
}
}
}

@@ -102,8 +112,17 @@ void GLCanvas::activeChanged(Qt::ApplicationState state) {
void GLCanvas::throttleRender() {
_frameTimer.start(_idleRenderInterval);
if (!Application::getInstance()->getWindow()->isMinimized()) {
//Need accurate frame timing for the oculus rift
if (OculusManager::isConnected()) {
OculusManager::beginFrameTiming();
}
Application::getInstance()->paintGL();
swapBuffers();
if (OculusManager::isConnected()) {
OculusManager::endFrameTiming();
}
}
}
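The begin/end pairing around paintGL() and swapBuffers() shown above has to stay balanced in both render paths. Purely as an illustration and not part of the commit, the same pattern could be expressed as a small RAII guard built on the OculusManager calls this change introduces:

// Illustrative only: pairs OculusManager::beginFrameTiming() with endFrameTiming()
// for the duration of a scope, mirroring the pattern in GLCanvas::paintGL() above.
class OculusFrameTimingScope {
public:
    OculusFrameTimingScope() : _active(OculusManager::isConnected()) {
        if (_active) {
            OculusManager::beginFrameTiming();
        }
    }
    ~OculusFrameTimingScope() {
        if (_active) {
            OculusManager::endFrameTiming();
        }
    }
private:
    bool _active;
};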


@@ -332,6 +332,8 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars, Qt::Key_Asterisk, true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere, Qt::SHIFT | Qt::Key_A, true);
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true);
addActionToQMenuAndActionHash(renderOptionsMenu,
MenuOption::GlowMode,
0,

@@ -400,7 +402,6 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::ChatCircling, 0, false);
QMenu* oculusOptionsMenu = developerMenu->addMenu("Oculus Options");
-addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::AllowOculusCameraModeChange, 0, false);
addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::DisplayOculusOverlays, 0, true);
QMenu* sixenseOptionsMenu = developerMenu->addMenu("Sixense Options");


@@ -295,7 +295,6 @@ private:
namespace MenuOption {
const QString AboutApp = "About Interface";
const QString AlignForearmsWithWrists = "Align Forearms with Wrists";
-const QString AllowOculusCameraModeChange = "Allow Oculus Camera Mode Change (Nausea)";
const QString AlternateIK = "Alternate IK";
const QString AmbientOcclusion = "Ambient Occlusion";
const QString Animations = "Animations...";

@@ -352,6 +351,7 @@ namespace MenuOption {
const QString DontFadeOnVoxelServerChanges = "Don't Fade In/Out on Voxel Server Changes";
const QString EchoLocalAudio = "Echo Local Audio";
const QString EchoServerAudio = "Echo Server Audio";
const QString EnableGlowEffect = "Enable Glow Effect (Warning: Poor Oculus Performance)";
const QString Enable3DTVMode = "Enable 3DTV Mode";
const QString EnableVRMode = "Enable VR Mode";
const QString ExpandMiscAvatarTiming = "Expand Misc MyAvatar Timing";


@@ -856,7 +856,9 @@ void MyAvatar::renderBody(RenderMode renderMode, float glowLevel) {
renderAttachments(renderMode);
// Render head so long as the camera isn't inside it
-if (shouldRenderHead(Application::getInstance()->getCamera()->getPosition(), renderMode)) {
const Camera *camera = Application::getInstance()->getCamera();
const glm::vec3 cameraPos = camera->getPosition() + (camera->getRotation() * glm::vec3(0.0f, 0.0f, 1.0f)) * camera->getDistance();
if (shouldRenderHead(cameraPos, renderMode)) {
getHead()->render(1.0f, modelRenderMode);
if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
renderHair();

@@ -920,9 +922,16 @@ void MyAvatar::updateOrientation(float deltaTime) {
glm::vec3 angularVelocity(yaw - head->getBaseYaw(), pitch - head->getBasePitch(), roll - head->getBaseRoll());
head->setAngularVelocity(angularVelocity);
-head->setBaseYaw(yaw);
-head->setBasePitch(pitch);
-head->setBaseRoll(roll);
//Invert yaw and roll when in mirror mode
if (Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
head->setBaseYaw(-yaw);
head->setBasePitch(pitch);
head->setBaseRoll(-roll);
} else {
head->setBaseYaw(yaw);
head->setBasePitch(pitch);
head->setBaseRoll(roll);
}
}


@@ -3,6 +3,7 @@
// interface/src/devices
//
// Created by Stephen Birarda on 5/9/13.
// Refactored by Ben Arnold on 6/30/2014
// Copyright 2012 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@@ -11,67 +12,207 @@
#include "InterfaceConfig.h"
-#include "OculusManager.h"
#include <QOpenGLFramebufferObject>
#include <glm/glm.hpp>
#include "Application.h"
#include "OculusManager.h"

#ifdef HAVE_LIBOVR
using namespace OVR;

ProgramObject OculusManager::_program;
int OculusManager::_textureLocation;
-int OculusManager::_lensCenterLocation;
-int OculusManager::_screenCenterLocation;
-int OculusManager::_scaleLocation;
-int OculusManager::_scaleInLocation;
-int OculusManager::_hmdWarpParamLocation;
int OculusManager::_eyeToSourceUVScaleLocation;
int OculusManager::_eyeToSourceUVOffsetLocation;
int OculusManager::_eyeRotationStartLocation;
int OculusManager::_eyeRotationEndLocation;
int OculusManager::_positionAttributeLocation;
int OculusManager::_colorAttributeLocation;
int OculusManager::_texCoord0AttributeLocation;
int OculusManager::_texCoord1AttributeLocation;
int OculusManager::_texCoord2AttributeLocation;
bool OculusManager::_isConnected = false;

-#ifdef HAVE_LIBOVR
-using namespace OVR;
-using namespace OVR::Util::Render;
ovrHmd OculusManager::_ovrHmd;
ovrHmdDesc OculusManager::_ovrHmdDesc;
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
ovrSizei OculusManager::_renderTargetSize;
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
ovrFrameTiming OculusManager::_hmdFrameTiming;
ovrRecti OculusManager::_eyeRenderViewport[ovrEye_Count];
unsigned int OculusManager::_frameIndex = 0;
bool OculusManager::_frameTimingActive = false;
bool OculusManager::_programInitialized = false;
Camera* OculusManager::_camera = NULL;
-Ptr<DeviceManager> OculusManager::_deviceManager;
-Ptr<HMDDevice> OculusManager::_hmdDevice;
-Ptr<SensorDevice> OculusManager::_sensorDevice;
-SensorFusion* OculusManager::_sensorFusion;
-StereoConfig OculusManager::_stereoConfig;
#endif

void OculusManager::connect() {
#ifdef HAVE_LIBOVR
-System::Init();
-_deviceManager = *DeviceManager::Create();
-_hmdDevice = *_deviceManager->EnumerateDevices<HMDDevice>().CreateDevice();
-if (_hmdDevice) {
ovr_Initialize();

_ovrHmd = ovrHmd_Create(0);
if (_ovrHmd) {
_isConnected = true;
-_sensorDevice = *_hmdDevice->GetSensor();
-_sensorFusion = new SensorFusion;
-_sensorFusion->AttachToSensor(_sensorDevice);
-_sensorFusion->SetPredictionEnabled(true);
ovrHmd_GetDesc(_ovrHmd, &_ovrHmdDesc);

_eyeFov[0] = _ovrHmdDesc.DefaultEyeFov[0];
_eyeFov[1] = _ovrHmdDesc.DefaultEyeFov[1];
//Get texture size
ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left,
_eyeFov[0], 1.0f);
ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Right,
_eyeFov[1], 1.0f);
_renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
_renderTargetSize.h = recommendedTex0Size.h;
if (_renderTargetSize.h < recommendedTex1Size.h) {
_renderTargetSize.h = recommendedTex1Size.h;
}
_eyeRenderDesc[0] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Left, _eyeFov[0]);
_eyeRenderDesc[1] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Right, _eyeFov[1]);
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest);
ovrHmd_StartSensor(_ovrHmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection |
ovrSensorCap_Position,
ovrSensorCap_Orientation);
if (!_camera) {
_camera = new Camera;
}
if (!_programInitialized) {
// Shader program
_programInitialized = true;
_program.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + "shaders/oculus.vert");
_program.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + "shaders/oculus.frag");
_program.link();
// Uniforms
_textureLocation = _program.uniformLocation("texture");
_eyeToSourceUVScaleLocation = _program.uniformLocation("EyeToSourceUVScale");
_eyeToSourceUVOffsetLocation = _program.uniformLocation("EyeToSourceUVOffset");
_eyeRotationStartLocation = _program.uniformLocation("EyeRotationStart");
_eyeRotationEndLocation = _program.uniformLocation("EyeRotationEnd");
// Attributes
_positionAttributeLocation = _program.attributeLocation("position");
_colorAttributeLocation = _program.attributeLocation("color");
_texCoord0AttributeLocation = _program.attributeLocation("texCoord0");
_texCoord1AttributeLocation = _program.attributeLocation("texCoord1");
_texCoord2AttributeLocation = _program.attributeLocation("texCoord2");
}
//Generate the distortion VBOs
generateDistortionMesh();
-HMDInfo info;
-_hmdDevice->GetDeviceInfo(&info);
-_stereoConfig.SetHMDInfo(info);
-_program.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + "shaders/oculus.frag");
-_program.link();
-_textureLocation = _program.uniformLocation("texture");
-_lensCenterLocation = _program.uniformLocation("lensCenter");
-_screenCenterLocation = _program.uniformLocation("screenCenter");
-_scaleLocation = _program.uniformLocation("scale");
-_scaleInLocation = _program.uniformLocation("scaleIn");
-_hmdWarpParamLocation = _program.uniformLocation("hmdWarpParam");
} else {
-_deviceManager.Clear();
-System::Destroy();
_isConnected = false;
ovrHmd_Destroy(_ovrHmd);
ovr_Shutdown();
}
#endif
}
//Disconnects and deallocates the OR
void OculusManager::disconnect() {
#ifdef HAVE_LIBOVR
if (_isConnected) {
_isConnected = false;
ovrHmd_Destroy(_ovrHmd);
ovr_Shutdown();
//Free the distortion mesh data
for (int i = 0; i < ovrEye_Count; i++) {
if (_vertices[i] != 0) {
glDeleteBuffers(1, &(_vertices[i]));
_vertices[i] = 0;
}
if (_indices[i] != 0) {
glDeleteBuffers(1, &(_indices[i]));
_indices[i] = 0;
}
}
}
#endif
}
#ifdef HAVE_LIBOVR
void OculusManager::generateDistortionMesh() {
//Check if we already have the distortion mesh
if (_vertices[0] != 0) {
printf("WARNING: Tried to generate Oculus distortion mesh twice without freeing the VBOs.");
return;
}
//Viewport for the render target for each eye
_eyeRenderViewport[0].Pos = Vector2i(0, 0);
_eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h);
_eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0);
_eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size;
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
// Allocate and generate distortion mesh vertices
ovrDistortionMesh meshData;
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmdDesc.DistortionCaps, &meshData);
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeRenderViewport[eyeNum],
_UVScaleOffset[eyeNum]);
// Parse the vertex data and create a render ready vertex buffer
DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
_meshSize[eyeNum] = meshData.IndexCount;
// Convert the oculus vertex data to the DistortionVertex format.
DistortionVertex* v = pVBVerts;
ovrDistortionVertex* ov = meshData.pVertexData;
for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
v->pos.x = ov->Pos.x;
v->pos.y = ov->Pos.y;
v->texR.x = ov->TexR.x;
v->texR.y = ov->TexR.y;
v->texG.x = ov->TexG.x;
v->texG.y = ov->TexG.y;
v->texB.x = ov->TexB.x;
v->texB.y = ov->TexB.y;
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
v++;
ov++;
}
//vertices
glGenBuffers(1, &(_vertices[eyeNum]));
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
glBufferData(GL_ARRAY_BUFFER, sizeof(DistortionVertex) * meshData.VertexCount, pVBVerts, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//indices
glGenBuffers(1, &(_indices[eyeNum]));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned short) * meshData.IndexCount, meshData.pIndexData, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Now that we have the VBOs we can get rid of the mesh data
OVR_FREE(pVBVerts);
ovrHmd_DestroyDistortionMesh(&meshData);
}
}
#endif
bool OculusManager::isConnected() {
#ifdef HAVE_LIBOVR
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);

@@ -80,137 +221,237 @@ bool OculusManager::isConnected() {
#endif
}
//Begins the frame timing for oculus prediction purposes
void OculusManager::beginFrameTiming() {
#ifdef HAVE_LIBOVR
if (_frameTimingActive) {
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
}
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
_frameTimingActive = true;
#endif
}
//Ends frame timing
void OculusManager::endFrameTiming() {
#ifdef HAVE_LIBOVR
ovrHmd_EndFrameTiming(_ovrHmd);
_frameIndex++;
_frameTimingActive = false;
#endif
}
//Sets the camera FoV and aspect ratio
void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) {
#ifdef HAVE_LIBOVR
-_stereoConfig.SetFullViewport(Viewport(0, 0, screenWidth, screenHeight));
-camera.setAspectRatio(_stereoConfig.GetAspect());
-camera.setFieldOfView(_stereoConfig.GetYFOVDegrees());
camera.setAspectRatio(_renderTargetSize.w / _renderTargetSize.h);
camera.setFieldOfView(atan(_eyeFov[0].UpTan) * DEGREES_PER_RADIAN * 2.0f);
#endif
}

-void OculusManager::display(Camera& whichCamera) {
//Displays everything for the oculus, frame timing must be active
void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
#ifdef HAVE_LIBOVR
//beginFrameTiming must be called before display
if (!_frameTimingActive) {
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
return;
}
ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
// We only need to render the overlays to a texture once, then we just render the texture as a quad
// PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
applicationOverlay.renderOverlay(true);
const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::DisplayOculusOverlays);

-Application::getInstance()->getGlowEffect()->prepare();
-// render the left eye view to the left side of the screen
-const StereoEyeParams& leftEyeParams = _stereoConfig.GetEyeRenderParams(StereoEye_Left);

//Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
Application::getInstance()->getGlowEffect()->prepare();
} else {
Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->bind();
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
ovrPosef eyeRenderPose[ovrEye_Count];
_camera->setTightness(0.0f); // In first person, camera follows (untweaked) head exactly without delay
_camera->setDistance(0.0f);
_camera->setUpShift(0.0f);
glMatrixMode(GL_PROJECTION);
glPushMatrix();
-glLoadIdentity();
-glTranslatef(_stereoConfig.GetProjectionCenterOffset(), 0, 0);
-gluPerspective(whichCamera.getFieldOfView(), whichCamera.getAspectRatio(),
-whichCamera.getNearClip(), whichCamera.getFarClip());
-glViewport(leftEyeParams.VP.x, leftEyeParams.VP.y, leftEyeParams.VP.w, leftEyeParams.VP.h);

glMatrixMode(GL_MODELVIEW);
glPushMatrix();
-glLoadIdentity();
-glTranslatef(_stereoConfig.GetIPD() * 0.5f, 0, 0);
-Application::getInstance()->displaySide(whichCamera);
-if (displayOverlays) {
-applicationOverlay.displayOverlayTextureOculus(whichCamera);
-}
-// and the right eye to the right side
-const StereoEyeParams& rightEyeParams = _stereoConfig.GetEyeRenderParams(StereoEye_Right);
-glMatrixMode(GL_PROJECTION);
-glLoadIdentity();
-glTranslatef(-_stereoConfig.GetProjectionCenterOffset(), 0, 0);
-gluPerspective(whichCamera.getFieldOfView(), whichCamera.getAspectRatio(),
-whichCamera.getNearClip(), whichCamera.getFarClip());
-glViewport(rightEyeParams.VP.x, rightEyeParams.VP.y, rightEyeParams.VP.w, rightEyeParams.VP.h);
-glMatrixMode(GL_MODELVIEW);
-glLoadIdentity();
-glTranslatef(_stereoConfig.GetIPD() * -0.5f, 0, 0);
-Application::getInstance()->displaySide(whichCamera);
-if (displayOverlays) {
-applicationOverlay.displayOverlayTextureOculus(whichCamera);
-}

glm::quat orientation;

//Render each eye into an fbo
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex];

//Set the camera rotation for this eye
eyeRenderPose[eye] = ovrHmd_GetEyePose(_ovrHmd, eye);
orientation.x = eyeRenderPose[eye].Orientation.x;
orientation.y = eyeRenderPose[eye].Orientation.y;
orientation.z = eyeRenderPose[eye].Orientation.z;
orientation.w = eyeRenderPose[eye].Orientation.w;
_camera->setTargetRotation(bodyOrientation * orientation);
_camera->setTargetPosition(position);
_camera->update(1.0f / Application::getInstance()->getFps());
Matrix4f proj = ovrMatrix4f_Projection(_eyeRenderDesc[eye].Fov, whichCamera.getNearClip(), whichCamera.getFarClip(), true);
proj.Transpose();
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
glLoadMatrixf((GLfloat *)proj.M);
glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y,
_eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z);
Application::getInstance()->displaySide(*_camera);
if (displayOverlays) {
applicationOverlay.displayOverlayTextureOculus(*_camera);
}
}
//Wait till time-warp to reduce latency
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
glPopMatrix();
-// restore our normal viewport
-const Viewport& fullViewport = _stereoConfig.GetFullViewport();
-glViewport(fullViewport.x, fullViewport.y, fullViewport.w, fullViewport.h);
-QOpenGLFramebufferObject* fbo = Application::getInstance()->getGlowEffect()->render(true);
-glBindTexture(GL_TEXTURE_2D, fbo->texture());

//Full texture viewport for glow effect
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
QOpenGLFramebufferObject* fbo = Application::getInstance()->getGlowEffect()->render(true);
glBindTexture(GL_TEXTURE_2D, fbo->texture());
} else {
Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->release();
glBindTexture(GL_TEXTURE_2D, Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->texture());
}
// restore our normal viewport
glViewport(0, 0, Application::getInstance()->getGLWidget()->width(), Application::getInstance()->getGLWidget()->height());
glMatrixMode(GL_PROJECTION);
glPopMatrix();
//Renders the distorted mesh onto the screen
renderDistortionMesh(eyeRenderPose);
glBindTexture(GL_TEXTURE_2D, 0);
#endif
}
#ifdef HAVE_LIBOVR
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
glLoadIdentity();
-gluOrtho2D(fullViewport.x, fullViewport.x + fullViewport.w, fullViewport.y, fullViewport.y + fullViewport.h);
gluOrtho2D(0, Application::getInstance()->getGLWidget()->width(), 0, Application::getInstance()->getGLWidget()->height());
glDisable(GL_DEPTH_TEST);
-// for reference on setting these values, see SDK file Samples/OculusRoomTiny/RenderTiny_Device.cpp
-float scaleFactor = 1.0 / _stereoConfig.GetDistortionScale();
-float aspectRatio = _stereoConfig.GetAspect();
glDisable(GL_BLEND);
_program.bind();
_program.setUniformValue(_textureLocation, 0);
-const DistortionConfig& distortionConfig = _stereoConfig.GetDistortionConfig();
-_program.setUniformValue(_lensCenterLocation, (0.5 + distortionConfig.XCenterOffset * 0.5) * 0.5, 0.5);
-_program.setUniformValue(_screenCenterLocation, 0.25, 0.5);
-_program.setUniformValue(_scaleLocation, 0.25 * scaleFactor, 0.5 * scaleFactor * aspectRatio);
-_program.setUniformValue(_scaleInLocation, 4, 2 / aspectRatio);
-_program.setUniformValue(_hmdWarpParamLocation, distortionConfig.K[0], distortionConfig.K[1],
-distortionConfig.K[2], distortionConfig.K[3]);
-glColor3f(1, 0, 1);
-glBegin(GL_QUADS);
-glTexCoord2f(0, 0);
-glVertex2f(0, 0);
-glTexCoord2f(0.5, 0);
-glVertex2f(leftEyeParams.VP.w, 0);
-glTexCoord2f(0.5, 1);
-glVertex2f(leftEyeParams.VP.w, leftEyeParams.VP.h);
-glTexCoord2f(0, 1);
-glVertex2f(0, leftEyeParams.VP.h);
-glEnd();
-_program.setUniformValue(_lensCenterLocation, 0.5 + (0.5 - distortionConfig.XCenterOffset * 0.5) * 0.5, 0.5);
-_program.setUniformValue(_screenCenterLocation, 0.75, 0.5);
-glBegin(GL_QUADS);
-glTexCoord2f(0.5, 0);
-glVertex2f(leftEyeParams.VP.w, 0);
-glTexCoord2f(1, 0);
-glVertex2f(fullViewport.w, 0);
-glTexCoord2f(1, 1);
-glVertex2f(fullViewport.w, leftEyeParams.VP.h);
-glTexCoord2f(0.5, 1);
-glVertex2f(leftEyeParams.VP.w, leftEyeParams.VP.h);
-glEnd();
-glEnable(GL_BLEND);
-glBindTexture(GL_TEXTURE_2D, 0);

_program.enableAttributeArray(_positionAttributeLocation);
_program.enableAttributeArray(_colorAttributeLocation);
_program.enableAttributeArray(_texCoord0AttributeLocation);
_program.enableAttributeArray(_texCoord1AttributeLocation);
_program.enableAttributeArray(_texCoord2AttributeLocation);

//Render the distortion meshes for each eye
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
_program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y };
_program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);

ovrMatrix4f timeWarpMatrices[2];
Matrix4f transposeMatrices[2];
//Grabs the timewarp matrices to be used in the shader
ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]);
transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]);

//Have to transpose the matrices before using them
transposeMatrices[0].Transpose();
transposeMatrices[1].Transpose();

glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M);
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M);

glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
//Set vertex attribute pointers
glVertexAttribPointer(_positionAttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)0);
glVertexAttribPointer(_texCoord0AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)8);
glVertexAttribPointer(_texCoord1AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)16);
glVertexAttribPointer(_texCoord2AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)24);
glVertexAttribPointer(_colorAttributeLocation, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(DistortionVertex), (void *)32);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glDrawElements(GL_TRIANGLES, _meshSize[eyeNum], GL_UNSIGNED_SHORT, 0);
}
_program.disableAttributeArray(_positionAttributeLocation);
_program.disableAttributeArray(_colorAttributeLocation);
_program.disableAttributeArray(_texCoord0AttributeLocation);
_program.disableAttributeArray(_texCoord1AttributeLocation);
_program.disableAttributeArray(_texCoord2AttributeLocation);
glEnable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
_program.release();
glBindBuffer(GL_ARRAY_BUFFER, 0);
glPopMatrix();
-#endif
-}
}
#endif
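The byte offsets hard-coded in the glVertexAttribPointer() calls above (0, 8, 16, 24 and 32) follow from the DistortionVertex layout declared in OculusManager.h. The following standalone sketch of that assumption is for illustration only and is not part of the commit; the struct name here is invented.

#include <cstddef>
#include <glm/glm.hpp>

// Illustrative only: mirrors the private DistortionVertex struct from OculusManager.h so the
// hard-coded attribute offsets can be sanity-checked with offsetof().
struct DistortionVertexLayout {
    glm::vec2 pos;    // offset 0
    glm::vec2 texR;   // offset 8
    glm::vec2 texG;   // offset 16
    glm::vec2 texB;   // offset 24
    struct { unsigned char r, g, b, a; } color; // offset 32, read as GL_UNSIGNED_BYTE in the shader program
};

static_assert(offsetof(DistortionVertexLayout, texG) == 16, "unexpected padding in DistortionVertexLayout");
static_assert(offsetof(DistortionVertexLayout, color) == 32, "unexpected padding in DistortionVertexLayout");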
//Tries to reconnect to the sensors
void OculusManager::reset() {
#ifdef HAVE_LIBOVR
-_sensorFusion->Reset();
disconnect();
connect();
#endif
}
//Gets the current predicted angles from the oculus sensors
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
#ifdef HAVE_LIBOVR
-_sensorFusion->GetPredictedOrientation().GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
ovrSensorState ss = ovrHmd_GetSensorState(_ovrHmd, _hmdFrameTiming.ScanoutMidpointSeconds);
if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
ovrPosef pose = ss.Predicted.Pose;
Quatf orientation = Quatf(pose.Orientation);
orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
}
#endif
}
//Used to set the size of the glow framebuffers
QSize OculusManager::getRenderTargetSize() {
#ifdef HAVE_LIBOVR
QSize rv;
rv.setWidth(_renderTargetSize.w);
rv.setHeight(_renderTargetSize.h);
return rv;
#else
return QSize(100, 100);
#endif
}


@@ -3,6 +3,7 @@
// interface/src/devices
//
// Created by Stephen Birarda on 5/9/13.
// Refactored by Ben Arnold on 6/30/2014
// Copyright 2012 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@@ -12,10 +13,9 @@
#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h

#include <iostream>

#ifdef HAVE_LIBOVR
#include <OVR.h>
-#include "../src/Util/Util_Render_Stereo.h"
#endif

#include "renderer/ProgramObject.h"
@@ -28,38 +28,69 @@ class Camera;
class OculusManager {
public:
static void connect();
static void disconnect();
static bool isConnected();
static void beginFrameTiming();
static void endFrameTiming();
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
-static void display(Camera& whichCamera);
static void display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
static void reset();
/// param \yaw[out] yaw in radians
/// param \pitch[out] pitch in radians
/// param \roll[out] roll in radians
static void getEulerAngles(float& yaw, float& pitch, float& roll);
static QSize getRenderTargetSize();
static void updateYawOffset();
private:
#ifdef HAVE_LIBOVR
static void generateDistortionMesh();
static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);
struct DistortionVertex {
glm::vec2 pos;
glm::vec2 texR;
glm::vec2 texG;
glm::vec2 texB;
struct {
GLubyte r;
GLubyte g;
GLubyte b;
GLubyte a;
} color;
};
static ProgramObject _program;
//Uniforms
static int _textureLocation;
-static int _lensCenterLocation;
-static int _screenCenterLocation;
-static int _scaleLocation;
-static int _scaleInLocation;
-static int _hmdWarpParamLocation;
static int _eyeToSourceUVScaleLocation;
static int _eyeToSourceUVOffsetLocation;
static int _eyeRotationStartLocation;
static int _eyeRotationEndLocation;
//Attributes
static int _positionAttributeLocation;
static int _colorAttributeLocation;
static int _texCoord0AttributeLocation;
static int _texCoord1AttributeLocation;
static int _texCoord2AttributeLocation;
static bool _isConnected;

-#ifdef HAVE_LIBOVR
-static OVR::Ptr<OVR::DeviceManager> _deviceManager;
-static OVR::Ptr<OVR::HMDDevice> _hmdDevice;
-static OVR::Ptr<OVR::SensorDevice> _sensorDevice;
-static OVR::SensorFusion* _sensorFusion;
-static OVR::Util::Render::StereoConfig _stereoConfig;
static ovrHmd _ovrHmd;
static ovrHmdDesc _ovrHmdDesc;
static ovrFovPort _eyeFov[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrSizei _renderTargetSize;
static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
static GLuint _vertices[ovrEye_Count];
static GLuint _indices[ovrEye_Count];
static GLsizei _meshSize[ovrEye_Count];
static ovrFrameTiming _hmdFrameTiming;
static ovrRecti _eyeRenderViewport[ovrEye_Count];
static unsigned int _frameIndex;
static bool _frameTimingActive;
static bool _programInitialized;
static Camera* _camera;
#endif
};
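Taken together, the public interface above is used roughly as follows. This is a hypothetical usage sketch with an invented function name; the real call sites are Application.cpp and GLCanvas.cpp elsewhere in this commit.

// Hypothetical usage sketch of the OculusManager API declared above.
void renderOneRiftFrame(const glm::quat& bodyOrientation, const glm::vec3& position, Camera& whichCamera) {
    if (!OculusManager::isConnected()) {
        return; // no HMD (or VR mode disabled): fall back to the normal render path
    }
    OculusManager::beginFrameTiming();                               // start SDK frame timing / prediction
    OculusManager::display(bodyOrientation, position, whichCamera); // render both eyes and the distortion mesh
    OculusManager::endFrameTiming();                                 // close out the frame for the SDK
}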


@@ -180,7 +180,7 @@ QOpenGLFramebufferObject* GlowEffect::render(bool toTexture) {
glBindTexture(GL_TEXTURE_2D, oldDiffusedFBO->texture());
_diffuseProgram->bind();
-QSize size = Application::getInstance()->getGLWidget()->size();
QSize size = primaryFBO->size();
_diffuseProgram->setUniformValue(_diffusionScaleLocation, 1.0f / size.width(), 1.0f / size.height());
renderFullscreenQuad();


@@ -28,10 +28,12 @@ TextureCache::TextureCache() :
_permutationNormalTextureID(0),
_whiteTextureID(0),
_blueTextureID(0),
_primaryDepthTextureID(0),
_primaryFramebufferObject(NULL),
_secondaryFramebufferObject(NULL),
_tertiaryFramebufferObject(NULL),
-_shadowFramebufferObject(NULL)
_shadowFramebufferObject(NULL),
_frameBufferSize(100, 100)
{
}
@@ -46,9 +48,41 @@ TextureCache::~TextureCache() {
glDeleteTextures(1, &_primaryDepthTextureID);
}
-delete _primaryFramebufferObject;
-delete _secondaryFramebufferObject;
-delete _tertiaryFramebufferObject;
if (_primaryFramebufferObject) {
delete _primaryFramebufferObject;
}
if (_secondaryFramebufferObject) {
delete _secondaryFramebufferObject;
}
if (_tertiaryFramebufferObject) {
delete _tertiaryFramebufferObject;
}
}
void TextureCache::setFrameBufferSize(QSize frameBufferSize) {
//If the size changed, we need to delete our FBOs
if (_frameBufferSize != frameBufferSize) {
_frameBufferSize = frameBufferSize;
if (_primaryFramebufferObject) {
delete _primaryFramebufferObject;
_primaryFramebufferObject = NULL;
glDeleteTextures(1, &_primaryDepthTextureID);
_primaryDepthTextureID = 0;
}
if (_secondaryFramebufferObject) {
delete _secondaryFramebufferObject;
_secondaryFramebufferObject = NULL;
}
if (_tertiaryFramebufferObject) {
delete _tertiaryFramebufferObject;
_tertiaryFramebufferObject = NULL;
}
}
}

GLuint TextureCache::getPermutationNormalTextureID() {
@@ -131,13 +165,14 @@ QSharedPointer<NetworkTexture> TextureCache::getTexture(const QUrl& url, bool no
}

QOpenGLFramebufferObject* TextureCache::getPrimaryFramebufferObject() {
if (!_primaryFramebufferObject) {
_primaryFramebufferObject = createFramebufferObject();
glGenTextures(1, &_primaryDepthTextureID);
glBindTexture(GL_TEXTURE_2D, _primaryDepthTextureID);
-QSize size = Application::getInstance()->getGLWidget()->size();
-glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, size.width(), size.height(),
glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, _frameBufferSize.width(), _frameBufferSize.height(),
0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, 0);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
@@ -230,7 +265,7 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url,
}

QOpenGLFramebufferObject* TextureCache::createFramebufferObject() {
-QOpenGLFramebufferObject* fbo = new QOpenGLFramebufferObject(Application::getInstance()->getGLWidget()->size());
QOpenGLFramebufferObject* fbo = new QOpenGLFramebufferObject(_frameBufferSize);
Application::getInstance()->getGLWidget()->installEventFilter(this);
glBindTexture(GL_TEXTURE_2D, fbo->texture());
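The resize path in this file works lazily: setFrameBufferSize() only deletes the cached FBOs when the size actually changes, and the next accessor call rebuilds them at the new size via createFramebufferObject(). A hypothetical caller-side sketch, for illustration only; the real call site is Application::paintGL() earlier in this commit.

// Hypothetical caller-side sketch of the lazy FBO rebuild in TextureCache.
TextureCache* textureCache = Application::getInstance()->getTextureCache();
textureCache->setFrameBufferSize(OculusManager::getRenderTargetSize()); // no-op unless the size changed
QOpenGLFramebufferObject* fbo = textureCache->getPrimaryFramebufferObject(); // recreated here at the new size if needed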


@@ -32,6 +32,9 @@ public:
TextureCache();
virtual ~TextureCache();

/// Sets the desired texture resolution for the framebuffer objects.
void setFrameBufferSize(QSize frameBufferSize);

/// Returns the ID of the permutation/normal texture used for Perlin noise shader programs. This texture
/// has two lines: the first, a set of random numbers in [0, 255] to be used as permutation offsets, and
/// the second, a set of random unit vectors to be used as noise gradients.

@@ -94,6 +97,8 @@ private:
QOpenGLFramebufferObject* _shadowFramebufferObject;
GLuint _shadowDepthTextureID;

QSize _frameBufferSize;
};

/// A simple object wrapper for an OpenGL texture.