Commit a88c92ea17 in overte (mirror of https://thingvellir.net/git/overte)
15 changed files with 630 additions and 225 deletions
@@ -42,14 +42,11 @@ else (LIBOVR_LIBRARIES AND LIBOVR_INCLUDE_DIRS)
  if (UDEV_LIBRARY AND XINERAMA_LIBRARY AND OVR_LIBRARY)
    set(LIBOVR_LIBRARIES "${OVR_LIBRARY};${UDEV_LIBRARY};${XINERAMA_LIBRARY}" CACHE INTERNAL "Oculus libraries")
  endif (UDEV_LIBRARY AND XINERAMA_LIBRARY AND OVR_LIBRARY)
elseif (WIN32)
  if (CMAKE_BUILD_TYPE MATCHES DEBUG)
    set(WINDOWS_LIBOVR_NAME "libovrd.lib")
  else()
    set(WINDOWS_LIBOVR_NAME "libovr.lib")
  endif()
elseif (WIN32)
  find_library(LIBOVR_RELEASE_LIBRARIES "Lib/Win32/libovr.lib" HINTS ${LIBOVR_SEARCH_DIRS})
  find_library(LIBOVR_DEBUG_LIBRARIES "Lib/Win32/libovrd.lib" HINTS ${LIBOVR_SEARCH_DIRS})

  find_library(LIBOVR_LIBRARIES "Lib/Win32/${WINDOWS_LIBOVR_NAME}" HINTS ${LIBOVR_SEARCH_DIRS})
  set(LIBOVR_LIBRARIES "${LIBOVR_RELEASE_LIBRARIES} ${LIBOVR_DEBUG_LIBRARIES}")
endif ()

if (LIBOVR_INCLUDE_DIRS AND LIBOVR_LIBRARIES)

@@ -183,7 +183,11 @@ if (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)
  set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -isystem ${LIBOVR_INCLUDE_DIRS}")
endif ()

target_link_libraries(${TARGET_NAME} "${LIBOVR_LIBRARIES}")
if (WIN32)
  target_link_libraries(${TARGET_NAME} optimized "${LIBOVR_RELEASE_LIBRARIES}" debug "${LIBOVR_DEBUG_LIBRARIES}")
else()
  target_link_libraries(${TARGET_NAME} "${LIBOVR_LIBRARIES}")
endif()
endif (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)

# and with PrioVR library
interface/external/oculus/readme.txt (vendored): 8 lines changed
@@ -2,18 +2,12 @@
Instructions for adding the Oculus library (LibOVR) to Interface
Stephen Birarda, March 6, 2014

You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.2.5.
You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.3.2.

1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/oculus folder.
   This readme.txt should be there as well.

   You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use them with different checkouts and different projects).
   If so, our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above.

   NOTE: On OS X there is a linker error with version 0.2.5c of the Oculus SDK.
   It must be re-built (from the included LibOVR_With_Samples.xcodeproj) with RTTI support.
   In XCode Build Settings for the ovr target, set "Enable C++ Runtime Types" to yes.
   Then, Archive and use the organizer to save a copy of the built products.
   In the exported directory you will have a new libovr.a to copy into the oculus directory from above.

2. Clear your build directory, run cmake and build, and you should be all set.
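The HIFI_LIB_DIR convention described in step 1 is consumed by the project's CMake find module, whose Windows library lookup appears at the top of this commit. Purely as a hedged illustration of the idea, and not the module's actual contents (the fallback path and the exact find_path arguments below are assumptions), the environment variable feeds the header/library search path along these lines:

    # Minimal sketch only, reusing the LIBOVR_SEARCH_DIRS name from the real module:
    # honor HIFI_LIB_DIR when it is set, otherwise use the in-tree copy of the SDK.
    if (DEFINED ENV{HIFI_LIB_DIR})
        list(APPEND LIBOVR_SEARCH_DIRS "$ENV{HIFI_LIB_DIR}/oculus")
    endif ()
    list(APPEND LIBOVR_SEARCH_DIRS "${CMAKE_SOURCE_DIR}/interface/external/oculus")
    # The copied SDK keeps its headers in an 'Include' subfolder.
    find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})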
@@ -4,12 +4,11 @@
// oculus.frag
// fragment shader
//
// Created by Andrzej Kapolka on 11/26/13.
// Copyright 2013 High Fidelity, Inc.
// Created by Ben Arnold on 6/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// this shader is an adaptation (HLSL -> GLSL, removed conditional) of the one in the Oculus sample
// code (Samples/OculusRoomTiny/RenderTiny_D3D1X_Device.cpp), which is under the Apache license
// (http://www.apache.org/licenses/LICENSE-2.0)
// this shader is an adaptation (HLSL -> GLSL) of the one in the
// Oculus_SDK_Overview.pdf for the 3.2 SDK.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html

@@ -17,23 +16,16 @@

uniform sampler2D texture;

uniform vec2 lensCenter;
uniform vec2 screenCenter;
uniform vec2 scale;
uniform vec2 scaleIn;
uniform vec4 hmdWarpParam;

vec2 hmdWarp(vec2 in01) {
    vec2 theta = (in01 - lensCenter) * scaleIn;
    float rSq = theta.x * theta.x + theta.y * theta.y;
    vec2 theta1 = theta * (hmdWarpParam.x + hmdWarpParam.y * rSq +
        hmdWarpParam.z * rSq * rSq + hmdWarpParam.w * rSq * rSq * rSq);
    return lensCenter + scale * theta1;
}
varying float vFade;
varying vec2 oTexCoord0;
varying vec2 oTexCoord1;
varying vec2 oTexCoord2;

void main(void) {
    vec2 tc = hmdWarp(gl_TexCoord[0].st);
    vec2 below = step(screenCenter.st + vec2(-0.25, -0.5), tc.st);
    vec2 above = vec2(1.0, 1.0) - step(screenCenter.st + vec2(0.25, 0.5), tc.st);
    gl_FragColor = mix(vec4(0.0, 0.0, 0.0, 1.0), texture2D(texture, tc), above.s * above.t * below.s * below.t);
    // 3 samples for fixing chromatic aberrations
    float r = texture2D(texture, oTexCoord0.xy).r;
    float g = texture2D(texture, oTexCoord1.xy).g;
    float b = texture2D(texture, oTexCoord2.xy).b;

    gl_FragColor = vec4(r * vFade, g * vFade, b * vFade, 1.0);
}
interface/resources/shaders/oculus.vert (new file): 63 lines added
@@ -0,0 +1,63 @@
#version 120

//
// oculus.vert
// vertex shader
//
// Created by Ben Arnold on 6/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// this shader is an adaptation (HLSL -> GLSL) of the one in the
// Oculus_SDK_Overview.pdf for the 3.2 SDK.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

uniform vec2 EyeToSourceUVScale;
uniform vec2 EyeToSourceUVOffset;
uniform mat4 EyeRotationStart;
uniform mat4 EyeRotationEnd;

attribute vec2 position;
attribute vec4 color;
attribute vec2 texCoord0;
attribute vec2 texCoord1;
attribute vec2 texCoord2;

varying float vFade;
varying vec2 oTexCoord0;
varying vec2 oTexCoord1;
varying vec2 oTexCoord2;

vec2 TimewarpTexCoord(vec2 texCoord, mat4 rotMat)
{
    // Vertex inputs are in TanEyeAngle space for the R,G,B channels (i.e. after chromatic
    // aberration and distortion). These are now "real world" vectors in direction (x,y,1)
    // relative to the eye of the HMD. Apply the 3x3 timewarp rotation to these vectors.
    vec3 transformed = vec3( rotMat * vec4(texCoord.xy, 1, 1) );

    // Project them back onto the Z=1 plane of the rendered images.
    vec2 flattened = (transformed.xy / transformed.z);

    // Scale them into ([0,0.5],[0,1]) or ([0.5,0],[0,1]) UV lookup space (depending on eye)
    return (EyeToSourceUVScale * flattened + EyeToSourceUVOffset);
}

void main()
{
    float timewarpMixFactor = color.a;
    mat4 mixedEyeRot = EyeRotationStart * (1.0 - timewarpMixFactor) + EyeRotationEnd * (timewarpMixFactor);

    oTexCoord0 = TimewarpTexCoord(texCoord0, mixedEyeRot);
    oTexCoord1 = TimewarpTexCoord(texCoord1, mixedEyeRot);
    oTexCoord2 = TimewarpTexCoord(texCoord2, mixedEyeRot);

    //Flip y texture coordinates
    oTexCoord0.y = 1.0 - oTexCoord0.y;
    oTexCoord1.y = 1.0 - oTexCoord1.y;
    oTexCoord2.y = 1.0 - oTexCoord2.y;

    gl_Position = vec4(position.xy, 0.5, 1.0);
    vFade = color.r; // For vignette fade
}
@@ -565,6 +565,16 @@ void Application::paintGL() {
    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
    PerformanceWarning warn(showWarnings, "Application::paintGL()");

    const bool glowEnabled = Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect);

    // Set the desired FBO texture size. If it hasn't changed, this does nothing.
    // Otherwise, it must rebuild the FBOs
    if (OculusManager::isConnected()) {
        _textureCache.setFrameBufferSize(OculusManager::getRenderTargetSize());
    } else {
        _textureCache.setFrameBufferSize(_glWidget->size());
    }

    glEnable(GL_LINE_SMOOTH);

    if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {

@@ -573,28 +583,16 @@
        _myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());

    } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
        //Note, the camera distance is set in Camera::setMode() so we dont have to do it here.
        _myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing
        _myCamera.setTargetPosition(_myAvatar->getUprightHeadPosition());
        _myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());
        _myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation());

    } else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
        _myCamera.setTightness(0.0f);
        glm::vec3 eyePosition = _myAvatar->getHead()->calculateAverageEyePosition();
        float headHeight = eyePosition.y - _myAvatar->getPosition().y;
        _myCamera.setDistance(MIRROR_FULLSCREEN_DISTANCE * _scaleMirror);
        _myCamera.setTargetPosition(_myAvatar->getPosition() + glm::vec3(0, headHeight + (_raiseMirror * _myAvatar->getScale()), 0));
        _myCamera.setTargetRotation(_myAvatar->getWorldAlignedOrientation() * glm::quat(glm::vec3(0.0f, PI + _rotateMirror, 0.0f)));
    }

    if (OculusManager::isConnected()) {
        // Oculus in third person causes nausea, so only allow it if option is checked in dev menu
        if (!Menu::getInstance()->isOptionChecked(MenuOption::AllowOculusCameraModeChange) || _myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
            _myCamera.setDistance(0.0f);
            _myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
            _myCamera.setTargetRotation(_myAvatar->getHead()->getCameraOrientation());
        }
        _myCamera.setUpShift(0.0f);
        _myCamera.setTightness(0.0f); // Camera is directly connected to head without smoothing
        _myCamera.setTargetPosition(_myAvatar->getHead()->calculateAverageEyePosition());
    }

    // Update camera position

@@ -629,16 +627,32 @@
        updateShadowMap();
    }

    //If we aren't using the glow shader, we have to clear the color and depth buffer
    if (!glowEnabled) {
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    }

    if (OculusManager::isConnected()) {
        OculusManager::display(whichCamera);
        //When in mirror mode, use camera rotation. Otherwise, use body rotation
        if (whichCamera.getMode() == CAMERA_MODE_MIRROR) {
            OculusManager::display(whichCamera.getRotation(), whichCamera.getPosition(), whichCamera);
        } else {
            OculusManager::display(_myAvatar->getWorldAlignedOrientation(), whichCamera.getPosition(), whichCamera);
        }

    } else if (TV3DManager::isConnected()) {
        _glowEffect.prepare();
        if (glowEnabled) {
            _glowEffect.prepare();
        }
        TV3DManager::display(whichCamera);
        _glowEffect.render();
        if (glowEnabled) {
            _glowEffect.render();
        }

    } else {
        _glowEffect.prepare();
        if (glowEnabled) {
            _glowEffect.prepare();
        }

        glMatrixMode(GL_MODELVIEW);
        glPushMatrix();

@@ -646,7 +660,9 @@
        displaySide(whichCamera);
        glPopMatrix();

        _glowEffect.render();
        if (glowEnabled) {
            _glowEffect.render();
        }

        if (Menu::getInstance()->isOptionChecked(MenuOption::Mirror)) {
            renderRearViewMirror(_mirrorViewRect);

@@ -3136,9 +3152,7 @@ void Application::resetSensors() {
    _faceshift.reset();
    _visage.reset();

    if (OculusManager::isConnected()) {
        OculusManager::reset();
    }
    OculusManager::reset();

    _prioVR.reset();
@@ -12,6 +12,7 @@
#include "Application.h"

#include "GLCanvas.h"
#include "devices/OculusManager.h"
#include <QMimeData>
#include <QUrl>
#include <QMainWindow>

@@ -41,8 +42,17 @@ void GLCanvas::initializeGL() {

void GLCanvas::paintGL() {
    if (!_throttleRendering && !Application::getInstance()->getWindow()->isMinimized()) {
        //Need accurate frame timing for the oculus rift
        if (OculusManager::isConnected()) {
            OculusManager::beginFrameTiming();
        }

        Application::getInstance()->paintGL();
        swapBuffers();

        if (OculusManager::isConnected()) {
            OculusManager::endFrameTiming();
        }
    }
}

@@ -102,8 +112,17 @@ void GLCanvas::activeChanged(Qt::ApplicationState state) {
void GLCanvas::throttleRender() {
    _frameTimer.start(_idleRenderInterval);
    if (!Application::getInstance()->getWindow()->isMinimized()) {
        //Need accurate frame timing for the oculus rift
        if (OculusManager::isConnected()) {
            OculusManager::beginFrameTiming();
        }

        Application::getInstance()->paintGL();
        swapBuffers();

        if (OculusManager::isConnected()) {
            OculusManager::endFrameTiming();
        }
    }
}
@@ -332,6 +332,8 @@ Menu::Menu() :

    addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Stars, Qt::Key_Asterisk, true);
    addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::Atmosphere, Qt::SHIFT | Qt::Key_A, true);

    addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::EnableGlowEffect, 0, true);
    addActionToQMenuAndActionHash(renderOptionsMenu,
        MenuOption::GlowMode,
        0,

@@ -400,7 +402,6 @@
    addCheckableActionToQMenuAndActionHash(avatarOptionsMenu, MenuOption::ChatCircling, 0, false);

    QMenu* oculusOptionsMenu = developerMenu->addMenu("Oculus Options");
    addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::AllowOculusCameraModeChange, 0, false);
    addCheckableActionToQMenuAndActionHash(oculusOptionsMenu, MenuOption::DisplayOculusOverlays, 0, true);

    QMenu* sixenseOptionsMenu = developerMenu->addMenu("Sixense Options");
@@ -295,7 +295,6 @@ private:
namespace MenuOption {
    const QString AboutApp = "About Interface";
    const QString AlignForearmsWithWrists = "Align Forearms with Wrists";
    const QString AllowOculusCameraModeChange = "Allow Oculus Camera Mode Change (Nausea)";
    const QString AlternateIK = "Alternate IK";
    const QString AmbientOcclusion = "Ambient Occlusion";
    const QString Animations = "Animations...";

@@ -352,6 +351,7 @@ namespace MenuOption {
    const QString DontFadeOnVoxelServerChanges = "Don't Fade In/Out on Voxel Server Changes";
    const QString EchoLocalAudio = "Echo Local Audio";
    const QString EchoServerAudio = "Echo Server Audio";
    const QString EnableGlowEffect = "Enable Glow Effect (Warning: Poor Oculus Performance)";
    const QString Enable3DTVMode = "Enable 3DTV Mode";
    const QString EnableVRMode = "Enable VR Mode";
    const QString ExpandMiscAvatarTiming = "Expand Misc MyAvatar Timing";
@@ -856,7 +856,9 @@ void MyAvatar::renderBody(RenderMode renderMode, float glowLevel) {
    renderAttachments(renderMode);

    // Render head so long as the camera isn't inside it
    if (shouldRenderHead(Application::getInstance()->getCamera()->getPosition(), renderMode)) {
    const Camera *camera = Application::getInstance()->getCamera();
    const glm::vec3 cameraPos = camera->getPosition() + (camera->getRotation() * glm::vec3(0.0f, 0.0f, 1.0f)) * camera->getDistance();
    if (shouldRenderHead(cameraPos, renderMode)) {
        getHead()->render(1.0f, modelRenderMode);
        if (Menu::getInstance()->isOptionChecked(MenuOption::StringHair)) {
            renderHair();

@@ -920,9 +922,16 @@ void MyAvatar::updateOrientation(float deltaTime) {
    glm::vec3 angularVelocity(yaw - head->getBaseYaw(), pitch - head->getBasePitch(), roll - head->getBaseRoll());
    head->setAngularVelocity(angularVelocity);

    head->setBaseYaw(yaw);
    head->setBasePitch(pitch);
    head->setBaseRoll(roll);
    //Invert yaw and roll when in mirror mode
    if (Application::getInstance()->getCamera()->getMode() == CAMERA_MODE_MIRROR) {
        head->setBaseYaw(-yaw);
        head->setBasePitch(pitch);
        head->setBaseRoll(-roll);
    } else {
        head->setBaseYaw(yaw);
        head->setBasePitch(pitch);
        head->setBaseRoll(roll);
    }

}
@@ -3,6 +3,7 @@
// interface/src/devices
//
// Created by Stephen Birarda on 5/9/13.
// Refactored by Ben Arnold on 6/30/2014
// Copyright 2012 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
@@ -11,67 +12,207 @@

#include "InterfaceConfig.h"

#include "OculusManager.h"

#include <QOpenGLFramebufferObject>

#include <glm/glm.hpp>

#include "Application.h"
#include "OculusManager.h"

#ifdef HAVE_LIBOVR

using namespace OVR;

ProgramObject OculusManager::_program;
int OculusManager::_textureLocation;
int OculusManager::_lensCenterLocation;
int OculusManager::_screenCenterLocation;
int OculusManager::_scaleLocation;
int OculusManager::_scaleInLocation;
int OculusManager::_hmdWarpParamLocation;
int OculusManager::_eyeToSourceUVScaleLocation;
int OculusManager::_eyeToSourceUVOffsetLocation;
int OculusManager::_eyeRotationStartLocation;
int OculusManager::_eyeRotationEndLocation;
int OculusManager::_positionAttributeLocation;
int OculusManager::_colorAttributeLocation;
int OculusManager::_texCoord0AttributeLocation;
int OculusManager::_texCoord1AttributeLocation;
int OculusManager::_texCoord2AttributeLocation;
bool OculusManager::_isConnected = false;

#ifdef HAVE_LIBOVR
using namespace OVR;
using namespace OVR::Util::Render;
ovrHmd OculusManager::_ovrHmd;
ovrHmdDesc OculusManager::_ovrHmdDesc;
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
ovrSizei OculusManager::_renderTargetSize;
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
ovrFrameTiming OculusManager::_hmdFrameTiming;
ovrRecti OculusManager::_eyeRenderViewport[ovrEye_Count];
unsigned int OculusManager::_frameIndex = 0;
bool OculusManager::_frameTimingActive = false;
bool OculusManager::_programInitialized = false;
Camera* OculusManager::_camera = NULL;

Ptr<DeviceManager> OculusManager::_deviceManager;
Ptr<HMDDevice> OculusManager::_hmdDevice;
Ptr<SensorDevice> OculusManager::_sensorDevice;
SensorFusion* OculusManager::_sensorFusion;
StereoConfig OculusManager::_stereoConfig;
#endif

void OculusManager::connect() {
#ifdef HAVE_LIBOVR
    System::Init();
    _deviceManager = *DeviceManager::Create();
    _hmdDevice = *_deviceManager->EnumerateDevices<HMDDevice>().CreateDevice();
    ovr_Initialize();

    if (_hmdDevice) {
    _ovrHmd = ovrHmd_Create(0);
    if (_ovrHmd) {
        _isConnected = true;

        _sensorDevice = *_hmdDevice->GetSensor();
        _sensorFusion = new SensorFusion;
        _sensorFusion->AttachToSensor(_sensorDevice);
        _sensorFusion->SetPredictionEnabled(true);

        ovrHmd_GetDesc(_ovrHmd, &_ovrHmdDesc);

        _eyeFov[0] = _ovrHmdDesc.DefaultEyeFov[0];
        _eyeFov[1] = _ovrHmdDesc.DefaultEyeFov[1];

        //Get texture size
        ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left,
                                                                _eyeFov[0], 1.0f);
        ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Right,
                                                                _eyeFov[1], 1.0f);
        _renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
        _renderTargetSize.h = recommendedTex0Size.h;
        if (_renderTargetSize.h < recommendedTex1Size.h) {
            _renderTargetSize.h = recommendedTex1Size.h;
        }

        _eyeRenderDesc[0] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Left, _eyeFov[0]);
        _eyeRenderDesc[1] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Right, _eyeFov[1]);

        ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest);

        ovrHmd_StartSensor(_ovrHmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection |
                           ovrSensorCap_Position,
                           ovrSensorCap_Orientation);

        if (!_camera) {
            _camera = new Camera;
        }

        if (!_programInitialized) {
            // Shader program
            _programInitialized = true;
            _program.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() + "shaders/oculus.vert");
            _program.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + "shaders/oculus.frag");
            _program.link();

            // Uniforms
            _textureLocation = _program.uniformLocation("texture");
            _eyeToSourceUVScaleLocation = _program.uniformLocation("EyeToSourceUVScale");
            _eyeToSourceUVOffsetLocation = _program.uniformLocation("EyeToSourceUVOffset");
            _eyeRotationStartLocation = _program.uniformLocation("EyeRotationStart");
            _eyeRotationEndLocation = _program.uniformLocation("EyeRotationEnd");

            // Attributes
            _positionAttributeLocation = _program.attributeLocation("position");
            _colorAttributeLocation = _program.attributeLocation("color");
            _texCoord0AttributeLocation = _program.attributeLocation("texCoord0");
            _texCoord1AttributeLocation = _program.attributeLocation("texCoord1");
            _texCoord2AttributeLocation = _program.attributeLocation("texCoord2");
        }

        //Generate the distortion VBOs
        generateDistortionMesh();

        HMDInfo info;
        _hmdDevice->GetDeviceInfo(&info);
        _stereoConfig.SetHMDInfo(info);

        _program.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() + "shaders/oculus.frag");
        _program.link();

        _textureLocation = _program.uniformLocation("texture");
        _lensCenterLocation = _program.uniformLocation("lensCenter");
        _screenCenterLocation = _program.uniformLocation("screenCenter");
        _scaleLocation = _program.uniformLocation("scale");
        _scaleInLocation = _program.uniformLocation("scaleIn");
        _hmdWarpParamLocation = _program.uniformLocation("hmdWarpParam");
    } else {
        _deviceManager.Clear();
        System::Destroy();
        _isConnected = false;
        ovrHmd_Destroy(_ovrHmd);
        ovr_Shutdown();
    }
#endif
}

//Disconnects and deallocates the OR
void OculusManager::disconnect() {
#ifdef HAVE_LIBOVR
    if (_isConnected) {
        _isConnected = false;
        ovrHmd_Destroy(_ovrHmd);
        ovr_Shutdown();

        //Free the distortion mesh data
        for (int i = 0; i < ovrEye_Count; i++) {
            if (_vertices[i] != 0) {
                glDeleteBuffers(1, &(_vertices[i]));
                _vertices[i] = 0;
            }
            if (_indices[i] != 0) {
                glDeleteBuffers(1, &(_indices[i]));
                _indices[i] = 0;
            }
        }
    }
#endif
}

#ifdef HAVE_LIBOVR
void OculusManager::generateDistortionMesh() {

    //Check if we already have the distortion mesh
    if (_vertices[0] != 0) {
        printf("WARNING: Tried to generate Oculus distortion mesh twice without freeing the VBOs.");
        return;
    }

    //Viewport for the render target for each eye
    _eyeRenderViewport[0].Pos = Vector2i(0, 0);
    _eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h);
    _eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0);
    _eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size;

    for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
        // Allocate and generate distortion mesh vertices
        ovrDistortionMesh meshData;
        ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmdDesc.DistortionCaps, &meshData);

        ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeRenderViewport[eyeNum],
                                       _UVScaleOffset[eyeNum]);

        // Parse the vertex data and create a render ready vertex buffer
        DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
        _meshSize[eyeNum] = meshData.IndexCount;

        // Convert the oculus vertex data to the DistortionVertex format.
        DistortionVertex* v = pVBVerts;
        ovrDistortionVertex* ov = meshData.pVertexData;
        for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
            v->pos.x = ov->Pos.x;
            v->pos.y = ov->Pos.y;
            v->texR.x = ov->TexR.x;
            v->texR.y = ov->TexR.y;
            v->texG.x = ov->TexG.x;
            v->texG.y = ov->TexG.y;
            v->texB.x = ov->TexB.x;
            v->texB.y = ov->TexB.y;
            v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
            v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
            v++;
            ov++;
        }

        //vertices
        glGenBuffers(1, &(_vertices[eyeNum]));
        glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
        glBufferData(GL_ARRAY_BUFFER, sizeof(DistortionVertex) * meshData.VertexCount, pVBVerts, GL_STATIC_DRAW);
        glBindBuffer(GL_ARRAY_BUFFER, 0);

        //indices
        glGenBuffers(1, &(_indices[eyeNum]));
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
        glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned short) * meshData.IndexCount, meshData.pIndexData, GL_STATIC_DRAW);
        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);

        //Now that we have the VBOs we can get rid of the mesh data
        OVR_FREE(pVBVerts);
        ovrHmd_DestroyDistortionMesh(&meshData);
    }

}
#endif

bool OculusManager::isConnected() {
#ifdef HAVE_LIBOVR
    return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
@@ -80,137 +221,237 @@ bool OculusManager::isConnected() {
#endif
}

//Begins the frame timing for oculus prediction purposes
void OculusManager::beginFrameTiming() {
#ifdef HAVE_LIBOVR

    if (_frameTimingActive) {
        printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
    }

    _hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
    _frameTimingActive = true;
#endif
}

//Ends frame timing
void OculusManager::endFrameTiming() {
#ifdef HAVE_LIBOVR
    ovrHmd_EndFrameTiming(_ovrHmd);
    _frameIndex++;
    _frameTimingActive = false;
#endif
}

//Sets the camera FoV and aspect ratio
void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) {
#ifdef HAVE_LIBOVR
    _stereoConfig.SetFullViewport(Viewport(0, 0, screenWidth, screenHeight));
    camera.setAspectRatio(_stereoConfig.GetAspect());
    camera.setFieldOfView(_stereoConfig.GetYFOVDegrees());
    camera.setAspectRatio(_renderTargetSize.w / _renderTargetSize.h);
    camera.setFieldOfView(atan(_eyeFov[0].UpTan) * DEGREES_PER_RADIAN * 2.0f);
#endif
}

void OculusManager::display(Camera& whichCamera) {
//Displays everything for the oculus, frame timing must be active
void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
#ifdef HAVE_LIBOVR
    //beginFrameTiming must be called before display
    if (!_frameTimingActive) {
        printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
        return;
    }

    ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();

    // We only need to render the overlays to a texture once, then we just render the texture as a quad
    // PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
    applicationOverlay.renderOverlay(true);
    const bool displayOverlays = Menu::getInstance()->isOptionChecked(MenuOption::DisplayOculusOverlays);

    Application::getInstance()->getGlowEffect()->prepare();

    // render the left eye view to the left side of the screen
    const StereoEyeParams& leftEyeParams = _stereoConfig.GetEyeRenderParams(StereoEye_Left);
    //Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        Application::getInstance()->getGlowEffect()->prepare();
    } else {
        Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->bind();
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    }

    ovrPosef eyeRenderPose[ovrEye_Count];

    _camera->setTightness(0.0f); // In first person, camera follows (untweaked) head exactly without delay
    _camera->setDistance(0.0f);
    _camera->setUpShift(0.0f);

    glMatrixMode(GL_PROJECTION);
    glPushMatrix();
    glLoadIdentity();
    glTranslatef(_stereoConfig.GetProjectionCenterOffset(), 0, 0);
    gluPerspective(whichCamera.getFieldOfView(), whichCamera.getAspectRatio(),
                   whichCamera.getNearClip(), whichCamera.getFarClip());

    glViewport(leftEyeParams.VP.x, leftEyeParams.VP.y, leftEyeParams.VP.w, leftEyeParams.VP.h);

    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();
    glLoadIdentity();
    glTranslatef(_stereoConfig.GetIPD() * 0.5f, 0, 0);

    Application::getInstance()->displaySide(whichCamera);

    glm::quat orientation;

    //Render each eye into an fbo
    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {

        if (displayOverlays) {
            applicationOverlay.displayOverlayTextureOculus(whichCamera);
        }

        // and the right eye to the right side
        const StereoEyeParams& rightEyeParams = _stereoConfig.GetEyeRenderParams(StereoEye_Right);
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glTranslatef(-_stereoConfig.GetProjectionCenterOffset(), 0, 0);
        gluPerspective(whichCamera.getFieldOfView(), whichCamera.getAspectRatio(),
                       whichCamera.getNearClip(), whichCamera.getFarClip());

        glViewport(rightEyeParams.VP.x, rightEyeParams.VP.y, rightEyeParams.VP.w, rightEyeParams.VP.h);
        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glTranslatef(_stereoConfig.GetIPD() * -0.5f, 0, 0);

        Application::getInstance()->displaySide(whichCamera);
        ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex];

        if (displayOverlays) {
            applicationOverlay.displayOverlayTextureOculus(whichCamera);
        //Set the camera rotation for this eye
        eyeRenderPose[eye] = ovrHmd_GetEyePose(_ovrHmd, eye);
        orientation.x = eyeRenderPose[eye].Orientation.x;
        orientation.y = eyeRenderPose[eye].Orientation.y;
        orientation.z = eyeRenderPose[eye].Orientation.z;
        orientation.w = eyeRenderPose[eye].Orientation.w;

        _camera->setTargetRotation(bodyOrientation * orientation);
        _camera->setTargetPosition(position);
        _camera->update(1.0f / Application::getInstance()->getFps());

        Matrix4f proj = ovrMatrix4f_Projection(_eyeRenderDesc[eye].Fov, whichCamera.getNearClip(), whichCamera.getFarClip(), true);
        proj.Transpose();

        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        glLoadMatrixf((GLfloat *)proj.M);

        glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y,
                   _eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h);

        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();
        glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z);

        Application::getInstance()->displaySide(*_camera);

        if (displayOverlays) {
            applicationOverlay.displayOverlayTextureOculus(*_camera);
        }
    }


    //Wait till time-warp to reduce latency
    ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);

    glPopMatrix();

    // restore our normal viewport
    const Viewport& fullViewport = _stereoConfig.GetFullViewport();
    glViewport(fullViewport.x, fullViewport.y, fullViewport.w, fullViewport.h);

    QOpenGLFramebufferObject* fbo = Application::getInstance()->getGlowEffect()->render(true);
    glBindTexture(GL_TEXTURE_2D, fbo->texture());
    //Full texture viewport for glow effect
    glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);

    //Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        QOpenGLFramebufferObject* fbo = Application::getInstance()->getGlowEffect()->render(true);
        glBindTexture(GL_TEXTURE_2D, fbo->texture());
    } else {
        Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->release();
        glBindTexture(GL_TEXTURE_2D, Application::getInstance()->getTextureCache()->getPrimaryFramebufferObject()->texture());
    }

    // restore our normal viewport
    glViewport(0, 0, Application::getInstance()->getGLWidget()->width(), Application::getInstance()->getGLWidget()->height());

    glMatrixMode(GL_PROJECTION);
    glPopMatrix();

    //Renders the distorted mesh onto the screen
    renderDistortionMesh(eyeRenderPose);

    glBindTexture(GL_TEXTURE_2D, 0);

#endif
}

#ifdef HAVE_LIBOVR
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {

    glLoadIdentity();
    gluOrtho2D(fullViewport.x, fullViewport.x + fullViewport.w, fullViewport.y, fullViewport.y + fullViewport.h);
    gluOrtho2D(0, Application::getInstance()->getGLWidget()->width(), 0, Application::getInstance()->getGLWidget()->height());

    glDisable(GL_DEPTH_TEST);

    // for reference on setting these values, see SDK file Samples/OculusRoomTiny/RenderTiny_Device.cpp

    float scaleFactor = 1.0 / _stereoConfig.GetDistortionScale();
    float aspectRatio = _stereoConfig.GetAspect();


    glDisable(GL_BLEND);
    _program.bind();
    _program.setUniformValue(_textureLocation, 0);
    const DistortionConfig& distortionConfig = _stereoConfig.GetDistortionConfig();
    _program.setUniformValue(_lensCenterLocation, (0.5 + distortionConfig.XCenterOffset * 0.5) * 0.5, 0.5);
    _program.setUniformValue(_screenCenterLocation, 0.25, 0.5);
    _program.setUniformValue(_scaleLocation, 0.25 * scaleFactor, 0.5 * scaleFactor * aspectRatio);
    _program.setUniformValue(_scaleInLocation, 4, 2 / aspectRatio);
    _program.setUniformValue(_hmdWarpParamLocation, distortionConfig.K[0], distortionConfig.K[1],
                             distortionConfig.K[2], distortionConfig.K[3]);

    glColor3f(1, 0, 1);
    glBegin(GL_QUADS);
    glTexCoord2f(0, 0);
    glVertex2f(0, 0);
    glTexCoord2f(0.5, 0);
    glVertex2f(leftEyeParams.VP.w, 0);
    glTexCoord2f(0.5, 1);
    glVertex2f(leftEyeParams.VP.w, leftEyeParams.VP.h);
    glTexCoord2f(0, 1);
    glVertex2f(0, leftEyeParams.VP.h);
    glEnd();

    _program.setUniformValue(_lensCenterLocation, 0.5 + (0.5 - distortionConfig.XCenterOffset * 0.5) * 0.5, 0.5);
    _program.setUniformValue(_screenCenterLocation, 0.75, 0.5);

    glBegin(GL_QUADS);
    glTexCoord2f(0.5, 0);
    glVertex2f(leftEyeParams.VP.w, 0);
    glTexCoord2f(1, 0);
    glVertex2f(fullViewport.w, 0);
    glTexCoord2f(1, 1);
    glVertex2f(fullViewport.w, leftEyeParams.VP.h);
    glTexCoord2f(0.5, 1);
    glVertex2f(leftEyeParams.VP.w, leftEyeParams.VP.h);
    glEnd();

    glEnable(GL_BLEND);
    glBindTexture(GL_TEXTURE_2D, 0);
    _program.enableAttributeArray(_positionAttributeLocation);
    _program.enableAttributeArray(_colorAttributeLocation);
    _program.enableAttributeArray(_texCoord0AttributeLocation);
    _program.enableAttributeArray(_texCoord1AttributeLocation);
    _program.enableAttributeArray(_texCoord2AttributeLocation);

    //Render the distortion meshes for each eye
    for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
        GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
        _program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
        GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y };
        _program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);

        ovrMatrix4f timeWarpMatrices[2];
        Matrix4f transposeMatrices[2];
        //Grabs the timewarp matrices to be used in the shader
        ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
        transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]);
        transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]);

        //Have to transpose the matrices before using them
        transposeMatrices[0].Transpose();
        transposeMatrices[1].Transpose();

        glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M);
        glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M);

        glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);

        //Set vertex attribute pointers
        glVertexAttribPointer(_positionAttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)0);
        glVertexAttribPointer(_texCoord0AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)8);
        glVertexAttribPointer(_texCoord1AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)16);
        glVertexAttribPointer(_texCoord2AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)24);
        glVertexAttribPointer(_colorAttributeLocation, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(DistortionVertex), (void *)32);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
        glDrawElements(GL_TRIANGLES, _meshSize[eyeNum], GL_UNSIGNED_SHORT, 0);
    }

    _program.disableAttributeArray(_positionAttributeLocation);
    _program.disableAttributeArray(_colorAttributeLocation);
    _program.disableAttributeArray(_texCoord0AttributeLocation);
    _program.disableAttributeArray(_texCoord1AttributeLocation);
    _program.disableAttributeArray(_texCoord2AttributeLocation);

    glEnable(GL_BLEND);
    glEnable(GL_DEPTH_TEST);
    _program.release();

    glPopMatrix();
#endif
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}
#endif

//Tries to reconnect to the sensors
void OculusManager::reset() {
#ifdef HAVE_LIBOVR
    _sensorFusion->Reset();
    disconnect();
    connect();
#endif
}

//Gets the current predicted angles from the oculus sensors
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
#ifdef HAVE_LIBOVR
    _sensorFusion->GetPredictedOrientation().GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
    ovrSensorState ss = ovrHmd_GetSensorState(_ovrHmd, _hmdFrameTiming.ScanoutMidpointSeconds);

    if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
        ovrPosef pose = ss.Predicted.Pose;
        Quatf orientation = Quatf(pose.Orientation);
        orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
    }
#endif
}

//Used to set the size of the glow framebuffers
QSize OculusManager::getRenderTargetSize() {
#ifdef HAVE_LIBOVR
    QSize rv;
    rv.setWidth(_renderTargetSize.w);
    rv.setHeight(_renderTargetSize.h);
    return rv;
#else
    return QSize(100, 100);
#endif
}
@@ -3,6 +3,7 @@
// interface/src/devices
//
// Created by Stephen Birarda on 5/9/13.
// Refactored by Ben Arnold on 6/30/2014
// Copyright 2012 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.

@@ -12,10 +13,9 @@
#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h

#include <iostream>

#ifdef HAVE_LIBOVR
#include <OVR.h>
#include "../src/Util/Util_Render_Stereo.h"
#endif

#include "renderer/ProgramObject.h"

@@ -28,38 +28,69 @@ class Camera;
class OculusManager {
public:
    static void connect();

    static void disconnect();
    static bool isConnected();

    static void beginFrameTiming();
    static void endFrameTiming();
    static void configureCamera(Camera& camera, int screenWidth, int screenHeight);

    static void display(Camera& whichCamera);

    static void display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
    static void reset();

    /// param \yaw[out] yaw in radians
    /// param \pitch[out] pitch in radians
    /// param \roll[out] roll in radians
    static void getEulerAngles(float& yaw, float& pitch, float& roll);

    static void updateYawOffset();
    static QSize getRenderTargetSize();

private:
#ifdef HAVE_LIBOVR
    static void generateDistortionMesh();
    static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);

    struct DistortionVertex {
        glm::vec2 pos;
        glm::vec2 texR;
        glm::vec2 texG;
        glm::vec2 texB;
        struct {
            GLubyte r;
            GLubyte g;
            GLubyte b;
            GLubyte a;
        } color;
    };

    static ProgramObject _program;
    //Uniforms
    static int _textureLocation;
    static int _lensCenterLocation;
    static int _screenCenterLocation;
    static int _scaleLocation;
    static int _scaleInLocation;
    static int _hmdWarpParamLocation;
    static int _eyeToSourceUVScaleLocation;
    static int _eyeToSourceUVOffsetLocation;
    static int _eyeRotationStartLocation;
    static int _eyeRotationEndLocation;
    //Attributes
    static int _positionAttributeLocation;
    static int _colorAttributeLocation;
    static int _texCoord0AttributeLocation;
    static int _texCoord1AttributeLocation;
    static int _texCoord2AttributeLocation;

    static bool _isConnected;

#ifdef HAVE_LIBOVR
    static OVR::Ptr<OVR::DeviceManager> _deviceManager;
    static OVR::Ptr<OVR::HMDDevice> _hmdDevice;
    static OVR::Ptr<OVR::SensorDevice> _sensorDevice;
    static OVR::SensorFusion* _sensorFusion;
    static OVR::Util::Render::StereoConfig _stereoConfig;
    static ovrHmd _ovrHmd;
    static ovrHmdDesc _ovrHmdDesc;
    static ovrFovPort _eyeFov[ovrEye_Count];
    static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
    static ovrSizei _renderTargetSize;
    static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
    static GLuint _vertices[ovrEye_Count];
    static GLuint _indices[ovrEye_Count];
    static GLsizei _meshSize[ovrEye_Count];
    static ovrFrameTiming _hmdFrameTiming;
    static ovrRecti _eyeRenderViewport[ovrEye_Count];
    static unsigned int _frameIndex;
    static bool _frameTimingActive;
    static bool _programInitialized;
    static Camera* _camera;
#endif
};
@@ -180,7 +180,7 @@ QOpenGLFramebufferObject* GlowEffect::render(bool toTexture) {
    glBindTexture(GL_TEXTURE_2D, oldDiffusedFBO->texture());

    _diffuseProgram->bind();
    QSize size = Application::getInstance()->getGLWidget()->size();
    QSize size = primaryFBO->size();
    _diffuseProgram->setUniformValue(_diffusionScaleLocation, 1.0f / size.width(), 1.0f / size.height());

    renderFullscreenQuad();
@@ -28,10 +28,12 @@ TextureCache::TextureCache() :
    _permutationNormalTextureID(0),
    _whiteTextureID(0),
    _blueTextureID(0),
    _primaryDepthTextureID(0),
    _primaryFramebufferObject(NULL),
    _secondaryFramebufferObject(NULL),
    _tertiaryFramebufferObject(NULL),
    _shadowFramebufferObject(NULL)
    _shadowFramebufferObject(NULL),
    _frameBufferSize(100, 100)
{
}

@@ -46,9 +48,41 @@ TextureCache::~TextureCache() {
        glDeleteTextures(1, &_primaryDepthTextureID);
    }

    delete _primaryFramebufferObject;
    delete _secondaryFramebufferObject;
    delete _tertiaryFramebufferObject;
    if (_primaryFramebufferObject) {
        delete _primaryFramebufferObject;
    }

    if (_secondaryFramebufferObject) {
        delete _secondaryFramebufferObject;
    }

    if (_tertiaryFramebufferObject) {
        delete _tertiaryFramebufferObject;
    }
}

void TextureCache::setFrameBufferSize(QSize frameBufferSize) {
    //If the size changed, we need to delete our FBOs
    if (_frameBufferSize != frameBufferSize) {
        _frameBufferSize = frameBufferSize;

        if (_primaryFramebufferObject) {
            delete _primaryFramebufferObject;
            _primaryFramebufferObject = NULL;
            glDeleteTextures(1, &_primaryDepthTextureID);
            _primaryDepthTextureID = 0;
        }

        if (_secondaryFramebufferObject) {
            delete _secondaryFramebufferObject;
            _secondaryFramebufferObject = NULL;
        }

        if (_tertiaryFramebufferObject) {
            delete _tertiaryFramebufferObject;
            _tertiaryFramebufferObject = NULL;
        }
    }
}

GLuint TextureCache::getPermutationNormalTextureID() {

@@ -131,13 +165,14 @@ QSharedPointer<NetworkTexture> TextureCache::getTexture(const QUrl& url, bool no
}

QOpenGLFramebufferObject* TextureCache::getPrimaryFramebufferObject() {

    if (!_primaryFramebufferObject) {
        _primaryFramebufferObject = createFramebufferObject();


        glGenTextures(1, &_primaryDepthTextureID);
        glBindTexture(GL_TEXTURE_2D, _primaryDepthTextureID);
        QSize size = Application::getInstance()->getGLWidget()->size();
        glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, size.width(), size.height(),

        glTexImage2D(GL_TEXTURE_2D, 0, GL_DEPTH_COMPONENT, _frameBufferSize.width(), _frameBufferSize.height(),
            0, GL_DEPTH_COMPONENT, GL_UNSIGNED_BYTE, 0);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);

@@ -230,7 +265,7 @@ QSharedPointer<Resource> TextureCache::createResource(const QUrl& url,
}

QOpenGLFramebufferObject* TextureCache::createFramebufferObject() {
    QOpenGLFramebufferObject* fbo = new QOpenGLFramebufferObject(Application::getInstance()->getGLWidget()->size());
    QOpenGLFramebufferObject* fbo = new QOpenGLFramebufferObject(_frameBufferSize);
    Application::getInstance()->getGLWidget()->installEventFilter(this);

    glBindTexture(GL_TEXTURE_2D, fbo->texture());
@@ -32,6 +32,9 @@ public:
    TextureCache();
    virtual ~TextureCache();

    /// Sets the desired texture resolution for the framebuffer objects.
    void setFrameBufferSize(QSize frameBufferSize);

    /// Returns the ID of the permutation/normal texture used for Perlin noise shader programs. This texture
    /// has two lines: the first, a set of random numbers in [0, 255] to be used as permutation offsets, and
    /// the second, a set of random unit vectors to be used as noise gradients.

@@ -94,6 +97,8 @@ private:

    QOpenGLFramebufferObject* _shadowFramebufferObject;
    GLuint _shadowDepthTextureID;

    QSize _frameBufferSize;
};

/// A simple object wrapper for an OpenGL texture.