Porting Oculus rendering to the 0.4.4 SDK

Brad Davis 2015-02-23 22:15:59 -08:00
parent 5ec0ad14c1
commit 670a566763
4 changed files with 488 additions and 220 deletions

GLCanvas.cpp

@@ -57,13 +57,15 @@ void GLCanvas::initializeGL() {
void GLCanvas::paintGL() {
if (!_throttleRendering && !Application::getInstance()->getWindow()->isMinimized()) {
//Need accurate frame timing for the oculus rift
Application::getInstance()->paintGL();
if (OculusManager::isConnected()) {
OculusManager::beginFrameTiming();
}
// FIXME abstract the functionality of various display modes out
// to enable supporting various HMDs as well as 2D screens through
// a single interface
if (!OculusManager::isConnected()) {
swapBuffers();
Application::getInstance()->paintGL();
swapBuffers();
if (OculusManager::isConnected()) {
OculusManager::endFrameTiming();
}
}
}
@@ -124,9 +126,16 @@ void GLCanvas::activeChanged(Qt::ApplicationState state) {
void GLCanvas::throttleRender() {
_frameTimer.start(_idleRenderInterval);
if (!Application::getInstance()->getWindow()->isMinimized()) {
//Need accurate frame timing for the oculus rift
if (OculusManager::isConnected()) {
OculusManager::beginFrameTiming();
}
Application::getInstance()->paintGL();
if (!OculusManager::isConnected()) {
swapBuffers();
swapBuffers();
if (OculusManager::isConnected()) {
OculusManager::endFrameTiming();
}
}
}
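Both GLCanvas hunks follow the same pattern: the render call stays wrapped in Oculus frame timing, and swapBuffers() is now skipped when the Rift is connected, since in 0.4.x SDK-distortion mode ovrHmd_EndFrame presents the frame itself. A best-effort reconstruction of the post-change throttleRender body, pieced together from the interleaved lines above (a sketch, not the verbatim committed code):

void GLCanvas::throttleRender() {
    _frameTimer.start(_idleRenderInterval);
    if (!Application::getInstance()->getWindow()->isMinimized()) {
        //Need accurate frame timing for the oculus rift
        if (OculusManager::isConnected()) {
            OculusManager::beginFrameTiming();
        }
        Application::getInstance()->paintGL();
        if (!OculusManager::isConnected()) {
            swapBuffers(); // only swap here when the SDK is not presenting the frame for us
        }
        if (OculusManager::isConnected()) {
            OculusManager::endFrameTiming();
        }
    }
}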

devices/OculusManager.cpp

@@ -28,24 +28,38 @@
#include <PathUtils.h>
#include <SharedUtil.h>
#include <UserActivityLogger.h>
#include "Application.h"
#ifdef HAVE_LIBOVR
#include "OVR_Math.h"
#include "OVR_Version.h"
using namespace OVR;
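// Iterates the given function over both eyes, ovrEye_Left then ovrEye_Right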
template <typename Function>
void for_each_eye(Function function) {
for (ovrEyeType eye = ovrEyeType::ovrEye_Left;
eye < ovrEyeType::ovrEye_Count;
eye = static_cast<ovrEyeType>(eye + 1)) {
function(eye);
}
}
ProgramObject OculusManager::_program;
int OculusManager::_textureLocation;
int OculusManager::_eyeToSourceUVScaleLocation;
int OculusManager::_eyeToSourceUVOffsetLocation;
int OculusManager::_eyeRotationStartLocation;
int OculusManager::_eyeRotationEndLocation;
int OculusManager::_positionAttributeLocation;
int OculusManager::_colorAttributeLocation;
int OculusManager::_texCoord0AttributeLocation;
int OculusManager::_texCoord1AttributeLocation;
int OculusManager::_texCoord2AttributeLocation;
bool OculusManager::_isConnected = false;
ovrHmd OculusManager::_ovrHmd;
ovrHmdDesc OculusManager::_ovrHmdDesc;
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
ovrVector3f OculusManager::_eyeOffset[ovrEye_Count];
ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count];
ovrSizei OculusManager::_renderTargetSize;
glm::mat4 OculusManager::_eyeProjection[ovrEye_Count];
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
ovrFrameTiming OculusManager::_hmdFrameTiming;
ovrRecti OculusManager::_eyeRenderViewport[ovrEye_Count];
unsigned int OculusManager::_frameIndex = 0;
bool OculusManager::_frameTimingActive = false;
bool OculusManager::_programInitialized = false;
@@ -63,97 +77,129 @@ glm::vec3 OculusManager::_calibrationPosition;
glm::quat OculusManager::_calibrationOrientation;
quint64 OculusManager::_calibrationStartTime;
int OculusManager::_calibrationMessage = NULL;
glm::vec3 OculusManager::_eyePositions[ovrEye_Count];
void OculusManager::init() {
#endif
glm::vec3 OculusManager::_leftEyePosition = glm::vec3();
glm::vec3 OculusManager::_rightEyePosition = glm::vec3();
void OculusManager::connect() {
#ifdef HAVE_LIBOVR
_calibrationState = UNCALIBRATED;
qDebug() << "Oculus SDK" << OVR_VERSION_STRING;
ovr_Initialize();
_ovrHmd = ovrHmd_Create(0);
if (_ovrHmd) {
// FIXME Not sure this is effective prior to starting rendering
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence);
ovrHmd_ConfigureTracking(_ovrHmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position |
ovrTrackingCap_MagYawCorrection,
ovrTrackingCap_Orientation);
if (!_isConnected) {
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
}
_isConnected = true;
#if defined(__APPLE__) || defined(_WIN32)
_eyeFov[0] = _ovrHmd->DefaultEyeFov[0];
_eyeFov[1] = _ovrHmd->DefaultEyeFov[1];
#else
ovrHmd_GetDesc(_ovrHmd, &_ovrHmdDesc);
_eyeFov[0] = _ovrHmdDesc.DefaultEyeFov[0];
_eyeFov[1] = _ovrHmdDesc.DefaultEyeFov[1];
#endif
//Get texture size
ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left,
_eyeFov[0], 1.0f);
ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Right,
_eyeFov[1], 1.0f);
_renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w;
_renderTargetSize.h = recommendedTex0Size.h;
if (_renderTargetSize.h < recommendedTex1Size.h) {
_renderTargetSize.h = recommendedTex1Size.h;
}
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
});
_eyeRenderDesc[0] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Left, _eyeFov[0]);
_eyeRenderDesc[1] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Right, _eyeFov[1]);
#if defined(__APPLE__) || defined(_WIN32)
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence);
#else
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest);
#endif
#if defined(__APPLE__) || defined(_WIN32)
ovrHmd_ConfigureTracking(_ovrHmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position |
ovrTrackingCap_MagYawCorrection,
ovrTrackingCap_Orientation);
#else
ovrHmd_StartSensor(_ovrHmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection |
ovrSensorCap_Position,
ovrSensorCap_Orientation);
#endif
if (!_camera) {
_camera = new Camera;
configureCamera(*_camera, 0, 0); // no need to use screen dimensions; they're ignored
}
if (!_programInitialized) {
// Shader program
_programInitialized = true;
_program.addShaderFromSourceFile(QGLShader::Vertex, PathUtils::resourcesPath() + "shaders/oculus.vert");
_program.addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/oculus.frag");
_program.link();
// Uniforms
_textureLocation = _program.uniformLocation("texture");
_eyeToSourceUVScaleLocation = _program.uniformLocation("EyeToSourceUVScale");
_eyeToSourceUVOffsetLocation = _program.uniformLocation("EyeToSourceUVOffset");
_eyeRotationStartLocation = _program.uniformLocation("EyeRotationStart");
_eyeRotationEndLocation = _program.uniformLocation("EyeRotationEnd");
// Attributes
_positionAttributeLocation = _program.attributeLocation("position");
_colorAttributeLocation = _program.attributeLocation("color");
_texCoord0AttributeLocation = _program.attributeLocation("texCoord0");
_texCoord1AttributeLocation = _program.attributeLocation("texCoord1");
_texCoord2AttributeLocation = _program.attributeLocation("texCoord2");
}
//Generate the distortion VBOs
generateDistortionMesh();
} else {
_isConnected = false;
// we're definitely not in "VR mode" so tell the menu that
Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false);
ovrHmd_Destroy(_ovrHmd);
ovr_Shutdown();
}
}
// FIXME store like the others
ovrGLTexture _eyeTextures[ovrEye_Count];
void OculusManager::connect() {
_calibrationState = UNCALIBRATED;
qDebug() << "Oculus SDK" << OVR_VERSION_STRING;
if (_ovrHmd) {
UserActivityLogger::getInstance().connectedDevice("hmd", "oculus");
MainWindow* applicationWindow = Application::getInstance()->getWindow();
ovrGLConfig cfg;
memset(&cfg, 0, sizeof(cfg));
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution;
cfg.OGL.Header.Multisample = 1;
int distortionCaps = 0
| ovrDistortionCap_Chromatic
| ovrDistortionCap_Vignette
| ovrDistortionCap_Overdrive
| ovrDistortionCap_TimeWarp;
ovrEyeRenderDesc eyeRenderDescs[2];
int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config,
distortionCaps, _eyeFov, eyeRenderDescs);
assert(configResult);
for_each_eye([&](ovrEyeType eye) {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
});
_renderTargetSize = { 0, 0 };
memset(_eyeTextures, 0, sizeof(_eyeTextures));
for_each_eye([&](ovrEyeType eye) {
//Get texture size
ovrSizei recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, eye, _eyeFov[eye], 1.0f);
auto & eyeTexture = _eyeTextures[eye].Texture.Header;
eyeTexture.API = ovrRenderAPI_OpenGL;
eyeTexture.RenderViewport.Size = recommendedTexSize;
eyeTexture.RenderViewport.Pos = { _renderTargetSize.w, 0 };
_renderTargetSize.h = std::max(_renderTargetSize.h, recommendedTexSize.h);
_renderTargetSize.w += recommendedTexSize.w;
const ovrEyeRenderDesc & erd = eyeRenderDescs[eye];
ovrMatrix4f ovrPerspectiveProjection = ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true);
_eyeProjection[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffset[eye] = erd.HmdToEyeViewOffset;
});
for_each_eye([&](ovrEyeType eye) {
ovrGLTexture & eyeTexture = _eyeTextures[eye];
eyeTexture.Texture.Header.TextureSize = _renderTargetSize;
});
}
#endif
}
//Disconnects and deallocates the OR
void OculusManager::disconnect() {
if (_ovrHmd) {
#ifdef HAVE_LIBOVR
if (_isConnected) {
_isConnected = false;
ovrHmd_Destroy(_ovrHmd);
_ovrHmd = nullptr;
ovr_Shutdown();
//Free the distortion mesh data
for (int i = 0; i < ovrEye_Count; i++) {
if (_vertices[i] != 0) {
glDeleteBuffers(1, &(_vertices[i]));
_vertices[i] = 0;
}
if (_indices[i] != 0) {
glDeleteBuffers(1, &(_indices[i]));
_indices[i] = 0;
}
}
}
#endif
}
#ifdef HAVE_LIBOVR
void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
glm::quat headOrientation = myAvatar->getHeadOrientation();
@@ -164,7 +210,9 @@ void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) {
+ headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE));
billboard->setRotation(headOrientation);
}
#endif
#ifdef HAVE_LIBOVR
void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
static QString instructionMessage = "Hold still to calibrate";
static QString progressMessage;
@@ -256,54 +304,148 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) {
}
}
#endif
void OculusManager::recalibrate() {
#ifdef HAVE_LIBOVR
_calibrationState = UNCALIBRATED;
#endif
}
void OculusManager::abandonCalibration() {
#ifdef HAVE_LIBOVR
_calibrationState = CALIBRATED;
if (_calibrationMessage) {
qDebug() << "Abandoned HMD calibration";
Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage);
_calibrationMessage = NULL;
}
#endif
}
#ifdef HAVE_LIBOVR
void OculusManager::generateDistortionMesh() {
//Check if we already have the distortion mesh
if (_vertices[0] != 0) {
printf("WARNING: Tried to generate Oculus distortion mesh twice without freeing the VBOs.");
return;
}
//Viewport for the render target for each eye
_eyeRenderViewport[0].Pos = Vector2i(0, 0);
_eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h);
_eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0);
_eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size;
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
// Allocate and generate distortion mesh vertices
ovrDistortionMesh meshData;
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmdDesc.DistortionCaps, &meshData);
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeRenderViewport[eyeNum],
_UVScaleOffset[eyeNum]);
// Parse the vertex data and create a render ready vertex buffer
DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
_meshSize[eyeNum] = meshData.IndexCount;
// Convert the oculus vertex data to the DistortionVertex format.
DistortionVertex* v = pVBVerts;
ovrDistortionVertex* ov = meshData.pVertexData;
for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
#if defined(__APPLE__) || defined(_WIN32)
v->pos.x = ov->ScreenPosNDC.x;
v->pos.y = ov->ScreenPosNDC.y;
v->texR.x = ov->TanEyeAnglesR.x;
v->texR.y = ov->TanEyeAnglesR.y;
v->texG.x = ov->TanEyeAnglesG.x;
v->texG.y = ov->TanEyeAnglesG.y;
v->texB.x = ov->TanEyeAnglesB.x;
v->texB.y = ov->TanEyeAnglesB.y;
#else
v->pos.x = ov->Pos.x;
v->pos.y = ov->Pos.y;
v->texR.x = ov->TexR.x;
v->texR.y = ov->TexR.y;
v->texG.x = ov->TexG.x;
v->texG.y = ov->TexG.y;
v->texB.x = ov->TexB.x;
v->texB.y = ov->TexB.y;
#endif
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
v++;
ov++;
}
//vertices
glGenBuffers(1, &(_vertices[eyeNum]));
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
glBufferData(GL_ARRAY_BUFFER, sizeof(DistortionVertex) * meshData.VertexCount, pVBVerts, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//indices
glGenBuffers(1, &(_indices[eyeNum]));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned short) * meshData.IndexCount, meshData.pIndexData, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Now that we have the VBOs we can get rid of the mesh data
OVR_FREE(pVBVerts);
ovrHmd_DestroyDistortionMesh(&meshData);
}
}
#endif
bool OculusManager::isConnected() {
return _ovrHmd && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
#ifdef HAVE_LIBOVR
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
#else
return false;
#endif
}
//Begins the frame timing for oculus prediction purposes
void OculusManager::beginFrameTiming() {
#ifdef HAVE_LIBOVR
if (_frameTimingActive) {
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
}
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
_frameTimingActive = true;
#endif
}
//Ends frame timing
void OculusManager::endFrameTiming() {
#ifdef HAVE_LIBOVR
ovrHmd_EndFrameTiming(_ovrHmd);
_frameIndex++;
_frameTimingActive = false;
#endif
}
//Sets the camera FoV and aspect ratio
void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) {
// FIXME the oculus projection matrix is asymmetrical, this will not work.
#ifdef HAVE_LIBOVR
camera.setAspectRatio(_renderTargetSize.w * 0.5f / _renderTargetSize.h);
camera.setFieldOfView(atan(_eyeFov[0].UpTan) * DEGREES_PER_RADIAN * 2.0f);
#endif
}
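As a quick sanity check of the formula above (a hypothetical snippet; the UpTan value of roughly 1.33 is an assumed DK2-class default, the real value comes from the SDK at runtime):

// Hypothetical check of the symmetric-FOV approximation used in configureCamera()
float upTan = 1.33f;                                           // assumed, not read from the SDK
float verticalFov = atan(upTan) * DEGREES_PER_RADIAN * 2.0f;   // roughly 106 degrees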
RenderArgs::RenderSide RENDER_SIDES[ovrEye_Count] = { RenderArgs::STEREO_LEFT, RenderArgs::STEREO_RIGHT };
//Displays everything for the oculus, frame timing must be active
void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
static const glm::vec2 topLeft(-1.0f, -1.0f);
static const glm::vec2 bottomRight(1.0f, 1.0f);
static const glm::vec2 texCoordTopLeft(0.0f, 0.0f);
static const glm::vec2 texCoordBottomRight(1.0f, 1.0f);
auto glCanvas = DependencyManager::get<GLCanvas>();
static bool attached = false;
if (!attached) {
attached = true;
void * nativeWindowHandle = (void*)(size_t)glCanvas->effectiveWinId();
if (nullptr != nativeWindowHandle) {
ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr);
}
#ifdef HAVE_LIBOVR
//beginFrameTiming must be called before display
if (!_frameTimingActive) {
printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
return;
}
ovrHmd_BeginFrame(_ovrHmd, ++_frameIndex);
ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();
// We only need to render the overlays to a texture once, then we just render the texture on the hemisphere
@@ -318,124 +460,263 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
}
ovrPosef eyeRenderPose[ovrEye_Count];
glMatrixMode(GL_PROJECTION);
glPushMatrix();
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glm::quat orientation;
glm::vec3 trackerPosition;
#if defined(__APPLE__) || defined(_WIN32)
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);
if (_calibrationState != CALIBRATED) {
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
glm::vec3 trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);
calibrate(trackerPosition, toGlm(ts.HeadPose.ThePose.Orientation));
trackerPosition = bodyOrientation * trackerPosition;
ovrQuatf ovrHeadOrientation = ts.HeadPose.ThePose.Orientation;
orientation = glm::quat(ovrHeadOrientation.w, ovrHeadOrientation.x, ovrHeadOrientation.y, ovrHeadOrientation.z);
calibrate(trackerPosition, orientation);
}
trackerPosition = bodyOrientation * trackerPosition;
#endif
//Render each eye into an fbo
ovrPosef eyeRenderPose[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, 0, _eyeOffset, eyeRenderPose, nullptr);
for_each_eye([&](int i) {
_activeEyeIndex = i;
ovrEyeType eye = _ovrHmd->EyeRenderOrder[_activeEyeIndex];
const ovrPosef & pose = eyeRenderPose[eye];
for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
_activeEyeIndex = eyeIndex;
#if defined(__APPLE__) || defined(_WIN32)
ovrEyeType eye = _ovrHmd->EyeRenderOrder[eyeIndex];
#else
ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex];
#endif
// Set the camera rotation for this eye
eyeRenderPose[eye] = ovrHmd_GetHmdPosePerEye(_ovrHmd, eye);
orientation.x = eyeRenderPose[eye].Orientation.x;
orientation.y = eyeRenderPose[eye].Orientation.y;
orientation.z = eyeRenderPose[eye].Orientation.z;
orientation.w = eyeRenderPose[eye].Orientation.w;
// Update the application camera with the latest HMD position
whichCamera.setHmdPosition(bodyOrientation * toGlm(pose.Position));
whichCamera.setHmdRotation(toGlm(pose.Orientation));
whichCamera.setHmdPosition(trackerPosition);
whichCamera.setHmdRotation(orientation);
// Update our camera to what the application camera is doing
_camera->setRotation(whichCamera.getRotation());
_camera->setPosition(whichCamera.getPosition());
RenderArgs::RenderSide renderSide = RENDER_SIDES[eye];
glm::vec3 thisEyePosition = position + toGlm(eyeRenderPose[eye].Position);
// Store the latest left and right eye render locations for things that need to know
_eyePositions[eye] = thisEyePosition;
glm::vec3 thisEyePosition = position + trackerPosition +
(bodyOrientation * glm::quat(orientation.x, orientation.y, orientation.z, orientation.w) *
glm::vec3(_eyeRenderDesc[eye].HmdToEyeViewOffset.x, _eyeRenderDesc[eye].HmdToEyeViewOffset.y, _eyeRenderDesc[eye].HmdToEyeViewOffset.z));
RenderArgs::RenderSide renderSide = RenderArgs::STEREO_LEFT;
if (eyeIndex == 0) {
_leftEyePosition = thisEyePosition;
} else {
_rightEyePosition = thisEyePosition;
renderSide = RenderArgs::STEREO_RIGHT;
}
_camera->update(1.0f / Application::getInstance()->getFps());
glMatrixMode(GL_PROJECTION);
glLoadIdentity();
const ovrFovPort& port = _eyeFov[eye];
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
float nearClip = whichCamera.getNearClip(), farClip = whichCamera.getFarClip();
glFrustum(-nearClip * port.LeftTan, nearClip * port.RightTan, -nearClip * port.DownTan,
nearClip * port.UpTan, nearClip, farClip);
glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y,
_eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h);
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
auto vp = _eyeTextures[eye].Texture.Header.RenderViewport;
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
// HACK: instead of passing the stereo eye offset directly in the matrix, pass it in the camera offset
//glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z);
//_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].ViewAdjust.x, -_eyeRenderDesc[eye].ViewAdjust.y, -_eyeRenderDesc[eye].ViewAdjust.z));
_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].HmdToEyeViewOffset.x, -_eyeRenderDesc[eye].HmdToEyeViewOffset.y, -_eyeRenderDesc[eye].HmdToEyeViewOffset.z));
Application::getInstance()->displaySide(*_camera, false, RenderArgs::MONO);
applicationOverlay.displayOverlayTextureOculus(*_camera);
});
_activeEyeIndex = -1;
_activeEyeIndex = -1;
}
//Wait till time-warp to reduce latency
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
glPopMatrix();
//Full texture viewport for glow effect
glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);
int finalTexture = 0;
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
QOpenGLFramebufferObject* fbo = DependencyManager::get<GlowEffect>()->render(true);
finalTexture = fbo->texture();
glBindTexture(GL_TEXTURE_2D, fbo->texture());
} else {
DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->release();
finalTexture = DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->texture();
glBindTexture(GL_TEXTURE_2D, DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->texture());
}
for_each_eye([&](int eye) {
_eyeTextures[eye].OGL.TexId = finalTexture;
});
// restore our normal viewport
auto glCanvas = DependencyManager::get<GLCanvas>();
glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());
glMatrixMode(GL_PROJECTION);
glPopMatrix();
// restore our normal viewport
glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());
//Renders the distorted mesh onto the screen
renderDistortionMesh(eyeRenderPose);
glBindTexture(GL_TEXTURE_2D, 0);
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, &(_eyeTextures[0].Texture));
#endif
}
#ifdef HAVE_LIBOVR
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
glLoadIdentity();
auto glCanvas = DependencyManager::get<GLCanvas>();
glOrtho(0, glCanvas->getDeviceWidth(), 0, glCanvas->getDeviceHeight(), -1.0, 1.0);
glDisable(GL_DEPTH_TEST);
glDisable(GL_BLEND);
_program.bind();
_program.setUniformValue(_textureLocation, 0);
_program.enableAttributeArray(_positionAttributeLocation);
_program.enableAttributeArray(_colorAttributeLocation);
_program.enableAttributeArray(_texCoord0AttributeLocation);
_program.enableAttributeArray(_texCoord1AttributeLocation);
_program.enableAttributeArray(_texCoord2AttributeLocation);
//Render the distortion meshes for each eye
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
_program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y };
_program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);
ovrMatrix4f timeWarpMatrices[2];
Matrix4f transposeMatrices[2];
//Grabs the timewarp matrices to be used in the shader
ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]);
transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]);
//Have to transpose the matrices before using them
transposeMatrices[0].Transpose();
transposeMatrices[1].Transpose();
glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M);
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M);
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
//Set vertex attribute pointers
glVertexAttribPointer(_positionAttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)0);
glVertexAttribPointer(_texCoord0AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)8);
glVertexAttribPointer(_texCoord1AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)16);
glVertexAttribPointer(_texCoord2AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)24);
glVertexAttribPointer(_colorAttributeLocation, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(DistortionVertex), (void *)32);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glDrawElements(GL_TRIANGLES, _meshSize[eyeNum], GL_UNSIGNED_SHORT, 0);
}
_program.disableAttributeArray(_positionAttributeLocation);
_program.disableAttributeArray(_colorAttributeLocation);
_program.disableAttributeArray(_texCoord0AttributeLocation);
_program.disableAttributeArray(_texCoord1AttributeLocation);
_program.disableAttributeArray(_texCoord2AttributeLocation);
glEnable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
_program.release();
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
#endif
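The attribute offsets hard-coded above (0, 8, 16, 24, 32) mirror the DistortionVertex layout declared in the header: four tightly packed glm::vec2 members followed by four GLubytes. A hypothetical refactor, not part of this commit, that derives the offsets from the struct instead of using magic numbers:

#include <cstddef>  // offsetof
#define DV_OFFSET(field) reinterpret_cast<void*>(offsetof(DistortionVertex, field))
glVertexAttribPointer(_positionAttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), DV_OFFSET(pos));       // 0
glVertexAttribPointer(_texCoord0AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), DV_OFFSET(texR));     // 8
glVertexAttribPointer(_texCoord1AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), DV_OFFSET(texG));     // 16
glVertexAttribPointer(_texCoord2AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), DV_OFFSET(texB));     // 24
glVertexAttribPointer(_colorAttributeLocation, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(DistortionVertex), DV_OFFSET(color)); // 32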
//Tries to reconnect to the sensors
void OculusManager::reset() {
if (_ovrHmd) {
#ifdef HAVE_LIBOVR
if (_isConnected) {
ovrHmd_RecenterPose(_ovrHmd);
}
#endif
}
//Gets the current predicted angles from the oculus sensors
void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
#ifdef HAVE_LIBOVR
#if defined(__APPLE__) || defined(_WIN32)
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
#else
ovrSensorState ss = ovrHmd_GetSensorState(_ovrHmd, _hmdFrameTiming.ScanoutMidpointSeconds);
#endif
#if defined(__APPLE__) || defined(_WIN32)
if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
#else
if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) {
#endif
#if defined(__APPLE__) || defined(_WIN32)
ovrPosef headPose = ts.HeadPose.ThePose;
glm::vec3 angles = safeEulerAngles(toGlm(headPose.Orientation));
yaw = angles.y;
pitch = angles.x;
roll = angles.z;
#else
ovrPosef headPose = ss.Predicted.Pose;
#endif
Quatf orientation = Quatf(headPose.Orientation);
orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
} else {
yaw = 0.0f;
pitch = 0.0f;
roll = 0.0f;
}
#else
yaw = 0.0f;
pitch = 0.0f;
roll = 0.0f;
#endif
}
glm::vec3 OculusManager::getRelativePosition() {
#if (defined(__APPLE__) || defined(_WIN32)) && HAVE_LIBOVR
ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
ovrVector3f headPosition = trackingState.HeadPose.ThePose.Position;
return glm::vec3(headPosition.x, headPosition.y, headPosition.z);
#else
// no positional tracking in Linux yet
return glm::vec3(0.0f, 0.0f, 0.0f);
#endif
}
//Used to set the size of the glow framebuffers
QSize OculusManager::getRenderTargetSize() {
#ifdef HAVE_LIBOVR
QSize rv;
rv.setWidth(_renderTargetSize.w);
rv.setHeight(_renderTargetSize.h);
return rv;
#else
return QSize(100, 100);
#endif
}
void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
#ifdef HAVE_LIBOVR
if (_activeEyeIndex != -1) {
const ovrFovPort& port = _eyeFov[_activeEyeIndex];
right = nearVal * port.RightTan;
@@ -443,10 +724,12 @@ void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bot
top = nearVal * port.UpTan;
bottom = -nearVal * port.DownTan;
}
#endif
}
int OculusManager::getHMDScreen() {
int hmdScreenIndex = -1; // unknown
#ifdef HAVE_LIBOVR
// TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
// we will simply assume the initialization code that set up _ovrHmd picked the best hmd
@@ -495,6 +778,7 @@ int OculusManager::getHMDScreen() {
screenNumber++;
}
}
#endif
return hmdScreenIndex;
}

devices/OculusManager.h

@@ -12,19 +12,13 @@
#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h
#define HAVE_LIBOVR 1
#ifdef HAVE_LIBOVR
#include <OVR_Version.h>
#include <OVR_Types.h>
#include <OVR_CAPI.h>
#include <OVR_CAPI_GL.h>
#endif
#include <ProgramObject.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
class Camera;
class PalmData;
@@ -33,14 +27,13 @@ class Text3DOverlay;
/// Handles interaction with the Oculus Rift.
class OculusManager {
public:
static void init();
static void connect();
static void disconnect();
static bool isConnected();
static void recalibrate();
static void abandonCalibration();
//static void beginFrameTiming();
//static void endFrameTiming();
static void beginFrameTiming();
static void endFrameTiming();
static void configureCamera(Camera& camera, int screenWidth, int screenHeight);
static void display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera);
static void reset();
@@ -55,21 +48,57 @@ public:
static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal,
float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane);
static glm::vec3 getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
static glm::vec3 getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
static glm::vec3 getLeftEyePosition() { return _leftEyePosition; }
static glm::vec3 getRightEyePosition() { return _rightEyePosition; }
static int getHMDScreen();
private:
#ifdef HAVE_LIBOVR
static void generateDistortionMesh();
static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);
static bool similarNames(const QString& nameA,const QString& nameB);
//static bool _isConnected;
struct DistortionVertex {
glm::vec2 pos;
glm::vec2 texR;
glm::vec2 texG;
glm::vec2 texB;
struct {
GLubyte r;
GLubyte g;
GLubyte b;
GLubyte a;
} color;
};
static ProgramObject _program;
//Uniforms
static int _textureLocation;
static int _eyeToSourceUVScaleLocation;
static int _eyeToSourceUVOffsetLocation;
static int _eyeRotationStartLocation;
static int _eyeRotationEndLocation;
//Attributes
static int _positionAttributeLocation;
static int _colorAttributeLocation;
static int _texCoord0AttributeLocation;
static int _texCoord1AttributeLocation;
static int _texCoord2AttributeLocation;
static bool _isConnected;
static ovrHmd _ovrHmd;
static ovrHmdDesc _ovrHmdDesc;
static ovrFovPort _eyeFov[ovrEye_Count];
static ovrVector3f _eyeOffset[ovrEye_Count];
static glm::mat4 _eyeProjection[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrSizei _renderTargetSize;
static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
static GLuint _vertices[ovrEye_Count];
static GLuint _indices[ovrEye_Count];
static GLsizei _meshSize[ovrEye_Count];
static ovrFrameTiming _hmdFrameTiming;
static ovrRecti _eyeRenderViewport[ovrEye_Count];
static unsigned int _frameIndex;
static bool _frameTimingActive;
@@ -98,61 +127,12 @@ private:
static quint64 _calibrationStartTime;
static int _calibrationMessage;
static glm::vec3 _eyePositions[ovrEye_Count];
#endif
static glm::vec3 _leftEyePosition;
static glm::vec3 _rightEyePosition;
};
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
return glm::transpose(glm::make_mat4(&om.M[0][0]));
}
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
}
inline glm::vec3 toGlm(const ovrVector3f & ov) {
return glm::make_vec3(&ov.x);
}
inline glm::vec2 toGlm(const ovrVector2f & ov) {
return glm::make_vec2(&ov.x);
}
inline glm::uvec2 toGlm(const ovrSizei & ov) {
return glm::uvec2(ov.w, ov.h);
}
inline glm::quat toGlm(const ovrQuatf & oq) {
return glm::make_quat(&oq.x);
}
inline glm::mat4 toGlm(const ovrPosef & op) {
glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation));
glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position));
return translation * orientation;
}
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
ovrMatrix4f result;
glm::mat4 transposed(glm::transpose(m));
memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16);
return result;
}
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
return{ v.x, v.y, v.z };
}
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
return{ v.x, v.y };
}
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
return{ v.x, v.y };
}
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
return{ q.x, q.y, q.z, q.w };
}
#endif // hifi_OculusManager_h
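For reference, a short hypothetical snippet (not part of the commit) showing how the conversion helpers above are meant to be used, mirroring the calls in OculusManager.cpp; erd and pose stand in for an ovrEyeRenderDesc and an ovrPosef already obtained from the SDK:

glm::mat4 projection = toGlm(ovrMatrix4f_Projection(erd.Fov, 0.01f, 100000.0f, true)); // OVR row-major -> glm column-major
glm::vec3 eyePosition = toGlm(pose.Position);      // ovrVector3f -> glm::vec3
glm::quat eyeRotation = toGlm(pose.Orientation);   // ovrQuatf -> glm::quat
ovrVector3f backAgain = ovrFromGlm(eyePosition);   // round-trip back to an SDK type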

main.cpp

@@ -18,8 +18,6 @@
#include "AddressManager.h"
#include "Application.h"
#include "devices/OculusManager.h"
#ifdef Q_OS_WIN
static BOOL CALLBACK enumWindowsCallback(HWND hWnd, LPARAM lParam) {
@@ -94,10 +92,7 @@ int main(int argc, const char* argv[]) {
usecTimestampNowForceClockSkew(clockSkew);
qDebug("clockSkewOption=%s clockSkew=%d", clockSkewOption, clockSkew);
}
// Oculus initialization MUST PRECEDE OpenGL context creation.
// The nature of the Application constructor means this has to be either here,
// or in the main window ctor, before GL startup.
OculusManager::init();
int exitCode;
{
QSettings::setDefaultFormat(QSettings::IniFormat);