Working on oculus build

This commit is contained in:
Brad Davis 2015-02-20 09:32:25 -08:00
parent 9962166a1c
commit 16feaceab5
6 changed files with 311 additions and 297 deletions

22
cmake/externals/LibOVR/CMakeLists.txt vendored Normal file
View file

@ -0,0 +1,22 @@
set(EXTERNAL_NAME LibOVR)

include(ExternalProject)

# Download and build the (community fork of the) Oculus SDK as an external
# project, installing it into this build tree so the parent project can link it.
# TODO(review): no GIT_TAG is pinned, so the build tracks the fork's default
# branch and is not reproducible — pin a tag/commit once the fork stabilizes.
ExternalProject_Add(
  ${EXTERNAL_NAME}
  PREFIX ${EXTERNAL_NAME}
  GIT_REPOSITORY https://github.com/jherico/OculusSDK.git
  CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
  LOG_DOWNLOAD ON
)

ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)

string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)

# Export the include dir and platform-specific static library for consumers.
# Fixed: the original `CACHE TYPE STRING` was a malformed set() signature —
# the form is set(<var> <value> CACHE <type> <docstring>), and "TYPE" is not a
# valid cache type. Use PATH/FILEPATH with a proper docstring.
set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE PATH "LibOVR include directory")
if (WIN32)
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${INSTALL_DIR}/lib/ovr.lib CACHE FILEPATH "LibOVR library")
else ()
  set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${INSTALL_DIR}/lib/libovr.a CACHE FILEPATH "LibOVR library")
endif ()

View file

@ -24,42 +24,15 @@ hifi_library_search_hints("libovr")
include(SelectLibraryConfigurations)
if (NOT ANDROID)
include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
hifi_library_search_hints("libovr")
find_path(LIBOVR_INCLUDE_DIRS OVR_CAPI.h PATH_SUFFIXES include HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_LIBRARIES ovr PATH_SUFFIXES lib HINTS ${LIBOVR_SEARCH_DIRS})
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LIBOVR DEFAULT_MSG LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES)
find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS})
find_path(LIBOVR_SRC_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${LIBOVR_SEARCH_DIRS})
if (APPLE)
find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Mac/Debug HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Mac/Release HINTS ${LIBOVR_SEARCH_DIRS})
find_library(ApplicationServices ApplicationServices)
find_library(IOKit IOKit)
elseif (UNIX)
find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/)
find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/)
if (CMAKE_CL_64)
set(LINUX_ARCH_DIR "i386")
else()
set(LINUX_ARCH_DIR "x86_64")
endif()
find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Linux/Debug/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Linux/Release/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS})
select_library_configurations(UDEV)
select_library_configurations(XINERAMA)
elseif (WIN32)
if (MSVC10)
find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS})
elseif (MSVC12)
find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS})
endif ()
find_package(ATL)
endif ()
else (NOT ANDROID)
set(_VRLIB_JNI_DIR "VRLib/jni")
set(_VRLIB_LIBS_DIR "VRLib/obj/local/armeabi-v7a")
@ -76,31 +49,4 @@ else (NOT ANDROID)
find_library(TURBOJPEG_LIBRARY NAMES jpeg PATH_SUFFIXES 3rdParty/turbojpeg HINTS ${LIBOVR_SEARCH_DIRS})
endif (NOT ANDROID)
select_library_configurations(LIBOVR)
set(LIBOVR_LIBRARIES ${LIBOVR_LIBRARY})
list(APPEND LIBOVR_ARGS_LIST LIBOVR_INCLUDE_DIRS LIBOVR_SRC_DIR LIBOVR_LIBRARY)
if (APPLE)
list(APPEND LIBOVR_LIBRARIES ${IOKit} ${ApplicationServices})
list(APPEND LIBOVR_ARGS_LIST IOKit ApplicationServices)
elseif (ANDROID)
list(APPEND LIBOVR_ANDROID_LIBRARIES "-lGLESv3" "-lEGL" "-landroid" "-lOpenMAXAL" "-llog" "-lz" "-lOpenSLES")
list(APPEND LIBOVR_ARGS_LIST LIBOVR_ANDROID_LIBRARIES LIBOVR_VRLIB_DIR MINIZIP_DIR JNI_DIR TURBOJPEG_LIBRARY)
elseif (UNIX)
list(APPEND LIBOVR_LIBRARIES "${UDEV_LIBRARY}" "${XINERAMA_LIBRARY}")
list(APPEND LIBOVR_ARGS_LIST UDEV_LIBRARY XINERAMA_LIBRARY)
elseif (WIN32)
list(APPEND LIBOVR_LIBRARIES ${ATL_LIBRARIES})
list(APPEND LIBOVR_ARGS_LIST ATL_LIBRARIES)
endif ()
include(FindPackageHandleStandardArgs)
find_package_handle_standard_args(LibOVR DEFAULT_MSG ${LIBOVR_ARGS_LIST})
if (ANDROID)
list(APPEND LIBOVR_INCLUDE_DIRS ${LIBOVR_SRC_DIR} ${MINIZIP_DIR} ${JNI_DIR})
endif ()
mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES LIBOVR_SEARCH_DIRS)

View file

@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "PrioVR" "Sixense" "LeapMotion" "RtMidi" "Qxmpp" "SDL2" "RSSDK")
set(OPTIONAL_EXTERNALS "Faceshift" "PrioVR" "Sixense" "LeapMotion" "RtMidi" "Qxmpp" "SDL2" "RSSDK")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
@ -112,6 +112,11 @@ add_dependency_external_project(glm)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS})
add_dependency_external_project(LibOVR)
find_package(LibOVR REQUIRED)
target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS})
target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES})
# link required hifi libraries
link_hifi_libraries(shared octree environment gpu model fbx metavoxels networking entities avatars
audio audio-client animation script-engine physics

View file

@ -1,16 +0,0 @@
Instructions for adding the Oculus library (LibOVR) to Interface
Stephen Birarda, March 6, 2014
You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.4.1.
1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/libovr folder.
This readme.txt should be there as well.
You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects).
If so, our CMake find module expects you to set the environment variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above.
NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \libovr\Lib\Win32\ directory.
2. Clear your build directory, run cmake and build, and you should be all set.

View file

@ -335,9 +335,9 @@ void OculusManager::generateDistortionMesh() {
}
//Viewport for the render target for each eye
_eyeRenderViewport[0].Pos = Vector2i(0, 0);
_eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h);
_eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0);
_eyeRenderViewport[0].Pos = { 0, 0 }; // = Vector2i(0, 0);
_eyeRenderViewport[0].Size = { _renderTargetSize.w / 2, _renderTargetSize.h };
_eyeRenderViewport[1].Pos = { (_renderTargetSize.w + 1) / 2, 0 };
_eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size;
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
@ -349,7 +349,7 @@ void OculusManager::generateDistortionMesh() {
_UVScaleOffset[eyeNum]);
// Parse the vertex data and create a render ready vertex buffer
DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount);
DistortionVertex* pVBVerts = new DistortionVertex[meshData.VertexCount];
_meshSize[eyeNum] = meshData.IndexCount;
// Convert the oculus vertex data to the DistortionVertex format.
@ -394,7 +394,7 @@ void OculusManager::generateDistortionMesh() {
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Now that we have the VBOs we can get rid of the mesh data
OVR_FREE(pVBVerts);
delete [] pVBVerts;
ovrHmd_DestroyDistortionMesh(&meshData);
}
@ -439,223 +439,217 @@ void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenH
#endif
}
#ifdef HAVE_LIBOVR
// Converts an Oculus SDK vector to a glm vector.
// Fixed: returning std::move(local temporary) pessimizes the return — it
// inhibits copy elision (RVO); returning the value directly is strictly better.
// NOTE(review): a same-signature inline toGlm(const ovrVector3f&) also exists
// in OculusManager.h — confirm this .cpp duplicate is intentional (ODR risk).
glm::vec3 toGlm(const ovrVector3f & v) {
    return glm::vec3(v.x, v.y, v.z);
}
#endif
// Displays everything for the Oculus: renders the scene once per eye into the
// primary FBO (or the glow-effect FBO when that option is enabled), then runs
// the distortion-mesh pass to present the result on screen.
// Frame timing must be active: beginFrameTiming() must be called before display().
// Fixed: removed a ~140-line fully commented-out duplicate of this body that
// followed the live code (dead code).
void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) {
#ifdef HAVE_LIBOVR
    //beginFrameTiming must be called before display
    if (!_frameTimingActive) {
        printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first.");
        return;
    }

    ApplicationOverlay& applicationOverlay = Application::getInstance()->getApplicationOverlay();

    // We only need to render the overlays to a texture once, then we just render the texture on the hemisphere
    // PrioVR will only work if renderOverlay is called, calibration is connected to Application::renderingOverlay()
    applicationOverlay.renderOverlay(true);

    //Bind our framebuffer object. If we are rendering the glow effect, we let the glow effect shader take care of it
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        DependencyManager::get<GlowEffect>()->prepare();
    } else {
        DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->bind();
        glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
    }

    ovrPosef eyeRenderPose[ovrEye_Count];

    glMatrixMode(GL_PROJECTION);
    glPushMatrix();

    glMatrixMode(GL_MODELVIEW);
    glPushMatrix();

    glm::quat orientation;
    glm::vec3 trackerPosition;

#if defined(__APPLE__) || defined(_WIN32)
    ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
    ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;

    trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);

    if (_calibrationState != CALIBRATED) {
        ovrQuatf ovrHeadOrientation = ts.HeadPose.ThePose.Orientation;
        orientation = glm::quat(ovrHeadOrientation.w, ovrHeadOrientation.x, ovrHeadOrientation.y, ovrHeadOrientation.z);
        calibrate(trackerPosition, orientation);
    }

    // Rotate the tracked head position into the body's frame.
    trackerPosition = bodyOrientation * trackerPosition;
#endif

    // void ovrHmd_GetEyePoses(ovrHmd hmd, unsigned int frameIndex, ovrVector3f hmdToEyeViewOffset[2],
    //                         ovrPosef outEyePoses[2], ovrTrackingState* outHmdTrackingState);

    //Render each eye into an fbo
    ovrHmd_GetEyePoses(_ovrHmd, 0, _eyeOffset, eyeRenderPose, nullptr);
    for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) {
        _activeEyeIndex = eyeIndex;

#if defined(__APPLE__) || defined(_WIN32)
        ovrEyeType eye = _ovrHmd->EyeRenderOrder[eyeIndex];
#else
        ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex];
#endif
        // Set the camera rotation for this eye
        orientation.x = eyeRenderPose[eye].Orientation.x;
        orientation.y = eyeRenderPose[eye].Orientation.y;
        orientation.z = eyeRenderPose[eye].Orientation.z;
        orientation.w = eyeRenderPose[eye].Orientation.w;

        // Update the application camera with the latest HMD position
        whichCamera.setHmdPosition(trackerPosition);
        whichCamera.setHmdRotation(orientation);

        // Update our camera to what the application camera is doing
        _camera->setRotation(whichCamera.getRotation());
        _camera->setPosition(whichCamera.getPosition());

        // Store the latest left and right eye render locations for things that need to know
        glm::vec3 thisEyePosition = position + trackerPosition + toGlm(eyeRenderPose[eye].Position);

        // NOTE(review): renderSide is computed but never passed on — displaySide()
        // below is called with RenderArgs::MONO. Confirm whether this is intended.
        RenderArgs::RenderSide renderSide = RenderArgs::STEREO_LEFT;
        if (eyeIndex == 0) {
            _leftEyePosition = thisEyePosition;
        } else {
            _rightEyePosition = thisEyePosition;
            renderSide = RenderArgs::STEREO_RIGHT;
        }

        _camera->update(1.0f / Application::getInstance()->getFps());

        // Per-eye asymmetric projection frustum from the SDK-reported FOV tangents.
        glMatrixMode(GL_PROJECTION);
        glLoadIdentity();
        const ovrFovPort& port = _eyeFov[_activeEyeIndex];
        float nearClip = whichCamera.getNearClip(), farClip = whichCamera.getFarClip();
        glFrustum(-nearClip * port.LeftTan, nearClip * port.RightTan, -nearClip * port.DownTan,
            nearClip * port.UpTan, nearClip, farClip);

        glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y,
            _eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h);

        glMatrixMode(GL_MODELVIEW);
        glLoadIdentity();

        // HACK: instead of passing the stereo eye offset directly in the matrix, pass it in the camera offset
        //glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z);

        //_camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].ViewAdjust.x, -_eyeRenderDesc[eye].ViewAdjust.y, -_eyeRenderDesc[eye].ViewAdjust.z));
        Application::getInstance()->displaySide(*_camera, false, RenderArgs::MONO);

        applicationOverlay.displayOverlayTextureOculus(*_camera);
        _activeEyeIndex = -1;
    }

    //Wait till time-warp to reduce latency
    ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);

    glPopMatrix();

    //Full texture viewport for glow effect
    glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h);

    //Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
    if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
        QOpenGLFramebufferObject* fbo = DependencyManager::get<GlowEffect>()->render(true);
        glBindTexture(GL_TEXTURE_2D, fbo->texture());
    } else {
        DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->release();
        glBindTexture(GL_TEXTURE_2D, DependencyManager::get<TextureCache>()->getPrimaryFramebufferObject()->texture());
    }

    // restore our normal viewport
    auto glCanvas = DependencyManager::get<GLCanvas>();
    glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight());

    glMatrixMode(GL_PROJECTION);
    glPopMatrix();

    //Renders the distorted mesh onto the screen
    renderDistortionMesh(eyeRenderPose);

    glBindTexture(GL_TEXTURE_2D, 0);
#endif
}
#ifdef HAVE_LIBOVR
// Renders the precomputed per-eye distortion meshes to the screen, applying the
// SDK-supplied UV scale/offset and timewarp matrices in the distortion shader.
// Assumes the scene texture to be distorted is already bound to texture unit 0
// (the caller binds it before invoking this).
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {

    glLoadIdentity();
    auto glCanvas = DependencyManager::get<GLCanvas>();
    glOrtho(0, glCanvas->getDeviceWidth(), 0, glCanvas->getDeviceHeight(), -1.0, 1.0);

    // Depth test and blending are disabled for the full-screen distortion pass
    // and re-enabled at the end of this function.
    glDisable(GL_DEPTH_TEST);

    glDisable(GL_BLEND);
    _program.bind();
    _program.setUniformValue(_textureLocation, 0);

    _program.enableAttributeArray(_positionAttributeLocation);
    _program.enableAttributeArray(_colorAttributeLocation);
    _program.enableAttributeArray(_texCoord0AttributeLocation);
    _program.enableAttributeArray(_texCoord1AttributeLocation);
    _program.enableAttributeArray(_texCoord2AttributeLocation);

    //Render the distortion meshes for each eye
    for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
        // _UVScaleOffset[eye][0] is the UV scale, [eye][1] the UV offset.
        GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
        _program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
        GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y };
        _program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);

        ovrMatrix4f timeWarpMatrices[2];
        Matrix4f transposeMatrices[2];
        //Grabs the timewarp matrices to be used in the shader
        ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
        transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]);
        transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]);

        //Have to transpose the matrices before using them
        transposeMatrices[0].Transpose();
        transposeMatrices[1].Transpose();

        glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M);
        glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M);

        glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);

        //Set vertex attribute pointers
        // Byte offsets (0/8/16/24/32) correspond to the DistortionVertex layout:
        // presumably position, three float2 UV sets, then an RGBA byte color —
        // confirm against the DistortionVertex definition.
        glVertexAttribPointer(_positionAttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)0);
        glVertexAttribPointer(_texCoord0AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)8);
        glVertexAttribPointer(_texCoord1AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)16);
        glVertexAttribPointer(_texCoord2AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)24);
        glVertexAttribPointer(_colorAttributeLocation, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(DistortionVertex), (void *)32);

        glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
        glDrawElements(GL_TRIANGLES, _meshSize[eyeNum], GL_UNSIGNED_SHORT, 0);
    }

    _program.disableAttributeArray(_positionAttributeLocation);
    _program.disableAttributeArray(_colorAttributeLocation);
    _program.disableAttributeArray(_texCoord0AttributeLocation);
    _program.disableAttributeArray(_texCoord1AttributeLocation);
    _program.disableAttributeArray(_texCoord2AttributeLocation);

    // Restore the state disabled at the top of the pass.
    glEnable(GL_BLEND);
    glEnable(GL_DEPTH_TEST);
    _program.release();
    glBindBuffer(GL_ARRAY_BUFFER, 0);
}
#endif
// (Removed: a fully commented-out duplicate of renderDistortionMesh() previously
// lived here; it matched the live implementation above and was dead code.)
//Tries to reconnect to the sensors
void OculusManager::reset() {
@ -685,8 +679,10 @@ void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) {
#else
ovrPosef headPose = ss.Predicted.Pose;
#endif
Quatf orientation = Quatf(headPose.Orientation);
orientation.GetEulerAngles<Axis_Y, Axis_X, Axis_Z, Rotate_CCW, Handed_R>(&yaw, &pitch, &roll);
glm::vec3 angles = safeEulerAngles(toGlm(headPose.Orientation));
yaw = angles.y;
pitch = angles.x;
roll = angles.z;
} else {
yaw = 0.0f;
pitch = 0.0f;

View file

@ -13,11 +13,18 @@
#ifndef hifi_OculusManager_h
#define hifi_OculusManager_h
#define HAVE_LIBOVR 1
#ifdef HAVE_LIBOVR
#include <OVR.h>
#include <OVR_Version.h>
#include <OVR_Types.h>
#include <OVR_CAPI.h>
#include <OVR_CAPI_GL.h>
#endif
#include <ProgramObject.h>
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/type_ptr.hpp>
class Camera;
class PalmData;
@ -135,4 +142,58 @@ private:
};
// Converts an SDK matrix to glm. glm matrices are column-major, so the raw
// 16-float block is transposed after loading (ovrMatrix4f is presumably
// row-major — confirm against the SDK headers).
inline glm::mat4 toGlm(const ovrMatrix4f & om) {
    return glm::transpose(glm::make_mat4(&om.M[0][0]));
}
// Builds a glm projection matrix for an eye FOV port via the SDK's own
// projection helper, then converts it with toGlm(ovrMatrix4f). Near/far planes
// default to 0.01 / 10000. The trailing 'true' selects the SDK projection
// option flag (right-handedness in SDK 0.4 — TODO confirm).
inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) {
    return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true));
}
// Converts an SDK 3-component vector to glm, component by component.
inline glm::vec3 toGlm(const ovrVector3f & ov) {
    return glm::vec3(ov.x, ov.y, ov.z);
}
// Converts an SDK 2-component vector to glm, component by component.
inline glm::vec2 toGlm(const ovrVector2f & ov) {
    return glm::vec2(ov.x, ov.y);
}
// Converts an SDK size to an unsigned glm vector.
// NOTE(review): assumes w/h are non-negative — a negative component would wrap
// when stored into the unsigned vector; confirm callers never pass one.
inline glm::uvec2 toGlm(const ovrSizei & ov) {
    return glm::uvec2(ov.w, ov.h);
}
// Converts an SDK quaternion to glm by reading four contiguous floats starting
// at &oq.x. Assumes ovrQuatf stores x, y, z, w in that order, matching the
// layout glm::make_quat expects — TODO confirm for the SDK version in use.
inline glm::quat toGlm(const ovrQuatf & oq) {
    return glm::make_quat(&oq.x);
}
// Converts an SDK rigid-body pose to a glm transform: rotation applied first,
// then translation (T * R).
inline glm::mat4 toGlm(const ovrPosef & op) {
    return glm::translate(glm::mat4(), toGlm(op.Position)) * glm::mat4_cast(toGlm(op.Orientation));
}
// Converts a glm matrix back to the SDK layout: transpose, then copy the
// 16 floats wholesale.
inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) {
    const glm::mat4 transposed = glm::transpose(m);
    ovrMatrix4f result;
    memcpy(result.M, glm::value_ptr(transposed), sizeof(float) * 16);
    return result;
}
// Converts a glm 3-component vector to the SDK struct.
inline ovrVector3f ovrFromGlm(const glm::vec3 & v) {
    ovrVector3f result;
    result.x = v.x;
    result.y = v.y;
    result.z = v.z;
    return result;
}
// Converts a glm 2-component vector to the SDK struct.
inline ovrVector2f ovrFromGlm(const glm::vec2 & v) {
    ovrVector2f result;
    result.x = v.x;
    result.y = v.y;
    return result;
}
// Converts an unsigned glm size to the SDK's size struct.
// Fixed: brace-initializing from unsigned values is a narrowing conversion and
// ill-formed in C++11 when ovrSizei stores signed ints (as in SDK 0.4 —
// confirm); the explicit casts make the conversion well-formed and are
// value-preserving for sizes that fit in int.
inline ovrSizei ovrFromGlm(const glm::uvec2 & v) {
    return{ static_cast<int>(v.x), static_cast<int>(v.y) };
}
// Converts a glm quaternion to the SDK struct (x, y, z, w member order).
inline ovrQuatf ovrFromGlm(const glm::quat & q) {
    ovrQuatf result;
    result.x = q.x;
    result.y = q.y;
    result.z = q.z;
    result.w = q.w;
    return result;
}
#endif // hifi_OculusManager_h