diff --git a/CMakeLists.txt b/CMakeLists.txt index fdeb6dfd0f..16eed7b0fc 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -162,6 +162,7 @@ option(GET_GLM "Get GLM library automatically as external project" 1) option(GET_GVERB "Get Gverb library automatically as external project" 1) option(GET_SOXR "Get Soxr library automatically as external project" 1) option(GET_TBB "Get Threading Building Blocks library automatically as external project" 1) +option(GET_LIBOVR "Get LibOVR library automatically as external project" 1) option(USE_NSIGHT "Attempt to find the nSight libraries" 1) if (WIN32) diff --git a/cmake/externals/LibOVR/CMakeLists.txt b/cmake/externals/LibOVR/CMakeLists.txt new file mode 100644 index 0000000000..ef437921d4 --- /dev/null +++ b/cmake/externals/LibOVR/CMakeLists.txt @@ -0,0 +1,85 @@ +include(ExternalProject) +include(SelectLibraryConfigurations) + +set(EXTERNAL_NAME LibOVR) + +string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER) + +if (WIN32) + + ExternalProject_Add( + ${EXTERNAL_NAME} + URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip + URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9 + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" + LOG_DOWNLOAD 1 + ) + + ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR) + + # FIXME need to account for different architectures + set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL) + set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Windows/Win32/Release/VS2013/LibOVR.lib CACHE TYPE INTERNAL) + +elseif(APPLE) + + ExternalProject_Add( + ${EXTERNAL_NAME} + URL http://static.oculus.com/sdk-downloads/ovr_sdk_macos_0.5.0.1.tar.gz + URL_MD5 0a0785a04fb285f64f62267388344ad6 + CONFIGURE_COMMAND "" + BUILD_COMMAND "" + INSTALL_COMMAND "" + LOG_DOWNLOAD 1 + ) + + ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR) + # In theory we should use the Headers path inside the framework, as seen here + # but unfortunately Oculus doesn't seem to have figured out automated testing + # so they released a framework with missing headers. + #set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Lib/Mac/Release/LibOVR.framework/Headers/ CACHE TYPE INTERNAL) + + # Work around the broken framework by using a different path for the headers. 
+ set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include CACHE TYPE INTERNAL) + set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${SOURCE_DIR}/LibOVR/Lib/Mac/Release/LibOVR.framework/LibOVR CACHE TYPE INTERNAL) + + + +elseif(NOT ANDROID) + + # http://static.oculus.com/sdk-downloads/ovr_sdk_linux_0.4.4.tar.xz + # ec3bd8cff4a1461b4e21210e7feb0572 + ExternalProject_Add( + ${EXTERNAL_NAME} + PREFIX ${EXTERNAL_NAME} + GIT_REPOSITORY https://github.com/jherico/OculusSDK.git + GIT_TAG b9832379a401640c5f615ed75a60edaf09be64ef + CMAKE_ARGS -DCMAKE_INSTALL_PREFIX:PATH= + LOG_DOWNLOAD ON + ) + + ExternalProject_Get_Property(${EXTERNAL_NAME} SOURCE_DIR) + ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR) + + set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE ${INSTALL_DIR}/lib/libovr.a CACHE TYPE INTERNAL) + set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG "" CACHE TYPE INTERNAL) + + find_package(Threads REQUIRED) + find_package(X11 REQUIRED) + + # Check for XRandR (modern resolution switching and gamma control) + if (NOT X11_Xrandr_FOUND) + message(FATAL_ERROR "The RandR library and headers were not found") + endif() + + set(${EXTERNAL_NAME_UPPER}_LIBRARY_EXTRAS rt udev ${CMAKE_THREAD_LIBS_INIT} ${X11_X11_LIB} ${X11_Xrandr_LIB}) + + select_library_configurations(${EXTERNAL_NAME_UPPER}) + + set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${SOURCE_DIR}/LibOVR/Include ${SOURCE_DIR}/LibOVR/Src CACHE TYPE INTERNAL) + set(${EXTERNAL_NAME_UPPER}_LIBRARIES ${${EXTERNAL_NAME_UPPER}_LIBRARY} ${${EXTERNAL_NAME_UPPER}_LIBRARY_EXTRAS} CACHE TYPE INTERNAL) +endif() + + diff --git a/cmake/modules/FindLibOVR.cmake b/cmake/modules/FindLibOVR.cmake index 62d8313d63..df45a639cf 100644 --- a/cmake/modules/FindLibOVR.cmake +++ b/cmake/modules/FindLibOVR.cmake @@ -18,48 +18,12 @@ # See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html # -include("${MACRO_DIR}/HifiLibrarySearchHints.cmake") -hifi_library_search_hints("libovr") - -include(SelectLibraryConfigurations) if (NOT ANDROID) - find_path(LIBOVR_INCLUDE_DIRS OVR.h PATH_SUFFIXES Include HINTS ${LIBOVR_SEARCH_DIRS}) - find_path(LIBOVR_SRC_DIR Util_Render_Stereo.h PATH_SUFFIXES Src/Util HINTS ${LIBOVR_SEARCH_DIRS}) - - if (APPLE) - find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Mac/Debug HINTS ${LIBOVR_SEARCH_DIRS}) - find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Mac/Release HINTS ${LIBOVR_SEARCH_DIRS}) - find_library(ApplicationServices ApplicationServices) - find_library(IOKit IOKit) - elseif (UNIX) - find_library(UDEV_LIBRARY_RELEASE udev /usr/lib/x86_64-linux-gnu/) - find_library(XINERAMA_LIBRARY_RELEASE Xinerama /usr/lib/x86_64-linux-gnu/) - - if (CMAKE_CL_64) - set(LINUX_ARCH_DIR "i386") - else() - set(LINUX_ARCH_DIR "x86_64") - endif() - - find_library(LIBOVR_LIBRARY_DEBUG NAMES ovr PATH_SUFFIXES Lib/Linux/Debug/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS}) - find_library(LIBOVR_LIBRARY_RELEASE NAMES ovr PATH_SUFFIXES Lib/Linux/Release/${LINUX_ARCH_DIR} HINTS ${LIBOVR_SEARCH_DIRS}) - - select_library_configurations(UDEV) - select_library_configurations(XINERAMA) - - elseif (WIN32) - if (MSVC10) - find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS}) - find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES Lib/Win32/VS2010 HINTS ${LIBOVR_SEARCH_DIRS}) - elseif (MSVC12) - find_library(LIBOVR_LIBRARY_DEBUG NAMES libovrd PATH_SUFFIXES Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS}) - find_library(LIBOVR_LIBRARY_RELEASE NAMES libovr PATH_SUFFIXES 
Lib/Win32/VS2013 HINTS ${LIBOVR_SEARCH_DIRS}) - endif () - find_package(ATL) - endif () - + include(FindPackageHandleStandardArgs) + find_package_handle_standard_args(LIBOVR DEFAULT_MSG LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES) + else (NOT ANDROID) set(_VRLIB_JNI_DIR "VRLib/jni") set(_VRLIB_LIBS_DIR "VRLib/obj/local/armeabi-v7a") @@ -76,31 +40,4 @@ else (NOT ANDROID) find_library(TURBOJPEG_LIBRARY NAMES jpeg PATH_SUFFIXES 3rdParty/turbojpeg HINTS ${LIBOVR_SEARCH_DIRS}) endif (NOT ANDROID) -select_library_configurations(LIBOVR) -set(LIBOVR_LIBRARIES ${LIBOVR_LIBRARY}) - -list(APPEND LIBOVR_ARGS_LIST LIBOVR_INCLUDE_DIRS LIBOVR_SRC_DIR LIBOVR_LIBRARY) - -if (APPLE) - list(APPEND LIBOVR_LIBRARIES ${IOKit} ${ApplicationServices}) - list(APPEND LIBOVR_ARGS_LIST IOKit ApplicationServices) -elseif (ANDROID) - - list(APPEND LIBOVR_ANDROID_LIBRARIES "-lGLESv3" "-lEGL" "-landroid" "-lOpenMAXAL" "-llog" "-lz" "-lOpenSLES") - list(APPEND LIBOVR_ARGS_LIST LIBOVR_ANDROID_LIBRARIES LIBOVR_VRLIB_DIR MINIZIP_DIR JNI_DIR TURBOJPEG_LIBRARY) -elseif (UNIX) - list(APPEND LIBOVR_LIBRARIES "${UDEV_LIBRARY}" "${XINERAMA_LIBRARY}") - list(APPEND LIBOVR_ARGS_LIST UDEV_LIBRARY XINERAMA_LIBRARY) -elseif (WIN32) - list(APPEND LIBOVR_LIBRARIES ${ATL_LIBRARIES}) - list(APPEND LIBOVR_ARGS_LIST ATL_LIBRARIES) -endif () - -include(FindPackageHandleStandardArgs) -find_package_handle_standard_args(LibOVR DEFAULT_MSG ${LIBOVR_ARGS_LIST}) - -if (ANDROID) - list(APPEND LIBOVR_INCLUDE_DIRS ${LIBOVR_SRC_DIR} ${MINIZIP_DIR} ${JNI_DIR}) -endif () - mark_as_advanced(LIBOVR_INCLUDE_DIRS LIBOVR_LIBRARIES LIBOVR_SEARCH_DIRS) diff --git a/interface/CMakeLists.txt b/interface/CMakeLists.txt index dfe9689b27..3d829020e3 100644 --- a/interface/CMakeLists.txt +++ b/interface/CMakeLists.txt @@ -2,7 +2,7 @@ set(TARGET_NAME interface) project(${TARGET_NAME}) # set a default root dir for each of our optional externals if it was not passed -set(OPTIONAL_EXTERNALS "Faceshift" "LibOVR" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK") +set(OPTIONAL_EXTERNALS "Faceshift" "Sixense" "LeapMotion" "RtMidi" "SDL2" "RSSDK") foreach(EXTERNAL ${OPTIONAL_EXTERNALS}) string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE) if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR) @@ -110,6 +110,11 @@ add_dependency_external_projects(glm bullet) find_package(GLM REQUIRED) target_include_directories(${TARGET_NAME} PRIVATE ${GLM_INCLUDE_DIRS}) +add_dependency_external_projects(LibOVR) +find_package(LibOVR REQUIRED) +target_include_directories(${TARGET_NAME} PRIVATE ${LIBOVR_INCLUDE_DIRS}) +target_link_libraries(${TARGET_NAME} ${LIBOVR_LIBRARIES}) + find_package(Bullet REQUIRED) target_include_directories(${TARGET_NAME} SYSTEM PRIVATE ${BULLET_INCLUDE_DIRS}) target_link_libraries(${TARGET_NAME} ${BULLET_LIBRARIES}) diff --git a/interface/external/libovr/readme.txt b/interface/external/libovr/readme.txt deleted file mode 100644 index f9db808d88..0000000000 --- a/interface/external/libovr/readme.txt +++ /dev/null @@ -1,16 +0,0 @@ - -Instructions for adding the Oculus library (LibOVR) to Interface -Stephen Birarda, March 6, 2014 - -You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.4.1. - -1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/libovr folder. - This readme.txt should be there as well. 
- - You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects). - If so our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above. - - NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \libovr\Lib\Win32\ directory. - -2. Clear your build directory, run cmake and build, and you should be all set. - diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index 5275b8c529..9d094e3e55 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -774,10 +774,6 @@ void Application::paintGL() { } if (OculusManager::isConnected()) { - //Clear the color buffer to ensure that there isnt any residual color - //Left over from when OR was not connected. - glClear(GL_COLOR_BUFFER_BIT); - //When in mirror mode, use camera rotation. Otherwise, use body rotation if (_myCamera.getMode() == CAMERA_MODE_MIRROR) { OculusManager::display(_myCamera.getRotation(), _myCamera.getPosition(), _myCamera); diff --git a/interface/src/GLCanvas.cpp b/interface/src/GLCanvas.cpp index b3c1db7b24..97709d2d53 100644 --- a/interface/src/GLCanvas.cpp +++ b/interface/src/GLCanvas.cpp @@ -66,9 +66,13 @@ void GLCanvas::paintGL() { } Application::getInstance()->paintGL(); - swapBuffers(); - - if (OculusManager::isConnected()) { + + if (!OculusManager::isConnected()) { + swapBuffers(); + } else { + if (OculusManager::allowSwap()) { + swapBuffers(); + } OculusManager::endFrameTiming(); } } diff --git a/interface/src/devices/OculusManager.cpp b/interface/src/devices/OculusManager.cpp index 4d864d8cec..db10298583 100644 --- a/interface/src/devices/OculusManager.cpp +++ b/interface/src/devices/OculusManager.cpp @@ -18,7 +18,8 @@ #include #include #include -#include +#include +#include #include @@ -29,12 +30,28 @@ #include #include +#include + #include "Application.h" -#ifdef HAVE_LIBOVR +template +void for_each_eye(Function function) { + for (ovrEyeType eye = ovrEyeType::ovrEye_Left; + eye < ovrEyeType::ovrEye_Count; + eye = static_cast(eye + 1)) { + function(eye); + } +} -using namespace OVR; +template +void for_each_eye(const ovrHmd & hmd, Function function) { + for (int i = 0; i < ovrEye_Count; ++i) { + ovrEyeType eye = hmd->EyeRenderOrder[i]; + function(eye); + } +} +#ifdef OVR_CLIENT_DISTORTION ProgramObject OculusManager::_program; int OculusManager::_textureLocation; int OculusManager::_eyeToSourceUVScaleLocation; @@ -46,24 +63,27 @@ int OculusManager::_colorAttributeLocation; int OculusManager::_texCoord0AttributeLocation; int OculusManager::_texCoord1AttributeLocation; int OculusManager::_texCoord2AttributeLocation; -bool OculusManager::_isConnected = false; - -ovrHmd OculusManager::_ovrHmd; -ovrHmdDesc OculusManager::_ovrHmdDesc; -ovrFovPort OculusManager::_eyeFov[ovrEye_Count]; -ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count]; -ovrSizei OculusManager::_renderTargetSize; ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2]; GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 }; GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 }; GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 }; ovrFrameTiming OculusManager::_hmdFrameTiming; -ovrRecti OculusManager::_eyeRenderViewport[ovrEye_Count]; +bool OculusManager::_programInitialized = false; +#endif + +ovrTexture 
OculusManager::_eyeTextures[ovrEye_Count]; +bool OculusManager::_isConnected = false; +ovrHmd OculusManager::_ovrHmd; +ovrFovPort OculusManager::_eyeFov[ovrEye_Count]; +ovrVector3f OculusManager::_eyeOffset[ovrEye_Count]; +ovrEyeRenderDesc OculusManager::_eyeRenderDesc[ovrEye_Count]; +ovrSizei OculusManager::_renderTargetSize; +glm::mat4 OculusManager::_eyeProjection[ovrEye_Count]; unsigned int OculusManager::_frameIndex = 0; bool OculusManager::_frameTimingActive = false; -bool OculusManager::_programInitialized = false; Camera* OculusManager::_camera = NULL; -int OculusManager::_activeEyeIndex = -1; +ovrEyeType OculusManager::_activeEye = ovrEye_Count; +bool OculusManager::_hswDismissed = false; float OculusManager::CALIBRATION_DELTA_MINIMUM_LENGTH = 0.02f; float OculusManager::CALIBRATION_DELTA_MINIMUM_ANGLE = 5.0f * RADIANS_PER_DEGREE; @@ -76,68 +96,86 @@ glm::vec3 OculusManager::_calibrationPosition; glm::quat OculusManager::_calibrationOrientation; quint64 OculusManager::_calibrationStartTime; int OculusManager::_calibrationMessage = NULL; +glm::vec3 OculusManager::_eyePositions[ovrEye_Count]; +// TODO expose this as a developer toggle +bool OculusManager::_eyePerFrameMode = false; +ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count; +ovrSizei OculusManager::_recommendedTexSize = { 0, 0 }; +float OculusManager::_offscreenRenderScale = 1.0; + +void OculusManager::initSdk() { + ovr_Initialize(); + _ovrHmd = ovrHmd_Create(0); + if (!_ovrHmd) { + _ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2); + } +} + +void OculusManager::shutdownSdk() { + ovrHmd_Destroy(_ovrHmd); + ovr_Shutdown(); +} + +void OculusManager::init() { +#ifdef OVR_DIRECT_MODE + initSdk(); #endif - -glm::vec3 OculusManager::_leftEyePosition = glm::vec3(); -glm::vec3 OculusManager::_rightEyePosition = glm::vec3(); +} void OculusManager::connect() { -#ifdef HAVE_LIBOVR +#ifndef OVR_DIRECT_MODE + initSdk(); +#endif _calibrationState = UNCALIBRATED; qDebug() << "Oculus SDK" << OVR_VERSION_STRING; - ovr_Initialize(); - - _ovrHmd = ovrHmd_Create(0); if (_ovrHmd) { if (!_isConnected) { UserActivityLogger::getInstance().connectedDevice("hmd", "oculus"); } _isConnected = true; - -#if defined(__APPLE__) || defined(_WIN32) - _eyeFov[0] = _ovrHmd->DefaultEyeFov[0]; - _eyeFov[1] = _ovrHmd->DefaultEyeFov[1]; -#else - ovrHmd_GetDesc(_ovrHmd, &_ovrHmdDesc); - _eyeFov[0] = _ovrHmdDesc.DefaultEyeFov[0]; - _eyeFov[1] = _ovrHmdDesc.DefaultEyeFov[1]; -#endif - //Get texture size - ovrSizei recommendedTex0Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, - _eyeFov[0], 1.0f); - ovrSizei recommendedTex1Size = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Right, - _eyeFov[1], 1.0f); - _renderTargetSize.w = recommendedTex0Size.w + recommendedTex1Size.w; - _renderTargetSize.h = recommendedTex0Size.h; - if (_renderTargetSize.h < recommendedTex1Size.h) { - _renderTargetSize.h = recommendedTex1Size.h; - } - _eyeRenderDesc[0] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Left, _eyeFov[0]); - _eyeRenderDesc[1] = ovrHmd_GetRenderDesc(_ovrHmd, ovrEye_Right, _eyeFov[1]); + for_each_eye([&](ovrEyeType eye) { + _eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye]; + }); -#if defined(__APPLE__) || defined(_WIN32) - ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence); -#else - ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_LatencyTest); -#endif + ovrGLConfig cfg; + memset(&cfg, 0, sizeof(cfg)); + cfg.OGL.Header.API = ovrRenderAPI_OpenGL; + cfg.OGL.Header.BackBufferSize = _ovrHmd->Resolution; + cfg.OGL.Header.Multisample = 1; + + int distortionCaps = 0 + 
| ovrDistortionCap_Vignette + | ovrDistortionCap_Overdrive + | ovrDistortionCap_TimeWarp; + + int configResult = ovrHmd_ConfigureRendering(_ovrHmd, &cfg.Config, + distortionCaps, _eyeFov, _eyeRenderDesc); + assert(configResult); + + + _recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f); + _renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h }; + for_each_eye([&](ovrEyeType eye) { + //Get texture size + _eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL; + _eyeTextures[eye].Header.TextureSize = _renderTargetSize; + _eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 }; + }); + _eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w; + + ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction); -#if defined(__APPLE__) || defined(_WIN32) ovrHmd_ConfigureTracking(_ovrHmd, ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, ovrTrackingCap_Orientation); -#else - ovrHmd_StartSensor(_ovrHmd, ovrSensorCap_Orientation | ovrSensorCap_YawCorrection | - ovrSensorCap_Position, - ovrSensorCap_Orientation); -#endif if (!_camera) { _camera = new Camera; configureCamera(*_camera, 0, 0); // no need to use screen dimensions; they're ignored } - +#ifdef OVR_CLIENT_DISTORTION if (!_programInitialized) { // Shader program _programInitialized = true; @@ -162,27 +200,27 @@ void OculusManager::connect() { //Generate the distortion VBOs generateDistortionMesh(); - +#endif } else { _isConnected = false; // we're definitely not in "VR mode" so tell the menu that Menu::getInstance()->getActionForOption(MenuOption::EnableVRMode)->setChecked(false); - - ovrHmd_Destroy(_ovrHmd); - ovr_Shutdown(); } -#endif } //Disconnects and deallocates the OR void OculusManager::disconnect() { -#ifdef HAVE_LIBOVR if (_isConnected) { _isConnected = false; - ovrHmd_Destroy(_ovrHmd); - ovr_Shutdown(); + // Prepare to potentially have to dismiss the HSW again + // if the user re-enables VR + _hswDismissed = false; +#ifndef OVR_DIRECT_MODE + shutdownSdk(); +#endif +#ifdef OVR_CLIENT_DISTORTION //Free the distortion mesh data for (int i = 0; i < ovrEye_Count; i++) { if (_vertices[i] != 0) { @@ -194,11 +232,10 @@ void OculusManager::disconnect() { _indices[i] = 0; } } - } #endif + } } -#ifdef HAVE_LIBOVR void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) { MyAvatar* myAvatar = DependencyManager::get()->getMyAvatar(); glm::quat headOrientation = myAvatar->getHeadOrientation(); @@ -209,9 +246,7 @@ void OculusManager::positionCalibrationBillboard(Text3DOverlay* billboard) { + headOrientation * glm::vec3(0.0f, 0.0f, -CALIBRATION_MESSAGE_DISTANCE)); billboard->setRotation(headOrientation); } -#endif -#ifdef HAVE_LIBOVR void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) { static QString instructionMessage = "Hold still to calibrate"; static QString progressMessage; @@ -303,26 +338,21 @@ void OculusManager::calibrate(glm::vec3 position, glm::quat orientation) { } } -#endif void OculusManager::recalibrate() { -#ifdef HAVE_LIBOVR _calibrationState = UNCALIBRATED; -#endif } void OculusManager::abandonCalibration() { -#ifdef HAVE_LIBOVR _calibrationState = CALIBRATED; if (_calibrationMessage) { qDebug() << "Abandoned HMD calibration"; Application::getInstance()->getOverlays().deleteOverlay(_calibrationMessage); _calibrationMessage = NULL; } -#endif } -#ifdef HAVE_LIBOVR +#ifdef OVR_CLIENT_DISTORTION void OculusManager::generateDistortionMesh() { //Check if we 
already have the distortion mesh @@ -331,29 +361,19 @@ void OculusManager::generateDistortionMesh() { return; } - //Viewport for the render target for each eye - _eyeRenderViewport[0].Pos = Vector2i(0, 0); - _eyeRenderViewport[0].Size = Sizei(_renderTargetSize.w / 2, _renderTargetSize.h); - _eyeRenderViewport[1].Pos = Vector2i((_renderTargetSize.w + 1) / 2, 0); - _eyeRenderViewport[1].Size = _eyeRenderViewport[0].Size; - for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) { // Allocate and generate distortion mesh vertices ovrDistortionMesh meshData; - ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmdDesc.DistortionCaps, &meshData); - - ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeRenderViewport[eyeNum], - _UVScaleOffset[eyeNum]); + ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmd->DistortionCaps, &meshData); // Parse the vertex data and create a render ready vertex buffer - DistortionVertex* pVBVerts = (DistortionVertex*)OVR_ALLOC(sizeof(DistortionVertex) * meshData.VertexCount); + DistortionVertex* pVBVerts = new DistortionVertex[meshData.VertexCount]; _meshSize[eyeNum] = meshData.IndexCount; // Convert the oculus vertex data to the DistortionVertex format. DistortionVertex* v = pVBVerts; ovrDistortionVertex* ov = meshData.pVertexData; for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) { -#if defined(__APPLE__) || defined(_WIN32) v->pos.x = ov->ScreenPosNDC.x; v->pos.y = ov->ScreenPosNDC.y; v->texR.x = ov->TanEyeAnglesR.x; @@ -362,16 +382,6 @@ void OculusManager::generateDistortionMesh() { v->texG.y = ov->TanEyeAnglesG.y; v->texB.x = ov->TanEyeAnglesB.x; v->texB.y = ov->TanEyeAnglesB.y; -#else - v->pos.x = ov->Pos.x; - v->pos.y = ov->Pos.y; - v->texR.x = ov->TexR.x; - v->texR.y = ov->TexR.y; - v->texG.x = ov->TexG.x; - v->texG.y = ov->TexG.y; - v->texB.x = ov->TexB.x; - v->texB.y = ov->TexB.y; -#endif v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f); v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f); v++; @@ -391,7 +401,7 @@ void OculusManager::generateDistortionMesh() { glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0); //Now that we have the VBOs we can get rid of the mesh data - OVR_FREE(pVBVerts); + delete [] pVBVerts; ovrHmd_DestroyDistortionMesh(&meshData); } @@ -399,46 +409,101 @@ void OculusManager::generateDistortionMesh() { #endif bool OculusManager::isConnected() { -#ifdef HAVE_LIBOVR return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode); -#else - return false; -#endif } //Begins the frame timing for oculus prediction purposes void OculusManager::beginFrameTiming() { -#ifdef HAVE_LIBOVR - if (_frameTimingActive) { printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming()."); } - _hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex); - _frameTimingActive = true; +#ifdef OVR_CLIENT_DISTORTION + _hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex); #endif + _frameTimingActive = true; +} + +bool OculusManager::allowSwap() { + return false; } //Ends frame timing void OculusManager::endFrameTiming() { -#ifdef HAVE_LIBOVR +#ifdef OVR_CLIENT_DISTORTION ovrHmd_EndFrameTiming(_ovrHmd); +#endif _frameIndex++; _frameTimingActive = false; -#endif } //Sets the camera FoV and aspect ratio void OculusManager::configureCamera(Camera& camera, int screenWidth, int screenHeight) { -#ifdef 
HAVE_LIBOVR camera.setAspectRatio(_renderTargetSize.w * 0.5f / _renderTargetSize.h); camera.setFieldOfView(atan(_eyeFov[0].UpTan) * DEGREES_PER_RADIAN * 2.0f); -#endif } +static bool timerActive = false; //Displays everything for the oculus, frame timing must be active void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera) { -#ifdef HAVE_LIBOVR + auto glCanvas = Application::getInstance()->getGLWidget(); + +#ifdef DEBUG + // Ensure the frame counter always increments by exactly 1 + static int oldFrameIndex = -1; + assert(oldFrameIndex == -1 || oldFrameIndex == _frameIndex - 1); + oldFrameIndex = _frameIndex; +#endif + + // Every so often do some additional timing calculations and debug output + bool debugFrame = 0 == _frameIndex % 400; + +#if 0 + // Try to measure the amount of time taken to do the distortion + // (does not seem to work on OSX with SDK based distortion) + // FIXME can't use a static object here, because it will cause a crash when the + // query attempts deconstruct after the GL context is gone. + static QOpenGLTimerQuery timerQuery; + if (!timerQuery.isCreated()) { + timerQuery.create(); + } + + if (timerActive && timerQuery.isResultAvailable()) { + auto result = timerQuery.waitForResult(); + if (result) { qDebug() << "Distortion took " << result << "ns"; }; + timerActive = false; + } +#endif + +#ifdef OVR_DIRECT_MODE + static bool attached = false; + if (!attached) { + attached = true; + void * nativeWindowHandle = (void*)(size_t)glCanvas->effectiveWinId(); + if (nullptr != nativeWindowHandle) { + ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr); + } + } +#endif + +#ifndef OVR_CLIENT_DISTORTION + // FIXME: we need a better way of responding to the HSW. In particular + // we need to ensure that it's only displayed once per session, rather than + // every time the user toggles VR mode, and we need to hook it up to actual + // keyboard input. 
OVR claim they are refactoring HSW + // https://forums.oculus.com/viewtopic.php?f=20&t=21720#p258599 + static ovrHSWDisplayState hasWarningState; + if (!_hswDismissed) { + ovrHmd_GetHSWDisplayState(_ovrHmd, &hasWarningState); + if (hasWarningState.Displayed) { + ovrHmd_DismissHSWDisplay(_ovrHmd); + } else { + _hswDismissed = true; + } + } +#endif + + //beginFrameTiming must be called before display if (!_frameTimingActive) { printf("WARNING: Called OculusManager::display() without calling OculusManager::beginFrameTiming() first."); @@ -459,7 +524,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT); } - ovrPosef eyeRenderPose[ovrEye_Count]; glMatrixMode(GL_PROJECTION); glPushMatrix(); @@ -470,7 +534,6 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p glm::quat orientation; glm::vec3 trackerPosition; -#if defined(__APPLE__) || defined(_WIN32) ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds()); ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position; @@ -483,105 +546,153 @@ void OculusManager::display(const glm::quat &bodyOrientation, const glm::vec3 &p } trackerPosition = bodyOrientation * trackerPosition; -#endif - + static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } }; + ovrPosef eyePoses[ovrEye_Count]; + ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr); + ovrHmd_BeginFrame(_ovrHmd, _frameIndex); + static ovrPosef eyeRenderPose[ovrEye_Count]; //Render each eye into an fbo - for (int eyeIndex = 0; eyeIndex < ovrEye_Count; eyeIndex++) { - _activeEyeIndex = eyeIndex; - -#if defined(__APPLE__) || defined(_WIN32) - ovrEyeType eye = _ovrHmd->EyeRenderOrder[eyeIndex]; -#else - ovrEyeType eye = _ovrHmdDesc.EyeRenderOrder[eyeIndex]; -#endif + for_each_eye(_ovrHmd, [&](ovrEyeType eye){ + // If we're in eye-per-frame mode, only render one eye + // per call to display, and allow timewarp to correct for + // the other eye. 
Poor man's perf improvement + if (_eyePerFrameMode && eye == _lastEyeRendered) { + return; + } + _lastEyeRendered = _activeEye = eye; + eyeRenderPose[eye] = eyePoses[eye]; // Set the camera rotation for this eye - eyeRenderPose[eye] = ovrHmd_GetEyePose(_ovrHmd, eye); orientation.x = eyeRenderPose[eye].Orientation.x; orientation.y = eyeRenderPose[eye].Orientation.y; orientation.z = eyeRenderPose[eye].Orientation.z; orientation.w = eyeRenderPose[eye].Orientation.w; - + // Update the application camera with the latest HMD position whichCamera.setHmdPosition(trackerPosition); whichCamera.setHmdRotation(orientation); - + // Update our camera to what the application camera is doing _camera->setRotation(whichCamera.getRotation()); _camera->setPosition(whichCamera.getPosition()); - + // Store the latest left and right eye render locations for things that need to know glm::vec3 thisEyePosition = position + trackerPosition + (bodyOrientation * glm::quat(orientation.x, orientation.y, orientation.z, orientation.w) * - glm::vec3(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z)); - - RenderArgs::RenderSide renderSide = RenderArgs::STEREO_LEFT; - if (eyeIndex == 0) { - _leftEyePosition = thisEyePosition; - } else { - _rightEyePosition = thisEyePosition; - renderSide = RenderArgs::STEREO_RIGHT; - } + glm::vec3(_eyeRenderDesc[eye].HmdToEyeViewOffset.x, _eyeRenderDesc[eye].HmdToEyeViewOffset.y, _eyeRenderDesc[eye].HmdToEyeViewOffset.z)); + _eyePositions[eye] = thisEyePosition; _camera->update(1.0f / Application::getInstance()->getFps()); glMatrixMode(GL_PROJECTION); glLoadIdentity(); - const ovrFovPort& port = _eyeFov[_activeEyeIndex]; + const ovrFovPort& port = _eyeFov[_activeEye]; float nearClip = whichCamera.getNearClip(), farClip = whichCamera.getFarClip(); glFrustum(-nearClip * port.LeftTan, nearClip * port.RightTan, -nearClip * port.DownTan, nearClip * port.UpTan, nearClip, farClip); - - glViewport(_eyeRenderViewport[eye].Pos.x, _eyeRenderViewport[eye].Pos.y, - _eyeRenderViewport[eye].Size.w, _eyeRenderViewport[eye].Size.h); - + ovrRecti & vp = _eyeTextures[eye].Header.RenderViewport; + vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale; + vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale; + + glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h); + glMatrixMode(GL_MODELVIEW); glLoadIdentity(); - + // HACK: instead of passing the stereo eye offset directly in the matrix, pass it in the camera offset //glTranslatef(_eyeRenderDesc[eye].ViewAdjust.x, _eyeRenderDesc[eye].ViewAdjust.y, _eyeRenderDesc[eye].ViewAdjust.z); - - _camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].ViewAdjust.x, -_eyeRenderDesc[eye].ViewAdjust.y, -_eyeRenderDesc[eye].ViewAdjust.z)); + + _camera->setEyeOffsetPosition(glm::vec3(-_eyeRenderDesc[eye].HmdToEyeViewOffset.x, -_eyeRenderDesc[eye].HmdToEyeViewOffset.y, -_eyeRenderDesc[eye].HmdToEyeViewOffset.z)); Application::getInstance()->displaySide(*_camera, false, RenderArgs::MONO); applicationOverlay.displayOverlayTextureOculus(*_camera); - _activeEyeIndex = -1; - } - - //Wait till time-warp to reduce latency - ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds); + }); + _activeEye = ovrEye_Count; glPopMatrix(); - //Full texture viewport for glow effect - glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h); - + QOpenGLFramebufferObject * finalFbo = nullptr; //Bind the output texture from the glow shader. 
If glow effect is disabled, we just grab the texture if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) { - QOpenGLFramebufferObject* fbo = DependencyManager::get()->render(true); - glBindTexture(GL_TEXTURE_2D, fbo->texture()); + //Full texture viewport for glow effect + glViewport(0, 0, _renderTargetSize.w, _renderTargetSize.h); + finalFbo = DependencyManager::get()->render(true); } else { - DependencyManager::get()->getPrimaryFramebufferObject()->release(); - glBindTexture(GL_TEXTURE_2D, DependencyManager::get()->getPrimaryFramebufferObject()->texture()); + finalFbo = DependencyManager::get()->getPrimaryFramebufferObject(); + finalFbo->release(); } - // restore our normal viewport - auto glCanvas = Application::getInstance()->getGLWidget(); - glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight()); - glMatrixMode(GL_PROJECTION); glPopMatrix(); + // restore our normal viewport + glViewport(0, 0, glCanvas->getDeviceWidth(), glCanvas->getDeviceHeight()); + +#if 0 + if (debugFrame && !timerActive) { + timerQuery.begin(); + } +#endif + +#ifdef OVR_CLIENT_DISTORTION + + //Wait till time-warp to reduce latency + ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds); + + //Clear the color buffer to ensure that there isnt any residual color + //Left over from when OR was not connected. + glClear(GL_COLOR_BUFFER_BIT); + + glBindTexture(GL_TEXTURE_2D, finalFbo->texture()); + //Renders the distorted mesh onto the screen renderDistortionMesh(eyeRenderPose); - - glBindTexture(GL_TEXTURE_2D, 0); + glBindTexture(GL_TEXTURE_2D, 0); + glCanvas->swapBuffers(); + +#else + + for_each_eye([&](ovrEyeType eye) { + ovrGLTexture & glEyeTexture = reinterpret_cast(_eyeTextures[eye]); + glEyeTexture.OGL.TexId = finalFbo->texture(); + + }); + + ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures); #endif + +#if 0 + if (debugFrame && !timerActive) { + timerQuery.end(); + timerActive = true; + } +#endif + + // No DK2, no message. 
+ char latency2Text[128] = ""; + { + float latencies[5] = {}; + if (debugFrame && ovrHmd_GetFloatArray(_ovrHmd, "DK2Latency", latencies, 5) == 5) + { + bool nonZero = false; + for (int i = 0; i < 5; ++i) + { + nonZero |= (latencies[i] != 0.f); + } + + if (nonZero) + { + qDebug() << QString().sprintf("M2P Latency: Ren: %4.2fms TWrp: %4.2fms PostPresent: %4.2fms Err: %4.2fms %4.2fms", + latencies[0], latencies[1], latencies[2], latencies[3], latencies[4]); + } + } + } + } -#ifdef HAVE_LIBOVR +#ifdef OVR_CLIENT_DISTORTION void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) { glLoadIdentity(); @@ -602,24 +713,25 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) { //Render the distortion meshes for each eye for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) { + + ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeTextures[eyeNum].Header.RenderViewport, + _UVScaleOffset[eyeNum]); + GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y }; _program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2); - GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, _UVScaleOffset[eyeNum][1].y }; + GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, 1.0f - _UVScaleOffset[eyeNum][1].y }; _program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2); ovrMatrix4f timeWarpMatrices[2]; - Matrix4f transposeMatrices[2]; + glm::mat4 transposeMatrices[2]; //Grabs the timewarp matrices to be used in the shader ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices); - transposeMatrices[0] = Matrix4f(timeWarpMatrices[0]); - transposeMatrices[1] = Matrix4f(timeWarpMatrices[1]); - //Have to transpose the matrices before using them - transposeMatrices[0].Transpose(); - transposeMatrices[1].Transpose(); + transposeMatrices[0] = glm::transpose(toGlm(timeWarpMatrices[0])); + transposeMatrices[1] = glm::transpose(toGlm(timeWarpMatrices[1])); - glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[0].M); - glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)transposeMatrices[1].M); + glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[0][0][0]); + glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[1][0][0]); glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]); @@ -649,86 +761,54 @@ void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) { //Tries to reconnect to the sensors void OculusManager::reset() { -#ifdef HAVE_LIBOVR if (_isConnected) { ovrHmd_RecenterPose(_ovrHmd); } -#endif } //Gets the current predicted angles from the oculus sensors void OculusManager::getEulerAngles(float& yaw, float& pitch, float& roll) { -#ifdef HAVE_LIBOVR -#if defined(__APPLE__) || defined(_WIN32) ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds()); -#else - ovrSensorState ss = ovrHmd_GetSensorState(_ovrHmd, _hmdFrameTiming.ScanoutMidpointSeconds); -#endif -#if defined(__APPLE__) || defined(_WIN32) if (ts.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) { -#else - if (ss.StatusFlags & (ovrStatus_OrientationTracked | ovrStatus_PositionTracked)) { -#endif - -#if defined(__APPLE__) || defined(_WIN32) - ovrPosef headPose = ts.HeadPose.ThePose; -#else - ovrPosef headPose = ss.Predicted.Pose; -#endif - Quatf orientation = Quatf(headPose.Orientation); - 
orientation.GetEulerAngles(&yaw, &pitch, &roll);
+ glm::vec3 euler = glm::eulerAngles(toGlm(ts.HeadPose.ThePose.Orientation));
+ yaw = euler.y;
+ pitch = euler.x;
+ roll = euler.z;
 } else {
 yaw = 0.0f;
 pitch = 0.0f;
 roll = 0.0f;
 }
-#else
- yaw = 0.0f;
- pitch = 0.0f;
- roll = 0.0f;
-#endif
 }
 glm::vec3 OculusManager::getRelativePosition() {
-#if (defined(__APPLE__) || defined(_WIN32)) && HAVE_LIBOVR
 ovrTrackingState trackingState = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
 ovrVector3f headPosition = trackingState.HeadPose.ThePose.Position;
 return glm::vec3(headPosition.x, headPosition.y, headPosition.z);
-#else
- // no positional tracking in Linux yet
- return glm::vec3(0.0f, 0.0f, 0.0f);
-#endif
 }
 //Used to set the size of the glow framebuffers
 QSize OculusManager::getRenderTargetSize() {
-#ifdef HAVE_LIBOVR
 QSize rv;
 rv.setWidth(_renderTargetSize.w);
 rv.setHeight(_renderTargetSize.h);
 return rv;
-#else
- return QSize(100, 100);
-#endif
 }
 void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal, float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane) {
-#ifdef HAVE_LIBOVR
- if (_activeEyeIndex != -1) {
- const ovrFovPort& port = _eyeFov[_activeEyeIndex];
+ if (_activeEye != ovrEye_Count) {
+ const ovrFovPort& port = _eyeFov[_activeEye];
 right = nearVal * port.RightTan;
 left = -nearVal * port.LeftTan;
 top = nearVal * port.UpTan;
 bottom = -nearVal * port.DownTan;
 }
-#endif
 }
 int OculusManager::getHMDScreen() {
 int hmdScreenIndex = -1; // unknown
-#ifdef HAVE_LIBOVR
 // TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
 // we will simply assume the initialization code that set up _ovrHmd picked the best hmd
@@ -777,7 +857,6 @@ int OculusManager::getHMDScreen() {
 screenNumber++;
 }
 }
-#endif
 return hmdScreenIndex;
 }
diff --git a/interface/src/devices/OculusManager.h b/interface/src/devices/OculusManager.h
index 71fc08c8f9..6c23776e18 100644
--- a/interface/src/devices/OculusManager.h
+++ b/interface/src/devices/OculusManager.h
@@ -13,19 +13,39 @@
 #ifndef hifi_OculusManager_h
 #define hifi_OculusManager_h
-#ifdef HAVE_LIBOVR
-#include
-#endif
+#include
 #include
+#include
+#include
+#include
 class Camera;
 class PalmData;
 class Text3DOverlay;
+// Uncomment this to enable client side distortion. NOT recommended since
+// the Oculus SDK will ideally provide the best practices for distortion
+// in terms of performance and quality, and by using it we will get updated
+// best practices for free with new runtime releases.
+#define OVR_CLIENT_DISTORTION 1
+
+
+// On Win32 platforms, enabling Direct HMD requires that the SDK be
+// initialized before the GL context is set up, but this breaks v-sync
+// for any application that has a Direct mode enabled Rift connected
+// but is not rendering to it. For the time being I'm setting this as
+// a macro-enabled mechanism which changes where the SDK is initialized.
+// To enable Direct HMD mode, you can un-comment this, but with the
+// caveat that it will break v-sync in NON-VR mode if you have an Oculus
+// Rift connected and in Direct mode
+#define OVR_DIRECT_MODE 1
+
+
 /// Handles interaction with the Oculus Rift.
class OculusManager { public: + static void init(); static void connect(); static void disconnect(); static bool isConnected(); @@ -33,6 +53,7 @@ public: static void abandonCalibration(); static void beginFrameTiming(); static void endFrameTiming(); + static bool allowSwap(); static void configureCamera(Camera& camera, int screenWidth, int screenHeight); static void display(const glm::quat &bodyOrientation, const glm::vec3 &position, Camera& whichCamera); static void reset(); @@ -47,18 +68,17 @@ public: static void overrideOffAxisFrustum(float& left, float& right, float& bottom, float& top, float& nearVal, float& farVal, glm::vec4& nearClipPlane, glm::vec4& farClipPlane); - static glm::vec3 getLeftEyePosition() { return _leftEyePosition; } - static glm::vec3 getRightEyePosition() { return _rightEyePosition; } + static glm::vec3 getLeftEyePosition() { return _eyePositions[ovrEye_Left]; } + static glm::vec3 getRightEyePosition() { return _eyePositions[ovrEye_Right]; } static int getHMDScreen(); private: -#ifdef HAVE_LIBOVR + static void initSdk(); + static void shutdownSdk(); +#ifdef OVR_CLIENT_DISTORTION static void generateDistortionMesh(); static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]); - - static bool similarNames(const QString& nameA,const QString& nameB); - struct DistortionVertex { glm::vec2 pos; glm::vec2 texR; @@ -85,25 +105,28 @@ private: static int _texCoord0AttributeLocation; static int _texCoord1AttributeLocation; static int _texCoord2AttributeLocation; - - static bool _isConnected; - - static ovrHmd _ovrHmd; - static ovrHmdDesc _ovrHmdDesc; - static ovrFovPort _eyeFov[ovrEye_Count]; - static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count]; - static ovrSizei _renderTargetSize; static ovrVector2f _UVScaleOffset[ovrEye_Count][2]; static GLuint _vertices[ovrEye_Count]; static GLuint _indices[ovrEye_Count]; static GLsizei _meshSize[ovrEye_Count]; static ovrFrameTiming _hmdFrameTiming; - static ovrRecti _eyeRenderViewport[ovrEye_Count]; + static bool _programInitialized; +#endif + + static ovrTexture _eyeTextures[ovrEye_Count]; + static bool _isConnected; + static glm::vec3 _eyePositions[ovrEye_Count]; + static ovrHmd _ovrHmd; + static ovrFovPort _eyeFov[ovrEye_Count]; + static ovrVector3f _eyeOffset[ovrEye_Count]; + static glm::mat4 _eyeProjection[ovrEye_Count]; + static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count]; + static ovrSizei _renderTargetSize; static unsigned int _frameIndex; static bool _frameTimingActive; - static bool _programInitialized; static Camera* _camera; - static int _activeEyeIndex; + static ovrEyeType _activeEye; + static bool _hswDismissed; static void calibrate(const glm::vec3 position, const glm::quat orientation); enum CalibrationState { @@ -125,13 +148,65 @@ private: static glm::quat _calibrationOrientation; static quint64 _calibrationStartTime; static int _calibrationMessage; - -#endif - - static glm::vec3 _leftEyePosition; - static glm::vec3 _rightEyePosition; - - + // TODO drop this variable and use the existing 'Developer | Render | Scale Resolution' value + static ovrSizei _recommendedTexSize; + static float _offscreenRenderScale; + static bool _eyePerFrameMode; + static ovrEyeType _lastEyeRendered; }; + +inline glm::mat4 toGlm(const ovrMatrix4f & om) { + return glm::transpose(glm::make_mat4(&om.M[0][0])); +} + +inline glm::mat4 toGlm(const ovrFovPort & fovport, float nearPlane = 0.01f, float farPlane = 10000.0f) { + return toGlm(ovrMatrix4f_Projection(fovport, nearPlane, farPlane, true)); +} + +inline glm::vec3 toGlm(const 
ovrVector3f & ov) { + return glm::make_vec3(&ov.x); +} + +inline glm::vec2 toGlm(const ovrVector2f & ov) { + return glm::make_vec2(&ov.x); +} + +inline glm::uvec2 toGlm(const ovrSizei & ov) { + return glm::uvec2(ov.w, ov.h); +} + +inline glm::quat toGlm(const ovrQuatf & oq) { + return glm::make_quat(&oq.x); +} + +inline glm::mat4 toGlm(const ovrPosef & op) { + glm::mat4 orientation = glm::mat4_cast(toGlm(op.Orientation)); + glm::mat4 translation = glm::translate(glm::mat4(), toGlm(op.Position)); + return translation * orientation; +} + +inline ovrMatrix4f ovrFromGlm(const glm::mat4 & m) { + ovrMatrix4f result; + glm::mat4 transposed(glm::transpose(m)); + memcpy(result.M, &(transposed[0][0]), sizeof(float) * 16); + return result; +} + +inline ovrVector3f ovrFromGlm(const glm::vec3 & v) { + return{ v.x, v.y, v.z }; +} + +inline ovrVector2f ovrFromGlm(const glm::vec2 & v) { + return{ v.x, v.y }; +} + +inline ovrSizei ovrFromGlm(const glm::uvec2 & v) { + return{ (int)v.x, (int)v.y }; +} + +inline ovrQuatf ovrFromGlm(const glm::quat & q) { + return{ q.x, q.y, q.z, q.w }; +} + #endif // hifi_OculusManager_h diff --git a/interface/src/main.cpp b/interface/src/main.cpp index 64ecb2b9e7..9ee8ab3aae 100644 --- a/interface/src/main.cpp +++ b/interface/src/main.cpp @@ -18,6 +18,7 @@ #include "AddressManager.h" #include "Application.h" +#include "devices/OculusManager.h" #ifdef Q_OS_WIN static BOOL CALLBACK enumWindowsCallback(HWND hWnd, LPARAM lParam) { @@ -92,6 +93,10 @@ int main(int argc, const char* argv[]) { usecTimestampNowForceClockSkew(clockSkew); qDebug("clockSkewOption=%s clockSkew=%d", clockSkewOption, clockSkew); } + // Oculus initialization MUST PRECEDE OpenGL context creation. + // The nature of the Application constructor means this has to be either here, + // or in the main window ctor, before GL startup. + OculusManager::init(); int exitCode; {
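Editor's note (illustrative, not part of the patch): the inline toGlm() conversions added to OculusManager.h are what let the new code stay in GLM types while talking to the LibOVR 0.5 C API. A minimal sketch of one possible use follows; updateEyeProjections is a hypothetical helper, and it assumes the header provides toGlm() together with the OVR and GLM includes whose names are elided above.

    // Illustrative sketch only -- not part of the patch.
    #include "devices/OculusManager.h"   // assumed to provide toGlm() and the OVR/GLM headers

    // Hypothetical helper: build a GLM projection matrix per eye from the HMD's
    // default FOV ports, using the toGlm(ovrFovPort, near, far) overload above.
    static void updateEyeProjections(const ovrHmd hmd, float nearClip, float farClip,
                                     glm::mat4 (&eyeProjections)[ovrEye_Count]) {
        for (int i = 0; i < ovrEye_Count; ++i) {
            ovrEyeType eye = static_cast<ovrEyeType>(i);
            // toGlm() wraps ovrMatrix4f_Projection() and transposes the row-major
            // OVR matrix into GLM's column-major layout.
            eyeProjections[eye] = toGlm(hmd->DefaultEyeFov[eye], nearClip, farClip);
        }
    }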
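Editor's note (illustrative, not part of the patch): with SDK-side distortion (OVR_CLIENT_DISTORTION undefined) the Oculus runtime presents the frame inside ovrHmd_EndFrame(), which is why GLCanvas::paintGL() now only calls swapBuffers() when OculusManager::allowSwap() permits it. A simplified outline of that frame flow against the LibOVR 0.5 C API is sketched below; renderOneFrame and renderSceneForEye are hypothetical stand-ins for display() and Application::displaySide().

    // Simplified outline -- not part of the patch.
    #include <OVR_CAPI.h>
    #include <OVR_CAPI_GL.h>

    // Hypothetical stand-in for the per-eye scene rendering done by the application.
    extern void renderSceneForEye(ovrEyeType eye);

    void renderOneFrame(ovrHmd hmd, unsigned int frameIndex, ovrTexture eyeTextures[ovrEye_Count]) {
        ovrVector3f eyeOffsets[ovrEye_Count] = { { 0, 0, 0 }, { 0, 0, 0 } };
        ovrPosef eyePoses[ovrEye_Count];
        // Predicted per-eye head poses for this frame index, as in the patch.
        ovrHmd_GetEyePoses(hmd, frameIndex, eyeOffsets, eyePoses, nullptr);

        ovrHmd_BeginFrame(hmd, frameIndex);
        for (int i = 0; i < ovrEye_Count; ++i) {
            renderSceneForEye(hmd->EyeRenderOrder[i]);   // render into the shared offscreen FBO
        }
        // The runtime applies distortion/timewarp and performs the buffer swap here,
        // so the application must not call swapBuffers() on this path.
        ovrHmd_EndFrame(hmd, eyePoses, eyeTextures);
    }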