Moving Windows to SDK 0.6

Working on SDK 0.6 for windows

Working on SDK 0.6 for windows
This commit is contained in:
Brad Davis 2015-06-06 23:51:37 -07:00 committed by Bradley Austin Davis
parent a63811a491
commit f614268667
10 changed files with 851 additions and 403 deletions

View file

@ -9,8 +9,8 @@ if (WIN32)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://static.oculus.com/sdk-downloads/ovr_sdk_win_0.5.0.1.zip
URL_MD5 d3fc4c02db9be5ff08af4ef4c97b32f9
URL http://static.oculus.com/sdk-downloads/0.6.0.0/1431634088/ovr_sdk_win_0.6.0.0.zip
URL_MD5 a3dfdab037a854fdcf7e6033fa8d7028
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""

View file

@ -4,11 +4,11 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
include(ExternalProject)
ExternalProject_Add(
${EXTERNAL_NAME}
URL http://softlayer-dal.dl.sourceforge.net/project/oglplus/oglplus-0.61.x/oglplus-0.61.0.zip
URL_MD5 bb55038c36c660d2b6c7be380414fa60
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
GIT_REPOSITORY https://github.com/matus-chochlik/oglplus.git
GIT_TAG 9660aaaf65c5a3f47e6c54827ae28ae1ec5c5deb
CONFIGURE_COMMAND ""
BUILD_COMMAND ""
INSTALL_COMMAND ""
LOG_DOWNLOAD 1
)

View file

@ -0,0 +1,43 @@
# Configure the current target (${TARGET_NAME}) for the platform-appropriate
# OpenGL implementation: OS X framework, Windows GLEW + opengl32 (optionally
# Nsight), Android GLES3/EGL, or the stock FindOpenGL package elsewhere.
macro(SETUP_HIFI_OPENGL)
  if (APPLE)
    # link in required OS X frameworks and include the right GL headers
    find_library(OpenGL OpenGL)
    target_link_libraries(${TARGET_NAME} ${OpenGL})
  elseif (WIN32)
    add_dependency_external_projects(glew)
    find_package(GLEW REQUIRED)
    target_include_directories(${TARGET_NAME} PUBLIC ${GLEW_INCLUDE_DIRS})
    target_link_libraries(${TARGET_NAME} ${GLEW_LIBRARIES} opengl32.lib)
    if (USE_NSIGHT)
      # try to find the Nsight package and add it to the build if we find it
      find_package(NSIGHT)
      if (NSIGHT_FOUND)
        include_directories(${NSIGHT_INCLUDE_DIRS})
        add_definitions(-DNSIGHT_FOUND)
        target_link_libraries(${TARGET_NAME} "${NSIGHT_LIBRARIES}")
      endif()
    endif()
  elseif (ANDROID)
    target_link_libraries(${TARGET_NAME} "-lGLESv3" "-lEGL")
  else()
    find_package(OpenGL REQUIRED)
    # BUG FIX: was `if (${OPENGL_INCLUDE_DIR})`, which expands the variable and
    # then evaluates its *value* (a filesystem path) as a variable/constant name,
    # so the branch was effectively never taken. Test the variable itself.
    if (OPENGL_INCLUDE_DIR)
      include_directories(SYSTEM "${OPENGL_INCLUDE_DIR}")
    endif()
    target_link_libraries(${TARGET_NAME} "${OPENGL_LIBRARY}")
    target_include_directories(${TARGET_NAME} PUBLIC ${OPENGL_INCLUDE_DIR})
  endif()
endmacro()

View file

@ -1860,11 +1860,13 @@ void Application::setEnableVRMode(bool enableVRMode) {
// attempt to reconnect the Oculus manager - it's possible this was a workaround
// for the sixense crash
OculusManager::disconnect();
_glWidget->makeCurrent();
OculusManager::connect();
}
OculusManager::recalibrate();
} else {
OculusManager::abandonCalibration();
OculusManager::disconnect();
_mirrorCamera.setHmdPosition(glm::vec3());
_mirrorCamera.setHmdRotation(glm::quat());
@ -2102,12 +2104,12 @@ void Application::init() {
_mirrorCamera.setMode(CAMERA_MODE_MIRROR);
OculusManager::connect();
if (OculusManager::isConnected()) {
QMetaObject::invokeMethod(Menu::getInstance()->getActionForOption(MenuOption::Fullscreen),
"trigger",
Qt::QueuedConnection);
}
//OculusManager::connect();
//if (OculusManager::isConnected()) {
// QMetaObject::invokeMethod(Menu::getInstance()->getActionForOption(MenuOption::Fullscreen),
// "trigger",
// Qt::QueuedConnection);
//}
TV3DManager::connect();
if (TV3DManager::isConnected()) {

View file

@ -25,6 +25,8 @@
#include <avatar/AvatarManager.h>
#include <avatar/MyAvatar.h>
#include <GlowEffect.h>
#include <gpu/GLBackend.h>
#include <OglplusHelpers.h>
#include <PathUtils.h>
#include <SharedUtil.h>
#include <UserActivityLogger.h>
@ -34,7 +36,6 @@
#include "InterfaceLogging.h"
#include "Application.h"
#include <gpu/GLBackend.h>
template <typename Function>
void for_each_eye(Function function) {
@ -53,27 +54,132 @@ void for_each_eye(const ovrHmd & hmd, Function function) {
}
}
#ifdef OVR_CLIENT_DISTORTION
ProgramObject OculusManager::_program;
int OculusManager::_textureLocation;
int OculusManager::_eyeToSourceUVScaleLocation;
int OculusManager::_eyeToSourceUVOffsetLocation;
int OculusManager::_eyeRotationStartLocation;
int OculusManager::_eyeRotationEndLocation;
int OculusManager::_positionAttributeLocation;
int OculusManager::_colorAttributeLocation;
int OculusManager::_texCoord0AttributeLocation;
int OculusManager::_texCoord1AttributeLocation;
int OculusManager::_texCoord2AttributeLocation;
ovrVector2f OculusManager::_UVScaleOffset[ovrEye_Count][2];
GLuint OculusManager::_vertices[ovrEye_Count] = { 0, 0 };
GLuint OculusManager::_indices[ovrEye_Count] = { 0, 0 };
GLsizei OculusManager::_meshSize[ovrEye_Count] = { 0, 0 };
ovrFrameTiming OculusManager::_hmdFrameTiming;
bool OculusManager::_programInitialized = false;
#endif
#ifdef Q_OS_WIN
// A base class for FBO wrappers that need to use the Oculus C
// API to manage textures via ovrHmd_CreateSwapTextureSetGL,
// ovrHmd_CreateMirrorTextureGL, etc
template <typename C>
struct RiftFramebufferWrapper : public FramebufferWrapper<C, char> {
// Non-owning HMD handle; the SDK calls that create/destroy the textures need it.
ovrHmd hmd;
RiftFramebufferWrapper(const ovrHmd & hmd) : hmd(hmd) {
// The Oculus SDK, not this wrapper, owns the color texture storage, so start
// both handles at zero instead of allocating here.
color = 0;
depth = 0;
};
// Detach any color texture currently bound to the FBO, record the new size,
// then rebuild via initColor()/initDone().
// NOTE(review): `fbo`, `size`, initColor() and initDone() are presumably
// inherited from FramebufferWrapper — confirm against its header.
void Resize(const uvec2 & size) {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
this->size = size;
initColor();
initDone();
}
protected:
// Rift-managed framebuffers carry no depth attachment, so depth init is a no-op.
virtual void initDepth() override final {
}
};
// A wrapper for constructing and using a swap texture set,
// where each frame you draw to a texture via the FBO,
// then submit it and increment to the next texture.
// The Oculus SDK manages the creation and destruction of
// the textures
struct SwapFramebufferWrapper : public RiftFramebufferWrapper<ovrSwapTextureSet*> {
SwapFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
// Release the swap texture set back to the SDK; this wrapper never frees
// the GL textures directly.
~SwapFramebufferWrapper() {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
}
// Advance to the next texture in the set, wrapping at TextureCount.
// Called after each frame submission.
void Increment() {
++color->CurrentIndex;
color->CurrentIndex %= color->TextureCount;
}
protected:
// (Re)create the swap texture set at the current `size` and set linear
// filtering with edge clamping on every texture in the set.
virtual void initColor() override {
if (color) {
ovrHmd_DestroySwapTextureSet(hmd, color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateSwapTextureSetGL(hmd, GL_RGBA, size.x, size.y, &color);
Q_ASSERT(OVR_SUCCESS(result));
for (int i = 0; i < color->TextureCount; ++i) {
ovrGLTexture& ovrTex = (ovrGLTexture&)color->Textures[i];
glBindTexture(GL_TEXTURE_2D, ovrTex.OGL.TexId);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
// Attachment happens lazily in onBind() because the backing texture changes
// every frame, so there is nothing to finalize here.
virtual void initDone() override {
}
// Attach the set's current texture each time the FBO is bound.
virtual void onBind(oglplus::Framebuffer::Target target) override {
ovrGLTexture& tex = (ovrGLTexture&)(color->Textures[color->CurrentIndex]);
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, tex.OGL.TexId, 0);
}
// Detach on unbind so the SDK-owned texture isn't referenced while it is
// being consumed by the compositor.
virtual void onUnbind(oglplus::Framebuffer::Target target) override {
glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
}
};
// We use a FBO to wrap the mirror texture because it makes it easier to
// render to the screen via glBlitFramebuffer
struct MirrorFramebufferWrapper : public RiftFramebufferWrapper<ovrGLTexture*> {
MirrorFramebufferWrapper(const ovrHmd & hmd)
: RiftFramebufferWrapper(hmd) {
}
// Return the mirror texture to the SDK; the GL texture id is SDK-owned.
virtual ~MirrorFramebufferWrapper() {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
}
private:
// (Re)create the SDK-managed mirror texture at the current `size`.
void initColor() override {
if (color) {
ovrHmd_DestroyMirrorTexture(hmd, (ovrTexture*)color);
color = nullptr;
}
ovrResult result = ovrHmd_CreateMirrorTextureGL(hmd, GL_RGBA, size.x, size.y, (ovrTexture**)&color);
Q_ASSERT(OVR_SUCCESS(result));
}
// Unlike the swap set, the mirror texture never changes, so it can be
// attached to the FBO once here rather than per-bind.
void initDone() override {
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, oglplus::GetName(fbo));
glFramebufferTexture2D(GL_DRAW_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, color->OGL.TexId, 0);
glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
}
};
SwapFramebufferWrapper* OculusManager::_swapFbo{ nullptr };
MirrorFramebufferWrapper* OculusManager::_mirrorFbo{ nullptr };
ovrLayerEyeFov OculusManager::_sceneLayer;
#else
ovrTexture OculusManager::_eyeTextures[ovrEye_Count];
#endif
bool OculusManager::_isConnected = false;
ovrHmd OculusManager::_ovrHmd;
ovrFovPort OculusManager::_eyeFov[ovrEye_Count];
@ -104,39 +210,41 @@ bool OculusManager::_eyePerFrameMode = false;
ovrEyeType OculusManager::_lastEyeRendered = ovrEye_Count;
ovrSizei OculusManager::_recommendedTexSize = { 0, 0 };
float OculusManager::_offscreenRenderScale = 1.0;
// Initialize the LibOVR runtime and acquire an HMD handle. If no physical
// headset is available, fall back to a virtual debug DK2 so the rest of the
// pipeline can still run.
void OculusManager::initSdk() {
    ovr_Initialize();
    if (nullptr == (_ovrHmd = ovrHmd_Create(0))) {
        _ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
    }
}
// Tear down the HMD handle and the LibOVR runtime. Safe to call when the SDK
// was never initialized: with no HMD handle there is nothing to shut down.
void OculusManager::shutdownSdk() {
    if (!_ovrHmd) {
        return;
    }
    ovrHmd_Destroy(_ovrHmd);
    _ovrHmd = nullptr;
    ovr_Shutdown();
}
ovrRecti OculusManager::_eyeViewports[ovrEye_Count];
// Application-startup hook. In direct-HMD-mode builds the Oculus SDK must be
// initialized before the GL context exists, so it happens here; otherwise
// initialization is deferred to connect().
void OculusManager::init() {
#ifdef OVR_DIRECT_MODE
initSdk();
#endif
}
// Application-shutdown hook, mirroring init(): only direct-HMD-mode builds
// own the SDK lifetime here and therefore shut it down here.
void OculusManager::deinit() {
#ifdef OVR_DIRECT_MODE
shutdownSdk();
#endif
}
void OculusManager::connect() {
#ifndef OVR_DIRECT_MODE
initSdk();
ovrInitParams initParams; memset(&initParams, 0, sizeof(initParams));
#ifdef DEBUG
//initParams.Flags |= ovrInit_Debug;
#endif
ovr_Initialize(&initParams);
#ifdef Q_OS_WIN
ovrResult res = ovrHmd_Create(0, &_ovrHmd);
#ifdef DEBUG
if (!OVR_SUCCESS(res)) {
res = ovrHmd_CreateDebug(ovrHmd_DK2, &_ovrHmd);
Q_ASSERT(OVR_SUCCESS(res));
}
#endif
#else
_ovrHmd = ovrHmd_Create(0);
#ifdef DEBUG
if (!_ovrHmd) {
_ovrHmd = ovrHmd_CreateDebug(ovrHmd_DK2);
}
#endif
#endif
_calibrationState = UNCALIBRATED;
qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
@ -150,6 +258,28 @@ void OculusManager::connect() {
_eyeFov[eye] = _ovrHmd->DefaultEyeFov[eye];
});
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
#ifdef Q_OS_WIN
_mirrorFbo = new MirrorFramebufferWrapper(_ovrHmd);
_swapFbo = new SwapFramebufferWrapper(_ovrHmd);
_swapFbo->Init(toGlm(_renderTargetSize));
_sceneLayer.ColorTexture[0] = _swapFbo->color;
_sceneLayer.ColorTexture[1] = nullptr;
_sceneLayer.Viewport[0].Pos = { 0, 0 };
_sceneLayer.Viewport[0].Size = { _swapFbo->size.x / 2, _swapFbo->size.y };
_sceneLayer.Viewport[1].Pos = { _swapFbo->size.x / 2, 0 };
_sceneLayer.Viewport[1].Size = { _swapFbo->size.x / 2, _swapFbo->size.y };
_sceneLayer.Header.Type = ovrLayerType_EyeFov;
_sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
for_each_eye([&](ovrEyeType eye) {
_eyeViewports[eye] = _sceneLayer.Viewport[eye];
_sceneLayer.Fov[eye] = _eyeFov[eye];
});
#else
ovrGLConfig cfg;
memset(&cfg, 0, sizeof(cfg));
cfg.OGL.Header.API = ovrRenderAPI_OpenGL;
@ -166,15 +296,19 @@ void OculusManager::connect() {
assert(configResult);
_recommendedTexSize = ovrHmd_GetFovTextureSize(_ovrHmd, ovrEye_Left, _eyeFov[ovrEye_Left], 1.0f);
_renderTargetSize = { _recommendedTexSize.w * 2, _recommendedTexSize.h };
for_each_eye([&](ovrEyeType eye) {
//Get texture size
_eyeTextures[eye].Header.API = ovrRenderAPI_OpenGL;
_eyeTextures[eye].Header.TextureSize = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Pos = { 0, 0 };
_eyeTextures[eye].Header.RenderViewport.Size = _renderTargetSize;
_eyeTextures[eye].Header.RenderViewport.Size.w /= 2;
});
_eyeTextures[ovrEye_Right].Header.RenderViewport.Pos.x = _recommendedTexSize.w;
for_each_eye([&](ovrEyeType eye) {
_eyeViewports[eye] = _eyeTextures[eye].Header.RenderViewport;
});
#endif
ovrHmd_SetEnabledCaps(_ovrHmd, ovrHmdCap_LowPersistence | ovrHmdCap_DynamicPrediction);
@ -186,32 +320,6 @@ void OculusManager::connect() {
_camera = new Camera;
configureCamera(*_camera); // no need to use screen dimensions; they're ignored
}
#ifdef OVR_CLIENT_DISTORTION
if (!_programInitialized) {
// Shader program
_programInitialized = true;
_program.addShaderFromSourceFile(QGLShader::Vertex, PathUtils::resourcesPath() + "shaders/oculus.vert");
_program.addShaderFromSourceFile(QGLShader::Fragment, PathUtils::resourcesPath() + "shaders/oculus.frag");
_program.link();
// Uniforms
_textureLocation = _program.uniformLocation("texture");
_eyeToSourceUVScaleLocation = _program.uniformLocation("EyeToSourceUVScale");
_eyeToSourceUVOffsetLocation = _program.uniformLocation("EyeToSourceUVOffset");
_eyeRotationStartLocation = _program.uniformLocation("EyeRotationStart");
_eyeRotationEndLocation = _program.uniformLocation("EyeRotationEnd");
// Attributes
_positionAttributeLocation = _program.attributeLocation("position");
_colorAttributeLocation = _program.attributeLocation("color");
_texCoord0AttributeLocation = _program.attributeLocation("texCoord0");
_texCoord1AttributeLocation = _program.attributeLocation("texCoord1");
_texCoord2AttributeLocation = _program.attributeLocation("texCoord2");
}
//Generate the distortion VBOs
generateDistortionMesh();
#endif
} else {
_isConnected = false;
@ -223,27 +331,29 @@ void OculusManager::connect() {
//Disconnects and deallocates the OR
void OculusManager::disconnect() {
if (_isConnected) {
#ifdef Q_OS_WIN
if (_swapFbo) {
delete _swapFbo;
_swapFbo = nullptr;
}
if (_mirrorFbo) {
delete _mirrorFbo;
_mirrorFbo = nullptr;
}
#endif
if (_ovrHmd) {
ovrHmd_Destroy(_ovrHmd);
_ovrHmd = nullptr;
}
ovr_Shutdown();
_isConnected = false;
// Prepare to potentially have to dismiss the HSW again
// if the user re-enables VR
_hswDismissed = false;
#ifndef OVR_DIRECT_MODE
shutdownSdk();
#endif
#ifdef OVR_CLIENT_DISTORTION
//Free the distortion mesh data
for (int i = 0; i < ovrEye_Count; i++) {
if (_vertices[i] != 0) {
glDeleteBuffers(1, &(_vertices[i]));
_vertices[i] = 0;
}
if (_indices[i] != 0) {
glDeleteBuffers(1, &(_indices[i]));
_indices[i] = 0;
}
}
#endif
}
}
@ -363,64 +473,8 @@ void OculusManager::abandonCalibration() {
}
}
#ifdef OVR_CLIENT_DISTORTION
// Build per-eye distortion meshes from the Oculus SDK and upload them as GL
// vertex/index buffers (_vertices/_indices), recording the index count in
// _meshSize. Used only by the client-side-distortion render path.
void OculusManager::generateDistortionMesh() {
//Check if we already have the distortion mesh
if (_vertices[0] != 0) {
printf("WARNING: Tried to generate Oculus distortion mesh twice without freeing the VBOs.");
return;
}
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
// Allocate and generate distortion mesh vertices
ovrDistortionMesh meshData;
ovrHmd_CreateDistortionMesh(_ovrHmd, _eyeRenderDesc[eyeNum].Eye, _eyeRenderDesc[eyeNum].Fov, _ovrHmd->DistortionCaps, &meshData);
// Parse the vertex data and create a render ready vertex buffer
DistortionVertex* pVBVerts = new DistortionVertex[meshData.VertexCount];
_meshSize[eyeNum] = meshData.IndexCount;
// Convert the oculus vertex data to the DistortionVertex format.
DistortionVertex* v = pVBVerts;
ovrDistortionVertex* ov = meshData.pVertexData;
for (unsigned int vertNum = 0; vertNum < meshData.VertexCount; vertNum++) {
v->pos.x = ov->ScreenPosNDC.x;
v->pos.y = ov->ScreenPosNDC.y;
// Separate R/G/B texture coordinates allow the shader to correct
// chromatic aberration per color channel.
v->texR.x = ov->TanEyeAnglesR.x;
v->texR.y = ov->TanEyeAnglesR.y;
v->texG.x = ov->TanEyeAnglesG.x;
v->texG.y = ov->TanEyeAnglesG.y;
v->texB.x = ov->TanEyeAnglesB.x;
v->texB.y = ov->TanEyeAnglesB.y;
// Pack vignette and timewarp factors into the color channel as bytes
// (255.99f maps the [0,1] factor onto the full 0-255 range).
v->color.r = v->color.g = v->color.b = (GLubyte)(ov->VignetteFactor * 255.99f);
v->color.a = (GLubyte)(ov->TimeWarpFactor * 255.99f);
v++;
ov++;
}
//vertices
glGenBuffers(1, &(_vertices[eyeNum]));
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
glBufferData(GL_ARRAY_BUFFER, sizeof(DistortionVertex) * meshData.VertexCount, pVBVerts, GL_STATIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
//indices
glGenBuffers(1, &(_indices[eyeNum]));
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glBufferData(GL_ELEMENT_ARRAY_BUFFER, sizeof(unsigned short) * meshData.IndexCount, meshData.pIndexData, GL_STATIC_DRAW);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
//Now that we have the VBOs we can get rid of the mesh data
delete [] pVBVerts;
ovrHmd_DestroyDistortionMesh(&meshData);
}
}
#endif
bool OculusManager::isConnected() {
return _isConnected && Menu::getInstance()->isOptionChecked(MenuOption::EnableVRMode);
return _isConnected;
}
//Begins the frame timing for oculus prediction purposes
@ -428,10 +482,6 @@ void OculusManager::beginFrameTiming() {
if (_frameTimingActive) {
printf("WARNING: Called OculusManager::beginFrameTiming() twice in a row, need to call OculusManager::endFrameTiming().");
}
#ifdef OVR_CLIENT_DISTORTION
_hmdFrameTiming = ovrHmd_BeginFrameTiming(_ovrHmd, _frameIndex);
#endif
_frameTimingActive = true;
}
@ -441,9 +491,6 @@ bool OculusManager::allowSwap() {
//Ends frame timing
// Close out the per-frame timing started by beginFrameTiming(): advance the
// frame index used for pose prediction and clear the active flag.
void OculusManager::endFrameTiming() {
#ifdef OVR_CLIENT_DISTORTION
// Client-distortion builds must also notify the SDK that the frame ended.
ovrHmd_EndFrameTiming(_ovrHmd);
#endif
_frameIndex++;
_frameTimingActive = false;
}
@ -494,19 +541,20 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
timerActive = false;
}
#endif
#ifdef OVR_DIRECT_MODE
static bool attached = false;
if (!attached) {
attached = true;
void * nativeWindowHandle = (void*)(size_t)glCanvas->effectiveWinId();
if (nullptr != nativeWindowHandle) {
ovrHmd_AttachToWindow(_ovrHmd, nativeWindowHandle, nullptr, nullptr);
}
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
glm::vec3 trackerPosition = toGlm(ts.HeadPose.ThePose.Position);
if (_calibrationState != CALIBRATED) {
calibrate(trackerPosition, toGlm(ts.HeadPose.ThePose.Orientation));
}
#endif
trackerPosition = bodyOrientation * trackerPosition;
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
ovrPosef eyePoses[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
#ifndef OVR_CLIENT_DISTORTION
#ifndef Q_OS_WIN
// FIXME: we need a better way of responding to the HSW. In particular
// we need to ensure that it's only displayed once per session, rather than
// every time the user toggles VR mode, and we need to hook it up to actual
@ -522,7 +570,6 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
}
}
#endif
//beginFrameTiming must be called before display
if (!_frameTimingActive) {
@ -545,27 +592,12 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
glMatrixMode(GL_MODELVIEW);
glPushMatrix();
glm::quat orientation;
glm::vec3 trackerPosition;
auto deviceSize = qApp->getDeviceSize();
ovrTrackingState ts = ovrHmd_GetTrackingState(_ovrHmd, ovr_GetTimeInSeconds());
ovrVector3f ovrHeadPosition = ts.HeadPose.ThePose.Position;
trackerPosition = glm::vec3(ovrHeadPosition.x, ovrHeadPosition.y, ovrHeadPosition.z);
if (_calibrationState != CALIBRATED) {
ovrQuatf ovrHeadOrientation = ts.HeadPose.ThePose.Orientation;
orientation = glm::quat(ovrHeadOrientation.w, ovrHeadOrientation.x, ovrHeadOrientation.y, ovrHeadOrientation.z);
calibrate(trackerPosition, orientation);
}
trackerPosition = bodyOrientation * trackerPosition;
static ovrVector3f eyeOffsets[2] = { { 0, 0, 0 }, { 0, 0, 0 } };
ovrPosef eyePoses[ovrEye_Count];
ovrHmd_GetEyePoses(_ovrHmd, _frameIndex, eyeOffsets, eyePoses, nullptr);
#ifndef Q_OS_WIN
ovrHmd_BeginFrame(_ovrHmd, _frameIndex);
#endif
static ovrPosef eyeRenderPose[ovrEye_Count];
//Render each eye into an fbo
for_each_eye(_ovrHmd, [&](ovrEyeType eye){
@ -578,11 +610,7 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
_lastEyeRendered = _activeEye = eye;
eyeRenderPose[eye] = eyePoses[eye];
// Set the camera rotation for this eye
orientation.x = eyeRenderPose[eye].Orientation.x;
orientation.y = eyeRenderPose[eye].Orientation.y;
orientation.z = eyeRenderPose[eye].Orientation.z;
orientation.w = eyeRenderPose[eye].Orientation.w;
glm::quat orientation = toGlm(eyeRenderPose[eye].Orientation);
// Update the application camera with the latest HMD position
whichCamera.setHmdPosition(trackerPosition);
whichCamera.setHmdRotation(orientation);
@ -607,19 +635,14 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
glMatrixMode(GL_MODELVIEW);
glLoadIdentity();
ovrRecti & vp = _eyeTextures[eye].Header.RenderViewport;
vp.Size.h = _recommendedTexSize.h * _offscreenRenderScale;
vp.Size.w = _recommendedTexSize.w * _offscreenRenderScale;
const ovrRecti& vp = _eyeViewports[eye];
glViewport(vp.Pos.x, vp.Pos.y, vp.Size.w, vp.Size.h);
qApp->displaySide(*_camera, false, RenderArgs::MONO);
qApp->getApplicationOverlay().displayOverlayTextureHmd(*_camera);
//qApp->getApplicationOverlay().displayOverlayTextureHmd(*_camera);
});
_activeEye = ovrEye_Count;
glPopMatrix();
gpu::FramebufferPointer finalFbo;
//Bind the output texture from the glow shader. If glow effect is disabled, we just grab the texture
if (Menu::getInstance()->isOptionChecked(MenuOption::EnableGlowEffect)) {
@ -633,146 +656,70 @@ void OculusManager::display(QGLWidget * glCanvas, const glm::quat &bodyOrientati
glMatrixMode(GL_PROJECTION);
glPopMatrix();
glMatrixMode(GL_MODELVIEW);
glPopMatrix();
#ifdef Q_OS_WIN
auto srcFboSize = finalFbo->getSize();
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
_swapFbo->Bound(oglplus::Framebuffer::Target::Draw, [&] {
glBlitFramebuffer(
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, _swapFbo->size.x, _swapFbo->size.y,
GL_COLOR_BUFFER_BIT, GL_NEAREST);
});
auto destWindowSize = qApp->getDeviceSize();
glBlitFramebuffer(
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, destWindowSize.width(), destWindowSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
ovrLayerEyeFov sceneLayer;
sceneLayer.ColorTexture[0] = _swapFbo->color;
sceneLayer.ColorTexture[1] = nullptr;
sceneLayer.Viewport[0].Pos = { 0, 0 };
sceneLayer.Viewport[0].Size = { _swapFbo->size.x / 2, _swapFbo->size.y };
sceneLayer.Viewport[1].Pos = { _swapFbo->size.x / 2, 0 };
sceneLayer.Viewport[1].Size = { _swapFbo->size.x / 2, _swapFbo->size.y };
sceneLayer.Header.Type = ovrLayerType_EyeFov;
sceneLayer.Header.Flags = ovrLayerFlag_TextureOriginAtBottomLeft;
for_each_eye([&](ovrEyeType eye) {
sceneLayer.Fov[eye] = _eyeFov[eye];
sceneLayer.RenderPose[eye] = eyeRenderPose[eye];
});
auto header = &sceneLayer.Header;
ovrResult res = ovrHmd_SubmitFrame(_ovrHmd, _frameIndex, nullptr, &header, 1);
Q_ASSERT(OVR_SUCCESS(res));
_swapFbo->Increment();
#else
GLuint textureId = gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0));
for_each_eye([&](ovrEyeType eye) {
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
glEyeTexture.OGL.TexId = textureId;
});
//ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
glClearColor(0, 0, 0, 1);
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
auto srcFboSize = finalFbo->getSize();
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
auto destWindowSize = qApp->getDeviceSize();
glBlitFramebuffer(
0, 0, srcFboSize.x, srcFboSize.y,
0, 0, destWindowSize.width(), destWindowSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
glBindFramebuffer(GL_READ_FRAMEBUFFER, 0);
glCanvas->swapBuffers();
#endif
// restore our normal viewport
glViewport(0, 0, deviceSize.width(), deviceSize.height());
#if 0
if (debugFrame && !timerActive) {
timerQuery.begin();
}
#endif
#ifdef OVR_CLIENT_DISTORTION
//Wait till time-warp to reduce latency
ovr_WaitTillTime(_hmdFrameTiming.TimewarpPointSeconds);
#ifdef DEBUG_RENDER_WITHOUT_DISTORTION
auto fboSize = finalFbo->getSize();
glBindFramebuffer(GL_READ_FRAMEBUFFER, gpu::GLBackend::getFramebufferID(finalFbo));
glBlitFramebuffer(
0, 0, fboSize.x, fboSize.y,
0, 0, deviceSize.width(), deviceSize.height(),
GL_COLOR_BUFFER_BIT, GL_NEAREST);
#else
//Clear the color buffer to ensure that there isnt any residual color
//Left over from when OR was not connected.
glClear(GL_COLOR_BUFFER_BIT);
glBindTexture(GL_TEXTURE_2D, gpu::GLBackend::getTextureID(finalFbo->getRenderBuffer(0)));
//Renders the distorted mesh onto the screen
renderDistortionMesh(eyeRenderPose);
glBindTexture(GL_TEXTURE_2D, 0);
#endif
glCanvas->swapBuffers();
#else
for_each_eye([&](ovrEyeType eye) {
ovrGLTexture & glEyeTexture = reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]);
glEyeTexture.OGL.TexId = finalFbo->texture();
});
ovrHmd_EndFrame(_ovrHmd, eyeRenderPose, _eyeTextures);
#endif
#if 0
if (debugFrame && !timerActive) {
timerQuery.end();
timerActive = true;
}
#endif
// No DK2, no message.
{
float latencies[5] = {};
if (debugFrame && ovrHmd_GetFloatArray(_ovrHmd, "DK2Latency", latencies, 5) == 5)
{
bool nonZero = false;
for (int i = 0; i < 5; ++i)
{
nonZero |= (latencies[i] != 0.f);
}
if (nonZero)
{
qCDebug(interfaceapp) << QString().sprintf("M2P Latency: Ren: %4.2fms TWrp: %4.2fms PostPresent: %4.2fms Err: %4.2fms %4.2fms",
latencies[0], latencies[1], latencies[2], latencies[3], latencies[4]);
}
}
}
}
#ifdef OVR_CLIENT_DISTORTION
// Client-side distortion pass: draw the precomputed per-eye distortion meshes
// (built by generateDistortionMesh) to the screen, applying the SDK's UV
// scale/offset and timewarp matrices through the distortion shader. The
// scene texture is expected to be bound to unit 0 by the caller.
void OculusManager::renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]) {
glLoadIdentity();
auto deviceSize = qApp->getDeviceSize();
// Pixel-space orthographic projection over the whole window.
glOrtho(0, deviceSize.width(), 0, deviceSize.height(), -1.0, 1.0);
// Full-screen post-process: depth and blending would only interfere.
glDisable(GL_DEPTH_TEST);
glDisable(GL_BLEND);
_program.bind();
_program.setUniformValue(_textureLocation, 0);
_program.enableAttributeArray(_positionAttributeLocation);
_program.enableAttributeArray(_colorAttributeLocation);
_program.enableAttributeArray(_texCoord0AttributeLocation);
_program.enableAttributeArray(_texCoord1AttributeLocation);
_program.enableAttributeArray(_texCoord2AttributeLocation);
//Render the distortion meshes for each eye
for (int eyeNum = 0; eyeNum < ovrEye_Count; eyeNum++) {
ovrHmd_GetRenderScaleAndOffset(_eyeRenderDesc[eyeNum].Fov, _renderTargetSize, _eyeTextures[eyeNum].Header.RenderViewport,
_UVScaleOffset[eyeNum]);
GLfloat uvScale[2] = { _UVScaleOffset[eyeNum][0].x, _UVScaleOffset[eyeNum][0].y };
_program.setUniformValueArray(_eyeToSourceUVScaleLocation, uvScale, 1, 2);
// The V offset is flipped (1.0f - y), presumably to convert between the
// SDK's and GL's texture-origin conventions — confirm against the shader.
GLfloat uvOffset[2] = { _UVScaleOffset[eyeNum][1].x, 1.0f - _UVScaleOffset[eyeNum][1].y };
_program.setUniformValueArray(_eyeToSourceUVOffsetLocation, uvOffset, 1, 2);
ovrMatrix4f timeWarpMatrices[2];
glm::mat4 transposeMatrices[2];
//Grabs the timewarp matrices to be used in the shader
ovrHmd_GetEyeTimewarpMatrices(_ovrHmd, (ovrEyeType)eyeNum, eyeRenderPose[eyeNum], timeWarpMatrices);
//Have to transpose the matrices before using them
transposeMatrices[0] = glm::transpose(toGlm(timeWarpMatrices[0]));
transposeMatrices[1] = glm::transpose(toGlm(timeWarpMatrices[1]));
glUniformMatrix4fv(_eyeRotationStartLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[0][0][0]);
glUniformMatrix4fv(_eyeRotationEndLocation, 1, GL_FALSE, (GLfloat *)&transposeMatrices[1][0][0]);
glBindBuffer(GL_ARRAY_BUFFER, _vertices[eyeNum]);
//Set vertex attribute pointers
// Byte offsets 0/8/16/24/32 match the DistortionVertex layout:
// pos, texR, texG, texB (vec2 each), then the packed RGBA byte color.
glVertexAttribPointer(_positionAttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)0);
glVertexAttribPointer(_texCoord0AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)8);
glVertexAttribPointer(_texCoord1AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)16);
glVertexAttribPointer(_texCoord2AttributeLocation, 2, GL_FLOAT, GL_FALSE, sizeof(DistortionVertex), (void *)24);
glVertexAttribPointer(_colorAttributeLocation, 4, GL_UNSIGNED_BYTE, GL_TRUE, sizeof(DistortionVertex), (void *)32);
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, _indices[eyeNum]);
glDrawElements(GL_TRIANGLES, _meshSize[eyeNum], GL_UNSIGNED_SHORT, 0);
}
// Restore disabled attribute arrays and GL state for subsequent passes.
_program.disableAttributeArray(_positionAttributeLocation);
_program.disableAttributeArray(_colorAttributeLocation);
_program.disableAttributeArray(_texCoord0AttributeLocation);
_program.disableAttributeArray(_texCoord1AttributeLocation);
_program.disableAttributeArray(_texCoord2AttributeLocation);
glEnable(GL_BLEND);
glEnable(GL_DEPTH_TEST);
_program.release();
glBindBuffer(GL_ARRAY_BUFFER, 0);
}
#endif
//Tries to reconnect to the sensors
void OculusManager::reset() {
if (_isConnected) {
@ -825,6 +772,9 @@ void OculusManager::overrideOffAxisFrustum(float& left, float& right, float& bot
}
int OculusManager::getHMDScreen() {
#ifdef Q_OS_WIN
return -1;
#else
int hmdScreenIndex = -1; // unknown
// TODO: it might be smarter to handle multiple HMDs connected in this case. but for now,
// we will simply assume the initialization code that set up _ovrHmd picked the best hmd
@ -875,5 +825,6 @@ int OculusManager::getHMDScreen() {
}
}
return hmdScreenIndex;
#endif
}

View file

@ -24,26 +24,9 @@ class Camera;
class PalmData;
class Text3DOverlay;
// Uncomment this to enable client side distortion. NOT recommended since
// the Oculus SDK will ideally provide the best practices for distortion in
// in terms of performance and quality, and by using it we will get updated
// best practices for free with new runtime releases.
#define OVR_CLIENT_DISTORTION 1
// Direct HMD mode is currently only supported on windows and some linux systems will
// misbehave if we try to enable the Oculus SDK at all, so isolate support for Direct
// mode only to windows for now
#ifdef Q_OS_WIN
// On Win32 platforms, enabling Direct HMD requires that the SDK be
// initialized before the GL context is set up, but this breaks v-sync
// for any application that has a Direct mode enable Rift connected
// but is not rendering to it. For the time being I'm setting this as
// a macro enabled mechanism which changes where the SDK is initialized.
// To enable Direct HMD mode, you can un-comment this, but with the
// caveat that it will break v-sync in NON-VR mode if you have an Oculus
// Rift connect and in Direct mode
#define OVR_DIRECT_MODE 1
struct SwapFramebufferWrapper;
struct MirrorFramebufferWrapper;
#endif
@ -83,44 +66,7 @@ public:
private:
static void initSdk();
static void shutdownSdk();
#ifdef OVR_CLIENT_DISTORTION
static void generateDistortionMesh();
static void renderDistortionMesh(ovrPosef eyeRenderPose[ovrEye_Count]);
struct DistortionVertex {
glm::vec2 pos;
glm::vec2 texR;
glm::vec2 texG;
glm::vec2 texB;
struct {
GLubyte r;
GLubyte g;
GLubyte b;
GLubyte a;
} color;
};
static ProgramObject _program;
//Uniforms
static int _textureLocation;
static int _eyeToSourceUVScaleLocation;
static int _eyeToSourceUVOffsetLocation;
static int _eyeRotationStartLocation;
static int _eyeRotationEndLocation;
//Attributes
static int _positionAttributeLocation;
static int _colorAttributeLocation;
static int _texCoord0AttributeLocation;
static int _texCoord1AttributeLocation;
static int _texCoord2AttributeLocation;
static ovrVector2f _UVScaleOffset[ovrEye_Count][2];
static GLuint _vertices[ovrEye_Count];
static GLuint _indices[ovrEye_Count];
static GLsizei _meshSize[ovrEye_Count];
static ovrFrameTiming _hmdFrameTiming;
static bool _programInitialized;
#endif
static ovrTexture _eyeTextures[ovrEye_Count];
static bool _isConnected;
static glm::vec3 _eyePositions[ovrEye_Count];
static ovrHmd _ovrHmd;
@ -128,6 +74,7 @@ private:
static ovrVector3f _eyeOffset[ovrEye_Count];
static glm::mat4 _eyeProjection[ovrEye_Count];
static ovrEyeRenderDesc _eyeRenderDesc[ovrEye_Count];
static ovrRecti _eyeViewports[ovrEye_Count];
static ovrSizei _renderTargetSize;
static unsigned int _frameIndex;
static bool _frameTimingActive;
@ -160,6 +107,13 @@ private:
static float _offscreenRenderScale;
static bool _eyePerFrameMode;
static ovrEyeType _lastEyeRendered;
#ifdef Q_OS_WIN
static SwapFramebufferWrapper* _swapFbo;
static MirrorFramebufferWrapper* _mirrorFbo;
static ovrLayerEyeFov _sceneLayer;
#else
static ovrTexture _eyeTextures[ovrEye_Count];
#endif
};

View file

@ -1412,7 +1412,7 @@ void Model::simulate(float deltaTime, bool fullUpdate) {
if (_snapModelToRegistrationPoint && !_snappedToRegistrationPoint) {
snapToRegistrationPoint();
}
simulateInternal(deltaTime);
//simulateInternal(deltaTime);
}
}

View file

@ -2,8 +2,18 @@ set(TARGET_NAME shared)
# use setup_hifi_library macro to setup our project and link appropriate Qt modules
# TODO: there isn't really a good reason to have Script linked here - let's get what is requiring it out (RegisteredMetaTypes.cpp)
setup_hifi_library(Gui Network Script Widgets)
setup_hifi_library(Gui Network OpenGL Script Widgets)
setup_hifi_opengl()
add_dependency_external_projects(glm)
find_package(GLM REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})
target_include_directories(${TARGET_NAME} PUBLIC ${GLM_INCLUDE_DIRS})
add_dependency_external_projects(boostconfig)
find_package(BOOSTCONFIG REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${BOOSTCONFIG_INCLUDE_DIRS})
add_dependency_external_projects(oglplus)
find_package(OGLPLUS REQUIRED)
target_include_directories(${TARGET_NAME} PUBLIC ${OGLPLUS_INCLUDE_DIRS})

View file

@ -0,0 +1,322 @@
//
// Created by Bradley Austin Davis on 2015/05/29
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OglplusHelpers.h"
using namespace oglplus;
using namespace oglplus::shapes;
// Minimal pass-through vertex shader: transforms Position by ModelView and
// Projection (both default to identity) and forwards the texture coordinate.
// The "#pragma line __LINE__" keeps GLSL compile-error line numbers aligned
// with this C++ source.
static const char * SIMPLE_TEXTURED_VS = R"VS(#version 410 core
#pragma line __LINE__
uniform mat4 Projection = mat4(1);
uniform mat4 ModelView = mat4(1);
layout(location = 0) in vec3 Position;
layout(location = 1) in vec2 TexCoord;
out vec2 vTexCoord;
void main() {
gl_Position = Projection * ModelView * vec4(Position, 1);
vTexCoord = TexCoord;
}
)VS";

// Matching fragment shader: samples the bound texture and clamps the sampled
// alpha to the Alpha uniform (used for fading the overlay in/out).
static const char * SIMPLE_TEXTURED_FS = R"FS(#version 410 core
#pragma line __LINE__
uniform sampler2D sampler;
uniform float Alpha = 1.0;
in vec2 vTexCoord;
out vec4 vFragColor;
void main() {
vec4 c = texture(sampler, vTexCoord);
c.a = min(Alpha, c.a);
vFragColor = c;
}
)FS";
// Builds the basic textured-quad program from the embedded GLSL sources above.
// Aborts (via compileProgram's qFatal path) if the sources fail to build.
ProgramPtr loadDefaultShader() {
    ProgramPtr program;
    compileProgram(program, SIMPLE_TEXTURED_VS, SIMPLE_TEXTURED_FS);
    return program;
}
// Compiles and links a GLSL program from the given vertex and fragment shader
// sources, storing the result in `result`.
// On build failure this asserts in debug builds and terminates via qFatal in
// release builds -- shader build errors here are programmer errors, not
// recoverable runtime conditions.
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs) {
    using namespace oglplus;
    try {
        result = ProgramPtr(new Program());
        // attach the shaders to the program
        result->AttachShader(
            VertexShader()
            .Source(GLSLSource(vs))
            .Compile()
            );
        result->AttachShader(
            FragmentShader()
            .Source(GLSLSource(fs))
            .Compile()
            );
        result->Link();
    } catch (ProgramBuildError & err) {
        Q_ASSERT_X(false, "compileProgram", "Failed to build shader program");
        // FIX: never pass build-log text directly as the printf-style format
        // string -- a '%' in the shader log would be interpreted as a format
        // specifier (undefined behavior). Use an explicit "%s".
        // (Also removed Q_UNUSED(err); err is in fact used here.)
        qFatal("%s", (const char*)err.Message);
        result.reset(); // unreachable after qFatal aborts; kept as a safety net
    }
}
// Builds a textured quad whose Position/TexCoord attributes are bound to the
// given program. The plane spans unit axes; one axis is scaled down so the
// quad matches the requested aspect ratio (width/height).
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect) {
    using namespace oglplus;
    Vec3f xAxis(1, 0, 0);
    Vec3f yAxis(0, 1, 0);
    // Shrink whichever axis keeps the quad inside the unit square.
    if (aspect > 1) {
        yAxis[1] /= aspect;
    } else {
        xAxis[0] *= aspect;
    }
    return ShapeWrapperPtr(
        new shapes::ShapeWrapper({ "Position", "TexCoord" }, shapes::Plane(xAxis, yAxis), *program)
    );
}
// Return a point's cartesian coordinates on the unit sphere for the given
// yaw (around Y) and pitch (around X), preserving the original sign
// conventions (negated pitch, negated sin/cos of yaw).
static glm::vec3 getPoint(float yaw, float pitch) {
    const float cosPitch = glm::cos(-pitch);
    return glm::vec3(
        -cosPitch * glm::sin(yaw),
        glm::sin(-pitch),
        -cosPitch * glm::cos(yaw));
}
/// Drawable generating a section of a sphere covering a given field of view,
/// following the oglplus::shapes drawable conventions so it can be wrapped by
/// shapes::ShapeWrapper (see loadSphereSection below).
class SphereSection : public DrawingInstructionWriter, public DrawMode {
public:
    /// The type of the index container returned by Indices()
    using IndexArray = std::vector<GLuint>;
    using PosArray = std::vector<float>;
    using TexArray = std::vector<float>;
    // vertex positions
    PosArray _pos_data;
    // vertex tex coords
    TexArray _tex_data;
    // triangle indices (two triangles per grid quad)
    IndexArray _idx_data;
    // number of triangles generated
    unsigned int _prim_count{ 0 };

public:
    SphereSection(
        const float fov,
        const float aspectRatio,
        const int slices_,
        const int stacks_) {
        //UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
        if (fov >= PI) {
            qDebug() << "TexturedHemisphere::buildVBO(): FOV greater or equal than Pi will create issues";
        }
        // Round the requested tessellation up to (2^k + 1) vertices per side so
        // the quad count below is a power of four (required by the bit
        // interleaving in the index pass).
        int gridSize = std::max(slices_, stacks_);
        int gridSizeLog2 = 1;
        while (1 << gridSizeLog2 < gridSize) {
            ++gridSizeLog2;
        }
        gridSize = (1 << gridSizeLog2) + 1;
        // Compute number of vertices needed
        int vertices = gridSize * gridSize;
        _pos_data.resize(vertices * 3);
        _tex_data.resize(vertices * 2);
        // Compute vertices positions and texture UV coordinate
        // (FIX: removed an empty nested loop over [0, gridSize] that did
        // nothing -- dead code, and its <= bounds would have been out of range
        // had it ever written anything.)
        for (int i = 0; i < gridSize; i++) {
            float stacksRatio = (float)i / (float)(gridSize - 1); // First stack is 0.0f, last stack is 1.0f
            // abs(theta) <= fov / 2.0f
            float pitch = -fov * (stacksRatio - 0.5f);
            for (int j = 0; j < gridSize; j++) {
                float slicesRatio = (float)j / (float)(gridSize - 1); // First slice is 0.0f, last slice is 1.0f
                // abs(phi) <= fov * aspectRatio / 2.0f
                float yaw = -fov * aspectRatio * (slicesRatio - 0.5f);
                int vertex = i * gridSize + j;
                int posOffset = vertex * 3;
                int texOffset = vertex * 2;
                vec3 pos = getPoint(yaw, pitch);
                _pos_data[posOffset] = pos.x;
                _pos_data[posOffset + 1] = pos.y;
                _pos_data[posOffset + 2] = pos.z;
                _tex_data[texOffset] = slicesRatio;
                _tex_data[texOffset + 1] = stacksRatio;
            }
        } // done with vertices

        int rowLen = gridSize;

        // gridsize now refers to the triangles, not the vertices, so reduce by one
        // or die by fencepost error http://en.wikipedia.org/wiki/Off-by-one_error
        --gridSize;
        int quads = gridSize * gridSize;
        for (int t = 0; t < quads; ++t) {
            // De-interleave the even bits of t into x and the odd bits into y
            // (Morton / Z-order decode), so quads are emitted in a
            // cache-friendlier order than plain row-major.
            int x =
                ((t & 0x0001) >> 0) |
                ((t & 0x0004) >> 1) |
                ((t & 0x0010) >> 2) |
                ((t & 0x0040) >> 3) |
                ((t & 0x0100) >> 4) |
                ((t & 0x0400) >> 5) |
                ((t & 0x1000) >> 6) |
                ((t & 0x4000) >> 7);
            int y =
                ((t & 0x0002) >> 1) |
                ((t & 0x0008) >> 2) |
                ((t & 0x0020) >> 3) |
                ((t & 0x0080) >> 4) |
                ((t & 0x0200) >> 5) |
                ((t & 0x0800) >> 6) |
                ((t & 0x2000) >> 7) |
                ((t & 0x8000) >> 8);
            int i = x * (rowLen) + y;
            // Two CCW triangles per quad.
            _idx_data.push_back(i);
            _idx_data.push_back(i + 1);
            _idx_data.push_back(i + rowLen + 1);

            _idx_data.push_back(i + rowLen + 1);
            _idx_data.push_back(i + rowLen);
            _idx_data.push_back(i);
        }
        _prim_count = quads * 2;
    }

    /// Returns the winding direction of faces
    FaceOrientation FaceWinding(void) const {
        return FaceOrientation::CCW;
    }

    typedef GLuint(SphereSection::*VertexAttribFunc)(std::vector<GLfloat>&) const;

    /// Makes the vertex positions and returns the number of values per vertex
    template <typename T>
    GLuint Positions(std::vector<T>& dest) const {
        dest.clear();
        dest.insert(dest.begin(), _pos_data.begin(), _pos_data.end());
        return 3;
    }

    /// Makes the vertex normals and returns the number of values per vertex
    /// (not generated for this shape -- dest is left empty)
    template <typename T>
    GLuint Normals(std::vector<T>& dest) const {
        dest.clear();
        return 3;
    }

    /// Makes the vertex tangents and returns the number of values per vertex
    /// (not generated for this shape -- dest is left empty)
    template <typename T>
    GLuint Tangents(std::vector<T>& dest) const {
        dest.clear();
        return 3;
    }

    /// Makes the vertex bi-tangents and returns the number of values per vertex
    /// (not generated for this shape -- dest is left empty)
    template <typename T>
    GLuint Bitangents(std::vector<T>& dest) const {
        dest.clear();
        return 3;
    }

    /// Makes the texture coordinates returns the number of values per vertex
    template <typename T>
    GLuint TexCoordinates(std::vector<T>& dest) const {
        dest.clear();
        dest.insert(dest.begin(), _tex_data.begin(), _tex_data.end());
        return 2;
    }

    typedef VertexAttribsInfo<
        SphereSection,
        std::tuple<
            VertexPositionsTag,
            VertexNormalsTag,
            VertexTangentsTag,
            VertexBitangentsTag,
            VertexTexCoordinatesTag
        >
    > VertexAttribs;

    /// Computes a sphere enclosing all generated vertices (axis-aligned
    /// bounding box center, radius to the box's min corner).
    Spheref MakeBoundingSphere(void) const {
        GLfloat min_x = _pos_data[3], max_x = _pos_data[3];
        GLfloat min_y = _pos_data[4], max_y = _pos_data[4];
        GLfloat min_z = _pos_data[5], max_z = _pos_data[5];
        for (std::size_t v = 0, vn = _pos_data.size() / 3; v != vn; ++v) {
            GLfloat x = _pos_data[v * 3 + 0];
            GLfloat y = _pos_data[v * 3 + 1];
            GLfloat z = _pos_data[v * 3 + 2];

            if (min_x > x) min_x = x;
            if (min_y > y) min_y = y;
            if (min_z > z) min_z = z;

            if (max_x < x) max_x = x;
            if (max_y < y) max_y = y;
            if (max_z < z) max_z = z;
        }

        Vec3f c(
            (min_x + max_x) * 0.5f,
            (min_y + max_y) * 0.5f,
            (min_z + max_z) * 0.5f
        );

        return Spheref(
            c.x(), c.y(), c.z(),
            Distance(c, Vec3f(min_x, min_y, min_z))
        );
    }

    /// Queries the bounding sphere coordinates and dimensions
    template <typename T>
    void BoundingSphere(oglplus::Sphere<T>& bounding_sphere) const {
        bounding_sphere = oglplus::Sphere<T>(MakeBoundingSphere());
    }

    /// Returns element indices that are used with the drawing instructions
    const IndexArray & Indices(Default = Default()) const {
        return _idx_data;
    }

    /// Returns the instructions for rendering of faces
    DrawingInstructions Instructions(PrimitiveType primitive) const {
        DrawingInstructions instr = this->MakeInstructions();
        DrawOperation operation;
        operation.method = DrawOperation::Method::DrawElements;
        operation.mode = primitive;
        operation.first = 0;
        operation.count = _prim_count * 3;
        operation.restart_index = DrawOperation::NoRestartIndex();
        operation.phase = 0;
        this->AddInstruction(instr, operation);
        // FIX: return the local by value -- std::move here was a pessimizing
        // move that blocked copy elision (-Wpessimizing-move).
        return instr;
    }

    /// Returns the instructions for rendering of faces
    DrawingInstructions Instructions(Default = Default()) const {
        return Instructions(PrimitiveType::Triangles);
    }
};
// Builds the sphere-section geometry for the given field of view / aspect /
// tessellation and binds its Position/TexCoord attributes to the supplied
// program.
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov, float aspect, int slices, int stacks) {
    using namespace oglplus;
    SphereSection section(fov, aspect, slices, stacks);
    return ShapeWrapperPtr(
        new shapes::ShapeWrapper({ "Position", "TexCoord" }, section, *program)
    );
}

View file

@ -0,0 +1,166 @@
//
// Created by Bradley Austin Davis on 2015/05/26
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#include "GLMHelpers.h"
#define OGLPLUS_USE_GLCOREARB_H 0
#if defined(__APPLE__)
#define OGLPLUS_USE_GL3_H 1
#elif defined(WIN32)
#define OGLPLUS_USE_GLEW 1
#pragma warning(disable : 4068)
#elif defined(ANDROID)
#else
#define OGLPLUS_USE_GLCOREARB_H 1
#endif
#define OGLPLUS_USE_BOOST_CONFIG 1
#define OGLPLUS_NO_SITE_CONFIG 1
#define OGLPLUS_LOW_PROFILE 1
#include <oglplus/gl.hpp>
#include <oglplus/all.hpp>
#include <oglplus/interop/glm.hpp>
#include <oglplus/bound/texture.hpp>
#include <oglplus/bound/framebuffer.hpp>
#include <oglplus/bound/renderbuffer.hpp>
#include <oglplus/shapes/wrapper.hpp>
#include <oglplus/shapes/plane.hpp>
#include "NumericalConstants.h"
using FramebufferPtr = std::shared_ptr<oglplus::Framebuffer>;
using ShapeWrapperPtr = std::shared_ptr<oglplus::shapes::ShapeWrapper>;
using BufferPtr = std::shared_ptr<oglplus::Buffer>;
using VertexArrayPtr = std::shared_ptr<oglplus::VertexArray>;
using ProgramPtr = std::shared_ptr<oglplus::Program>;
using Mat4Uniform = oglplus::Uniform<mat4>;
ProgramPtr loadDefaultShader();
void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs);
ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect = 1.0f);
ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 32, int stacks = 32);
// A basic wrapper for constructing a framebuffer with a renderbuffer
// for the depth attachment and an undefined type for the color attachement
// This allows us to reuse the basic framebuffer code for both the Mirror
// FBO as well as the Oculus swap textures we will use to render the scene
// Though we don't really need depth at all for the mirror FBO, or even an
// FBO, but using one means I can just use a glBlitFramebuffer to get it onto
// the screen.
template <
    typename C,
    typename D
>
struct FramebufferWrapper {
    uvec2 size;                 // dimensions given to Init(); used by Viewport()
    oglplus::Framebuffer fbo;   // the wrapped GL framebuffer object
    C color;                    // color attachment (concrete type chosen by subclass)
    D depth;                    // depth attachment (concrete type chosen by subclass)

    FramebufferWrapper() {}

    virtual ~FramebufferWrapper() {
    }

    // Stores the size, then runs the subclass attachment hooks in a fixed
    // order: initColor(), initDepth(), initDone().
    virtual void Init(const uvec2 & size) {
        this->size = size;
        initColor();
        initDepth();
        initDone();
    }

    // Convenience overload: bind for Draw, run f, then rebind the default FBO.
    template <typename F>
    void Bound(F f) {
        Bound(oglplus::Framebuffer::Target::Draw, f);
    }

    // Binds this FBO to the given target, invokes the onBind/onUnbind hooks
    // around f(), and restores the default framebuffer on that target.
    template <typename F>
    void Bound(oglplus::Framebuffer::Target target , F f) {
        fbo.Bind(target);
        onBind(target);
        f();
        onUnbind(target);
        oglplus::DefaultFramebuffer().Bind(target);
    }

    // Sets the GL viewport to this framebuffer's full size.
    void Viewport() {
        oglplus::Context::Viewport(size.x, size.y);
    }

protected:
    // Hooks for subclasses that need extra state around a bind (no-ops here).
    virtual void onBind(oglplus::Framebuffer::Target target) {}
    virtual void onUnbind(oglplus::Framebuffer::Target target) {}

    // Maps an oglplus framebuffer target back to the raw GLenum.
    static GLenum toEnum(oglplus::Framebuffer::Target target) {
        switch (target) {
        case oglplus::Framebuffer::Target::Draw:
            return GL_DRAW_FRAMEBUFFER;
        case oglplus::Framebuffer::Target::Read:
            return GL_READ_FRAMEBUFFER;
        default:
            Q_ASSERT(false);
            return GL_FRAMEBUFFER;
        }
    }

    // Attachment construction hooks; initDone() must attach the color/depth
    // objects to fbo and verify completeness.
    virtual void initDepth() {}
    virtual void initColor() {}
    virtual void initDone() = 0;
};
// Concrete framebuffer: 2D texture color attachment + renderbuffer depth.
struct BasicFramebufferWrapper : public FramebufferWrapper <oglplus::Texture, oglplus::Renderbuffer> {
protected:
    // Allocates depth renderbuffer storage matching the framebuffer size.
    virtual void initDepth() override {
        using namespace oglplus;
        Context::Bound(Renderbuffer::Target::Renderbuffer, depth)
            .Storage(
            PixelDataInternalFormat::DepthComponent,
            size.x, size.y);
    }

    // Allocates the color texture (RGBA8, linear filtering, clamped edges).
    // NOTE(review): the internal format is RGBA8 but the source-pixel format
    // is RGB with a nullptr upload -- harmless since no data is transferred,
    // but confirm RGB vs RGBA was intended.
    virtual void initColor() override {
        using namespace oglplus;
        Context::Bound(oglplus::Texture::Target::_2D, color)
            .MinFilter(TextureMinFilter::Linear)
            .MagFilter(TextureMagFilter::Linear)
            .WrapS(TextureWrap::ClampToEdge)
            .WrapT(TextureWrap::ClampToEdge)
            .Image2D(
            0, PixelDataInternalFormat::RGBA8,
            size.x, size.y,
            0, PixelDataFormat::RGB, PixelDataType::UnsignedByte, nullptr
            );
    }

    // Attaches color and depth to the draw target and checks completeness.
    virtual void initDone() override {
        using namespace oglplus;
        static const Framebuffer::Target target = Framebuffer::Target::Draw;
        Bound(target, [&] {
            fbo.AttachTexture(target, FramebufferAttachment::Color, color, 0);
            fbo.AttachRenderbuffer(target, FramebufferAttachment::Depth, depth);
            fbo.Complete(target);
        });
    }
};
using BasicFramebufferWrapperPtr = std::shared_ptr<BasicFramebufferWrapper>;