Fixing legacy Oculus plugin

Bradley Austin Davis 2015-12-22 09:28:38 -08:00
parent 3dfc48e205
commit e0e1ae43f5
3 changed files with 73 additions and 83 deletions

View file

@@ -6,8 +6,7 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
#if (NOT WIN32)
if (FALSE)
if (NOT WIN32)
set(TARGET_NAME oculusLegacy)
setup_hifi_plugin()

View file

@@ -39,12 +39,6 @@ uvec2 OculusLegacyDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
void OculusLegacyDisplayPlugin::preRender() {
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
ovrHmd_BeginFrame(_hmd, _frameIndex);
WindowOpenGLDisplayPlugin::preRender();
}
glm::mat4 OculusLegacyDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
@@ -57,12 +51,18 @@ glm::mat4 OculusLegacyDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
// any use of head poses for rendering, ensuring you use the correct eye
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose() const {
return toGlm(_trackingState.HeadPose.ThePose);
}
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
static uint32_t lastFrameSeen = 0;
if (frameIndex > lastFrameSeen) {
Lock lock(_mutex);
// auto trackingState = ovr_GetTrackingState(_session, displayTime, frameIndex > lastFrameSeen);
// ovrHmd_GetEyePoses(_hmd, frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
lastFrameSeen = frameIndex;
}
// return toGlm(trackingState.HeadPose.ThePose);
return glm::mat4();
}
bool OculusLegacyDisplayPlugin::isSupported() const {
if (!ovr_Initialize(nullptr)) {
@@ -92,10 +92,14 @@ bool OculusLegacyDisplayPlugin::isSupported() const {
}
void OculusLegacyDisplayPlugin::activate() {
WindowOpenGLDisplayPlugin::activate();
if (!(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
qFatal("Failed to Initialize SDK");
}
_hswDismissed = false;
_hmd = ovrHmd_Create(0);
if (!_hmd) {
@@ -107,13 +111,13 @@ void OculusLegacyDisplayPlugin::activate() {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
@@ -121,60 +125,27 @@ void OculusLegacyDisplayPlugin::activate() {
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
_desiredFramebufferSize = uvec2(eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
if (!ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
qFatal("Could not attach to sensor device");
}
WindowOpenGLDisplayPlugin::activate();
int screen = getHmdScreen();
if (screen != -1) {
_container->setFullscreen(qApp->screens()[screen]);
}
_window->installEventFilter(this);
_window->makeCurrent();
ovrGLConfig config; memset(&config, 0, sizeof(ovrRenderAPIConfig));
auto& header = config.Config.Header;
header.API = ovrRenderAPI_OpenGL;
header.BackBufferSize = _hmd->Resolution;
header.Multisample = 1;
int distortionCaps = 0
| ovrDistortionCap_TimeWarp
;
memset(_eyeTextures, 0, sizeof(ovrTexture) * 2);
ovr_for_each_eye([&](ovrEyeType eye) {
auto& header = _eyeTextures[eye].Header;
header.API = ovrRenderAPI_OpenGL;
header.TextureSize = { (int)_desiredFramebufferSize.x, (int)_desiredFramebufferSize.y };
header.RenderViewport.Size = header.TextureSize;
header.RenderViewport.Size.w /= 2;
if (eye == ovrEye_Right) {
header.RenderViewport.Pos.x = header.RenderViewport.Size.w;
}
});
#ifndef NDEBUG
ovrBool result =
#endif
ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
assert(result);
}
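
(Side note on the sizing arithmetic kept in activate() above: the two eye render targets are packed side by side into one shared framebuffer, so the target is as wide as both eyes together and as tall as the taller eye, and the combined mono FOV similarly takes the larger of the per-eye tangents. A minimal sketch of that size calculation, assuming GLM is available; combinedFramebufferSize is a hypothetical helper, not part of the plugin.)

#include <algorithm>
#include <glm/glm.hpp>

// Two eye render targets packed left/right into one shared framebuffer:
// total width is the sum of the eye widths, total height is the larger of the two.
glm::uvec2 combinedFramebufferSize(const glm::uvec2& leftEye, const glm::uvec2& rightEye) {
    return glm::uvec2(leftEye.x + rightEye.x,
                      std::max(leftEye.y, rightEye.y));
}

// For example, two 1182x1464 eye targets yield a 2364x1464 shared framebuffer.
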
void OculusLegacyDisplayPlugin::deactivate() {
_window->removeEventFilter(this);
WindowOpenGLDisplayPlugin::deactivate();
QScreen* riftScreen = nullptr;
@@ -194,20 +165,46 @@ void OculusLegacyDisplayPlugin::customizeContext() {
glewInit();
glGetError();
WindowOpenGLDisplayPlugin::customizeContext();
}
void OculusLegacyDisplayPlugin::preDisplay() {
_window->makeCurrent();
}
void OculusLegacyDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
++_frameIndex;
ovrGLConfig config; memset(&config, 0, sizeof(ovrRenderAPIConfig));
auto& header = config.Config.Header;
header.API = ovrRenderAPI_OpenGL;
header.BackBufferSize = _hmd->Resolution;
header.Multisample = 1;
int distortionCaps = ovrDistortionCap_TimeWarp;
memset(_eyeTextures, 0, sizeof(ovrTexture) * 2);
ovr_for_each_eye([&](ovrEyeType eye) {
reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]).OGL.TexId = finalTexture;
auto& header = _eyeTextures[eye].Header;
header.API = ovrRenderAPI_OpenGL;
header.TextureSize = { (int)_desiredFramebufferSize.x, (int)_desiredFramebufferSize.y };
header.RenderViewport.Size = header.TextureSize;
header.RenderViewport.Size.w /= 2;
if (eye == ovrEye_Right) {
header.RenderViewport.Pos.x = header.RenderViewport.Size.w;
}
});
#ifndef NDEBUG
ovrBool result =
#endif
ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
assert(result);
}
void OculusLegacyDisplayPlugin::uncustomizeContext() {
WindowOpenGLDisplayPlugin::uncustomizeContext();
}
void OculusLegacyDisplayPlugin::internalPresent() {
ovrHmd_BeginFrame(_hmd, 0);
ovr_for_each_eye([&](ovrEyeType eye) {
reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]).OGL.TexId = _currentSceneTexture;
});
ovrHmd_EndFrame(_hmd, _eyePoses, _eyeTextures);
}
/*
// Pass input events on to the application
bool OculusLegacyDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
if (!_hswDismissed && (event->type() == QEvent::KeyPress)) {
@@ -221,17 +218,13 @@ bool OculusLegacyDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
}
return WindowOpenGLDisplayPlugin::eventFilter(receiver, event);
}
// FIXME mirroring to the main window is difficult on OSX because it requires that we
// trigger a swap, which causes the client to wait for the v-sync of the main screen running
// at 60 Hz. This would introduce judder. Perhaps we can push mirroring to a separate
// thread
// FIXME If we move to the 'batch rendering on a different thread' we can possibly do this.
// however, we need to make sure it doesn't block the event handling.
void OculusLegacyDisplayPlugin::finishFrame() {
_window->doneCurrent();
};
*/
int OculusLegacyDisplayPlugin::getHmdScreen() const {
return _hmdScreen;
}
float OculusLegacyDisplayPlugin::getTargetFrameRate() {
return TARGET_RATE_OculusLegacy;
}
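
(Side note on the new getHeadPose(uint32_t frameIndex) overload earlier in this file's diff: it is still a stub here, with the SDK query commented out and an identity matrix returned, but its static lastFrameSeen plus lock outlines a query-once-per-frame caching pattern. A minimal, self-contained sketch of that pattern, assuming GLM is available; PoseCache, headPose, and queryFn are hypothetical names, not part of the plugin or the Oculus SDK.)

#include <cstdint>
#include <mutex>
#include <glm/glm.hpp>

class PoseCache {
public:
    // queryFn stands in for whatever call actually fetches the current head pose.
    template <typename QueryFn>
    glm::mat4 headPose(uint32_t frameIndex, QueryFn queryFn) {
        std::lock_guard<std::mutex> lock(_mutex);
        if (frameIndex > _lastFrameSeen) {   // new frame: refresh the cached pose
            _cachedPose = queryFn(frameIndex);
            _lastFrameSeen = frameIndex;
        }
        return _cachedPose;                  // same frame: reuse the cached value
    }

private:
    std::mutex _mutex;
    uint32_t _lastFrameSeen { 0 };
    glm::mat4 _cachedPose { 1.0f };
};

(A caller would pass the frame index being rendered and a callable that performs the actual tracking query, so repeated lookups within one frame reuse the cached pose instead of hitting the tracker again.)
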

View file

@@ -24,11 +24,9 @@ public:
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
// virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual int getHmdScreen() const override;
virtual float getTargetFrameRate() override { return TARGET_RATE_OculusLegacy; }
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
@@ -36,20 +34,20 @@ public:
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
virtual float getTargetFrameRate() override;
protected:
virtual void customizeContext() override;
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// Do not perform swap in finish
virtual void finishFrame() override;
virtual void uncustomizeContext() override;
virtual void internalPresent() override;
private:
static const QString NAME;
ovrHmd _hmd;
std::mutex _statelock;
ovrTrackingState _trackingState;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];