Merge pull request #6726 from jherico/legacy_oculus

Legacy oculus work
Brad Hefta-Gaub committed 2015-12-22 11:02:37 -08:00
commit 298e4d0f83
3 changed files with 71 additions and 105 deletions

File 1 of 3: the oculusLegacy plugin's CMake build file

@@ -6,8 +6,7 @@
# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
#
#if (NOT WIN32)
if (FALSE)
if (NOT WIN32)
set(TARGET_NAME oculusLegacy)
setup_hifi_plugin()

File 2 of 3: OculusLegacyDisplayPlugin implementation (C++ source)

@@ -26,7 +26,7 @@
using namespace oglplus;
const QString OculusLegacyDisplayPlugin::NAME("Oculus Rift (0.5)");
const QString OculusLegacyDisplayPlugin::NAME("Oculus Rift (0.5) (Simulated)");
const QString & OculusLegacyDisplayPlugin::getName() const {
return NAME;
@@ -39,12 +39,6 @@ uvec2 OculusLegacyDisplayPlugin::getRecommendedRenderSize() const {
return _desiredFramebufferSize;
}
void OculusLegacyDisplayPlugin::preRender() {
ovrHmd_GetEyePoses(_hmd, _frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
ovrHmd_BeginFrame(_hmd, _frameIndex);
WindowOpenGLDisplayPlugin::preRender();
}
glm::mat4 OculusLegacyDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseProjection) const {
return _eyeProjections[eye];
}
@@ -57,13 +51,18 @@ glm::mat4 OculusLegacyDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return toGlm(_eyePoses[eye]);
}
// Should NOT be used for rendering as this will mess up timewarp. Use the getModelview() method above for
// any use of head poses for rendering, ensuring you use the correct eye
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose() const {
glm::mat4 OculusLegacyDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
static uint32_t lastFrameSeen = 0;
if (frameIndex > lastFrameSeen) {
Lock lock(_mutex);
_trackingState = ovrHmd_GetTrackingState(_hmd, ovr_GetTimeInSeconds());
ovrHmd_GetEyePoses(_hmd, frameIndex, _eyeOffsets, _eyePoses, &_trackingState);
lastFrameSeen = frameIndex;
}
return toGlm(_trackingState.HeadPose.ThePose);
}
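// ---------------------------------------------------------------------------
// Illustrative sketch, not part of this commit: the getHeadPose() change above
// samples the tracker at most once per frame index and caches the result under
// a lock, so repeated calls within the same frame reuse one pose sample. Below
// is a minimal, SDK-free version of that pattern; Pose, PoseCache and
// samplePoseFromSensor() are hypothetical stand-ins for the ovrHmd_* calls.
#include <cstdint>
#include <mutex>

struct Pose { float qx, qy, qz, qw, px, py, pz; };

// Stand-in for ovrHmd_GetTrackingState / ovrHmd_GetEyePoses.
inline Pose samplePoseFromSensor() { return Pose{}; }

class PoseCache {
public:
    Pose getHeadPose(uint32_t frameIndex) const {
        std::lock_guard<std::mutex> lock(_mutex); // taken unconditionally, unlike the plugin
        if (frameIndex > _lastFrameSeen) {        // first call for this frame: refresh
            _cachedPose = samplePoseFromSensor();
            _lastFrameSeen = frameIndex;
        }
        return _cachedPose;                       // later calls in the frame reuse the sample
    }
private:
    mutable std::mutex _mutex;
    mutable uint32_t _lastFrameSeen { 0 };
    mutable Pose _cachedPose {};
};
// ---------------------------------------------------------------------------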
bool OculusLegacyDisplayPlugin::isSupported() const {
if (!ovr_Initialize(nullptr)) {
return false;
@@ -92,10 +91,13 @@ bool OculusLegacyDisplayPlugin::isSupported() const {
}
void OculusLegacyDisplayPlugin::activate() {
WindowOpenGLDisplayPlugin::activate();
if (!(ovr_Initialize(nullptr))) {
Q_ASSERT(false);
qFatal("Failed to Initialize SDK");
}
_hswDismissed = false;
_hmd = ovrHmd_Create(0);
if (!_hmd) {
@@ -107,13 +109,13 @@ void OculusLegacyDisplayPlugin::activate() {
_eyeFovs[eye] = _hmd->MaxEyeFov[eye];
ovrEyeRenderDesc erd = _eyeRenderDescs[eye] = ovrHmd_GetRenderDesc(_hmd, eye, _eyeFovs[eye]);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[eye] = toGlm(ovrPerspectiveProjection);
ovrPerspectiveProjection =
ovrMatrix4f_Projection(erd.Fov, 0.001f, 10.0f, ovrProjection_RightHanded);
_compositeEyeProjections[eye] = toGlm(ovrPerspectiveProjection);
_eyeOffsets[eye] = erd.HmdToEyeViewOffset;
eyeSizes[eye] = toGlm(ovrHmd_GetFovTextureSize(_hmd, eye, erd.Fov, 1.0f));
});
@@ -121,38 +123,43 @@ void OculusLegacyDisplayPlugin::activate() {
combined.LeftTan = std::max(_eyeFovs[Left].LeftTan, _eyeFovs[Right].LeftTan);
combined.RightTan = std::max(_eyeFovs[Left].RightTan, _eyeFovs[Right].RightTan);
ovrMatrix4f ovrPerspectiveProjection =
ovrMatrix4f_Projection(combined, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, ovrProjection_RightHanded);
_eyeProjections[Mono] = toGlm(ovrPerspectiveProjection);
_desiredFramebufferSize = uvec2(
eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
_frameIndex = 0;
_desiredFramebufferSize = uvec2(eyeSizes[0].x + eyeSizes[1].x,
std::max(eyeSizes[0].y, eyeSizes[1].y));
if (!ovrHmd_ConfigureTracking(_hmd,
ovrTrackingCap_Orientation | ovrTrackingCap_Position | ovrTrackingCap_MagYawCorrection, 0)) {
qFatal("Could not attach to sensor device");
}
}
WindowOpenGLDisplayPlugin::activate();
void OculusLegacyDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
ovrHmd_Destroy(_hmd);
_hmd = nullptr;
ovr_Shutdown();
}
int screen = getHmdScreen();
if (screen != -1) {
_container->setFullscreen(qApp->screens()[screen]);
}
_window->installEventFilter(this);
_window->makeCurrent();
// DLL based display plugins MUST initialize GLEW inside the DLL code.
void OculusLegacyDisplayPlugin::customizeContext() {
static std::once_flag once;
std::call_once(once, []{
glewExperimental = true;
glewInit();
glGetError();
});
WindowOpenGLDisplayPlugin::customizeContext();
#if 0
ovrGLConfig config; memset(&config, 0, sizeof(ovrRenderAPIConfig));
auto& header = config.Config.Header;
header.API = ovrRenderAPI_OpenGL;
header.BackBufferSize = _hmd->Resolution;
header.Multisample = 1;
int distortionCaps = 0
| ovrDistortionCap_TimeWarp
;
int distortionCaps = ovrDistortionCap_TimeWarp;
memset(_eyeTextures, 0, sizeof(ovrTexture) * 2);
ovr_for_each_eye([&](ovrEyeType eye) {
auto& header = _eyeTextures[eye].Header;
@@ -164,74 +171,36 @@ void OculusLegacyDisplayPlugin::activate() {
header.RenderViewport.Pos.x = header.RenderViewport.Size.w;
}
});
#ifndef NDEBUG
ovrBool result =
#endif
ovrHmd_ConfigureRendering(_hmd, &config.Config, distortionCaps, _eyeFovs, _eyeRenderDescs);
assert(result);
}
void OculusLegacyDisplayPlugin::deactivate() {
_window->removeEventFilter(this);
WindowOpenGLDisplayPlugin::deactivate();
#endif
QScreen* riftScreen = nullptr;
if (_hmdScreen >= 0) {
riftScreen = qApp->screens()[_hmdScreen];
}
_container->unsetFullscreen(riftScreen);
ovrHmd_Destroy(_hmd);
_hmd = nullptr;
ovr_Shutdown();
}
// DLL based display plugins MUST initialize GLEW inside the DLL code.
void OculusLegacyDisplayPlugin::customizeContext() {
glewExperimental = true;
glewInit();
glGetError();
WindowOpenGLDisplayPlugin::customizeContext();
#if 0
void OculusLegacyDisplayPlugin::uncustomizeContext() {
WindowOpenGLDisplayPlugin::uncustomizeContext();
}
void OculusLegacyDisplayPlugin::preDisplay() {
_window->makeCurrent();
}
void OculusLegacyDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
++_frameIndex;
void OculusLegacyDisplayPlugin::internalPresent() {
ovrHmd_BeginFrame(_hmd, 0);
ovr_for_each_eye([&](ovrEyeType eye) {
reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]).OGL.TexId = finalTexture;
reinterpret_cast<ovrGLTexture&>(_eyeTextures[eye]).OGL.TexId = _currentSceneTexture;
});
ovrHmd_EndFrame(_hmd, _eyePoses, _eyeTextures);
}
// Pass input events on to the application
bool OculusLegacyDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
if (!_hswDismissed && (event->type() == QEvent::KeyPress)) {
static ovrHSWDisplayState hswState;
ovrHmd_GetHSWDisplayState(_hmd, &hswState);
if (hswState.Displayed) {
ovrHmd_DismissHSWDisplay(_hmd);
} else {
_hswDismissed = true;
}
}
return WindowOpenGLDisplayPlugin::eventFilter(receiver, event);
}
// FIXME mirroring to the main window is difficult on OSX because it requires that we
// trigger a swap, which causes the client to wait for the v-sync of the main screen running
// at 60 Hz. This would introduce judder. Perhaps we can push mirroring to a separate
// thread
// FIXME If we move to the 'batch rendering on a different thread' we can possibly do this.
// however, we need to make sure it doesn't block the event handling.
void OculusLegacyDisplayPlugin::finishFrame() {
_window->doneCurrent();
};
#endif
int OculusLegacyDisplayPlugin::getHmdScreen() const {
return _hmdScreen;
}
float OculusLegacyDisplayPlugin::getTargetFrameRate() {
return TARGET_RATE_OculusLegacy;
}
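
An aside on the customizeContext() comment above ("DLL based display plugins MUST initialize GLEW inside the DLL code"): when GLEW is linked statically into a plugin, the plugin carries its own table of GL function pointers and has to populate it itself, which the new code does exactly once via std::call_once. A minimal sketch of that pattern, assuming GLEW is available and an OpenGL context is current on the calling thread (initGlewOnceForThisModule is an illustrative name, not something from the plugin):

    #include <GL/glew.h>
    #include <mutex>

    // Call with a valid OpenGL context current on this thread.
    void initGlewOnceForThisModule() {
        static std::once_flag once;
        std::call_once(once, [] {
            glewExperimental = GL_TRUE; // needed for extension loading on core profiles
            GLenum status = glewInit();
            (void) status;              // a real plugin would log a failure here
            glGetError();               // clear the spurious GL error glewInit can leave behind
        });
    }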

File 3 of 3: OculusLegacyDisplayPlugin header

@@ -24,11 +24,8 @@ public:
virtual void activate() override;
virtual void deactivate() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual int getHmdScreen() const override;
virtual float getTargetFrameRate() override { return TARGET_RATE_OculusLegacy; }
// Stereo specific methods
virtual bool isHmd() const override { return true; }
virtual glm::mat4 getProjection(Eye eye, const glm::mat4& baseProjection) const override;
@@ -36,23 +33,24 @@ public:
virtual glm::uvec2 getRecommendedUiSize() const override { return uvec2(1920, 1080); }
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
virtual float getTargetFrameRate() override;
protected:
virtual void customizeContext() override;
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// Do not perform swap in finish
virtual void finishFrame() override;
#if 0
virtual void uncustomizeContext() override;
virtual void internalPresent() override;
#endif
private:
static const QString NAME;
ovrHmd _hmd;
ovrTrackingState _trackingState;
mutable ovrTrackingState _trackingState;
ovrEyeRenderDesc _eyeRenderDescs[2];
ovrPosef _eyePoses[2];
mutable ovrPosef _eyePoses[2];
ovrVector3f _eyeOffsets[2];
ovrFovPort _eyeFovs[2];
mat4 _eyeProjections[3];
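
A note on the two members that become mutable above (_trackingState and _eyePoses): getHeadPose() remains a const member function but now refreshes those cached values on demand, the usual "const interface, mutable cache" idiom. A minimal sketch of the idiom, using illustrative names rather than the plugin's own:

    #include <mutex>

    class SensorReader {
    public:
        // Logically read-only for callers, so the method stays const...
        double latestSample() const {
            std::lock_guard<std::mutex> lock(_mutex); // mutable mutex: lockable from a const method
            _cached = readHardware();                 // mutable cache: refreshed on demand
            return _cached;
        }
    private:
        static double readHardware() { return 0.0; }  // stand-in for the real device query
        mutable std::mutex _mutex;
        mutable double _cached { 0.0 };
    };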