Merge pull request #8042 from jherico/lasers

First pass prototype of laser rendering in OpenVR plugin

Commit d05987de6d
25 changed files with 620 additions and 214 deletions
@@ -105,3 +105,13 @@ QString HMDScriptingInterface::preferredAudioInput() const {
 QString HMDScriptingInterface::preferredAudioOutput() const {
     return qApp->getActiveDisplayPlugin()->getPreferredAudioOutDevice();
 }
+
+bool HMDScriptingInterface::setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const {
+    return qApp->getActiveDisplayPlugin()->setHandLaser(hands,
+        enabled ? DisplayPlugin::HandLaserMode::Overlay : DisplayPlugin::HandLaserMode::None,
+        color, direction);
+}
+
+void HMDScriptingInterface::disableHandLasers(int hands) const {
+    qApp->getActiveDisplayPlugin()->setHandLaser(hands, DisplayPlugin::HandLaserMode::None);
+}
@@ -36,6 +36,8 @@ public:
     Q_INVOKABLE glm::vec2 overlayToSpherical(const glm::vec2 & overlayPos) const;
     Q_INVOKABLE QString preferredAudioInput() const;
     Q_INVOKABLE QString preferredAudioOutput() const;
+    Q_INVOKABLE bool setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;
+    Q_INVOKABLE void disableHandLasers(int hands) const;
 
 public:
     HMDScriptingInterface();
@@ -32,6 +32,7 @@ class Mapping;
 using MappingPointer = std::shared_ptr<Mapping>;
 using MappingList = std::list<MappingPointer>;
 
+struct Pose;
 }
 
 #endif
@@ -213,9 +213,10 @@ OpenGLDisplayPlugin::OpenGLDisplayPlugin() {
 }
 
 void OpenGLDisplayPlugin::cleanupForSceneTexture(const gpu::TexturePointer& sceneTexture) {
-    Lock lock(_mutex);
-    Q_ASSERT(_sceneTextureToFrameIndexMap.contains(sceneTexture));
-    _sceneTextureToFrameIndexMap.remove(sceneTexture);
+    withRenderThreadLock([&] {
+        Q_ASSERT(_sceneTextureToFrameIndexMap.contains(sceneTexture));
+        _sceneTextureToFrameIndexMap.remove(sceneTexture);
+    });
 }
 
 
@@ -394,10 +395,9 @@ void OpenGLDisplayPlugin::submitSceneTexture(uint32_t frameIndex, const gpu::Tex
         return;
     }
 
-    {
-        Lock lock(_mutex);
+    withRenderThreadLock([&] {
         _sceneTextureToFrameIndexMap[sceneTexture] = frameIndex;
-    }
+    });
 
     // Submit it to the presentation thread via escrow
     _sceneTextureEscrow.submit(sceneTexture);
@@ -431,11 +431,12 @@ void OpenGLDisplayPlugin::updateTextures() {
 }
 
 void OpenGLDisplayPlugin::updateFrameData() {
-    Lock lock(_mutex);
-    auto previousFrameIndex = _currentPresentFrameIndex;
-    _currentPresentFrameIndex = _sceneTextureToFrameIndexMap[_currentSceneTexture];
-    auto skippedCount = (_currentPresentFrameIndex - previousFrameIndex) - 1;
-    _droppedFrameRate.increment(skippedCount);
+    withPresentThreadLock([&] {
+        auto previousFrameIndex = _currentPresentFrameIndex;
+        _currentPresentFrameIndex = _sceneTextureToFrameIndexMap[_currentSceneTexture];
+        auto skippedCount = (_currentPresentFrameIndex - previousFrameIndex) - 1;
+        _droppedFrameRate.increment(skippedCount);
+    });
 }
 
 void OpenGLDisplayPlugin::compositeOverlay() {
@@ -492,14 +493,14 @@ void OpenGLDisplayPlugin::compositeLayers() {
     }
     _compositeFramebuffer->Bound(Framebuffer::Target::Draw, [&] {
         Context::Viewport(targetRenderSize.x, targetRenderSize.y);
         Context::Clear().DepthBuffer();
-        glBindTexture(GL_TEXTURE_2D, getSceneTextureId());
-        compositeScene();
+        auto sceneTextureId = getSceneTextureId();
         auto overlayTextureId = getOverlayTextureId();
+        glBindTexture(GL_TEXTURE_2D, sceneTextureId);
+        compositeScene();
         if (overlayTextureId) {
-            glEnable(GL_BLEND);
-            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA);
             glBindTexture(GL_TEXTURE_2D, overlayTextureId);
+            Context::Enable(Capability::Blend);
+            Context::BlendFunc(BlendFunction::SrcAlpha, BlendFunction::OneMinusSrcAlpha);
             compositeOverlay();
 
             auto compositorHelper = DependencyManager::get<CompositorHelper>();
@@ -507,11 +508,16 @@ void OpenGLDisplayPlugin::compositeLayers() {
                 auto& cursorManager = Cursor::Manager::instance();
                 const auto& cursorData = _cursorsData[cursorManager.getCursor()->getIcon()];
                 glBindTexture(GL_TEXTURE_2D, cursorData.texture);
+                glActiveTexture(GL_TEXTURE1);
+                glBindTexture(GL_TEXTURE_2D, overlayTextureId);
                 compositePointer();
+                glBindTexture(GL_TEXTURE_2D, 0);
+                glActiveTexture(GL_TEXTURE0);
             }
             glBindTexture(GL_TEXTURE_2D, 0);
-            glDisable(GL_BLEND);
+            Context::Disable(Capability::Blend);
         }
+        compositeExtra();
     });
 }
 
@@ -549,7 +555,11 @@ float OpenGLDisplayPlugin::newFramePresentRate() const {
 }
 
 float OpenGLDisplayPlugin::droppedFrameRate() const {
-    return _droppedFrameRate.rate();
+    float result;
+    withRenderThreadLock([&] {
+        result = _droppedFrameRate.rate();
+    });
+    return result;
 }
 
 float OpenGLDisplayPlugin::presentRate() const {
@@ -664,3 +674,11 @@ void OpenGLDisplayPlugin::useProgram(const ProgramPtr& program) {
         _activeProgram = program;
     }
 }
+
+void OpenGLDisplayPlugin::assertIsRenderThread() const {
+    Q_ASSERT(QThread::currentThread() != _presentThread);
+}
+
+void OpenGLDisplayPlugin::assertIsPresentThread() const {
+    Q_ASSERT(QThread::currentThread() == _presentThread);
+}
@@ -74,6 +74,7 @@ protected:
     virtual void compositeScene();
     virtual void compositeOverlay();
     virtual void compositePointer();
+    virtual void compositeExtra() {};
 
     virtual bool hasFocus() const override;
 
@@ -109,7 +110,6 @@ protected:
     int32_t _alphaUniform { -1 };
     ShapeWrapperPtr _plane;
 
-    mutable Mutex _mutex;
     RateCounter<> _droppedFrameRate;
     RateCounter<> _newFrameRate;
     RateCounter<> _presentRate;
@@ -135,7 +135,27 @@ protected:
     BasicFramebufferWrapperPtr _compositeFramebuffer;
     bool _lockCurrentTexture { false };
 
+    void assertIsRenderThread() const;
+    void assertIsPresentThread() const;
+
+    template<typename F>
+    void withPresentThreadLock(F f) const {
+        assertIsPresentThread();
+        Lock lock(_presentMutex);
+        f();
+    }
+
+    template<typename F>
+    void withRenderThreadLock(F f) const {
+        assertIsRenderThread();
+        Lock lock(_presentMutex);
+        f();
+    }
+
 private:
+    // Any resource shared by the main thread and the presentation thread must
+    // be serialized through this mutex
+    mutable Mutex _presentMutex;
     ProgramPtr _activeProgram;
 };
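Both wrappers above take the same _presentMutex; only the assertion differs, so in debug builds a call from the wrong thread fails fast instead of racing silently. A minimal standalone sketch of the same pattern (the names here are illustrative, not from the codebase):

    #include <cassert>
    #include <mutex>
    #include <thread>

    class DoubleThreadedResource {
    public:
        // Called only from the thread that owns the resource (the thread
        // that constructed this object, in this sketch).
        template <typename F>
        void withOwnerLock(F f) const {
            assert(std::this_thread::get_id() == _ownerId);
            std::unique_lock<std::mutex> lock(_mutex);
            f();
        }

        // Called from any thread except the owner.
        template <typename F>
        void withOtherLock(F f) const {
            assert(std::this_thread::get_id() != _ownerId);
            std::unique_lock<std::mutex> lock(_mutex);
            f();
        }

    private:
        std::thread::id _ownerId { std::this_thread::get_id() };
        mutable std::mutex _mutex;
    };

The asserts cost nothing in release builds; the single shared mutex is what actually serializes access.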
@@ -9,6 +9,7 @@
 
 #include <memory>
 #include <glm/gtc/matrix_transform.hpp>
+#include <glm/gtx/intersect.hpp>
 
 #include <QtCore/QLoggingCategory>
 #include <QtWidgets/QApplication>
@@ -37,7 +38,6 @@ QRect HmdDisplayPlugin::getRecommendedOverlayRect() const {
     return CompositorHelper::VIRTUAL_SCREEN_RECOMMENDED_OVERLAY_RECT;
 }
 
-
 bool HmdDisplayPlugin::internalActivate() {
     _monoPreview = _container->getBoolSetting("monoPreview", DEFAULT_MONO_VIEW);
 
@@ -197,14 +197,43 @@ static ProgramPtr getReprojectionProgram() {
 #endif
 
 
+static const char * LASER_VS = R"VS(#version 410 core
+uniform mat4 mvp = mat4(1);
+
+in vec3 Position;
+
+out vec3 vPosition;
+
+void main() {
+    gl_Position = mvp * vec4(Position, 1);
+    vPosition = Position;
+}
+
+)VS";
+
+static const char * LASER_FS = R"FS(#version 410 core
+
+uniform vec4 color = vec4(1.0, 1.0, 1.0, 1.0);
+in vec3 vPosition;
+
+out vec4 FragColor;
+
+void main() {
+    FragColor = color;
+}
+
+)FS";
+
 void HmdDisplayPlugin::customizeContext() {
     Parent::customizeContext();
     // Only enable mirroring if we know vsync is disabled
     enableVsync(false);
     _enablePreview = !isVsyncEnabled();
     _sphereSection = loadSphereSection(_program, CompositorHelper::VIRTUAL_UI_TARGET_FOV.y, CompositorHelper::VIRTUAL_UI_ASPECT_RATIO);
+    compileProgram(_laserProgram, LASER_VS, LASER_FS);
+    _laserGeometry = loadLaser(_laserProgram);
     compileProgram(_reprojectionProgram, REPROJECTION_VS, REPROJECTION_FS);
 
     using namespace oglplus;
     REPROJECTION_MATRIX_LOCATION = Uniform<glm::mat3>(*_reprojectionProgram, "reprojection").Location();
     INVERSE_PROJECTION_MATRIX_LOCATION = Uniform<glm::mat4>(*_reprojectionProgram, "inverseProjections").Location();
@@ -215,6 +244,8 @@ void HmdDisplayPlugin::uncustomizeContext() {
     _sphereSection.reset();
     _compositeFramebuffer.reset();
     _reprojectionProgram.reset();
+    _laserProgram.reset();
+    _laserGeometry.reset();
     Parent::uncustomizeContext();
 }
 
@@ -288,6 +319,7 @@ void HmdDisplayPlugin::compositePointer() {
     });
 }
 
+
 void HmdDisplayPlugin::internalPresent() {
 
     PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)presentCount())
@@ -344,22 +376,114 @@ void HmdDisplayPlugin::setEyeRenderPose(uint32_t frameIndex, Eye eye, const glm:
 
 void HmdDisplayPlugin::updateFrameData() {
     // Check if we have old frame data to discard
-    {
-        Lock lock(_mutex);
+    withPresentThreadLock([&] {
         auto itr = _frameInfos.find(_currentPresentFrameIndex);
         if (itr != _frameInfos.end()) {
            _frameInfos.erase(itr);
         }
-    }
+    });
 
     Parent::updateFrameData();
 
-    {
-        Lock lock(_mutex);
+    withPresentThreadLock([&] {
         _currentPresentFrameInfo = _frameInfos[_currentPresentFrameIndex];
-    }
+    });
 }
 
 glm::mat4 HmdDisplayPlugin::getHeadPose() const {
     return _currentRenderFrameInfo.renderPose;
 }
 
+bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const vec4& color, const vec3& direction) {
+    HandLaserInfo info;
+    info.mode = mode;
+    info.color = color;
+    info.direction = direction;
+    withRenderThreadLock([&] {
+        if (hands & Hand::LeftHand) {
+            _handLasers[0] = info;
+        }
+        if (hands & Hand::RightHand) {
+            _handLasers[1] = info;
+        }
+    });
+    // FIXME defer to a child class plugin to determine if hand lasers are actually
+    // available based on the presence or absence of hand controllers
+    return true;
+}
+
+void HmdDisplayPlugin::compositeExtra() {
+    std::array<HandLaserInfo, 2> handLasers;
+    std::array<mat4, 2> renderHandPoses;
+    withPresentThreadLock([&] {
+        handLasers = _handLasers;
+        renderHandPoses = _handPoses;
+    });
+
+    // If neither hand laser is activated, exit
+    if (!handLasers[0].valid() && !handLasers[1].valid()) {
+        return;
+    }
+
+    static const glm::mat4 identity;
+    if (renderHandPoses[0] == identity && renderHandPoses[1] == identity) {
+        return;
+    }
+
+    // Render hand lasers
+    using namespace oglplus;
+    useProgram(_laserProgram);
+    _laserGeometry->Use();
+    std::array<mat4, 2> handLaserModelMatrices;
+
+    for (int i = 0; i < 2; ++i) {
+        if (renderHandPoses[i] == identity) {
+            continue;
+        }
+        const auto& handLaser = handLasers[i];
+        if (!handLaser.valid()) {
+            continue;
+        }
+
+        const auto& laserDirection = handLaser.direction;
+        auto model = renderHandPoses[i];
+        auto castDirection = glm::quat_cast(model) * laserDirection;
+        if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
+            castDirection = glm::normalize(castDirection);
+        }
+
+        // FIXME fetch the actual UI radius from... somewhere?
+        float uiRadius = 1.0f;
+
+        // Find the intersection of the laser with the UI and use it to scale the model matrix
+        float distance;
+        if (!glm::intersectRaySphere(vec3(renderHandPoses[i][3]), castDirection, vec3(0), uiRadius * uiRadius, distance)) {
+            continue;
+        }
+
+        // Make sure we rotate to match the desired laser direction
+        if (laserDirection != Vectors::UNIT_NEG_Z) {
+            auto rotation = glm::rotation(Vectors::UNIT_NEG_Z, laserDirection);
+            model = model * glm::mat4_cast(rotation);
+        }
+
+        model = glm::scale(model, vec3(distance));
+        handLaserModelMatrices[i] = model;
+    }
+
+    for_each_eye([&](Eye eye) {
+        eyeViewport(eye);
+        auto eyePose = _currentPresentFrameInfo.presentPose * getEyeToHeadTransform(eye);
+        auto view = glm::inverse(eyePose);
+        const auto& projection = _eyeProjections[eye];
+        for (int i = 0; i < 2; ++i) {
+            if (handLaserModelMatrices[i] == identity) {
+                continue;
+            }
+            Uniform<glm::mat4>(*_laserProgram, "mvp").Set(projection * view * handLaserModelMatrices[i]);
+            Uniform<glm::vec4>(*_laserProgram, "color").Set(handLasers[i].color);
+            _laserGeometry->Draw();
+            // TODO render some kind of visual indicator at the intersection point with the UI.
+        }
+    });
+}
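One detail of the intersection test above is easy to misread: GLM's five-argument glm::intersectRaySphere overload expects the squared sphere radius, which is why the code passes uiRadius * uiRadius rather than uiRadius. The returned distance then becomes the laser's length, since the laser geometry is a unit segment. A minimal sketch of the same call (assuming GLM with the GTX intersect extension):

    #include <glm/glm.hpp>
    #include <glm/gtx/intersect.hpp>

    // Returns true and writes the hit distance when a ray starting at
    // handPosition and pointing along castDirection reaches the UI sphere
    // centered at the origin. Note the last scalar parameter is radius squared.
    bool laserHitsUi(const glm::vec3& handPosition, const glm::vec3& castDirection,
                     float uiRadius, float& distance) {
        return glm::intersectRaySphere(handPosition, glm::normalize(castDirection),
                                       glm::vec3(0.0f), uiRadius * uiRadius, distance);
    }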
@@ -30,7 +30,7 @@ public:
 
     virtual glm::mat4 getHeadPose() const override;
 
-
+    bool setHandLaser(uint32_t hands, HandLaserMode mode, const vec4& color, const vec3& direction) override;
 
 protected:
     virtual void hmdPresent() = 0;
@@ -46,7 +46,21 @@ protected:
     void customizeContext() override;
     void uncustomizeContext() override;
     void updateFrameData() override;
+    void compositeExtra() override;
+
+    struct HandLaserInfo {
+        HandLaserMode mode { HandLaserMode::None };
+        vec4 color { 1.0f };
+        vec3 direction { 0, 0, -1 };
+
+        // Is this hand laser info suitable for drawing?
+        bool valid() const {
+            return (mode != HandLaserMode::None && color.a > 0.0f && direction != vec3());
+        }
+    };
 
+    std::array<HandLaserInfo, 2> _handLasers;
+    std::array<glm::mat4, 2> _handPoses;
     std::array<glm::mat4, 2> _eyeOffsets;
     std::array<glm::mat4, 2> _eyeProjections;
     std::array<glm::mat4, 2> _eyeInverseProjections;
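valid() gates drawing on three independent conditions: a mode other than None, a color that is not fully transparent, and a non-zero direction. A quick illustration with hypothetical values (not from the change):

    HandLaserInfo laser;
    laser.mode = HandLaserMode::Overlay;
    laser.color = vec4(1.0f, 0.0f, 0.0f, 1.0f);   // opaque red: valid() == true

    laser.color.a = 0.0f;                          // fully transparent: valid() == false
    laser.direction = vec3();                      // zero direction: also invalid

Any one failing condition is enough for compositeExtra() to skip that hand entirely.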
@@ -75,5 +89,7 @@ private:
     bool _enableReprojection { true };
     ShapeWrapperPtr _sphereSection;
     ProgramPtr _reprojectionProgram;
+    ProgramPtr _laserProgram;
+    ShapeWrapperPtr _laserGeometry;
 };
|
@ -45,9 +45,11 @@ in vec2 vTexCoord;
|
|||
out vec4 FragColor;
|
||||
|
||||
void main() {
|
||||
|
||||
FragColor = texture(sampler, vTexCoord);
|
||||
FragColor.a *= alpha;
|
||||
if (FragColor.a <= 0.0) {
|
||||
discard;
|
||||
}
|
||||
}
|
||||
|
||||
)FS";
|
||||
|
@@ -359,6 +361,94 @@ ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov, float aspect, i
     );
 }
 
+namespace oglplus {
+namespace shapes {
+
+class Laser : public DrawingInstructionWriter, public DrawMode {
+public:
+    using IndexArray = std::vector<GLuint>;
+    using PosArray = std::vector<float>;
+    /// The type of the index container returned by Indices()
+    // vertex positions
+    PosArray _pos_data;
+    IndexArray _idx_data;
+    unsigned int _prim_count { 0 };
+
+public:
+    Laser() {
+        int vertices = 2;
+        _pos_data.resize(vertices * 3);
+        _pos_data[0] = 0;
+        _pos_data[1] = 0;
+        _pos_data[2] = 0;
+
+        _pos_data[3] = 0;
+        _pos_data[4] = 0;
+        _pos_data[5] = -1;
+
+        _idx_data.push_back(0);
+        _idx_data.push_back(1);
+        _prim_count = 1;
+    }
+
+    /// Returns the winding direction of faces
+    FaceOrientation FaceWinding(void) const {
+        return FaceOrientation::CCW;
+    }
+
+    /// Queries the bounding sphere coordinates and dimensions
+    template <typename T>
+    void BoundingSphere(Sphere<T>& bounding_sphere) const {
+        bounding_sphere = Sphere<T>(0, 0, -0.5, 0.5);
+    }
+
+    typedef GLuint(Laser::*VertexAttribFunc)(std::vector<GLfloat>&) const;
+
+    /// Makes the vertex positions and returns the number of values per vertex
+    template <typename T>
+    GLuint Positions(std::vector<T>& dest) const {
+        dest.clear();
+        dest.insert(dest.begin(), _pos_data.begin(), _pos_data.end());
+        return 3;
+    }
+
+    typedef VertexAttribsInfo<
+        Laser,
+        std::tuple<VertexPositionsTag>
+    > VertexAttribs;
+
+    /// Returns element indices that are used with the drawing instructions
+    const IndexArray & Indices(Default = Default()) const {
+        return _idx_data;
+    }
+
+    /// Returns the instructions for rendering of faces
+    DrawingInstructions Instructions(PrimitiveType primitive) const {
+        DrawingInstructions instr = MakeInstructions();
+        DrawOperation operation;
+        operation.method = DrawOperation::Method::DrawElements;
+        operation.mode = primitive;
+        operation.first = 0;
+        operation.count = _prim_count * 3;
+        operation.restart_index = DrawOperation::NoRestartIndex();
+        operation.phase = 0;
+        AddInstruction(instr, operation);
+        return instr;
+    }
+
+    /// Returns the instructions for rendering of faces
+    DrawingInstructions Instructions(Default = Default()) const {
+        return Instructions(PrimitiveType::Lines);
+    }
+};
+}
+}
+
+ShapeWrapperPtr loadLaser(const ProgramPtr& program) {
+    return std::make_shared<shapes::ShapeWrapper>(shapes::ShapeWrapper("Position", shapes::Laser(), *program));
+}
+
 void TextureRecycler::setSize(const uvec2& size) {
     if (size == _size) {
         return;
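The Laser shape is just a unit line segment from the origin to (0, 0, -1) in model space. That is what makes the glm::scale(model, vec3(distance)) step in HmdDisplayPlugin::compositeExtra work: scaling by the ray-sphere hit distance stretches the segment exactly to the UI surface. A small sketch of the resulting endpoints (illustrative helpers, not part of the change):

    #include <glm/glm.hpp>

    // 'model' is the hand pose, already rotated toward the laser direction
    // and scaled by the intersection distance.
    glm::vec3 laserStart(const glm::mat4& model) {
        return glm::vec3(model * glm::vec4(0, 0, 0, 1));    // the hand position
    }
    glm::vec3 laserEnd(const glm::mat4& model) {
        return glm::vec3(model * glm::vec4(0, 0, -1, 1));   // lands on the UI sphere
    }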
@@ -64,8 +64,9 @@ ProgramPtr loadCubemapShader();
 void compileProgram(ProgramPtr & result, const std::string& vs, const std::string& fs);
 ShapeWrapperPtr loadSkybox(ProgramPtr program);
 ShapeWrapperPtr loadPlane(ProgramPtr program, float aspect = 1.0f);
-ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 32, int stacks = 32);
+ShapeWrapperPtr loadSphereSection(ProgramPtr program, float fov = PI / 3.0f * 2.0f, float aspect = 16.0f / 9.0f, int slices = 128, int stacks = 128);
+ShapeWrapperPtr loadLaser(const ProgramPtr& program);
 
 // A basic wrapper for constructing a framebuffer with a renderbuffer
 // for the depth attachment and an undefined type for the color attachment
@@ -173,6 +173,7 @@ void AccountManager::setAuthURL(const QUrl& authURL) {
                 << "from previous settings file";
         }
     }
+    settings.endGroup();
 
     if (_accountInfo.getAccessToken().token.isEmpty()) {
         qCWarning(networking) << "Unable to load account file. No existing account settings will be loaded.";
@@ -168,6 +168,26 @@ public:
 
     static const QString& MENU_PATH();
 
+    enum Hand {
+        LeftHand = 0x01,
+        RightHand = 0x02,
+    };
+
+    enum class HandLaserMode {
+        None, // Render no hand lasers
+        Overlay, // Render hand lasers only if they intersect with the UI layer, and stop at the UI layer
+    };
+
+    virtual bool setHandLaser(
+        uint32_t hands, // Bits from the Hand enum
+        HandLaserMode mode, // Mode in which to render
+        const vec4& color = vec4(1), // The color of the rendered laser
+        const vec3& direction = vec3(0, 0, -1) // The direction in which to render the hand lasers
+    ) {
+        return false;
+    }
+
+
 signals:
     void recommendedFramebufferSizeChanged(const QSize & size);
     // Indicates that this display plugin is no longer valid for use.
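Because Hand is a plain bitmask enum, a caller can target one or both hands in a single call, and the base implementation returning false gives non-HMD display plugins a graceful "not supported" answer. A hedged usage sketch (the displayPlugin pointer is illustrative):

    // Enable an opaque green laser for both hands.
    uint32_t bothHands = DisplayPlugin::Hand::LeftHand | DisplayPlugin::Hand::RightHand;
    bool supported = displayPlugin->setHandLaser(bothHands,
                                                 DisplayPlugin::HandLaserMode::Overlay,
                                                 glm::vec4(0.0f, 1.0f, 0.0f, 1.0f),
                                                 glm::vec3(0.0f, 0.0f, -1.0f));
    // 'supported' stays false on 2D plugins; HmdDisplayPlugin overrides it to return true.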
@@ -334,6 +334,7 @@ void ScriptEngines::clearScripts() {
     Settings settings;
     settings.beginWriteArray(SETTINGS_KEY);
     settings.remove("");
+    settings.endArray();
 }
 
 void ScriptEngines::saveScripts() {
@@ -18,6 +18,7 @@
 
 const QString Settings::firstRun { "firstRun" };
 
+
 Settings::Settings() :
     _manager(DependencyManager::get<Setting::Manager>()),
     _locker(&(_manager->getLock()))
@@ -25,6 +26,9 @@ Settings::Settings() :
 }
 
 Settings::~Settings() {
+    if (_prefixes.size() != 0) {
+        qFatal("Unstable Settings Prefixes: You must call endGroup for every beginGroup and endArray for every begin*Array call");
+    }
 }
 
 void Settings::remove(const QString& key) {
@@ -50,14 +54,17 @@ bool Settings::contains(const QString& key) const {
 }
 
 int Settings::beginReadArray(const QString & prefix) {
+    _prefixes.push(prefix);
     return _manager->beginReadArray(prefix);
 }
 
 void Settings::beginWriteArray(const QString& prefix, int size) {
+    _prefixes.push(prefix);
     _manager->beginWriteArray(prefix, size);
 }
 
 void Settings::endArray() {
+    _prefixes.pop();
     _manager->endArray();
 }
 
@@ -66,10 +73,12 @@ void Settings::setArrayIndex(int i) {
 }
 
 void Settings::beginGroup(const QString& prefix) {
+    _prefixes.push(prefix);
     _manager->beginGroup(prefix);
 }
 
 void Settings::endGroup() {
+    _prefixes.pop();
     _manager->endGroup();
 }
 
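The new _prefixes stack turns unbalanced begin/end calls from silent settings corruption into an immediate qFatal when the Settings object is destroyed. A minimal sketch of the failure it catches (setValue and the key names are illustrative):

    {
        Settings settings;
        settings.beginGroup("render");
        settings.setValue("targetFrameRate", 90);   // hypothetical key/value
        // Missing settings.endGroup(): when 'settings' goes out of scope,
        // ~Settings() finds "render" still on _prefixes and calls qFatal().
    }

This is presumably what motivated the settings.endArray() fix in ScriptEngines::clearScripts() above.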
@@ -58,8 +58,10 @@ public:
     void setQuatValue(const QString& name, const glm::quat& quatValue);
     void getQuatValueIfValid(const QString& name, glm::quat& quatValue);
 
+private:
     QSharedPointer<Setting::Manager> _manager;
     QWriteLocker _locker;
+    QStack<QString> _prefixes;
 };
 
 namespace Setting {
@@ -98,6 +98,8 @@ namespace Setting {
             // Register Handle
             manager->registerHandle(this);
             _isInitialized = true;
+        } else {
+            qWarning() << "Settings interface used after manager destroyed";
         }
 
         // Load value from disk
@@ -8,6 +8,7 @@
 #include "OculusBaseDisplayPlugin.h"
 
 #include <ViewFrustum.h>
+#include <controllers/Pose.h>
 
 #include "OculusHelpers.h"
 
@@ -24,8 +25,24 @@ bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
     auto trackingState = ovr_GetTrackingState(_session, _currentRenderFrameInfo.predictedDisplayTime, ovrTrue);
     _currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
     _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;
-    Lock lock(_mutex);
-    _frameInfos[frameIndex] = _currentRenderFrameInfo;
+
+    std::array<glm::mat4, 2> handPoses;
+    // Make controller poses available to the presentation thread
+    ovr_for_each_hand([&](ovrHandType hand) {
+        static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked & ovrStatus_PositionTracked;
+        if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
+            return;
+        }
+        auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
+        static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
+        handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
+    });
+
+    withRenderThreadLock([&] {
+        _handPoses = handPoses;
+        _frameInfos[frameIndex] = _currentRenderFrameInfo;
+    });
     return true;
 }
 
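HAND_TO_LASER_ROTATION is built with glm::rotation(from, to), which returns the shortest-arc quaternion carrying one unit vector onto another; composing it into the hand pose re-aims the laser frame relative to the reported controller orientation. A small self-contained check of that property (illustrative only, assuming GLM's GTX quaternion extension):

    #include <glm/glm.hpp>
    #include <glm/gtx/quaternion.hpp>
    #include <cassert>

    void checkShortestArc() {
        const glm::vec3 from(0, 0, 1);   // Vectors::UNIT_Z
        const glm::vec3 to(0, -1, 0);    // Vectors::UNIT_NEG_Y
        glm::quat q = glm::rotation(from, to);
        // Rotating 'from' by q lands on 'to' (up to floating point error).
        glm::vec3 rotated = q * from;
        assert(glm::length(rotated - to) < 1e-5f);
    }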
@@ -243,91 +243,13 @@ void OculusControllerManager::TouchDevice::focusOutEvent() {
 void OculusControllerManager::TouchDevice::handlePose(float deltaTime,
                                                       const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand,
                                                       const ovrPoseStatef& handPose) {
-    // When the sensor-to-world rotation is identity the coordinate axes look like this:
-    //
-    //                       user
-    //                      forward
-    //                        -z
-    //                         |
-    //                        y|      user
-    //  y                      o----x right
-    //  o-----x                     user
-    //  |                           up
-    //  |
-    //  z
-    //
-    //     Rift
-
-    // From ABOVE the hand canonical axes looks like this:
-    //
-    //        | | | |          y        | | | |
-    //        | | | |          |        | | | |
-    //        |     |          |        |     |
-    //        |left | /  x----  +     \ |right|
-    //        |     _/          z      \_     |
-    //         |   |                     |   |
-    //         |   |                     |   |
-    //
-
-    // So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
-    // the rotation to align the Touch axes with those of the hands is:
-    //
-    //    touchToHand = halfTurnAboutY * quaterTurnAboutX
-
-    // Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
-    // You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
-    // the combination (measurement * offset) is identity at this orientation.
-    //
-    //    Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
-    //
-    // An approximate offset for the Touch can be obtained by inspection:
-    //
-    //    Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
-    //
-    // So the full equation is:
-    //
-    //    Q = combinedMeasurement * touchToHand
-    //
-    //    Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
-    //
-    //    Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
-
     auto poseId = hand == ovrHand_Left ? controller::LEFT_HAND : controller::RIGHT_HAND;
     auto& pose = _poseStateMap[poseId];
-
-    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
-    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
-    static const glm::quat touchToHand = yFlip * quarterX;
-
-    static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
-    static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
-    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
-
-    static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
-    static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
-
-    static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
-    static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
-                                                         CONTROLLER_LENGTH_OFFSET / 2.0f,
-                                                         CONTROLLER_LENGTH_OFFSET * 2.0f);
-    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
-    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
-
-    auto translationOffset = (hand == ovrHand_Left ? leftTranslationOffset : rightTranslationOffset);
-    auto rotationOffset = (hand == ovrHand_Left ? leftRotationOffset : rightRotationOffset);
-
-    glm::quat rotation = toGlm(handPose.ThePose.Orientation);
-
-    pose.translation = toGlm(handPose.ThePose.Position);
-    pose.translation += rotation * translationOffset;
-    pose.rotation = rotation * rotationOffset;
-    pose.angularVelocity = toGlm(handPose.AngularVelocity);
-    pose.velocity = toGlm(handPose.LinearVelocity);
-    pose.valid = true;
-
+    pose = ovrControllerPoseToHandPose(hand, handPose);
     // transform into avatar frame
     glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
     pose = pose.transform(controllerToAvatar);
-
 }
 
 bool OculusControllerManager::TouchDevice::triggerHapticPulse(float strength, float duration, controller::Hand hand) {
@@ -15,6 +15,9 @@
 #include <QtCore/QFile>
 #include <QtCore/QDir>
 
+#include <controllers/Input.h>
+#include <controllers/Pose.h>
+
 using Mutex = std::mutex;
 using Lock = std::unique_lock<Mutex>;
 
@@ -191,3 +194,88 @@ void SwapFramebufferWrapper::onBind(oglplus::Framebuffer::Target target) {
 void SwapFramebufferWrapper::onUnbind(oglplus::Framebuffer::Target target) {
     glFramebufferTexture2D(toEnum(target), GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, 0, 0);
 }
+
+
+controller::Pose ovrControllerPoseToHandPose(
+    ovrHandType hand,
+    const ovrPoseStatef& handPose) {
+    // When the sensor-to-world rotation is identity the coordinate axes look like this:
+    //
+    //                       user
+    //                      forward
+    //                        -z
+    //                         |
+    //                        y|      user
+    //  y                      o----x right
+    //  o-----x                     user
+    //  |                           up
+    //  |
+    //  z
+    //
+    //     Rift
+
+    // From ABOVE the hand canonical axes look like this:
+    //
+    //        | | | |          y        | | | |
+    //        | | | |          |        | | | |
+    //        |     |          |        |     |
+    //        |left | /  x----  +     \ |right|
+    //        |     _/          z      \_     |
+    //         |   |                     |   |
+    //         |   |                     |   |
+    //
+
+    // So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
+    // the rotation to align the Touch axes with those of the hands is:
+    //
+    //    touchToHand = halfTurnAboutY * quarterTurnAboutX
+
+    // Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
+    // You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
+    // the combination (measurement * offset) is identity at this orientation.
+    //
+    //    Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
+    //
+    // An approximate offset for the Touch can be obtained by inspection:
+    //
+    //    Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
+    //
+    // So the full equation is:
+    //
+    //    Q = combinedMeasurement * touchToHand
+    //
+    //    Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
+    //
+    //    Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
+    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
+    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
+    static const glm::quat touchToHand = yFlip * quarterX;
+
+    static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
+    static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
+    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
+
+    static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
+    static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
+
+    static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
+    static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
+                                                         CONTROLLER_LENGTH_OFFSET / 2.0f,
+                                                         CONTROLLER_LENGTH_OFFSET * 2.0f);
+    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
+    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
+
+    auto translationOffset = (hand == ovrHand_Left ? leftTranslationOffset : rightTranslationOffset);
+    auto rotationOffset = (hand == ovrHand_Left ? leftRotationOffset : rightRotationOffset);
+
+    glm::quat rotation = toGlm(handPose.ThePose.Orientation);
+
+    controller::Pose pose;
+    pose.translation = toGlm(handPose.ThePose.Position);
+    pose.translation += rotation * translationOffset;
+    pose.rotation = rotation * rotationOffset;
+    pose.angularVelocity = toGlm(handPose.AngularVelocity);
+    pose.velocity = toGlm(handPose.LinearVelocity);
+    pose.valid = true;
+    return pose;
+}
@@ -13,6 +13,7 @@
 #include <glm/gtc/matrix_transform.hpp>
 
 #include <gl/OglplusHelpers.h>
+#include <controllers/Forward.h>
 
 void logWarning(const char* what);
 void logFatal(const char* what);
@@ -128,3 +129,7 @@ protected:
 private:
     ovrSession _session;
 };
+
+controller::Pose ovrControllerPoseToHandPose(
+    ovrHandType hand,
+    const ovrPoseStatef& handPose);
@@ -45,8 +45,9 @@ bool OculusLegacyDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
     _currentRenderFrameInfo.predictedDisplayTime = _currentRenderFrameInfo.sensorSampleTime = ovr_GetTimeInSeconds();
     _trackingState = ovrHmd_GetTrackingState(_hmd, _currentRenderFrameInfo.predictedDisplayTime);
     _currentRenderFrameInfo.rawRenderPose = _currentRenderFrameInfo.renderPose = toGlm(_trackingState.HeadPose.ThePose);
-    Lock lock(_mutex);
-    _frameInfos[frameIndex] = _currentRenderFrameInfo;
+    withRenderThreadLock([&]{
+        _frameInfos[frameIndex] = _currentRenderFrameInfo;
+    });
     return true;
 }
 
@@ -18,6 +18,7 @@
 #include <GLMHelpers.h>
 #include <gl/GlWindow.h>
 
+#include <controllers/Pose.h>
 #include <PerfStat.h>
 #include <plugins/PluginContainer.h>
 #include <ViewFrustum.h>
@@ -29,11 +30,13 @@ Q_DECLARE_LOGGING_CATEGORY(displayplugins)
 const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
 const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probably shouldn't be hardcoded here
 
-static vr::IVRCompositor* _compositor{ nullptr };
+static vr::IVRCompositor* _compositor { nullptr };
 vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 
 mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
+vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
+vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
 
 static mat4 _sensorResetMat;
 static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
 bool _openVrDisplayActive { false };
@@ -59,16 +62,14 @@ bool OpenVrDisplayPlugin::internalActivate() {
     // left + right eyes
     _renderTargetSize.x *= 2;
 
-    {
-        Lock lock(_poseMutex);
+    withRenderThreadLock([&] {
         openvr_for_each_eye([&](vr::Hmd_Eye eye) {
             _eyeOffsets[eye] = toGlm(_system->GetEyeToHeadTransform(eye));
             _eyeProjections[eye] = toGlm(_system->GetProjectionMatrix(eye, DEFAULT_NEAR_CLIP, DEFAULT_FAR_CLIP, vr::API_OpenGL));
         });
         // FIXME Calculate the proper combined projection by using GetProjectionRaw values from both eyes
         _cullingProjection = _eyeProjections[0];
-    }
+    });
 
     _compositor = vr::VRCompositor();
     Q_ASSERT(_compositor);
@@ -113,7 +114,7 @@ void OpenVrDisplayPlugin::internalDeactivate() {
 void OpenVrDisplayPlugin::customizeContext() {
     // Display plugins in DLLs must initialize glew locally
     static std::once_flag once;
-    std::call_once(once, []{
+    std::call_once(once, [] {
         glewExperimental = true;
        GLenum err = glewInit();
        glGetError(); // clear the potential error from glewExperimental
@@ -123,9 +124,10 @@ void OpenVrDisplayPlugin::customizeContext() {
 }
 
 void OpenVrDisplayPlugin::resetSensors() {
-    Lock lock(_poseMutex);
-    glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
-    _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
+    withRenderThreadLock([&] {
+        glm::mat4 m = toGlm(_trackedDevicePose[0].mDeviceToAbsoluteTracking);
+        _sensorResetMat = glm::inverse(cancelOutRollAndPitch(m));
+    });
 }
 
 
@@ -150,6 +152,24 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
 
     _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, _currentRenderFrameInfo.predictedDisplayTime, _trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
 
+
+    vr::TrackedDeviceIndex_t handIndices[2] { vr::k_unTrackedDeviceIndexInvalid, vr::k_unTrackedDeviceIndexInvalid };
+    {
+        vr::TrackedDeviceIndex_t controllerIndices[2];
+        auto trackedCount = _system->GetSortedTrackedDeviceIndicesOfClass(vr::TrackedDeviceClass_Controller, controllerIndices, 2);
+        // Find the left and right hand controllers, if they exist
+        for (uint32_t i = 0; i < std::min<uint32_t>(trackedCount, 2); ++i) {
+            if (_trackedDevicePose[i].bPoseIsValid) {
+                auto role = _system->GetControllerRoleForTrackedDeviceIndex(controllerIndices[i]);
+                if (vr::TrackedControllerRole_LeftHand == role) {
+                    handIndices[0] = controllerIndices[i];
+                } else if (vr::TrackedControllerRole_RightHand == role) {
+                    handIndices[1] = controllerIndices[i];
+                }
+            }
+        }
+    }
+
     // copy and process predictedTrackedDevicePoses
     for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
         _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
@@ -159,18 +179,38 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
     _currentRenderFrameInfo.rawRenderPose = toGlm(_trackedDevicePose[vr::k_unTrackedDeviceIndex_Hmd].mDeviceToAbsoluteTracking);
     _currentRenderFrameInfo.renderPose = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd];
 
-    Lock lock(_mutex);
-    _frameInfos[frameIndex] = _currentRenderFrameInfo;
+    bool keyboardVisible = isOpenVrKeyboardShown();
+
+    std::array<mat4, 2> handPoses;
+    if (!keyboardVisible) {
+        for (int i = 0; i < 2; ++i) {
+            if (handIndices[i] == vr::k_unTrackedDeviceIndexInvalid) {
+                continue;
+            }
+            auto deviceIndex = handIndices[i];
+            const mat4& mat = _trackedDevicePoseMat4[deviceIndex];
+            const vec3& linearVelocity = _trackedDeviceLinearVelocities[deviceIndex];
+            const vec3& angularVelocity = _trackedDeviceAngularVelocities[deviceIndex];
+            auto correctedPose = openVrControllerPoseToHandPose(i == 0, mat, linearVelocity, angularVelocity);
+            static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
+            handPoses[i] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
+        }
+    }
+
+    withRenderThreadLock([&] {
+        // Make controller poses available to the presentation thread
+        _handPoses = handPoses;
+        _frameInfos[frameIndex] = _currentRenderFrameInfo;
+    });
     return true;
 }
 
 void OpenVrDisplayPlugin::hmdPresent() {
 
     PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)_currentPresentFrameIndex)
 
     // Flip y-axis since GL UV coords are backwards.
-    static vr::VRTextureBounds_t leftBounds{ 0, 0, 0.5f, 1 };
-    static vr::VRTextureBounds_t rightBounds{ 0.5f, 0, 1, 1 };
+    static vr::VRTextureBounds_t leftBounds { 0, 0, 0.5f, 1 };
+    static vr::VRTextureBounds_t rightBounds { 0.5f, 0, 1, 1 };
 
     vr::Texture_t texture { (void*)oglplus::GetName(_compositeFramebuffer->color), vr::API_OpenGL, vr::ColorSpace_Auto };
 
@@ -191,6 +231,10 @@ bool OpenVrDisplayPlugin::isHmdMounted() const {
 }
 
 void OpenVrDisplayPlugin::updatePresentPose() {
+    mat4 sensorResetMat;
+    withPresentThreadLock([&] {
+        sensorResetMat = _sensorResetMat;
+    });
     {
         float fSecondsSinceLastVsync;
         _system->GetTimeSinceLastVsync(&fSecondsSinceLastVsync, nullptr);
@@ -202,9 +246,8 @@ void OpenVrDisplayPlugin::updatePresentPose() {
         _system->GetDeviceToAbsoluteTrackingPose(vr::TrackingUniverseStanding, fPredictedSecondsFromNow, &pose, 1);
         _currentPresentFrameInfo.rawPresentPose = toGlm(pose.mDeviceToAbsoluteTracking);
     }
-    _currentPresentFrameInfo.presentPose = _sensorResetMat * _currentPresentFrameInfo.rawPresentPose;
+    _currentPresentFrameInfo.presentPose = sensorResetMat * _currentPresentFrameInfo.rawPresentPose;
     mat3 renderRotation(_currentPresentFrameInfo.rawRenderPose);
     mat3 presentRotation(_currentPresentFrameInfo.rawPresentPose);
     _currentPresentFrameInfo.presentReprojection = glm::mat3(glm::inverse(renderRotation) * presentRotation);
 }
 
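The closing lines compute the reprojection term as the rotation-only delta between the head orientation the frame was rendered with and the orientation sampled just before present; truncating the 4x4 poses to mat3 deliberately discards translation. Restated as a small helper (a sketch, not code from the change):

    #include <glm/glm.hpp>

    // Rotation carrying the render-time head orientation onto the
    // present-time head orientation; translation is intentionally dropped.
    glm::mat3 computeReprojection(const glm::mat4& rawRenderPose, const glm::mat4& rawPresentPose) {
        glm::mat3 renderRotation(rawRenderPose);
        glm::mat3 presentRotation(rawPresentPose);
        return glm::inverse(renderRotation) * presentRotation;
    }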
@@ -43,5 +43,4 @@ private:
     vr::IVRSystem* _system { nullptr };
     std::atomic<vr::EDeviceActivityLevel> _hmdActivityLevel { vr::k_EDeviceActivityLevel_Unknown };
     static const QString NAME;
-    mutable Mutex _poseMutex;
 };
@@ -18,8 +18,9 @@
 #include <QtQuick/QQuickWindow>
 
 #include <Windows.h>
 
 #include <OffscreenUi.h>
+#include <controllers/Pose.h>
 #include <NumericalConstants.h>
 
 Q_DECLARE_LOGGING_CATEGORY(displayplugins)
 Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")
@@ -208,6 +209,10 @@ void disableOpenVrKeyboard() {
     QObject::disconnect(_focusConnection);
 }
 
+bool isOpenVrKeyboardShown() {
+    return _keyboardShown;
+}
+
 
 void handleOpenVrEvents() {
     if (!activeHmd) {
@@ -238,3 +243,86 @@ void handleOpenVrEvents() {
 
 }
 
+controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity) {
+    // When the sensor-to-world rotation is identity the coordinate axes look like this:
+    //
+    //                       user
+    //                      forward
+    //                        -z
+    //                         |
+    //                        y|      user
+    //  y                      o----x right
+    //  o-----x                     user
+    //  |                           up
+    //  |
+    //  z
+    //
+    //     Rift
+
+    // From ABOVE the hand canonical axes look like this:
+    //
+    //        | | | |          y        | | | |
+    //        | | | |          |        | | | |
+    //        |     |          |        |     |
+    //        |left | /  x----  +     \ |right|
+    //        |     _/          z      \_     |
+    //         |   |                     |   |
+    //         |   |                     |   |
+    //
+
+    // So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
+    // the rotation to align the Touch axes with those of the hands is:
+    //
+    //    touchToHand = halfTurnAboutY * quarterTurnAboutX
+
+    // Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
+    // You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
+    // the combination (measurement * offset) is identity at this orientation.
+    //
+    //    Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
+    //
+    // An approximate offset for the Touch can be obtained by inspection:
+    //
+    //    Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
+    //
+    // So the full equation is:
+    //
+    //    Q = combinedMeasurement * touchToHand
+    //
+    //    Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
+    //
+    //    Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
+    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
+    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
+    static const glm::quat touchToHand = yFlip * quarterX;
+
+    static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
+    static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
+    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
+
+    static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
+    static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
+
+    static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
+    static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
+                                                         CONTROLLER_LENGTH_OFFSET / 2.0f,
+                                                         CONTROLLER_LENGTH_OFFSET * 2.0f);
+    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
+    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
+
+    auto translationOffset = (isLeftHand ? leftTranslationOffset : rightTranslationOffset);
+    auto rotationOffset = (isLeftHand ? leftRotationOffset : rightRotationOffset);
+
+    glm::vec3 position = extractTranslation(mat);
+    glm::quat rotation = glm::normalize(glm::quat_cast(mat));
+
+    position += rotation * translationOffset;
+    rotation = rotation * rotationOffset;
+
+    // transform into avatar frame
+    auto result = controller::Pose(position, rotation);
+    // handle change in velocity due to translationOffset
+    result.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
+    result.angularVelocity = angularVelocity;
+    return result;
+}
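The velocity adjustment near the end is the standard rigid-body relation: a point rigidly attached to a rotating body moves at the body's linear velocity plus the cross product of the angular velocity with the point's offset from the tracked origin. Pulled out as a standalone helper (a sketch under that reading, not code from the change):

    #include <glm/glm.hpp>

    // Velocity of a point rigidly attached to a tracked body:
    //   v_point = v_body + omega x (point - bodyOrigin)
    glm::vec3 attachedPointVelocity(const glm::vec3& bodyVelocity,
                                    const glm::vec3& angularVelocity,
                                    const glm::vec3& point,
                                    const glm::vec3& bodyOrigin) {
        return bodyVelocity + glm::cross(angularVelocity, point - bodyOrigin);
    }

Here 'point' is the offset hand position and 'bodyOrigin' is the raw controller translation, so the reported hand velocity stays consistent with the translationOffset applied above.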
@@ -12,6 +12,8 @@
 #include <glm/gtc/type_ptr.hpp>
 #include <glm/gtc/matrix_transform.hpp>
 
+#include <controllers/Forward.h>
+
 bool openVrSupported();
 
 vr::IVRSystem* acquireOpenVrSystem();
@@ -20,6 +22,7 @@ void handleOpenVrEvents();
 bool openVrQuitRequested();
 void enableOpenVrKeyboard();
 void disableOpenVrKeyboard();
+bool isOpenVrKeyboardShown();
 
 
 template<typename F>
@@ -54,3 +57,5 @@ inline vr::HmdMatrix34_t toOpenVr(const mat4& m) {
     }
     return result;
 }
+
+controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity);
@@ -37,10 +37,6 @@ vr::IVRSystem* acquireOpenVrSystem();
 void releaseOpenVrSystem();
 
 
-static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
-static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
-                                                     CONTROLLER_LENGTH_OFFSET / 2.0f,
-                                                     CONTROLLER_LENGTH_OFFSET * 2.0f);
 static const char* CONTROLLER_MODEL_STRING = "vr_controller_05_wireless_b";
 
 static const QString MENU_PARENT = "Avatar";
@@ -382,86 +378,11 @@ void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint
 void ViveControllerManager::InputDevice::handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
                                                          const mat4& mat, const vec3& linearVelocity,
                                                          const vec3& angularVelocity, bool isLeftHand) {
-    // When the sensor-to-world rotation is identity the coordinate axes look like this:
-    //
-    //                       user
-    //                      forward
-    //                        -z
-    //                         |
-    //                        y|      user
-    //  y                      o----x right
-    //  o-----x                     user
-    //  |                           up
-    //  |
-    //  z
-    //
-    //     Vive
-    //
-
-    // From ABOVE the hand canonical axes looks like this:
-    //
-    //        | | | |          y        | | | |
-    //        | | | |          |        | | | |
-    //        |     |          |        |     |
-    //        |left | /  x----  +     \ |right|
-    //        |     _/          z      \_     |
-    //         |   |                     |   |
-    //         |   |                     |   |
-    //
-
-    // So when the user is standing in Vive space facing the -zAxis with hands outstretched and palms down
-    // the rotation to align the Vive axes with those of the hands is:
-    //
-    //    QviveToHand = halfTurnAboutY * quaterTurnAboutX
-
-    // Due to how the Vive controllers fit into the palm there is an offset that is different for each hand.
-    // You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
-    // the combination (measurement * offset) is identity at this orientation.
-    //
-    //    Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
-    //
-    // An approximate offset for the Vive can be obtained by inspection:
-    //
-    //    Qoffset = glm::inverse(glm::angleAxis(sign * PI/4.0f, zAxis) * glm::angleAxis(PI/2.0f, xAxis))
-    //
-    // So the full equation is:
-    //
-    //    Q = combinedMeasurement * viveToHand
-    //
-    //    Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
-    //
-    //    Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
-
-    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
-    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
-    static const glm::quat viveToHand = yFlip * quarterX;
-
-    static const glm::quat leftQuaterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
-    static const glm::quat rightQuaterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
-    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
-
-    static const glm::quat leftRotationOffset = glm::inverse(leftQuaterZ * eighthX) * viveToHand;
-    static const glm::quat rightRotationOffset = glm::inverse(rightQuaterZ * eighthX) * viveToHand;
-
-    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
-    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
-
-    auto translationOffset = (isLeftHand ? leftTranslationOffset : rightTranslationOffset);
-    auto rotationOffset = (isLeftHand ? leftRotationOffset : rightRotationOffset);
-
-    glm::vec3 position = extractTranslation(mat);
-    glm::quat rotation = glm::normalize(glm::quat_cast(mat));
-
-    position += rotation * translationOffset;
-    rotation = rotation * rotationOffset;
+    auto pose = openVrControllerPoseToHandPose(isLeftHand, mat, linearVelocity, angularVelocity);
 
     // transform into avatar frame
     glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
-    auto avatarPose = controller::Pose(position, rotation);
-    // handle change in velocity due to translationOffset
-    avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
-    avatarPose.angularVelocity = angularVelocity;
-    _poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar);
+    _poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] = pose.transform(controllerToAvatar);
 }
 
 bool ViveControllerManager::InputDevice::triggerHapticPulse(float strength, float duration, controller::Hand hand) {