Mirror of https://github.com/overte-org/overte.git (synced 2025-08-06 22:39:18 +02:00)

Commit 6707f889b8: Fixing laser offset, support laser in Oculus
Parent: aab42ff841

9 changed files with 138 additions and 123 deletions
@@ -517,6 +517,7 @@ void OpenGLDisplayPlugin::compositeLayers() {
             glBindTexture(GL_TEXTURE_2D, 0);
             Context::Disable(Capability::Blend);
         }
+        compositeExtra();
     });
 }

@@ -74,6 +74,7 @@ protected:
     virtual void compositeScene();
     virtual void compositeOverlay();
     virtual void compositePointer();
+    virtual void compositeExtra() {};

     virtual bool hasFocus() const override;

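The new compositeExtra() virtual is an empty hook on the base plugin that runs at the end of compositeLayers(), after the other composite passes; HmdDisplayPlugin overrides it below to draw the hand lasers. A minimal sketch of the pattern, with a hypothetical subclass and helper name that are not part of this commit:

    // Sketch only: how a display plugin subclass can hook the new callback.
    // MyDisplayPlugin and drawExtraWidgets() are illustrative names.
    class MyDisplayPlugin : public OpenGLDisplayPlugin {
    protected:
        // Invoked by OpenGLDisplayPlugin::compositeLayers() after the scene,
        // overlay and pointer passes, while the composite target is bound.
        void compositeExtra() override {
            drawExtraWidgets();  // any additional drawing into the composited output
        }

    private:
        void drawExtraWidgets();  // hypothetical helper
    };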
@@ -316,10 +316,9 @@ void HmdDisplayPlugin::compositePointer() {
         Uniform<glm::mat4>(*_program, _mvpUniform).Set(mvp);
         _plane->Draw();
     });
-    compositeLasers();
 }

 void HmdDisplayPlugin::internalPresent() {
     PROFILE_RANGE_EX(__FUNCTION__, 0xff00ff00, (uint64_t)presentCount())

@@ -412,24 +411,7 @@ bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const ve
     return true;
 }

-// FIXME try to consolidate the duplication of logic between this function and a similar one in CompsitorHelper.
-static float calculateRayUiCollisionDistance(const glm::mat4& headPose, const glm::vec3& position, const glm::vec3& direction) {
-    auto relativePosition4 = glm::inverse(headPose) * vec4(position, 1);
-    auto relativePosition = vec3(relativePosition4) / relativePosition4.w;
-    auto relativeDirection = glm::inverse(glm::quat_cast(headPose)) * direction;
-    if (glm::abs(glm::length2(relativeDirection) - 1.0f) > EPSILON) {
-        relativeDirection = glm::normalize(relativeDirection);
-    }
-    // FIXME fetch the actual UI radius from... somewhere?
-    float uiRadius = 1.0f;
-    float instersectionDistance;
-    if (!glm::intersectRaySphere(relativePosition, relativeDirection, vec3(0), uiRadius * uiRadius, instersectionDistance)) {
-        return -1;
-    }
-    return instersectionDistance;
-}
-
-void HmdDisplayPlugin::compositeLasers() {
+void HmdDisplayPlugin::compositeExtra() {
     std::array<HandLaserInfo, 2> handLasers;
     std::array<mat4, 2> renderHandPoses;
     withPresentThreadLock([&] {

@@ -465,10 +447,16 @@ void HmdDisplayPlugin::compositeLasers() {
         const auto& laserDirection = handLaser.direction;
         auto model = renderHandPoses[i];
         auto castDirection = glm::quat_cast(model) * laserDirection;
+        if (glm::abs(glm::length2(castDirection) - 1.0f) > EPSILON) {
+            castDirection = glm::normalize(castDirection);
+        }
+
+        // FIXME fetch the actual UI radius from... somewhere?
+        float uiRadius = 1.0f;

         // Find the intersection of the laser with he UI and use it to scale the model matrix
-        float distance = calculateRayUiCollisionDistance(_currentPresentFrameInfo.presentPose, vec3(renderHandPoses[i][3]), castDirection);
-        if (distance < 0) {
+        float distance;
+        if (!glm::intersectRaySphere(vec3(renderHandPoses[i][3]), castDirection, vec3(0), uiRadius * uiRadius, distance)) {
             continue;
         }

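For reference, glm::intersectRaySphere (from <glm/gtx/intersect.hpp>) expects a normalized ray direction and takes the sphere radius squared, which is why the new code normalizes castDirection and passes uiRadius * uiRadius. A standalone sketch of the same test against the placeholder unit-radius UI sphere at the origin:

    #define GLM_ENABLE_EXPERIMENTAL   // required for <glm/gtx/intersect.hpp> on newer GLM
    #include <glm/glm.hpp>
    #include <glm/gtx/intersect.hpp>

    // Sketch: distance from a hand position to the UI sphere along the laser direction.
    // Returns a negative value when the laser misses the sphere.
    float laserUiDistance(const glm::vec3& handPosition, const glm::vec3& laserDirection) {
        const float uiRadius = 1.0f;                     // same placeholder radius as the commit
        glm::vec3 dir = glm::normalize(laserDirection);  // this overload assumes a unit direction
        float distance = 0.0f;
        // Note: the fourth argument is the *squared* radius.
        if (!glm::intersectRaySphere(handPosition, dir, glm::vec3(0.0f), uiRadius * uiRadius, distance)) {
            return -1.0f;
        }
        return distance;
    }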
@@ -46,9 +46,7 @@ protected:
     void customizeContext() override;
     void uncustomizeContext() override;
     void updateFrameData() override;
-    void compositeLasers();
+    void compositeExtra() override;

     struct HandLaserInfo {
         HandLaserMode mode { HandLaserMode::None };

@@ -8,6 +8,7 @@
 #include "OculusBaseDisplayPlugin.h"

 #include <ViewFrustum.h>
+#include <controllers/Pose.h>

 #include "OculusHelpers.h"

@@ -25,16 +26,21 @@ bool OculusBaseDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
     _currentRenderFrameInfo.renderPose = toGlm(trackingState.HeadPose.ThePose);
     _currentRenderFrameInfo.presentPose = _currentRenderFrameInfo.renderPose;

+    std::array<glm::mat4, 2> handPoses;
+    // Make controller poses available to the presentation thread
+    ovr_for_each_hand([&](ovrHandType hand) {
+        static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked & ovrStatus_PositionTracked;
+        if (REQUIRED_HAND_STATUS != (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
+            return;
+        }
+
+        auto correctedPose = ovrControllerPoseToHandPose(hand, trackingState.HandPoses[hand]);
+        static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
+        handPoses[hand] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
+    });
+
     withRenderThreadLock([&] {
-        // Make controller poses available to the presentation thread
-        ovr_for_each_hand([&](ovrHandType hand){
-            static const auto REQUIRED_HAND_STATUS = ovrStatus_OrientationTracked & ovrStatus_PositionTracked;
-            if (REQUIRED_HAND_STATUS == (trackingState.HandStatusFlags[hand] & REQUIRED_HAND_STATUS)) {
-                _handPoses[hand] = toGlm(trackingState.HandPoses[hand].ThePose);
-            } else {
-                _handPoses[hand] = glm::mat4();
-            }
-        });
+        _handPoses = handPoses;
         _frameInfos[frameIndex] = _currentRenderFrameInfo;
     });
     return true;

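HAND_TO_LASER_ROTATION is glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y), the shortest-arc quaternion that carries the +Z axis onto -Y; post-multiplying the controller rotation by it means both the Oculus path here and the OpenVR path below hand the present thread hand poses with the same laser orientation convention. A small sketch, using plain glm literals in place of the engine's Vectors:: constants, showing what that quaternion does:

    #define GLM_ENABLE_EXPERIMENTAL   // required for <glm/gtx/quaternion.hpp> on newer GLM
    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtx/quaternion.hpp>

    int main() {
        const glm::vec3 UNIT_Z(0.0f, 0.0f, 1.0f);
        const glm::vec3 UNIT_NEG_Y(0.0f, -1.0f, 0.0f);

        // glm::rotation(a, b) returns the shortest-arc quaternion taking a onto b.
        glm::quat handToLaser = glm::rotation(UNIT_Z, UNIT_NEG_Y);

        glm::vec3 rotated = handToLaser * UNIT_Z;   // expected to print roughly 0 -1 0
        std::printf("%.2f %.2f %.2f\n", rotated.x, rotated.y, rotated.z);
        return 0;
    }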
@@ -18,6 +18,7 @@
 #include <GLMHelpers.h>
 #include <gl/GlWindow.h>

+#include <controllers/Pose.h>
 #include <PerfStat.h>
 #include <plugins/PluginContainer.h>
 #include <ViewFrustum.h>

@@ -179,15 +180,26 @@ bool OpenVrDisplayPlugin::beginFrameRender(uint32_t frameIndex) {
     _currentRenderFrameInfo.renderPose = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd];

     bool keyboardVisible = isOpenVrKeyboardShown();

+    std::array<mat4, 2> handPoses;
+    if (!keyboardVisible) {
+        for (int i = 0; i < 2; ++i) {
+            if (handIndices[i] == vr::k_unTrackedDeviceIndexInvalid) {
+                continue;
+            }
+            auto deviceIndex = handIndices[i];
+            const mat4& mat = _trackedDevicePoseMat4[deviceIndex];
+            const vec3& linearVelocity = _trackedDeviceLinearVelocities[deviceIndex];
+            const vec3& angularVelocity = _trackedDeviceAngularVelocities[deviceIndex];
+            auto correctedPose = openVrControllerPoseToHandPose(i == 0, mat, linearVelocity, angularVelocity);
+            static const glm::quat HAND_TO_LASER_ROTATION = glm::rotation(Vectors::UNIT_Z, Vectors::UNIT_NEG_Y);
+            handPoses[i] = glm::translate(glm::mat4(), correctedPose.translation) * glm::mat4_cast(correctedPose.rotation * HAND_TO_LASER_ROTATION);
+        }
+    }
+
     withRenderThreadLock([&] {
         // Make controller poses available to the presentation thread
-        for (int i = 0; i < 2; ++i) {
-            if (keyboardVisible || handIndices[i] == vr::k_unTrackedDeviceIndexInvalid) {
-                _handPoses[i] = glm::mat4();
-            } else {
-                _handPoses[i] = _sensorResetMat * toGlm(_trackedDevicePose[handIndices[i]].mDeviceToAbsoluteTracking);
-            }
-        }
+        _handPoses = handPoses;
         _frameInfos[frameIndex] = _currentRenderFrameInfo;
     });
     return true;

@@ -18,8 +18,9 @@
 #include <QtQuick/QQuickWindow>

 #include <Windows.h>

 #include <OffscreenUi.h>
+#include <controllers/Pose.h>
+#include <NumericalConstants.h>

 Q_DECLARE_LOGGING_CATEGORY(displayplugins)
 Q_LOGGING_CATEGORY(displayplugins, "hifi.plugins.display")

@@ -242,3 +243,86 @@ void handleOpenVrEvents() {

 }

+controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity) {
+    // When the sensor-to-world rotation is identity the coordinate axes look like this:
+    //
+    //                   user
+    //                  forward
+    //                    -z
+    //                     |
+    //                    y|      user
+    //      y              o----x right
+    //       o-----x       user
+    //       |             up
+    //       |
+    //      z
+    //
+    //     Rift
+
+    // From ABOVE the hand canonical axes looks like this:
+    //
+    //      | | | |          y        | | | |
+    //      | | | |          |        | | | |
+    //      |     |          |        |     |
+    //      |left | /    x---- +    \ |right|
+    //      |     _/          z      \_     |
+    //       |   |                     |   |
+    //       |   |                     |   |
+    //
+
+    // So when the user is in Rift space facing the -zAxis with hands outstretched and palms down
+    // the rotation to align the Touch axes with those of the hands is:
+    //
+    //    touchToHand = halfTurnAboutY * quaterTurnAboutX
+
+    // Due to how the Touch controllers fit into the palm there is an offset that is different for each hand.
+    // You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
+    // the combination (measurement * offset) is identity at this orientation.
+    //
+    //    Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
+    //
+    // An approximate offset for the Touch can be obtained by inspection:
+    //
+    //    Qoffset = glm::inverse(glm::angleAxis(sign * PI/2.0f, zAxis) * glm::angleAxis(PI/4.0f, xAxis))
+    //
+    // So the full equation is:
+    //
+    //    Q = combinedMeasurement * touchToHand
+    //
+    //    Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
+    //
+    //    Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
+    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
+    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
+    static const glm::quat touchToHand = yFlip * quarterX;
+
+    static const glm::quat leftQuarterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
+    static const glm::quat rightQuarterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
+    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
+
+    static const glm::quat leftRotationOffset = glm::inverse(leftQuarterZ * eighthX) * touchToHand;
+    static const glm::quat rightRotationOffset = glm::inverse(rightQuarterZ * eighthX) * touchToHand;
+
+    static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
+    static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
+        CONTROLLER_LENGTH_OFFSET / 2.0f,
+        CONTROLLER_LENGTH_OFFSET * 2.0f);
+    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
+    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
+
+    auto translationOffset = (isLeftHand ? leftTranslationOffset : rightTranslationOffset);
+    auto rotationOffset = (isLeftHand ? leftRotationOffset : rightRotationOffset);
+
+    glm::vec3 position = extractTranslation(mat);
+    glm::quat rotation = glm::normalize(glm::quat_cast(mat));
+
+    position += rotation * translationOffset;
+    rotation = rotation * rotationOffset;
+
+    // transform into avatar frame
+    auto result = controller::Pose(position, rotation);
+    // handle change in velocity due to translationOffset
+    result.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
+    result.angularVelocity = angularVelocity;
+    return result;
+}

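The long comment above derives the hand orientation as Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX); the static constants that follow are just that expression precomputed per hand. A condensed sketch of the same composition for the left hand, written with plain glm so the PI and axis constants are explicit:

    #include <glm/glm.hpp>
    #include <glm/gtc/constants.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Sketch: recompute the left-hand rotation offset the way the comment derives it.
    glm::quat leftHandRotationOffset() {
        const float PI = glm::pi<float>();
        const glm::vec3 xAxis(1, 0, 0), yAxis(0, 1, 0), zAxis(0, 0, 1);

        // touchToHand = halfTurnAboutY * quarterTurnAboutX
        const glm::quat yFlip = glm::angleAxis(PI, yAxis);
        const glm::quat quarterX = glm::angleAxis(PI / 2.0f, xAxis);
        const glm::quat touchToHand = yFlip * quarterX;

        // Qoffset = inverse(measured rotation with fingers forward, palm down),
        // approximated by inspection as inverse(quarterTurnAboutZ * eighthTurnAboutX).
        const glm::quat leftQuarterZ = glm::angleAxis(-PI / 2.0f, zAxis);
        const glm::quat eighthX = glm::angleAxis(PI / 4.0f, xAxis);

        // Applied at runtime as: rotation = measuredRotation * leftRotationOffset
        return glm::inverse(leftQuarterZ * eighthX) * touchToHand;
    }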
@@ -12,6 +12,8 @@
 #include <glm/gtc/type_ptr.hpp>
 #include <glm/gtc/matrix_transform.hpp>

+#include <controllers/Forward.h>
+
 bool openVrSupported();

 vr::IVRSystem* acquireOpenVrSystem();

@@ -55,3 +57,5 @@ inline vr::HmdMatrix34_t toOpenVr(const mat4& m) {
     }
     return result;
 }
+
+controller::Pose openVrControllerPoseToHandPose(bool isLeftHand, const mat4& mat, const vec3& linearVelocity, const vec3& angularVelocity);

@@ -37,10 +37,6 @@ vr::IVRSystem* acquireOpenVrSystem();
 void releaseOpenVrSystem();

-static const float CONTROLLER_LENGTH_OFFSET = 0.0762f; // three inches
-static const glm::vec3 CONTROLLER_OFFSET = glm::vec3(CONTROLLER_LENGTH_OFFSET / 2.0f,
-                                                     CONTROLLER_LENGTH_OFFSET / 2.0f,
-                                                     CONTROLLER_LENGTH_OFFSET * 2.0f);
 static const char* CONTROLLER_MODEL_STRING = "vr_controller_05_wireless_b";

 static const QString MENU_PARENT = "Avatar";

@@ -382,86 +378,11 @@ void ViveControllerManager::InputDevice::handleButtonEvent(float deltaTime, uint
 void ViveControllerManager::InputDevice::handlePoseEvent(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
                                                           const mat4& mat, const vec3& linearVelocity,
                                                           const vec3& angularVelocity, bool isLeftHand) {
-    // When the sensor-to-world rotation is identity the coordinate axes look like this:
-    //
-    //                   user
-    //                  forward
-    //                    -z
-    //                     |
-    //                    y|      user
-    //      y              o----x right
-    //       o-----x       user
-    //       |             up
-    //       |
-    //      z
-    //
-    //     Vive
-    //
-
-    // From ABOVE the hand canonical axes looks like this:
-    //
-    //      | | | |          y        | | | |
-    //      | | | |          |        | | | |
-    //      |     |          |        |     |
-    //      |left | /    x---- +    \ |right|
-    //      |     _/          z      \_     |
-    //       |   |                     |   |
-    //       |   |                     |   |
-    //
-
-    // So when the user is standing in Vive space facing the -zAxis with hands outstretched and palms down
-    // the rotation to align the Vive axes with those of the hands is:
-    //
-    //    QviveToHand = halfTurnAboutY * quaterTurnAboutX
-
-    // Due to how the Vive controllers fit into the palm there is an offset that is different for each hand.
-    // You can think of this offset as the inverse of the measured rotation when the hands are posed, such that
-    // the combination (measurement * offset) is identity at this orientation.
-    //
-    //    Qoffset = glm::inverse(deltaRotation when hand is posed fingers forward, palm down)
-    //
-    // An approximate offset for the Vive can be obtained by inspection:
-    //
-    //    Qoffset = glm::inverse(glm::angleAxis(sign * PI/4.0f, zAxis) * glm::angleAxis(PI/2.0f, xAxis))
-    //
-    // So the full equation is:
-    //
-    //    Q = combinedMeasurement * viveToHand
-    //
-    //    Q = (deltaQ * QOffset) * (yFlip * quarterTurnAboutX)
-    //
-    //    Q = (deltaQ * inverse(deltaQForAlignedHand)) * (yFlip * quarterTurnAboutX)
-
-    static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
-    static const glm::quat quarterX = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_X);
-    static const glm::quat viveToHand = yFlip * quarterX;
-
-    static const glm::quat leftQuaterZ = glm::angleAxis(-PI_OVER_TWO, Vectors::UNIT_Z);
-    static const glm::quat rightQuaterZ = glm::angleAxis(PI_OVER_TWO, Vectors::UNIT_Z);
-    static const glm::quat eighthX = glm::angleAxis(PI / 4.0f, Vectors::UNIT_X);
-
-    static const glm::quat leftRotationOffset = glm::inverse(leftQuaterZ * eighthX) * viveToHand;
-    static const glm::quat rightRotationOffset = glm::inverse(rightQuaterZ * eighthX) * viveToHand;
-
-    static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
-    static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
-
-    auto translationOffset = (isLeftHand ? leftTranslationOffset : rightTranslationOffset);
-    auto rotationOffset = (isLeftHand ? leftRotationOffset : rightRotationOffset);
-
-    glm::vec3 position = extractTranslation(mat);
-    glm::quat rotation = glm::normalize(glm::quat_cast(mat));
-
-    position += rotation * translationOffset;
-    rotation = rotation * rotationOffset;
-
+    auto pose = openVrControllerPoseToHandPose(isLeftHand, mat, linearVelocity, angularVelocity);
+
     // transform into avatar frame
     glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
-    auto avatarPose = controller::Pose(position, rotation);
-    // handle change in velocity due to translationOffset
-    avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
-    avatarPose.angularVelocity = angularVelocity;
-    _poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar);
+    _poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] = pose.transform(controllerToAvatar);
 }

 bool ViveControllerManager::InputDevice::triggerHapticPulse(float strength, float duration, controller::Hand hand) {

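handlePoseEvent now delegates the per-controller offset math to the shared openVrControllerPoseToHandPose() helper and only keeps the avatar-frame step: controllerToAvatar = inverse(avatarMat) * sensorToWorldMat, applied through the pose's transform() call. A rough, self-contained sketch of that frame change on a plain position/rotation pair; the struct and field names are stand-ins, not the engine's types:

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Stand-in for controller::Pose; field names are illustrative only.
    struct SimplePose {
        glm::vec3 position;
        glm::quat rotation;
    };

    // Map a pose from sensor (tracking) space into the avatar's local frame,
    // roughly what pose.transform(controllerToAvatar) accomplishes in the commit.
    SimplePose sensorToAvatar(const SimplePose& sensorPose,
                              const glm::mat4& avatarMat,
                              const glm::mat4& sensorToWorldMat) {
        glm::mat4 controllerToAvatar = glm::inverse(avatarMat) * sensorToWorldMat;
        SimplePose result;
        result.position = glm::vec3(controllerToAvatar * glm::vec4(sensorPose.position, 1.0f));
        result.rotation = glm::quat_cast(controllerToAvatar) * sensorPose.rotation;
        return result;
    }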