Mirror of https://github.com/overte-org/overte.git
Merge pull request #10594 from sethalves/head-controller
Head controller
commit 0bbbcbba67
28 changed files with 175 additions and 132 deletions
@@ -57,6 +57,8 @@
    { "from": "OculusTouch.LeftThumbUp", "to": "Standard.LeftThumbUp" },
    { "from": "OculusTouch.RightThumbUp", "to": "Standard.RightThumbUp" },
    { "from": "OculusTouch.LeftIndexPoint", "to": "Standard.LeftIndexPoint" },
-   { "from": "OculusTouch.RightIndexPoint", "to": "Standard.RightIndexPoint" }
+   { "from": "OculusTouch.RightIndexPoint", "to": "Standard.RightIndexPoint" },
+
+   { "from": "OculusTouch.Head", "to" : "Standard.Head", "when" : [ "Application.InHMD"] }
    ]
}
@@ -2188,7 +2188,7 @@ void Application::paintGL() {
            _myCamera.setOrientation(glm::quat_cast(camMat));
        } else {
            _myCamera.setPosition(myAvatar->getDefaultEyePosition());
-           _myCamera.setOrientation(myAvatar->getMyHead()->getCameraOrientation());
+           _myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
        }
    } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
        if (isHMDMode()) {
@@ -4115,6 +4115,7 @@ void Application::updateMyAvatarLookAtPosition() {
            lookAtPosition.x = -lookAtPosition.x;
        }
        if (isHMD) {
+           // TODO -- this code is probably wrong, getHeadPose() returns something in sensor frame, not avatar
            glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
            glm::quat hmdRotation = glm::quat_cast(headPose);
            lookAtSpot = _myCamera.getPosition() + myAvatar->getOrientation() * (hmdRotation * lookAtPosition);
@@ -4157,8 +4158,9 @@ void Application::updateMyAvatarLookAtPosition() {
        } else {
            //  I am not looking at anyone else, so just look forward
            if (isHMD) {
-               glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
-               lookAtSpot = transformPoint(worldHMDMat, glm::vec3(0.0f, 0.0f, -TREE_SCALE));
+               glm::mat4 worldHeadMat = myAvatar->getSensorToWorldMatrix() *
+                   myAvatar->getHeadControllerPoseInSensorFrame().getMatrix();
+               lookAtSpot = transformPoint(worldHeadMat, glm::vec3(0.0f, 0.0f, -TREE_SCALE));
            } else {
                lookAtSpot = myAvatar->getHead()->getEyePosition() +
                    (myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
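For reference, a minimal standalone sketch of the look-forward computation above, using plain glm; the function and parameter names here are illustrative rather than the engine's API, and farDistance stands in for TREE_SCALE:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Compose the sensor-to-world matrix with a sensor-frame head pose, then push a point
// far along the head's local -Z axis out into world space, mirroring the
// worldHeadMat / transformPoint calls in the hunk above.
glm::vec3 lookForwardSpot(const glm::mat4& sensorToWorld,
                          const glm::quat& headRotInSensorFrame,
                          const glm::vec3& headPosInSensorFrame,
                          float farDistance) {
    glm::mat4 headMat = glm::mat4_cast(headRotInSensorFrame);
    headMat[3] = glm::vec4(headPosInSensorFrame, 1.0f);   // translation in the 4th column
    glm::mat4 worldHeadMat = sensorToWorld * headMat;
    glm::vec4 spot = worldHeadMat * glm::vec4(0.0f, 0.0f, -farDistance, 1.0f);
    return glm::vec3(spot);
}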
@@ -409,7 +409,7 @@ void MyAvatar::update(float deltaTime) {
    // update moving average of HMD facing in xz plane.
    const float HMD_FACING_TIMESCALE = 4.0f; // very slow average
    float tau = deltaTime / HMD_FACING_TIMESCALE;
-   _hmdSensorFacingMovingAverage = lerp(_hmdSensorFacingMovingAverage, _hmdSensorFacing, tau);
+   _headControllerFacingMovingAverage = lerp(_headControllerFacingMovingAverage, _headControllerFacing, tau);

    if (_smoothOrientationTimer < SMOOTH_TIME_ORIENTATION) {
        _rotationChanged = usecTimestampNow();
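The lerp above is a frame-rate-independent exponential moving average: tau is the fraction of the distance toward the new sample covered this frame. A minimal sketch (glm assumed; the helper name is illustrative):

#include <glm/glm.hpp>

// Exponential moving average of a 2D facing vector.
// tau = deltaTime / timescale, so a larger timescale gives a slower, smoother average.
glm::vec2 smoothFacing(glm::vec2 average, const glm::vec2& current, float deltaTime, float timescale) {
    float tau = glm::clamp(deltaTime / timescale, 0.0f, 1.0f);
    // With timescale = 4.0f and deltaTime around 16 ms, tau is roughly 0.004, which is
    // the "very slow average" the comment in the diff asks for.
    return glm::mix(average, current, tau);   // lerp(average, current, tau)
}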
@@ -417,16 +417,18 @@ void MyAvatar::update(float deltaTime) {
    }

#ifdef DEBUG_DRAW_HMD_MOVING_AVERAGE
-   glm::vec3 p = transformPoint(getSensorToWorldMatrix(), _hmdSensorPosition + glm::vec3(_hmdSensorFacingMovingAverage.x, 0.0f, _hmdSensorFacingMovingAverage.y));
+   glm::vec3 p = transformPoint(getSensorToWorldMatrix(), getHeadControllerPoseInAvatarFrame() *
+                                glm::vec3(_headControllerFacingMovingAverage.x, 0.0f, _headControllerFacingMovingAverage.y));
    DebugDraw::getInstance().addMarker("facing-avg", getOrientation(), p, glm::vec4(1.0f));
-   p = transformPoint(getSensorToWorldMatrix(), _hmdSensorPosition + glm::vec3(_hmdSensorFacing.x, 0.0f, _hmdSensorFacing.y));
+   p = transformPoint(getSensorToWorldMatrix(), getHMDSensorPosition() +
+                      glm::vec3(_headControllerFacing.x, 0.0f, _headControllerFacing.y));
    DebugDraw::getInstance().addMarker("facing", getOrientation(), p, glm::vec4(1.0f));
#endif

    if (_goToPending) {
        setPosition(_goToPosition);
        setOrientation(_goToOrientation);
-       _hmdSensorFacingMovingAverage = _hmdSensorFacing; // reset moving average
+       _headControllerFacingMovingAverage = _headControllerFacing; // reset moving average
        _goToPending = false;
        // updateFromHMDSensorMatrix (called from paintGL) expects that the sensorToWorldMatrix is updated for any position changes
        // that happen between render and Application::update (which calls updateSensorToWorldMatrix to do so).
@@ -633,15 +635,21 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
    _hmdSensorMatrix = hmdSensorMatrix;
    auto newHmdSensorPosition = extractTranslation(hmdSensorMatrix);

-   if (newHmdSensorPosition != _hmdSensorPosition &&
+   if (newHmdSensorPosition != getHMDSensorPosition() &&
        glm::length(newHmdSensorPosition) > MAX_HMD_ORIGIN_DISTANCE) {
        qWarning() << "Invalid HMD sensor position " << newHmdSensorPosition;
        // Ignore unreasonable HMD sensor data
        return;
    }

    _hmdSensorPosition = newHmdSensorPosition;
    _hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
-   _hmdSensorFacing = getFacingDir2D(_hmdSensorOrientation);
+   auto headPose = _headControllerPoseInSensorFrameCache.get();
+   if (headPose.isValid()) {
+       _headControllerFacing = getFacingDir2D(headPose.rotation);
+   } else {
+       _headControllerFacing = glm::vec2(1.0f, 0.0f);
+   }
}

void MyAvatar::updateJointFromController(controller::Action poseKey, ThreadSafeValueCache<glm::mat4>& matrixCache) {
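getFacingDir2D() itself is not shown in this diff; a plausible standalone equivalent, assuming the engine's usual -Z-forward convention, with the same fall-back to a unit +X facing that the new code uses when the head pose is invalid:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Project an orientation's forward axis (assumed -Z) onto the xz plane and normalize it;
// fall back to +X when the projection is degenerate.
glm::vec2 facingDir2D(const glm::quat& orientation) {
    glm::vec3 forward = orientation * glm::vec3(0.0f, 0.0f, -1.0f);
    glm::vec2 flat(forward.x, forward.z);
    float len = glm::length(flat);
    const float MIN_LENGTH = 1.0e-4f;
    return (len > MIN_LENGTH) ? (flat / len) : glm::vec2(1.0f, 0.0f);
}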
@@ -679,7 +687,7 @@ void MyAvatar::updateSensorToWorldMatrix() {

// Update avatar head rotation with sensor data
void MyAvatar::updateFromTrackers(float deltaTime) {
-   glm::vec3 estimatedPosition, estimatedRotation;
+   glm::vec3 estimatedRotation;

    bool inHmd = qApp->isHMDMode();
    bool playing = DependencyManager::get<recording::Deck>()->isPlaying();
@@ -690,11 +698,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
    FaceTracker* tracker = qApp->getActiveFaceTracker();
    bool inFacetracker = tracker && !FaceTracker::isMuted();

-   if (inHmd) {
-       estimatedPosition = extractTranslation(getHMDSensorMatrix());
-       estimatedPosition.x *= -1.0f;
-   } else if (inFacetracker) {
-       estimatedPosition = tracker->getHeadTranslation();
+   if (inFacetracker) {
        estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
    }

@@ -1479,12 +1483,12 @@ void MyAvatar::updateMotors() {
    if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
        if (_characterController.getState() == CharacterController::State::Hover ||
                _characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
-           motorRotation = getMyHead()->getCameraOrientation();
+           motorRotation = getMyHead()->getHeadOrientation();
        } else {
            // non-hovering = walking: follow camera twist about vertical but not lift
            // so we decompose camera's rotation and store the twist part in motorRotation
            glm::quat liftRotation;
-           swingTwistDecomposition(getMyHead()->getCameraOrientation(), _worldUpDirection, liftRotation, motorRotation);
+           swingTwistDecomposition(getMyHead()->getHeadOrientation(), _worldUpDirection, liftRotation, motorRotation);
        }
        const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
        const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
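swingTwistDecomposition() splits the head orientation into the twist about the world up axis (which the walking motor follows) and the remaining swing (which it ignores). A standalone sketch of the standard projection method, assuming glm; the engine's own helper may differ in argument order or edge-case handling:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Decompose `rotation` into swing * twist, where `twist` rotates only about `axis`
// (axis must be normalized): project the quaternion's vector part onto the axis,
// rebuild a quaternion from it, and renormalize.
void swingTwistDecompose(const glm::quat& rotation, const glm::vec3& axis,
                         glm::quat& swing, glm::quat& twist) {
    glm::vec3 r(rotation.x, rotation.y, rotation.z);
    glm::vec3 projected = glm::dot(r, axis) * axis;
    twist = glm::quat(rotation.w, projected.x, projected.y, projected.z);
    float len = glm::length(twist);
    const float MIN_LENGTH = 1.0e-6f;
    twist = (len > MIN_LENGTH) ? (twist / len) : glm::quat(1.0f, 0.0f, 0.0f, 0.0f);
    swing = rotation * glm::inverse(twist);   // so that rotation == swing * twist
}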
@@ -1498,7 +1502,7 @@ void MyAvatar::updateMotors() {
    }
    if (_motionBehaviors & AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED) {
        if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {
-           motorRotation = getMyHead()->getCameraOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
+           motorRotation = getMyHead()->getHeadOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
        } else if (_scriptedMotorFrame == SCRIPTED_MOTOR_AVATAR_FRAME) {
            motorRotation = getOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
        } else {
|
||||||
if (getCharacterController()->getState() == CharacterController::State::Hover) {
|
if (getCharacterController()->getState() == CharacterController::State::Hover) {
|
||||||
|
|
||||||
// This is the direction the user desires to fly in.
|
// This is the direction the user desires to fly in.
|
||||||
glm::vec3 desiredFacing = getMyHead()->getCameraOrientation() * Vectors::UNIT_Z;
|
glm::vec3 desiredFacing = getMyHead()->getHeadOrientation() * Vectors::UNIT_Z;
|
||||||
desiredFacing.y = 0.0f;
|
desiredFacing.y = 0.0f;
|
||||||
|
|
||||||
// This is our reference frame, it is captured when the user begins to move.
|
// This is our reference frame, it is captured when the user begins to move.
|
||||||
|
@ -1886,11 +1890,9 @@ void MyAvatar::updateOrientation(float deltaTime) {
|
||||||
|
|
||||||
getHead()->setBasePitch(getHead()->getBasePitch() + getDriveKey(PITCH) * _pitchSpeed * deltaTime);
|
getHead()->setBasePitch(getHead()->getBasePitch() + getDriveKey(PITCH) * _pitchSpeed * deltaTime);
|
||||||
|
|
||||||
if (qApp->isHMDMode()) {
|
auto headPose = getHeadControllerPoseInAvatarFrame();
|
||||||
glm::quat orientation = glm::quat_cast(getSensorToWorldMatrix()) * getHMDSensorOrientation();
|
if (headPose.isValid()) {
|
||||||
glm::quat bodyOrientation = getWorldBodyOrientation();
|
glm::quat localOrientation = headPose.rotation * Quaternions::Y_180;
|
||||||
glm::quat localOrientation = glm::inverse(bodyOrientation) * orientation;
|
|
||||||
|
|
||||||
// these angles will be in radians
|
// these angles will be in radians
|
||||||
// ... so they need to be converted to degrees before we do math...
|
// ... so they need to be converted to degrees before we do math...
|
||||||
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
|
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
|
||||||
|
@@ -2004,11 +2006,14 @@ void MyAvatar::updatePosition(float deltaTime) {
    }

    // capture the head rotation, in sensor space, when the user first indicates they would like to move/fly.
-   if (!_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) > 0.1f || fabs(getDriveKey(TRANSLATE_X)) > 0.1f)) {
+   if (!_hoverReferenceCameraFacingIsCaptured &&
+       (fabs(getDriveKey(TRANSLATE_Z)) > 0.1f || fabs(getDriveKey(TRANSLATE_X)) > 0.1f)) {
        _hoverReferenceCameraFacingIsCaptured = true;
        // transform the camera facing vector into sensor space.
-       _hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix), getMyHead()->getCameraOrientation() * Vectors::UNIT_Z);
-   } else if (_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) <= 0.1f && fabs(getDriveKey(TRANSLATE_X)) <= 0.1f)) {
+       _hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix),
+                                                         getMyHead()->getHeadOrientation() * Vectors::UNIT_Z);
+   } else if (_hoverReferenceCameraFacingIsCaptured &&
+              (fabs(getDriveKey(TRANSLATE_Z)) <= 0.1f && fabs(getDriveKey(TRANSLATE_X)) <= 0.1f)) {
        _hoverReferenceCameraFacingIsCaptured = false;
    }
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
glm::vec3 MyAvatar::getWorldBodyPosition() const {
|
|
||||||
return transformPoint(_sensorToWorldMatrix, extractTranslation(_bodySensorMatrix));
|
|
||||||
}
|
|
||||||
|
|
||||||
glm::quat MyAvatar::getWorldBodyOrientation() const {
|
|
||||||
return glm::quat_cast(_sensorToWorldMatrix * _bodySensorMatrix);
|
|
||||||
}
|
|
||||||
|
|
||||||
// old school meat hook style
|
// old school meat hook style
|
||||||
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
|
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
|
||||||
|
glm::vec3 headPosition;
|
||||||
// HMD is in sensor space.
|
glm::quat headOrientation;
|
||||||
const glm::vec3 hmdPosition = getHMDSensorPosition();
|
auto headPose = getHeadControllerPoseInSensorFrame();
|
||||||
const glm::quat hmdOrientation = getHMDSensorOrientation();
|
if (headPose.isValid()) {
|
||||||
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
|
headPosition = getHeadControllerPoseInSensorFrame().translation;
|
||||||
|
headOrientation = getHeadControllerPoseInSensorFrame().rotation * Quaternions::Y_180;
|
||||||
|
}
|
||||||
|
const glm::quat headOrientationYawOnly = cancelOutRollAndPitch(headOrientation);
|
||||||
|
|
||||||
const Rig& rig = _skeletonModel->getRig();
|
const Rig& rig = _skeletonModel->getRig();
|
||||||
int rightEyeIndex = rig.indexOfJoint("RightEye");
|
int headIndex = rig.indexOfJoint("Head");
|
||||||
int leftEyeIndex = rig.indexOfJoint("LeftEye");
|
|
||||||
int neckIndex = rig.indexOfJoint("Neck");
|
int neckIndex = rig.indexOfJoint("Neck");
|
||||||
int hipsIndex = rig.indexOfJoint("Hips");
|
int hipsIndex = rig.indexOfJoint("Hips");
|
||||||
|
|
||||||
glm::vec3 rigMiddleEyePos = DEFAULT_AVATAR_MIDDLE_EYE_POS;
|
glm::vec3 rigHeadPos = headIndex != -1 ? rig.getAbsoluteDefaultPose(headIndex).trans() : DEFAULT_AVATAR_HEAD_POS;
|
||||||
if (leftEyeIndex >= 0 && rightEyeIndex >= 0) {
|
|
||||||
rigMiddleEyePos = (rig.getAbsoluteDefaultPose(leftEyeIndex).trans() + rig.getAbsoluteDefaultPose(rightEyeIndex).trans()) / 2.0f;
|
|
||||||
}
|
|
||||||
glm::vec3 rigNeckPos = neckIndex != -1 ? rig.getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
|
glm::vec3 rigNeckPos = neckIndex != -1 ? rig.getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
|
||||||
glm::vec3 rigHipsPos = hipsIndex != -1 ? rig.getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
|
glm::vec3 rigHipsPos = hipsIndex != -1 ? rig.getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
|
||||||
|
|
||||||
glm::vec3 localEyes = (rigMiddleEyePos - rigHipsPos);
|
glm::vec3 localHead = (rigHeadPos - rigHipsPos);
|
||||||
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
|
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
|
||||||
|
|
||||||
// apply simplistic head/neck model
|
// apply simplistic head/neck model
|
||||||
|
@@ -2360,11 +2356,11 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
    // eyeToNeck offset is relative full HMD orientation.
    // while neckToRoot offset is only relative to HMDs yaw.
    // Y_180 is necessary because rig is z forward and hmdOrientation is -z forward
-   glm::vec3 eyeToNeck = hmdOrientation * Quaternions::Y_180 * (localNeck - localEyes);
-   glm::vec3 neckToRoot = hmdOrientationYawOnly * Quaternions::Y_180 * -localNeck;
-   glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;
+   glm::vec3 headToNeck = headOrientation * Quaternions::Y_180 * (localNeck - localHead);
+   glm::vec3 neckToRoot = headOrientationYawOnly * Quaternions::Y_180 * -localNeck;
+   glm::vec3 bodyPos = headPosition + headToNeck + neckToRoot;

-   return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
+   return createMatFromQuatAndPos(headOrientationYawOnly, bodyPos);
}

glm::vec3 MyAvatar::getPositionForAudio() {
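The rewritten deriveBodyFromHMDSensor() above anchors the body under the tracked head joint instead of the mid-eye point. A compressed sketch of the same head/neck model in plain glm; joint positions are passed in explicitly and cancelOutRollAndPitch() is approximated by a yaw-only helper, so treat it as an illustration rather than the engine's implementation:

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

// Keep only the yaw of a rotation (rotation about world +Y), dropping roll and pitch.
static glm::quat yawOnly(const glm::quat& q) {
    glm::vec3 forward = q * glm::vec3(0.0f, 0.0f, -1.0f);
    forward.y = 0.0f;
    if (glm::length(forward) < 1.0e-4f) {
        return glm::quat(1.0f, 0.0f, 0.0f, 0.0f);
    }
    forward = glm::normalize(forward);
    float yaw = atan2f(-forward.x, -forward.z);
    return glm::angleAxis(yaw, glm::vec3(0.0f, 1.0f, 0.0f));
}

// Derive a sensor-space body (root) transform from a sensor-space head pose, given the
// skeleton's default head/neck/hips positions (all in rig space).
glm::mat4 deriveBodyFromHead(const glm::vec3& headPos, const glm::quat& headRot,
                             const glm::vec3& rigHeadPos, const glm::vec3& rigNeckPos,
                             const glm::vec3& rigHipsPos) {
    const glm::quat Y_180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::vec3 localHead = rigHeadPos - rigHipsPos;
    glm::vec3 localNeck = rigNeckPos - rigHipsPos;

    glm::quat headYaw = yawOnly(headRot);
    // head-to-neck follows the full head orientation; neck-to-root only its yaw.
    glm::vec3 headToNeck = headRot * Y_180 * (localNeck - localHead);
    glm::vec3 neckToRoot = headYaw * Y_180 * -localNeck;
    glm::vec3 bodyPos = headPos + headToNeck + neckToRoot;

    glm::mat4 body = glm::mat4_cast(headYaw);
    body[3] = glm::vec4(bodyPos, 1.0f);
    return body;
}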
@@ -2480,7 +2476,7 @@ bool MyAvatar::FollowHelper::shouldActivateRotation(const MyAvatar& myAvatar, co
    } else {
        const float FOLLOW_ROTATION_THRESHOLD = cosf(PI / 6.0f); // 30 degrees
        glm::vec2 bodyFacing = getFacingDir2D(currentBodyMatrix);
-       return glm::dot(myAvatar.getHMDSensorFacingMovingAverage(), bodyFacing) < FOLLOW_ROTATION_THRESHOLD;
+       return glm::dot(-myAvatar.getHeadControllerFacingMovingAverage(), bodyFacing) < FOLLOW_ROTATION_THRESHOLD;
    }
}

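The negation added above suggests the head-controller facing convention is flipped relative to the old HMD facing; the comparison itself is unchanged: recenter the body once the head and body facings differ by more than 30 degrees. A sketch of that test, assuming both facing vectors are normalized:

#include <cmath>
#include <glm/glm.hpp>

// True when the angle between two normalized 2D facing vectors exceeds maxDegrees.
// cos() is monotonically decreasing on [0, pi], so "angle > threshold" becomes
// "dot < cos(threshold)".
bool shouldRecenterBody(const glm::vec2& headFacing, const glm::vec2& bodyFacing, float maxDegrees) {
    float threshold = cosf(glm::radians(maxDegrees));   // e.g. 30 degrees -> ~0.866
    return glm::dot(headFacing, bodyFacing) < threshold;
}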
@@ -2627,9 +2623,10 @@ glm::mat4 MyAvatar::computeCameraRelativeHandControllerMatrix(const glm::mat4& c
        cameraWorldMatrix *= createMatFromScaleQuatAndPos(vec3(-1.0f, 1.0f, 1.0f), glm::quat(), glm::vec3());
    }

-   // compute a NEW sensorToWorldMatrix for the camera. The equation is cameraWorldMatrix = cameraSensorToWorldMatrix * _hmdSensorMatrix.
+   // compute a NEW sensorToWorldMatrix for the camera.
+   // The equation is cameraWorldMatrix = cameraSensorToWorldMatrix * _hmdSensorMatrix.
    // here we solve for the unknown cameraSensorToWorldMatrix.
-   glm::mat4 cameraSensorToWorldMatrix = cameraWorldMatrix * glm::inverse(_hmdSensorMatrix);
+   glm::mat4 cameraSensorToWorldMatrix = cameraWorldMatrix * glm::inverse(getHMDSensorMatrix());

    // Using the new cameraSensorToWorldMatrix, compute where the controller is in world space.
    glm::mat4 controllerWorldMatrix = cameraSensorToWorldMatrix * controllerSensorMatrix;
@@ -185,7 +185,6 @@ public:
    const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
    const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
    const glm::quat& getHMDSensorOrientation() const { return _hmdSensorOrientation; }
-   const glm::vec2& getHMDSensorFacingMovingAverage() const { return _hmdSensorFacingMovingAverage; }

    Q_INVOKABLE void setOrientationVar(const QVariant& newOrientationVar);
    Q_INVOKABLE QVariant getOrientationVar() const;
@@ -470,6 +469,8 @@ public:
    controller::Pose getHeadControllerPoseInSensorFrame() const;
    controller::Pose getHeadControllerPoseInWorldFrame() const;
    controller::Pose getHeadControllerPoseInAvatarFrame() const;
+   const glm::vec2& getHeadControllerFacingMovingAverage() const { return _headControllerFacingMovingAverage; }
+

    void setArmControllerPosesInSensorFrame(const controller::Pose& left, const controller::Pose& right);
    controller::Pose getLeftArmControllerPoseInSensorFrame() const;
@@ -563,10 +564,6 @@ signals:

private:

-   glm::vec3 getWorldBodyPosition() const;
-   glm::quat getWorldBodyOrientation() const;
-
-
    virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail) override;

    void simulate(float deltaTime);
@@ -676,13 +673,13 @@ private:
    // working copies -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
    glm::mat4 _sensorToWorldMatrix { glm::mat4() };

-   // cache of the current HMD sensor position and orientation
-   // in sensor space.
+   // cache of the current HMD sensor position and orientation in sensor space.
    glm::mat4 _hmdSensorMatrix;
    glm::quat _hmdSensorOrientation;
    glm::vec3 _hmdSensorPosition;
-   glm::vec2 _hmdSensorFacing; // facing vector in xz plane
-   glm::vec2 _hmdSensorFacingMovingAverage { 0, 0 }; // facing vector in xz plane
+   // cache head controller pose in sensor space
+   glm::vec2 _headControllerFacing; // facing vector in xz plane
+   glm::vec2 _headControllerFacingMovingAverage { 0, 0 }; // facing vector in xz plane

    // cache of the current body position and orientation of the avatar's body,
    // in sensor space.
@@ -26,19 +26,20 @@ using namespace std;
MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
}

-glm::quat MyHead::getCameraOrientation() const {
-   // NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
+glm::quat MyHead::getHeadOrientation() const {
+   // NOTE: Head::getHeadOrientation() is not used for orienting the camera "view" while in Oculus mode, so
    // you may wonder why this code is here. This method will be called while in Oculus mode to determine how
    // to change the driving direction while in Oculus mode. It is used to support driving toward where you're
    // head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
    // always the same.
-   if (qApp->isHMDMode()) {
    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
-       return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
-   } else {
-       Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
-       return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
+   auto headPose = myAvatar->getHeadControllerPoseInWorldFrame();
+   if (headPose.isValid()) {
+       return headPose.rotation * Quaternions::Y_180;
    }

+   return myAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
}

void MyHead::simulate(float deltaTime) {
@@ -18,7 +18,7 @@ public:
    explicit MyHead(MyAvatar* owningAvatar);

    /// \return orientationBody * orientationBasePitch
-   glm::quat getCameraOrientation() const;
+   glm::quat getHeadOrientation() const;
    void simulate(float deltaTime) override;

private:
@@ -52,26 +52,18 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    // input action is the highest priority source for head orientation.
    auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
    if (avatarHeadPose.isValid()) {
-       glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
+       glm::mat4 rigHeadMat = Matrices::Y_180 *
+           createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
        headParams.rigHeadPosition = extractTranslation(rigHeadMat);
        headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
        headParams.headEnabled = true;
    } else {
-       if (qApp->isHMDMode()) {
-           // get HMD position from sensor space into world space, and back into rig space
-           glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
-           glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
-           glm::mat4 worldToRig = glm::inverse(rigToWorld);
-           glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
-           _rig.computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
-           headParams.headEnabled = true;
-       } else {
-           // even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
-           // preMult 180 is necessary to convert from avatar to rig coordinates.
-           // postMult 180 is necessary to convert head from -z forward to z forward.
-           headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
-           headParams.headEnabled = false;
-       }
+       // even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and
+       // down in desktop mode.
+       // preMult 180 is necessary to convert from avatar to rig coordinates.
+       // postMult 180 is necessary to convert head from -z forward to z forward.
+       headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
+       headParams.headEnabled = false;
    }

    auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
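The rig is +Z forward while the avatar-frame head pose is -Z forward, hence the Y_180 pre-multiplication on the enabled path above (the desktop fallback additionally post-multiplies Y_180 on the orientation alone). A sketch of the enabled path in plain glm; Matrices::Y_180 is rebuilt inline here:

#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

// Re-express an avatar-frame head transform in rig coordinates by pre-multiplying a
// 180 degree yaw, then pull the rig-space translation and rotation back out of the matrix.
void avatarHeadToRigHead(const glm::quat& headRotAvatar, const glm::vec3& headPosAvatar,
                         glm::quat& rigHeadRot, glm::vec3& rigHeadPos) {
    const glm::quat Y_180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::mat4 avatarHeadMat = glm::mat4_cast(headRotAvatar);
    avatarHeadMat[3] = glm::vec4(headPosAvatar, 1.0f);

    glm::mat4 rigHeadMat = glm::mat4_cast(Y_180) * avatarHeadMat;
    rigHeadPos = glm::vec3(rigHeadMat[3]);
    rigHeadRot = glm::quat_cast(rigHeadMat);
}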
@@ -54,6 +54,10 @@ bool HMDScriptingInterface::isHMDAvailable(const QString& name) {
    return PluginUtils::isHMDAvailable(name);
}

+bool HMDScriptingInterface::isHeadControllerAvailable(const QString& name) {
+   return PluginUtils::isHeadControllerAvailable(name);
+}
+
bool HMDScriptingInterface::isHandControllerAvailable(const QString& name) {
    return PluginUtils::isHandControllerAvailable(name);
}
@@ -43,6 +43,7 @@ public:
    Q_INVOKABLE QString preferredAudioOutput() const;

    Q_INVOKABLE bool isHMDAvailable(const QString& name = "");
+   Q_INVOKABLE bool isHeadControllerAvailable(const QString& name = "");
    Q_INVOKABLE bool isHandControllerAvailable(const QString& name = "");
    Q_INVOKABLE bool isSubdeviceContainingNameAvailable(const QString& name);

@@ -304,18 +304,6 @@ glm::quat Head::getEyeRotation(const glm::vec3& eyePosition) const {
    return rotationBetween(orientation * IDENTITY_FORWARD, lookAtDelta + glm::length(lookAtDelta) * _saccade) * orientation;
}

-void Head::setFinalPitch(float finalPitch) {
-   _deltaPitch = glm::clamp(finalPitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH) - _basePitch;
-}
-
-void Head::setFinalYaw(float finalYaw) {
-   _deltaYaw = glm::clamp(finalYaw, MIN_HEAD_YAW, MAX_HEAD_YAW) - _baseYaw;
-}
-
-void Head::setFinalRoll(float finalRoll) {
-   _deltaRoll = glm::clamp(finalRoll, MIN_HEAD_ROLL, MAX_HEAD_ROLL) - _baseRoll;
-}
-
float Head::getFinalYaw() const {
    return glm::clamp(_baseYaw + _deltaYaw, MIN_HEAD_YAW, MAX_HEAD_YAW);
}
@@ -71,9 +71,6 @@ public:
    void setDeltaRoll(float roll) { _deltaRoll = roll; }
    float getDeltaRoll() const { return _deltaRoll; }

-   virtual void setFinalYaw(float finalYaw) override;
-   virtual void setFinalPitch(float finalPitch) override;
-   virtual void setFinalRoll(float finalRoll) override;
    virtual float getFinalPitch() const override;
    virtual float getFinalYaw() const override;
    virtual float getFinalRoll() const override;
@@ -45,9 +45,6 @@ public:
    float getBaseRoll() const { return _baseRoll; }
    void setBaseRoll(float roll) { _baseRoll = glm::clamp(roll, MIN_HEAD_ROLL, MAX_HEAD_ROLL); }

-   virtual void setFinalYaw(float finalYaw) { _baseYaw = finalYaw; }
-   virtual void setFinalPitch(float finalPitch) { _basePitch = finalPitch; }
-   virtual void setFinalRoll(float finalRoll) { _baseRoll = finalRoll; }
    virtual float getFinalYaw() const { return _baseYaw; }
    virtual float getFinalPitch() const { return _basePitch; }
    virtual float getFinalRoll() const { return _baseRoll; }
@@ -39,6 +39,7 @@ namespace controller {
        quat getRotation() const { return rotation; }
        vec3 getVelocity() const { return velocity; }
        vec3 getAngularVelocity() const { return angularVelocity; }
+       mat4 getMatrix() const { return createMatFromQuatAndPos(rotation, translation); }

        Pose transform(const glm::mat4& mat) const;
        Pose postTransform(const glm::mat4& mat) const;
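The new Pose::getMatrix() leans on the engine's createMatFromQuatAndPos(); an equivalent construction in plain glm, for reference (column-major, translation in the fourth column):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Build a rigid transform from a rotation and a translation: the upper-left 3x3 comes
// from the quaternion, the fourth column holds the translation.
glm::mat4 matFromQuatAndPos(const glm::quat& rotation, const glm::vec3& translation) {
    glm::mat4 m = glm::mat4_cast(rotation);
    m[3] = glm::vec4(translation, 1.0f);
    return m;
}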
@@ -70,8 +70,6 @@ public:
    bool isSupported() const override { return true; }
    const QString getName() const override { return NAME; }

-   bool isHandController() const override { return false; }
-
    void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
    void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;

@@ -39,8 +39,6 @@ public:
    virtual bool isSupported() const override;
    virtual const QString getName() const override { return NAME; }

-   bool isHandController() const override { return false; }
-
    virtual void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
    virtual void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;

@@ -24,6 +24,7 @@ public:
    // Some input plugins are comprised of multiple subdevices (SDL2, for instance).
    // If an input plugin is only a single device, it will only return it's primary name.
    virtual QStringList getSubdeviceNames() { return { getName() }; };
-   virtual bool isHandController() const = 0;
+   virtual bool isHandController() const { return false; }
+   virtual bool isHeadController() const { return false; }
};

@@ -24,6 +24,15 @@ bool PluginUtils::isHMDAvailable(const QString& pluginName) {
    return false;
}

+bool PluginUtils::isHeadControllerAvailable(const QString& pluginName) {
+   for (auto& inputPlugin : PluginManager::getInstance()->getInputPlugins()) {
+       if (inputPlugin->isHeadController() && (pluginName.isEmpty() || inputPlugin->getName() == pluginName)) {
+           return true;
+       }
+   }
+   return false;
+};
+
bool PluginUtils::isHandControllerAvailable(const QString& pluginName) {
    for (auto& inputPlugin : PluginManager::getInstance()->getInputPlugins()) {
        if (inputPlugin->isHandController() && (pluginName.isEmpty() || inputPlugin->getName() == pluginName)) {
@@ -16,6 +16,7 @@ class PluginUtils {
public:
    static bool isHMDAvailable(const QString& pluginName = "");
    static bool isHandControllerAvailable(const QString& pluginName = "");
+   static bool isHeadControllerAvailable(const QString& pluginName = "");
    static bool isSubdeviceContainingNameAvailable(QString name);
    static bool isViveControllerAvailable();
    static bool isOculusTouchControllerAvailable();
@@ -273,7 +273,7 @@ bool KinectPlugin::activate() {
        return false;
    }

bool KinectPlugin::isHandController() const {
    bool sensorAvailable = false;
#ifdef HAVE_KINECT
    if (_kinectSensor) {
@@ -285,6 +285,10 @@ bool KinectPlugin::isHandController() const {
    return _enabled && _initialized && sensorAvailable;
}

+bool KinectPlugin::isHeadController() const {
+   return isHandController();
+}
+

bool KinectPlugin::initializeDefaultSensor() const {
#ifdef HAVE_KINECT
@@ -654,4 +658,4 @@ void KinectPlugin::InputDevice::clearState() {
        int poseIndex = KinectJointIndexToPoseIndex((KinectJointIndex)i);
        _poseStateMap[poseIndex] = controller::Pose();
    }
}
@@ -43,6 +43,7 @@ class KinectPlugin : public InputPlugin {
    Q_OBJECT
public:
    bool isHandController() const override;
+   bool isHeadController() const override;

    // Plugin functions
    virtual void init() override;
@@ -25,8 +25,6 @@ class NeuronPlugin : public InputPlugin {
public:
    friend void FrameDataReceivedCallback(void* context, void* sender, _BvhDataHeaderEx* header, float* data);

-   bool isHandController() const override { return false; }
-
    // Plugin functions
    virtual void init() override;
    virtual bool isSupported() const override;
@@ -27,7 +27,6 @@ public:
    const QString getName() const override { return NAME; }

    QStringList getSubdeviceNames() override;
-   bool isHandController() const override { return false; }

    void init() override;
    void deinit() override;
@@ -33,7 +33,7 @@ public:

    // Sixense always seems to initialize even if the hydras are not present. Is there
    // a way we can properly detect whether the hydras are present?
-   bool isHandController() const override { return false; }
+   // bool isHandController() const override { return true; }

    virtual bool activate() override;
    virtual void deactivate() override;
@@ -19,6 +19,8 @@

#include <PerfStat.h>
#include <PathUtils.h>
+#include <NumericalConstants.h>
+#include <StreamUtils.h>

#include <OVR_CAPI.h>

@@ -208,13 +210,18 @@ void OculusControllerManager::RemoteDevice::focusOutEvent() {
    _buttonPressedMap.clear();
}

-void OculusControllerManager::TouchDevice::update(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
-   _buttonPressedMap.clear();
+bool OculusControllerManager::isHeadControllerMounted() const {
    ovrSessionStatus status;
-   if (!OVR_SUCCESS(ovr_GetSessionStatus(_parent._session, &status)) || (ovrFalse == status.HmdMounted)) {
-       // if the HMD isn't on someone's head, don't take input from the controllers
-       return;
+   bool success = OVR_SUCCESS(ovr_GetSessionStatus(_session, &status));
+   if (!success) {
+       return false;
    }
+   return status.HmdMounted == ovrTrue;
+}
+
+void OculusControllerManager::TouchDevice::update(float deltaTime,
+                                                  const controller::InputCalibrationData& inputCalibrationData) {
+   _buttonPressedMap.clear();

    int numTrackedControllers = 0;
    quint64 currentTime = usecTimestampNow();
@@ -230,14 +237,14 @@ void OculusControllerManager::TouchDevice::update(float deltaTime, const control
            _lastControllerPose[controller] = tracking.HandPoses[hand];
            return;
        }

        if (_lostTracking[controller]) {
            if (currentTime > _regainTrackingDeadline[controller]) {
                _poseStateMap.erase(controller);
                _poseStateMap[controller].valid = false;
                return;
            }

        } else {
            quint64 deadlineToRegainTracking = currentTime + LOST_TRACKING_DELAY;
            _regainTrackingDeadline[controller] = deadlineToRegainTracking;
@@ -245,6 +252,13 @@ void OculusControllerManager::TouchDevice::update(float deltaTime, const control
        }
        handleRotationForUntrackedHand(inputCalibrationData, hand, tracking.HandPoses[hand]);
    });

+   if (_parent.isHeadControllerMounted()) {
+       handleHeadPose(deltaTime, inputCalibrationData, tracking.HeadPose);
+   } else {
+       _poseStateMap[controller::HEAD].valid = false;
+   }
+
    using namespace controller;
    // Axes
    const auto& inputState = _parent._inputState;
@@ -269,7 +283,7 @@ void OculusControllerManager::TouchDevice::update(float deltaTime, const control
        if (inputState.Touches & pair.first) {
            _buttonPressedMap.insert(pair.second);
        }
    }

    // Haptics
    {
@@ -292,16 +306,38 @@ void OculusControllerManager::TouchDevice::focusOutEvent() {
    _buttonPressedMap.clear();
};

void OculusControllerManager::TouchDevice::handlePose(float deltaTime,
-                                                      const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand,
-                                                      const ovrPoseStatef& handPose) {
+                                                      const controller::InputCalibrationData& inputCalibrationData,
+                                                      ovrHandType hand, const ovrPoseStatef& handPose) {
    auto poseId = hand == ovrHand_Left ? controller::LEFT_HAND : controller::RIGHT_HAND;
    auto& pose = _poseStateMap[poseId];
    pose = ovrControllerPoseToHandPose(hand, handPose);
    // transform into avatar frame
    glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
    pose = pose.transform(controllerToAvatar);
+}
+
+void OculusControllerManager::TouchDevice::handleHeadPose(float deltaTime,
+                                                          const controller::InputCalibrationData& inputCalibrationData,
+                                                          const ovrPoseStatef& headPose) {
+   glm::mat4 mat = createMatFromQuatAndPos(toGlm(headPose.ThePose.Orientation),
+                                           toGlm(headPose.ThePose.Position));
+
+   //perform a 180 flip to make the HMD face the +z instead of -z, beacuse the head faces +z
+   glm::mat4 matYFlip = mat * Matrices::Y_180;
+   controller::Pose pose(extractTranslation(matYFlip),
+                         glmExtractRotation(matYFlip),
+                         toGlm(headPose.LinearVelocity), // XXX * matYFlip ?
+                         toGlm(headPose.AngularVelocity));
+
+   glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
+   glm::mat4 defaultHeadOffset = glm::inverse(inputCalibrationData.defaultCenterEyeMat) *
+       inputCalibrationData.defaultHeadMat;
+
+   controller::Pose hmdHeadPose = pose.transform(sensorToAvatar);
+
+   pose.valid = true;
+   _poseStateMap[controller::HEAD] = hmdHeadPose.postTransform(defaultHeadOffset);
}

void OculusControllerManager::TouchDevice::handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData,
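handleHeadPose() above is the heart of the Oculus side of this change: flip the raw HMD pose 180 degrees about Y so it faces +Z like the avatar's head, move it from sensor space into avatar space, and shift it from the eye-center onto the head joint. A trimmed sketch with plain glm values in place of the OVR types, assuming Pose::transform() left-multiplies and postTransform() right-multiplies as the names suggest; parameter names are illustrative and the real matrices come from the engine's calibration data:

#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

// sensorHeadMat: HMD pose in sensor space (faces -Z).
// avatarMat / sensorToWorldMat: calibration transforms supplied by the engine.
// defaultCenterEyeMat / defaultHeadMat: default-pose transforms of the mid-eye point and
// the head joint, used to shift the tracked eye pose onto the head joint.
glm::mat4 headControllerFromHmd(const glm::mat4& sensorHeadMat,
                                const glm::mat4& avatarMat, const glm::mat4& sensorToWorldMat,
                                const glm::mat4& defaultCenterEyeMat, const glm::mat4& defaultHeadMat) {
    const glm::mat4 Y_180 = glm::mat4_cast(glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f)));

    // 180 degree flip so the pose faces +Z (the avatar head convention) instead of -Z.
    glm::mat4 flipped = sensorHeadMat * Y_180;

    // sensor frame -> avatar frame.
    glm::mat4 sensorToAvatar = glm::inverse(avatarMat) * sensorToWorldMat;

    // eye-center pose -> head-joint pose.
    glm::mat4 defaultHeadOffset = glm::inverse(defaultCenterEyeMat) * defaultHeadMat;

    return sensorToAvatar * flipped * defaultHeadOffset;
}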
@@ -382,6 +418,7 @@ controller::Input::NamedVector OculusControllerManager::TouchDevice::getAvailabl

        makePair(LEFT_HAND, "LeftHand"),
        makePair(RIGHT_HAND, "RightHand"),
+       makePair(HEAD, "Head"),

        makePair(LEFT_PRIMARY_THUMB_TOUCH, "LeftPrimaryThumbTouch"),
        makePair(LEFT_SECONDARY_THUMB_TOUCH, "LeftSecondaryThumbTouch"),
@@ -28,6 +28,8 @@ public:
    const QString getName() const override { return NAME; }

    bool isHandController() const override { return _touch != nullptr; }
+   bool isHeadController() const override { return true; }
+   bool isHeadControllerMounted() const;
    QStringList getSubdeviceNames() override;

    bool activate() override;
@@ -75,8 +77,13 @@ private:

    private:
        void stopHapticPulse(bool leftHand);
-       void handlePose(float deltaTime, const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand, const ovrPoseStatef& handPose);
-       void handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData, ovrHandType hand, const ovrPoseStatef& handPose);
+       void handlePose(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
+                       ovrHandType hand, const ovrPoseStatef& handPose);
+       void handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData,
+                                           ovrHandType hand, const ovrPoseStatef& handPose);
+       void handleHeadPose(float deltaTime, const controller::InputCalibrationData& inputCalibrationData,
+                           const ovrPoseStatef& headPose);

        int _trackedControllers { 0 };

        // perform an action when the TouchDevice mutex is acquired.
|
||||||
bool isKeyboardVisible() override;
|
bool isKeyboardVisible() override;
|
||||||
|
|
||||||
// Possibly needs an additional thread for VR submission
|
// Possibly needs an additional thread for VR submission
|
||||||
int getRequiredThreadCount() const override;
|
int getRequiredThreadCount() const override;
|
||||||
|
|
||||||
protected:
|
protected:
|
||||||
bool internalActivate() override;
|
bool internalActivate() override;
|
||||||
|
|
|
@ -164,6 +164,14 @@ void ViveControllerManager::deactivate() {
|
||||||
_registeredWithInputMapper = false;
|
_registeredWithInputMapper = false;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
bool ViveControllerManager::isHeadControllerMounted() const {
|
||||||
|
if (_inputDevice && _inputDevice->isHeadControllerMounted()) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
vr::EDeviceActivityLevel activityLevel = _system->GetTrackedDeviceActivityLevel(vr::k_unTrackedDeviceIndex_Hmd);
|
||||||
|
return activityLevel == vr::k_EDeviceActivityLevel_UserInteraction;
|
||||||
|
}
|
||||||
|
|
||||||
void ViveControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
|
void ViveControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
|
||||||
|
|
||||||
if (!_system) {
|
if (!_system) {
|
||||||
|
|
|
@ -41,6 +41,8 @@ public:
|
||||||
const QString getName() const override { return NAME; }
|
const QString getName() const override { return NAME; }
|
||||||
|
|
||||||
bool isHandController() const override { return true; }
|
bool isHandController() const override { return true; }
|
||||||
|
bool isHeadController() const override { return true; }
|
||||||
|
bool isHeadControllerMounted() const;
|
||||||
|
|
||||||
bool activate() override;
|
bool activate() override;
|
||||||
void deactivate() override;
|
void deactivate() override;
|
||||||
|
@@ -54,6 +56,7 @@ private:
    class InputDevice : public controller::InputDevice {
    public:
        InputDevice(vr::IVRSystem*& system);
+       bool isHeadControllerMounted() const { return _overrideHead; }
    private:
        // Device functions
        controller::Input::NamedVector getAvailableInputs() const override;