mirror of
https://github.com/overte-org/overte.git
synced 2025-08-08 18:36:45 +02:00
work on fixing some head to eye offsets. MyHead::getCameraOrientation to MyHead::getHeadOrientation
This commit is contained in:
parent
c40a76f320
commit
3359a0a794
5 changed files with 57 additions and 47 deletions
|
@ -2091,7 +2091,7 @@ void Application::paintGL() {
|
||||||
_myCamera.setOrientation(glm::quat_cast(camMat));
|
_myCamera.setOrientation(glm::quat_cast(camMat));
|
||||||
} else {
|
} else {
|
||||||
_myCamera.setPosition(myAvatar->getDefaultEyePosition());
|
_myCamera.setPosition(myAvatar->getDefaultEyePosition());
|
||||||
_myCamera.setOrientation(myAvatar->getMyHead()->getCameraOrientation());
|
_myCamera.setOrientation(myAvatar->getMyHead()->getHeadOrientation());
|
||||||
}
|
}
|
||||||
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
|
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
|
||||||
if (isHMDMode()) {
|
if (isHMDMode()) {
|
||||||
|
@ -4047,9 +4047,9 @@ void Application::updateMyAvatarLookAtPosition() {
|
||||||
} else {
|
} else {
|
||||||
// I am not looking at anyone else, so just look forward
|
// I am not looking at anyone else, so just look forward
|
||||||
if (isHMD) {
|
if (isHMD) {
|
||||||
glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() *
|
glm::mat4 worldHeadMat = myAvatar->getSensorToWorldMatrix() *
|
||||||
myAvatar->getHeadControllerPoseInSensorFrame().getMatrix() * Matrices::Y_180;
|
myAvatar->getHeadControllerPoseInSensorFrame().getMatrix();
|
||||||
lookAtSpot = transformPoint(worldHMDMat, glm::vec3(0.0f, 0.0f, -TREE_SCALE));
|
lookAtSpot = transformPoint(worldHeadMat, glm::vec3(0.0f, 0.0f, TREE_SCALE));
|
||||||
} else {
|
} else {
|
||||||
lookAtSpot = myAvatar->getHead()->getEyePosition() +
|
lookAtSpot = myAvatar->getHead()->getEyePosition() +
|
||||||
(myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
|
(myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
|
||||||
|
|
|
@ -644,9 +644,12 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
|
||||||
|
|
||||||
_hmdSensorPosition = newHmdSensorPosition;
|
_hmdSensorPosition = newHmdSensorPosition;
|
||||||
_hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
|
_hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
|
||||||
|
// _headControllerFacing = getFacingDir2D(_hmdSensorOrientation);
|
||||||
auto headPose = _headControllerPoseInSensorFrameCache.get();
|
auto headPose = _headControllerPoseInSensorFrameCache.get();
|
||||||
if (headPose.isValid()) {
|
if (headPose.isValid()) {
|
||||||
_headControllerFacing = getFacingDir2D(headPose.rotation);
|
_headControllerFacing = getFacingDir2D(headPose.rotation);
|
||||||
|
} else {
|
||||||
|
_headControllerFacing = glm::vec2(1.0f, 0.0f);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1481,12 +1484,12 @@ void MyAvatar::updateMotors() {
|
||||||
if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
|
if (_motionBehaviors & AVATAR_MOTION_ACTION_MOTOR_ENABLED) {
|
||||||
if (_characterController.getState() == CharacterController::State::Hover ||
|
if (_characterController.getState() == CharacterController::State::Hover ||
|
||||||
_characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
|
_characterController.computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS) {
|
||||||
motorRotation = getMyHead()->getCameraOrientation();
|
motorRotation = getMyHead()->getHeadOrientation();
|
||||||
} else {
|
} else {
|
||||||
// non-hovering = walking: follow camera twist about vertical but not lift
|
// non-hovering = walking: follow camera twist about vertical but not lift
|
||||||
// so we decompose camera's rotation and store the twist part in motorRotation
|
// so we decompose camera's rotation and store the twist part in motorRotation
|
||||||
glm::quat liftRotation;
|
glm::quat liftRotation;
|
||||||
swingTwistDecomposition(getMyHead()->getCameraOrientation(), _worldUpDirection, liftRotation, motorRotation);
|
swingTwistDecomposition(getMyHead()->getHeadOrientation(), _worldUpDirection, liftRotation, motorRotation);
|
||||||
}
|
}
|
||||||
const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
|
const float DEFAULT_MOTOR_TIMESCALE = 0.2f;
|
||||||
const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
|
const float INVALID_MOTOR_TIMESCALE = 1.0e6f;
|
||||||
|
@ -1500,7 +1503,7 @@ void MyAvatar::updateMotors() {
|
||||||
}
|
}
|
||||||
if (_motionBehaviors & AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED) {
|
if (_motionBehaviors & AVATAR_MOTION_SCRIPTED_MOTOR_ENABLED) {
|
||||||
if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {
|
if (_scriptedMotorFrame == SCRIPTED_MOTOR_CAMERA_FRAME) {
|
||||||
motorRotation = getMyHead()->getCameraOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
motorRotation = getMyHead()->getHeadOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
||||||
} else if (_scriptedMotorFrame == SCRIPTED_MOTOR_AVATAR_FRAME) {
|
} else if (_scriptedMotorFrame == SCRIPTED_MOTOR_AVATAR_FRAME) {
|
||||||
motorRotation = getOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
motorRotation = getOrientation() * glm::angleAxis(PI, Vectors::UNIT_Y);
|
||||||
} else {
|
} else {
|
||||||
|
@ -1849,7 +1852,7 @@ void MyAvatar::updateOrientation(float deltaTime) {
|
||||||
if (getCharacterController()->getState() == CharacterController::State::Hover) {
|
if (getCharacterController()->getState() == CharacterController::State::Hover) {
|
||||||
|
|
||||||
// This is the direction the user desires to fly in.
|
// This is the direction the user desires to fly in.
|
||||||
glm::vec3 desiredFacing = getMyHead()->getCameraOrientation() * Vectors::UNIT_Z;
|
glm::vec3 desiredFacing = getMyHead()->getHeadOrientation() * Vectors::UNIT_Z;
|
||||||
desiredFacing.y = 0.0f;
|
desiredFacing.y = 0.0f;
|
||||||
|
|
||||||
// This is our reference frame, it is captured when the user begins to move.
|
// This is our reference frame, it is captured when the user begins to move.
|
||||||
|
@ -1888,8 +1891,9 @@ void MyAvatar::updateOrientation(float deltaTime) {
|
||||||
|
|
||||||
getHead()->setBasePitch(getHead()->getBasePitch() + getDriveKey(PITCH) * _pitchSpeed * deltaTime);
|
getHead()->setBasePitch(getHead()->getBasePitch() + getDriveKey(PITCH) * _pitchSpeed * deltaTime);
|
||||||
|
|
||||||
if (getHeadControllerPoseInAvatarFrame().isValid()) {
|
auto headPose = getHeadControllerPoseInAvatarFrame();
|
||||||
glm::quat localOrientation = getHeadControllerPoseInAvatarFrame().rotation;
|
if (headPose.isValid()) {
|
||||||
|
glm::quat localOrientation = headPose.rotation;
|
||||||
// these angles will be in radians
|
// these angles will be in radians
|
||||||
// ... so they need to be converted to degrees before we do math...
|
// ... so they need to be converted to degrees before we do math...
|
||||||
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
|
glm::vec3 euler = glm::eulerAngles(localOrientation) * DEGREES_PER_RADIAN;
|
||||||
|
@ -2003,11 +2007,14 @@ void MyAvatar::updatePosition(float deltaTime) {
|
||||||
}
|
}
|
||||||
|
|
||||||
// capture the head rotation, in sensor space, when the user first indicates they would like to move/fly.
|
// capture the head rotation, in sensor space, when the user first indicates they would like to move/fly.
|
||||||
if (!_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) > 0.1f || fabs(getDriveKey(TRANSLATE_X)) > 0.1f)) {
|
if (!_hoverReferenceCameraFacingIsCaptured &&
|
||||||
|
(fabs(getDriveKey(TRANSLATE_Z)) > 0.1f || fabs(getDriveKey(TRANSLATE_X)) > 0.1f)) {
|
||||||
_hoverReferenceCameraFacingIsCaptured = true;
|
_hoverReferenceCameraFacingIsCaptured = true;
|
||||||
// transform the camera facing vector into sensor space.
|
// transform the camera facing vector into sensor space.
|
||||||
_hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix), getMyHead()->getCameraOrientation() * Vectors::UNIT_Z);
|
_hoverReferenceCameraFacing = transformVectorFast(glm::inverse(_sensorToWorldMatrix),
|
||||||
} else if (_hoverReferenceCameraFacingIsCaptured && (fabs(getDriveKey(TRANSLATE_Z)) <= 0.1f && fabs(getDriveKey(TRANSLATE_X)) <= 0.1f)) {
|
getMyHead()->getHeadOrientation() * Vectors::UNIT_Z);
|
||||||
|
} else if (_hoverReferenceCameraFacingIsCaptured &&
|
||||||
|
(fabs(getDriveKey(TRANSLATE_Z)) <= 0.1f && fabs(getDriveKey(TRANSLATE_X)) <= 0.1f)) {
|
||||||
_hoverReferenceCameraFacingIsCaptured = false;
|
_hoverReferenceCameraFacingIsCaptured = false;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -2332,35 +2339,32 @@ glm::quat MyAvatar::getWorldBodyOrientation() const {
|
||||||
// old school meat hook style
|
// old school meat hook style
|
||||||
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
|
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
|
||||||
|
|
||||||
|
// HMD is in sensor space.
|
||||||
const glm::vec3 headPosition = getHeadControllerPoseInSensorFrame().translation;
|
const glm::vec3 headPosition = getHeadControllerPoseInSensorFrame().translation;
|
||||||
const glm::quat headOrientation = getHeadControllerPoseInSensorFrame().rotation * Quaternions::Y_180;
|
const glm::quat headOrientation = getHeadControllerPoseInSensorFrame().rotation * Quaternions::Y_180;
|
||||||
const glm::quat headOrientationYawOnly = cancelOutRollAndPitch(headOrientation);
|
const glm::quat headOrientationYawOnly = cancelOutRollAndPitch(headOrientation);
|
||||||
|
|
||||||
const Rig& rig = _skeletonModel->getRig();
|
const Rig& rig = _skeletonModel->getRig();
|
||||||
int rightEyeIndex = rig.indexOfJoint("RightEye");
|
int headIndex = rig.indexOfJoint("Head");
|
||||||
int leftEyeIndex = rig.indexOfJoint("LeftEye");
|
|
||||||
int neckIndex = rig.indexOfJoint("Neck");
|
int neckIndex = rig.indexOfJoint("Neck");
|
||||||
int hipsIndex = rig.indexOfJoint("Hips");
|
int hipsIndex = rig.indexOfJoint("Hips");
|
||||||
|
|
||||||
glm::vec3 rigMiddleEyePos = DEFAULT_AVATAR_MIDDLE_EYE_POS;
|
glm::vec3 rigHeadPos = headIndex != -1 ? rig.getAbsoluteDefaultPose(headIndex).trans() : DEFAULT_AVATAR_HEAD_POS;
|
||||||
if (leftEyeIndex >= 0 && rightEyeIndex >= 0) {
|
|
||||||
rigMiddleEyePos = (rig.getAbsoluteDefaultPose(leftEyeIndex).trans() + rig.getAbsoluteDefaultPose(rightEyeIndex).trans()) / 2.0f;
|
|
||||||
}
|
|
||||||
glm::vec3 rigNeckPos = neckIndex != -1 ? rig.getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
|
glm::vec3 rigNeckPos = neckIndex != -1 ? rig.getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
|
||||||
glm::vec3 rigHipsPos = hipsIndex != -1 ? rig.getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
|
glm::vec3 rigHipsPos = hipsIndex != -1 ? rig.getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
|
||||||
|
|
||||||
glm::vec3 localEyes = (rigMiddleEyePos - rigHipsPos);
|
glm::vec3 localHead = (rigHeadPos - rigHipsPos);
|
||||||
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
|
glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
|
||||||
|
|
||||||
// apply simplistic head/neck model
|
// apply simplistic head/neck model
|
||||||
// figure out where the avatar body should be by applying offsets from the avatar's neck & head joints.
|
// figure out where the avatar body should be by applying offsets from the avatar's neck & head joints.
|
||||||
|
|
||||||
// eyeToNeck offset is relative to head's full orientation,
|
// eyeToNeck offset is relative to the full HMD orientation.
|
||||||
// while neckToRoot offset is only relative to head's yaw.
|
// while neckToRoot offset is only relative to the HMD's yaw.
|
||||||
// Y_180 is necessary because rig is z forward and headOrientation is -z forward
|
// Y_180 is necessary because rig is z forward and hmdOrientation is -z forward
|
||||||
glm::vec3 eyeToNeck = headOrientation * Quaternions::Y_180 * (localNeck - localEyes);
|
glm::vec3 headToNeck = headOrientation * Quaternions::Y_180 * (localNeck - localHead);
|
||||||
glm::vec3 neckToRoot = headOrientationYawOnly * Quaternions::Y_180 * -localNeck;
|
glm::vec3 neckToRoot = headOrientationYawOnly * Quaternions::Y_180 * -localNeck;
|
||||||
glm::vec3 bodyPos = headPosition + eyeToNeck + neckToRoot;
|
glm::vec3 bodyPos = headPosition + headToNeck + neckToRoot;
|
||||||
|
|
||||||
return createMatFromQuatAndPos(headOrientationYawOnly, bodyPos);
|
return createMatFromQuatAndPos(headOrientationYawOnly, bodyPos);
|
||||||
}
|
}
|
||||||
|
@ -2478,7 +2482,7 @@ bool MyAvatar::FollowHelper::shouldActivateRotation(const MyAvatar& myAvatar, co
|
||||||
} else {
|
} else {
|
||||||
const float FOLLOW_ROTATION_THRESHOLD = cosf(PI / 6.0f); // 30 degrees
|
const float FOLLOW_ROTATION_THRESHOLD = cosf(PI / 6.0f); // 30 degrees
|
||||||
glm::vec2 bodyFacing = getFacingDir2D(currentBodyMatrix);
|
glm::vec2 bodyFacing = getFacingDir2D(currentBodyMatrix);
|
||||||
return glm::dot(myAvatar.getHeadControllerFacingMovingAverage() * -1.0f, bodyFacing) < FOLLOW_ROTATION_THRESHOLD;
|
return glm::dot(-myAvatar.getHeadControllerFacingMovingAverage(), bodyFacing) < FOLLOW_ROTATION_THRESHOLD;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -26,19 +26,20 @@ using namespace std;
|
||||||
MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
|
MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
|
||||||
}
|
}
|
||||||
|
|
||||||
glm::quat MyHead::getCameraOrientation() const {
|
glm::quat MyHead::getHeadOrientation() const {
|
||||||
// NOTE: Head::getCameraOrientation() is not used for orienting the camera "view" while in Oculus mode, so
|
// NOTE: Head::getHeadOrientation() is not used for orienting the camera "view" while in Oculus mode, so
|
||||||
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how
|
// you may wonder why this code is here. This method will be called while in Oculus mode to determine how
|
||||||
// to change the driving direction while in Oculus mode. It is used to support driving toward where your
|
// to change the driving direction while in Oculus mode. It is used to support driving toward where your
|
||||||
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
|
// head is looking. Note that in oculus mode, your actual camera view and where your head is looking is not
|
||||||
// always the same.
|
// always the same.
|
||||||
if (qApp->isHMDMode()) {
|
|
||||||
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
|
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
|
||||||
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
|
auto headPose = myAvatar->getHeadControllerPoseInWorldFrame();
|
||||||
} else {
|
if (headPose.isValid()) {
|
||||||
Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
|
return headPose.rotation * Quaternions::Y_180;
|
||||||
return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
return myAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(_basePitch, 0.0f, 0.0f)));
|
||||||
}
|
}
|
||||||
|
|
||||||
void MyHead::simulate(float deltaTime) {
|
void MyHead::simulate(float deltaTime) {
|
||||||
|
|
|
@ -18,7 +18,7 @@ public:
|
||||||
explicit MyHead(MyAvatar* owningAvatar);
|
explicit MyHead(MyAvatar* owningAvatar);
|
||||||
|
|
||||||
/// \return orientationBody * orientationBasePitch
|
/// \return orientationBody * orientationBasePitch
|
||||||
glm::quat getCameraOrientation() const;
|
glm::quat getHeadOrientation() const;
|
||||||
void simulate(float deltaTime) override;
|
void simulate(float deltaTime) override;
|
||||||
|
|
||||||
private:
|
private:
|
||||||
|
|
|
@ -326,19 +326,24 @@ void OculusControllerManager::TouchDevice::handlePose(float deltaTime,
|
||||||
void OculusControllerManager::TouchDevice::handleHeadPose(float deltaTime,
|
void OculusControllerManager::TouchDevice::handleHeadPose(float deltaTime,
|
||||||
const controller::InputCalibrationData& inputCalibrationData,
|
const controller::InputCalibrationData& inputCalibrationData,
|
||||||
const ovrPoseStatef& headPose) {
|
const ovrPoseStatef& headPose) {
|
||||||
auto poseId = controller::HEAD;
|
glm::mat4 mat = createMatFromQuatAndPos(toGlm(headPose.ThePose.Orientation),
|
||||||
auto& pose = _poseStateMap[poseId];
|
toGlm(headPose.ThePose.Position));
|
||||||
|
|
||||||
|
//perform a 180 flip to make the HMD face the +z instead of -z, because the head faces +z
|
||||||
|
glm::mat4 matYFlip = mat * Matrices::Y_180;
|
||||||
|
controller::Pose pose(extractTranslation(matYFlip),
|
||||||
|
glmExtractRotation(matYFlip),
|
||||||
|
toGlm(headPose.LinearVelocity), // XXX * matYFlip ?
|
||||||
|
toGlm(headPose.AngularVelocity));
|
||||||
|
|
||||||
|
glm::mat4 sensorToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
|
||||||
|
glm::mat4 defaultHeadOffset = glm::inverse(inputCalibrationData.defaultCenterEyeMat) *
|
||||||
|
inputCalibrationData.defaultHeadMat;
|
||||||
|
|
||||||
|
controller::Pose hmdHeadPose = pose.transform(sensorToAvatar);
|
||||||
|
|
||||||
static const glm::quat yFlip = glm::angleAxis(PI, Vectors::UNIT_Y);
|
|
||||||
pose.translation = toGlm(headPose.ThePose.Position);
|
|
||||||
pose.rotation = toGlm(headPose.ThePose.Orientation) * yFlip;
|
|
||||||
pose.angularVelocity = toGlm(headPose.AngularVelocity);
|
|
||||||
pose.velocity = toGlm(headPose.LinearVelocity);
|
|
||||||
pose.valid = true;
|
pose.valid = true;
|
||||||
|
_poseStateMap[controller::HEAD] = hmdHeadPose.postTransform(defaultHeadOffset);
|
||||||
// transform into avatar frame
|
|
||||||
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
|
|
||||||
pose = pose.transform(controllerToAvatar);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
void OculusControllerManager::TouchDevice::handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData,
|
void OculusControllerManager::TouchDevice::handleRotationForUntrackedHand(const controller::InputCalibrationData& inputCalibrationData,
|
||||||
|
|
Loading…
Reference in a new issue