Mirror of https://github.com/overte-org/overte.git, synced 2025-04-19 13:43:49 +02:00
Merge pull request #6895 from hyperlogic/tony/hmd-recenter-on-rotation
MyAvatar: Recenter when the head turns away from the hips
This commit is contained in: commit 86162094c1

9 changed files with 344 additions and 113 deletions
@@ -221,15 +221,13 @@ QByteArray MyAvatar::toByteArray(bool cullSmallChanges, bool sendAll) {
}

void MyAvatar::reset(bool andReload) {

    if (andReload) {
        qApp->setRawAvatarUpdateThreading(false);
    }

    // Reset dynamic state.
    _wasPushing = _isPushing = _isBraking = _billboardValid = false;
    _followVelocity = Vectors::ZERO;
    _followSpeed = 0.0f;
    _follow.deactivate();
    _skeletonModel.reset();
    getHead()->reset();
    _targetVelocity = glm::vec3(0.0f);
@@ -262,9 +260,22 @@ void MyAvatar::reset(bool andReload) {

void MyAvatar::update(float deltaTime) {

    // update moving average of HMD facing in xz plane.
    const float HMD_FACING_TIMESCALE = 4.0f; // very slow average
    float tau = deltaTime / HMD_FACING_TIMESCALE;
    _hmdSensorFacingMovingAverage = lerp(_hmdSensorFacingMovingAverage, _hmdSensorFacing, tau);

#ifdef DEBUG_DRAW_HMD_MOVING_AVERAGE
    glm::vec3 p = transformPoint(getSensorToWorldMatrix(), _hmdSensorPosition + glm::vec3(_hmdSensorFacingMovingAverage.x, 0.0f, _hmdSensorFacingMovingAverage.y));
    DebugDraw::getInstance().addMarker("facing-avg", getOrientation(), p, glm::vec4(1.0f));
    p = transformPoint(getSensorToWorldMatrix(), _hmdSensorPosition + glm::vec3(_hmdSensorFacing.x, 0.0f, _hmdSensorFacing.y));
    DebugDraw::getInstance().addMarker("facing", getOrientation(), p, glm::vec4(1.0f));
#endif
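
The lerp above is a simple exponential moving average: each frame the stored facing vector moves toward the newest sample by the fraction tau = deltaTime / HMD_FACING_TIMESCALE, so the 4-second timescale makes the average trail the instantaneous HMD facing by several seconds (at 90 Hz, tau is roughly 0.003). A minimal standalone sketch of the same smoothing step, assuming only GLM; the function name below is illustrative, not part of the change:

    // Sketch: exponential smoothing of a 2D facing vector, as in MyAvatar::update() above.
    #include <glm/glm.hpp>

    glm::vec2 smoothFacing(const glm::vec2& average, const glm::vec2& sample, float deltaTime) {
        const float HMD_FACING_TIMESCALE = 4.0f;       // seconds; larger means a slower, smoother average
        float tau = deltaTime / HMD_FACING_TIMESCALE;  // fraction of the remaining gap closed this frame
        return glm::mix(average, sample, tau);         // GLM's mix() is the lerp() used above
    }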

    if (_goToPending) {
        setPosition(_goToPosition);
        setOrientation(_goToOrientation);
        _hmdSensorFacingMovingAverage = _hmdSensorFacing; // reset moving average
        _goToPending = false;
        // updateFromHMDSensorMatrix (called from paintGL) expects that the sensorToWorldMatrix is updated for any position changes
        // that happen between render and Application::update (which calls updateSensorToWorldMatrix to do so).
@@ -365,46 +376,7 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
    _hmdSensorMatrix = hmdSensorMatrix;
    _hmdSensorPosition = extractTranslation(hmdSensorMatrix);
    _hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
}

void MyAvatar::updateHMDFollowVelocity() {
    // compute offset to body's target position (in sensor-frame)
    auto sensorBodyMatrix = deriveBodyFromHMDSensor();
    glm::vec3 offset = extractTranslation(sensorBodyMatrix) - extractTranslation(_bodySensorMatrix);
    _followOffsetDistance = glm::length(offset);

    const float FOLLOW_TIMESCALE = 0.5f;
    const float FOLLOW_THRESHOLD_SPEED = 0.2f;
    const float FOLLOW_MIN_DISTANCE = 0.01f;
    const float FOLLOW_THRESHOLD_DISTANCE = 0.2f;
    const float FOLLOW_MAX_IDLE_DISTANCE = 0.1f;

    bool hmdIsAtRest = _hmdAtRestDetector.update(_hmdSensorPosition, _hmdSensorOrientation);

    _followOffsetDistance = glm::length(offset);
    if (_followOffsetDistance < FOLLOW_MIN_DISTANCE) {
        // close enough
        _followOffsetDistance = 0.0f;
    } else {
        bool avatarIsMoving = glm::length(_velocity - _followVelocity) > FOLLOW_THRESHOLD_SPEED;
        bool shouldFollow = (hmdIsAtRest || avatarIsMoving) && _followOffsetDistance > FOLLOW_MAX_IDLE_DISTANCE;

        glm::vec3 truncatedOffset = offset;
        if (truncatedOffset.y < 0.0f) {
            truncatedOffset.y = 0.0f;
        }
        float truncatedDistance = glm::length(truncatedOffset);
        bool needsNewSpeed = truncatedDistance > FOLLOW_THRESHOLD_DISTANCE;
        if (needsNewSpeed || (shouldFollow && _followSpeed == 0.0f)) {
            // compute new speed
            _followSpeed = _followOffsetDistance / FOLLOW_TIMESCALE;
        }
        if (_followSpeed > 0.0f) {
            // to compute new velocity we must rotate offset into the world-frame
            glm::quat sensorToWorldRotation = glm::normalize(glm::quat_cast(_sensorToWorldMatrix));
            _followVelocity = _followSpeed * glm::normalize(sensorToWorldRotation * offset);
        }
    }
    _hmdSensorFacing = getFacingDir2D(_hmdSensorOrientation);
}

// best called at end of main loop, just before rendering.
@@ -1057,63 +1029,21 @@ void MyAvatar::prepareForPhysicsSimulation() {
    _characterController.setTargetVelocity(getTargetVelocity());
    _characterController.setPositionAndOrientation(getPosition(), getOrientation());
    if (qApp->isHMDMode()) {
        updateHMDFollowVelocity();
    } else if (_followSpeed > 0.0f) {
        _followVelocity = Vectors::ZERO;
        _followSpeed = 0.0f;
        _follow.prePhysicsUpdate(*this, deriveBodyFromHMDSensor(), _bodySensorMatrix);
    } else {
        _follow.deactivate();
    }
    _characterController.setFollowVelocity(_followVelocity);
}

void MyAvatar::harvestResultsFromPhysicsSimulation(float deltaType) {
void MyAvatar::harvestResultsFromPhysicsSimulation(float deltaTime) {
    glm::vec3 position = getPosition();
    glm::quat orientation = getOrientation();
    _characterController.getPositionAndOrientation(position, orientation);
    nextAttitude(position, orientation);
    if (_followSpeed > 0.0f) {
        adjustSensorTransform();
        setVelocity(_characterController.getLinearVelocity() + _followVelocity);
    } else {
        setVelocity(_characterController.getLinearVelocity());
    }
    _bodySensorMatrix = _follow.postPhysicsUpdate(*this, _bodySensorMatrix);

    // now that physics has adjusted our position, we can update attachments.
    Avatar::simulateAttachments(deltaType);
}

void MyAvatar::adjustSensorTransform() {
    // compute blendFactor of latest hmdShift
    // which we'll use to blend the rotation part
    float linearDistance = _characterController.getFollowTime() * _followSpeed;
    float blendFactor = linearDistance < _followOffsetDistance ? linearDistance / _followOffsetDistance : 1.0f;

    auto newBodySensorMatrix = deriveBodyFromHMDSensor();
    auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
    glm::quat finalBodyRotation = glm::normalize(glm::quat_cast(worldBodyMatrix));
    if (blendFactor >= 0.99f) {
        // the "adjustment" is more or less complete so stop following
        _followVelocity = Vectors::ZERO;
        _followSpeed = 0.0f;
        // and slam the body's transform anyway to eliminate any slight errors
        glm::vec3 finalBodyPosition = extractTranslation(worldBodyMatrix);
        nextAttitude(finalBodyPosition, finalBodyRotation);
        _bodySensorMatrix = newBodySensorMatrix;
    } else {
        // physics already did the positional blending for us
        glm::vec3 newBodyPosition = getPosition();
        // but the rotational part must be done manually
        glm::quat newBodyRotation = glm::normalize(safeMix(getOrientation(), finalBodyRotation, blendFactor));
        nextAttitude(newBodyPosition, newBodyRotation);

        // interp sensor matrix toward the desired
        glm::vec3 prevPosition = extractTranslation(_bodySensorMatrix);
        glm::quat prevRotation = glm::normalize(glm::quat_cast(_bodySensorMatrix));
        glm::vec3 nextPosition = extractTranslation(newBodySensorMatrix);
        glm::quat nextRotation = glm::normalize(glm::quat_cast(newBodySensorMatrix));
        _bodySensorMatrix = createMatFromQuatAndPos(
            glm::normalize(safeMix(prevRotation, nextRotation, blendFactor)),
            lerp(prevPosition, nextPosition, blendFactor));
    }
    Avatar::simulateAttachments(deltaTime);
}

QString MyAvatar::getScriptedMotorFrame() const {
@@ -1831,3 +1761,78 @@ void audioListenModeFromScriptValue(const QScriptValue& object, AudioListenerMod
void MyAvatar::lateUpdatePalms() {
    Avatar::updatePalms();
}


static const float FOLLOW_TIME = 0.5f;

void MyAvatar::FollowHelper::deactivate() {
    _timeRemaining = 0.0f;
}

void MyAvatar::FollowHelper::activate() {
    // TODO: Perhaps the follow time should be proportional to the displacement.
    _timeRemaining = FOLLOW_TIME;
}

bool MyAvatar::FollowHelper::isActive() const {
    return _timeRemaining > 0.0f;
}

bool MyAvatar::FollowHelper::shouldActivate(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const {

    const float FOLLOW_ROTATION_THRESHOLD = cosf(PI / 4.0f);

    glm::vec2 bodyFacing = getFacingDir2D(currentBodyMatrix);
    if (glm::dot(myAvatar.getHMDSensorFacingMovingAverage(), bodyFacing) < FOLLOW_ROTATION_THRESHOLD) {
        return true;
    }

    const float CYLINDER_TOP = 0.1f;
    const float CYLINDER_BOTTOM = -0.5f;
    const float CYLINDER_RADIUS = 0.15f;

    glm::vec3 offset = extractTranslation(desiredBodyMatrix) - extractTranslation(currentBodyMatrix);
    glm::vec3 radialOffset(offset.x, 0.0f, offset.z);
    float radialDistance = glm::length(radialOffset);

    return (offset.y > CYLINDER_TOP) || (offset.y < CYLINDER_BOTTOM) || (radialDistance > CYLINDER_RADIUS);
}
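
shouldActivate() above fires on either of two conditions: the smoothed HMD facing has swung more than 45 degrees away from the body's facing (the dot product drops below cosf(PI / 4.0f), about 0.707), or the desired body position has drifted outside a small cylinder around the current body position (0.1 m up, 0.5 m down, 0.15 m radius). A standalone sketch of the same test, assuming only GLM; the function and parameter names are illustrative:

    // Sketch: the recenter trigger, restated outside the FollowHelper class.
    // facingHMD / facingBody are unit vectors in the XZ plane; offset = desired minus current body position.
    #include <glm/glm.hpp>
    #include <cmath>

    bool shouldRecenter(const glm::vec2& facingHMD, const glm::vec2& facingBody, const glm::vec3& offset) {
        const float PI_F = 3.14159265f;
        const float ROTATION_THRESHOLD = cosf(PI_F / 4.0f);  // ~0.707, i.e. a 45 degree swing
        if (glm::dot(facingHMD, facingBody) < ROTATION_THRESHOLD) {
            return true;  // the head has turned far enough away from the hips
        }
        const float CYLINDER_TOP = 0.1f;      // meters above the current body position
        const float CYLINDER_BOTTOM = -0.5f;  // meters below
        const float CYLINDER_RADIUS = 0.15f;  // meters in the horizontal plane
        float radialDistance = glm::length(glm::vec2(offset.x, offset.z));
        return (offset.y > CYLINDER_TOP) || (offset.y < CYLINDER_BOTTOM) || (radialDistance > CYLINDER_RADIUS);
    }

For example, an HMD facing of (0, -1) against a body facing of (0.8, -0.6) gives a dot product of 0.6, which is below 0.707, so a recenter would start.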

void MyAvatar::FollowHelper::prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) {
    _desiredBodyMatrix = desiredBodyMatrix;
    if (!isActive() && shouldActivate(myAvatar, desiredBodyMatrix, currentBodyMatrix)) {
        activate();
    }

    if (isActive()) {
        glm::mat4 desiredWorldMatrix = myAvatar.getSensorToWorldMatrix() * _desiredBodyMatrix;
        myAvatar.getCharacterController()->setFollowParameters(desiredWorldMatrix, _timeRemaining);
    } else {
        glm::mat4 currentWorldMatrix = myAvatar.getSensorToWorldMatrix() * currentBodyMatrix;
        myAvatar.getCharacterController()->setFollowParameters(currentWorldMatrix, 0.0f);
    }
}

glm::mat4 MyAvatar::FollowHelper::postPhysicsUpdate(const MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix) {
    if (isActive()) {
        float dt = myAvatar.getCharacterController()->getFollowTime();
        _timeRemaining -= dt;

        // apply follow displacement to the body matrix.
        glm::vec3 worldLinearDisplacement = myAvatar.getCharacterController()->getFollowLinearDisplacement();
        glm::quat worldAngularDisplacement = myAvatar.getCharacterController()->getFollowAngularDisplacement();
        glm::quat sensorToWorld = glmExtractRotation(myAvatar.getSensorToWorldMatrix());
        glm::quat worldToSensor = glm::inverse(sensorToWorld);

        glm::vec3 sensorLinearDisplacement = worldToSensor * worldLinearDisplacement;
        glm::quat sensorAngularDisplacement = worldToSensor * worldAngularDisplacement * sensorToWorld;

        glm::mat4 newBodyMat = createMatFromQuatAndPos(sensorAngularDisplacement * glmExtractRotation(currentBodyMatrix),
                                                       sensorLinearDisplacement + extractTranslation(currentBodyMatrix));
        return newBodyMat;

    } else {
        return currentBodyMatrix;
    }
}
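
postPhysicsUpdate() re-expresses the displacement the physics step produced in world space back in the avatar's sensor space: the translation is rotated by the inverse sensor-to-world rotation, and the rotation is conjugated by it, before both are applied to the body matrix. A small sketch of that frame change, assuming GLM's quaternion support; the struct and function names are illustrative:

    // Sketch: converting a world-space displacement into sensor space, as above.
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    struct Displacement {
        glm::vec3 linear;
        glm::quat angular;
    };

    Displacement worldToSensorFrame(const Displacement& world, const glm::quat& sensorToWorld) {
        glm::quat worldToSensor = glm::inverse(sensorToWorld);
        Displacement sensor;
        sensor.linear = worldToSensor * world.linear;                    // rotate the translation
        sensor.angular = worldToSensor * world.angular * sensorToWorld;  // conjugate the rotation
        return sensor;
    }

The conjugation worldToSensor * q * sensorToWorld expresses the same physical rotation relative to sensor-frame axes, which is why the angular and linear parts are handled separately above.
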
@@ -96,8 +96,10 @@ public:
    const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
    const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
    const glm::quat& getHMDSensorOrientation() const { return _hmdSensorOrientation; }
    const glm::vec2& getHMDSensorFacingMovingAverage() const { return _hmdSensorFacingMovingAverage; }
    glm::mat4 getSensorToWorldMatrix() const;


    // Pass a recent sample of the HMD to the avatar.
    // This can also update the avatar's position to follow the HMD
    // as it moves through the world.
@@ -204,10 +206,10 @@ public:
    virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData) override;

    MyCharacterController* getCharacterController() { return &_characterController; }
    const MyCharacterController* getCharacterController() const { return &_characterController; }

    void prepareForPhysicsSimulation();
    void harvestResultsFromPhysicsSimulation(float deltaTime);
    void adjustSensorTransform();

    const QString& getCollisionSoundURL() { return _collisionSoundURL; }
    void setCollisionSoundURL(const QString& url);
@@ -295,11 +297,6 @@ private:
                          float scale = 1.0f, bool isSoft = false,
                          bool allowDuplicates = false, bool useSaved = true) override;

    //void beginFollowingHMD();
    //bool shouldFollowHMD() const;
    //void followHMD(float deltaTime);
    void updateHMDFollowVelocity();

    bool cameraInsideHead() const;

    // These are made private for MyAvatar so that you will use the "use" methods instead
@@ -367,6 +364,8 @@ private:
    glm::mat4 _hmdSensorMatrix;
    glm::quat _hmdSensorOrientation;
    glm::vec3 _hmdSensorPosition;
    glm::vec2 _hmdSensorFacing; // facing vector in xz plane
    glm::vec2 _hmdSensorFacingMovingAverage { 0, 0 }; // facing vector in xz plane

    // cache of the current body position and orientation of the avatar's body,
    // in sensor space.
@@ -375,9 +374,18 @@ private:
    // used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
    glm::mat4 _sensorToWorldMatrix;

    glm::vec3 _followVelocity { Vectors::ZERO };
    float _followSpeed { 0.0f };
    float _followOffsetDistance { 0.0f };
    struct FollowHelper {
        glm::mat4 _desiredBodyMatrix;
        float _timeRemaining { 0.0f };

        void deactivate();
        void activate();
        bool isActive() const;
        bool shouldActivate(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const;
        void prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& bodySensorMatrix, const glm::mat4& currentBodyMatrix);
        glm::mat4 postPhysicsUpdate(const MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix);
    };
    FollowHelper _follow;

    bool _goToPending;
    glm::vec3 _goToPosition;
@@ -32,4 +32,54 @@ inline btQuaternion glmToBullet(const glm::quat& g) {
    return btQuaternion(g.x, g.y, g.z, g.w);
}

inline btMatrix3x3 glmToBullet(const glm::mat3& m) {
    return btMatrix3x3(m[0][0], m[1][0], m[2][0],
                       m[0][1], m[1][1], m[2][1],
                       m[0][2], m[1][2], m[2][2]);
}

// btTransform does not contain a full 4x4 matrix, so this transform is lossy.
// Affine transformations are OK but perspective transformations are not.
inline btTransform glmToBullet(const glm::mat4& m) {
    glm::mat3 m3(m);
    return btTransform(glmToBullet(m3), glmToBullet(glm::vec3(m[3][0], m[3][1], m[3][2])));
}

inline glm::mat4 bulletToGLM(const btTransform& t) {
    glm::mat4 m(glm::mat4::_null);

    const btMatrix3x3& basis = t.getBasis();
    // copy over 3x3 part
    for (int r = 0; r < 3; r++) {
        for (int c = 0; c < 3; c++) {
            m[c][r] = basis[r][c];
        }
    }

    // copy translation part
    btVector3 origin = t.getOrigin();
    m[3][0] = origin.getX();
    m[3][1] = origin.getY();
    m[3][2] = origin.getZ();

    // set last row
    m[0][3] = 0.0f;
    m[1][3] = 0.0f;
    m[2][3] = 0.0f;
    m[3][3] = 1.0f;
    return m;
}
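
Because btTransform stores only a 3x3 basis and an origin, these two converters round-trip rotation and translation but drop any perspective terms, as the comment above notes. A hedged usage sketch, assuming GLM, Bullet's LinearMath, and this header are available; the function name is illustrative:

    // Sketch: round-tripping an affine transform through glmToBullet() / bulletToGLM().
    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>
    #include <LinearMath/btTransform.h>
    #include "BulletUtil.h"  // the conversions defined above; the include path depends on the build setup

    void roundTripExample() {
        // an affine transform: rotate 90 degrees about Y, then translate
        glm::mat4 gM = glm::translate(glm::mat4(1.0f), glm::vec3(1.0f, 2.0f, 3.0f)) *
                       glm::rotate(glm::mat4(1.0f), glm::radians(90.0f), glm::vec3(0.0f, 1.0f, 0.0f));

        btTransform bT = glmToBullet(gM);  // lossy only for non-affine input
        glm::mat4 back = bulletToGLM(bT);  // rotation and translation are recovered

        // 'back' matches 'gM' up to floating point error; a projective matrix would not survive,
        // because btTransform has no bottom row to hold it.
        (void)back;
    }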

inline btVector3 rotateVector(const btQuaternion& q, const btVector3& vec) {
    return glmToBullet(bulletToGLM(q) * bulletToGLM(vec));
}

inline btVector3 clampLength(const btVector3& v, const float& maxLength) {
    if (v.length() > maxLength) {
        return v.normalized() * maxLength;
    } else {
        return v;
    }
}
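
clampLength() is what CharacterController::playerStep() later uses to cap how far the body may be teleported in one substep. A brief illustrative call, assuming the function above is in scope; the helper name is made up for the example:

    // Sketch: capping a per-substep displacement, as the follow code in playerStep() does.
    #include <LinearMath/btVector3.h>

    btVector3 capStep(const btVector3& velocity, float dt, float maxDisplacement) {
        // displacement this substep, limited so a large velocity cannot tunnel through geometry
        return clampLength(velocity * dt, maxDisplacement);
    }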

#endif // hifi_BulletUtil_h
@@ -13,7 +13,6 @@

#include <NumericalConstants.h>

#include "BulletUtil.h"
#include "PhysicsCollisionGroups.h"
#include "ObjectMotionState.h"
@@ -21,7 +20,6 @@ const btVector3 LOCAL_UP_AXIS(0.0f, 1.0f, 0.0f);
const float JUMP_SPEED = 3.5f;
const float MAX_FALL_HEIGHT = 20.0f;


// helper class for simple ray-traces from character
class ClosestNotMe : public btCollisionWorld::ClosestRayResultCallback {
public:
@@ -50,7 +48,8 @@ CharacterController::CharacterController() {
    _floorDistance = MAX_FALL_HEIGHT;

    _walkVelocity.setValue(0.0f, 0.0f, 0.0f);
    _followVelocity.setValue(0.0f, 0.0f, 0.0f);
    _followDesiredBodyTransform.setIdentity();
    _followTimeRemaining = 0.0f;
    _jumpSpeed = JUMP_SPEED;
    _isOnGround = false;
    _isJumping = false;
@@ -59,6 +58,8 @@ CharacterController::CharacterController() {
    _isPushingUp = false;
    _jumpToHoverStart = 0;
    _followTime = 0.0f;
    _followLinearDisplacement = btVector3(0, 0, 0);
    _followAngularDisplacement = btQuaternion::getIdentity();

    _pendingFlags = PENDING_FLAG_UPDATE_SHAPE;
@@ -189,12 +190,40 @@ void CharacterController::playerStep(btCollisionWorld* dynaWorld, btScalar dt) {
        }
    }

    // Rather than add _followVelocity to the velocity of the RigidBody, we explicitly teleport
    // the RigidBody forward according to the formula: distance = rate * time
    if (_followVelocity.length2() > 0.0f) {
    // Dynamically compute a follow velocity to move this body toward the _followDesiredBodyTransform.
    // Rather than add this velocity to the velocity of the RigidBody, we explicitly teleport the RigidBody towards its goal.
    // This mirrors the computation done in MyAvatar::FollowHelper::postPhysicsUpdate().
    // These two computations must be kept in sync.
    const float MINIMUM_TIME_REMAINING = 0.005f;
    const float MAX_DISPLACEMENT = 0.5f * _radius;
    _followTimeRemaining -= dt;
    if (_followTimeRemaining >= MINIMUM_TIME_REMAINING) {
        btTransform bodyTransform = _rigidBody->getWorldTransform();
        bodyTransform.setOrigin(bodyTransform.getOrigin() + dt * _followVelocity);
        _rigidBody->setWorldTransform(bodyTransform);

        btVector3 startPos = bodyTransform.getOrigin();
        btVector3 deltaPos = _followDesiredBodyTransform.getOrigin() - startPos;
        btVector3 vel = deltaPos / _followTimeRemaining;
        btVector3 linearDisplacement = clampLength(vel * dt, MAX_DISPLACEMENT); // clamp displacement to prevent tunneling.
        btVector3 endPos = startPos + linearDisplacement;

        btQuaternion startRot = bodyTransform.getRotation();
        glm::vec2 currentFacing = getFacingDir2D(bulletToGLM(startRot));
        glm::vec2 currentRight(currentFacing.y, -currentFacing.x);
        glm::vec2 desiredFacing = getFacingDir2D(bulletToGLM(_followDesiredBodyTransform.getRotation()));
        float deltaAngle = acosf(glm::clamp(glm::dot(currentFacing, desiredFacing), -1.0f, 1.0f));
        float angularSpeed = deltaAngle / _followTimeRemaining;
        float sign = copysignf(1.0f, glm::dot(desiredFacing, currentRight));
        btQuaternion angularDisplacement = btQuaternion(btVector3(0.0f, 1.0f, 0.0f), sign * angularSpeed * dt);
        btQuaternion endRot = angularDisplacement * startRot;

        // in order to accumulate displacement of avatar position, we need to take _shapeLocalOffset into account.
        btVector3 shapeLocalOffset = glmToBullet(_shapeLocalOffset);
        btVector3 swingDisplacement = rotateVector(endRot, -shapeLocalOffset) - rotateVector(startRot, -shapeLocalOffset);

        _followLinearDisplacement = linearDisplacement + swingDisplacement + _followLinearDisplacement;
        _followAngularDisplacement = angularDisplacement * _followAngularDisplacement;

        _rigidBody->setWorldTransform(btTransform(endRot, endPos));
    }
    _followTime += dt;
}
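
Each substep above closes the remaining gap to _followDesiredBodyTransform at the rate that would finish exactly when _followTimeRemaining runs out, clamps the resulting displacement to half the capsule radius, and accumulates what was actually applied so FollowHelper::postPhysicsUpdate() can mirror it on the avatar side. A simplified one-dimensional sketch of that stepping rule, standalone and with illustrative names:

    // Sketch: the "distance = rate * time" teleport step above, reduced to a single axis.
    #include <algorithm>

    struct FollowState {
        float position;       // current body position along one axis
        float timeRemaining;  // seconds left in the recenter budget
    };

    void followStep(FollowState& state, float target, float dt, float maxStep) {
        const float MINIMUM_TIME_REMAINING = 0.005f;
        state.timeRemaining -= dt;
        if (state.timeRemaining < MINIMUM_TIME_REMAINING) {
            return;  // budget exhausted; the helper deactivates once its own _timeRemaining runs out
        }
        float gap = target - state.position;
        float step = (gap / state.timeRemaining) * dt;       // rate * time
        step = std::max(-maxStep, std::min(step, maxStep));  // clamp, like clampLength() above
        state.position += step;
    }

With dt around 1/90 s and a 0.5 s budget (FOLLOW_TIME in MyAvatar.cpp), the position converges to the target over roughly half a second.
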
|
||||
|
@@ -323,8 +352,17 @@ void CharacterController::setTargetVelocity(const glm::vec3& velocity) {
    _walkVelocity = glmToBullet(velocity);
}

void CharacterController::setFollowVelocity(const glm::vec3& velocity) {
    _followVelocity = glmToBullet(velocity);
void CharacterController::setFollowParameters(const glm::mat4& desiredWorldBodyMatrix, float timeRemaining) {
    _followTimeRemaining = timeRemaining;
    _followDesiredBodyTransform = glmToBullet(desiredWorldBodyMatrix) * btTransform(btQuaternion::getIdentity(), glmToBullet(_shapeLocalOffset));
}

glm::vec3 CharacterController::getFollowLinearDisplacement() const {
    return bulletToGLM(_followLinearDisplacement);
}

glm::quat CharacterController::getFollowAngularDisplacement() const {
    return bulletToGLM(_followAngularDisplacement);
}
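
Taken together, setFollowParameters() and the two displacement getters form the controller side of the recenter handshake: FollowHelper::prePhysicsUpdate() supplies the desired world-space body transform and a time budget before the physics step, and FollowHelper::postPhysicsUpdate() reads back what was actually applied afterwards. A rough sketch of that per-frame ordering; the driver function is illustrative and not engine code:

    // Sketch: per-frame ordering of the follow handshake between MyAvatar and CharacterController.
    void simulateOneFrame(MyAvatar& avatar, float dt) {
        avatar.prepareForPhysicsSimulation();            // FollowHelper::prePhysicsUpdate() -> setFollowParameters()
        // ... Bullet steps the world; CharacterController::playerStep() applies and accumulates
        //     the clamped follow displacement ...
        avatar.harvestResultsFromPhysicsSimulation(dt);  // FollowHelper::postPhysicsUpdate() -> getFollow*Displacement()
    }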

glm::vec3 CharacterController::getLinearVelocity() const {
@@ -378,6 +416,9 @@ void CharacterController::preSimulation() {
        }
    }
    _followTime = 0.0f;

    _followLinearDisplacement = btVector3(0, 0, 0);
    _followAngularDisplacement = btQuaternion::getIdentity();
}

void CharacterController::postSimulation() {
@@ -18,6 +18,7 @@
#include <BulletDynamics/Character/btCharacterControllerInterface.h>

#include <GLMHelpers.h>
#include "BulletUtil.h"

const uint32_t PENDING_FLAG_ADD_TO_SIMULATION = 1U << 0;
const uint32_t PENDING_FLAG_REMOVE_FROM_SIMULATION = 1U << 1;
@@ -66,8 +67,10 @@ public:
    void getPositionAndOrientation(glm::vec3& position, glm::quat& rotation) const;

    void setTargetVelocity(const glm::vec3& velocity);
    void setFollowVelocity(const glm::vec3& velocity);
    void setFollowParameters(const glm::mat4& desiredWorldMatrix, float timeRemaining);
    float getFollowTime() const { return _followTime; }
    glm::vec3 getFollowLinearDisplacement() const;
    glm::quat getFollowAngularDisplacement() const;

    glm::vec3 getLinearVelocity() const;
@@ -87,7 +90,8 @@ protected:
protected:
    btVector3 _currentUp;
    btVector3 _walkVelocity;
    btVector3 _followVelocity;
    btTransform _followDesiredBodyTransform;
    btScalar _followTimeRemaining;
    btTransform _characterBodyTransform;

    glm::vec3 _shapeLocalOffset;
@@ -105,6 +109,8 @@ protected:

    btScalar _jumpSpeed;
    btScalar _followTime;
    btVector3 _followLinearDisplacement;
    btQuaternion _followAngularDisplacement;

    bool _enabled;
    bool _isOnGround;
@@ -425,6 +425,28 @@ void generateBasisVectors(const glm::vec3& primaryAxis, const glm::vec3& seconda
    vAxisOut = glm::cross(wAxisOut, uAxisOut);
}

glm::vec2 getFacingDir2D(const glm::quat& rot) {
    glm::vec3 facing3D = rot * Vectors::UNIT_NEG_Z;
    glm::vec2 facing2D(facing3D.x, facing3D.z);
    const float ALMOST_ZERO = 0.0001f;
    if (glm::length(facing2D) < ALMOST_ZERO) {
        return glm::vec2(1.0f, 0.0f);
    } else {
        return glm::normalize(facing2D);
    }
}

glm::vec2 getFacingDir2D(const glm::mat4& m) {
    glm::vec3 facing3D = transformVector(m, Vectors::UNIT_NEG_Z);
    glm::vec2 facing2D(facing3D.x, facing3D.z);
    const float ALMOST_ZERO = 0.0001f;
    if (glm::length(facing2D) < ALMOST_ZERO) {
        return glm::vec2(1.0f, 0.0f);
    } else {
        return glm::normalize(facing2D);
    }
}
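
Both overloads project the rotated forward (-Z) axis onto the XZ plane and normalize it, falling back to +X when the forward vector points nearly straight up or down. A short sketch of how two such facings are compared, as FollowHelper::shouldActivate() does; assumes GLM, and the function name is illustrative:

    // Sketch: unsigned angle, in radians, between two unit facing vectors in the XZ plane.
    #include <glm/glm.hpp>
    #include <cmath>

    float facingAngle(const glm::vec2& a, const glm::vec2& b) {
        float d = glm::clamp(glm::dot(a, b), -1.0f, 1.0f);
        return acosf(d);
    }

With the 45 degree threshold used by FollowHelper, facingAngle(a, b) > PI / 4 is equivalent to the dot-product test against cosf(PI / 4.0f).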

bool isNaN(glm::vec3 value) {
    return isNaN(value.x) || isNaN(value.y) || isNaN(value.z);
}
@@ -432,3 +454,4 @@
bool isNaN(glm::quat value) {
    return isNaN(value.w) || isNaN(value.x) || isNaN(value.y) || isNaN(value.z);
}
@@ -224,6 +224,8 @@ glm::vec3 transformVector(const glm::mat4& m, const glm::vec3& v);
void generateBasisVectors(const glm::vec3& primaryAxis, const glm::vec3& secondaryAxis,
                          glm::vec3& uAxisOut, glm::vec3& vAxisOut, glm::vec3& wAxisOut);

glm::vec2 getFacingDir2D(const glm::quat& rot);
glm::vec2 getFacingDir2D(const glm::mat4& m);

bool isNaN(glm::vec3 value);
bool isNaN(glm::quat value);
@@ -15,6 +15,7 @@

#include <BulletUtil.h>
#include <NumericalConstants.h>
#include <GLMHelpers.h>

// Add additional qtest functionality (the include order is important!)
#include "../QTestExtensions.h"
@@ -46,12 +47,49 @@ void BulletUtilTests::fromBulletToGLM() {
    QCOMPARE(gQ.y, bQ.getY());
    QCOMPARE(gQ.z, bQ.getZ());
    QCOMPARE(gQ.w, bQ.getW());

    // mat4
    btMatrix3x3 basis(bQ);
    btVector3 origin(100.0f, 200.0f, 300.0f);
    btTransform bM(basis, origin);

    glm::mat4 gM = bulletToGLM(bM);

    glm::vec3 translation = extractTranslation(gM);
    QCOMPARE(translation.x, bM.getOrigin().getX());
    QCOMPARE(translation.y, bM.getOrigin().getY());
    QCOMPARE(translation.z, bM.getOrigin().getZ());

    glm::quat rotation = glmExtractRotation(gM);
    QCOMPARE(rotation.x, bM.getRotation().getX());
    QCOMPARE(rotation.y, bM.getRotation().getY());
    QCOMPARE(rotation.z, bM.getRotation().getZ());
    QCOMPARE(rotation.w, bM.getRotation().getW());

    // As a sanity check, transform vectors by their corresponding matrices and compare the result.
    btVector3 bX = bM * btVector3(1.0f, 0.0f, 0.0f);
    btVector3 bY = bM * btVector3(0.0f, 1.0f, 0.0f);
    btVector3 bZ = bM * btVector3(0.0f, 0.0f, 1.0f);

    glm::vec3 gX = transformPoint(gM, glm::vec3(1.0f, 0.0f, 0.0f));
    glm::vec3 gY = transformPoint(gM, glm::vec3(0.0f, 1.0f, 0.0f));
    glm::vec3 gZ = transformPoint(gM, glm::vec3(0.0f, 0.0f, 1.0f));

    QCOMPARE(gX.x, bX.getX());
    QCOMPARE(gX.y, bX.getY());
    QCOMPARE(gX.z, bX.getZ());
    QCOMPARE(gY.x, bY.getX());
    QCOMPARE(gY.y, bY.getY());
    QCOMPARE(gY.z, bY.getZ());
    QCOMPARE(gZ.x, bZ.getX());
    QCOMPARE(gZ.y, bZ.getY());
    QCOMPARE(gZ.z, bZ.getZ());
}

void BulletUtilTests::fromGLMToBullet() {
    glm::vec3 gV(1.23f, 4.56f, 7.89f);
    btVector3 bV = glmToBullet(gV);


    QCOMPARE(gV.x, bV.getX());
    QCOMPARE(gV.y, bV.getY());
    QCOMPARE(gV.z, bV.getZ());
@@ -66,4 +104,60 @@ void BulletUtilTests::fromGLMToBullet() {
    QCOMPARE(gQ.y, bQ.getY());
    QCOMPARE(gQ.z, bQ.getZ());
    QCOMPARE(gQ.w, bQ.getW());

    // mat3
    glm::mat3 gM3 = glm::mat3_cast(gQ);
    btMatrix3x3 bM3 = glmToBullet(gM3);
    bM3.getRotation(bQ);
    QCOMPARE(gQ.x, bQ.getX());
    QCOMPARE(gQ.y, bQ.getY());
    QCOMPARE(gQ.z, bQ.getZ());
    QCOMPARE(gQ.w, bQ.getW());

    // mat4
    glm::mat4 gM4 = createMatFromQuatAndPos(gQ, gV);
    btTransform bM4 = glmToBullet(gM4);
    bQ = bM4.getRotation();
    bV = bM4.getOrigin();
    QCOMPARE(gQ.x, bQ.getX());
    QCOMPARE(gQ.y, bQ.getY());
    QCOMPARE(gQ.z, bQ.getZ());
    QCOMPARE(gQ.w, bQ.getW());
    QCOMPARE(gV.x, bV.getX());
    QCOMPARE(gV.y, bV.getY());
    QCOMPARE(gV.z, bV.getZ());
}

void BulletUtilTests::rotateVectorTest() {

    float angle = 0.317f * PI;
    btVector3 axis(1.23f, 2.34f, 3.45f);
    axis.normalize();
    btQuaternion q(axis, angle);

    btVector3 xAxis(1.0f, 0.0f, 0.0f);

    btVector3 result0 = rotateVector(q, xAxis);

    btTransform m(q);
    btVector3 result1 = m * xAxis;

    QCOMPARE(result0.getX(), result1.getX());
    QCOMPARE(result0.getY(), result1.getY());
    QCOMPARE(result0.getZ(), result1.getZ());
}

void BulletUtilTests::clampLengthTest() {
    btVector3 vec(1.0f, 3.0f, 2.0f);
    btVector3 clampedVec1 = clampLength(vec, 1.0f);
    btVector3 clampedVec2 = clampLength(vec, 2.0f);
    btVector3 normalizedVec = vec.normalized();

    QCOMPARE(clampedVec1.getX(), normalizedVec.getX());
    QCOMPARE(clampedVec1.getY(), normalizedVec.getY());
    QCOMPARE(clampedVec1.getZ(), normalizedVec.getZ());

    QCOMPARE(clampedVec2.getX(), normalizedVec.getX() * 2.0f);
    QCOMPARE(clampedVec2.getY(), normalizedVec.getY() * 2.0f);
    QCOMPARE(clampedVec2.getZ(), normalizedVec.getZ() * 2.0f);
}
@@ -20,6 +20,8 @@ class BulletUtilTests : public QObject {
private slots:
    void fromBulletToGLM();
    void fromGLMToBullet();
    void rotateVectorTest();
    void clampLengthTest();
};

#endif // hifi_BulletUtilTests_h