Merge pull request #928 from Phil-Palmer/fix/vr-recenter

VR fixes: users couldn't sit on the floor, and walk directions were wrong.
daleglass 2021-01-16 20:51:03 +01:00 committed by GitHub
commit 06b1a8e17c
12 changed files with 837 additions and 498 deletions

File diff suppressed because it is too large.


@ -283,15 +283,16 @@ class MyAvatar : public Avatar {
* the value.</p>
* @property {number} analogPlusSprintSpeed - The sprint (run) speed of your avatar for the "AnalogPlus" control scheme.
* @property {MyAvatar.SitStandModelType} userRecenterModel - Controls avatar leaning and recentering behavior.
* @property {number} isInSittingState - <code>true</code> if the user wearing the HMD is determined to be sitting
* (avatar leaning is disabled, recentering is enabled), <code>false</code> if the user wearing the HMD is
* determined to be standing (avatar leaning is enabled, and avatar recenters if it leans too far).
* If <code>userRecenterModel == 2</code> (i.e., "auto") the property value automatically updates as the user sits
* or stands, unless <code>isSitStandStateLocked == true</code>. Setting the property value overrides the current
* sitting / standing state, which is updated when the user next sits or stands unless
* <code>isSitStandStateLocked == true</code>.
* <p class="important">Deprecated: This property is deprecated and will be removed.</p>
* @property {boolean} isInSittingState - <code>true</code> if the user wearing the HMD is determined to be sitting;
* <code>false</code> if the user wearing the HMD is determined to be standing. This can affect whether the avatar
* is allowed to stand, lean or recenter its footing, depending on user preferences.
* The property value automatically updates as the user sits or stands. Setting the property value overrides the current
* sitting / standing state, which is updated when the user next sits or stands.
* @property {boolean} isSitStandStateLocked - <code>true</code> to lock the avatar sitting/standing state, i.e., use this
* to disable automatically changing state.
* <p class="important">Deprecated: This property is deprecated and will be removed.
* See also: <code>getUserRecenterModel</code> and <code>setUserRecenterModel</code>.</p>
* @property {boolean} allowTeleporting - <code>true</code> if teleporting is enabled in the Interface settings,
* <code>false</code> if it isn't. <em>Read-only.</em>
*
@ -413,8 +414,8 @@ class MyAvatar : public Avatar {
Q_PROPERTY(float walkBackwardSpeed READ getWalkBackwardSpeed WRITE setWalkBackwardSpeed NOTIFY walkBackwardSpeedChanged);
Q_PROPERTY(float sprintSpeed READ getSprintSpeed WRITE setSprintSpeed NOTIFY sprintSpeedChanged);
Q_PROPERTY(bool isInSittingState READ getIsInSittingState WRITE setIsInSittingState);
Q_PROPERTY(MyAvatar::SitStandModelType userRecenterModel READ getUserRecenterModel WRITE setUserRecenterModel);
Q_PROPERTY(bool isSitStandStateLocked READ getIsSitStandStateLocked WRITE setIsSitStandStateLocked);
Q_PROPERTY(MyAvatar::SitStandModelType userRecenterModel READ getUserRecenterModel WRITE setUserRecenterModel); // Deprecated
Q_PROPERTY(bool isSitStandStateLocked READ getIsSitStandStateLocked WRITE setIsSitStandStateLocked); // Deprecated
Q_PROPERTY(bool allowTeleporting READ getAllowTeleporting)
const QString DOMINANT_LEFT_HAND = "left";
@ -519,6 +520,7 @@ public:
/**jsdoc
* <p>Specifies different avatar leaning and recentering behaviors.</p>
* <p class="important">Deprecated: This type is deprecated and will be removed.</p>
* <table>
* <thead>
* <tr><th>Value</th><th>Name</th><th>Description</th></tr>
@ -549,6 +551,29 @@ public:
};
Q_ENUM(SitStandModelType)
// Note: The option strings in setupPreferences (PreferencesDialog.cpp) must match this order.
enum class AllowAvatarStandingPreference : uint {
WhenUserIsStanding,
Always,
Count,
Default = Always
};
Q_ENUM(AllowAvatarStandingPreference)
// Note: The option strings in setupPreferences (PreferencesDialog.cpp) must match this order.
enum class AllowAvatarLeaningPreference : uint {
WhenUserIsStanding,
Always,
Never,
AlwaysNoRecenter, // experimental
Count,
Default = WhenUserIsStanding
};
Q_ENUM(AllowAvatarLeaningPreference)
static const std::array<QString, (uint)AllowAvatarStandingPreference::Count> allowAvatarStandingPreferenceStrings;
static const std::array<QString, (uint)AllowAvatarLeaningPreference::Count> allowAvatarLeaningPreferenceStrings;
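The MyAvatar.cpp diff that wires the deprecated userRecenterModel onto these two new preferences is suppressed above, so the following is only a plausible sketch of that mapping, not the shipped implementation; the enum values are the ones declared here, everything else is an assumption.

// Plausible sketch only (the real mapping lives in the suppressed MyAvatar.cpp diff):
// the deprecated single-axis model is expressed as a pair of the new preferences.
void MyAvatar::setUserRecenterModel(MyAvatar::SitStandModelType modelName) {
    switch (modelName) {
        case SitStandModelType::ForceSit:
            // "Seated": never lean the avatar away from its footing.
            setAllowAvatarStandingPreference(AllowAvatarStandingPreference::Always);
            setAllowAvatarLeaningPreference(AllowAvatarLeaningPreference::Never);
            break;
        case SitStandModelType::ForceStand:
            // "Standing": always allow leaning.
            setAllowAvatarStandingPreference(AllowAvatarStandingPreference::Always);
            setAllowAvatarLeaningPreference(AllowAvatarLeaningPreference::Always);
            break;
        case SitStandModelType::DisableHMDLean:
            // Experimental: never recenter, so the avatar can sit on the floor.
            setAllowAvatarStandingPreference(AllowAvatarStandingPreference::Always);
            setAllowAvatarLeaningPreference(AllowAvatarLeaningPreference::AlwaysNoRecenter);
            break;
        case SitStandModelType::Auto:
        default:
            // Auto: fall back to the new defaults (stand always, lean only while the user stands).
            setAllowAvatarStandingPreference(AllowAvatarStandingPreference::Default);
            setAllowAvatarLeaningPreference(AllowAvatarLeaningPreference::Default);
            break;
    }
}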
explicit MyAvatar(QThread* thread);
virtual ~MyAvatar();
@ -1417,7 +1442,6 @@ public:
controller::Pose getControllerPoseInSensorFrame(controller::Action action) const;
controller::Pose getControllerPoseInWorldFrame(controller::Action action) const;
controller::Pose getControllerPoseInAvatarFrame(controller::Action action) const;
glm::quat getOffHandRotation() const;
bool hasDriveInput() const;
@ -1596,7 +1620,7 @@ public:
* @function MyAvatar.getAvatarScale
* @returns {number} The target scale for the avatar, range <code>0.005</code> &ndash; <code>1000.0</code>.
*/
Q_INVOKABLE float getAvatarScale();
Q_INVOKABLE float getAvatarScale() const;
/**jsdoc
* Sets the target scale of the avatar. The target scale is the desired scale of the avatar without any restrictions on
@ -1709,7 +1733,7 @@ public:
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor frame (-z forward)
glm::mat4 deriveBodyFromHMDSensor() const;
glm::mat4 deriveBodyFromHMDSensor(const bool forceFollowYPos = false) const;
glm::mat4 getSpine2RotationRigSpace() const;
@ -1753,10 +1777,14 @@ public:
bool getIsInWalkingState() const;
void setIsInSittingState(bool isSitting);
bool getIsInSittingState() const;
void setUserRecenterModel(MyAvatar::SitStandModelType modelName);
MyAvatar::SitStandModelType getUserRecenterModel() const;
void setIsSitStandStateLocked(bool isLocked);
bool getIsSitStandStateLocked() const;
void setUserRecenterModel(MyAvatar::SitStandModelType modelName); // Deprecated, will be removed.
MyAvatar::SitStandModelType getUserRecenterModel() const; // Deprecated, will be removed.
void setIsSitStandStateLocked(bool isLocked); // Deprecated, will be removed.
bool getIsSitStandStateLocked() const; // Deprecated, will be removed.
void setAllowAvatarStandingPreference(const AllowAvatarStandingPreference preference);
AllowAvatarStandingPreference getAllowAvatarStandingPreference() const;
void setAllowAvatarLeaningPreference(const AllowAvatarLeaningPreference preference);
AllowAvatarLeaningPreference getAllowAvatarLeaningPreference() const;
void setWalkSpeed(float value);
float getWalkSpeed() const;
void setWalkBackwardSpeed(float value);
@ -1989,6 +2017,10 @@ public:
glm::vec3 getLookAtPivotPoint();
glm::vec3 getCameraEyesPosition(float deltaTime);
bool isJumping();
bool getHMDCrouchRecenterEnabled() const;
bool isAllowedToLean() const;
bool areFeetTracked() const { return _isBodyPartTracked._feet; }  // Determine if the feet are under direct control.
bool areHipsTracked() const { return _isBodyPartTracked._hips; }  // Determine if the hips are under direct control.
public slots:
@ -2709,6 +2741,16 @@ private:
bool _isBraking { false };
bool _isAway { false };
// Indicates which parts of the body are under direct control (tracked).
struct {
    bool _feet { false };                // Left or right foot.
    bool _feetPreviousUpdate { false };  // Value of _feet on the previous update.
    bool _hips { false };
    bool _leftHand { false };
    bool _rightHand { false };
    bool _head { false };
} _isBodyPartTracked;
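The code that fills these flags is in the suppressed MyAvatar.cpp diff; a minimal sketch of how they could be derived each update from the controller poses declared earlier (the helper name and exact set of actions are assumptions):

// Hypothetical helper, not the shipped code: refresh the tracking flags from the
// validity of the corresponding controller poses.
void MyAvatar::updateBodyPartTracking() {
    auto poseIsValid = [this](controller::Action action) {
        return getControllerPoseInSensorFrame(action).isValid();
    };
    _isBodyPartTracked._feetPreviousUpdate = _isBodyPartTracked._feet;
    _isBodyPartTracked._feet = poseIsValid(controller::Action::LEFT_FOOT) ||
                               poseIsValid(controller::Action::RIGHT_FOOT);
    _isBodyPartTracked._hips = poseIsValid(controller::Action::HIPS);
    _isBodyPartTracked._leftHand = poseIsValid(controller::Action::LEFT_HAND);
    _isBodyPartTracked._rightHand = poseIsValid(controller::Action::RIGHT_HAND);
    _isBodyPartTracked._head = poseIsValid(controller::Action::HEAD);
}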
float _boomLength { ZOOM_DEFAULT };
float _yawSpeed; // degrees/sec
float _pitchSpeed; // degrees/sec
@ -2791,6 +2833,7 @@ private:
void resetLookAtRotation(const glm::vec3& avatarPosition, const glm::quat& avatarOrientation);
void resetPointAt();
static glm::vec3 aimToBlendValues(const glm::vec3& aimVector, const glm::quat& frameOrientation);
void centerBodyInternal(const bool forceFollowYPos = false);
// Avatar Preferences
QUrl _fullAvatarURLFromPreferences;
@ -2841,26 +2884,21 @@ private:
struct FollowHelper {
FollowHelper();
enum FollowType {
Rotation = 0,
Horizontal,
Vertical,
NumFollowTypes
};
float _timeRemaining[NumFollowTypes];
CharacterController::FollowTimePerType _timeRemaining;
void deactivate();
void deactivate(FollowType type);
void activate();
void activate(FollowType type);
void deactivate(CharacterController::FollowType type);
void activate(CharacterController::FollowType type, const bool snapFollow);
bool isActive() const;
bool isActive(FollowType followType) const;
float getMaxTimeRemaining() const;
bool isActive(CharacterController::FollowType followType) const;
void decrementTimeRemaining(float dt);
bool shouldActivateRotation(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const;
bool shouldActivateRotation(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix, bool& shouldSnapOut) const;
bool shouldActivateVertical(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const;
bool shouldActivateHorizontal(const MyAvatar& myAvatar, const glm::mat4& desiredBodyMatrix, const glm::mat4& currentBodyMatrix) const;
bool shouldActivateHorizontalCG(MyAvatar& myAvatar) const;
bool shouldActivateHorizontal(const MyAvatar& myAvatar,
const glm::mat4& desiredBodyMatrix,
const glm::mat4& currentBodyMatrix,
bool& resetModeOut,
bool& goToWalkingStateOut) const;
void prePhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& bodySensorMatrix, const glm::mat4& currentBodyMatrix, bool hasDriveInput);
glm::mat4 postPhysicsUpdate(MyAvatar& myAvatar, const glm::mat4& currentBodyMatrix);
bool getForceActivateRotation() const;
@ -2871,16 +2909,23 @@ private:
void setForceActivateHorizontal(bool val);
bool getToggleHipsFollowing() const;
void setToggleHipsFollowing(bool followHead);
bool _squatDetected { false };
std::atomic<bool> _forceActivateRotation { false };
std::atomic<bool> _forceActivateVertical { false };
std::atomic<bool> _forceActivateHorizontal { false };
std::atomic<bool> _toggleHipsFollowing { true };
private:
bool shouldActivateHorizontal_userSitting(const MyAvatar& myAvatar,
const glm::mat4& desiredBodyMatrix,
const glm::mat4& currentBodyMatrix) const;
bool shouldActivateHorizontal_userStanding(const MyAvatar& myAvatar,
bool& resetModeOut,
bool& goToWalkingStateOut) const;
};
FollowHelper _follow;
bool isFollowActive(FollowHelper::FollowType followType) const;
bool isFollowActive(CharacterController::FollowType followType) const;
bool _goToPending { false };
bool _physicsSafetyPending { false };
@ -2922,6 +2967,9 @@ private:
bool _centerOfGravityModelEnabled { true };
bool _hmdLeanRecenterEnabled { true };
bool _hmdCrouchRecenterEnabled { true };  // Is MyAvatar allowed to recenter vertically (stand) when the user is sitting in the real world.
bool _sprint { false };
AnimPose _prePhysicsRoomPose;
@ -2953,7 +3001,6 @@ private:
ThreadSafeValueCache<float> _userHeight { DEFAULT_AVATAR_HEIGHT };
float _averageUserHeightSensorSpace { _userHeight.get() };
bool _sitStandStateChange { false };
ThreadSafeValueCache<bool> _lockSitStandState { false };
// max unscaled forward movement speed
ThreadSafeValueCache<float> _defaultWalkSpeed { DEFAULT_AVATAR_MAX_WALKING_SPEED };
@ -2969,9 +3016,13 @@ private:
float _walkSpeedScalar { AVATAR_WALK_SPEED_SCALAR };
bool _isInWalkingState { false };
ThreadSafeValueCache<bool> _isInSittingState { false };
ThreadSafeValueCache<MyAvatar::SitStandModelType> _userRecenterModel { MyAvatar::SitStandModelType::Auto };
ThreadSafeValueCache<MyAvatar::AllowAvatarStandingPreference> _allowAvatarStandingPreference{
MyAvatar::AllowAvatarStandingPreference::Default
}; // The user preference of when MyAvatar may stand.
ThreadSafeValueCache<MyAvatar::AllowAvatarLeaningPreference> _allowAvatarLeaningPreference{
MyAvatar::AllowAvatarLeaningPreference::Default
}; // The user preference of when MyAvatar may lean.
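isAllowedToLean(), declared earlier, presumably reads these caches together with the sitting state. A sketch of the expected logic, assuming the implementation in the suppressed MyAvatar.cpp diff follows the preference names literally:

// Sketch only: leaning is allowed when the preference is "Always" (with or without
// recentering), or when it is "When I'm standing" and the user is not currently sitting.
bool MyAvatar::isAllowedToLean() const {
    const auto preference = getAllowAvatarLeaningPreference();
    return (preference == AllowAvatarLeaningPreference::Always) ||
           (preference == AllowAvatarLeaningPreference::AlwaysNoRecenter) ||
           ((preference == AllowAvatarLeaningPreference::WhenUserIsStanding) && !getIsInSittingState());
}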
float _sitStandStateTimer { 0.0f };
float _squatTimer { 0.0f };
float _tippingPoint { _userHeight.get() };
// load avatar scripts once when rig is ready
@ -3012,7 +3063,8 @@ private:
Setting::Handle<int> _controlSchemeIndexSetting;
std::vector<Setting::Handle<QUuid>> _avatarEntityIDSettings;
std::vector<Setting::Handle<QByteArray>> _avatarEntityDataSettings;
Setting::Handle<QString> _userRecenterModelSetting;
Setting::Handle<QString> _allowAvatarStandingPreferenceSetting;
Setting::Handle<QString> _allowAvatarLeaningPreferenceSetting;
// AvatarEntities stuff:
// We cache the "map of unfortunately-formatted-binary-blobs" because they are expensive to compute


@ -26,7 +26,9 @@ void MyCharacterController::RayShotgunResult::reset() {
walkable = true;
}
MyCharacterController::MyCharacterController(std::shared_ptr<MyAvatar> avatar) {
MyCharacterController::MyCharacterController(std::shared_ptr<MyAvatar> avatar,
const FollowTimePerType& followTimeRemainingPerType) :
CharacterController(followTimeRemainingPerType) {
assert(avatar);
_avatar = avatar;
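Note the lifetime contract this introduces: CharacterController stores the follow-time array by const reference (see _followTimeRemainingPerType further below), so whatever owns the MyCharacterController must also own the array and keep it alive for the controller's whole lifetime. A purely illustrative sketch of that ownership pattern, with invented names and assuming the project headers are included:

#include <memory>

// Illustrative only: declare the timers before the controller so the reference handed
// to the constructor stays valid for as long as the controller exists.
struct FollowTimerOwnerSketch {
    CharacterController::FollowTimePerType _followTimes {};  // zero-initialized; written by the follow logic
    MyCharacterController _controller;                       // reads the same array during playerStep()

    explicit FollowTimerOwnerSketch(std::shared_ptr<MyAvatar> avatar) :
        _controller(avatar, _followTimes) {}
};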


@ -23,7 +23,7 @@ class DetailedMotionState;
class MyCharacterController : public CharacterController {
public:
explicit MyCharacterController(std::shared_ptr<MyAvatar> avatar);
explicit MyCharacterController(std::shared_ptr<MyAvatar> avatar, const FollowTimePerType& followTimeRemainingPerType);
~MyCharacterController ();
void addToWorld() override;


@ -65,13 +65,21 @@ static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
return result;
}
// Use the center-of-gravity model if the user and the avatar are standing, unless flying or walking.
// If artificial standing is disabled, use center-of-gravity regardless of the user's sit/stand state.
bool useCenterOfGravityModel =
myAvatar->getCenterOfGravityModelEnabled() && !isFlying && !myAvatar->getIsInWalkingState() &&
(!myAvatar->getHMDCrouchRecenterEnabled() || !myAvatar->getIsInSittingState()) &&
myAvatar->getHMDLeanRecenterEnabled() &&
(myAvatar->getAllowAvatarLeaningPreference() != MyAvatar::AllowAvatarLeaningPreference::AlwaysNoRecenter);
glm::mat4 hipsMat;
if (myAvatar->getCenterOfGravityModelEnabled() && !isFlying && !(myAvatar->getIsInWalkingState()) && !(myAvatar->getIsInSittingState()) && myAvatar->getHMDLeanRecenterEnabled()) {
if (useCenterOfGravityModel) {
// then we use center of gravity model
hipsMat = myAvatar->deriveBodyUsingCgModel();
} else {
// otherwise use the default of putting the hips under the head
hipsMat = myAvatar->deriveBodyFromHMDSensor();
hipsMat = myAvatar->deriveBodyFromHMDSensor(true);
}
glm::vec3 hipsPos = extractTranslation(hipsMat);
glm::quat hipsRot = glmExtractRotation(hipsMat);
@ -82,7 +90,7 @@ static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
// dampen hips rotation, by mixing it with the avatar orientation in sensor space
// turning this off for center of gravity model because it is already mixed in there
if (!(myAvatar->getCenterOfGravityModelEnabled())) {
if (!useCenterOfGravityModel) {
const float MIX_RATIO = 0.5f;
hipsRot = safeLerp(glmExtractRotation(avatarToSensorMat), hipsRot, MIX_RATIO);
}


@ -67,13 +67,13 @@ void AnimStats::updateStats(bool force) {
// print if we are recentering or not.
_recenterText = "Recenter: ";
if (myAvatar->isFollowActive(MyAvatar::FollowHelper::Rotation)) {
if (myAvatar->isFollowActive(CharacterController::FollowType::Rotation)) {
_recenterText += "Rotation ";
}
if (myAvatar->isFollowActive(MyAvatar::FollowHelper::Horizontal)) {
if (myAvatar->isFollowActive(CharacterController::FollowType::Horizontal)) {
_recenterText += "Horizontal ";
}
if (myAvatar->isFollowActive(MyAvatar::FollowHelper::Vertical)) {
if (myAvatar->isFollowActive(CharacterController::FollowType::Vertical)) {
_recenterText += "Vertical ";
}
emit recenterTextChanged();


@ -422,40 +422,40 @@ void setupPreferences() {
preferences->addPreference(preference);
}
{
auto getter = [myAvatar]()->int {
switch (myAvatar->getUserRecenterModel()) {
case MyAvatar::SitStandModelType::Auto:
default:
return 0;
case MyAvatar::SitStandModelType::ForceSit:
return 1;
case MyAvatar::SitStandModelType::ForceStand:
return 2;
case MyAvatar::SitStandModelType::DisableHMDLean:
return 3;
}
IntPreference::Getter getter = [myAvatar]() -> int {
return static_cast<int>(myAvatar->getAllowAvatarStandingPreference());
};
auto setter = [myAvatar](int value) {
switch (value) {
case 0:
default:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::Auto);
break;
case 1:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::ForceSit);
break;
case 2:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::ForceStand);
break;
case 3:
myAvatar->setUserRecenterModel(MyAvatar::SitStandModelType::DisableHMDLean);
break;
}
IntPreference::Setter setter = [myAvatar](const int& value) {
myAvatar->setAllowAvatarStandingPreference(static_cast<MyAvatar::AllowAvatarStandingPreference>(value));
};
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Auto / Force Sit / Force Stand / Disable Recenter", getter, setter);
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Allow my avatar to stand", getter, setter);
QStringList items;
items << "Auto - turns on avatar leaning when standing in real world" << "Seated - disables all avatar leaning while sitting in real world" << "Standing - enables avatar leaning while sitting in real world" << "Disabled - allows avatar sitting on the floor [Experimental]";
preference->setHeading("Avatar leaning behavior");
items << "When I'm standing"
<< "Always"; // Must match the order in MyAvatar::AllowAvatarStandingPreference.
assert(items.size() == static_cast<uint>(MyAvatar::AllowAvatarStandingPreference::Count));
preference->setHeading("Allow my avatar to stand:");
preference->setItems(items);
preferences->addPreference(preference);
}
{
IntPreference::Getter getter = [myAvatar]() -> int {
return static_cast<int>(myAvatar->getAllowAvatarLeaningPreference());
};
IntPreference::Setter setter = [myAvatar](const int& value) {
myAvatar->setAllowAvatarLeaningPreference(static_cast<MyAvatar::AllowAvatarLeaningPreference>(value));
};
auto preference = new RadioButtonsPreference(VR_MOVEMENT, "Allow my avatar to lean", getter, setter);
QStringList items;
items << "When I'm standing"
<< "Always"
<< "Never"
<< "Always, no recenter (Experimental)"; // Must match the order in MyAvatar::AllowAvatarLeaningPreference.
assert(items.size() == static_cast<uint>(MyAvatar::AllowAvatarLeaningPreference::Count));
preference->setHeading("Allow my avatar to lean:");
preference->setItems(items);
preferences->addPreference(preference);
}


@ -1855,6 +1855,16 @@ glm::vec3 Rig::deflectHandFromTorso(const glm::vec3& handPosition, const HFMJoin
return position;
}
// Get the scale factor to convert distances in the geometry frame into the unscaled rig frame.
// Typically it will be the unit conversion from cm to m.
float Rig::GetScaleFactorGeometryToUnscaledRig() const {
// Normally the model offset transform will contain the avatar scale factor; we explicitly remove it here.
AnimPose modelOffsetWithoutAvatarScale(glm::vec3(1.0f), getModelOffsetPose().rot(), getModelOffsetPose().trans());
AnimPose geomToRigWithoutAvatarScale = modelOffsetWithoutAvatarScale * getGeometryOffsetPose();
return geomToRigWithoutAvatarScale.scale().x; // in practice this is always a uniform scale factor.
}
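The two height helpers further down apply this factor to joint heights measured on the skeleton's default pose. A generic version of that pattern, shown only to make the unit handling explicit (hypothetical helper, not part of this commit):

// Hypothetical helper: measure a joint's default-pose height above the geometry ground
// plane (typically centimeters) and return it in unscaled rig units (meters).
float Rig::getUnscaledJointHeight(int jointIndex) const {
    if (jointIndex < 0 || !getAnimSkeleton()) {
        return 0.0f;
    }
    const float geometryY = getAnimSkeleton()->getAbsoluteDefaultPose(jointIndex).trans().y - GEOMETRY_GROUND_Y;
    return GetScaleFactorGeometryToUnscaledRig() * geometryY;
}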
void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnabled, bool hipsEstimated,
bool leftArmEnabled, bool rightArmEnabled, bool headEnabled, float dt,
const AnimPose& leftHandPose, const AnimPose& rightHandPose,
@ -2703,10 +2713,10 @@ void Rig::computeAvatarBoundingCapsule(
Extents totalExtents;
totalExtents.reset();
// HACK by convention our Avatars are always modeled such that y=0 is the ground plane.
// add the zero point so that our avatars will always have bounding volumes that are flush with the ground
// HACK by convention our Avatars are always modeled such that y=0 (GEOMETRY_GROUND_Y) is the ground plane.
// add the ground point so that our avatars will always have bounding volumes that are flush with the ground
// even if they do not have legs (default robot)
totalExtents.addPoint(glm::vec3(0.0f));
totalExtents.addPoint(glm::vec3(0.0f, GEOMETRY_GROUND_Y, 0.0f));
// To reduce the radius of the bounding capsule to be tight with the torso, we only consider joints
// from the head to the hips when computing the rest of the bounding capsule.
@ -2747,24 +2757,20 @@ void Rig::initFlow(bool isActive) {
}
}
// Get the vertical position of eye joints, in the rig coordinate frame, ignoring the avatar scale.
float Rig::getUnscaledEyeHeight() const {
// Normally the model offset transform will contain the avatar scale factor, we explicitly remove it here.
AnimPose modelOffsetWithoutAvatarScale(glm::vec3(1.0f), getModelOffsetPose().rot(), getModelOffsetPose().trans());
AnimPose geomToRigWithoutAvatarScale = modelOffsetWithoutAvatarScale * getGeometryOffsetPose();
// This factor can be used to scale distances in the geometry frame into the unscaled rig frame.
// Typically it will be the unit conversion from cm to m.
float scaleFactor = geomToRigWithoutAvatarScale.scale().x; // in practice this always a uniform scale factor.
// Factor to scale distances in the geometry frame into the unscaled rig frame.
float scaleFactor = GetScaleFactorGeometryToUnscaledRig();
int headTopJoint = indexOfJoint("HeadTop_End");
int headJoint = indexOfJoint("Head");
int eyeJoint = indexOfJoint("LeftEye") != -1 ? indexOfJoint("LeftEye") : indexOfJoint("RightEye");
int toeJoint = indexOfJoint("LeftToeBase") != -1 ? indexOfJoint("LeftToeBase") : indexOfJoint("RightToeBase");
// Makes assumption that the y = 0 plane in geometry is the ground plane.
// We also make that assumption in Rig::computeAvatarBoundingCapsule()
const float GROUND_Y = 0.0f;
// Values from the skeleton are in the geometry coordinate frame.
auto skeleton = getAnimSkeleton();
if (eyeJoint >= 0 && toeJoint >= 0) {
@ -2772,8 +2778,8 @@ float Rig::getUnscaledEyeHeight() const {
float eyeHeight = skeleton->getAbsoluteDefaultPose(eyeJoint).trans().y - skeleton->getAbsoluteDefaultPose(toeJoint).trans().y;
return scaleFactor * eyeHeight;
} else if (eyeJoint >= 0) {
// Measure Eye joint to y = 0 plane.
float eyeHeight = skeleton->getAbsoluteDefaultPose(eyeJoint).trans().y - GROUND_Y;
// Measure Eye joint to ground plane.
float eyeHeight = skeleton->getAbsoluteDefaultPose(eyeJoint).trans().y - GEOMETRY_GROUND_Y;
return scaleFactor * eyeHeight;
} else if (headTopJoint >= 0 && toeJoint >= 0) {
// Measure from ToeBase joint to HeadTop_End joint, then remove forehead distance.
@ -2783,19 +2789,36 @@ float Rig::getUnscaledEyeHeight() const {
} else if (headTopJoint >= 0) {
// Measure from HeadTop_End joint to the ground, then remove forehead distance.
const float ratio = DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD / DEFAULT_AVATAR_HEIGHT;
float headHeight = skeleton->getAbsoluteDefaultPose(headTopJoint).trans().y - GROUND_Y;
float headHeight = skeleton->getAbsoluteDefaultPose(headTopJoint).trans().y - GEOMETRY_GROUND_Y;
return scaleFactor * (headHeight - headHeight * ratio);
} else if (headJoint >= 0) {
// Measure Head joint to the ground, then add in distance from neck to eye.
const float DEFAULT_AVATAR_NECK_TO_EYE = DEFAULT_AVATAR_NECK_TO_TOP_OF_HEAD - DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD;
const float ratio = DEFAULT_AVATAR_NECK_TO_EYE / DEFAULT_AVATAR_NECK_HEIGHT;
float neckHeight = skeleton->getAbsoluteDefaultPose(headJoint).trans().y - GROUND_Y;
float neckHeight = skeleton->getAbsoluteDefaultPose(headJoint).trans().y - GEOMETRY_GROUND_Y;
return scaleFactor * (neckHeight + neckHeight * ratio);
} else {
return DEFAULT_AVATAR_EYE_HEIGHT;
}
}
// Get the vertical position of the hips joint, in the rig coordinate frame, ignoring the avatar scale.
float Rig::getUnscaledHipsHeight() const {
// This factor can be used to scale distances in the geometry frame into the unscaled rig frame.
float scaleFactor = GetScaleFactorGeometryToUnscaledRig();
int hipsJoint = indexOfJoint("Hips");
// Values from the skeleton are in the geometry coordinate frame.
if (hipsJoint >= 0) {
// Measure hip joint to ground plane.
float hipsHeight = getAnimSkeleton()->getAbsoluteDefaultPose(hipsJoint).trans().y - GEOMETRY_GROUND_Y;
return scaleFactor * hipsHeight;
} else {
return DEFAULT_AVATAR_HIPS_HEIGHT;
}
}
void Rig::setDirectionalBlending(const QString& targetName, const glm::vec3& blendingTarget, const QString& alphaName, float alpha) {
_animVars.set(targetName, blendingTarget);
_animVars.set(alphaName, alpha);


@ -251,6 +251,7 @@ public:
Flow& getFlow() { return _internalFlow; }
float getUnscaledEyeHeight() const;
float getUnscaledHipsHeight() const;
void buildAbsoluteRigPoses(const AnimPoseVec& relativePoses, AnimPoseVec& absolutePosesOut) const;
int getOverrideJointCount() const;
@ -287,6 +288,11 @@ protected:
glm::vec3 deflectHandFromTorso(const glm::vec3& handPosition, const HFMJointShapeInfo& hipsShapeInfo, const HFMJointShapeInfo& spineShapeInfo,
const HFMJointShapeInfo& spine1ShapeInfo, const HFMJointShapeInfo& spine2ShapeInfo) const;
// Get the scale factor to convert distances in the geometry frame into the unscaled rig frame.
float GetScaleFactorGeometryToUnscaledRig() const;
// The ground plane Y position in geometry space.
static constexpr float GEOMETRY_GROUND_Y = 0.0f;
AnimPose _modelOffset; // model to rig space
AnimPose _geometryOffset; // geometry to model space (includes unit offset & fst offsets)


@ -107,12 +107,12 @@ CharacterController::CharacterMotor::CharacterMotor(const glm::vec3& vel, const
static uint32_t _numCharacterControllers { 0 };
CharacterController::CharacterController() {
CharacterController::CharacterController(const FollowTimePerType& followTimeRemainingPerType) :
_followTimeRemainingPerType(followTimeRemainingPerType) {
_floorDistance = _scaleFactor * DEFAULT_AVATAR_FALL_HEIGHT;
_targetVelocity.setValue(0.0f, 0.0f, 0.0f);
_followDesiredBodyTransform.setIdentity();
_followTimeRemaining = 0.0f;
_state = State::Hover;
_isPushingUp = false;
_rayHitStartTime = 0;
@ -350,64 +350,103 @@ void CharacterController::playerStep(btCollisionWorld* collisionWorld, btScalar
btVector3 velocity = _rigidBody->getLinearVelocity() - _parentVelocity;
computeNewVelocity(dt, velocity);
const float MINIMUM_TIME_REMAINING = 0.005f;
const float MAX_DISPLACEMENT = 0.5f * _radius;
_followTimeRemaining -= dt;
if (_followTimeRemaining >= MINIMUM_TIME_REMAINING) {
btTransform bodyTransform = _rigidBody->getWorldTransform();
constexpr float MINIMUM_TIME_REMAINING = 0.005f;
static_assert(FOLLOW_TIME_IMMEDIATE_SNAP > MINIMUM_TIME_REMAINING, "The code below assumes this condition is true.");
bool hasFollowTimeRemaining = false;
for (float followTime : _followTimeRemainingPerType) {
if (followTime > MINIMUM_TIME_REMAINING) {
hasFollowTimeRemaining = true;
break;
}
}
if (hasFollowTimeRemaining) {
const float MAX_DISPLACEMENT = 0.5f * _radius;
btTransform bodyTransform = _rigidBody->getWorldTransform();
btVector3 startPos = bodyTransform.getOrigin();
btVector3 deltaPos = _followDesiredBodyTransform.getOrigin() - startPos;
btVector3 vel = deltaPos / _followTimeRemaining;
btVector3 linearDisplacement = clampLength(vel * dt, MAX_DISPLACEMENT); // clamp displacement to prevent tunneling.
btVector3 linearDisplacement(0.0f, 0.0f, 0.0f);
{
float horizontalTime = _followTimeRemainingPerType[static_cast<uint>(FollowType::Horizontal)];
float verticalTime = _followTimeRemainingPerType[static_cast<uint>(FollowType::Vertical)];
if (horizontalTime == FOLLOW_TIME_IMMEDIATE_SNAP) {
linearDisplacement.setX(deltaPos.x());
linearDisplacement.setZ(deltaPos.z());
} else if (horizontalTime > MINIMUM_TIME_REMAINING) {
linearDisplacement.setX((deltaPos.x() * dt) / horizontalTime);
linearDisplacement.setZ((deltaPos.z() * dt) / horizontalTime);
}
if (verticalTime == FOLLOW_TIME_IMMEDIATE_SNAP) {
linearDisplacement.setY(deltaPos.y());
} else if (verticalTime > MINIMUM_TIME_REMAINING) {
linearDisplacement.setY((deltaPos.y() * dt) / verticalTime);
}
linearDisplacement = clampLength(linearDisplacement, MAX_DISPLACEMENT); // clamp displacement to prevent tunneling.
}
btVector3 endPos = startPos + linearDisplacement;
// resolve the simple linearDisplacement
_followLinearDisplacement += linearDisplacement;
// now for the rotational part...
btQuaternion startRot = bodyTransform.getRotation();
btQuaternion desiredRot = _followDesiredBodyTransform.getRotation();
// startRot as default rotation
btQuaternion endRot = startRot;
// the dot product between two quaternions is equal to +/- cos(angle/2)
// where 'angle' is that of the rotation between them
float qDot = desiredRot.dot(startRot);
float rotationTime = _followTimeRemainingPerType[static_cast<uint>(FollowType::Rotation)];
if (rotationTime > MINIMUM_TIME_REMAINING) {
btQuaternion desiredRot = _followDesiredBodyTransform.getRotation();
// when the abs() value of the dot product is approximately 1.0
// then the two rotations are effectively adjacent
const float MIN_DOT_PRODUCT_OF_ADJACENT_QUATERNIONS = 0.99999f; // corresponds to approx 0.5 degrees
if (fabsf(qDot) < MIN_DOT_PRODUCT_OF_ADJACENT_QUATERNIONS) {
if (qDot < 0.0f) {
// the quaternions are actually on opposite hyperhemispheres
// so we move one to agree with the other and negate qDot
desiredRot = -desiredRot;
qDot = -qDot;
// the dot product between two quaternions is equal to +/- cos(angle/2)
// where 'angle' is that of the rotation between them
float qDot = desiredRot.dot(startRot);
// when the abs() value of the dot product is approximately 1.0
// then the two rotations are effectively adjacent
const float MIN_DOT_PRODUCT_OF_ADJACENT_QUATERNIONS = 0.99999f; // corresponds to approx 0.5 degrees
if (fabsf(qDot) < MIN_DOT_PRODUCT_OF_ADJACENT_QUATERNIONS) {
if (qDot < 0.0f) {
// the quaternions are actually on opposite hyperhemispheres
// so we move one to agree with the other and negate qDot
desiredRot = -desiredRot;
qDot = -qDot;
}
btQuaternion deltaRot = desiredRot * startRot.inverse();
// the axis is the imaginary part, but scaled by sin(angle/2)
btVector3 axis(deltaRot.getX(), deltaRot.getY(), deltaRot.getZ());
axis /= sqrtf(1.0f - qDot * qDot);
// compute the angle we will resolve for this dt, but don't overshoot
float angle = 2.0f * acosf(qDot);
if (rotationTime != FOLLOW_TIME_IMMEDIATE_SNAP) {
if (dt < rotationTime) {
angle *= dt / rotationTime;
}
}
// accumulate rotation
deltaRot = btQuaternion(axis, angle);
_followAngularDisplacement = (deltaRot * _followAngularDisplacement).normalize();
// in order to accumulate displacement of avatar position, we need to take _shapeLocalOffset into account.
btVector3 shapeLocalOffset = glmToBullet(_shapeLocalOffset);
endRot = deltaRot * startRot;
btVector3 swingDisplacement =
rotateVector(endRot, -shapeLocalOffset) - rotateVector(startRot, -shapeLocalOffset);
_followLinearDisplacement += swingDisplacement;
}
btQuaternion deltaRot = desiredRot * startRot.inverse();
// the axis is the imaginary part, but scaled by sin(angle/2)
btVector3 axis(deltaRot.getX(), deltaRot.getY(), deltaRot.getZ());
axis /= sqrtf(1.0f - qDot * qDot);
// compute the angle we will resolve for this dt, but don't overshoot
float angle = 2.0f * acosf(qDot);
if (dt < _followTimeRemaining) {
angle *= dt / _followTimeRemaining;
}
// accumulate rotation
deltaRot = btQuaternion(axis, angle);
_followAngularDisplacement = (deltaRot * _followAngularDisplacement).normalize();
// in order to accumulate displacement of avatar position, we need to take _shapeLocalOffset into account.
btVector3 shapeLocalOffset = glmToBullet(_shapeLocalOffset);
endRot = deltaRot * startRot;
btVector3 swingDisplacement = rotateVector(endRot, -shapeLocalOffset) - rotateVector(startRot, -shapeLocalOffset);
_followLinearDisplacement += swingDisplacement;
}
_rigidBody->setWorldTransform(btTransform(endRot, endPos));
}
@ -606,8 +645,7 @@ void CharacterController::setParentVelocity(const glm::vec3& velocity) {
_parentVelocity = glmToBullet(velocity);
}
void CharacterController::setFollowParameters(const glm::mat4& desiredWorldBodyMatrix, float timeRemaining) {
_followTimeRemaining = timeRemaining;
void CharacterController::setFollowParameters(const glm::mat4& desiredWorldBodyMatrix) {
_followDesiredBodyTransform = glmToBullet(desiredWorldBodyMatrix) * btTransform(btQuaternion::getIdentity(), glmToBullet(_shapeLocalOffset));
}
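On the caller side (the MyAvatar.cpp diff is suppressed, so this is an assumption), the remaining time no longer travels through this call: FollowHelper::activate(type, snapFollow) presumably writes directly into the shared per-type array, using FOLLOW_TIME_IMMEDIATE_SNAP for snaps, and prePhysicsUpdate then passes only the target transform. A sketch:

// Assumed caller-side change; the re-arm constant is illustrative.
void MyAvatar::FollowHelper::activate(CharacterController::FollowType type, const bool snapFollow) {
    const float FOLLOW_TIME = 0.8f;  // illustrative blend time, in seconds
    _timeRemaining[static_cast<size_t>(type)] =
        snapFollow ? CharacterController::FOLLOW_TIME_IMMEDIATE_SNAP : FOLLOW_TIME;
}

// ...and in FollowHelper::prePhysicsUpdate, roughly:
//     myAvatar.getCharacterController()->setFollowParameters(desiredWorldBodyMatrix);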


@ -53,7 +53,20 @@ const btScalar MIN_CHARACTER_MOTOR_TIMESCALE = 0.05f;
class CharacterController : public btCharacterControllerInterface {
public:
CharacterController();
enum class FollowType : uint8_t {
Rotation,
Horizontal,
Vertical,
Count
};
// Remaining follow time for each FollowType
typedef std::array<float, static_cast<size_t>(FollowType::Count)> FollowTimePerType;
// Follow time value meaning that we should snap immediately to the target.
static constexpr float FOLLOW_TIME_IMMEDIATE_SNAP = FLT_MAX;
CharacterController(const FollowTimePerType& followTimeRemainingPerType);
virtual ~CharacterController();
bool needsRemoval() const;
bool needsAddition() const;
@ -99,7 +112,8 @@ public:
void getPositionAndOrientation(glm::vec3& position, glm::quat& rotation) const;
void setParentVelocity(const glm::vec3& parentVelocity);
void setFollowParameters(const glm::mat4& desiredWorldMatrix, float timeRemaining);
void setFollowParameters(const glm::mat4& desiredWorldMatrix);
float getFollowTime() const { return _followTime; }
glm::vec3 getFollowLinearDisplacement() const;
glm::quat getFollowAngularDisplacement() const;
@ -144,7 +158,7 @@ public:
void setPendingFlagsUpdateCollisionMask(){ _pendingFlags |= PENDING_FLAG_UPDATE_COLLISION_MASK; }
void setSeated(bool isSeated) { _isSeated = isSeated; }
bool getSeated() { return _isSeated; }
bool getSeated() const { return _isSeated; }
void resetStuckCounter() { _numStuckSubsteps = 0; }
@ -178,7 +192,7 @@ protected:
btVector3 _preSimulationVelocity;
btVector3 _velocityChange;
btTransform _followDesiredBodyTransform;
btScalar _followTimeRemaining;
const FollowTimePerType& _followTimeRemainingPerType;
btTransform _characterBodyTransform;
btVector3 _position;
btQuaternion _rotation;


@ -20,6 +20,7 @@ const float DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD = 0.11f; // meters
const float DEFAULT_AVATAR_NECK_TO_TOP_OF_HEAD = 0.185f; // meters
const float DEFAULT_AVATAR_NECK_HEIGHT = DEFAULT_AVATAR_HEIGHT - DEFAULT_AVATAR_NECK_TO_TOP_OF_HEAD;
const float DEFAULT_AVATAR_EYE_HEIGHT = DEFAULT_AVATAR_HEIGHT - DEFAULT_AVATAR_EYE_TO_TOP_OF_HEAD;
const float DEFAULT_AVATAR_HIPS_HEIGHT = 1.01327407f; // meters
const float DEFAULT_SPINE2_SPLINE_PROPORTION = 0.71f;
const float DEFAULT_AVATAR_SUPPORT_BASE_LEFT = -0.25f;
const float DEFAULT_AVATAR_SUPPORT_BASE_RIGHT = 0.25f;