Merge branch 'master' of https://github.com/highfidelity/hifi into baseball

Ryan Huffman 2015-11-16 13:56:02 -08:00
commit 75f4ae97c6
6 changed files with 106 additions and 137 deletions

View file

@@ -125,7 +125,7 @@ MyAvatar::MyAvatar(RigPointer rig) :
AVATAR_FRAME_TYPE = recording::Frame::registerFrameType(HEADER_NAME);
});
// FIXME how to deal with driving multiple avatars locally?
Frame::registerFrameHandler(AVATAR_FRAME_TYPE, [this](Frame::ConstPointer frame) {
qDebug() << "Playback of avatar frame length: " << frame->data.size();
avatarStateFromFrame(frame->data, this);
@@ -177,9 +177,8 @@ void MyAvatar::reset(bool andReload) {
// Reset dynamic state.
_wasPushing = _isPushing = _isBraking = _billboardValid = false;
_isFollowingHMD = false;
_hmdFollowVelocity = Vectors::ZERO;
_hmdFollowSpeed = 0.0f;
_followVelocity = Vectors::ZERO;
_followSpeed = 0.0f;
_skeletonModel.reset();
getHead()->reset();
_targetVelocity = glm::vec3(0.0f);
@@ -352,52 +351,40 @@ void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
void MyAvatar::updateHMDFollowVelocity() {
// compute offset to body's target position (in sensor-frame)
auto sensorBodyMatrix = deriveBodyFromHMDSensor();
_hmdFollowOffset = extractTranslation(sensorBodyMatrix) - extractTranslation(_bodySensorMatrix);
glm::vec3 truncatedOffset = _hmdFollowOffset;
if (truncatedOffset.y < 0.0f) {
// don't pull the body DOWN to match the target (allow animation system to squat)
truncatedOffset.y = 0.0f;
}
float truncatedOffsetDistance = glm::length(truncatedOffset);
glm::vec3 offset = extractTranslation(sensorBodyMatrix) - extractTranslation(_bodySensorMatrix);
_followOffsetDistance = glm::length(offset);
const float FOLLOW_TIMESCALE = 0.5f;
const float FOLLOW_THRESHOLD_SPEED = 0.2f;
const float FOLLOW_MIN_DISTANCE = 0.01f;
const float FOLLOW_THRESHOLD_DISTANCE = 0.2f;
const float FOLLOW_MAX_IDLE_DISTANCE = 0.1f;
bool isMoving;
if (_lastIsMoving) {
const float MOVE_EXIT_SPEED_THRESHOLD = 0.07f; // m/sec
isMoving = glm::length(_velocity) >= MOVE_EXIT_SPEED_THRESHOLD;
} else {
const float MOVE_ENTER_SPEED_THRESHOLD = 0.2f; // m/sec
isMoving = glm::length(_velocity) > MOVE_ENTER_SPEED_THRESHOLD;
}
bool justStartedMoving = (_lastIsMoving != isMoving) && isMoving;
_lastIsMoving = isMoving;
bool hmdIsAtRest = _hmdAtRestDetector.update(_hmdSensorPosition, _hmdSensorOrientation);
const float MIN_HMD_HIP_SHIFT = 0.05f;
if (justStartedMoving || (hmdIsAtRest && truncatedOffsetDistance > MIN_HMD_HIP_SHIFT)) {
_isFollowingHMD = true;
}
bool needNewFollowSpeed = (_isFollowingHMD && _hmdFollowSpeed == 0.0f);
if (!needNewFollowSpeed) {
// check to see if offset has exceeded its threshold
const float MAX_HMD_HIP_SHIFT = 0.2f;
if (truncatedOffsetDistance > MAX_HMD_HIP_SHIFT) {
_isFollowingHMD = true;
needNewFollowSpeed = true;
_followOffsetDistance = glm::length(offset);
if (_followOffsetDistance < FOLLOW_MIN_DISTANCE) {
// close enough
_followOffsetDistance = 0.0f;
} else {
bool avatarIsMoving = glm::length(_velocity - _followVelocity) > FOLLOW_THRESHOLD_SPEED;
bool shouldFollow = (hmdIsAtRest || avatarIsMoving) && _followOffsetDistance > FOLLOW_MAX_IDLE_DISTANCE;
glm::vec3 truncatedOffset = offset;
if (truncatedOffset.y < 0.0f) {
truncatedOffset.y = 0.0f;
}
float truncatedDistance = glm::length(truncatedOffset);
bool needsNewSpeed = truncatedDistance > FOLLOW_THRESHOLD_DISTANCE;
if (needsNewSpeed || (shouldFollow && _followSpeed == 0.0f)) {
// compute new speed
_followSpeed = _followOffsetDistance / FOLLOW_TIMESCALE;
}
if (_followSpeed > 0.0f) {
// to compute new velocity we must rotate offset into the world-frame
glm::quat sensorToWorldRotation = extractRotation(_sensorToWorldMatrix);
_followVelocity = _followSpeed * glm::normalize(sensorToWorldRotation * offset);
}
}
if (_isFollowingHMD) {
// only bother to rotate into world frame if we're following
glm::quat sensorToWorldRotation = extractRotation(_sensorToWorldMatrix);
_hmdFollowOffset = sensorToWorldRotation * _hmdFollowOffset;
}
if (needNewFollowSpeed) {
// compute new velocity that will be used to resolve offset of hips from body
const float FOLLOW_HMD_DURATION = 0.5f; // seconds
_hmdFollowVelocity = (_hmdFollowOffset / FOLLOW_HMD_DURATION);
_hmdFollowSpeed = glm::length(_hmdFollowVelocity);
} else if (_isFollowingHMD) {
// compute new velocity (but not new speed)
_hmdFollowVelocity = _hmdFollowSpeed * glm::normalize(_hmdFollowOffset);
}
}
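Taken together, the new updateHMDFollowVelocity() replaces the _hmdFollowOffset / _hmdFollowVelocity / _hmdFollowSpeed trio and the isMoving hysteresis with a single follow speed derived from the current offset. A minimal standalone sketch of the new decision logic, using free-function parameters instead of the real MyAvatar members (names here are illustrative only):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch only: mirrors the logic in the hunk above, not the exact implementation.
void updateFollow(const glm::vec3& offset,            // target body pos - current body pos, sensor frame
                  const glm::vec3& avatarVelocity,    // current avatar velocity
                  bool hmdIsAtRest,
                  const glm::quat& sensorToWorldRotation,
                  float& followOffsetDistance,
                  float& followSpeed,
                  glm::vec3& followVelocity) {
    const float FOLLOW_TIMESCALE = 0.5f;              // seconds to close the gap
    const float FOLLOW_THRESHOLD_SPEED = 0.2f;        // m/s
    const float FOLLOW_MIN_DISTANCE = 0.01f;          // m
    const float FOLLOW_THRESHOLD_DISTANCE = 0.2f;     // m
    const float FOLLOW_MAX_IDLE_DISTANCE = 0.1f;      // m

    followOffsetDistance = glm::length(offset);
    if (followOffsetDistance < FOLLOW_MIN_DISTANCE) {
        followOffsetDistance = 0.0f;                  // close enough, nothing to do
        return;
    }

    // follow when the HMD has settled, or the avatar is moving relative to the follow motion
    bool avatarIsMoving = glm::length(avatarVelocity - followVelocity) > FOLLOW_THRESHOLD_SPEED;
    bool shouldFollow = (hmdIsAtRest || avatarIsMoving) && followOffsetDistance > FOLLOW_MAX_IDLE_DISTANCE;

    // never pull the body down: ignore a negative vertical offset when testing the threshold
    glm::vec3 truncatedOffset = offset;
    if (truncatedOffset.y < 0.0f) {
        truncatedOffset.y = 0.0f;
    }
    bool needsNewSpeed = glm::length(truncatedOffset) > FOLLOW_THRESHOLD_DISTANCE;

    if (needsNewSpeed || (shouldFollow && followSpeed == 0.0f)) {
        followSpeed = followOffsetDistance / FOLLOW_TIMESCALE;
    }
    if (followSpeed > 0.0f) {
        // rotate the sensor-frame offset into the world frame before building the velocity
        followVelocity = followSpeed * glm::normalize(sensorToWorldRotation * offset);
    }
}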
@@ -1295,11 +1282,11 @@ void MyAvatar::prepareForPhysicsSimulation() {
_characterController.setAvatarPositionAndOrientation(getPosition(), getOrientation());
if (qApp->isHMDMode()) {
updateHMDFollowVelocity();
} else if (_isFollowingHMD) {
_isFollowingHMD = false;
_hmdFollowVelocity = Vectors::ZERO;
} else if (_followSpeed > 0.0f) {
_followVelocity = Vectors::ZERO;
_followSpeed = 0.0f;
}
_characterController.setHMDVelocity(_hmdFollowVelocity);
_characterController.setFollowVelocity(_followVelocity);
}
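The hand-off between MyAvatar and MyCharacterController is now expressed purely in terms of the follow velocity: MyAvatar pushes _followVelocity into the controller before the physics step and cancels the follow state when not in HMD mode. A compact sketch of that hand-off, assuming a hypothetical FollowState holder rather than the real members:

#include <glm/glm.hpp>

// Hypothetical container for the follow state this commit threads through MyAvatar.
struct FollowState {
    glm::vec3 velocity { 0.0f, 0.0f, 0.0f };
    float speed { 0.0f };
};

// Mirrors prepareForPhysicsSimulation(): only HMD mode drives a follow; otherwise cancel it.
glm::vec3 followVelocityForController(bool isHMDMode, FollowState& follow) {
    if (!isHMDMode && follow.speed > 0.0f) {
        follow.velocity = glm::vec3(0.0f);
        follow.speed = 0.0f;
    }
    return follow.velocity;   // handed to MyCharacterController::setFollowVelocity()
}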
void MyAvatar::harvestResultsFromPhysicsSimulation() {
@@ -1307,35 +1294,27 @@ void MyAvatar::harvestResultsFromPhysicsSimulation() {
glm::quat orientation = getOrientation();
_characterController.getAvatarPositionAndOrientation(position, orientation);
nextAttitude(position, orientation);
if (_isFollowingHMD) {
setVelocity(_characterController.getLinearVelocity() + _hmdFollowVelocity);
glm::vec3 hmdShift = _characterController.getHMDShift();
adjustSensorTransform(hmdShift);
if (_followSpeed > 0.0f) {
adjustSensorTransform();
setVelocity(_characterController.getLinearVelocity() + _followVelocity);
} else {
setVelocity(_characterController.getLinearVelocity());
}
}
void MyAvatar::adjustSensorTransform(glm::vec3 hmdShift) {
void MyAvatar::adjustSensorTransform() {
// compute blendFactor of latest hmdShift
// which we'll use to blend the rotation part
float blendFactor = 1.0f;
float shiftLength = glm::length(hmdShift);
if (shiftLength > 1.0e-5f) {
float offsetLength = glm::length(_hmdFollowOffset);
if (offsetLength > shiftLength) {
blendFactor = shiftLength / offsetLength;
}
}
float linearDistance = _characterController.getFollowTime() * _followSpeed;
float blendFactor = linearDistance < _followOffsetDistance ? linearDistance / _followOffsetDistance : 1.0f;
auto newBodySensorMatrix = deriveBodyFromHMDSensor();
auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
glm::quat finalBodyRotation = glm::normalize(glm::quat_cast(worldBodyMatrix));
if (blendFactor >= 0.99f) {
// the "adjustment" is more or less complete so stop following
_isFollowingHMD = false;
_hmdFollowSpeed = 0.0f;
_hmdFollowVelocity = Vectors::ZERO;
_followVelocity = Vectors::ZERO;
_followSpeed = 0.0f;
// and slam the body's transform anyway to eliminate any slight errors
glm::vec3 finalBodyPosition = extractTranslation(worldBodyMatrix);
nextAttitude(finalBodyPosition, finalBodyRotation);
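The blend factor no longer depends on the hmdShift vector; it is simply the fraction of the follow offset the controller has had time to cover: followTime * followSpeed / followOffsetDistance, clamped to 1. Because _followSpeed is set to _followOffsetDistance / FOLLOW_TIMESCALE, the blend reaches 1.0 after roughly 0.5 s of accumulated follow time, at which point the follow state is cleared. A minimal sketch with hypothetical names:

#include <algorithm>

// Fraction of the follow correction completed so far, in [0, 1].
float followBlendFactor(float followTime, float followSpeed, float followOffsetDistance) {
    if (followOffsetDistance <= 0.0f) {
        return 1.0f;                                   // nothing left to cover
    }
    float linearDistance = followTime * followSpeed;   // distance covered so far
    return std::min(1.0f, linearDistance / followOffsetDistance);
}
// e.g. with followOffsetDistance = 0.3 m and followSpeed = 0.3 / 0.5 = 0.6 m/s,
// the blend reaches 1.0 once followTime reaches ~0.5 s.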
@@ -1515,6 +1494,9 @@ void MyAvatar::initAnimGraph() {
QUrl::fromLocalFile(PathUtils::resourcesPath() + "meshes/defaultAvatar_full/avatar-animation.json") :
_animGraphUrl);
_rig->initAnimGraph(graphUrl, _skeletonModel.getGeometry()->getFBXGeometry());
_bodySensorMatrix = deriveBodyFromHMDSensor(); // Based on current cached HMD position/rotation..
updateSensorToWorldMatrix(); // Uses updated position/orientation and _bodySensorMatrix changes
}
void MyAvatar::destroyAnimGraph() {
@@ -1989,53 +1971,19 @@ glm::quat MyAvatar::getWorldBodyOrientation() const {
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
// HMD is in sensor space.
const glm::vec3 hmdPosition = getHMDSensorPosition();
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 1.6f, 0.0f);
const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 1.6f, 0.0f);
const glm::vec3 DEFAULT_NECK_POS(0.0f, 1.5f, 0.0f);
const glm::vec3 DEFAULT_HIPS_POS(0.0f, 1.0f, 0.0f);
vec3 localEyes, localNeck;
if (!_debugDrawSkeleton) {
const glm::quat rotY180 = glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f));
localEyes = rotY180 * (((DEFAULT_RIGHT_EYE_POS + DEFAULT_LEFT_EYE_POS) / 2.0f) - DEFAULT_HIPS_POS);
localNeck = rotY180 * (DEFAULT_NECK_POS - DEFAULT_HIPS_POS);
} else {
// TODO: At the moment MyAvatar does not have access to the rig, which has the skeleton, which has the bind poses.
// for now use the _debugDrawSkeleton, which is initialized with the same FBX model as the rig.
// TODO: cache these indices.
int rightEyeIndex = _debugDrawSkeleton->nameToJointIndex("RightEye");
int leftEyeIndex = _debugDrawSkeleton->nameToJointIndex("LeftEye");
int neckIndex = _debugDrawSkeleton->nameToJointIndex("Neck");
int hipsIndex = _debugDrawSkeleton->nameToJointIndex("Hips");
glm::vec3 absRightEyePos = rightEyeIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(rightEyeIndex).trans : DEFAULT_RIGHT_EYE_POS;
glm::vec3 absLeftEyePos = leftEyeIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(leftEyeIndex).trans : DEFAULT_LEFT_EYE_POS;
glm::vec3 absNeckPos = neckIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(neckIndex).trans : DEFAULT_NECK_POS;
glm::vec3 absHipsPos = neckIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(hipsIndex).trans : DEFAULT_HIPS_POS;
const glm::quat rotY180 = glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f));
localEyes = rotY180 * (((absRightEyePos + absLeftEyePos) / 2.0f) - absHipsPos);
localNeck = rotY180 * (absNeckPos - absHipsPos);
if (_rig) {
// orientation
const glm::quat hmdOrientation = getHMDSensorOrientation();
const glm::quat yaw = cancelOutRollAndPitch(hmdOrientation);
// position
// we flip about yAxis when going from "root" to "avatar" frame
// and we must also apply "yaw" to get into HMD frame
glm::quat rotY180 = glm::angleAxis((float)M_PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::vec3 eyesInAvatarFrame = rotY180 * yaw * _rig->getEyesInRootFrame();
glm::vec3 bodyPos = getHMDSensorPosition() - eyesInAvatarFrame;
return createMatFromQuatAndPos(yaw, bodyPos);
}
// apply simplistic head/neck model
// figure out where the avatar body should be by applying offsets from the avatar's neck & head joints.
// eyeToNeck offset is relative full HMD orientation.
// while neckToRoot offset is only relative to HMDs yaw.
glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;
// avatar facing is determined solely by hmd orientation.
return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
return glm::mat4();
}
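The rewritten deriveBodyFromHMDSensor() drops the default-joint/debug-skeleton head-and-neck model in favor of the rig's cached eye offset: only the HMD's yaw survives into the body orientation, and the body position is the HMD position minus the eye offset rotated into the avatar frame (with the 180-degree flip about the y-axis between root and avatar frames). A self-contained sketch of the new path; yawOnly() stands in for cancelOutRollAndPitch(), and the matrix assembly for createMatFromQuatAndPos():

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

// Stand-in for cancelOutRollAndPitch(): keep only the rotation about the world y-axis.
static glm::quat yawOnly(const glm::quat& q) {
    glm::vec3 forward = q * glm::vec3(0.0f, 0.0f, -1.0f);
    return glm::angleAxis(atan2f(-forward.x, -forward.z), glm::vec3(0.0f, 1.0f, 0.0f));
}

glm::mat4 deriveBodyFromHMD(const glm::vec3& hmdPosition, const glm::quat& hmdOrientation,
                            const glm::vec3& eyesInRootFrame) {
    glm::quat yaw = yawOnly(hmdOrientation);
    // flip about the y-axis to go from "root" to "avatar" frame, then apply the HMD yaw
    glm::quat rotY180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::vec3 eyesInAvatarFrame = rotY180 * yaw * eyesInRootFrame;
    glm::vec3 bodyPos = hmdPosition - eyesInAvatarFrame;
    glm::mat4 bodyMatrix = glm::mat4_cast(yaw);        // rotation part
    bodyMatrix[3] = glm::vec4(bodyPos, 1.0f);          // translation part
    return bodyMatrix;
}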
glm::vec3 MyAvatar::getPositionForAudio() {

View file

@@ -206,7 +206,7 @@ public:
void prepareForPhysicsSimulation();
void harvestResultsFromPhysicsSimulation();
void adjustSensorTransform(glm::vec3 hmdShift);
void adjustSensorTransform();
const QString& getCollisionSoundURL() { return _collisionSoundURL; }
void setCollisionSoundURL(const QString& url);
@@ -329,7 +329,7 @@ private:
PalmData getActivePalmData(int palmIndex) const;
// derive avatar body position and orientation from the current HMD Sensor location.
// results are in sensor space
// results are in HMD frame
glm::mat4 deriveBodyFromHMDSensor() const;
float _driveKeys[MAX_DRIVE_KEYS];
@@ -393,9 +393,10 @@ private:
// used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
glm::mat4 _sensorToWorldMatrix;
glm::vec3 _hmdFollowOffset { Vectors::ZERO };
glm::vec3 _hmdFollowVelocity { Vectors::ZERO };
float _hmdFollowSpeed { 0.0f };
glm::vec3 _followVelocity { Vectors::ZERO };
float _followSpeed { 0.0f };
float _followOffsetDistance { 0.0f };
bool _goToPending;
glm::vec3 _goToPosition;
@@ -413,9 +414,6 @@ private:
glm::vec3 _customListenPosition;
glm::quat _customListenOrientation;
bool _isFollowingHMD { false };
float _followHMDAlpha { 0.0f };
AtRestDetector _hmdAtRestDetector;
bool _lastIsMoving { false };
};

View file

@@ -60,7 +60,7 @@ MyCharacterController::MyCharacterController(MyAvatar* avatar) {
_floorDistance = MAX_FALL_HEIGHT;
_walkVelocity.setValue(0.0f, 0.0f, 0.0f);
_hmdVelocity.setValue(0.0f, 0.0f, 0.0f);
_followVelocity.setValue(0.0f, 0.0f, 0.0f);
_jumpSpeed = JUMP_SPEED;
_isOnGround = false;
_isJumping = false;
@@ -68,7 +68,7 @@ MyCharacterController::MyCharacterController(MyAvatar* avatar) {
_isHovering = true;
_isPushingUp = false;
_jumpToHoverStart = 0;
_lastStepDuration = 0.0f;
_followTime = 0.0f;
_pendingFlags = PENDING_FLAG_UPDATE_SHAPE;
updateShapeIfNecessary();
@@ -161,16 +161,14 @@ void MyCharacterController::playerStep(btCollisionWorld* dynaWorld, btScalar dt)
}
}
// Rather than add _hmdVelocity to the velocity of the RigidBody, we explicitly teleport
// Rather than add _followVelocity to the velocity of the RigidBody, we explicitly teleport
// the RigidBody forward according to the formula: distance = rate * time
if (_hmdVelocity.length2() > 0.0f) {
if (_followVelocity.length2() > 0.0f) {
btTransform bodyTransform = _rigidBody->getWorldTransform();
bodyTransform.setOrigin(bodyTransform.getOrigin() + dt * _hmdVelocity);
bodyTransform.setOrigin(bodyTransform.getOrigin() + dt * _followVelocity);
_rigidBody->setWorldTransform(bodyTransform);
}
// MyAvatar will ask us how far we stepped for HMD motion, which will depend on how
// much time has accumulated in _lastStepDuration.
_lastStepDuration += dt;
_followTime += dt;
}
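Rather than feeding the follow velocity into the rigid body's velocity (where it would mix with the walk velocity and gravity), playerStep() teleports the body forward by distance = rate * time and accumulates _followTime so MyAvatar can later measure follow progress; preSimulation() zeroes the accumulator each frame. A Bullet-flavoured sketch of just that piece, with hypothetical free-function parameters:

#include <btBulletDynamicsCommon.h>

// Sketch: advance the body along the follow velocity by teleporting, and track follow time.
void stepFollow(btRigidBody* rigidBody, const btVector3& followVelocity,
                btScalar dt, btScalar& followTime) {
    if (followVelocity.length2() > 0.0f) {
        btTransform bodyTransform = rigidBody->getWorldTransform();
        // distance = rate * time: move the origin directly instead of changing the body's velocity
        bodyTransform.setOrigin(bodyTransform.getOrigin() + dt * followVelocity);
        rigidBody->setWorldTransform(bodyTransform);
    }
    followTime += dt;   // reset to 0 in preSimulation() before the next frame's steps
}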
void MyCharacterController::jump() {
@@ -346,8 +344,8 @@ void MyCharacterController::setTargetVelocity(const glm::vec3& velocity) {
_walkVelocity = glmToBullet(velocity);
}
void MyCharacterController::setHMDVelocity(const glm::vec3& velocity) {
_hmdVelocity = glmToBullet(velocity);
void MyCharacterController::setFollowVelocity(const glm::vec3& velocity) {
_followVelocity = glmToBullet(velocity);
}
glm::vec3 MyCharacterController::getLinearVelocity() const {
@@ -400,7 +398,7 @@ void MyCharacterController::preSimulation() {
}
}
}
_lastStepDuration = 0.0f;
_followTime = 0.0f;
}
void MyCharacterController::postSimulation() {

View file

@@ -64,8 +64,8 @@ public:
void getAvatarPositionAndOrientation(glm::vec3& position, glm::quat& rotation) const;
void setTargetVelocity(const glm::vec3& velocity);
void setHMDVelocity(const glm::vec3& velocity);
glm::vec3 getHMDShift() const { return _lastStepDuration * bulletToGLM(_hmdVelocity); }
void setFollowVelocity(const glm::vec3& velocity);
float getFollowTime() const { return _followTime; }
glm::vec3 getLinearVelocity() const;
@@ -75,7 +75,7 @@ protected:
protected:
btVector3 _currentUp;
btVector3 _walkVelocity;
btVector3 _hmdVelocity;
btVector3 _followVelocity;
btTransform _avatarBodyTransform;
glm::vec3 _shapeLocalOffset;
@@ -93,7 +93,7 @@ protected:
btScalar _gravity;
btScalar _jumpSpeed;
btScalar _lastStepDuration;
btScalar _followTime;
bool _enabled;
bool _isOnGround;

View file

@@ -407,6 +407,24 @@ void Rig::calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds,
*alphaOut = alpha;
}
void Rig::computeEyesInRootFrame(const AnimPoseVec& poses) {
// TODO: use cached eye/hips indices for these calculations
int numPoses = poses.size();
int rightEyeIndex = _animSkeleton->nameToJointIndex(QString("RightEye"));
int leftEyeIndex = _animSkeleton->nameToJointIndex(QString("LeftEye"));
if (numPoses > rightEyeIndex && numPoses > leftEyeIndex
&& rightEyeIndex > 0 && leftEyeIndex > 0) {
int hipsIndex = _animSkeleton->nameToJointIndex(QString("Hips"));
int headIndex = _animSkeleton->nameToJointIndex(QString("Head"));
if (hipsIndex >= 0 && headIndex > 0) {
glm::vec3 rightEye = _animSkeleton->getAbsolutePose(rightEyeIndex, poses).trans;
glm::vec3 leftEye = _animSkeleton->getAbsolutePose(leftEyeIndex, poses).trans;
glm::vec3 hips = _animSkeleton->getAbsolutePose(hipsIndex, poses).trans;
_eyesInRootFrame = 0.5f * (rightEye + leftEye) - hips;
}
}
}
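computeEyesInRootFrame() caches the midpoint of the two eye joints relative to the hips; MyAvatar::deriveBodyFromHMDSensor() reads that cached offset via getEyesInRootFrame() to place the body under the HMD without walking the skeleton every frame. The geometry is just a midpoint and a subtraction; a trivial sketch:

#include <glm/glm.hpp>

// eyes-in-root-frame = midpoint of the eye joints, expressed relative to the hips joint
glm::vec3 eyesInRootFrame(const glm::vec3& rightEye, const glm::vec3& leftEye, const glm::vec3& hips) {
    return 0.5f * (rightEye + leftEye) - hips;
}

The value is seeded from the relative bind poses in makeAnimSkeleton() (below) and refreshed from the blended poses each frame in updateAnimations(), so it is valid before the first animation update.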
// animation reference speeds.
static const std::vector<float> FORWARD_SPEEDS = { 0.4f, 1.4f, 4.5f }; // m/s
static const std::vector<float> BACKWARD_SPEEDS = { 0.6f, 1.45f }; // m/s
@@ -730,6 +748,7 @@ void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
setJointTranslation((int)i, true, poses[i].trans, PRIORITY);
}
computeEyesInRootFrame(poses);
} else {
// First normalize the fades so that they sum to 1.0.
@@ -1124,14 +1143,14 @@ void Rig::updateLeanJoint(int index, float leanSideways, float leanForward, floa
static AnimPose avatarToBonePose(AnimPose pose, AnimSkeleton::ConstPointer skeleton) {
AnimPose rootPose = skeleton->getAbsoluteBindPose(skeleton->nameToJointIndex("Hips"));
AnimPose rotY180(glm::vec3(1), glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
AnimPose rotY180(glm::vec3(1.0f), glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
return rootPose * rotY180 * pose;
}
#ifdef DEBUG_RENDERING
static AnimPose boneToAvatarPose(AnimPose pose, AnimSkeleton::ConstPointer skeleton) {
AnimPose rootPose = skeleton->getAbsoluteBindPose(skeleton->nameToJointIndex("Hips"));
AnimPose rotY180(glm::vec3(1), glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
AnimPose rotY180(glm::vec3(1.0f), glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
return (rootPose * rotY180).inverse() * pose;
}
#endif
@@ -1342,6 +1361,7 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
void Rig::makeAnimSkeleton(const FBXGeometry& fbxGeometry) {
if (!_animSkeleton) {
_animSkeleton = std::make_shared<AnimSkeleton>(fbxGeometry);
computeEyesInRootFrame(_animSkeleton->getRelativeBindPoses());
}
}

View file

@@ -214,6 +214,8 @@ public:
bool getModelOffset(glm::vec3& modelOffsetOut) const;
const glm::vec3& getEyesInRootFrame() const { return _eyesInRootFrame; }
protected:
void updateAnimationStateHandlers();
@@ -222,6 +224,8 @@ public:
void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
void calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds, float* alphaOut) const;
void computeEyesInRootFrame(const AnimPoseVec& poses);
QVector<JointState> _jointStates;
int _rootJointIndex = -1;
@@ -241,6 +245,7 @@ public:
glm::vec3 _lastFront;
glm::vec3 _lastPosition;
glm::vec3 _lastVelocity;
glm::vec3 _eyesInRootFrame { Vectors::ZERO };
std::shared_ptr<AnimNode> _animNode;
std::shared_ptr<AnimSkeleton> _animSkeleton;