use animation state to compute bodyInSensorFrame

commit 6733767d8b (parent 91bc7ca062)
Author: U-GAPOS\andrew, 2015-11-13 16:39:40 -08:00
4 changed files with 40 additions and 49 deletions

MyAvatar.cpp

@@ -1975,53 +1975,19 @@ glm::quat MyAvatar::getWorldBodyOrientation() const {
 // derive avatar body position and orientation from the current HMD Sensor location.
 // results are in sensor space
 glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
-    // HMD is in sensor space.
-    const glm::vec3 hmdPosition = getHMDSensorPosition();
-    const glm::quat hmdOrientation = getHMDSensorOrientation();
-    const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
-    const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 1.6f, 0.0f);
-    const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 1.6f, 0.0f);
-    const glm::vec3 DEFAULT_NECK_POS(0.0f, 1.5f, 0.0f);
-    const glm::vec3 DEFAULT_HIPS_POS(0.0f, 1.0f, 0.0f);
-    vec3 localEyes, localNeck;
-    if (!_debugDrawSkeleton) {
-        const glm::quat rotY180 = glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f));
-        localEyes = rotY180 * (((DEFAULT_RIGHT_EYE_POS + DEFAULT_LEFT_EYE_POS) / 2.0f) - DEFAULT_HIPS_POS);
-        localNeck = rotY180 * (DEFAULT_NECK_POS - DEFAULT_HIPS_POS);
-    } else {
-        // TODO: At the moment MyAvatar does not have access to the rig, which has the skeleton, which has the bind poses.
-        // for now use the _debugDrawSkeleton, which is initialized with the same FBX model as the rig.
-        // TODO: cache these indices.
-        int rightEyeIndex = _debugDrawSkeleton->nameToJointIndex("RightEye");
-        int leftEyeIndex = _debugDrawSkeleton->nameToJointIndex("LeftEye");
-        int neckIndex = _debugDrawSkeleton->nameToJointIndex("Neck");
-        int hipsIndex = _debugDrawSkeleton->nameToJointIndex("Hips");
-        glm::vec3 absRightEyePos = rightEyeIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(rightEyeIndex).trans : DEFAULT_RIGHT_EYE_POS;
-        glm::vec3 absLeftEyePos = leftEyeIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(leftEyeIndex).trans : DEFAULT_LEFT_EYE_POS;
-        glm::vec3 absNeckPos = neckIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(neckIndex).trans : DEFAULT_NECK_POS;
-        glm::vec3 absHipsPos = neckIndex != -1 ? _debugDrawSkeleton->getAbsoluteBindPose(hipsIndex).trans : DEFAULT_HIPS_POS;
-        const glm::quat rotY180 = glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f));
-        localEyes = rotY180 * (((absRightEyePos + absLeftEyePos) / 2.0f) - absHipsPos);
-        localNeck = rotY180 * (absNeckPos - absHipsPos);
+    if (_rig) {
+        // orientation
+        const glm::quat hmdOrientation = getHMDSensorOrientation();
+        const glm::quat yaw = cancelOutRollAndPitch(hmdOrientation);
+        // position
+        // we flip about yAxis when going from "root" to "avatar" frame
+        // and we must also apply "yaw" to get into HMD frame
+        glm::quat rotY180 = glm::angleAxis((float)M_PI, glm::vec3(0.0f, 1.0f, 0.0f));
+        glm::vec3 eyesInAvatarFrame = rotY180 * yaw * _rig->getEyesInRootFrame();
+        glm::vec3 bodyPos = getHMDSensorPosition() - eyesInAvatarFrame;
+        return createMatFromQuatAndPos(yaw, bodyPos);
     }
-    // apply simplistic head/neck model
-    // figure out where the avatar body should be by applying offsets from the avatar's neck & head joints.
-    // eyeToNeck offset is relative full HMD orientation.
-    // while neckToRoot offset is only relative to HMDs yaw.
-    glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
-    glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
-    glm::vec3 bodyPos = hmdPosition + eyeToNeck + neckToRoot;
-    // avatar facing is determined solely by hmd orientation.
-    return createMatFromQuatAndPos(hmdOrientationYawOnly, bodyPos);
+    return glm::mat4();
 }

 glm::vec3 MyAvatar::getPositionForAudio() {
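Taken on its own, the new body does one job: place the avatar body so that the rig's animated eye midpoint lands exactly on the HMD. Here is a minimal, self-contained sketch of the same geometry in plain GLM; yawOnly() is a hypothetical stand-in for the engine's cancelOutRollAndPitch(), and the matrix assembly at the end replaces createMatFromQuatAndPos(), so only the math carries over, not the real interfaces.

    #include <cmath>
    #include <glm/glm.hpp>
    #include <glm/gtc/constants.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Yaw-only part of a rotation (stand-in for cancelOutRollAndPitch):
    // rotate -Z by q, flatten onto the XZ plane, rebuild a rotation about +Y.
    static glm::quat yawOnly(const glm::quat& q) {
        glm::vec3 fwd = q * glm::vec3(0.0f, 0.0f, -1.0f);
        fwd.y = 0.0f;
        if (glm::dot(fwd, fwd) < 1.0e-6f) {
            return glm::quat(1.0f, 0.0f, 0.0f, 0.0f); // looking straight up/down; punt
        }
        fwd = glm::normalize(fwd);
        return glm::angleAxis(atan2f(-fwd.x, -fwd.z), glm::vec3(0.0f, 1.0f, 0.0f));
    }

    // Sketch of the new deriveBodyFromHMDSensor(): hmdPos/hmdRot are the HMD pose
    // in sensor space, eyesInRootFrame is the animated eye midpoint relative to
    // the hips (what Rig::getEyesInRootFrame() returns).
    glm::mat4 deriveBodySketch(const glm::vec3& hmdPos, const glm::quat& hmdRot,
                               const glm::vec3& eyesInRootFrame) {
        glm::quat yaw = yawOnly(hmdRot); // body follows heading only, stays upright
        glm::quat rotY180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
        // apply the HMD heading, then the root-to-avatar flip about Y,
        // giving the eye offset in sensor space
        glm::vec3 eyeOffset = rotY180 * yaw * eyesInRootFrame;
        glm::vec3 bodyPos = hmdPos - eyeOffset; // eyes land exactly on the HMD
        glm::mat4 result = glm::mat4_cast(yaw);
        result[3] = glm::vec4(bodyPos, 1.0f);
        return result;
    }

As the diff comment notes, getEyesInRootFrame() is expressed in the rig's root frame, which faces opposite the avatar frame, hence the extra 180-degree flip on top of the HMD yaw.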

MyAvatar.h

@@ -330,7 +330,7 @@ private:
     PalmData getActivePalmData(int palmIndex) const;

     // derive avatar body position and orientation from the current HMD Sensor location.
-    // results are in sensor space
+    // results are in HMD frame
     glm::mat4 deriveBodyFromHMDSensor() const;

     float _driveKeys[MAX_DRIVE_KEYS];

Rig.cpp

@@ -407,6 +407,24 @@ void Rig::calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds,
     *alphaOut = alpha;
 }

+void Rig::computeEyesInRootFrame(const AnimPoseVec& poses) {
+    // TODO: use cached eye/hips indices for these calculations
+    int numPoses = poses.size();
+    int rightEyeIndex = _animSkeleton->nameToJointIndex(QString("RightEye"));
+    int leftEyeIndex = _animSkeleton->nameToJointIndex(QString("LeftEye"));
+    if (numPoses > rightEyeIndex && numPoses > leftEyeIndex
+        && rightEyeIndex > 0 && leftEyeIndex > 0) {
+        int hipsIndex = _animSkeleton->nameToJointIndex(QString("Hips"));
+        int headIndex = _animSkeleton->nameToJointIndex(QString("Head"));
+        if (hipsIndex >= 0 && headIndex > 0) {
+            glm::vec3 rightEye = _animSkeleton->getAbsolutePose(rightEyeIndex, poses).trans;
+            glm::vec3 leftEye = _animSkeleton->getAbsolutePose(leftEyeIndex, poses).trans;
+            glm::vec3 hips = _animSkeleton->getAbsolutePose(hipsIndex, poses).trans;
+            _eyesInRootFrame = 0.5f * (rightEye + leftEye) - hips;
+        }
+    }
+}
+
 // animation reference speeds.
 static const std::vector<float> FORWARD_SPEEDS = { 0.4f, 1.4f, 4.5f }; // m/s
 static const std::vector<float> BACKWARD_SPEEDS = { 0.6f, 1.45f }; // m/s
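computeEyesInRootFrame() relies on getAbsolutePose() to resolve a joint out of the skeleton hierarchy into root (model) space. A rough sketch of what that accumulation does, assuming a flat parent-index array and a stripped-down pose type in place of the engine's AnimPose/AnimSkeleton:

    #include <vector>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Stripped-down joint pose: rotation + translation, no scale.
    struct SimplePose {
        glm::quat rot { 1.0f, 0.0f, 0.0f, 0.0f };
        glm::vec3 trans { 0.0f };
    };

    // Walk up the parent chain, composing parent * child at each step, which is
    // how getAbsolutePose() conceptually turns a relative pose into a root-space
    // one. parents[i] is the parent joint index, or -1 at the root.
    SimplePose absolutePose(int index,
                            const std::vector<SimplePose>& relative,
                            const std::vector<int>& parents) {
        SimplePose result = relative[index];
        for (int p = parents[index]; p != -1; p = parents[p]) {
            result.trans = relative[p].trans + relative[p].rot * result.trans;
            result.rot = relative[p].rot * result.rot;
        }
        return result;
    }

_eyesInRootFrame then falls out as the midpoint of the two absolute eye positions minus the absolute hips position. Per the hunks below, it is refreshed from the blended poses on every updateAnimations() pass and seeded once from the bind poses in makeAnimSkeleton(), so deriveBodyFromHMDSensor() sees a sensible value before the first animation update runs.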
@@ -730,6 +748,7 @@ void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
                 setJointTranslation((int)i, true, poses[i].trans, PRIORITY);
             }
+            computeEyesInRootFrame(poses);
         } else {
             // First normalize the fades so that they sum to 1.0.
@@ -1124,14 +1143,14 @@ void Rig::updateLeanJoint(int index, float leanSideways, float leanForward, floa
 static AnimPose avatarToBonePose(AnimPose pose, AnimSkeleton::ConstPointer skeleton) {
     AnimPose rootPose = skeleton->getAbsoluteBindPose(skeleton->nameToJointIndex("Hips"));
-    AnimPose rotY180(glm::vec3(1), glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
+    AnimPose rotY180(glm::vec3(1.0f), glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
     return rootPose * rotY180 * pose;
 }

 #ifdef DEBUG_RENDERING
 static AnimPose boneToAvatarPose(AnimPose pose, AnimSkeleton::ConstPointer skeleton) {
     AnimPose rootPose = skeleton->getAbsoluteBindPose(skeleton->nameToJointIndex("Hips"));
-    AnimPose rotY180(glm::vec3(1), glm::angleAxis((float)PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
+    AnimPose rotY180(glm::vec3(1.0f), glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)), glm::vec3(0));
     return (rootPose * rotY180).inverse() * pose;
 }
 #endif
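Both helpers wrap a pose with the hips bind pose and the same 180-degree yaw, converting between the avatar frame and the skeleton's bone frame; boneToAvatarPose() applies the exact inverse, so the two round-trip cleanly. A tiny standalone check of what the flip does to a forward vector (plain GLM; the avatar-faces-minus-Z convention is an assumption here, not something this diff states):

    #include <cstdio>
    #include <glm/glm.hpp>
    #include <glm/gtc/constants.hpp>
    #include <glm/gtc/quaternion.hpp>

    int main() {
        // the same 180-degree rotation about +Y that both helpers construct
        glm::quat rotY180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));

        // a point one meter in front of the avatar (assuming avatars face -Z);
        // the flip sends it one meter down +Z, the skeleton's facing direction
        glm::vec3 inFront(0.0f, 0.0f, -1.0f);
        glm::vec3 flipped = rotY180 * inFront;
        printf("(%g, %g, %g)\n", flipped.x, flipped.y, flipped.z); // ~(0, 0, 1)
        return 0;
    }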
@@ -1342,6 +1361,7 @@ void Rig::updateFromHandParameters(const HandParameters& params, float dt) {
 void Rig::makeAnimSkeleton(const FBXGeometry& fbxGeometry) {
     if (!_animSkeleton) {
         _animSkeleton = std::make_shared<AnimSkeleton>(fbxGeometry);
+        computeEyesInRootFrame(_animSkeleton->getRelativeBindPoses());
     }
 }

Rig.h

@@ -214,6 +214,8 @@ public:
     bool getModelOffset(glm::vec3& modelOffsetOut) const;

+    const glm::vec3& getEyesInRootFrame() const { return _eyesInRootFrame; }
+
 protected:

     void updateAnimationStateHandlers();
@@ -222,6 +224,8 @@ public:
     void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
     void calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds, float* alphaOut) const;

+    void computeEyesInRootFrame(const AnimPoseVec& poses);
+
     QVector<JointState> _jointStates;
     int _rootJointIndex = -1;
@@ -241,6 +245,7 @@ public:
     glm::vec3 _lastFront;
     glm::vec3 _lastPosition;
     glm::vec3 _lastVelocity;
+    glm::vec3 _eyesInRootFrame { Vectors::ZERO };

     std::shared_ptr<AnimNode> _animNode;
     std::shared_ptr<AnimSkeleton> _animSkeleton;