fix hand IK rotations for all models

Andrew Meadows 2015-08-24 17:58:11 -07:00
parent 802b3a88c3
commit bb63267e30
5 changed files with 122 additions and 67 deletions

View file

@@ -157,11 +157,11 @@ void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setBlendshapeCoefficients(_owningAvatar->getHead()->getBlendshapeCoefficients());
Model::simulate(deltaTime, fullUpdate);
if (!isActive() || !_owningAvatar->isMyAvatar()) {
return; // only simulate for own avatar
}
MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
if (myAvatar->isPlaying()) {
// Don't take inputs if playing back a recording.
@@ -248,40 +248,24 @@ void SkeletonModel::applyHandPosition(int jointIndex, const glm::vec3& position)
rotationBetween(handRotation * glm::vec3(-sign, 0.0f, 0.0f), forearmVector),
true, PALM_PRIORITY);
}
void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
if (jointIndex == -1 || jointIndex >= _rig->getJointStateCount()) {
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
float sign = (jointIndex == geometry.rightHandJointIndex) ? 1.0f : -1.0f;
int parentJointIndex = geometry.joints.at(jointIndex).parentIndex;
if (parentJointIndex == -1) {
return;
}
// rotate palm to align with its normal (normal points out of hand's palm)
// the palm's position must be transformed into the model-frame
glm::quat inverseRotation = glm::inverse(_rotation);
glm::vec3 palmPosition = inverseRotation * (palm.getPosition() - _translation);
glm::vec3 palmNormal = inverseRotation * palm.getNormal();
glm::vec3 fingerDirection = inverseRotation * palm.getFingerDirection();
glm::quat palmRotation = rotationBetween(geometry.palmDirection, palmNormal);
palmRotation = rotationBetween(palmRotation * glm::vec3(-sign, 0.0f, 0.0f), fingerDirection) * palmRotation;
// the palm's "raw" rotation is already in the model-frame
glm::quat palmRotation = palm.getRawRotation();
if (Menu::getInstance()->isOptionChecked(MenuOption::AlternateIK)) {
_rig->setHandPosition(jointIndex, palmPosition, palmRotation, extractUniformScale(_scale), PALM_PRIORITY);
} else if (Menu::getInstance()->isOptionChecked(MenuOption::AlignForearmsWithWrists)) {
float forearmLength = geometry.joints.at(jointIndex).distanceToParent * extractUniformScale(_scale);
glm::vec3 forearm = palmRotation * glm::vec3(sign * forearmLength, 0.0f, 0.0f);
setJointPosition(parentJointIndex, palmPosition + forearm,
glm::quat(), false, -1, false, glm::vec3(0.0f, -1.0f, 0.0f), PALM_PRIORITY);
_rig->setJointRotationInBindFrame(parentJointIndex, palmRotation, PALM_PRIORITY);
// lock hand to forearm by slamming its rotation (in parent-frame) to identity
_rig->setJointRotationInConstrainedFrame(jointIndex, glm::quat(), PALM_PRIORITY);
} else {
inverseKinematics(jointIndex, palmPosition, palmRotation, PALM_PRIORITY);
}
inverseKinematics(jointIndex, palmPosition, palmRotation, PALM_PRIORITY);
}
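The net effect of the simplified applyPalmData() is that only the palm's position still needs the world-to-model conversion; its raw rotation is already expressed in the model-frame and is handed to the IK solver unchanged. A minimal sketch of that conversion in plain glm (the struct and function names below are illustrative, not part of the commit):
// Illustrative sketch (plain glm, not SkeletonModel): the world-to-model transform
// applyPalmData() performs on the palm position before calling inverseKinematics().
// palmPoseInModelFrame() and ModelFramePose are hypothetical names; modelTranslation
// and modelRotation stand in for the model's _translation and _rotation.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct ModelFramePose {
    glm::vec3 position;
    glm::quat rotation;
};

ModelFramePose palmPoseInModelFrame(const glm::vec3& palmWorldPosition,
                                    const glm::quat& palmRawRotation,   // already model-frame
                                    const glm::vec3& modelTranslation,  // model's world position
                                    const glm::quat& modelRotation) {   // model's world rotation
    ModelFramePose pose;
    // position: remove the model's translation, then undo its world rotation
    pose.position = glm::inverse(modelRotation) * (palmWorldPosition - modelTranslation);
    // rotation: the palm's "raw" rotation is already in the model-frame,
    // so it passes through to the IK solver untouched
    pose.rotation = palmRawRotation;
    return pose;
}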
void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
@@ -301,13 +285,13 @@ void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
_rotation :
_rotation * _rig->getJointState(joint.parentIndex).getRotation();
float fanScale = directionSize * 0.75f;
Transform transform = Transform();
transform.setTranslation(position);
transform.setRotation(parentRotation);
transform.setScale(fanScale);
batch.setModelTransform(transform);
const int AXIS_COUNT = 3;
auto geometryCache = DependencyManager::get<GeometryCache>();
@@ -318,7 +302,7 @@ void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
}
glm::vec3 axis;
axis[i] = 1.0f;
glm::vec3 otherAxis;
if (i == 0) {
otherAxis.y = 1.0f;
@@ -339,18 +323,18 @@ void SkeletonModel::renderJointConstraints(gpu::Batch& batch, int jointIndex) {
// better if the skeleton model cached these buffers for each of the joints they are rendering
geometryCache->updateVertices(_triangleFanID, points, color);
geometryCache->renderVertices(batch, gpu::TRIANGLE_FAN, _triangleFanID);
}
renderOrientationDirections(batch, jointIndex, position, _rotation * jointState.getRotation(), directionSize);
jointIndex = joint.parentIndex;
} while (jointIndex != -1 && geometry.joints.at(jointIndex).isFree);
}
void SkeletonModel::renderOrientationDirections(gpu::Batch& batch, int jointIndex,
glm::vec3 position, const glm::quat& orientation, float size) {
auto geometryCache = DependencyManager::get<GeometryCache>();
if (!_jointOrientationLines.contains(jointIndex)) {
@@ -486,7 +470,7 @@ void SkeletonModel::buildShapes() {
if (_geometry == NULL || _rig->jointStatesEmpty()) {
return;
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (geometry.joints.isEmpty() || geometry.rootJointIndex == -1) {
// rootJointIndex == -1 if the avatar model has no skeleton
@@ -551,7 +535,7 @@ void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float alpha
geometryCache->renderSphere(batch, _boundingCapsuleRadius, BALL_SUBDIVISIONS, BALL_SUBDIVISIONS,
glm::vec4(0.6f, 0.6f, 0.8f, alpha));
// draw a yellow sphere at the capsule bottom point
glm::vec3 bottomPoint = topPoint - glm::vec3(0.0f, -_boundingCapsuleHeight, 0.0f);
glm::vec3 axis = topPoint - bottomPoint;
transform.setTranslation(bottomPoint);

View file

@@ -153,6 +153,24 @@ void JointState::setRotationInBindFrame(const glm::quat& rotation, float priorit
}
}
void JointState::setRotationInModelFrame(const glm::quat& rotationInModelFrame, float priority, bool constrain) {
// rotation is from bind- to model-frame
if (priority >= _animationPriority) {
glm::quat parentRotation = computeParentRotation();
// R = Rp * Rpre * r * Rpost
// R' = Rp * Rpre * r' * Rpost
// r' = (Rp * Rpre)^ * R' * Rpost^
glm::quat targetRotation = glm::inverse(parentRotation * _preRotation) * rotationInModelFrame * glm::inverse(_postRotation);
if (constrain && _constraint) {
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
_rotationInConstrainedFrame = glm::normalize(targetRotation);
_transformChanged = true;
_animationPriority = priority;
}
}
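The comment above derives the constrained-frame rotation from the model-frame target via r' = (Rp * Rpre)^ * R' * Rpost^. A standalone sketch, assuming nothing beyond plain glm, that checks this algebra numerically by recomposing R = Rp * Rpre * r' * Rpost and comparing it with the requested target:
// Standalone check of the frame algebra used in setRotationInModelFrame().
// The sample rotations and the error printout are illustrative, not commit code.
#include <cmath>
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

int main() {
    const glm::vec3 X(1.0f, 0.0f, 0.0f), Y(0.0f, 1.0f, 0.0f), Z(0.0f, 0.0f, 1.0f);
    const float PI = glm::pi<float>();

    glm::quat Rp    = glm::angleAxis(0.3f * PI, Y);   // parent's model-frame rotation
    glm::quat Rpre  = glm::angleAxis(0.2f * PI, X);   // joint pre-rotation
    glm::quat Rpost = glm::angleAxis(-0.1f * PI, Z);  // joint post-rotation
    glm::quat Rtarget = glm::angleAxis(0.7f * PI, glm::normalize(glm::vec3(1.0f, 2.0f, 3.0f)));

    // r' = (Rp * Rpre)^ * R' * Rpost^   -- the expression from the setter
    glm::quat r = glm::inverse(Rp * Rpre) * Rtarget * glm::inverse(Rpost);

    // recompose R = Rp * Rpre * r' * Rpost and compare with the requested target
    glm::quat R = Rp * Rpre * r * Rpost;
    float error = glm::length(glm::vec3(R.x - Rtarget.x, R.y - Rtarget.y, R.z - Rtarget.z))
                + std::fabs(R.w - Rtarget.w);
    printf("reconstruction error: %g\n", error);  // ~0, up to float round-off
    return 0;
}
The reconstruction error is on the order of float round-off, which is why the setter can work directly from a model-frame target without any bind-frame bookkeeping.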
void JointState::clearTransformTranslation() {
_transform[3][0] = 0.0f;
_transform[3][1] = 0.0f;

View file

@@ -82,6 +82,11 @@ public:
/// NOTE: the JointState's model-frame transform/rotation are NOT updated!
void setRotationInBindFrame(const glm::quat& rotation, float priority, bool constrain = false);
/// \param rotationInModelFrame is in model-frame
/// computes and sets new _rotationInConstrainedFrame to match rotationInModelFrame
/// NOTE: the JointState's model-frame transform/rotation are NOT updated!
void setRotationInModelFrame(const glm::quat& rotationInModelFrame, float priority, bool constrain);
void setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain = false, float mix = 1.0f);
void setVisibleRotationInConstrainedFrame(const glm::quat& targetRotation);
const glm::quat& getRotationInConstrainedFrame() const { return _rotationInConstrainedFrame; }
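For orientation, the frames these setters name relate in the usual way: a joint's model-frame rotation is its parent's model-frame rotation composed with the joint's own parent-relative rotation, and the constrained frame is, roughly, that parent-relative frame with the joint's limits applied. A glm-only sketch of the relation (ignoring the pre-/post-rotations JointState also folds in; this is not the JointState API itself):
// Minimal sketch of model-frame vs. parent-relative (local) rotation, plain glm only.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::quat modelFromLocal(const glm::quat& parentModelRotation, const glm::quat& localRotation) {
    return parentModelRotation * localRotation;
}

glm::quat localFromModel(const glm::quat& parentModelRotation, const glm::quat& modelRotation) {
    return glm::inverse(parentModelRotation) * modelRotation;
}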

View file

@@ -586,7 +586,7 @@ bool Rig::setJointPosition(int jointIndex, const glm::vec3& position, const glm:
void Rig::inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm::quat& targetRotation, float priority,
const QVector<int>& freeLineage, glm::mat4 parentTransform) {
// NOTE: targetRotation is from bind- to model-frame
// NOTE: targetRotation is in model-frame
if (endIndex == -1 || _jointStates.isEmpty()) {
return;
@@ -690,7 +690,7 @@ void Rig::inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm::q
} while (numIterations < MAX_ITERATION_COUNT && distanceToGo < ACCEPTABLE_IK_ERROR);
// set final rotation of the end joint
endState.setRotationInBindFrame(targetRotation, priority, true);
endState.setRotationInModelFrame(targetRotation, priority, true);
}
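Rig::inverseKinematics() is an iterative solver in the spirit of cyclic coordinate descent: it repeatedly adjusts joints along the free lineage to pull the end effector toward the target position, then pins the end joint's rotation, now given in model-frame, to targetRotation. A generic CCD sketch in plain glm, intended only to illustrate the shape of such a solver, not the Rig implementation:
// Generic CCD (cyclic coordinate descent) sketch. Joints are a simple chain of
// model-frame positions; the last entry is the end effector. All names are illustrative.
#include <cmath>
#include <vector>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

void ccdSolve(std::vector<glm::vec3>& joints, const glm::vec3& target,
              int maxIterations = 16, float acceptableError = 0.01f) {
    const int end = (int)joints.size() - 1;
    for (int iteration = 0; iteration < maxIterations; ++iteration) {
        if (glm::distance(joints[end], target) < acceptableError) {
            break;
        }
        // walk from the joint just above the end effector back toward the root
        for (int i = end - 1; i >= 0; --i) {
            glm::vec3 toEnd = joints[end] - joints[i];
            glm::vec3 toTarget = target - joints[i];
            if (glm::length(toEnd) < 1.0e-6f || glm::length(toTarget) < 1.0e-6f) {
                continue;
            }
            glm::vec3 fromDir = glm::normalize(toEnd);
            glm::vec3 toDir = glm::normalize(toTarget);
            glm::vec3 axis = glm::cross(fromDir, toDir);
            if (glm::length(axis) < 1.0e-6f) {
                continue; // already aligned (or exactly opposite)
            }
            float cosAngle = glm::clamp(glm::dot(fromDir, toDir), -1.0f, 1.0f);
            // rotate this joint so the end effector swings toward the target
            glm::quat delta = glm::angleAxis(std::acos(cosAngle), glm::normalize(axis));
            for (int j = i + 1; j <= end; ++j) {
                joints[j] = joints[i] + delta * (joints[j] - joints[i]);
            }
        }
    }
    // In the Rig, after this positional pass the end joint's rotation is pinned
    // directly to the model-frame target via setRotationInModelFrame().
}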
bool Rig::restoreJointPosition(int jointIndex, float fraction, float priority, const QVector<int>& freeLineage) {

View file

@@ -21,6 +21,10 @@
#include "SixenseManager.h"
#include "UserActivityLogger.h"
#ifdef HAVE_SIXENSE
#include "sixense.h"
#endif
// TODO: This should not be here
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(inputplugins)
@@ -30,8 +34,6 @@ Q_LOGGING_CATEGORY(inputplugins, "hifi.inputplugins")
const unsigned int LEFT_MASK = 0;
const unsigned int RIGHT_MASK = 1U << 1;
#ifdef HAVE_SIXENSE
const int CALIBRATION_STATE_IDLE = 0;
const int CALIBRATION_STATE_X = 1;
const int CALIBRATION_STATE_Y = 2;
@@ -51,8 +53,6 @@ typedef int (*SixenseTakeIntFunction)(int);
typedef int (*SixenseTakeIntAndSixenseControllerData)(int, sixenseControllerData*);
#endif
#endif
const QString SixenseManager::NAME = "Sixense";
const QString MENU_PARENT = "Avatar";
@@ -66,8 +66,8 @@ SixenseManager& SixenseManager::getInstance() {
}
SixenseManager::SixenseManager() :
InputDevice("Hydra"),
#if defined(HAVE_SIXENSE) && defined(__APPLE__)
InputDevice("Hydra"),
#ifdef __APPLE__
_sixenseLibrary(NULL),
#endif
_hydrasConnected(false)
@@ -213,18 +213,16 @@ void SixenseManager::update(float deltaTime, bool jointsCaptured) {
// NOTE: Sixense API returns pos data in millimeters but we IMMEDIATELY convert to meters.
glm::vec3 position(data->pos[0], data->pos[1], data->pos[2]);
position *= METERS_PER_MILLIMETER;
// Check to see if this hand/controller is on the base
const float CONTROLLER_AT_BASE_DISTANCE = 0.075f;
if (glm::length(position) >= CONTROLLER_AT_BASE_DISTANCE) {
handleButtonEvent(data->buttons, numActiveControllers - 1);
handleAxisEvent(data->joystick_x, data->joystick_y, data->trigger, numActiveControllers - 1);
// Rotation of Palm
glm::quat rotation(data->rot_quat[3], -data->rot_quat[0], data->rot_quat[1], -data->rot_quat[2]);
rotation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)) * _orbRotation * rotation;
if (!jointsCaptured) {
// Rotation of Palm
glm::quat rotation(data->rot_quat[3], data->rot_quat[0], data->rot_quat[1], data->rot_quat[2]);
handlePoseEvent(position, rotation, numActiveControllers - 1);
} else {
_poseStateMap.clear();
@@ -232,7 +230,7 @@ void SixenseManager::update(float deltaTime, bool jointsCaptured) {
} else {
_poseStateMap[(numActiveControllers - 1) == 0 ? LEFT_HAND : RIGHT_HAND] = UserInputMapper::PoseValue();
}
// // Read controller buttons and joystick into the hand
// palm->setControllerButtons(data->buttons);
// palm->setTrigger(data->trigger);
@@ -242,7 +240,7 @@ void SixenseManager::update(float deltaTime, bool jointsCaptured) {
if (numActiveControllers == 2) {
updateCalibration(controllers);
}
for (auto axisState : _axisStateMap) {
if (fabsf(axisState.second) < CONTROLLER_THRESHOLD) {
_axisStateMap[axisState.first] = 0.0f;
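The loop above snaps near-zero axis values to zero so controller drift does not register as input. A generic dead-zone helper, written against a plain std::map rather than the private _axisStateMap, purely as an illustration:
// Dead-zone sketch: axis values with magnitude below the threshold are zeroed.
#include <cmath>
#include <map>

void applyDeadZone(std::map<int, float>& axisStates, float threshold) {
    for (auto& axisState : axisStates) {
        if (std::fabs(axisState.second) < threshold) {
            axisState.second = 0.0f;
        }
    }
}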
@@ -436,16 +434,66 @@ void SixenseManager::handleButtonEvent(unsigned int buttons, int index) {
void SixenseManager::handlePoseEvent(glm::vec3 position, glm::quat rotation, int index) {
#ifdef HAVE_SIXENSE
// From ABOVE the sixense coordinate frame looks like this:
//
// |
// USB cables
// |
// .-. user
// (Orb) --neckX---- forward
// '-' |
// | | user
// neckZ y +---- right
// | (o)-----x
// |
// |
// z
// Transform the measured position into body frame.
glm::vec3 neck = _neckBase;
// Set y component of the "neck" to raise the measured position a little bit.
neck.y = 0.5f;
position = _orbRotation * (position - neck);
// adjustment for hydra controllers fit into hands
float sign = (index == 0) ? -1.0f : 1.0f;
rotation *= glm::angleAxis(sign * PI/4.0f, glm::vec3(0.0f, 0.0f, 1.0f));
// From ABOVE the hand canonical axes looks like this:
//
// | | | | y | | | |
// | | | | | | | | |
// | | | | |
// |left | / x----(+) \ |right|
// | _/ z \_ |
// | | | |
// | | | |
//
// To convert sixense's delta-rotation into the hand's frame we will have to transform it like so:
//
// deltaHand = Qsh^ * deltaSixense * Qsh
//
// where Qsh = transform from sixense axes to hand axes. By inspection we can determine Qsh:
//
// Qsh = angleAxis(PI, zAxis) * angleAxis(-PI/2, xAxis)
//
const glm::vec3 xAxis = glm::vec3(1.0f, 0.0f, 0.0f);
const glm::vec3 zAxis = glm::vec3(0.0f, 0.0f, 1.0f);
const glm::quat sixenseToHand = glm::angleAxis(PI, zAxis) * glm::angleAxis(-PI/2.0f, xAxis);
// In addition to Qsh each hand has pre-offset introduced by the shape of the sixense controllers
// and how they fit into the hand in their relaxed state. This offset is a quarter turn about
// the sixense's z-axis, with its direction different for the two hands:
float sign = (index == 0) ? 1.0f : -1.0f;
const glm::quat preOffset = glm::angleAxis(sign * PI / 2.0f, zAxis);
// Finally, there is a post-offset (same for both hands) to get the hand's rest orientation
// (fingers forward, palm down) aligned properly in the avatar's model-frame.
const glm::quat postOffset = glm::angleAxis(PI / 2.0f, xAxis);
// The total rotation of the hand uses the formula:
//
// rotation = postOffset * Qsh^ * (measuredRotation * preOffset) * Qsh
//
rotation = postOffset * glm::inverse(sixenseToHand) * rotation * preOffset * sixenseToHand;
_poseStateMap[makeInput(JointChannel(index)).getChannel()] = UserInputMapper::PoseValue(position, rotation);
#endif // HAVE_SIXENSE
}
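The rotation math above is a change of basis: the measured controller rotation is conjugated by Qsh to move it from sixense axes into hand axes, with a per-hand pre-offset for how the controller sits in the palm and a common post-offset to reach the hand's rest orientation in the avatar's model-frame. The constants and the final formula can be exercised in isolation; everything in the sketch other than the formula itself (main(), the sample input, index 0 taken as the left hand) is illustrative scaffolding, not code from the commit:
// Standalone sketch of the hand-rotation formula derived in handlePoseEvent(), plain glm.
#include <cstdio>
#include <glm/glm.hpp>
#include <glm/gtc/constants.hpp>
#include <glm/gtc/quaternion.hpp>

glm::quat sixenseToHandRotation(const glm::quat& measuredRotation, int index) {
    const float PI = glm::pi<float>();
    const glm::vec3 xAxis(1.0f, 0.0f, 0.0f);
    const glm::vec3 zAxis(0.0f, 0.0f, 1.0f);

    // Qsh: change of basis from sixense axes to hand axes
    const glm::quat sixenseToHand = glm::angleAxis(PI, zAxis) * glm::angleAxis(-PI / 2.0f, xAxis);
    // per-hand quarter-turn pre-offset for how the controller rests in the hand
    float sign = (index == 0) ? 1.0f : -1.0f;
    const glm::quat preOffset = glm::angleAxis(sign * PI / 2.0f, zAxis);
    // common post-offset: fingers forward, palm down in the avatar's model-frame
    const glm::quat postOffset = glm::angleAxis(PI / 2.0f, xAxis);

    // rotation = postOffset * Qsh^ * (measuredRotation * preOffset) * Qsh
    return postOffset * glm::inverse(sixenseToHand) * measuredRotation * preOffset * sixenseToHand;
}

int main() {
    // identity measured rotation: the result is the hand's rest-orientation offset
    glm::quat rest = sixenseToHandRotation(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), 0);
    printf("left-hand rest offset: (w=%g, x=%g, y=%g, z=%g)\n", rest.w, rest.x, rest.y, rest.z);
    return 0;
}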
@@ -453,7 +501,7 @@ void SixenseManager::handlePoseEvent(glm::vec3 position, glm::quat rotation, int
void SixenseManager::registerToUserInputMapper(UserInputMapper& mapper) {
// Grab the current free device ID
_deviceID = mapper.getFreeDeviceID();
auto proxy = std::make_shared<UserInputMapper::DeviceProxy>(_name);
proxy->getButton = [this] (const UserInputMapper::Input& input, int timestamp) -> bool { return this->getButton(input.getChannel()); };
proxy->getAxis = [this] (const UserInputMapper::Input& input, int timestamp) -> float { return this->getAxis(input.getChannel()); };
@@ -465,25 +513,25 @@ void SixenseManager::registerToUserInputMapper(UserInputMapper& mapper) {
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_2, 0), "Left Button 2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_3, 0), "Left Button 3"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_4, 0), "Left Button 4"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_FWD, 0), "L1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BACK_TRIGGER, 0), "L2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_POS, 0), "Left Stick Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_NEG, 0), "Left Stick Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_POS, 0), "Left Stick Right"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_NEG, 0), "Left Stick Left"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_TRIGGER, 0), "Left Trigger Press"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_0, 1), "Right Start"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_1, 1), "Right Button 1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_2, 1), "Right Button 2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_3, 1), "Right Button 3"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_4, 1), "Right Button 4"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BUTTON_FWD, 1), "R1"));
availableInputs.append(UserInputMapper::InputPair(makeInput(BACK_TRIGGER, 1), "R2"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_POS, 1), "Right Stick Up"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_Y_NEG, 1), "Right Stick Down"));
availableInputs.append(UserInputMapper::InputPair(makeInput(AXIS_X_POS, 1), "Right Stick Right"));
@@ -506,35 +554,35 @@ void SixenseManager::assignDefaultInputMapping(UserInputMapper& mapper) {
const float JOYSTICK_PITCH_SPEED = 0.25f;
const float BUTTON_MOVE_SPEED = 1.0f;
const float BOOM_SPEED = 0.1f;
// Left Joystick: Movement, strafing
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_FORWARD, makeInput(AXIS_Y_POS, 0), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LONGITUDINAL_BACKWARD, makeInput(AXIS_Y_NEG, 0), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_RIGHT, makeInput(AXIS_X_POS, 0), JOYSTICK_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::LATERAL_LEFT, makeInput(AXIS_X_NEG, 0), JOYSTICK_MOVE_SPEED);
// Right Joystick: Camera orientation
mapper.addInputChannel(UserInputMapper::YAW_RIGHT, makeInput(AXIS_X_POS, 1), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::YAW_LEFT, makeInput(AXIS_X_NEG, 1), JOYSTICK_YAW_SPEED);
mapper.addInputChannel(UserInputMapper::PITCH_UP, makeInput(AXIS_Y_POS, 1), JOYSTICK_PITCH_SPEED);
mapper.addInputChannel(UserInputMapper::PITCH_DOWN, makeInput(AXIS_Y_NEG, 1), JOYSTICK_PITCH_SPEED);
// Buttons
mapper.addInputChannel(UserInputMapper::BOOM_IN, makeInput(BUTTON_3, 0), BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::BOOM_OUT, makeInput(BUTTON_1, 0), BOOM_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_UP, makeInput(BUTTON_3, 1), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::VERTICAL_DOWN, makeInput(BUTTON_1, 1), BUTTON_MOVE_SPEED);
mapper.addInputChannel(UserInputMapper::SHIFT, makeInput(BUTTON_2, 0));
mapper.addInputChannel(UserInputMapper::SHIFT, makeInput(BUTTON_2, 1));
mapper.addInputChannel(UserInputMapper::ACTION1, makeInput(BUTTON_4, 0));
mapper.addInputChannel(UserInputMapper::ACTION2, makeInput(BUTTON_4, 1));
mapper.addInputChannel(UserInputMapper::LEFT_HAND, makeInput(LEFT_HAND));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND, makeInput(RIGHT_HAND));
mapper.addInputChannel(UserInputMapper::LEFT_HAND_CLICK, makeInput(BACK_TRIGGER, 0));
mapper.addInputChannel(UserInputMapper::RIGHT_HAND_CLICK, makeInput(BACK_TRIGGER, 1));