Merge pull request #5901 from hyperlogic/tony/gangsta-lean

Threshold-based walking/leaning while in HMD.
This commit is contained in:
Howard Stearns 2015-09-25 15:51:39 -07:00
commit a820682816
8 changed files with 67 additions and 58 deletions

View file

@ -560,7 +560,7 @@
"id": "strafeLeft",
"type": "clip",
"data": {
"url": "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/strafe_left.fbx",
"url": "http://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/side_step_left.fbx",
"startFrame": 0.0,
"endFrame": 31.0,
"timeScale": 1.0,
@ -572,7 +572,7 @@
"id": "strafeRight",
"type": "clip",
"data": {
"url": "https://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/strafe_right.fbx",
"url": "http://hifi-public.s3.amazonaws.com/ozan/anim/standard_anims/side_step_right.fbx",
"startFrame": 0.0,
"endFrame": 31.0,
"timeScale": 1.0,

View file

@ -708,7 +708,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// Now that menu is initialized we can sync myAvatar with its state.
_myAvatar->updateMotionBehaviorFromMenu();
_myAvatar->updateStandingHMDModeFromMenu();
// the 3Dconnexion device wants to be initialized after a window is displayed.
ConnexionClient::getInstance().init();
@ -1055,8 +1054,6 @@ void Application::paintGL() {
auto displayPlugin = getActiveDisplayPlugin();
displayPlugin->preRender();
_offscreenContext->makeCurrent();
// update the avatar with a fresh HMD pose
_myAvatar->updateFromHMDSensorMatrix(getHMDSensorPose());
auto lodManager = DependencyManager::get<LODManager>();
@ -2898,6 +2895,9 @@ void Application::update(float deltaTime) {
userInputMapper->getActionState(UserInputMapper::SHIFT), RIGHT_HAND_INDEX);
}
// update the avatar with a fresh HMD pose
_myAvatar->updateFromHMDSensorMatrix(getHMDSensorPose(), deltaTime);
updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
updateCamera(deltaTime); // handle various camera tweaks like off axis projection

View file

@ -294,9 +294,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::TurnWithHead, 0, false);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::StandingHMDSensorMode, 0, false,
avatar, SLOT(updateStandingHMDModeFromMenu()));
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::WorldAxes);
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Stats);

View file

@ -124,14 +124,12 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
_isEyeTrackerConnected = eyeTracker->isTracking();
}
if (!myAvatar->getStandingHMDSensorMode()) {
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
}
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
}
if (!(_isFaceTrackerConnected || billboard)) {
@ -392,7 +390,7 @@ glm::quat Head::getCameraOrientation() const {
// always the same.
if (qApp->getAvatarUpdater()->isHMDMode()) {
MyAvatar* myAvatar = dynamic_cast<MyAvatar*>(_owningAvatar);
if (myAvatar && myAvatar->getStandingHMDSensorMode()) {
if (myAvatar) {
return glm::quat_cast(myAvatar->getSensorToWorldMatrix()) * myAvatar->getHMDSensorOrientation();
} else {
return getOrientation();

View file

@ -104,7 +104,6 @@ MyAvatar::MyAvatar(RigPointer rig) :
_hmdSensorPosition(),
_bodySensorMatrix(),
_sensorToWorldMatrix(),
_standingHMDSensorMode(false),
_goToPending(false),
_goToPosition(),
_goToOrientation(),
@ -268,29 +267,59 @@ void MyAvatar::simulate(float deltaTime) {
}
glm::mat4 MyAvatar::getSensorToWorldMatrix() const {
if (getStandingHMDSensorMode()) {
return _sensorToWorldMatrix;
} else {
return createMatFromQuatAndPos(getWorldAlignedOrientation(), getDefaultEyePosition());
}
return _sensorToWorldMatrix;
}
// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {
void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix, float deltaTime) {
// update the sensorMatrices based on the new hmd pose
_hmdSensorMatrix = hmdSensorMatrix;
_hmdSensorPosition = extractTranslation(hmdSensorMatrix);
_hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
_bodySensorMatrix = deriveBodyFromHMDSensor();
if (getStandingHMDSensorMode()) {
// set the body position/orientation to reflect motion due to the head.
auto worldMat = _sensorToWorldMatrix * _bodySensorMatrix;
nextAttitude(extractTranslation(worldMat), glm::quat_cast(worldMat));
const float STRAIGHTING_LEAN_DURATION = 0.5f; // seconds
const float STRAIGHTING_LEAN_THRESHOLD = 0.2f; // meters
auto newBodySensorMatrix = deriveBodyFromHMDSensor();
glm::vec3 diff = extractTranslation(newBodySensorMatrix) - extractTranslation(_bodySensorMatrix);
if (!_straightingLean && glm::length(diff) > STRAIGHTING_LEAN_THRESHOLD) {
// begin homing toward derived body position.
_straightingLean = true;
_straightingLeanAlpha = 0.0f;
} else if (_straightingLean) {
auto newBodySensorMatrix = deriveBodyFromHMDSensor();
auto worldBodyMatrix = _sensorToWorldMatrix * newBodySensorMatrix;
glm::vec3 worldBodyPos = extractTranslation(worldBodyMatrix);
glm::quat worldBodyRot = glm::normalize(glm::quat_cast(worldBodyMatrix));
_straightingLeanAlpha += (1.0f / STRAIGHTING_LEAN_DURATION) * deltaTime;
if (_straightingLeanAlpha >= 1.0f) {
_straightingLean = false;
nextAttitude(worldBodyPos, worldBodyRot);
_bodySensorMatrix = newBodySensorMatrix;
} else {
// interp position toward the desired pos
glm::vec3 pos = lerp(getPosition(), worldBodyPos, _straightingLeanAlpha);
glm::quat rot = glm::normalize(safeMix(getOrientation(), worldBodyRot, _straightingLeanAlpha));
nextAttitude(pos, rot);
// interp sensor matrix toward desired
glm::vec3 nextBodyPos = extractTranslation(newBodySensorMatrix);
glm::quat nextBodyRot = glm::normalize(glm::quat_cast(newBodySensorMatrix));
glm::vec3 prevBodyPos = extractTranslation(_bodySensorMatrix);
glm::quat prevBodyRot = glm::normalize(glm::quat_cast(_bodySensorMatrix));
pos = lerp(prevBodyPos, nextBodyPos, _straightingLeanAlpha);
rot = glm::normalize(safeMix(prevBodyRot, nextBodyRot, _straightingLeanAlpha));
_bodySensorMatrix = createMatFromQuatAndPos(rot, pos);
}
}
}
//
// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
// This is so the correct camera can be used for rendering.
@ -359,11 +388,9 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
Head* head = getHead();
if (inHmd || isPlaying()) {
if (!getStandingHMDSensorMode()) {
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
head->setDeltaRoll(estimatedRotation.z);
}
head->setDeltaPitch(estimatedRotation.x);
head->setDeltaYaw(estimatedRotation.y);
head->setDeltaRoll(estimatedRotation.z);
} else {
float magnifyFieldOfView = qApp->getFieldOfView() /
_realWorldFieldOfView.get();
@ -385,12 +412,10 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
relativePosition.x = -relativePosition.x;
}
if (!(inHmd && getStandingHMDSensorMode())) {
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
}
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
}
@ -1744,11 +1769,6 @@ void MyAvatar::updateMotionBehaviorFromMenu() {
_characterController.setEnabled(menu->isOptionChecked(MenuOption::EnableCharacterController));
}
void MyAvatar::updateStandingHMDModeFromMenu() {
Menu* menu = Menu::getInstance();
_standingHMDSensorMode = menu->isOptionChecked(MenuOption::StandingHMDSensorMode);
}
//Renders sixense laser pointers for UI selection with controllers
void MyAvatar::renderLaserPointers(gpu::Batch& batch) {
const float PALM_TIP_ROD_RADIUS = 0.002f;

View file

@ -68,7 +68,7 @@ public:
// best called at start of main loop just after we have a fresh hmd pose.
// update internal body position from new hmd pose.
void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix);
void updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix, float deltaTime);
// best called at end of main loop, just before rendering.
// update sensor to world matrix from current body position and hmd sensor.
@ -168,7 +168,6 @@ public:
static const float ZOOM_MAX;
static const float ZOOM_DEFAULT;
bool getStandingHMDSensorMode() const { return _standingHMDSensorMode; }
void doUpdateBillboard();
void destroyAnimGraph();
@ -194,7 +193,6 @@ public slots:
void setThrust(glm::vec3 newThrust) { _thrust = newThrust; }
void updateMotionBehaviorFromMenu();
void updateStandingHMDModeFromMenu();
glm::vec3 getLeftPalmPosition();
glm::vec3 getLeftPalmVelocity();
@ -345,8 +343,6 @@ private:
// used to transform any sensor into world space, including the _hmdSensorMat, or hand controllers.
glm::mat4 _sensorToWorldMatrix;
bool _standingHMDSensorMode;
bool _goToPending;
glm::vec3 _goToPosition;
glm::quat _goToOrientation;
@ -362,6 +358,9 @@ private:
AudioListenerMode _audioListenerMode;
glm::vec3 _customListenPosition;
glm::quat _customListenOrientation;
bool _straightingLean = false;
float _straightingLeanAlpha = 0.0f;
};
QScriptValue audioListenModeToScriptValue(QScriptEngine* engine, const AudioListenerMode& audioListenerMode);

View file

@ -123,7 +123,7 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
Rig::HeadParameters headParams;
headParams.modelRotation = getRotation();
headParams.modelTranslation = getTranslation();
headParams.enableLean = qApp->getAvatarUpdater()->isHMDMode() && !myAvatar->getStandingHMDSensorMode();
headParams.enableLean = qApp->getAvatarUpdater()->isHMDMode();
headParams.leanSideways = head->getFinalLeanSideways();
headParams.leanForward = head->getFinalLeanForward();
headParams.torsoTwist = head->getTorsoTwist();

View file

@ -68,12 +68,7 @@ void OverlayConductor::updateMode() {
Mode newMode;
if (qApp->isHMDMode()) {
MyAvatar* myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
if (myAvatar->getStandingHMDSensorMode()) {
newMode = STANDING;
} else {
newMode = SITTING;
}
newMode = SITTING;
} else {
newMode = FLAT;
}