more work in progress vive room tracking

Found and disabled torso twist
use an old copy of the hmd pose before physics, and the most up to date
one after physics.
This commit is contained in:
Anthony J. Thibault 2015-07-07 00:12:48 -07:00
parent 834b75f4c4
commit 5293effc2e
8 changed files with 144 additions and 93 deletions

View file

@ -2670,6 +2670,10 @@ void Application::update(float deltaTime) {
_physicsEngine.stepSimulation();
_entities.getTree()->unlock();
// AJT: delay the head pose until after the physics step.
_headPosition = glm::vec3(getActiveDisplayPlugin()->getHeadPose()[3]);
_headOrientation = glm::quat_cast(getActiveDisplayPlugin()->getHeadPose());
if (_physicsEngine.hasOutgoingChanges()) {
_entities.getTree()->lockForWrite();
_entitySimulation.lock();
@ -4865,11 +4869,11 @@ void Application::shutdownPlugins() {
}
glm::vec3 Application::getHeadPosition() const {
return glm::vec3(getActiveDisplayPlugin()->getHeadPose()[3]);
return _headPosition;
}
glm::quat Application::getHeadOrientation() const {
return glm::quat_cast(getActiveDisplayPlugin()->getHeadPose());
return _headOrientation;
}
glm::uvec2 Application::getCanvasSize() const {

View file

@ -668,6 +668,9 @@ private:
Overlays _overlays;
ApplicationOverlay _applicationOverlay;
ApplicationCompositor _compositor;
glm::vec3 _headPosition;
glm::quat _headOrientation;
};
#endif // hifi_Application_h

View file

@ -27,6 +27,8 @@
using namespace std;
static bool isRoomTracking = true;
Head::Head(Avatar* owningAvatar) :
HeadData((AvatarData*)owningAvatar),
_returnHeadToCenter(false),
@ -116,12 +118,15 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
}
}
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
if (!isRoomTracking) {
// Twist the upper body to follow the rotation of the head, but only do this with my avatar,
// since everyone else will see the full joint rotations for other people.
const float BODY_FOLLOW_HEAD_YAW_RATE = 0.1f;
const float BODY_FOLLOW_HEAD_FACTOR = 0.66f;
float currentTwist = getTorsoTwist();
setTorsoTwist(currentTwist + (getFinalYaw() * BODY_FOLLOW_HEAD_FACTOR - currentTwist) * BODY_FOLLOW_HEAD_YAW_RATE);
}
}
if (!(_isFaceTrackerConnected || billboard)) {

View file

@ -155,11 +155,12 @@ void MyAvatar::reset() {
void MyAvatar::update(float deltaTime) {
/*
qCDebug(interfaceapp, "update()");
qCDebug(interfaceapp, "update() *************");
glm::vec3 pos = getPosition();
qCDebug(interfaceapp, "\tpos = (%.5f, %.5f, %.5f)", pos.x, pos.y, pos.z);
*/
glm::vec3 axis = glm::axis(getOrientation());
float angle = glm::angle(getOrientation());
qCDebug(interfaceapp, "\trot = axis = (%.5f, %.5f, %.5f), angle = %.5f", axis.x, axis.y, axis.z, angle);
if (_referential) {
_referential->update();
@ -1299,9 +1300,12 @@ void MyAvatar::updateOrientation(float deltaTime) {
_bodyYawDelta += _driveKeys[ROT_LEFT] * YAW_SPEED * deltaTime;
getHead()->setBasePitch(getHead()->getBasePitch() + (_driveKeys[ROT_UP] - _driveKeys[ROT_DOWN]) * PITCH_SPEED * deltaTime);
// AJT: disable arrow key movement.
/*
// update body orientation by movement inputs
setOrientation(getOrientation() *
glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta, 0.0f) * deltaTime)));
*/
// decay body rotation momentum
const float BODY_SPIN_FRICTION = 7.5f;
@ -1329,22 +1333,8 @@ void MyAvatar::updateOrientation(float deltaTime) {
head->setBasePitch(PITCH(euler));
head->setBaseRoll(ROLL(euler));
mat4 pose = glm::mat4_cast(qApp->getHeadOrientation());
vec3 xAxis = vec3(pose[0]);
vec3 yAxis = vec3(pose[1]);
vec3 zAxis = vec3(pose[2]);
// cancel out the roll and pitch
vec3 newZ = (zAxis.x == 0 && zAxis.z == 0) ? vec3(1, 0, 0) : glm::normalize(vec3(zAxis.x, 0, zAxis.z));
vec3 newX = glm::cross(vec3(0, 1, 0), newZ);
vec3 newY = glm::cross(newZ, newX);
mat4 m;
m[0] = vec4(newX, 0);
m[1] = vec4(newY, 0);
m[2] = vec4(newZ, 0);
m[3] = pose[3];
setOrientation(glm::quat(m));
glm::quat q = calcBodyOrientationFromSensors();
setAvatarOrientation(q);
}
}
@ -1502,24 +1492,7 @@ void MyAvatar::updatePosition(float deltaTime) {
_moving = speed > MOVING_SPEED_THRESHOLD;
if (qApp->isHMDMode() && isRoomTracking) {
// hmd is in sensor space.
const glm::vec3 hmdPosition = qApp->getHeadPosition();
const glm::quat hmdOrientation = qApp->getHeadOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// In sensor space, figure out where the avatar body should be,
// by applying offsets from the avatar's neck & head joints.
vec3 localEyes = _skeletonModel.getDefaultEyeModelPosition();
vec3 localNeck;
if (_skeletonModel.getLocalNeckPosition(localNeck)) {
glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
glm::vec3 roomBodyPos = hmdPosition + eyeToNeck + neckToRoot;
// now convert from sensor space into world coordinates
glm::vec3 worldBodyPos = _sensorToWorldMat * roomBodyPos;
setAvatarPosition(worldBodyPos);
}
setAvatarPosition(calcBodyPositionFromSensors());
}
}
@ -1614,17 +1587,29 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
glm::mat4 m;
// AJT: FIXME, goToLocation doesn't work with orientation.
/*
// Set the orientation of the sensor room, not the avatar itself.
if (hasOrientation) {
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
glm::vec3 axis = glm::axis(newOrientation);
qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is axis = ("
<< axis.x << ", " << axis.y << ", " << axis.z << "), theta = " << glm::angle(newOrientation);
// TODO: FIXME: add support for shouldFaceLocation
m = mat4_cast(newOrientation);
m = glm::mat4_cast(newOrientation);
}
*/
m[3] = glm::vec4(newPosition, 1);
_sensorToWorldMat = m;
qCDebug(interfaceapp, "\tsensorMat = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(interfaceapp, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(interfaceapp, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(interfaceapp, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
/*
glm::vec3 shiftedPosition = newPosition;
@ -1721,23 +1706,64 @@ void MyAvatar::relayDriveKeysToCharacterController() {
}
void MyAvatar::setAvatarPosition(glm::vec3 pos) {
AvatarData::setPosition(pos);
qCDebug(interfaceapp, "setAvatarPosition = (%.5f, %.5f, %.5f)", pos.x, pos.y, pos.z);
Avatar::setPosition(pos);
}
void MyAvatar::setAvatarOrientation(glm::quat quat) {
AvatarData::setOrientation(quat);
glm::vec3 axis = glm::axis(quat);
float angle = glm::angle(quat);
qCDebug(interfaceapp, "setAvatarOrientation axis = (%.5f, %.5f, %.5f), theta = %.5f", axis.x, axis.y, axis.z, angle);
Avatar::setOrientation(quat);
}
// these are overridden, because they must move the sensor mat, such that the avatar will be at the given location.
void MyAvatar::setPosition(const glm::vec3 position, bool overideReferential) {
glm::vec3 sensorPos = qApp->getHeadPosition();
_sensorToWorldMat[3] = glm::vec3(position - sensorPos, 1);
qCDebug(interfaceapp, "setPosition = (%.5f, %.5f, %.5f)", position.x, position.y, position.z);
glm::vec3 bodyPos = calcBodyPositionFromSensors();
glm::vec3 desiredPos = position;
glm::vec3 sensorPos(_sensorToWorldMat[3]);
_sensorToWorldMat[3] = glm::vec4(desiredPos - bodyPos + sensorPos, 1);
setAvatarPosition(position);
}
void MyAvatar::setOrientation(const glm::quat& orientation, bool overideReferential) {
glm::mat4 sensorMat = cancelOutRollAndPitch(createMatFromQuatAndPos(qApp->getHeadOrientation(), qApp->getHeadPosition()));
gmm::mat4 worldMat = createMatFromQuatAndPos(_orientation, _position);
_sensorToWorldMat = worldMat * inverse(sensorMat);
glm::vec3 axis = glm::axis(orientation);
float angle = glm::angle(orientation);
qCDebug(interfaceapp, "setOrientation axis = (%.5f, %.5f, %.5f), theta = %.5f", axis.x, axis.y, axis.z, angle);
glm::vec3 bodyPos = calcBodyPositionFromSensors();
glm::quat bodyOrientation = calcBodyOrientationFromSensors();
glm::mat4 bodyMat = createMatFromQuatAndPos(bodyOrientation, bodyPos);
glm::mat4 desiredMat = createMatFromQuatAndPos(orientation, bodyPos);
_sensorToWorldMat = desiredMat * glm::inverse(bodyMat) * _sensorToWorldMat;
setAvatarOrientation(orientation);
}
glm::vec3 MyAvatar::calcBodyPositionFromSensors() const {
// hmd is in sensor space.
const glm::vec3 hmdPosition = qApp->getHeadPosition();
const glm::quat hmdOrientation = qApp->getHeadOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// In sensor space, figure out where the avatar body should be,
// by applying offsets from the avatar's neck & head joints.
vec3 localEyes = _skeletonModel.getDefaultEyeModelPosition();
vec3 localNeck(0.0f, 0.6f, 0.0f); // start with some kind of guess if the skeletonModel is not loaded yet.
_skeletonModel.getLocalNeckPosition(localNeck);
glm::vec3 eyeToNeck = hmdOrientation * (localNeck - localEyes);
glm::vec3 neckToRoot = hmdOrientationYawOnly * -localNeck;
glm::vec3 roomBodyPos = hmdPosition + eyeToNeck + neckToRoot;
// now convert from sensor space into world coordinates
return transformPoint(_sensorToWorldMat, roomBodyPos);
}
glm::quat MyAvatar::calcBodyOrientationFromSensors() const {
const glm::quat hmdOrientation = qApp->getHeadOrientation();
const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
// TODO: do a better calculation of bodyOrientation; for now just use hmd facing.
return glm::quat_cast(_sensorToWorldMat) * hmdOrientationYawOnly;
}

View file

@ -207,8 +207,8 @@ public slots:
virtual void rebuildSkeletonBody();
// these are overridden, because they must move the sensor mat, such that the avatar will be at the given location.
virtual void setPosition(const glm::vec3 position, bool overideReferential) override;
virtual void setOrientation(const glm::quat& orientation, bool overideReferential) overide;
virtual void setPosition(const glm::vec3 position, bool overideReferential = false) override;
virtual void setOrientation(const glm::quat& orientation, bool overideReferential = false) override;
glm::mat4 getSensorToWorldMat() const { return _sensorToWorldMat; }
@ -222,6 +222,9 @@ private:
void setAvatarPosition(glm::vec3 pos);
void setAvatarOrientation(glm::quat quat);
glm::vec3 calcBodyPositionFromSensors() const;
glm::quat calcBodyOrientationFromSensors() const;
bool cameraInsideHead() const;
// These are made private for MyAvatar so that you will use the "use" methods instead

View file

@ -27,7 +27,7 @@
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
Q_LOGGING_CATEGORY(displayplugins, "hifi.physics")
Q_LOGGING_CATEGORY(displayplugins, "hifi.displayplugins")
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
@ -210,6 +210,9 @@ void OpenVrDisplayPlugin::finishFrame() {
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
});
glm::vec3 p(_trackedDevicePoseMat4[0][3]);
qCDebug(displayplugins, "trackPos = (%.5f, %.5f, %.5f)", p.x, p.y, p.z);
};
#endif

View file

@ -347,4 +347,44 @@ QRectF glmToRect(const glm::vec2 & pos, const glm::vec2 & size) {
return result;
}
// create matrix from orientation and position
glm::mat4 createMatFromQuatAndPos(glm::quat q, glm::vec3 p) {
glm::mat4 m = glm::mat4_cast(q);
m[3] = glm::vec4(p, 1);
return m;
}
// cancel out roll and pitch
glm::quat cancelOutRollAndPitch(glm::quat q) {
glm::vec3 xAxis = q * glm::vec3(1, 0, 0);
glm::vec3 yAxis = q * glm::vec3(0, 1, 0);
glm::vec3 zAxis = q * glm::vec3(0, 0, 1);
// cancel out the roll and pitch
glm::vec3 newZ = (zAxis.x == 0 && zAxis.z == 0) ? vec3(1, 0, 0) : glm::normalize(vec3(zAxis.x, 0, zAxis.z));
glm::vec3 newX = glm::cross(vec3(0, 1, 0), newZ);
glm::vec3 newY = glm::cross(newZ, newX);
glm::mat4 temp(glm::vec4(newX, 0), glm::vec4(newY, 0), glm::vec4(newZ, 0), glm::vec4(0, 0, 0, 1));
return glm::quat_cast(temp);
}
// cancel out roll and pitch
glm::mat4 cancelOutRollAndPitch(glm::mat4 m) {
glm::vec3 xAxis = glm::vec3(m[0]);
glm::vec3 yAxis = glm::vec3(m[1]);
glm::vec3 zAxis = glm::vec3(m[2]);
// cancel out the roll and pitch
glm::vec3 newZ = (zAxis.x == 0 && zAxis.z == 0) ? vec3(1, 0, 0) : glm::normalize(vec3(zAxis.x, 0, zAxis.z));
glm::vec3 newX = glm::cross(vec3(0, 1, 0), newZ);
glm::vec3 newY = glm::cross(newZ, newX);
glm::mat4 temp(glm::vec4(newX, 0), glm::vec4(newY, 0), glm::vec4(newZ, 0), m[3]);
return temp;
}
glm::vec3 transformPoint(const glm::mat4& m, glm::vec3 p) {
glm::vec4 temp = m * glm::vec4(p, 1);
return glm::vec3(temp.x / temp.w, temp.y / temp.w, temp.z / temp.w);
}

View file

@ -150,42 +150,9 @@ T toNormalizedDeviceScale(const T& value, const T& size) {
#define PITCH(euler) euler.x
#define ROLL(euler) euler.z
// create matrix from orientation and position
glm::mat4 createMatFromQuatAndPos(glm::quat q, glm::vec3 p) {
glm::mat4 m = glm::mat4_cast(q);
m[3] = glm::vec4(p, 1);
return m;
}
// cancel out roll and pitch
glm::quat cancelOutRollAndPitch(glm::quat q) {
glm::vec3 xAxis = q * glm::vec3(1, 0, 0);
glm::vec3 yAxis = q * glm::vec3(0, 1, 0);
glm::vec3 zAxis = q * glm::vec3(0, 0, 1);
// cancel out the roll and pitch
glm::vec3 newZ = (zAxis.x == 0 && zAxis.z == 0) ? vec3(1, 0, 0) : glm::normalize(vec3(zAxis.x, 0, zAxis.z));
glm::vec3 newX = glm::cross(vec3(0, 1, 0), newZ);
glm::vec3 newY = glm::cross(newZ, newX);
glm::mat4 temp(glm::vec4(newX, 0), glm::vec4(newY, 0), glm::vec4(newZ, 0), glm::vec4(0, 0, 0, 1));
return glm::quat_cast(temp);
}
// cancel out roll and pitch
glm::mat4 cancelOutRollAndPitch(glm::mat4 m) {
glm::vec3 xAxis = glm::vec3(m[0]);
glm::vec3 yAxis = glm::vec3(m[1]);
glm::vec3 zAxis = glm::vec3(m[2]);
// cancel out the roll and pitch
glm::vec3 newZ = (zAxis.x == 0 && zAxis.z == 0) ? vec3(1, 0, 0) : glm::normalize(vec3(zAxis.x, 0, zAxis.z));
glm::vec3 newX = glm::cross(vec3(0, 1, 0), newZ);
glm::vec3 newY = glm::cross(newZ, newX);
glm::mat4 temp(glm::vec4(newX, 0), glm::vec4(newY, 0), glm::vec4(newZ, 0), m[3]);
return temp;
}
glm::mat4 createMatFromQuatAndPos(glm::quat q, glm::vec3 p);
glm::quat cancelOutRollAndPitch(glm::quat q);
glm::mat4 cancelOutRollAndPitch(glm::mat4 m);
glm::vec3 transformPoint(const glm::mat4& m, glm::vec3 p);
#endif // hifi_GLMHelpers_h