merge with standing-mode, hands relative to neck

This commit is contained in:
Sam Gondelman 2015-07-02 17:44:19 -07:00
commit 3f6fbd058c
8 changed files with 176 additions and 17 deletions

View file

@ -952,9 +952,20 @@ void Application::paintGL() {
// per-eye HMD pose will be applied later. So set the camera orientation
// to only the yaw, excluding pitch and roll, i.e. an orientation that
// is orthogonal to the (body's) Y axis
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
//_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
// AJT: no actually we do want the roll and pitch
_myCamera.setRotation(getHeadOrientation());
}
/*
qCDebug(interfaceapp, "paintGL");
glm::vec3 cameraPos = _myCamera.getPosition();
glm::quat cameraRot = _myCamera.getRotation();
qCDebug(interfaceapp, "\tcamera pos = (%.5f, %.5f, %.5f)", cameraPos.x, cameraPos.y, cameraPos.z);
qCDebug(interfaceapp, "\tcamera rot = (%.5f, %.5f, %.5f, %.5f)", cameraRot.x, cameraRot.y, cameraRot.z, cameraRot.w);
*/
} else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
if (isHMDMode()) {
_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());
@ -2713,6 +2724,7 @@ void Application::setPalmData(Hand* hand, UserInputMapper::PoseValue pose, int i
// TODO: velocity filters, tip velocities, etc.
// see SixenseManager
palm->setRawPosition(pose.getTranslation());
palm->setRawRotation(pose.getRotation());
}

View file

@ -75,6 +75,8 @@ const float MyAvatar::ZOOM_MIN = 0.5f;
const float MyAvatar::ZOOM_MAX = 10.0f;
const float MyAvatar::ZOOM_DEFAULT = 1.5f;
// Feature toggle: when true (and in HMD mode) the avatar's world position is
// driven by room-scale head tracking — see MyAvatar::updateRoomTracking().
static bool isRoomTracking = true;
MyAvatar::MyAvatar() :
Avatar(),
_gravity(0.0f, 0.0f, 0.0f),
@ -99,7 +101,9 @@ MyAvatar::MyAvatar() :
_realWorldFieldOfView("realWorldFieldOfView",
DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
_firstPersonSkeletonModel(this),
_prevShouldDrawHead(true)
_prevShouldDrawHead(true),
_prevRoomBodyPos(0, 0, 0),
_currRoomBodyPos(0, 0, 0)
{
_firstPersonSkeletonModel.setIsFirstPerson(true);
@ -149,6 +153,13 @@ void MyAvatar::reset() {
}
void MyAvatar::update(float deltaTime) {
/*
qCDebug(interfaceapp, "update()");
glm::vec3 pos = getPosition();
qCDebug(interfaceapp, "\tpos = (%.5f, %.5f, %.5f)", pos.x, pos.y, pos.z);
*/
if (_referential) {
_referential->update();
}
@ -156,6 +167,9 @@ void MyAvatar::update(float deltaTime) {
Head* head = getHead();
head->relaxLean(deltaTime);
updateFromTrackers(deltaTime);
if (qApp->isHMDMode() && isRoomTracking) {
updateRoomTracking(deltaTime);
}
// Get audio loudness data from audio input device
auto audio = DependencyManager::get<AudioClient>();
head->setAudioLoudness(audio->getLastInputLoudness());
@ -312,10 +326,12 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
relativePosition.x = -relativePosition.x;
}
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
if (!(inHmd && isRoomTracking)) {
head->setLeanSideways(glm::clamp(glm::degrees(atanf(relativePosition.x * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
head->setLeanForward(glm::clamp(glm::degrees(atanf(relativePosition.z * _leanScale / TORSO_LENGTH)),
-MAX_LEAN, MAX_LEAN));
}
}
@ -929,7 +945,14 @@ bool MyAvatar::isLookingAtLeftEye() {
}
glm::vec3 MyAvatar::getDefaultEyePosition() const {
    // World-space position of the skeleton's default eye point: the avatar's
    // position plus the model-local eye offset rotated into world space.
    // Uses the yaw-only (world-aligned) orientation, so head pitch/roll do not
    // displace the default eye position.
    // Fix: removed the stale pre-merge `return _position + ...` statement that
    // made this line unreachable, and the dead commented-out debug logging.
    return getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
}
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
@ -1299,6 +1322,23 @@ void MyAvatar::updateOrientation(float deltaTime) {
head->setBaseYaw(YAW(euler));
head->setBasePitch(PITCH(euler));
head->setBaseRoll(ROLL(euler));
mat4 pose = glm::mat4_cast(qApp->getHeadOrientation());
vec3 xAxis = vec3(pose[0]);
vec3 yAxis = vec3(pose[1]);
vec3 zAxis = vec3(pose[2]);
// cancel out the roll and pitch
vec3 newZ = (zAxis.x == 0 && zAxis.z == 0) ? vec3(1, 0, 0) : glm::normalize(vec3(zAxis.x, 0, zAxis.z));
vec3 newX = glm::cross(vec3(0, 1, 0), newZ);
vec3 newY = glm::cross(newZ, newX);
mat4 m;
m[0] = vec4(newX, 0);
m[1] = vec4(newY, 0);
m[2] = vec4(newZ, 0);
m[3] = pose[3];
setOrientation(glm::quat(m));
}
}
@ -1454,6 +1494,14 @@ void MyAvatar::updatePosition(float deltaTime) {
const float MOVING_SPEED_THRESHOLD = 0.01f;
_moving = speed > MOVING_SPEED_THRESHOLD;
if (qApp->isHMDMode() && isRoomTracking) {
glm::vec3 newPos = (_currRoomBodyPos - _prevRoomBodyPos) + getPosition();
/*
qCDebug(interfaceapp, "updatePosition");
qCDebug(interfaceapp, "\tnewPos = (%.5f, %.5f, %.5f)", newPos.x, newPos.y, newPos.z);
*/
setPosition(newPos);
}
}
void MyAvatar::updateCollisionSound(const glm::vec3 &penetration, float deltaTime, float frequency) {
@ -1519,6 +1567,26 @@ void MyAvatar::maybeUpdateBillboard() {
sendBillboardPacket();
}
void MyAvatar::updateRoomTracking(float deltaTime) {
    // Derive the body (root) position implied by the HMD's room-space head
    // pose, and keep a one-frame history so updatePosition() can apply the
    // per-frame room-space delta to the avatar's world position.
    // Note: deltaTime is currently unused; kept for interface stability.
    (void)deltaTime;

    vec3 localEyes = _skeletonModel.getDefaultEyeModelPosition();
    vec3 localNeck;
    if (_skeletonModel.getLocalNeckPosition(localNeck)) {
        // Walk from the tracked head position toward the model root:
        // eyes -> neck, then the neck's offset from the root, both rotated by
        // the current head orientation.
        // NOTE(review): neckToRoot rotates the model-local neck position;
        // confirm the sign convention matches root-relative joint positions.
        glm::vec3 eyeToNeck = qApp->getHeadOrientation() * (localNeck - localEyes);
        glm::vec3 neckToRoot = qApp->getHeadOrientation() * localNeck;
        glm::vec3 roomBodyPos = qApp->getHeadPosition() + eyeToNeck + neckToRoot;

        _prevRoomBodyPos = _currRoomBodyPos;
        _currRoomBodyPos = roomBodyPos;
        // Fix: removed unused locals `delta` and `e` that were computed only
        // for the commented-out debug logging, and the dead logging itself.
    }
}
void MyAvatar::increaseSize() {
if ((1.0f + SCALING_RATIO) * _targetScale < MAX_AVATAR_SCALE) {
_targetScale *= (1.0f + SCALING_RATIO);

View file

@ -169,7 +169,9 @@ public:
static const float ZOOM_MIN;
static const float ZOOM_MAX;
static const float ZOOM_DEFAULT;
const glm::vec3& getRoomBodyPos() const { return _currRoomBodyPos; }
public slots:
void increaseSize();
void decreaseSize();
@ -260,7 +262,8 @@ private:
void updatePosition(float deltaTime);
void updateCollisionSound(const glm::vec3& penetration, float deltaTime, float frequency);
void maybeUpdateBillboard();
void updateRoomTracking(float deltaTime);
// Avatar Preferences
bool _useFullAvatar = false;
QUrl _fullAvatarURLFromPreferences;
@ -274,6 +277,9 @@ private:
// used for rendering when in first person view or when in an HMD.
SkeletonModel _firstPersonSkeletonModel;
bool _prevShouldDrawHead;
glm::vec3 _prevRoomBodyPos;
glm::vec3 _currRoomBodyPos;
glm::mat4 _headWorldTransformMat;
};
#endif // hifi_MyAvatar_h

View file

@ -507,6 +507,10 @@ bool SkeletonModel::getNeckPosition(glm::vec3& neckPosition) const {
return isActive() && getJointPositionInWorldFrame(_geometry->getFBXGeometry().neckJointIndex, neckPosition);
}
/// Returns the position of the neck joint in model-local (joint) space.
/// \param neckPosition out-parameter receiving the position on success
/// \return whether or not the model is active and the neck was found
bool SkeletonModel::getLocalNeckPosition(glm::vec3& neckPosition) const {
    if (!isActive()) {
        return false;
    }
    const int neckJointIndex = _geometry->getFBXGeometry().neckJointIndex;
    return getJointPosition(neckJointIndex, neckPosition);
}
bool SkeletonModel::getNeckParentRotationFromDefaultOrientation(glm::quat& neckParentRotation) const {
if (!isActive()) {
return false;

View file

@ -83,6 +83,7 @@ public:
/// Returns the position of the neck joint.
/// \return whether or not the neck was found
bool getNeckPosition(glm::vec3& neckPosition) const;
bool getLocalNeckPosition(glm::vec3& neckPosition) const;
/// Returns the rotation of the neck joint's parent from default orientation
/// \return whether or not the neck was found

View file

@ -24,6 +24,9 @@
#include "OpenVrHelpers.h"
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(displayplugins)
// Fix: category string must identify this subsystem. "hifi.physics" was a
// copy-paste from the physics module and mislabeled every log line emitted
// here (compare "hifi.inputplugins" used by the input plugins).
Q_LOGGING_CATEGORY(displayplugins, "hifi.displayplugins")
const QString OpenVrDisplayPlugin::NAME("OpenVR (Vive)");
@ -128,8 +131,47 @@ mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) con
return _eyesData[eye]._projectionMatrix;
}
mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
return baseModelview * _eyesData[eye]._pose;
glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
/*
qCDebug(displayplugins, displayplugins, "getModelView(%d)\n", eye);
glm::mat4 m = baseModelview;
qCDebug(displayplugins, "\tbaseModelView = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
m = _eyesData[eye]._eyeOffset;
qCDebug(displayplugins, "\teyeOffset = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
m = glm::inverse(_eyesData[eye]._eyeOffset);
qCDebug(displayplugins, "\teyeOffsetInv = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
m = _eyesData[eye]._pose;
qCDebug(displayplugins, "\tpose = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
m = glm::inverse(_eyesData[eye]._eyeOffset) * baseModelview;
qCDebug(displayplugins, "\tbroken modelView = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
m = baseModelview * _eyesData[eye]._pose;
qCDebug(displayplugins, "\tworking modelView = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
*/
return baseModelview * _eyesData[eye]._eyeOffset;
}
void OpenVrDisplayPlugin::resetSensors() {
@ -154,11 +196,11 @@ void OpenVrDisplayPlugin::resetSensors() {
}
glm::mat4 OpenVrDisplayPlugin::getEyePose(Eye eye) const {
    // Returns only the per-eye eye-to-head offset; the tracked head pose is
    // available separately via getHeadPose().
    // Fix: removed the stale pre-merge `return _eyesData[eye]._pose;` line
    // that preceded (and made unreachable) the intended return.
    return _eyesData[eye]._eyeOffset;
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
    // _trackedDevicePoseMat4[0] already has _sensorResetMat folded in when it
    // is written in finishFrame(), so no extra multiply is needed here.
    // Fix: removed the stale pre-merge `return _sensorResetMat * ...;` line
    // that preceded (and made unreachable) the intended return.
    return _trackedDevicePoseMat4[0];
}
void OpenVrDisplayPlugin::customizeContext(PluginContainer * container) {
@ -179,10 +221,10 @@ void OpenVrDisplayPlugin::finishFrame() {
doneCurrent();
_compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePoseMat4[i] = toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
}
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyesData[eye]._pose = _sensorResetMat * _trackedDevicePoseMat4[0];
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
});
};

View file

@ -17,6 +17,11 @@
#include <display-plugins\openvr\OpenVrHelpers.h>
#include "UserActivityLogger.h"
#include <QLoggingCategory>
Q_DECLARE_LOGGING_CATEGORY(inputplugins)
Q_LOGGING_CATEGORY(inputplugins, "hifi.inputplugins")
extern vr::IVRSystem *_hmd;
extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
@ -49,6 +54,9 @@ void ViveControllerManager::activate() {
}
void ViveControllerManager::update() {
if (!_hmd) {
return;
}
if (_isInitialized && _isEnabled) {
PerformanceTimer perfTimer("Vive Controllers");
@ -127,7 +135,10 @@ void ViveControllerManager::handleButtonEvent(unsigned int buttons, int index) {
}
void ViveControllerManager::handlePoseEvent(const mat4& mat, int index) {
glm::vec3 position(mat[3][0], mat[3][1], mat[3][2]);
glm::vec4 p = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd][3];
glm::vec3 headPos(p.x, p.y, p.z);
glm::vec3 position = glm::vec3(mat[3][0], mat[3][1], mat[3][2]) - headPos;
glm::quat rotation = glm::quat_cast(mat);
//rotation = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f)) * rotation;
_poseStateMap[makeInput(JointChannel(index)).getChannel()] = UserInputMapper::PoseValue(position, rotation);

View file

@ -7,6 +7,7 @@
#include "BulletUtil.h"
#include "DynamicCharacterController.h"
#include "PhysicsLogging.h"
const btVector3 LOCAL_UP_AXIS(0.0f, 1.0f, 0.0f);
const float DEFAULT_GRAVITY = -5.0f;
@ -356,6 +357,13 @@ void DynamicCharacterController::preSimulation(btScalar timeStep) {
glm::vec3 position = _avatarData->getPosition() + rotation * _shapeLocalOffset;
_rigidBody->setWorldTransform(btTransform(glmToBullet(rotation), glmToBullet(position)));
/*
qCDebug(physics, "preSimulation()");
qCDebug(physics, "\trigidbody position = (%.5f, %.5f, %.5f)", position.x, position.y, position.z);
glm::vec3 p = _avatarData->getPosition();
qCDebug(physics, "\tavatar position = (%.5f, %.5f, %.5f)", p.x, p.y, p.z);
*/
// the rotation is dictated by AvatarData
btTransform xform = _rigidBody->getWorldTransform();
xform.setRotation(glmToBullet(rotation));
@ -411,6 +419,13 @@ void DynamicCharacterController::postSimulation() {
_avatarData->setOrientation(rotation);
_avatarData->setPosition(position - rotation * _shapeLocalOffset);
_avatarData->setVelocity(bulletToGLM(_rigidBody->getLinearVelocity()));
/*
qCDebug(physics, "postSimulation()");
qCDebug(physics, "\trigidbody position = (%.5f, %.5f, %.5f)", position.x, position.y, position.z);
glm::vec3 p = position - rotation * _shapeLocalOffset;
qCDebug(physics, "\tavatar position = (%.5f, %.5f, %.5f)", p.x, p.y, p.z);
qCDebug(physics, "\t_shapeLocalOffset = (%.5f, %.5f, %.5f)", _shapeLocalOffset.x, _shapeLocalOffset.y, _shapeLocalOffset.z);
*/
}
}