Mirror of https://github.com/overte-org/overte.git
Merge branch 'sam/vive-hand-controllers' of github.com:hyperlogic/hifi into sam/vive-hand-controllers
Commit 65b76869fa

3 changed files with 47 additions and 31 deletions
MyAvatar.cpp

@@ -108,7 +108,10 @@ MyAvatar::MyAvatar() :
     _hmdSensorPosition(),
     _bodySensorMatrix(),
     _sensorToWorldMatrix(),
-    _standingHMDSensorMode(false)
+    _standingHMDSensorMode(false),
+    _goToPending(false),
+    _goToPosition(),
+    _goToOrientation()
 {
     _firstPersonSkeletonModel.setIsFirstPerson(true);
@@ -159,6 +162,12 @@ void MyAvatar::reset() {

 void MyAvatar::update(float deltaTime) {

+    if (_goToPending) {
+        setPosition(_goToPosition);
+        setOrientation(_goToOrientation);
+        _goToPending = false;
+    }
+
     if (_referential) {
         _referential->update();
     }
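Note on the deferred go-to: goToLocation() no longer moves the avatar directly; it only records the request in _goToPending / _goToPosition / _goToOrientation, and MyAvatar::update() applies it once at the top of the frame, before the referential and the rest of the simulation run. Below is a minimal standalone sketch of the same pattern; the Avatar struct and every member other than the _goTo* fields are illustrative stand-ins, not the real MyAvatar API.

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Minimal sketch of the deferred-teleport pattern used above.
    // "Avatar" is a stand-in type, not the engine's MyAvatar class.
    struct Avatar {
        bool _goToPending = false;
        glm::vec3 _goToPosition{0.0f};
        glm::quat _goToOrientation{1.0f, 0.0f, 0.0f, 0.0f};

        glm::vec3 position{0.0f};
        glm::quat orientation{1.0f, 0.0f, 0.0f, 0.0f};

        // Called from anywhere (script, address bar, ...): only records the request.
        void goToLocation(const glm::vec3& newPosition, const glm::quat& newOrientation) {
            _goToPending = true;
            _goToPosition = newPosition;
            _goToOrientation = newOrientation;
        }

        // Called once per frame: applies any pending request at a well-defined point.
        void update(float /*deltaTime*/) {
            if (_goToPending) {
                position = _goToPosition;
                orientation = _goToOrientation;
                _goToPending = false;
            }
            // ... rest of the per-frame simulation ...
        }
    };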
@@ -250,22 +259,31 @@ void MyAvatar::simulate(float deltaTime) {
     maybeUpdateBillboard();
 }

+glm::mat4 MyAvatar::getSensorToWorldMatrix() const {
+    if (getStandingHMDSensorMode()) {
+        return _sensorToWorldMatrix;
+    } else {
+        return createMatFromQuatAndPos(getWorldAlignedOrientation(), getDefaultEyePosition());
+    }
+}
+
 // best called at end of main loop, just before rendering.
 // update sensor to world matrix from current body position and hmd sensor.
 // This is so the correct camera can be used for rendering.
 void MyAvatar::updateFromHMDSensorMatrix(const glm::mat4& hmdSensorMatrix) {

     // update the sensorMatrices based on the new hmd pose
     _hmdSensorMatrix = hmdSensorMatrix;
     _hmdSensorPosition = extractTranslation(hmdSensorMatrix);
     _hmdSensorOrientation = glm::quat_cast(hmdSensorMatrix);
     _bodySensorMatrix = deriveBodyFromHMDSensor();

+    if (getStandingHMDSensorMode()) {
         // set the body position/orientation to reflect motion due to the head.
         auto worldMat = _sensorToWorldMatrix * _bodySensorMatrix;
         setPosition(extractTranslation(worldMat));
         setOrientation(glm::quat_cast(worldMat));
     }
+}

 // best called at end of main loop, just before rendering.
 // update sensor to world matrix from current body position and hmd sensor.
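The new getSensorToWorldMatrix() returns the stored sensor-to-world transform only in standing-HMD-sensor mode and otherwise synthesizes one from the world-aligned orientation and default eye position, while updateFromHMDSensorMatrix() derives a body pose in sensor (room) space and, in standing mode, composes it with the sensor-to-world matrix to place the avatar in the world. A small self-contained sketch of that composition follows; createMatFromQuatAndPos and extractTranslation are approximated with plain glm here, and the sample poses are made up.

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Sketch only: how a sensor-space pose becomes a world-space pose.
    // The engine's GLMHelpers are approximated with plain glm.
    static glm::mat4 createMatFromQuatAndPos(const glm::quat& q, const glm::vec3& p) {
        glm::mat4 m = glm::mat4_cast(q);   // rotation part
        m[3] = glm::vec4(p, 1.0f);         // translation in the last column
        return m;
    }

    static glm::vec3 extractTranslation(const glm::mat4& m) {
        return glm::vec3(m[3]);
    }

    int main() {
        // Pose of the body, expressed in HMD sensor (room) space.
        glm::mat4 bodySensorMatrix =
            createMatFromQuatAndPos(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 1.0f, -0.5f));
        // Where the sensor/room origin sits in world space.
        glm::mat4 sensorToWorldMatrix =
            createMatFromQuatAndPos(glm::quat(1.0f, 0.0f, 0.0f, 0.0f), glm::vec3(10.0f, 0.0f, 10.0f));

        // Same composition as updateFromHMDSensorMatrix() uses in standing mode:
        glm::mat4 worldMat = sensorToWorldMatrix * bodySensorMatrix;
        glm::vec3 worldPosition = extractTranslation(worldMat);   // (10, 1, 9.5)
        (void)worldPosition;
        return 0;
    }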
@@ -1604,15 +1622,9 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
     qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - moving to " << newPosition.x << ", "
         << newPosition.y << ", " << newPosition.z;

-    if (qApp->isHMDMode() && getStandingHMDSensorMode()) {
-        // AJT: FIXME, does not work with orientation.
-        // AJT: FIXME, does not work with shouldFaceLocation flag.
-        // Set the orientation of the sensor room, not the avatar itself.
-        glm::mat4 m;
-        m[3] = glm::vec4(newPosition, 1);
-        _sensorToWorldMatrix = m;
-    } else {
-        glm::vec3 shiftedPosition = newPosition;
+    _goToPending = true;
+    _goToPosition = newPosition;
+    _goToOrientation = getOrientation();

     if (hasOrientation) {
         qCDebug(interfaceapp).nospace() << "MyAvatar goToLocation - new orientation is "
             << newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
@@ -1625,14 +1637,12 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,

             // move the user a couple units away
             const float DISTANCE_TO_USER = 2.0f;
-            shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
+            _goToPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
         }

-        setOrientation(quatOrientation);
+        _goToOrientation = quatOrientation;
     }

-        slamPosition(shiftedPosition);
-    }

     emit transformChanged();
 }
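In goToLocation(), when the caller asks the avatar to face the target, the supplied orientation is spun half a turn about the up axis (that spin happens in code outside the hunks shown here) and the go-to position is then backed off by DISTANCE_TO_USER along the resulting forward direction, so the avatar lands a couple of units in front of the target looking at it. A worked sketch of that arithmetic follows; the 180-degree spin and the IDENTITY_FRONT value of (0, 0, -1) are assumptions about code not visible in this diff.

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>
    #include <glm/gtc/constants.hpp>

    int main() {
        // Assumptions for this sketch (not taken from the diff):
        const glm::vec3 IDENTITY_FRONT(0.0f, 0.0f, -1.0f);   // assumed "forward" convention
        const float PI = glm::pi<float>();
        const float DISTANCE_TO_USER = 2.0f;                  // from the diff above

        glm::vec3 newPosition(5.0f, 0.0f, 5.0f);              // target location
        glm::quat newOrientation(1.0f, 0.0f, 0.0f, 0.0f);     // target's own orientation

        // Face the target: turn 180 degrees about the world up axis.
        glm::quat quatOrientation =
            newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));

        // Stand a couple of units in front of it, along the new forward direction.
        glm::vec3 goToPosition =
            newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;

        (void)goToPosition;   // ends up two units in front of newPosition, facing it
        return 0;
    }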
MyAvatar.h

@@ -48,7 +48,7 @@ public:
     const glm::mat4& getHMDSensorMatrix() const { return _hmdSensorMatrix; }
     const glm::vec3& getHMDSensorPosition() const { return _hmdSensorPosition; }
     const glm::quat& getHMDSensorOrientation() const { return _hmdSensorOrientation; }
-    glm::mat4 getSensorToWorldMatrix() const { return _sensorToWorldMatrix; }
+    glm::mat4 getSensorToWorldMatrix() const;

     // best called at start of main loop just after we have a fresh hmd pose.
     // update internal body position from new hmd pose.
@@ -330,6 +330,10 @@ private:
     glm::mat4 _sensorToWorldMatrix;

     bool _standingHMDSensorMode;
+
+    bool _goToPending;
+    glm::vec3 _goToPosition;
+    glm::quat _goToOrientation;
 };

 #endif // hifi_MyAvatar_h
OculusBaseDisplayPlugin.cpp

@@ -54,7 +54,9 @@ glm::mat4 OculusBaseDisplayPlugin::getProjection(Eye eye, const glm::mat4& baseP
 }

 glm::mat4 OculusBaseDisplayPlugin::getModelview(Eye eye, const glm::mat4& baseModelview) const {
-    return baseModelview * toGlm(_eyePoses[eye]);
+    auto eyeOffsetMat = glm::mat4();
+    setTranslation(eyeOffsetMat, toGlm(_eyeOffsets[eye]));
+    return baseModelview * eyeOffsetMat;
 }

 void OculusBaseDisplayPlugin::resetSensors() {
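The Oculus plugin change stops folding the full per-eye pose into the modelview and instead applies only the per-eye translation offset, presumably so that eye separation comes from the plugin while head rotation is carried by the sensor matrices handled above. setTranslation appears to be a GLMHelpers-style utility that writes a vec3 into the matrix's translation column; below is a plain-glm sketch of the new body, with eyeOffset standing in for toGlm(_eyeOffsets[eye]).

    #include <glm/glm.hpp>
    #include <glm/gtc/matrix_transform.hpp>

    // Sketch of the new getModelview() body using plain glm.
    // eyeOffset stands in for toGlm(_eyeOffsets[eye]), typically about half the IPD on X.
    glm::mat4 getModelview(const glm::mat4& baseModelview, const glm::vec3& eyeOffset) {
        // Equivalent to: auto eyeOffsetMat = glm::mat4(); setTranslation(eyeOffsetMat, eyeOffset);
        glm::mat4 eyeOffsetMat = glm::translate(glm::mat4(1.0f), eyeOffset);
        return baseModelview * eyeOffsetMat;
    }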