Mirror of https://github.com/lubosz/overte.git, synced 2025-04-16 01:13:40 +02:00
Correct controller offsets with sensor scaling
commit d62a0ea8ff
parent ffdb10681e
5 changed files with 23 additions and 15 deletions
@@ -2478,9 +2478,9 @@ void Application::paintGL() {
     auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
     float ipdScale = hmdInterface->getIPDScale();
 
-    // scale IPD by height ratio, to make the world seem larger or smaller accordingly.
-    float heightRatio = getMyAvatar()->getEyeHeight() / getMyAvatar()->getUserEyeHeight();
-    ipdScale *= heightRatio;
+    // scale IPD by sensorToWorldScale, to make the world seem larger or smaller accordingly.
+    float sensorToWorldScale = getMyAvatar()->getSensorToWorldScale();
+    ipdScale *= sensorToWorldScale;
 
     mat4 eyeProjections[2];
     {
@@ -2509,7 +2509,7 @@ void Application::paintGL() {
         // adjust near clip plane by heightRatio
         auto baseProjection = glm::perspective(renderArgs.getViewFrustum().getFieldOfView(),
                                                renderArgs.getViewFrustum().getAspectRatio(),
-                                               renderArgs.getViewFrustum().getNearClip() * heightRatio,
+                                               renderArgs.getViewFrustum().getNearClip() * sensorToWorldScale,
                                                renderArgs.getViewFrustum().getFarClip());
 
         // FIXME we probably don't need to set the projection matrix every frame,
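The two paintGL() hunks above apply the same factor to both the IPD and the near clip plane. A minimal standalone sketch of that arithmetic (not Overte code; the scale value, clip distances, and glm usage are assumptions):

```cpp
// Sketch: scale the IPD and the near clip by the same sensor-to-world factor so
// the rendered world grows or shrinks consistently with the avatar's scale.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <cstdio>

int main() {
    float sensorToWorldScale = 0.5f;   // assumed: avatar at half normal size
    float ipdScale = 1.0f;             // assumed value from the HMD interface

    ipdScale *= sensorToWorldScale;    // mirrors: ipdScale *= sensorToWorldScale;

    float fieldOfView = glm::radians(90.0f);
    float aspectRatio = 16.0f / 9.0f;
    float nearClip = 0.08f;            // metres, assumed default
    float farClip = 16000.0f;          // metres, assumed default

    // mirrors: glm::perspective(fov, aspect, nearClip * sensorToWorldScale, farClip)
    glm::mat4 baseProjection = glm::perspective(fieldOfView, aspectRatio,
                                                nearClip * sensorToWorldScale, farClip);

    std::printf("ipdScale=%.2f scaledNear=%.3f\n", ipdScale, nearClip * sensorToWorldScale);
    (void)baseProjection;
    return 0;
}
```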
@@ -679,7 +679,6 @@ void MyAvatar::updateSensorToWorldMatrix() {
     }
 
     _sensorToWorldMatrixCache.set(_sensorToWorldMatrix);
-    _sensorToWorldScaleCache.set(sensorToWorldScale);
 
     updateJointFromController(controller::Action::LEFT_HAND, _controllerLeftHandMatrixCache);
     updateJointFromController(controller::Action::RIGHT_HAND, _controllerRightHandMatrixCache);
@@ -2577,8 +2576,8 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
     glm::vec3 headToNeck = headOrientation * Quaternions::Y_180 * (localNeck - localHead);
     glm::vec3 neckToRoot = headOrientationYawOnly * Quaternions::Y_180 * -localNeck;
 
-    float invHeightRatio = getUserEyeHeight() / getEyeHeight();
-    glm::vec3 bodyPos = headPosition + invHeightRatio * (headToNeck + neckToRoot);
+    float invSensorToWorldScale = getUserEyeHeight() / getEyeHeight();
+    glm::vec3 bodyPos = headPosition + invSensorToWorldScale * (headToNeck + neckToRoot);
 
     return createMatFromQuatAndPos(headOrientationYawOnly, bodyPos);
 }
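The renamed factor in deriveBodyFromHMDSensor() is the reciprocal of the sensor-to-world scale (user eye height over avatar eye height). A standalone sketch of that arithmetic with assumed heights and offsets (not Overte code):

```cpp
// Sketch: the head-to-body offsets are multiplied by the inverse sensor-to-world
// scale before being added to the HMD head position, matching the hunk above.
#include <glm/glm.hpp>
#include <cstdio>

int main() {
    float userEyeHeight = 1.60f;    // assumed real-world eye height (metres)
    float avatarEyeHeight = 0.80f;  // assumed avatar eye height (metres)

    // Same expression as the diff: getUserEyeHeight() / getEyeHeight() == 2.0 here.
    float invSensorToWorldScale = userEyeHeight / avatarEyeHeight;

    glm::vec3 headPosition(0.0f, 1.60f, 0.0f);   // assumed HMD head position
    glm::vec3 headToNeck(0.0f, -0.10f, 0.05f);   // assumed skeleton offset
    glm::vec3 neckToRoot(0.0f, -0.70f, 0.0f);    // assumed skeleton offset

    glm::vec3 bodyPos = headPosition + invSensorToWorldScale * (headToNeck + neckToRoot);
    std::printf("bodyPos = (%.2f, %.2f, %.2f)\n", bodyPos.x, bodyPos.y, bodyPos.z);
    return 0;
}
```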
@@ -2401,6 +2401,11 @@ glm::mat4 AvatarData::getSensorToWorldMatrix() const {
     return _sensorToWorldMatrixCache.get();
 }
 
+// thread-safe
+float AvatarData::getSensorToWorldScale() const {
+    return extractUniformScale(_sensorToWorldMatrixCache.get());
+}
+
 // thread-safe
 glm::mat4 AvatarData::getControllerLeftHandMatrix() const {
     return _controllerLeftHandMatrixCache.get();
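Because getSensorToWorldScale() derives the scale on demand from the already-cached matrix, the separate _sensorToWorldScaleCache.set(...) call removed earlier is no longer needed. A minimal sketch of the mutex-guarded value-cache pattern this relies on, with an assumed implementation of extractUniformScale (one common way is the length of a basis column of the upper-left 3x3); neither is the project's actual code:

```cpp
// Sketch of a thread-safe value cache plus an assumed extractUniformScale.
#include <glm/glm.hpp>
#include <mutex>

template <typename T>
class ValueCacheSketch {
public:
    void set(const T& value) {
        std::lock_guard<std::mutex> lock(_mutex);
        _value = value;
    }
    T get() const {
        std::lock_guard<std::mutex> lock(_mutex);
        return _value;
    }
private:
    mutable std::mutex _mutex;
    T _value {};
};

// Assumption: a uniform scale can be recovered as the length of one basis column,
// valid when the matrix is rotation * uniform scale * translation.
inline float extractUniformScaleSketch(const glm::mat4& m) {
    return glm::length(glm::vec3(m[0]));
}

class AvatarDataSketch {
public:
    // Mirrors the new getter: read the cached matrix, derive the scale on demand.
    float getSensorToWorldScale() const {
        return extractUniformScaleSketch(_sensorToWorldMatrixCache.get());
    }
private:
    ValueCacheSketch<glm::mat4> _sensorToWorldMatrixCache;
};
```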
@@ -385,6 +385,8 @@ class AvatarData : public QObject, public SpatiallyNestable {
     Q_PROPERTY(glm::mat4 controllerLeftHandMatrix READ getControllerLeftHandMatrix)
     Q_PROPERTY(glm::mat4 controllerRightHandMatrix READ getControllerRightHandMatrix)
 
+    Q_PROPERTY(float sensorToWorldScale READ getSensorToWorldScale)
+
 public:
 
     virtual QString getName() const override { return QString("Avatar:") + _displayName; }
@@ -617,6 +619,7 @@ public:
 
     // thread safe
     Q_INVOKABLE glm::mat4 getSensorToWorldMatrix() const;
+    Q_INVOKABLE float getSensorToWorldScale() const;
     Q_INVOKABLE glm::mat4 getControllerLeftHandMatrix() const;
     Q_INVOKABLE glm::mat4 getControllerRightHandMatrix() const;
 
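The header changes expose the new getter both as a Q_PROPERTY and as a Q_INVOKABLE, which is what lets scripts read it as MyAvatar.sensorToWorldScale in the JavaScript hunks below. A bare-bones illustration of that Qt pattern (not the real AvatarData header; class and member names here are placeholders):

```cpp
// Sketch: a read-only Qt property backed by a getter; once an object like this
// is registered with the script engine, scripts can read the value as a plain
// property instead of calling the getter explicitly.
#include <QObject>

class AvatarDataSketch : public QObject {
    Q_OBJECT
    Q_PROPERTY(float sensorToWorldScale READ getSensorToWorldScale)
public:
    Q_INVOKABLE float getSensorToWorldScale() const { return _sensorToWorldScale; }
private:
    float _sensorToWorldScale { 1.0f };
};
```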
@@ -19,14 +19,15 @@ getGrabCommunications = function getFarGrabCommunications() {
 var GRAB_POINT_SPHERE_OFFSET = { x: 0.04, y: 0.13, z: 0.039 }; // x = upward, y = forward, z = lateral
 
 getGrabPointSphereOffset = function(handController) {
-    if (handController === Controller.Standard.RightHand) {
-        return GRAB_POINT_SPHERE_OFFSET;
+    var offset = GRAB_POINT_SPHERE_OFFSET;
+    if (handController === Controller.Standard.LeftHand) {
+        offset = {
+            x: -GRAB_POINT_SPHERE_OFFSET.x,
+            y: GRAB_POINT_SPHERE_OFFSET.y,
+            z: GRAB_POINT_SPHERE_OFFSET.z
+        };
     }
-    return {
-        x: GRAB_POINT_SPHERE_OFFSET.x * -1,
-        y: GRAB_POINT_SPHERE_OFFSET.y,
-        z: GRAB_POINT_SPHERE_OFFSET.z
-    };
+    return Vec3.multiply(MyAvatar.sensorToWorldScale, offset);
 };
 
 // controllerWorldLocation is where the controller would be, in-world, with an added offset
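The grab-point refactor above keeps a single base offset, mirrors the x component for the left hand, and multiplies the result by the avatar's sensor-to-world scale. The same logic in a standalone C++/glm sketch (the function signature and enum here are illustrative, not the shipped script):

```cpp
// Sketch: mirror the x component for the left hand, then scale the whole offset
// by the sensor-to-world scale so the grab point follows avatar size.
#include <glm/glm.hpp>
#include <cstdio>

enum class Hand { Left, Right };

glm::vec3 getGrabPointSphereOffset(Hand hand, float sensorToWorldScale) {
    const glm::vec3 GRAB_POINT_SPHERE_OFFSET(0.04f, 0.13f, 0.039f);
    glm::vec3 offset = GRAB_POINT_SPHERE_OFFSET;
    if (hand == Hand::Left) {
        offset.x = -offset.x;              // mirrored, as in the new script code
    }
    return sensorToWorldScale * offset;    // mirrors Vec3.multiply(MyAvatar.sensorToWorldScale, offset)
}

int main() {
    glm::vec3 left = getGrabPointSphereOffset(Hand::Left, 0.5f);  // assumed half-size avatar
    std::printf("left grab offset = (%.3f, %.3f, %.3f)\n", left.x, left.y, left.z);
    return 0;
}
```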
@@ -53,7 +54,7 @@ getControllerWorldLocation = function (handController, doOffset) {
 
     } else if (!HMD.isHandControllerAvailable()) {
         // NOTE: keep this offset in sync with scripts/system/controllers/handControllerPointer.js:493
-        var VERTICAL_HEAD_LASER_OFFSET = 0.1;
+        var VERTICAL_HEAD_LASER_OFFSET = 0.1 * MyAvatar.sensorToWorldScale;
         position = Vec3.sum(Camera.position, Vec3.multiplyQbyV(Camera.orientation, {x: 0, y: VERTICAL_HEAD_LASER_OFFSET, z: 0}));
         orientation = Quat.multiply(Camera.orientation, Quat.angleAxis(-90, { x: 1, y: 0, z: 0 }));
         valid = true;