From 3b87cd0ea895cbf40c33bb5a786f3cae981a20f7 Mon Sep 17 00:00:00 2001 From: Anthony Thibault Date: Mon, 22 Feb 2016 19:15:22 -0800 Subject: [PATCH] Improve hold action by using controller velocity The AvatarHoldAction now derives the body velocity by using data from the controller::Pose. Rather than trying to derive it based on previous positions. This results in more accurate motion of the held object when the hold is released. OpenVR input plugin: pass the velocity and angularVelocity directly from the controller pose to the controller::Pose. --- interface/src/Application.cpp | 9 ++++ interface/src/avatar/AvatarActionHold.cpp | 51 +++++++++++++++----- interface/src/avatar/AvatarActionHold.h | 6 ++- libraries/avatars/src/HandData.h | 1 + plugins/openvr/src/OpenVrDisplayPlugin.cpp | 5 ++ plugins/openvr/src/OpenVrHelpers.h | 6 ++- plugins/openvr/src/ViveControllerManager.cpp | 31 ++++++++---- plugins/openvr/src/ViveControllerManager.h | 9 ++-- 8 files changed, 89 insertions(+), 29 deletions(-) diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index 64cd586e13..5c28e31d8c 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -5037,6 +5037,8 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de glm::vec3 position = pose.getTranslation(); glm::quat rotation = pose.getRotation(); + // AJT: REMOVE + /* // Compute current velocity from position change glm::vec3 rawVelocity; if (deltaTime > 0.0f) { @@ -5057,6 +5059,13 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de } else { palm.setRawAngularVelocity(glm::vec3(0.0f)); } + */ + + glm::vec3 rawVelocity = pose.getVelocity(); + glm::vec3 angularVelocity = pose.getAngularVelocity(); + + palm.setRawVelocity(rawVelocity); + palm.setRawAngularVelocity(angularVelocity); if (controller::InputDevice::getLowVelocityFilter()) { // Use a velocity sensitive filter to damp small motions and preserve large ones with diff
--git a/interface/src/avatar/AvatarActionHold.cpp b/interface/src/avatar/AvatarActionHold.cpp index 71bd1b1f82..0093c22377 100644 --- a/interface/src/avatar/AvatarActionHold.cpp +++ b/interface/src/avatar/AvatarActionHold.cpp @@ -93,7 +93,8 @@ void AvatarActionHold::prepareForPhysicsSimulation() { activateBody(true); } -std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position) { +std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position, + glm::vec3& linearVelocity, glm::vec3& angularVelocity) { auto avatarManager = DependencyManager::get(); auto holdingAvatar = std::static_pointer_cast(avatarManager->getAvatarBySessionID(_holderID)); @@ -103,19 +104,22 @@ std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::ve withReadLock([&]{ bool isRightHand = (_hand == "right"); - glm::vec3 palmPosition { Vectors::ZERO }; - glm::quat palmRotation { Quaternions::IDENTITY }; + glm::vec3 palmPosition; + glm::quat palmRotation; + glm::vec3 palmLinearVelocity; + glm::vec3 palmAngularVelocity; + + PalmData palmData = holdingAvatar->getHand()->getCopyOfPalmData(isRightHand ? HandData::RightHand : HandData::LeftHand); + + // TODO: adjust according to _relativePosition and _relativeRotation? + linearVelocity = palmData.getVelocity(); + angularVelocity = palmData.getAngularVelocity(); if (_ignoreIK && holdingAvatar->isMyAvatar()) { // We cannot ignore other avatars IK and this is not the point of this option // This is meant to make the grabbing behavior more reactive. 
- if (isRightHand) { - palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getPosition(); - palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getRotation(); - } else { - palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getPosition(); - palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getRotation(); - } + palmPosition = palmData.getPosition(); + palmRotation = palmData.getRotation(); } else if (holdingAvatar->isMyAvatar()) { glm::vec3 avatarRigidBodyPosition; glm::quat avatarRigidBodyRotation; @@ -153,14 +157,19 @@ std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::ve rotation = palmRotation * _relativeRotation; position = palmPosition + rotation * _relativePosition; + + // update linearVelocity based on offset via _relativePosition; + linearVelocity = linearVelocity + glm::cross(angularVelocity, position - palmPosition); }); return holdingAvatar; } void AvatarActionHold::updateActionWorker(float deltaTimeStep) { - glm::quat rotation { Quaternions::IDENTITY }; - glm::vec3 position { Vectors::ZERO }; + glm::quat rotation; + glm::vec3 position; + glm::vec3 linearVelocity; + glm::vec3 angularVelocity; bool valid = false; int holdCount = 0; @@ -173,7 +182,8 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) { std::shared_ptr holdAction = std::static_pointer_cast(action); glm::quat rotationForAction; glm::vec3 positionForAction; - std::shared_ptr holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction); + glm::vec3 linearVelocityForAction, angularVelocityForAction; + std::shared_ptr holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction, linearVelocityForAction, angularVelocityForAction); if (holdingAvatar) { holdCount ++; if (holdAction.get() == this) { @@ -183,15 +193,21 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) { } position += positionForAction; + 
linearVelocity += linearVelocityForAction; + angularVelocity += angularVelocityForAction; } } if (valid && holdCount > 0) { position /= holdCount; + linearVelocity /= holdCount; + angularVelocity /= holdCount; withWriteLock([&]{ _positionalTarget = position; _rotationalTarget = rotation; + _linearVelocityTarget = linearVelocity; + _angularVelocityTarget = angularVelocity; _positionalTargetSet = true; _rotationalTargetSet = true; _active = true; @@ -225,15 +241,24 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) { withWriteLock([&]{ if (_kinematicSetVelocity) { + rigidBody->setLinearVelocity(glmToBullet(_linearVelocityTarget)); + rigidBody->setAngularVelocity(glmToBullet(_angularVelocityTarget)); + /* if (_previousSet) { // smooth velocity over 2 frames glm::vec3 positionalDelta = _positionalTarget - _previousPositionalTarget; glm::vec3 positionalVelocity = (positionalDelta + _previousPositionalDelta) / (deltaTimeStep + _previousDeltaTimeStep); rigidBody->setLinearVelocity(glmToBullet(positionalVelocity)); + + if (_hand == "right") { + qDebug() << "AJT: rb vel = " << positionalVelocity.x << positionalVelocity.y << positionalVelocity.z; + } + _previousPositionalDelta = positionalDelta; _previousDeltaTimeStep = deltaTimeStep; } + */ } btTransform worldTrans = rigidBody->getWorldTransform(); diff --git a/interface/src/avatar/AvatarActionHold.h b/interface/src/avatar/AvatarActionHold.h index 7646f87238..018c3fd076 100644 --- a/interface/src/avatar/AvatarActionHold.h +++ b/interface/src/avatar/AvatarActionHold.h @@ -36,7 +36,8 @@ public: virtual bool shouldSuppressLocationEdits() override { return _active && !_ownerEntity.expired(); } bool getAvatarRigidBodyLocation(glm::vec3& avatarRigidBodyPosition, glm::quat& avatarRigidBodyRotation); - std::shared_ptr getTarget(glm::quat& rotation, glm::vec3& position); + std::shared_ptr getTarget(glm::quat& rotation, glm::vec3& position, + glm::vec3& linearVelocity, glm::vec3& angularVelocity); virtual void 
prepareForPhysicsSimulation() override; @@ -50,6 +51,9 @@ private: QString _hand { "right" }; QUuid _holderID; + glm::vec3 _linearVelocityTarget; + glm::vec3 _angularVelocityTarget; + bool _kinematic { false }; bool _kinematicSetVelocity { false }; bool _previousSet { false }; diff --git a/libraries/avatars/src/HandData.h b/libraries/avatars/src/HandData.h index 34ed610f80..63af43e399 100644 --- a/libraries/avatars/src/HandData.h +++ b/libraries/avatars/src/HandData.h @@ -93,6 +93,7 @@ public: PalmData(HandData* owningHandData = nullptr, HandData::Hand hand = HandData::UnknownHand); glm::vec3 getPosition() const { return _owningHandData->localToWorldPosition(_rawPosition); } glm::vec3 getVelocity() const { return _owningHandData->localToWorldDirection(_rawVelocity); } + glm::vec3 getAngularVelocity() const { return _owningHandData->localToWorldDirection(_rawAngularVelocity); } const glm::vec3& getRawPosition() const { return _rawPosition; } bool isActive() const { return _isActive; } diff --git a/plugins/openvr/src/OpenVrDisplayPlugin.cpp b/plugins/openvr/src/OpenVrDisplayPlugin.cpp index efd230bc28..559ea5bcfb 100644 --- a/plugins/openvr/src/OpenVrDisplayPlugin.cpp +++ b/plugins/openvr/src/OpenVrDisplayPlugin.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include "OpenVrHelpers.h" @@ -32,6 +33,8 @@ const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probab static vr::IVRCompositor* _compositor{ nullptr }; vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount]; mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount]; +vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount]; +vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount]; static mat4 _sensorResetMat; static std::array VR_EYES { { vr::Eye_Left, vr::Eye_Right } }; @@ -119,6 +122,8 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const { for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) { 
_trackedDevicePose[i] = predictedTrackedDevicePose[i]; _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking); + _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity)); + _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity)); } return _trackedDevicePoseMat4[0]; } diff --git a/plugins/openvr/src/OpenVrHelpers.h b/plugins/openvr/src/OpenVrHelpers.h index e4a34c53b7..26179fb757 100644 --- a/plugins/openvr/src/OpenVrHelpers.h +++ b/plugins/openvr/src/OpenVrHelpers.h @@ -13,7 +13,7 @@ #include vr::IVRSystem* acquireOpenVrSystem(); -void releaseOpenVrSystem(); +void releaseOpenVrSystem(); template void openvr_for_each_eye(F f) { @@ -25,6 +25,10 @@ inline mat4 toGlm(const vr::HmdMatrix44_t& m) { return glm::transpose(glm::make_mat4(&m.m[0][0])); } +inline vec3 toGlm(const vr::HmdVector3_t& v) { + return vec3(v.v[0], v.v[1], v.v[2]); +} + inline mat4 toGlm(const vr::HmdMatrix34_t& m) { mat4 result = mat4( m.m[0][0], m.m[1][0], m.m[2][0], 0.0, diff --git a/plugins/openvr/src/ViveControllerManager.cpp b/plugins/openvr/src/ViveControllerManager.cpp index 720a6d48c6..071d5fd631 100644 --- a/plugins/openvr/src/ViveControllerManager.cpp +++ b/plugins/openvr/src/ViveControllerManager.cpp @@ -29,6 +29,8 @@ extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount]; extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount]; +extern vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount]; +extern vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount]; vr::IVRSystem* acquireOpenVrSystem(); void releaseOpenVrSystem(); @@ -249,10 +251,11 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle numTrackedControllers++; bool left = numTrackedControllers == 2; - const mat4& mat = _trackedDevicePoseMat4[device]; - if (!jointsCaptured) { - 
handlePoseEvent(inputCalibrationData, mat, numTrackedControllers - 1); + const mat4& mat = _trackedDevicePoseMat4[device]; + const vec3 linearVelocity = _trackedDeviceLinearVelocities[device]; + const vec3 angularVelocity = _trackedDeviceAngularVelocities[device]; + handlePoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity, numTrackedControllers - 1); } // handle inputs @@ -314,7 +317,9 @@ void ViveControllerManager::InputDevice::handleButtonEvent(uint32_t button, bool } } -void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left) { +void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, + const mat4& mat, const vec3& linearVelocity, + const vec3& angularVelocity, bool left) { // When the sensor-to-world rotation is identity the coordinate axes look like this: // // user @@ -379,16 +384,22 @@ void ViveControllerManager::InputDevice::handlePoseEvent(const controller::Input static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET; static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET; - glm::vec3 position = extractTranslation(mat); - glm::quat rotation = glm::quat_cast(mat); + auto translationOffset = (left ? leftTranslationOffset : rightTranslationOffset); + auto rotationOffset = (left ? leftRotationOffset : rightRotationOffset); - position += rotation * (left ? leftTranslationOffset : rightTranslationOffset); - rotation = rotation * (left ? 
leftRotationOffset : rightRotationOffset); + glm::vec3 position = extractTranslation(mat); + glm::quat rotation = glm::normalize(glm::quat_cast(mat)); + + position += rotation * translationOffset; + rotation = rotation * rotationOffset; // transform into avatar frame glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat; - auto avatarPose = controller::Pose(position, rotation).transform(controllerToAvatar); - _poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose; + auto avatarPose = controller::Pose(position, rotation); + // handle change in velocity due to translationOffset + avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat)); + avatarPose.angularVelocity = angularVelocity; + _poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar); } controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const { diff --git a/plugins/openvr/src/ViveControllerManager.h b/plugins/openvr/src/ViveControllerManager.h index 282c8e41a5..51339cd465 100644 --- a/plugins/openvr/src/ViveControllerManager.h +++ b/plugins/openvr/src/ViveControllerManager.h @@ -46,7 +46,7 @@ public: void updateRendering(RenderArgs* args, render::ScenePointer scene, render::PendingChanges pendingChanges); void setRenderControllers(bool renderControllers) { _renderControllers = renderControllers; } - + private: class InputDevice : public controller::InputDevice { public: @@ -60,7 +60,8 @@ private: void handleButtonEvent(uint32_t button, bool pressed, bool left); void handleAxisEvent(uint32_t axis, float x, float y, bool left); - void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left); + void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, + const vec3& linearVelocity, const vec3& 
angularVelocity, bool left); int _trackedControllers { 0 }; vr::IVRSystem*& _hmd; @@ -68,8 +69,8 @@ private: }; void renderHand(const controller::Pose& pose, gpu::Batch& batch, int sign); - - + + bool _registeredWithInputMapper { false }; bool _modelLoaded { false };