From 3b87cd0ea895cbf40c33bb5a786f3cae981a20f7 Mon Sep 17 00:00:00 2001 From: Anthony Thibault Date: Mon, 22 Feb 2016 19:15:22 -0800 Subject: [PATCH 1/4] Improve hold action by using controller velocity The AvatarHoldAction now derives the body velocity by using data from the controller::Pose, rather than trying to derive it based on previous positions. This results in more accurate motion of the held object when the hold is released. OpenVR input plugin: pass the velocity and angularVelocity directly from the controller pose to the controller::Pose. --- interface/src/Application.cpp | 9 ++++ interface/src/avatar/AvatarActionHold.cpp | 51 +++++++++++++++----- interface/src/avatar/AvatarActionHold.h | 6 ++- libraries/avatars/src/HandData.h | 1 + plugins/openvr/src/OpenVrDisplayPlugin.cpp | 5 ++ plugins/openvr/src/OpenVrHelpers.h | 6 ++- plugins/openvr/src/ViveControllerManager.cpp | 31 ++++++++---- plugins/openvr/src/ViveControllerManager.h | 9 ++-- 8 files changed, 89 insertions(+), 29 deletions(-) diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index 64cd586e13..5c28e31d8c 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -5037,6 +5037,8 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de glm::vec3 position = pose.getTranslation(); glm::quat rotation = pose.getRotation(); + // AJT: REMOVE + /* // Compute current velocity from position change glm::vec3 rawVelocity; if (deltaTime > 0.0f) { rawVelocity = (position - palm.getRawPosition()) / deltaTime; } else { rawVelocity = glm::vec3(0.0f); } palm.setRawVelocity(rawVelocity); // meters/sec // Angular Velocity of Palm glm::quat deltaRotation = rotation * glm::inverse(palm.getRawRotation()); glm::vec3 angularVelocity(0.0f); float rotationAngle = glm::angle(deltaRotation); if ((rotationAngle > EPSILON) && (deltaTime > 0.0f)) { angularVelocity = glm::normalize(glm::axis(deltaRotation)); angularVelocity *= (rotationAngle / deltaTime); palm.setRawAngularVelocity(angularVelocity); } else { palm.setRawAngularVelocity(glm::vec3(0.0f)); } + */ + glm::vec3 rawVelocity = pose.getVelocity(); glm::vec3 angularVelocity = pose.getAngularVelocity(); + palm.setRawVelocity(rawVelocity); + palm.setRawAngularVelocity(angularVelocity); if (controller::InputDevice::getLowVelocityFilter()) { // Use a velocity sensitive filter to damp small motions and preserve large ones with
diff --git a/interface/src/avatar/AvatarActionHold.cpp b/interface/src/avatar/AvatarActionHold.cpp index 71bd1b1f82..0093c22377 100644 --- a/interface/src/avatar/AvatarActionHold.cpp +++ b/interface/src/avatar/AvatarActionHold.cpp @@ -93,7 +93,8 @@ void AvatarActionHold::prepareForPhysicsSimulation() { activateBody(true); } -std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position) { +std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position, + glm::vec3& linearVelocity, glm::vec3& angularVelocity) { auto avatarManager = DependencyManager::get(); auto holdingAvatar = std::static_pointer_cast(avatarManager->getAvatarBySessionID(_holderID)); @@ -103,19 +104,22 @@ std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::ve withReadLock([&]{ bool isRightHand = (_hand == "right"); - glm::vec3 palmPosition { Vectors::ZERO }; - glm::quat palmRotation { Quaternions::IDENTITY }; + glm::vec3 palmPosition; + glm::quat palmRotation; + glm::vec3 palmLinearVelocity; + glm::vec3 palmAngularVelocity; + + PalmData palmData = holdingAvatar->getHand()->getCopyOfPalmData(isRightHand ? HandData::RightHand : HandData::LeftHand); + + // TODO: adjust according to _relativePosition and _relativeRotation? + linearVelocity = palmData.getVelocity(); + angularVelocity = palmData.getAngularVelocity(); if (_ignoreIK && holdingAvatar->isMyAvatar()) { // We cannot ignore other avatars IK and this is not the point of this option // This is meant to make the grabbing behavior more reactive. 
- if (isRightHand) { - palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getPosition(); - palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getRotation(); - } else { - palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getPosition(); - palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getRotation(); - } + palmPosition = palmData.getPosition(); + palmRotation = palmData.getRotation(); } else if (holdingAvatar->isMyAvatar()) { glm::vec3 avatarRigidBodyPosition; glm::quat avatarRigidBodyRotation; @@ -153,14 +157,19 @@ std::shared_ptr AvatarActionHold::getTarget(glm::quat& rotation, glm::ve rotation = palmRotation * _relativeRotation; position = palmPosition + rotation * _relativePosition; + + // update linearVelocity based on offset via _relativePosition; + linearVelocity = linearVelocity + glm::cross(angularVelocity, position - palmPosition); }); return holdingAvatar; } void AvatarActionHold::updateActionWorker(float deltaTimeStep) { - glm::quat rotation { Quaternions::IDENTITY }; - glm::vec3 position { Vectors::ZERO }; + glm::quat rotation; + glm::vec3 position; + glm::vec3 linearVelocity; + glm::vec3 angularVelocity; bool valid = false; int holdCount = 0; @@ -173,7 +182,8 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) { std::shared_ptr holdAction = std::static_pointer_cast(action); glm::quat rotationForAction; glm::vec3 positionForAction; - std::shared_ptr holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction); + glm::vec3 linearVelocityForAction, angularVelocityForAction; + std::shared_ptr holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction, linearVelocityForAction, angularVelocityForAction); if (holdingAvatar) { holdCount ++; if (holdAction.get() == this) { @@ -183,15 +193,21 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) { } position += positionForAction; + 
linearVelocity += linearVelocityForAction; + angularVelocity += angularVelocityForAction; } } if (valid && holdCount > 0) { position /= holdCount; + linearVelocity /= holdCount; + angularVelocity /= holdCount; withWriteLock([&]{ _positionalTarget = position; _rotationalTarget = rotation; + _linearVelocityTarget = linearVelocity; + _angularVelocityTarget = angularVelocity; _positionalTargetSet = true; _rotationalTargetSet = true; _active = true; @@ -225,15 +241,24 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) { withWriteLock([&]{ if (_kinematicSetVelocity) { + rigidBody->setLinearVelocity(glmToBullet(_linearVelocityTarget)); + rigidBody->setAngularVelocity(glmToBullet(_angularVelocityTarget)); + /* if (_previousSet) { // smooth velocity over 2 frames glm::vec3 positionalDelta = _positionalTarget - _previousPositionalTarget; glm::vec3 positionalVelocity = (positionalDelta + _previousPositionalDelta) / (deltaTimeStep + _previousDeltaTimeStep); rigidBody->setLinearVelocity(glmToBullet(positionalVelocity)); + + if (_hand == "right") { + qDebug() << "AJT: rb vel = " << positionalVelocity.x << positionalVelocity.y << positionalVelocity.z; + } + _previousPositionalDelta = positionalDelta; _previousDeltaTimeStep = deltaTimeStep; } + */ } btTransform worldTrans = rigidBody->getWorldTransform(); diff --git a/interface/src/avatar/AvatarActionHold.h b/interface/src/avatar/AvatarActionHold.h index 7646f87238..018c3fd076 100644 --- a/interface/src/avatar/AvatarActionHold.h +++ b/interface/src/avatar/AvatarActionHold.h @@ -36,7 +36,8 @@ public: virtual bool shouldSuppressLocationEdits() override { return _active && !_ownerEntity.expired(); } bool getAvatarRigidBodyLocation(glm::vec3& avatarRigidBodyPosition, glm::quat& avatarRigidBodyRotation); - std::shared_ptr getTarget(glm::quat& rotation, glm::vec3& position); + std::shared_ptr getTarget(glm::quat& rotation, glm::vec3& position, + glm::vec3& linearVelocity, glm::vec3& angularVelocity); virtual void 
prepareForPhysicsSimulation() override; @@ -50,6 +51,9 @@ private: QString _hand { "right" }; QUuid _holderID; + glm::vec3 _linearVelocityTarget; + glm::vec3 _angularVelocityTarget; + bool _kinematic { false }; bool _kinematicSetVelocity { false }; bool _previousSet { false }; diff --git a/libraries/avatars/src/HandData.h b/libraries/avatars/src/HandData.h index 34ed610f80..63af43e399 100644 --- a/libraries/avatars/src/HandData.h +++ b/libraries/avatars/src/HandData.h @@ -93,6 +93,7 @@ public: PalmData(HandData* owningHandData = nullptr, HandData::Hand hand = HandData::UnknownHand); glm::vec3 getPosition() const { return _owningHandData->localToWorldPosition(_rawPosition); } glm::vec3 getVelocity() const { return _owningHandData->localToWorldDirection(_rawVelocity); } + glm::vec3 getAngularVelocity() const { return _owningHandData->localToWorldDirection(_rawAngularVelocity); } const glm::vec3& getRawPosition() const { return _rawPosition; } bool isActive() const { return _isActive; } diff --git a/plugins/openvr/src/OpenVrDisplayPlugin.cpp b/plugins/openvr/src/OpenVrDisplayPlugin.cpp index efd230bc28..559ea5bcfb 100644 --- a/plugins/openvr/src/OpenVrDisplayPlugin.cpp +++ b/plugins/openvr/src/OpenVrDisplayPlugin.cpp @@ -21,6 +21,7 @@ #include #include #include +#include #include "OpenVrHelpers.h" @@ -32,6 +33,8 @@ const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probab static vr::IVRCompositor* _compositor{ nullptr }; vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount]; mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount]; +vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount]; +vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount]; static mat4 _sensorResetMat; static std::array VR_EYES { { vr::Eye_Left, vr::Eye_Right } }; @@ -119,6 +122,8 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const { for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) { 
_trackedDevicePose[i] = predictedTrackedDevicePose[i]; _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking); + _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity)); + _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity)); } return _trackedDevicePoseMat4[0]; } diff --git a/plugins/openvr/src/OpenVrHelpers.h b/plugins/openvr/src/OpenVrHelpers.h index e4a34c53b7..26179fb757 100644 --- a/plugins/openvr/src/OpenVrHelpers.h +++ b/plugins/openvr/src/OpenVrHelpers.h @@ -13,7 +13,7 @@ #include vr::IVRSystem* acquireOpenVrSystem(); -void releaseOpenVrSystem(); +void releaseOpenVrSystem(); template void openvr_for_each_eye(F f) { @@ -25,6 +25,10 @@ inline mat4 toGlm(const vr::HmdMatrix44_t& m) { return glm::transpose(glm::make_mat4(&m.m[0][0])); } +inline vec3 toGlm(const vr::HmdVector3_t& v) { + return vec3(v.v[0], v.v[1], v.v[2]); +} + inline mat4 toGlm(const vr::HmdMatrix34_t& m) { mat4 result = mat4( m.m[0][0], m.m[1][0], m.m[2][0], 0.0, diff --git a/plugins/openvr/src/ViveControllerManager.cpp b/plugins/openvr/src/ViveControllerManager.cpp index 720a6d48c6..071d5fd631 100644 --- a/plugins/openvr/src/ViveControllerManager.cpp +++ b/plugins/openvr/src/ViveControllerManager.cpp @@ -29,6 +29,8 @@ extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount]; extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount]; +extern vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount]; +extern vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount]; vr::IVRSystem* acquireOpenVrSystem(); void releaseOpenVrSystem(); @@ -249,10 +251,11 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle numTrackedControllers++; bool left = numTrackedControllers == 2; - const mat4& mat = _trackedDevicePoseMat4[device]; - if (!jointsCaptured) { - 
handlePoseEvent(inputCalibrationData, mat, numTrackedControllers - 1); + const mat4& mat = _trackedDevicePoseMat4[device]; + const vec3 linearVelocity = _trackedDeviceLinearVelocities[device]; + const vec3 angularVelocity = _trackedDeviceAngularVelocities[device]; + handlePoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity, numTrackedControllers - 1); } // handle inputs @@ -314,7 +317,9 @@ void ViveControllerManager::InputDevice::handleButtonEvent(uint32_t button, bool } } -void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left) { +void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, + const mat4& mat, const vec3& linearVelocity, + const vec3& angularVelocity, bool left) { // When the sensor-to-world rotation is identity the coordinate axes look like this: // // user @@ -379,16 +384,22 @@ void ViveControllerManager::InputDevice::handlePoseEvent(const controller::Input static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET; static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET; - glm::vec3 position = extractTranslation(mat); - glm::quat rotation = glm::quat_cast(mat); + auto translationOffset = (left ? leftTranslationOffset : rightTranslationOffset); + auto rotationOffset = (left ? leftRotationOffset : rightRotationOffset); - position += rotation * (left ? leftTranslationOffset : rightTranslationOffset); - rotation = rotation * (left ? 
leftRotationOffset : rightRotationOffset); + glm::vec3 position = extractTranslation(mat); + glm::quat rotation = glm::normalize(glm::quat_cast(mat)); + + position += rotation * translationOffset; + rotation = rotation * rotationOffset; // transform into avatar frame glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat; - auto avatarPose = controller::Pose(position, rotation).transform(controllerToAvatar); - _poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose; + auto avatarPose = controller::Pose(position, rotation); + // handle change in velocity due to translationOffset + avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat)); + avatarPose.angularVelocity = angularVelocity; + _poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar); } controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const { diff --git a/plugins/openvr/src/ViveControllerManager.h b/plugins/openvr/src/ViveControllerManager.h index 282c8e41a5..51339cd465 100644 --- a/plugins/openvr/src/ViveControllerManager.h +++ b/plugins/openvr/src/ViveControllerManager.h @@ -46,7 +46,7 @@ public: void updateRendering(RenderArgs* args, render::ScenePointer scene, render::PendingChanges pendingChanges); void setRenderControllers(bool renderControllers) { _renderControllers = renderControllers; } - + private: class InputDevice : public controller::InputDevice { public: @@ -60,7 +60,8 @@ private: void handleButtonEvent(uint32_t button, bool pressed, bool left); void handleAxisEvent(uint32_t axis, float x, float y, bool left); - void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left); + void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, + const vec3& linearVelocity, const vec3& 
angularVelocity, bool left); int _trackedControllers { 0 }; vr::IVRSystem*& _hmd; @@ -68,8 +69,8 @@ private: }; void renderHand(const controller::Pose& pose, gpu::Batch& batch, int sign); - - + + bool _registeredWithInputMapper { false }; bool _modelLoaded { false }; From 0fd260076bbcae43f4d7bcf11b752f85476a9d7e Mon Sep 17 00:00:00 2001 From: "Anthony J. Thibault" Date: Mon, 22 Feb 2016 20:05:41 -0800 Subject: [PATCH 2/4] SixenseManager: fix velocity and angularVelocity computation Copied the delta based computation of velocity and angularVelocity that was in Application::setPalmData() and moved it into SixenseManager. This will guarantee that the velocity computation is the same as it was previously. The goal here is to NOT change the behavior of the hydra. The moving average style calculation of velocities has been removed. Removed dead code. --- interface/src/Application.cpp | 25 ------------- interface/src/avatar/Avatar.cpp | 29 --------------- interface/src/avatar/AvatarActionHold.cpp | 16 --------- plugins/hifiSixense/src/SixenseManager.cpp | 42 +++++++++------------- plugins/hifiSixense/src/SixenseManager.h | 7 ---- 5 files changed, 16 insertions(+), 103 deletions(-) diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp index 5c28e31d8c..ed7aef0dab 100644 --- a/interface/src/Application.cpp +++ b/interface/src/Application.cpp @@ -5036,31 +5036,6 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de // controller pose is in Avatar frame. 
glm::vec3 position = pose.getTranslation(); glm::quat rotation = pose.getRotation(); - - // AJT: REMOVE - /* - // Compute current velocity from position change - glm::vec3 rawVelocity; - if (deltaTime > 0.0f) { - rawVelocity = (position - palm.getRawPosition()) / deltaTime; - } else { - rawVelocity = glm::vec3(0.0f); - } - palm.setRawVelocity(rawVelocity); // meters/sec - - // Angular Velocity of Palm - glm::quat deltaRotation = rotation * glm::inverse(palm.getRawRotation()); - glm::vec3 angularVelocity(0.0f); - float rotationAngle = glm::angle(deltaRotation); - if ((rotationAngle > EPSILON) && (deltaTime > 0.0f)) { - angularVelocity = glm::normalize(glm::axis(deltaRotation)); - angularVelocity *= (rotationAngle / deltaTime); - palm.setRawAngularVelocity(angularVelocity); - } else { - palm.setRawAngularVelocity(glm::vec3(0.0f)); - } - */ - glm::vec3 rawVelocity = pose.getVelocity(); glm::vec3 angularVelocity = pose.getAngularVelocity(); diff --git a/interface/src/avatar/Avatar.cpp b/interface/src/avatar/Avatar.cpp index f62eb5a070..2b164a7bac 100644 --- a/interface/src/avatar/Avatar.cpp +++ b/interface/src/avatar/Avatar.cpp @@ -929,35 +929,6 @@ void Avatar::setAttachmentData(const QVector& attachmentData) { } _attachmentModels[i]->setURL(attachmentData[i].modelURL); } - - // AJT: TODO REMOVE - /* - // make sure we have as many models as attachments - while (_attachmentModels.size() < attachmentData.size()) { - Model* model = nullptr; - if (_unusedAttachments.size() > 0) { - model = _unusedAttachments.takeFirst(); - } else { - model = new Model(std::make_shared(), this); - } - model->init(); - _attachmentModels.append(model); - } - while (_attachmentModels.size() > attachmentData.size()) { - auto attachmentModel = _attachmentModels.back(); - _attachmentModels.pop_back(); - _attachmentsToRemove.push_back(attachmentModel); - } - */ - - /* - // update the urls - for (int i = 0; i < attachmentData.size(); i++) { - 
_attachmentModels[i]->setURL(attachmentData.at(i).modelURL); - _attachmentModels[i]->setSnapModelToCenter(true); - _attachmentModels[i]->setScaleToFit(true, getUniformScale() * _attachmentData.at(i).scale); - } - */ } diff --git a/interface/src/avatar/AvatarActionHold.cpp b/interface/src/avatar/AvatarActionHold.cpp index 0093c22377..87990b941e 100644 --- a/interface/src/avatar/AvatarActionHold.cpp +++ b/interface/src/avatar/AvatarActionHold.cpp @@ -243,22 +243,6 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) { if (_kinematicSetVelocity) { rigidBody->setLinearVelocity(glmToBullet(_linearVelocityTarget)); rigidBody->setAngularVelocity(glmToBullet(_angularVelocityTarget)); - /* - if (_previousSet) { - // smooth velocity over 2 frames - glm::vec3 positionalDelta = _positionalTarget - _previousPositionalTarget; - glm::vec3 positionalVelocity = - (positionalDelta + _previousPositionalDelta) / (deltaTimeStep + _previousDeltaTimeStep); - rigidBody->setLinearVelocity(glmToBullet(positionalVelocity)); - - if (_hand == "right") { - qDebug() << "AJT: rb vel = " << positionalVelocity.x << positionalVelocity.y << positionalVelocity.z; - } - - _previousPositionalDelta = positionalDelta; - _previousDeltaTimeStep = deltaTimeStep; - } - */ } btTransform worldTrans = rigidBody->getWorldTransform(); diff --git a/plugins/hifiSixense/src/SixenseManager.cpp b/plugins/hifiSixense/src/SixenseManager.cpp index aad61e4bea..9fdce3add4 100644 --- a/plugins/hifiSixense/src/SixenseManager.cpp +++ b/plugins/hifiSixense/src/SixenseManager.cpp @@ -113,7 +113,6 @@ void SixenseManager::deactivate() { _container->removeMenu(MENU_PATH); _inputDevice->_poseStateMap.clear(); - _inputDevice->_collectedSamples.clear(); if (_inputDevice->_deviceID != controller::Input::INVALID_DEVICE) { auto userInputMapper = DependencyManager::get(); @@ -158,7 +157,6 @@ void SixenseManager::InputDevice::update(float deltaTime, const controller::Inpu _axisStateMap.clear(); _buttonPressedMap.clear(); 
_poseStateMap.clear(); - _collectedSamples.clear(); } return; } @@ -209,13 +207,10 @@ void SixenseManager::InputDevice::update(float deltaTime, const controller::Inpu rawPoses[i] = controller::Pose(position, rotation, Vectors::ZERO, Vectors::ZERO); } else { _poseStateMap.clear(); - _collectedSamples.clear(); } } else { auto hand = left ? controller::StandardPoseChannel::LEFT_HAND : controller::StandardPoseChannel::RIGHT_HAND; _poseStateMap[hand] = controller::Pose(); - _collectedSamples[hand].first.clear(); - _collectedSamples[hand].second.clear(); } } @@ -481,29 +476,24 @@ void SixenseManager::InputDevice::handlePoseEvent(float deltaTime, const control glm::vec3 velocity(0.0f); glm::vec3 angularVelocity(0.0f); - if (prevPose.isValid() && deltaTime > std::numeric_limits::epsilon()) { - auto& samples = _collectedSamples[hand]; - - velocity = (pos - prevPose.getTranslation()) / deltaTime; - samples.first.addSample(velocity); - velocity = samples.first.average; - - auto deltaRot = glm::normalize(rot * glm::conjugate(prevPose.getRotation())); - auto axis = glm::axis(deltaRot); - auto speed = glm::angle(deltaRot) / deltaTime; - assert(!glm::isnan(speed)); - angularVelocity = speed * axis; - samples.second.addSample(angularVelocity); - angularVelocity = samples.second.average; - } else if (!prevPose.isValid()) { - _collectedSamples[hand].first.clear(); - _collectedSamples[hand].second.clear(); - } + glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat; // transform pose into avatar frame. 
- glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat; - auto avatarPose = controller::Pose(pos, rot, velocity, angularVelocity).transform(controllerToAvatar); - _poseStateMap[hand] = avatarPose; + auto nextPose = controller::Pose(pos, rot, velocity, angularVelocity).transform(controllerToAvatar); + + if (prevPose.isValid() && (deltaTime > std::numeric_limits::epsilon())) { + nextPose.velocity = (nextPose.getTranslation() - prevPose.getTranslation()) / deltaTime; + + glm::quat deltaRotation = nextPose.getRotation() * glm::inverse(prevPose.getRotation()); + float rotationAngle = glm::angle(deltaRotation); + if (rotationAngle > EPSILON) { + nextPose.angularVelocity = glm::normalize(glm::axis(deltaRotation)); + nextPose.angularVelocity *= (rotationAngle / deltaTime); + } + + } + _poseStateMap[hand] = nextPose; + #endif // HAVE_SIXENSE } diff --git a/plugins/hifiSixense/src/SixenseManager.h b/plugins/hifiSixense/src/SixenseManager.h index fc74b83532..5106c87836 100644 --- a/plugins/hifiSixense/src/SixenseManager.h +++ b/plugins/hifiSixense/src/SixenseManager.h @@ -52,11 +52,6 @@ private: static const glm::vec3 DEFAULT_AVATAR_POSITION; static const float CONTROLLER_THRESHOLD; - template - using SampleAverage = MovingAverage; - using Samples = std::pair, SampleAverage>; - using MovingAverageMap = std::map; - class InputDevice : public controller::InputDevice { public: InputDevice() : controller::InputDevice("Hydra") {} @@ -75,8 +70,6 @@ private: friend class SixenseManager; - MovingAverageMap _collectedSamples; - int _calibrationState { CALIBRATION_STATE_IDLE }; // these are calibration results glm::vec3 _avatarPosition { DEFAULT_AVATAR_POSITION }; // in hydra-frame From e544275500a10cce28427b8d088fb0df0a19fad6 Mon Sep 17 00:00:00 2001 From: "Anthony J. Thibault" Date: Tue, 23 Feb 2016 12:07:14 -0800 Subject: [PATCH 3/4] Physics: extrapolate kinematic objects as well as dynamic ones. 
--- libraries/physics/src/EntityMotionState.cpp | 2 +- .../physics/src/ThreadSafeDynamicsWorld.cpp | 19 +++++++++++++------ 2 files changed, 14 insertions(+), 7 deletions(-) diff --git a/libraries/physics/src/EntityMotionState.cpp b/libraries/physics/src/EntityMotionState.cpp index 8c0dab98db..ff7d550175 100644 --- a/libraries/physics/src/EntityMotionState.cpp +++ b/libraries/physics/src/EntityMotionState.cpp @@ -191,7 +191,7 @@ void EntityMotionState::getWorldTransform(btTransform& worldTrans) const { } // This callback is invoked by the physics simulation at the end of each simulation step... -// iff the corresponding RigidBody is DYNAMIC and has moved. +// iff the corresponding RigidBody is active. void EntityMotionState::setWorldTransform(const btTransform& worldTrans) { if (!_entity) { return; diff --git a/libraries/physics/src/ThreadSafeDynamicsWorld.cpp b/libraries/physics/src/ThreadSafeDynamicsWorld.cpp index b6f3487f1a..17b2b08d4d 100644 --- a/libraries/physics/src/ThreadSafeDynamicsWorld.cpp +++ b/libraries/physics/src/ThreadSafeDynamicsWorld.cpp @@ -96,19 +96,26 @@ void ThreadSafeDynamicsWorld::synchronizeMotionState(btRigidBody* body) { ///@todo: add 'dirty' flag //if (body->getActivationState() != ISLAND_SLEEPING) { + btTransform bodyTransform; + btVector3 bodyLinearVelocity, bodyAngularVelocity; if (body->isKinematicObject()) { ObjectMotionState* objectMotionState = static_cast(body->getMotionState()); if (objectMotionState->hasInternalKinematicChanges()) { objectMotionState->clearInternalKinematicChanges(); - body->getMotionState()->setWorldTransform(body->getWorldTransform()); } - return; + bodyTransform = body->getWorldTransform(); + bodyLinearVelocity = body->getLinearVelocity(); + bodyAngularVelocity = body->getAngularVelocity(); + } else { + bodyTransform = body->getInterpolationWorldTransform(); + bodyLinearVelocity = body->getInterpolationLinearVelocity(); + bodyAngularVelocity = body->getInterpolationAngularVelocity(); } + + // integrate the 
object foward to cover the time in-between fixed time steps. + btScalar dt = (m_latencyMotionStateInterpolation && m_fixedTimeStep) ? m_localTime - m_fixedTimeStep : m_localTime * body->getHitFraction(); btTransform interpolatedTransform; - btTransformUtil::integrateTransform(body->getInterpolationWorldTransform(), - body->getInterpolationLinearVelocity(),body->getInterpolationAngularVelocity(), - (m_latencyMotionStateInterpolation && m_fixedTimeStep) ? m_localTime - m_fixedTimeStep : m_localTime*body->getHitFraction(), - interpolatedTransform); + btTransformUtil::integrateTransform(bodyTransform, bodyLinearVelocity, bodyAngularVelocity, dt, interpolatedTransform); body->getMotionState()->setWorldTransform(interpolatedTransform); } } From 38b29f47091b0f22ef41bd2718e66d3851ed0edd Mon Sep 17 00:00:00 2001 From: Anthony Thibault Date: Tue, 23 Feb 2016 13:26:15 -0800 Subject: [PATCH 4/4] Revert "Physics: extrapolate kinematic objects as well as dynamic ones." This reverts commit e544275500a10cce28427b8d088fb0df0a19fad6. --- libraries/physics/src/EntityMotionState.cpp | 2 +- .../physics/src/ThreadSafeDynamicsWorld.cpp | 19 ++++++------------- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/libraries/physics/src/EntityMotionState.cpp b/libraries/physics/src/EntityMotionState.cpp index ff7d550175..8c0dab98db 100644 --- a/libraries/physics/src/EntityMotionState.cpp +++ b/libraries/physics/src/EntityMotionState.cpp @@ -191,7 +191,7 @@ void EntityMotionState::getWorldTransform(btTransform& worldTrans) const { } // This callback is invoked by the physics simulation at the end of each simulation step... -// iff the corresponding RigidBody is active. +// iff the corresponding RigidBody is DYNAMIC and has moved. 
void EntityMotionState::setWorldTransform(const btTransform& worldTrans) { if (!_entity) { return; diff --git a/libraries/physics/src/ThreadSafeDynamicsWorld.cpp b/libraries/physics/src/ThreadSafeDynamicsWorld.cpp index 17b2b08d4d..b6f3487f1a 100644 --- a/libraries/physics/src/ThreadSafeDynamicsWorld.cpp +++ b/libraries/physics/src/ThreadSafeDynamicsWorld.cpp @@ -96,26 +96,19 @@ void ThreadSafeDynamicsWorld::synchronizeMotionState(btRigidBody* body) { ///@todo: add 'dirty' flag //if (body->getActivationState() != ISLAND_SLEEPING) { - btTransform bodyTransform; - btVector3 bodyLinearVelocity, bodyAngularVelocity; if (body->isKinematicObject()) { ObjectMotionState* objectMotionState = static_cast(body->getMotionState()); if (objectMotionState->hasInternalKinematicChanges()) { objectMotionState->clearInternalKinematicChanges(); + body->getMotionState()->setWorldTransform(body->getWorldTransform()); } - bodyTransform = body->getWorldTransform(); - bodyLinearVelocity = body->getLinearVelocity(); - bodyAngularVelocity = body->getAngularVelocity(); - } else { - bodyTransform = body->getInterpolationWorldTransform(); - bodyLinearVelocity = body->getInterpolationLinearVelocity(); - bodyAngularVelocity = body->getInterpolationAngularVelocity(); + return; } - - // integrate the object foward to cover the time in-between fixed time steps. - btScalar dt = (m_latencyMotionStateInterpolation && m_fixedTimeStep) ? m_localTime - m_fixedTimeStep : m_localTime * body->getHitFraction(); btTransform interpolatedTransform; - btTransformUtil::integrateTransform(bodyTransform, bodyLinearVelocity, bodyAngularVelocity, dt, interpolatedTransform); + btTransformUtil::integrateTransform(body->getInterpolationWorldTransform(), + body->getInterpolationLinearVelocity(),body->getInterpolationAngularVelocity(), + (m_latencyMotionStateInterpolation && m_fixedTimeStep) ? 
m_localTime - m_fixedTimeStep : m_localTime*body->getHitFraction(), + interpolatedTransform); body->getMotionState()->setWorldTransform(interpolatedTransform); } }