Improve hold action by using controller velocity

The AvatarActionHold action now derives the body's velocity from the controller::Pose data, rather than estimating it from previous positions. This results in more accurate motion of the held object when the hold is released.
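For reference, a minimal sketch (illustrative, not code from this commit) of the approach being replaced. Finite differencing amplifies tracking noise and depends on the frame timestep:

#include <glm/glm.hpp>

// Old approach: estimate velocity from successive positions. Jitter in the
// sampled positions and a variable deltaTime make this noisy, which showed
// up as erratic motion when a held object was released.
glm::vec3 velocityFromPositions(const glm::vec3& position,
                                const glm::vec3& previousPosition,
                                float deltaTime) {
    return (deltaTime > 0.0f) ? (position - previousPosition) / deltaTime
                              : glm::vec3(0.0f);
}

The commit replaces this estimate with pose.getVelocity() and pose.getAngularVelocity(), which carry the tracking runtime's own sensor-fused values.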

OpenVR input plugin: pass the velocity and angularVelocity directly from the OpenVR tracked device pose into the controller::Pose.
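OpenVR reports these quantities per tracked device alongside the transform. A sketch of the fields involved (field and type names are from the OpenVR SDK; the helper mirrors the toGlm overload added below):

#include <openvr.h>
#include <glm/glm.hpp>

inline glm::vec3 toGlmVec3(const vr::HmdVector3_t& v) {
    return glm::vec3(v.v[0], v.v[1], v.v[2]);
}

// vr::TrackedDevicePose_t carries runtime-estimated velocities:
//   vVelocity        - linear velocity in tracking space, meters/second
//   vAngularVelocity - angular velocity, radians/second about each axis
glm::vec3 linearVelocityOf(const vr::TrackedDevicePose_t& pose) {
    return toGlmVec3(pose.vVelocity);
}
glm::vec3 angularVelocityOf(const vr::TrackedDevicePose_t& pose) {
    return toGlmVec3(pose.vAngularVelocity);
}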
Anthony Thibault 2016-02-22 19:15:22 -08:00
parent e6ccf501fa
commit 3b87cd0ea8
8 changed files with 89 additions and 29 deletions

View file

@@ -5037,6 +5037,8 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float de
glm::vec3 position = pose.getTranslation();
glm::quat rotation = pose.getRotation();
// AJT: REMOVE
/*
// Compute current velocity from position change
glm::vec3 rawVelocity;
if (deltaTime > 0.0f) {
@@ -5057,6 +5059,13 @@
} else {
palm.setRawAngularVelocity(glm::vec3(0.0f));
}
*/
glm::vec3 rawVelocity = pose.getVelocity();
glm::vec3 angularVelocity = pose.getAngularVelocity();
palm.setRawVelocity(rawVelocity);
palm.setRawAngularVelocity(angularVelocity);
if (controller::InputDevice::getLowVelocityFilter()) {
// Use a velocity sensitive filter to damp small motions and preserve large ones with
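The low-velocity filter referenced in the comment above is pre-existing engine code; a plausible sketch of the general idea only (the constant and blend are illustrative, not the engine's actual filter):

#include <glm/glm.hpp>

// Velocity-sensitive smoothing: damp small motions heavily, let large
// motions pass through nearly untouched.
glm::vec3 lowVelocityFilter(const glm::vec3& previous, const glm::vec3& current,
                            float deltaTime) {
    const float FULL_SPEED = 1.0f; // m/s above which no smoothing applies (illustrative)
    float speed = (deltaTime > 0.0f) ? glm::length(current - previous) / deltaTime
                                     : FULL_SPEED;
    float blend = glm::clamp(speed / FULL_SPEED, 0.0f, 1.0f);
    return glm::mix(previous, current, blend);
}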

View file

@@ -93,7 +93,8 @@ void AvatarActionHold::prepareForPhysicsSimulation() {
activateBody(true);
}
std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position) {
std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position,
glm::vec3& linearVelocity, glm::vec3& angularVelocity) {
auto avatarManager = DependencyManager::get<AvatarManager>();
auto holdingAvatar = std::static_pointer_cast<Avatar>(avatarManager->getAvatarBySessionID(_holderID));
@@ -103,19 +104,22 @@ std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::ve
withReadLock([&]{
bool isRightHand = (_hand == "right");
glm::vec3 palmPosition { Vectors::ZERO };
glm::quat palmRotation { Quaternions::IDENTITY };
glm::vec3 palmPosition;
glm::quat palmRotation;
glm::vec3 palmLinearVelocity;
glm::vec3 palmAngularVelocity;
PalmData palmData = holdingAvatar->getHand()->getCopyOfPalmData(isRightHand ? HandData::RightHand : HandData::LeftHand);
// TODO: adjust according to _relativePosition and _relativeRotation?
linearVelocity = palmData.getVelocity();
angularVelocity = palmData.getAngularVelocity();
if (_ignoreIK && holdingAvatar->isMyAvatar()) {
// We cannot ignore other avatars IK and this is not the point of this option
// This is meant to make the grabbing behavior more reactive.
if (isRightHand) {
palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getPosition();
palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getRotation();
} else {
palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getPosition();
palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getRotation();
}
palmPosition = palmData.getPosition();
palmRotation = palmData.getRotation();
} else if (holdingAvatar->isMyAvatar()) {
glm::vec3 avatarRigidBodyPosition;
glm::quat avatarRigidBodyRotation;
@@ -153,14 +157,19 @@ std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::ve
rotation = palmRotation * _relativeRotation;
position = palmPosition + rotation * _relativePosition;
// update linearVelocity based on offset via _relativePosition;
linearVelocity = linearVelocity + glm::cross(angularVelocity, position - palmPosition);
});
return holdingAvatar;
}
void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
glm::quat rotation { Quaternions::IDENTITY };
glm::vec3 position { Vectors::ZERO };
glm::quat rotation;
glm::vec3 position;
glm::vec3 linearVelocity;
glm::vec3 angularVelocity;
bool valid = false;
int holdCount = 0;
@@ -173,7 +182,8 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
std::shared_ptr<AvatarActionHold> holdAction = std::static_pointer_cast<AvatarActionHold>(action);
glm::quat rotationForAction;
glm::vec3 positionForAction;
std::shared_ptr<Avatar> holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction);
glm::vec3 linearVelocityForAction, angularVelocityForAction;
std::shared_ptr<Avatar> holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction, linearVelocityForAction, angularVelocityForAction);
if (holdingAvatar) {
holdCount ++;
if (holdAction.get() == this) {
@@ -183,15 +193,21 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
}
position += positionForAction;
linearVelocity += linearVelocityForAction;
angularVelocity += angularVelocityForAction;
}
}
if (valid && holdCount > 0) {
position /= holdCount;
linearVelocity /= holdCount;
angularVelocity /= holdCount;
withWriteLock([&]{
_positionalTarget = position;
_rotationalTarget = rotation;
_linearVelocityTarget = linearVelocity;
_angularVelocityTarget = angularVelocity;
_positionalTargetSet = true;
_rotationalTargetSet = true;
_active = true;
@@ -225,15 +241,24 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
withWriteLock([&]{
if (_kinematicSetVelocity) {
rigidBody->setLinearVelocity(glmToBullet(_linearVelocityTarget));
rigidBody->setAngularVelocity(glmToBullet(_angularVelocityTarget));
/*
if (_previousSet) {
// smooth velocity over 2 frames
glm::vec3 positionalDelta = _positionalTarget - _previousPositionalTarget;
glm::vec3 positionalVelocity =
(positionalDelta + _previousPositionalDelta) / (deltaTimeStep + _previousDeltaTimeStep);
rigidBody->setLinearVelocity(glmToBullet(positionalVelocity));
if (_hand == "right") {
qDebug() << "AJT: rb vel = " << positionalVelocity.x << positionalVelocity.y << positionalVelocity.z;
}
_previousPositionalDelta = positionalDelta;
_previousDeltaTimeStep = deltaTimeStep;
}
*/
}
btTransform worldTrans = rigidBody->getWorldTransform();
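The glm::cross line in getTarget above applies the standard rigid-body relation: a point held at an offset from the palm moves with the palm's linear velocity plus a rotational contribution, v_point = v_palm + omega cross (p_point - p_palm). As a standalone sketch:

#include <glm/glm.hpp>

// Velocity of a point rigidly attached to a body: the body's linear
// velocity plus the sweep induced by its angular velocity (rad/s).
glm::vec3 velocityAtPoint(const glm::vec3& bodyLinearVelocity,
                          const glm::vec3& bodyAngularVelocity,
                          const glm::vec3& bodyPosition,
                          const glm::vec3& pointPosition) {
    return bodyLinearVelocity +
           glm::cross(bodyAngularVelocity, pointPosition - bodyPosition);
}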

View file

@@ -36,7 +36,8 @@ public:
virtual bool shouldSuppressLocationEdits() override { return _active && !_ownerEntity.expired(); }
bool getAvatarRigidBodyLocation(glm::vec3& avatarRigidBodyPosition, glm::quat& avatarRigidBodyRotation);
std::shared_ptr<Avatar> getTarget(glm::quat& rotation, glm::vec3& position);
std::shared_ptr<Avatar> getTarget(glm::quat& rotation, glm::vec3& position,
glm::vec3& linearVelocity, glm::vec3& angularVelocity);
virtual void prepareForPhysicsSimulation() override;
@@ -50,6 +51,9 @@ private:
QString _hand { "right" };
QUuid _holderID;
glm::vec3 _linearVelocityTarget;
glm::vec3 _angularVelocityTarget;
bool _kinematic { false };
bool _kinematicSetVelocity { false };
bool _previousSet { false };

View file

@@ -93,6 +93,7 @@ public:
PalmData(HandData* owningHandData = nullptr, HandData::Hand hand = HandData::UnknownHand);
glm::vec3 getPosition() const { return _owningHandData->localToWorldPosition(_rawPosition); }
glm::vec3 getVelocity() const { return _owningHandData->localToWorldDirection(_rawVelocity); }
glm::vec3 getAngularVelocity() const { return _owningHandData->localToWorldDirection(_rawAngularVelocity); }
const glm::vec3& getRawPosition() const { return _rawPosition; }
bool isActive() const { return _isActive; }

View file

@@ -21,6 +21,7 @@
#include <PerfStat.h>
#include <plugins/PluginContainer.h>
#include <ViewFrustum.h>
#include <GLMHelpers.h>
#include "OpenVrHelpers.h"
@@ -32,6 +33,8 @@ const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probab
static vr::IVRCompositor* _compositor{ nullptr };
vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
static mat4 _sensorResetMat;
static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
@@ -119,6 +122,8 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePose[i] = predictedTrackedDevicePose[i];
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
_trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
_trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
}
return _trackedDevicePoseMat4[0];
}
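transformVectorFast comes from the GLMHelpers include added above. The velocities are directions, so only the rotation part of _sensorResetMat should apply to them; a sketch of the operation assumed here (assuming the matrix carries no non-uniform scale):

#include <glm/glm.hpp>

// Transform a direction by a mat4: use w = 0 so the translation column
// does not contribute, leaving only the rotation (and any scale).
glm::vec3 transformVectorSketch(const glm::mat4& m, const glm::vec3& v) {
    return glm::vec3(m * glm::vec4(v, 0.0f));
}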

View file

@@ -13,7 +13,7 @@
#include <glm/gtc/matrix_transform.hpp>
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
void releaseOpenVrSystem();
template<typename F>
void openvr_for_each_eye(F f) {
@@ -25,6 +25,10 @@ inline mat4 toGlm(const vr::HmdMatrix44_t& m) {
return glm::transpose(glm::make_mat4(&m.m[0][0]));
}
inline vec3 toGlm(const vr::HmdVector3_t& v) {
return vec3(v.v[0], v.v[1], v.v[2]);
}
inline mat4 toGlm(const vr::HmdMatrix34_t& m) {
mat4 result = mat4(
m.m[0][0], m.m[1][0], m.m[2][0], 0.0,

View file

@@ -29,6 +29,8 @@
extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
extern vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
extern vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
vr::IVRSystem* acquireOpenVrSystem();
void releaseOpenVrSystem();
@@ -249,10 +251,11 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
numTrackedControllers++;
bool left = numTrackedControllers == 2;
const mat4& mat = _trackedDevicePoseMat4[device];
if (!jointsCaptured) {
handlePoseEvent(inputCalibrationData, mat, numTrackedControllers - 1);
const mat4& mat = _trackedDevicePoseMat4[device];
const vec3 linearVelocity = _trackedDeviceLinearVelocities[device];
const vec3 angularVelocity = _trackedDeviceAngularVelocities[device];
handlePoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity, numTrackedControllers - 1);
}
// handle inputs
@@ -314,7 +317,9 @@ void ViveControllerManager::InputDevice::handleButtonEvent(uint32_t button, bool
}
}
void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left) {
void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData,
const mat4& mat, const vec3& linearVelocity,
const vec3& angularVelocity, bool left) {
// When the sensor-to-world rotation is identity the coordinate axes look like this:
//
// user
@@ -379,16 +384,22 @@ void ViveControllerManager::InputDevice::handlePoseEvent(const controller::Input
static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
glm::vec3 position = extractTranslation(mat);
glm::quat rotation = glm::quat_cast(mat);
auto translationOffset = (left ? leftTranslationOffset : rightTranslationOffset);
auto rotationOffset = (left ? leftRotationOffset : rightRotationOffset);
position += rotation * (left ? leftTranslationOffset : rightTranslationOffset);
rotation = rotation * (left ? leftRotationOffset : rightRotationOffset);
glm::vec3 position = extractTranslation(mat);
glm::quat rotation = glm::normalize(glm::quat_cast(mat));
position += rotation * translationOffset;
rotation = rotation * rotationOffset;
// transform into avatar frame
glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
auto avatarPose = controller::Pose(position, rotation).transform(controllerToAvatar);
_poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose;
auto avatarPose = controller::Pose(position, rotation);
// handle change in velocity due to translationOffset
avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
avatarPose.angularVelocity = angularVelocity;
_poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar);
}
controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const {
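The final transform(controllerToAvatar) above has to rotate the velocity vectors as well as move the transform itself. A sketch of the semantics this code relies on (a simplified stand-in; hifi's actual controller::Pose may differ in detail):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct PoseSketch {
    glm::vec3 translation;
    glm::quat rotation;
    glm::vec3 velocity;
    glm::vec3 angularVelocity;

    // Re-express the pose in another frame: the translation transforms as
    // a point (w = 1), the velocities as directions (rotation only).
    PoseSketch transform(const glm::mat4& m) const {
        glm::quat frameRotation = glm::quat_cast(m);
        PoseSketch result;
        result.translation = glm::vec3(m * glm::vec4(translation, 1.0f));
        result.rotation = frameRotation * rotation;
        result.velocity = frameRotation * velocity;
        result.angularVelocity = frameRotation * angularVelocity;
        return result;
    }
};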

View file

@@ -46,7 +46,7 @@ public:
void updateRendering(RenderArgs* args, render::ScenePointer scene, render::PendingChanges pendingChanges);
void setRenderControllers(bool renderControllers) { _renderControllers = renderControllers; }
private:
class InputDevice : public controller::InputDevice {
public:
@@ -60,7 +60,8 @@ private:
void handleButtonEvent(uint32_t button, bool pressed, bool left);
void handleAxisEvent(uint32_t axis, float x, float y, bool left);
void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left);
void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
const vec3& linearVelocity, const vec3& angularVelocity, bool left);
int _trackedControllers { 0 };
vr::IVRSystem*& _hmd;
@@ -68,8 +69,8 @@ private:
};
void renderHand(const controller::Pose& pose, gpu::Batch& batch, int sign);
bool _registeredWithInputMapper { false };
bool _modelLoaded { false };