Mirror of https://github.com/overte-org/overte.git
Merge pull request #7162 from hyperlogic/tony/controller-velocity
Improve hand controller move and throw velocity
Commit 8d91912d4b. 11 changed files with 93 additions and 120 deletions.
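In short: instead of re-deriving hand velocities from frame-to-frame pose deltas in Application::setPalmData, the input plugins now populate linear and angular velocity on the controller::Pose (OpenVR from driver-reported velocities, Sixense by differencing in the avatar frame), and AvatarActionHold carries those velocities through to the physics simulation as explicit targets, so a released entity inherits the hand's velocity.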
@@ -5039,27 +5039,11 @@ void Application::setPalmData(Hand* hand, const controller::Pose& pose, float deltaTime) {
     // controller pose is in Avatar frame.
     glm::vec3 position = pose.getTranslation();
     glm::quat rotation = pose.getRotation();
+    glm::vec3 rawVelocity = pose.getVelocity();
+    glm::vec3 angularVelocity = pose.getAngularVelocity();
 
-    // Compute current velocity from position change
-    glm::vec3 rawVelocity;
-    if (deltaTime > 0.0f) {
-        rawVelocity = (position - palm.getRawPosition()) / deltaTime;
-    } else {
-        rawVelocity = glm::vec3(0.0f);
-    }
-    palm.setRawVelocity(rawVelocity); // meters/sec
-
-    // Angular Velocity of Palm
-    glm::quat deltaRotation = rotation * glm::inverse(palm.getRawRotation());
-    glm::vec3 angularVelocity(0.0f);
-    float rotationAngle = glm::angle(deltaRotation);
-    if ((rotationAngle > EPSILON) && (deltaTime > 0.0f)) {
-        angularVelocity = glm::normalize(glm::axis(deltaRotation));
-        angularVelocity *= (rotationAngle / deltaTime);
-        palm.setRawAngularVelocity(angularVelocity);
-    } else {
-        palm.setRawAngularVelocity(glm::vec3(0.0f));
-    }
+    palm.setRawVelocity(rawVelocity);
+    palm.setRawAngularVelocity(angularVelocity);
 
     if (controller::InputDevice::getLowVelocityFilter()) {
         // Use a velocity sensitive filter to damp small motions and preserve large ones with
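The removed block is the textbook finite-difference estimate: linear velocity from the position delta, angular velocity from the axis-angle of the quaternion delta. For reference, the same math as a self-contained sketch (illustrative names, not the engine's API):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Sketch: finite-difference velocities from two pose samples (illustrative, not engine API).
void estimateVelocities(const glm::vec3& prevPos, const glm::quat& prevRot,
                        const glm::vec3& pos, const glm::quat& rot, float dt,
                        glm::vec3& linearOut, glm::vec3& angularOut) {
    linearOut = glm::vec3(0.0f);
    angularOut = glm::vec3(0.0f);
    if (dt <= 0.0f) {
        return;
    }
    linearOut = (pos - prevPos) / dt; // meters/sec

    // deltaRot takes prevRot to rot; its axis-angle divided by dt is the angular velocity.
    glm::quat deltaRot = rot * glm::inverse(prevRot);
    float angle = glm::angle(deltaRot); // radians
    if (angle > 1.0e-6f) {
        angularOut = glm::normalize(glm::axis(deltaRot)) * (angle / dt); // radians/sec
    }
}

The replacement simply trusts pose.getVelocity() and pose.getAngularVelocity(), which the input plugins below now populate.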
@@ -930,35 +930,6 @@ void Avatar::setAttachmentData(const QVector<AttachmentData>& attachmentData) {
         }
         _attachmentModels[i]->setURL(attachmentData[i].modelURL);
     }
-
-    // AJT: TODO REMOVE
-    /*
-    // make sure we have as many models as attachments
-    while (_attachmentModels.size() < attachmentData.size()) {
-        Model* model = nullptr;
-        if (_unusedAttachments.size() > 0) {
-            model = _unusedAttachments.takeFirst();
-        } else {
-            model = new Model(std::make_shared<Rig>(), this);
-        }
-        model->init();
-        _attachmentModels.append(model);
-    }
-    while (_attachmentModels.size() > attachmentData.size()) {
-        auto attachmentModel = _attachmentModels.back();
-        _attachmentModels.pop_back();
-        _attachmentsToRemove.push_back(attachmentModel);
-    }
-    */
-
-    /*
-    // update the urls
-    for (int i = 0; i < attachmentData.size(); i++) {
-        _attachmentModels[i]->setURL(attachmentData.at(i).modelURL);
-        _attachmentModels[i]->setSnapModelToCenter(true);
-        _attachmentModels[i]->setScaleToFit(true, getUniformScale() * _attachmentData.at(i).scale);
-    }
-    */
 }
@@ -93,7 +93,8 @@ void AvatarActionHold::prepareForPhysicsSimulation() {
     activateBody(true);
 }
 
-std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position) {
+std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::vec3& position,
+                                                    glm::vec3& linearVelocity, glm::vec3& angularVelocity) {
     auto avatarManager = DependencyManager::get<AvatarManager>();
     auto holdingAvatar = std::static_pointer_cast<Avatar>(avatarManager->getAvatarBySessionID(_holderID));
@@ -103,19 +104,22 @@ std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::ve
 
     withReadLock([&]{
         bool isRightHand = (_hand == "right");
-        glm::vec3 palmPosition { Vectors::ZERO };
-        glm::quat palmRotation { Quaternions::IDENTITY };
+        glm::vec3 palmPosition;
+        glm::quat palmRotation;
+        glm::vec3 palmLinearVelocity;
+        glm::vec3 palmAngularVelocity;
+
+        PalmData palmData = holdingAvatar->getHand()->getCopyOfPalmData(isRightHand ? HandData::RightHand : HandData::LeftHand);
+
+        // TODO: adjust according to _relativePosition and _relativeRotation?
+        linearVelocity = palmData.getVelocity();
+        angularVelocity = palmData.getAngularVelocity();
 
         if (_ignoreIK && holdingAvatar->isMyAvatar()) {
             // We cannot ignore other avatars IK and this is not the point of this option
             // This is meant to make the grabbing behavior more reactive.
-            if (isRightHand) {
-                palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getPosition();
-                palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::RightHand).getRotation();
-            } else {
-                palmPosition = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getPosition();
-                palmRotation = holdingAvatar->getHand()->getCopyOfPalmData(HandData::LeftHand).getRotation();
-            }
+            palmPosition = palmData.getPosition();
+            palmRotation = palmData.getRotation();
         } else if (holdingAvatar->isMyAvatar()) {
             glm::vec3 avatarRigidBodyPosition;
             glm::quat avatarRigidBodyRotation;
@@ -153,14 +157,19 @@ std::shared_ptr<Avatar> AvatarActionHold::getTarget(glm::quat& rotation, glm::ve
 
         rotation = palmRotation * _relativeRotation;
         position = palmPosition + rotation * _relativePosition;
+
+        // update linearVelocity based on offset via _relativePosition;
+        linearVelocity = linearVelocity + glm::cross(angularVelocity, position - palmPosition);
     });
 
     return holdingAvatar;
 }
 
 void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
-    glm::quat rotation { Quaternions::IDENTITY };
-    glm::vec3 position { Vectors::ZERO };
+    glm::quat rotation;
+    glm::vec3 position;
+    glm::vec3 linearVelocity;
+    glm::vec3 angularVelocity;
     bool valid = false;
     int holdCount = 0;
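The added glm::cross term is the rigid-body transport rule: a grab point held at an offset from the palm moves at vPoint = vPalm + omega x (pPoint - pPalm). A minimal sketch of that rule (hypothetical helper, not in the codebase):

#include <glm/glm.hpp>

// Velocity of a point rigidly attached at an offset from a tracked origin:
//   vPoint = vOrigin + omega x (pPoint - pOrigin)
glm::vec3 pointVelocity(const glm::vec3& vOrigin, const glm::vec3& omega,
                        const glm::vec3& pPoint, const glm::vec3& pOrigin) {
    return vOrigin + glm::cross(omega, pPoint - pOrigin);
}

The same correction appears again in the Vive plugin below, where the controller position is shifted by a translation offset.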
@@ -173,7 +182,8 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
         std::shared_ptr<AvatarActionHold> holdAction = std::static_pointer_cast<AvatarActionHold>(action);
         glm::quat rotationForAction;
         glm::vec3 positionForAction;
-        std::shared_ptr<Avatar> holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction);
+        glm::vec3 linearVelocityForAction, angularVelocityForAction;
+        std::shared_ptr<Avatar> holdingAvatar = holdAction->getTarget(rotationForAction, positionForAction, linearVelocityForAction, angularVelocityForAction);
         if (holdingAvatar) {
             holdCount++;
             if (holdAction.get() == this) {
@@ -183,15 +193,21 @@ void AvatarActionHold::updateActionWorker(float deltaTimeStep) {
             }
 
             position += positionForAction;
+            linearVelocity += linearVelocityForAction;
+            angularVelocity += angularVelocityForAction;
         }
     }
 
     if (valid && holdCount > 0) {
         position /= holdCount;
+        linearVelocity /= holdCount;
+        angularVelocity /= holdCount;
 
         withWriteLock([&]{
             _positionalTarget = position;
             _rotationalTarget = rotation;
+            _linearVelocityTarget = linearVelocity;
+            _angularVelocityTarget = angularVelocity;
             _positionalTargetSet = true;
             _rotationalTargetSet = true;
             _active = true;
@@ -225,15 +241,8 @@ void AvatarActionHold::doKinematicUpdate(float deltaTimeStep) {
 
     withWriteLock([&]{
         if (_kinematicSetVelocity) {
-            if (_previousSet) {
-                // smooth velocity over 2 frames
-                glm::vec3 positionalDelta = _positionalTarget - _previousPositionalTarget;
-                glm::vec3 positionalVelocity =
-                    (positionalDelta + _previousPositionalDelta) / (deltaTimeStep + _previousDeltaTimeStep);
-                rigidBody->setLinearVelocity(glmToBullet(positionalVelocity));
-                _previousPositionalDelta = positionalDelta;
-                _previousDeltaTimeStep = deltaTimeStep;
-            }
+            rigidBody->setLinearVelocity(glmToBullet(_linearVelocityTarget));
+            rigidBody->setAngularVelocity(glmToBullet(_angularVelocityTarget));
        }
 
        btTransform worldTrans = rigidBody->getWorldTransform();
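The deleted branch estimated velocity by smoothing positional deltas over the last two frames; the new code drives Bullet directly with the targets accumulated in updateActionWorker. Written out, the removed estimate reduces to this sketch (illustrative name):

#include <glm/glm.hpp>

// Removed two-frame smoothing, as a formula:
//   velocity = (delta_n + delta_(n-1)) / (dt_n + dt_(n-1))
glm::vec3 twoFrameVelocity(const glm::vec3& delta, const glm::vec3& prevDelta,
                           float dt, float prevDt) {
    return (delta + prevDelta) / (dt + prevDt);
}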
@@ -36,7 +36,8 @@ public:
     virtual bool shouldSuppressLocationEdits() override { return _active && !_ownerEntity.expired(); }
 
     bool getAvatarRigidBodyLocation(glm::vec3& avatarRigidBodyPosition, glm::quat& avatarRigidBodyRotation);
-    std::shared_ptr<Avatar> getTarget(glm::quat& rotation, glm::vec3& position);
+    std::shared_ptr<Avatar> getTarget(glm::quat& rotation, glm::vec3& position,
+                                      glm::vec3& linearVelocity, glm::vec3& angularVelocity);
 
     virtual void prepareForPhysicsSimulation() override;
@@ -50,6 +51,9 @@ private:
     QString _hand { "right" };
     QUuid _holderID;
 
+    glm::vec3 _linearVelocityTarget;
+    glm::vec3 _angularVelocityTarget;
+
     bool _kinematic { false };
     bool _kinematicSetVelocity { false };
     bool _previousSet { false };
@@ -93,6 +93,7 @@ public:
     PalmData(HandData* owningHandData = nullptr, HandData::Hand hand = HandData::UnknownHand);
     glm::vec3 getPosition() const { return _owningHandData->localToWorldPosition(_rawPosition); }
     glm::vec3 getVelocity() const { return _owningHandData->localToWorldDirection(_rawVelocity); }
+    glm::vec3 getAngularVelocity() const { return _owningHandData->localToWorldDirection(_rawAngularVelocity); }
 
     const glm::vec3& getRawPosition() const { return _rawPosition; }
     bool isActive() const { return _isActive; }
@@ -113,7 +113,6 @@ void SixenseManager::deactivate() {
     _container->removeMenu(MENU_PATH);
 
     _inputDevice->_poseStateMap.clear();
-    _inputDevice->_collectedSamples.clear();
 
     if (_inputDevice->_deviceID != controller::Input::INVALID_DEVICE) {
         auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
@@ -158,7 +157,6 @@ void SixenseManager::InputDevice::update(float deltaTime, const controller::Inpu
            _axisStateMap.clear();
            _buttonPressedMap.clear();
            _poseStateMap.clear();
-           _collectedSamples.clear();
        }
        return;
    }
@@ -209,13 +207,10 @@ void SixenseManager::InputDevice::update(float deltaTime, const controller::Inpu
                rawPoses[i] = controller::Pose(position, rotation, Vectors::ZERO, Vectors::ZERO);
            } else {
                _poseStateMap.clear();
-               _collectedSamples.clear();
            }
        } else {
            auto hand = left ? controller::StandardPoseChannel::LEFT_HAND : controller::StandardPoseChannel::RIGHT_HAND;
            _poseStateMap[hand] = controller::Pose();
-           _collectedSamples[hand].first.clear();
-           _collectedSamples[hand].second.clear();
        }
    }
@@ -481,29 +476,24 @@ void SixenseManager::InputDevice::handlePoseEvent(float deltaTime, const control
     glm::vec3 velocity(0.0f);
     glm::vec3 angularVelocity(0.0f);
 
-    if (prevPose.isValid() && deltaTime > std::numeric_limits<float>::epsilon()) {
-        auto& samples = _collectedSamples[hand];
-
-        velocity = (pos - prevPose.getTranslation()) / deltaTime;
-        samples.first.addSample(velocity);
-        velocity = samples.first.average;
-
-        auto deltaRot = glm::normalize(rot * glm::conjugate(prevPose.getRotation()));
-        auto axis = glm::axis(deltaRot);
-        auto speed = glm::angle(deltaRot) / deltaTime;
-        assert(!glm::isnan(speed));
-        angularVelocity = speed * axis;
-        samples.second.addSample(angularVelocity);
-        angularVelocity = samples.second.average;
-    } else if (!prevPose.isValid()) {
-        _collectedSamples[hand].first.clear();
-        _collectedSamples[hand].second.clear();
-    }
+    glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
+
     // transform pose into avatar frame.
-    glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
-    auto avatarPose = controller::Pose(pos, rot, velocity, angularVelocity).transform(controllerToAvatar);
-    _poseStateMap[hand] = avatarPose;
+    auto nextPose = controller::Pose(pos, rot, velocity, angularVelocity).transform(controllerToAvatar);
+
+    if (prevPose.isValid() && (deltaTime > std::numeric_limits<float>::epsilon())) {
+        nextPose.velocity = (nextPose.getTranslation() - prevPose.getTranslation()) / deltaTime;
+
+        glm::quat deltaRotation = nextPose.getRotation() * glm::inverse(prevPose.getRotation());
+        float rotationAngle = glm::angle(deltaRotation);
+        if (rotationAngle > EPSILON) {
+            nextPose.angularVelocity = glm::normalize(glm::axis(deltaRotation));
+            nextPose.angularVelocity *= (rotationAngle / deltaTime);
+        }
+    }
+    _poseStateMap[hand] = nextPose;
 
 #endif // HAVE_SIXENSE
 }
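Note the reordering: the pose is transformed into the avatar frame first and the finite differences are taken there, so the stored velocities live in the same frame as the stored translation. When a pose changes frames, positions take the full affine transform while velocity-like vectors only rotate; a sketch of that rule, assuming an unscaled rigid transform (illustrative struct, not the engine's controller::Pose):

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Illustrative pose-with-velocities; assumes xform is rigid (no scale or shear).
struct SimplePose {
    glm::vec3 translation;
    glm::quat rotation;
    glm::vec3 velocity;
    glm::vec3 angularVelocity;
};

SimplePose transformPose(const SimplePose& pose, const glm::mat4& xform) {
    glm::quat frameRot = glm::quat_cast(xform); // rotation part of the frame change
    SimplePose out;
    out.translation = glm::vec3(xform * glm::vec4(pose.translation, 1.0f)); // full transform
    out.rotation = frameRot * pose.rotation;
    out.velocity = frameRot * pose.velocity; // directions rotate, never translate
    out.angularVelocity = frameRot * pose.angularVelocity;
    return out;
}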
@@ -52,11 +52,6 @@ private:
     static const glm::vec3 DEFAULT_AVATAR_POSITION;
     static const float CONTROLLER_THRESHOLD;
 
-    template<typename T>
-    using SampleAverage = MovingAverage<T, MAX_NUM_AVERAGING_SAMPLES>;
-    using Samples = std::pair<SampleAverage<glm::vec3>, SampleAverage<glm::vec3>>;
-    using MovingAverageMap = std::map<int, Samples>;
-
     class InputDevice : public controller::InputDevice {
     public:
         InputDevice() : controller::InputDevice("Hydra") {}
@@ -75,8 +70,6 @@ private:
 
         friend class SixenseManager;
 
-        MovingAverageMap _collectedSamples;
-
         int _calibrationState { CALIBRATION_STATE_IDLE };
         // these are calibration results
         glm::vec3 _avatarPosition { DEFAULT_AVATAR_POSITION }; // in hydra-frame
@@ -21,6 +21,7 @@
 #include <PerfStat.h>
 #include <plugins/PluginContainer.h>
 #include <ViewFrustum.h>
+#include <GLMHelpers.h>
 
 #include "OpenVrHelpers.h"
@@ -32,6 +33,8 @@ const QString StandingHMDSensorMode = "Standing HMD Sensor Mode"; // this probab
 static vr::IVRCompositor* _compositor{ nullptr };
 vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
+vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
+vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
 static mat4 _sensorResetMat;
 static std::array<vr::Hmd_Eye, 2> VR_EYES { { vr::Eye_Left, vr::Eye_Right } };
@@ -119,6 +122,8 @@ glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
     for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
         _trackedDevicePose[i] = predictedTrackedDevicePose[i];
         _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
+        _trackedDeviceLinearVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vVelocity));
+        _trackedDeviceAngularVelocities[i] = transformVectorFast(_sensorResetMat, toGlm(_trackedDevicePose[i].vAngularVelocity));
     }
     return _trackedDevicePoseMat4[0];
 }
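OpenVR reports per-device velocities in tracking space, so they are remapped with the same sensor-reset matrix as the poses (presumably what the new GLMHelpers.h include above supplies via transformVectorFast). Rotating a velocity by an affine matrix uses only its upper-left 3x3; a minimal equivalent sketch (rotateVector is an illustrative name):

#include <glm/glm.hpp>

// Rotate a direction/velocity by the rotation part of an affine matrix,
// ignoring translation (valid when the matrix carries no non-uniform scale).
glm::vec3 rotateVector(const glm::mat4& m, const glm::vec3& v) {
    return glm::mat3(m) * v;
}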
@@ -13,7 +13,7 @@
 #include <glm/gtc/matrix_transform.hpp>
 
 vr::IVRSystem* acquireOpenVrSystem();
-void releaseOpenVrSystem();
+void releaseOpenVrSystem();
 
 template<typename F>
 void openvr_for_each_eye(F f) {
@@ -25,6 +25,10 @@ inline mat4 toGlm(const vr::HmdMatrix44_t& m) {
     return glm::transpose(glm::make_mat4(&m.m[0][0]));
 }
 
+inline vec3 toGlm(const vr::HmdVector3_t& v) {
+    return vec3(v.v[0], v.v[1], v.v[2]);
+}
+
 inline mat4 toGlm(const vr::HmdMatrix34_t& m) {
     mat4 result = mat4(
         m.m[0][0], m.m[1][0], m.m[2][0], 0.0,
@@ -29,6 +29,8 @@
 
 extern vr::TrackedDevicePose_t _trackedDevicePose[vr::k_unMaxTrackedDeviceCount];
 extern mat4 _trackedDevicePoseMat4[vr::k_unMaxTrackedDeviceCount];
+extern vec3 _trackedDeviceLinearVelocities[vr::k_unMaxTrackedDeviceCount];
+extern vec3 _trackedDeviceAngularVelocities[vr::k_unMaxTrackedDeviceCount];
 
 vr::IVRSystem* acquireOpenVrSystem();
 void releaseOpenVrSystem();
@@ -249,10 +251,11 @@ void ViveControllerManager::InputDevice::update(float deltaTime, const controlle
             numTrackedControllers++;
             bool left = numTrackedControllers == 2;
 
-            const mat4& mat = _trackedDevicePoseMat4[device];
-
             if (!jointsCaptured) {
-                handlePoseEvent(inputCalibrationData, mat, numTrackedControllers - 1);
+                const mat4& mat = _trackedDevicePoseMat4[device];
+                const vec3 linearVelocity = _trackedDeviceLinearVelocities[device];
+                const vec3 angularVelocity = _trackedDeviceAngularVelocities[device];
+                handlePoseEvent(inputCalibrationData, mat, linearVelocity, angularVelocity, numTrackedControllers - 1);
             }
 
             // handle inputs
@@ -314,7 +317,9 @@ void ViveControllerManager::InputDevice::handleButtonEvent(uint32_t button, bool
     }
 }
 
-void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left) {
+void ViveControllerManager::InputDevice::handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData,
+                                                         const mat4& mat, const vec3& linearVelocity,
+                                                         const vec3& angularVelocity, bool left) {
     // When the sensor-to-world rotation is identity the coordinate axes look like this:
     //
     //                       user
@@ -379,16 +384,22 @@ void ViveControllerManager::InputDevice::handlePoseEvent(const controller::Input
     static const glm::vec3 leftTranslationOffset = glm::vec3(-1.0f, 1.0f, 1.0f) * CONTROLLER_OFFSET;
     static const glm::vec3 rightTranslationOffset = CONTROLLER_OFFSET;
 
-    glm::vec3 position = extractTranslation(mat);
-    glm::quat rotation = glm::quat_cast(mat);
+    auto translationOffset = (left ? leftTranslationOffset : rightTranslationOffset);
+    auto rotationOffset = (left ? leftRotationOffset : rightRotationOffset);
 
-    position += rotation * (left ? leftTranslationOffset : rightTranslationOffset);
-    rotation = rotation * (left ? leftRotationOffset : rightRotationOffset);
+    glm::vec3 position = extractTranslation(mat);
+    glm::quat rotation = glm::normalize(glm::quat_cast(mat));
+
+    position += rotation * translationOffset;
+    rotation = rotation * rotationOffset;
 
     // transform into avatar frame
     glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
-    auto avatarPose = controller::Pose(position, rotation).transform(controllerToAvatar);
-    _poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose;
+    auto avatarPose = controller::Pose(position, rotation);
+    // handle change in velocity due to translationOffset
+    avatarPose.velocity = linearVelocity + glm::cross(angularVelocity, position - extractTranslation(mat));
+    avatarPose.angularVelocity = angularVelocity;
+    _poseStateMap[left ? controller::LEFT_HAND : controller::RIGHT_HAND] = avatarPose.transform(controllerToAvatar);
 }
 
 controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableInputs() const {
@@ -46,7 +46,7 @@ public:
     void updateRendering(RenderArgs* args, render::ScenePointer scene, render::PendingChanges pendingChanges);
 
     void setRenderControllers(bool renderControllers) { _renderControllers = renderControllers; }
 
 private:
     class InputDevice : public controller::InputDevice {
     public:
@@ -60,7 +60,8 @@ private:
 
         void handleButtonEvent(uint32_t button, bool pressed, bool left);
         void handleAxisEvent(uint32_t axis, float x, float y, bool left);
-        void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat, bool left);
+        void handlePoseEvent(const controller::InputCalibrationData& inputCalibrationData, const mat4& mat,
+                             const vec3& linearVelocity, const vec3& angularVelocity, bool left);
 
         int _trackedControllers { 0 };
         vr::IVRSystem*& _hmd;
@@ -68,8 +69,8 @@ private:
     };
 
     void renderHand(const controller::Pose& pose, gpu::Batch& batch, int sign);
 
     bool _registeredWithInputMapper { false };
     bool _modelLoaded { false };