Mirror of https://github.com/overte-org/overte.git

Merge pull request #14040 from hyperlogic/feature/acceleration-limit-filter
Quality Improvements to Avatars driven by Vive Trackers

Commit eb383b1d09: 24 changed files with 1487 additions and 90 deletions
@@ -51,32 +51,34 @@
    { "from": "Vive.RSCenter", "to": "Standard.RightPrimaryThumb" },
    { "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },

    { "from": "Vive.LeftHand", "to": "Standard.LeftHand"},
    { "from": "Vive.RightHand", "to": "Standard.RightHand"},
    { "from": "Vive.LeftHand", "to": "Standard.LeftHand" },
    { "from": "Vive.RightHand", "to": "Standard.RightHand" },
    { "from": "Vive.Head", "to" : "Standard.Head" },

    {
        "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot",
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
    },

    {
        "from": "Vive.RightFoot", "to" : "Standard.RightFoot",
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
    },

    {
        "from": "Vive.Hips", "to" : "Standard.Hips",
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
    },

    {
        "from": "Vive.Spine2", "to" : "Standard.Spine2",
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.3}]
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
    },
    {
        "from": "Vive.RightArm", "to" : "Standard.RightArm",
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
    },
    {
        "from": "Vive.LeftArm", "to" : "Standard.LeftArm",
        "filters" : [{"type" : "exponentialSmoothing", "rotation" : 0.15, "translation": 0.15}]
    },

    { "from": "Vive.Head", "to" : "Standard.Head"},
    { "from": "Vive.RightArm", "to" : "Standard.RightArm" },
    { "from": "Vive.LeftArm", "to" : "Standard.LeftArm" },

    { "from": "Vive.TrackedObject00", "to" : "Standard.TrackedObject00" },
    { "from": "Vive.TrackedObject01", "to" : "Standard.TrackedObject01" },
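For reference, the rotation and translation values in the exponentialSmoothing entries above are per-frame blend weights on the newest sample; a smaller value means heavier smoothing and more lag. A minimal sketch of the update rule, consistent with the ExponentialSmoothingFilter.cpp hunk later in this diff (the function name here is illustrative, not part of the PR):

    #include <glm/glm.hpp>

    // Illustrative only: how a smoothing constant k from the mapping above is applied
    // each frame. k = 1 passes the new sample through; k near 0 barely moves the output.
    glm::vec3 smoothTranslation(const glm::vec3& newSample, const glm::vec3& prevOutput, float k) {
        return k * newSample + (1.0f - k) * prevOutput;
    }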
@@ -822,11 +822,44 @@ Flickable {
        }
    }

    Row {
        id: outOfRangeDataStrategyRow
        anchors.top: viveInDesktop.bottom
        anchors.topMargin: 5
        anchors.left: openVrConfiguration.left
        anchors.leftMargin: leftMargin + 10
        spacing: 15

        RalewayRegular {
            id: outOfRangeDataStrategyLabel
            size: 12
            text: "Out Of Range Data Strategy:"
            color: hifi.colors.lightGrayText
            topPadding: 5
        }

        HifiControls.ComboBox {
            id: outOfRangeDataStrategyComboBox

            height: 25
            width: 100

            editable: true
            colorScheme: hifi.colorSchemes.dark
            model: ["None", "Freeze", "Drop"]
            label: ""

            onCurrentIndexChanged: {
                sendConfigurationSettings();
            }
        }
    }

    RalewayBold {
        id: viveDesktopText
        size: 10
        size: 12
        text: "Use " + stack.selectedPlugin + " devices in desktop mode"
        color: hifi.colors.white
        color: hifi.colors.lightGrayText

        anchors {
            left: viveInDesktop.right
@@ -946,6 +979,7 @@ Flickable {

        viveInDesktop.checked = desktopMode;
        hmdInDesktop.checked = hmdDesktopPosition;
        outOfRangeDataStrategyComboBox.currentIndex = outOfRangeDataStrategyComboBox.model.indexOf(settings.outOfRangeDataStrategy);

        initializeButtonState();
        updateCalibrationText();

@@ -1107,7 +1141,8 @@ Flickable {
        "armCircumference": armCircumference.realValue,
        "shoulderWidth": shoulderWidth.realValue,
        "desktopMode": viveInDesktop.checked,
        "hmdDesktopTracking": hmdInDesktop.checked
        "hmdDesktopTracking": hmdInDesktop.checked,
        "outOfRangeDataStrategy": outOfRangeDataStrategyComboBox.model[outOfRangeDataStrategyComboBox.currentIndex]
    }

    return settingsObject;
@@ -5815,6 +5815,42 @@ void Application::update(float deltaTime) {
        controller::Pose pose = userInputMapper->getPoseState(action);
        myAvatar->setControllerPoseInSensorFrame(action, pose.transform(avatarToSensorMatrix));
    }

    static const std::vector<QString> trackedObjectStringLiterals = {
        QStringLiteral("_TrackedObject00"), QStringLiteral("_TrackedObject01"), QStringLiteral("_TrackedObject02"), QStringLiteral("_TrackedObject03"),
        QStringLiteral("_TrackedObject04"), QStringLiteral("_TrackedObject05"), QStringLiteral("_TrackedObject06"), QStringLiteral("_TrackedObject07"),
        QStringLiteral("_TrackedObject08"), QStringLiteral("_TrackedObject09"), QStringLiteral("_TrackedObject10"), QStringLiteral("_TrackedObject11"),
        QStringLiteral("_TrackedObject12"), QStringLiteral("_TrackedObject13"), QStringLiteral("_TrackedObject14"), QStringLiteral("_TrackedObject15")
    };

    // Controlled by the Developer > Avatar > Show Tracked Objects menu.
    if (_showTrackedObjects) {
        static const std::vector<controller::Action> trackedObjectActions = {
            controller::Action::TRACKED_OBJECT_00, controller::Action::TRACKED_OBJECT_01, controller::Action::TRACKED_OBJECT_02, controller::Action::TRACKED_OBJECT_03,
            controller::Action::TRACKED_OBJECT_04, controller::Action::TRACKED_OBJECT_05, controller::Action::TRACKED_OBJECT_06, controller::Action::TRACKED_OBJECT_07,
            controller::Action::TRACKED_OBJECT_08, controller::Action::TRACKED_OBJECT_09, controller::Action::TRACKED_OBJECT_10, controller::Action::TRACKED_OBJECT_11,
            controller::Action::TRACKED_OBJECT_12, controller::Action::TRACKED_OBJECT_13, controller::Action::TRACKED_OBJECT_14, controller::Action::TRACKED_OBJECT_15
        };

        int i = 0;
        glm::vec4 BLUE(0.0f, 0.0f, 1.0f, 1.0f);
        for (auto& action : trackedObjectActions) {
            controller::Pose pose = userInputMapper->getPoseState(action);
            if (pose.valid) {
                glm::vec3 pos = transformPoint(myAvatarMatrix, pose.translation);
                glm::quat rot = glmExtractRotation(myAvatarMatrix) * pose.rotation;
                DebugDraw::getInstance().addMarker(trackedObjectStringLiterals[i], rot, pos, BLUE);
            } else {
                DebugDraw::getInstance().removeMarker(trackedObjectStringLiterals[i]);
            }
            i++;
        }
    } else if (_prevShowTrackedObjects) {
        for (auto& key : trackedObjectStringLiterals) {
            DebugDraw::getInstance().removeMarker(key);
        }
    }
    _prevShowTrackedObjects = _showTrackedObjects;
}

updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...

@@ -8311,6 +8347,10 @@ void Application::setShowBulletConstraintLimits(bool value) {
    _physicsEngine->setShowBulletConstraintLimits(value);
}

void Application::setShowTrackedObjects(bool value) {
    _showTrackedObjects = value;
}

void Application::startHMDStandBySession() {
    _autoSwitchDisplayModeSupportedHMDPlugin->startStandBySession();
}
@@ -498,6 +498,8 @@ private slots:
    void setShowBulletConstraints(bool value);
    void setShowBulletConstraintLimits(bool value);

    void setShowTrackedObjects(bool value);

private:
    void init();
    bool handleKeyEventForFocusedEntityOrOverlay(QEvent* event);
@@ -779,5 +781,8 @@ private:
    std::atomic<bool> _pendingRenderEvent { true };

    bool quitWhenFinished { false };

    bool _showTrackedObjects { false };
    bool _prevShowTrackedObjects { false };
};
#endif // hifi_Application_h
@@ -626,6 +626,8 @@ Menu::Menu() {
        avatar.get(), SLOT(updateMotionBehaviorFromMenu()),
        UNSPECIFIED_POSITION, "Developer");

    addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowTrackedObjects, 0, false, qApp, SLOT(setShowTrackedObjects(bool)));

    // Developer > Hands >>>
    MenuWrapper* handOptionsMenu = developerMenu->addMenu("Hands");
    addCheckableActionToQMenuAndActionHash(handOptionsMenu, MenuOption::DisplayHandTargets, 0, false,
@@ -183,6 +183,7 @@ namespace MenuOption {
    const QString RunClientScriptTests = "Run Client Script Tests";
    const QString RunTimingTests = "Run Timing Tests";
    const QString ScriptedMotorControl = "Enable Scripted Motor Control";
    const QString ShowTrackedObjects = "Show Tracked Objects";
    const QString SendWrongDSConnectVersion = "Send wrong DS connect version";
    const QString SendWrongProtocolVersion = "Send wrong protocol version";
    const QString SetHomeLocation = "Set Home Location";
@@ -36,6 +36,7 @@ Rig::CharacterControllerState convertCharacterControllerState(CharacterControlle
static AnimPose computeHipsInSensorFrame(MyAvatar* myAvatar, bool isFlying) {
    glm::mat4 worldToSensorMat = glm::inverse(myAvatar->getSensorToWorldMatrix());

    // check for pinned hips.
    auto hipsIndex = myAvatar->getJointIndex("Hips");
    if (myAvatar->isJointPinned(hipsIndex)) {
@@ -199,49 +200,38 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    if (avatarHeadPose.isValid() && !(params.primaryControllerFlags[Rig::PrimaryControllerType_Hips] & (uint8_t)Rig::ControllerFlags::Enabled)) {
        bool isFlying = (myAvatar->getCharacterController()->getState() == CharacterController::State::Hover || myAvatar->getCharacterController()->computeCollisionGroup() == BULLET_COLLISION_GROUP_COLLISIONLESS);

        if (!_prevHipsValid) {
            AnimPose hips = computeHipsInSensorFrame(myAvatar, isFlying);
            _prevHips = hips;
        }

        AnimPose hips = computeHipsInSensorFrame(myAvatar, isFlying);

        // timescale in seconds
        const float TRANS_HORIZ_TIMESCALE = 0.15f;
        const float TRANS_VERT_TIMESCALE = 0.01f; // We want the vertical component of the hips to follow quickly to prevent spine squash/stretch.
        const float ROT_TIMESCALE = 0.15f;
        const float FLY_IDLE_TRANSITION_TIMESCALE = 0.25f;

        float transHorizAlpha, transVertAlpha, rotAlpha;
        if (_flyIdleTimer < 0.0f) {
            transHorizAlpha = glm::min(deltaTime / TRANS_HORIZ_TIMESCALE, 1.0f);
            transVertAlpha = glm::min(deltaTime / TRANS_VERT_TIMESCALE, 1.0f);
            rotAlpha = glm::min(deltaTime / ROT_TIMESCALE, 1.0f);
            _smoothHipsHelper.setHorizontalTranslationTimescale(TRANS_HORIZ_TIMESCALE);
            _smoothHipsHelper.setVerticalTranslationTimescale(TRANS_VERT_TIMESCALE);
            _smoothHipsHelper.setRotationTimescale(ROT_TIMESCALE);
        } else {
            transHorizAlpha = glm::min(deltaTime / FLY_IDLE_TRANSITION_TIMESCALE, 1.0f);
            transVertAlpha = glm::min(deltaTime / FLY_IDLE_TRANSITION_TIMESCALE, 1.0f);
            rotAlpha = glm::min(deltaTime / FLY_IDLE_TRANSITION_TIMESCALE, 1.0f);
            _smoothHipsHelper.setHorizontalTranslationTimescale(FLY_IDLE_TRANSITION_TIMESCALE);
            _smoothHipsHelper.setVerticalTranslationTimescale(FLY_IDLE_TRANSITION_TIMESCALE);
            _smoothHipsHelper.setRotationTimescale(FLY_IDLE_TRANSITION_TIMESCALE);
        }

        // smoothly lerp hips, in sensor frame, with different coefficients for horizontal and vertical translation.
        float hipsY = hips.trans().y;
        hips.trans() = lerp(_prevHips.trans(), hips.trans(), transHorizAlpha);
        hips.trans().y = lerp(_prevHips.trans().y, hipsY, transVertAlpha);
        hips.rot() = safeLerp(_prevHips.rot(), hips.rot(), rotAlpha);

        _prevHips = hips;
        _prevHipsValid = true;
        AnimPose sensorHips = computeHipsInSensorFrame(myAvatar, isFlying);
        if (!_prevIsEstimatingHips) {
            _smoothHipsHelper.teleport(sensorHips);
        }
        sensorHips = _smoothHipsHelper.update(sensorHips, deltaTime);

        glm::mat4 invRigMat = glm::inverse(myAvatar->getTransform().getMatrix() * Matrices::Y_180);
        AnimPose sensorToRigPose(invRigMat * myAvatar->getSensorToWorldMatrix());

        params.primaryControllerPoses[Rig::PrimaryControllerType_Hips] = sensorToRigPose * hips;
        params.primaryControllerPoses[Rig::PrimaryControllerType_Hips] = sensorToRigPose * sensorHips;
        params.primaryControllerFlags[Rig::PrimaryControllerType_Hips] = (uint8_t)Rig::ControllerFlags::Enabled | (uint8_t)Rig::ControllerFlags::Estimated;

        // set spine2 if we have hand controllers
        if (myAvatar->getControllerPoseInAvatarFrame(controller::Action::RIGHT_HAND).isValid() &&
            myAvatar->getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() &&
            !(params.primaryControllerFlags[Rig::PrimaryControllerType_Spine2] & (uint8_t)Rig::ControllerFlags::Enabled)) {
            myAvatar->getControllerPoseInAvatarFrame(controller::Action::LEFT_HAND).isValid() &&
            !(params.primaryControllerFlags[Rig::PrimaryControllerType_Spine2] & (uint8_t)Rig::ControllerFlags::Enabled)) {

            AnimPose currentSpine2Pose;
            AnimPose currentHeadPose;
@@ -268,8 +258,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
            }
        }

        _prevIsEstimatingHips = true;
    } else {
        _prevHipsValid = false;
        _prevIsEstimatingHips = false;
    }

    params.isTalking = head->getTimeWithoutTalking() <= 1.5f;
@@ -10,6 +10,7 @@
#define hifi_MySkeletonModel_h

#include <avatars-renderer/SkeletonModel.h>
#include <AnimUtil.h>
#include "MyAvatar.h"

/// A skeleton loaded from a model.
@@ -26,11 +27,12 @@ public:
private:
    void updateFingers();

    AnimPose _prevHips; // sensor frame
    bool _prevHipsValid { false };
    CriticallyDampedSpringPoseHelper _smoothHipsHelper; // sensor frame
    bool _prevIsFlying { false };
    float _flyIdleTimer { 0.0f };

    float _prevIsEstimatingHips { false };

    std::map<int, int> _jointRotationFrameOffsetMap;
};
@@ -24,7 +24,7 @@
#include "AnimUtil.h"

static const int MAX_TARGET_MARKERS = 30;
static const float JOINT_CHAIN_INTERP_TIME = 0.25f;
static const float JOINT_CHAIN_INTERP_TIME = 0.5f;

static void lookupJointInfo(const AnimInverseKinematics::JointChainInfo& jointChainInfo,
                            int indexA, int indexB,
@@ -253,11 +253,25 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
    if (numLoops == MAX_IK_LOOPS) {
        for (size_t i = 0; i < _prevJointChainInfoVec.size(); i++) {
            if (_prevJointChainInfoVec[i].timer > 0.0f) {

                float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[i].timer) / JOINT_CHAIN_INTERP_TIME;

                // ease in expo
                alpha = 1.0f - powf(2.0f, -10.0f * alpha);

                size_t chainSize = std::min(_prevJointChainInfoVec[i].jointInfoVec.size(), jointChainInfoVec[i].jointInfoVec.size());
                for (size_t j = 0; j < chainSize; j++) {
                    jointChainInfoVec[i].jointInfoVec[j].rot = safeMix(_prevJointChainInfoVec[i].jointInfoVec[j].rot, jointChainInfoVec[i].jointInfoVec[j].rot, alpha);
                    jointChainInfoVec[i].jointInfoVec[j].trans = lerp(_prevJointChainInfoVec[i].jointInfoVec[j].trans, jointChainInfoVec[i].jointInfoVec[j].trans, alpha);

                if (jointChainInfoVec[i].target.getType() != IKTarget::Type::Unknown) {
                    // if we are interping into an enabled target type, i.e. not off, lerp the rot and the trans.
                    for (size_t j = 0; j < chainSize; j++) {
                        jointChainInfoVec[i].jointInfoVec[j].rot = safeMix(_prevJointChainInfoVec[i].jointInfoVec[j].rot, jointChainInfoVec[i].jointInfoVec[j].rot, alpha);
                        jointChainInfoVec[i].jointInfoVec[j].trans = lerp(_prevJointChainInfoVec[i].jointInfoVec[j].trans, jointChainInfoVec[i].jointInfoVec[j].trans, alpha);
                    }
                } else {
                    // if we are interping into a disabled target type, keep the rot & trans the same, but lerp the weight down to zero.
                    jointChainInfoVec[i].target.setType((int)_prevJointChainInfoVec[i].target.getType());
                    jointChainInfoVec[i].target.setWeight(_prevJointChainInfoVec[i].target.getWeight() * (1.0f - alpha));
                    jointChainInfoVec[i].jointInfoVec = _prevJointChainInfoVec[i].jointInfoVec;
                }
            }
        }
@@ -201,14 +201,17 @@ const AnimPoseVec& AnimTwoBoneIK::evaluate(const AnimVariantMap& animVars, const
    if (_interpType != InterpType::None) {
        _interpAlpha += _interpAlphaVel * dt;

        // ease in expo
        float easeInAlpha = 1.0f - powf(2.0f, -10.0f * _interpAlpha);

        if (_interpAlpha < 1.0f) {
            AnimChain interpChain;
            if (_interpType == InterpType::SnapshotToUnderPoses) {
                interpChain = underChain;
                interpChain.blend(_snapshotChain, _interpAlpha);
                interpChain.blend(_snapshotChain, easeInAlpha);
            } else if (_interpType == InterpType::SnapshotToSolve) {
                interpChain = ikChain;
                interpChain.blend(_snapshotChain, _interpAlpha);
                interpChain.blend(_snapshotChain, easeInAlpha);
            }
            // copy interpChain into _poses
            interpChain.outputRelativePoses(_poses);
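The two IK hunks above swap a linear blend factor for the "ease in expo" curve, so an interpolation starts quickly and settles gently instead of moving at a constant rate. A minimal standalone sketch of that curve (illustrative only, not part of this PR):

    #include <cmath>

    // Maps a linear factor t in [0, 1] onto the "ease in expo" curve used above.
    // Output rises steeply near t = 0 and levels off, reaching about 0.999 at t = 1.
    float easeInExpo(float t) {
        return 1.0f - std::pow(2.0f, -10.0f * t);
    }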
@@ -38,4 +38,94 @@ AnimPose boneLookAt(const glm::vec3& target, const AnimPose& bone);
// and returns a bodyRot that is also z-forward and y-up
glm::quat computeBodyFacingFromHead(const glm::quat& headRot, const glm::vec3& up);


// Uses an approximation of a critically damped spring to smooth full AnimPoses.
// It provides separate timescales for the horizontal, vertical and rotation components.
// The timescale is roughly how much time it takes the spring to reach halfway toward its target.
class CriticallyDampedSpringPoseHelper {
public:
    CriticallyDampedSpringPoseHelper() : _prevPoseValid(false) {}

    void setHorizontalTranslationTimescale(float timescale) {
        _horizontalTranslationTimescale = timescale;
    }
    void setVerticalTranslationTimescale(float timescale) {
        _verticalTranslationTimescale = timescale;
    }
    void setRotationTimescale(float timescale) {
        _rotationTimescale = timescale;
    }

    AnimPose update(const AnimPose& pose, float deltaTime) {
        if (!_prevPoseValid) {
            _prevPose = pose;
            _prevPoseValid = true;
        }

        const float horizontalTranslationAlpha = glm::min(deltaTime / _horizontalTranslationTimescale, 1.0f);
        const float verticalTranslationAlpha = glm::min(deltaTime / _verticalTranslationTimescale, 1.0f);
        const float rotationAlpha = glm::min(deltaTime / _rotationTimescale, 1.0f);

        const float poseY = pose.trans().y;
        AnimPose newPose = _prevPose;
        newPose.trans() = lerp(_prevPose.trans(), pose.trans(), horizontalTranslationAlpha);
        newPose.trans().y = lerp(_prevPose.trans().y, poseY, verticalTranslationAlpha);
        newPose.rot() = safeLerp(_prevPose.rot(), pose.rot(), rotationAlpha);

        _prevPose = newPose;
        _prevPoseValid = true;

        return newPose;
    }

    void teleport(const AnimPose& pose) {
        _prevPoseValid = true;
        _prevPose = pose;
    }

protected:
    AnimPose _prevPose;
    float _horizontalTranslationTimescale { 0.15f };
    float _verticalTranslationTimescale { 0.15f };
    float _rotationTimescale { 0.15f };
    bool _prevPoseValid;
};

class SnapshotBlendPoseHelper {
public:
    SnapshotBlendPoseHelper() : _snapshotValid(false) {}

    void setBlendDuration(float duration) {
        _duration = duration;
    }

    void setSnapshot(const AnimPose& pose) {
        _snapshotValid = true;
        _snapshotPose = pose;
        _timer = _duration;
    }

    AnimPose update(const AnimPose& targetPose, float deltaTime) {
        _timer -= deltaTime;
        if (_timer > 0.0f) {
            float alpha = (_duration - _timer) / _duration;

            // ease in expo
            alpha = 1.0f - powf(2.0f, -10.0f * alpha);

            AnimPose newPose = targetPose;
            newPose.blend(_snapshotPose, alpha);
            return newPose;
        } else {
            return targetPose;
        }
    }

protected:
    AnimPose _snapshotPose;
    float _duration { 1.0f };
    float _timer { 0.0f };
    bool _snapshotValid { false };
};

#endif
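A minimal usage sketch for CriticallyDampedSpringPoseHelper as declared above (illustrative only; the function, variable names and flags here are assumptions, though the 0.15/0.01 timescales match the MySkeletonModel.cpp hunk earlier in this diff):

    // Assumes the AnimUtil.h declarations above are in scope.
    AnimPose smoothSensorHips(CriticallyDampedSpringPoseHelper& helper, const AnimPose& rawSensorHips,
                              float deltaTime, bool justReacquiredTracking) {
        helper.setHorizontalTranslationTimescale(0.15f);
        helper.setVerticalTranslationTimescale(0.01f); // follow vertical motion almost immediately
        helper.setRotationTimescale(0.15f);
        if (justReacquiredTracking) {
            helper.teleport(rawSensorHips);            // drop the smoothing history instead of lerping across a jump
        }
        return helper.update(rawSensorHips, deltaTime);
    }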
@@ -1675,6 +1675,7 @@ glm::vec3 Rig::calculateKneePoleVector(int footJointIndex, int kneeIndex, int up

void Rig::updateFromControllerParameters(const ControllerParameters& params, float dt) {
    if (!_animSkeleton || !_animNode) {
        _previousControllerParameters = params;
        return;
    }

@@ -1685,7 +1686,9 @@ void Rig::updateFromControllerParameters(const ControllerParameters& params, flo
    bool leftHandEnabled = params.primaryControllerFlags[PrimaryControllerType_LeftHand] & (uint8_t)ControllerFlags::Enabled;
    bool rightHandEnabled = params.primaryControllerFlags[PrimaryControllerType_RightHand] & (uint8_t)ControllerFlags::Enabled;
    bool hipsEnabled = params.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Enabled;
    bool prevHipsEnabled = _previousControllerParameters.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Enabled;
    bool hipsEstimated = params.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Estimated;
    bool prevHipsEstimated = _previousControllerParameters.primaryControllerFlags[PrimaryControllerType_Hips] & (uint8_t)ControllerFlags::Estimated;
    bool leftFootEnabled = params.primaryControllerFlags[PrimaryControllerType_LeftFoot] & (uint8_t)ControllerFlags::Enabled;
    bool rightFootEnabled = params.primaryControllerFlags[PrimaryControllerType_RightFoot] & (uint8_t)ControllerFlags::Enabled;
    bool spine2Enabled = params.primaryControllerFlags[PrimaryControllerType_Spine2] & (uint8_t)ControllerFlags::Enabled;
@@ -1724,9 +1727,26 @@ void Rig::updateFromControllerParameters(const ControllerParameters& params, flo
    }

    if (hipsEnabled) {

        // Apply a bit of smoothing when the hips toggle between estimated and non-estimated poses.
        // This should help smooth out problems with the vive tracker when the sensor is occluded.
        if (prevHipsEnabled && hipsEstimated != prevHipsEstimated) {
            // blend from a snapshot of the previous hips.
            const float HIPS_BLEND_DURATION = 0.5f;
            _hipsBlendHelper.setBlendDuration(HIPS_BLEND_DURATION);
            _hipsBlendHelper.setSnapshot(_previousControllerParameters.primaryControllerPoses[PrimaryControllerType_Hips]);
        } else if (!prevHipsEnabled) {
            // we have no sensible value to blend from.
            const float HIPS_BLEND_DURATION = 0.0f;
            _hipsBlendHelper.setBlendDuration(HIPS_BLEND_DURATION);
            _hipsBlendHelper.setSnapshot(params.primaryControllerPoses[PrimaryControllerType_Hips]);
        }

        AnimPose hips = _hipsBlendHelper.update(params.primaryControllerPoses[PrimaryControllerType_Hips], dt);

        _animVars.set("hipsType", (int)IKTarget::Type::RotationAndPosition);
        _animVars.set("hipsPosition", params.primaryControllerPoses[PrimaryControllerType_Hips].trans());
        _animVars.set("hipsRotation", params.primaryControllerPoses[PrimaryControllerType_Hips].rot());
        _animVars.set("hipsPosition", hips.trans());
        _animVars.set("hipsRotation", hips.rot());
    } else {
        _animVars.set("hipsType", (int)IKTarget::Type::Unknown);
    }
@@ -1766,6 +1786,8 @@ void Rig::updateFromControllerParameters(const ControllerParameters& params, flo
            }
        }
    }

    _previousControllerParameters = params;
}

void Rig::initAnimGraph(const QUrl& url) {
@@ -24,6 +24,7 @@
#include "AnimNode.h"
#include "AnimNodeLoader.h"
#include "SimpleMovingAverage.h"
#include "AnimUtil.h"

class Rig;
class AnimInverseKinematics;
@@ -414,6 +415,9 @@ protected:

    AnimContext _lastContext;
    AnimVariantMap _lastAnimVars;

    SnapshotBlendPoseHelper _hipsBlendHelper;
    ControllerParameters _previousControllerParameters;
};

#endif /* defined(__hifi__Rig__) */
@@ -31,6 +31,7 @@
#include "filters/RotateFilter.h"
#include "filters/LowVelocityFilter.h"
#include "filters/ExponentialSmoothingFilter.h"
#include "filters/AccelerationLimiterFilter.h"

using namespace controller;

@@ -51,6 +52,7 @@ REGISTER_FILTER_CLASS_INSTANCE(PostTransformFilter, "postTransform")
REGISTER_FILTER_CLASS_INSTANCE(RotateFilter, "rotate")
REGISTER_FILTER_CLASS_INSTANCE(LowVelocityFilter, "lowVelocity")
REGISTER_FILTER_CLASS_INSTANCE(ExponentialSmoothingFilter, "exponentialSmoothing")
REGISTER_FILTER_CLASS_INSTANCE(AccelerationLimiterFilter, "accelerationLimiter")

const QString JSON_FILTER_TYPE = QStringLiteral("type");
const QString JSON_FILTER_PARAMS = QStringLiteral("params");
@@ -0,0 +1,192 @@
//
//  Created by Anthony Thibault 2018/11/09
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AccelerationLimiterFilter.h"

#include <QtCore/QJsonObject>
#include <QtCore/QJsonArray>
#include "../../UserInputMapper.h"
#include "../../Input.h"
#include <DependencyManager.h>
#include <QDebug>
#include <StreamUtils.h>

static const QString JSON_ROTATION_ACCELERATION_LIMIT = QStringLiteral("rotationAccelerationLimit");
static const QString JSON_TRANSLATION_ACCELERATION_LIMIT = QStringLiteral("translationAccelerationLimit");
static const QString JSON_TRANSLATION_SNAP_THRESHOLD = QStringLiteral("translationSnapThreshold");
static const QString JSON_ROTATION_SNAP_THRESHOLD = QStringLiteral("rotationSnapThreshold");

static glm::vec3 angularVelFromDeltaRot(const glm::quat& deltaQ, float dt) {
    // Measure the angular velocity of a delta rotation quaternion by using the quaternion logarithm.
    // The logarithm of a unit quaternion returns the axis of rotation with a length of one half the angle of rotation in the imaginary part.
    // The real part will be 0. Then we multiply it by 2 / dt, turning it into the angular velocity (except for the extra w = 0 part).
    glm::quat omegaQ((2.0f / dt) * glm::log(deltaQ));
    return glm::vec3(omegaQ.x, omegaQ.y, omegaQ.z);
}

static glm::quat deltaRotFromAngularVel(const glm::vec3& omega, float dt) {
    // Convert angular velocity into a delta quaternion by using the quaternion exponent.
    // The exponent of a quaternion returns a delta rotation around the axis of the imaginary part, by twice the angle determined by the length of that imaginary part.
    // It is the inverse of the logarithm step in angularVelFromDeltaRot.
    glm::quat omegaQ(0.0f, omega.x, omega.y, omega.z);
    return glm::exp((dt / 2.0f) * omegaQ);
}

static glm::vec3 filterTranslation(const glm::vec3& x0, const glm::vec3& x1, const glm::vec3& x2, const glm::vec3& x3,
                                   float dt, float accLimit, float snapThreshold) {

    // measure the linear velocities of this step and the previous step
    glm::vec3 v1 = (x3 - x1) / (2.0f * dt);
    glm::vec3 v0 = (x2 - x0) / (2.0f * dt);

    // compute the acceleration
    const glm::vec3 a = (v1 - v0) / dt;

    // clamp the acceleration if it is over the limit
    float aLen = glm::length(a);

    // pick limit based on whether we are moving faster than our target
    float distToTarget = glm::length(x3 - x2);
    if (aLen > accLimit && distToTarget > snapThreshold) {
        // Solve for a new `v1`, such that `a` does not exceed `aLimit`
        // This combines two steps:
        // 1) Computing a limited acceleration in the direction of `a`, but with a magnitude of `aLimit`:
        //    `newA = a * (aLimit / aLen)`
        // 2) Computing new `v1`
        //    `v1 = newA * dt + v0`
        // We combine the scalars from step 1 and step 2 into a single term to avoid having to do multiple scalar-vec3 multiplies.
        v1 = a * ((accLimit * dt) / aLen) + v0;

        // apply limited v1 to compute filtered x3
        return v1 * dt + x2;
    } else {
        // did not exceed limit, no filtering necessary
        return x3;
    }
}

static glm::quat filterRotation(const glm::quat& q0In, const glm::quat& q1In, const glm::quat& q2In, const glm::quat& q3In,
                                float dt, float accLimit, float snapThreshold) {

    // ensure quaternions have the same polarity
    glm::quat q0 = q0In;
    glm::quat q1 = glm::dot(q0In, q1In) < 0.0f ? -q1In : q1In;
    glm::quat q2 = glm::dot(q1In, q2In) < 0.0f ? -q2In : q2In;
    glm::quat q3 = glm::dot(q2In, q3In) < 0.0f ? -q3In : q3In;

    // measure the angular velocities of this step and the previous step
    glm::vec3 w1 = angularVelFromDeltaRot(q3 * glm::inverse(q1), 2.0f * dt);
    glm::vec3 w0 = angularVelFromDeltaRot(q2 * glm::inverse(q0), 2.0f * dt);

    const glm::vec3 a = (w1 - w0) / dt;
    float aLen = glm::length(a);

    // clamp the acceleration if it is over the limit
    float angleToTarget = glm::angle(q3 * glm::inverse(q2));
    if (aLen > accLimit && angleToTarget > snapThreshold) {
        // solve for a new w1, such that a does not exceed the accLimit
        w1 = a * ((accLimit * dt) / aLen) + w0;

        // apply limited w1 to compute filtered q3
        return deltaRotFromAngularVel(w1, dt) * q2;
    } else {
        // did not exceed limit, no filtering necessary
        return q3;
    }
}

namespace controller {

    Pose AccelerationLimiterFilter::apply(Pose value) const {

        if (value.isValid()) {

            // to perform filtering in sensor space, we need to compute the transformations.
            auto userInputMapper = DependencyManager::get<UserInputMapper>();
            const InputCalibrationData calibrationData = userInputMapper->getInputCalibrationData();
            glm::mat4 sensorToAvatarMat = glm::inverse(calibrationData.avatarMat) * calibrationData.sensorToWorldMat;
            glm::mat4 avatarToSensorMat = glm::inverse(calibrationData.sensorToWorldMat) * calibrationData.avatarMat;

            // transform pose into sensor space.
            Pose sensorValue = value.transform(avatarToSensorMat);

            if (_prevValid) {

                const float DELTA_TIME = 0.01111111f;

                glm::vec3 unfilteredTranslation = sensorValue.translation;
                sensorValue.translation = filterTranslation(_prevPos[0], _prevPos[1], _prevPos[2], sensorValue.translation,
                                                            DELTA_TIME, _translationAccelerationLimit, _translationSnapThreshold);
                glm::quat unfilteredRot = sensorValue.rotation;
                sensorValue.rotation = filterRotation(_prevRot[0], _prevRot[1], _prevRot[2], sensorValue.rotation,
                                                      DELTA_TIME, _rotationAccelerationLimit, _rotationSnapThreshold);

                // remember previous values.
                _prevPos[0] = _prevPos[1];
                _prevPos[1] = _prevPos[2];
                _prevPos[2] = sensorValue.translation;
                _prevRot[0] = _prevRot[1];
                _prevRot[1] = _prevRot[2];
                _prevRot[2] = sensorValue.rotation;

                _unfilteredPrevPos[0] = _unfilteredPrevPos[1];
                _unfilteredPrevPos[1] = _unfilteredPrevPos[2];
                _unfilteredPrevPos[2] = unfilteredTranslation;
                _unfilteredPrevRot[0] = _unfilteredPrevRot[1];
                _unfilteredPrevRot[1] = _unfilteredPrevRot[2];
                _unfilteredPrevRot[2] = unfilteredRot;

                // transform back into avatar space
                return sensorValue.transform(sensorToAvatarMat);
            } else {
                // initialize previous values with the current sample.
                _prevPos[0] = sensorValue.translation;
                _prevPos[1] = sensorValue.translation;
                _prevPos[2] = sensorValue.translation;
                _prevRot[0] = sensorValue.rotation;
                _prevRot[1] = sensorValue.rotation;
                _prevRot[2] = sensorValue.rotation;

                _unfilteredPrevPos[0] = sensorValue.translation;
                _unfilteredPrevPos[1] = sensorValue.translation;
                _unfilteredPrevPos[2] = sensorValue.translation;
                _unfilteredPrevRot[0] = sensorValue.rotation;
                _unfilteredPrevRot[1] = sensorValue.rotation;
                _unfilteredPrevRot[2] = sensorValue.rotation;

                _prevValid = true;

                // no previous value to smooth with, so return value unchanged
                return value;
            }
        } else {
            // mark previous poses as invalid.
            _prevValid = false;

            // return invalid value unchanged
            return value;
        }
    }

    bool AccelerationLimiterFilter::parseParameters(const QJsonValue& parameters) {
        if (parameters.isObject()) {
            auto obj = parameters.toObject();
            if (obj.contains(JSON_ROTATION_ACCELERATION_LIMIT) && obj.contains(JSON_TRANSLATION_ACCELERATION_LIMIT) &&
                obj.contains(JSON_ROTATION_SNAP_THRESHOLD) && obj.contains(JSON_TRANSLATION_SNAP_THRESHOLD)) {
                _rotationAccelerationLimit = (float)obj[JSON_ROTATION_ACCELERATION_LIMIT].toDouble();
                _translationAccelerationLimit = (float)obj[JSON_TRANSLATION_ACCELERATION_LIMIT].toDouble();
                _rotationSnapThreshold = (float)obj[JSON_ROTATION_SNAP_THRESHOLD].toDouble();
                _translationSnapThreshold = (float)obj[JSON_TRANSLATION_SNAP_THRESHOLD].toDouble();
                return true;
            }
        }
        return false;
    }

}
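To make the clamping step in filterTranslation above concrete, here is a small standalone sketch (illustrative only, not part of this PR) of the same central-difference acceleration limit applied to a single scalar axis; the snap-threshold check is omitted for brevity and the numbers are made up:

    #include <cmath>
    #include <cstdio>

    // x0..x2 are the three previous samples, x3 is the new sample,
    // accLimit is the maximum allowed acceleration in units/s^2.
    float limitAcceleration1D(float x0, float x1, float x2, float x3, float dt, float accLimit) {
        float v1 = (x3 - x1) / (2.0f * dt);                 // velocity estimate at the newest step
        float v0 = (x2 - x0) / (2.0f * dt);                 // velocity estimate at the previous step
        float a = (v1 - v0) / dt;                           // acceleration between the two estimates
        if (std::fabs(a) > accLimit) {
            float sign = (a < 0.0f) ? -1.0f : 1.0f;
            v1 = sign * accLimit * dt + v0;                 // rescale the acceleration to accLimit, re-derive v1
            return v1 * dt + x2;                            // integrate the limited velocity from the last output
        }
        return x3;                                          // within the limit: pass the sample through
    }

    int main() {
        // A sudden 1 m jump in a roughly 90 Hz sample stream is pulled back toward the previous sample.
        std::printf("%f\n", limitAcceleration1D(0.0f, 0.0f, 0.0f, 1.0f, 0.0111f, 100.0f));
        return 0;
    }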
@@ -0,0 +1,41 @@
//
//  Created by Anthony Thibault 2018/11/09
//  Copyright 2018 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_Controllers_Filters_Acceleration_Limiter_h
#define hifi_Controllers_Filters_Acceleration_Limiter_h

#include "../Filter.h"

namespace controller {

    class AccelerationLimiterFilter : public Filter {
        REGISTER_FILTER_CLASS(AccelerationLimiterFilter);

    public:
        AccelerationLimiterFilter() {}

        float apply(float value) const override { return value; }
        Pose apply(Pose value) const override;
        bool parseParameters(const QJsonValue& parameters) override;

    private:
        float _rotationAccelerationLimit { FLT_MAX };
        float _translationAccelerationLimit { FLT_MAX };
        float _rotationSnapThreshold { 0.0f };
        float _translationSnapThreshold { 0.0f };

        mutable glm::vec3 _prevPos[3]; // sensor space
        mutable glm::quat _prevRot[3]; // sensor space
        mutable glm::vec3 _unfilteredPrevPos[3]; // sensor space
        mutable glm::quat _unfilteredPrevRot[3]; // sensor space
        mutable bool _prevValid { false };
    };

}

#endif
@@ -35,7 +35,7 @@ namespace controller {
        if (_prevSensorValue.isValid()) {
            // exponential smoothing filter
            sensorValue.translation = _translationConstant * sensorValue.getTranslation() + (1.0f - _translationConstant) * _prevSensorValue.getTranslation();
            sensorValue.rotation = safeMix(sensorValue.getRotation(), _prevSensorValue.getRotation(), _rotationConstant);
            sensorValue.rotation = safeMix(sensorValue.getRotation(), _prevSensorValue.getRotation(), (1.0f - _rotationConstant));

            // remember previous sensor space value.
            _prevSensorValue = sensorValue;
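The one-line change above makes the rotation channel interpret its smoothing constant the same way the translation channel already does. Assuming safeMix(a, b, t) blends from a toward b by t (a glm::mix-style slerp), both channels now reduce to "weight k on the new sample, 1 - k on the previous output"; before the change, a larger rotation constant meant more smoothing while a larger translation constant meant less. A hedged sketch of the now-consistent rule (names are illustrative):

    // Assuming safeMix(a, b, t) returns a blend that is a at t = 0 and b at t = 1:
    glm::vec3 smoothedTranslation = k * newTranslation + (1.0f - k) * prevTranslation;
    glm::quat smoothedRotation = safeMix(newRotation, prevRotation, 1.0f - k);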
@@ -129,6 +129,28 @@ static glm::mat4 calculateResetMat() {
    return glm::mat4();
}

static QString outOfRangeDataStrategyToString(ViveControllerManager::OutOfRangeDataStrategy strategy) {
    switch (strategy) {
    default:
    case ViveControllerManager::OutOfRangeDataStrategy::None:
        return "None";
    case ViveControllerManager::OutOfRangeDataStrategy::Freeze:
        return "Freeze";
    case ViveControllerManager::OutOfRangeDataStrategy::Drop:
        return "Drop";
    }
}

static ViveControllerManager::OutOfRangeDataStrategy stringToOutOfRangeDataStrategy(const QString& string) {
    if (string == "Drop") {
        return ViveControllerManager::OutOfRangeDataStrategy::Drop;
    } else if (string == "Freeze") {
        return ViveControllerManager::OutOfRangeDataStrategy::Freeze;
    } else {
        return ViveControllerManager::OutOfRangeDataStrategy::None;
    }
}

bool ViveControllerManager::isDesktopMode() {
    if (_container) {
        return !_container->getActiveDisplayPlugin()->isHmd();
@@ -288,8 +310,10 @@ void ViveControllerManager::loadSettings() {
        if (_inputDevice) {
            const double DEFAULT_ARM_CIRCUMFERENCE = 0.33;
            const double DEFAULT_SHOULDER_WIDTH = 0.48;
            const QString DEFAULT_OUT_OF_RANGE_STRATEGY = "Drop";
            _inputDevice->_armCircumference = settings.value("armCircumference", QVariant(DEFAULT_ARM_CIRCUMFERENCE)).toDouble();
            _inputDevice->_shoulderWidth = settings.value("shoulderWidth", QVariant(DEFAULT_SHOULDER_WIDTH)).toDouble();
            _inputDevice->_outOfRangeDataStrategy = stringToOutOfRangeDataStrategy(settings.value("outOfRangeDataStrategy", QVariant(DEFAULT_OUT_OF_RANGE_STRATEGY)).toString());
        }
    }
    settings.endGroup();
@@ -303,6 +327,7 @@ void ViveControllerManager::saveSettings() const {
        if (_inputDevice) {
            settings.setValue(QString("armCircumference"), _inputDevice->_armCircumference);
            settings.setValue(QString("shoulderWidth"), _inputDevice->_shoulderWidth);
            settings.setValue(QString("outOfRangeDataStrategy"), outOfRangeDataStrategyToString(_inputDevice->_outOfRangeDataStrategy));
        }
    }
    settings.endGroup();
@@ -446,6 +471,8 @@ void ViveControllerManager::InputDevice::configureCalibrationSettings(const QJso
            hmdDesktopTracking = iter.value().toBool();
        } else if (iter.key() == "desktopMode") {
            hmdDesktopMode = iter.value().toBool();
        } else if (iter.key() == "outOfRangeDataStrategy") {
            _outOfRangeDataStrategy = stringToOutOfRangeDataStrategy(iter.value().toString());
        }
        iter++;
    }
@@ -468,6 +495,7 @@ QJsonObject ViveControllerManager::InputDevice::configurationSettings() {
    configurationSettings["puckCount"] = (int)_validTrackedObjects.size();
    configurationSettings["armCircumference"] = (double)_armCircumference * M_TO_CM;
    configurationSettings["shoulderWidth"] = (double)_shoulderWidth * M_TO_CM;
    configurationSettings["outOfRangeDataStrategy"] = outOfRangeDataStrategyToString(_outOfRangeDataStrategy);
    return configurationSettings;
}

@@ -484,6 +512,10 @@ void ViveControllerManager::InputDevice::emitCalibrationStatus() {
    emit inputConfiguration->calibrationStatus(status);
}

static controller::Pose buildPose(const glm::mat4& mat, const glm::vec3& linearVelocity, const glm::vec3& angularVelocity) {
    return controller::Pose(extractTranslation(mat), glmExtractRotation(mat), linearVelocity, angularVelocity);
}

void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData) {
    uint32_t poseIndex = controller::TRACKED_OBJECT_00 + deviceIndex;
    printDeviceTrackingResultChange(deviceIndex);
@@ -492,35 +524,48 @@ void ViveControllerManager::InputDevice::handleTrackedObject(uint32_t deviceInde
        _nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid &&
        poseIndex <= controller::TRACKED_OBJECT_15) {

        mat4& mat = mat4();
        vec3 linearVelocity = vec3();
        vec3 angularVelocity = vec3();
        // check if the device is tracking out of range, then process the correct pose depending on the result.
        if (_nextSimPoseData.vrPoses[deviceIndex].eTrackingResult != vr::TrackingResult_Running_OutOfRange) {
            mat = _nextSimPoseData.poses[deviceIndex];
            linearVelocity = _nextSimPoseData.linearVelocities[deviceIndex];
            angularVelocity = _nextSimPoseData.angularVelocities[deviceIndex];
        } else {
            mat = _lastSimPoseData.poses[deviceIndex];
            linearVelocity = _lastSimPoseData.linearVelocities[deviceIndex];
            angularVelocity = _lastSimPoseData.angularVelocities[deviceIndex];

            // make sure that we do not overwrite the pose in the _lastSimPose with incorrect data.
            _nextSimPoseData.poses[deviceIndex] = _lastSimPoseData.poses[deviceIndex];
            _nextSimPoseData.linearVelocities[deviceIndex] = _lastSimPoseData.linearVelocities[deviceIndex];
            _nextSimPoseData.angularVelocities[deviceIndex] = _lastSimPoseData.angularVelocities[deviceIndex];
        controller::Pose pose;
        switch (_outOfRangeDataStrategy) {
        case OutOfRangeDataStrategy::Drop:
        default:
            // Drop - mark all non-Running_OK results as invalid
            if (_nextSimPoseData.vrPoses[deviceIndex].eTrackingResult == vr::TrackingResult_Running_OK) {
                pose = buildPose(_nextSimPoseData.poses[deviceIndex], _nextSimPoseData.linearVelocities[deviceIndex], _nextSimPoseData.angularVelocities[deviceIndex]);
            } else {
                pose.valid = false;
            }
            break;
        case OutOfRangeDataStrategy::None:
            // None - ignore eTrackingResult altogether
            pose = buildPose(_nextSimPoseData.poses[deviceIndex], _nextSimPoseData.linearVelocities[deviceIndex], _nextSimPoseData.angularVelocities[deviceIndex]);
            break;
        case OutOfRangeDataStrategy::Freeze:
            // Freeze - don't invalidate non-Running_OK poses; instead just return the last good pose.
            if (_nextSimPoseData.vrPoses[deviceIndex].eTrackingResult == vr::TrackingResult_Running_OK) {
                pose = buildPose(_nextSimPoseData.poses[deviceIndex], _nextSimPoseData.linearVelocities[deviceIndex], _nextSimPoseData.angularVelocities[deviceIndex]);
            } else {
                pose = buildPose(_lastSimPoseData.poses[deviceIndex], _lastSimPoseData.linearVelocities[deviceIndex], _lastSimPoseData.angularVelocities[deviceIndex]);

                // make sure that we do not overwrite the pose in the _lastSimPose with incorrect data.
                _nextSimPoseData.poses[deviceIndex] = _lastSimPoseData.poses[deviceIndex];
                _nextSimPoseData.linearVelocities[deviceIndex] = _lastSimPoseData.linearVelocities[deviceIndex];
                _nextSimPoseData.angularVelocities[deviceIndex] = _lastSimPoseData.angularVelocities[deviceIndex];
            }
            break;
        }

        controller::Pose pose(extractTranslation(mat), glmExtractRotation(mat), linearVelocity, angularVelocity);
        if (pose.valid) {
            // transform into avatar frame
            glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
            _poseStateMap[poseIndex] = pose.transform(controllerToAvatar);

            // transform into avatar frame
            glm::mat4 controllerToAvatar = glm::inverse(inputCalibrationData.avatarMat) * inputCalibrationData.sensorToWorldMat;
            _poseStateMap[poseIndex] = pose.transform(controllerToAvatar);

            // but _validTrackedObjects remain in sensor frame
            _validTrackedObjects.push_back(std::make_pair(poseIndex, pose));
            _trackedControllers++;
            // but _validTrackedObjects remain in sensor frame
            _validTrackedObjects.push_back(std::make_pair(poseIndex, pose));
            _trackedControllers++;
        } else {
            // insert invalid pose into state map
            _poseStateMap[poseIndex] = pose;
        }
    } else {
        controller::Pose invalidPose;
        _poseStateMap[poseIndex] = invalidPose;
@@ -60,11 +60,18 @@ public:
    virtual void saveSettings() const override;
    virtual void loadSettings() override;

    enum class OutOfRangeDataStrategy {
        None,
        Freeze,
        Drop
    };

private:
    class InputDevice : public controller::InputDevice {
    public:
        InputDevice(vr::IVRSystem*& system);
        bool isHeadControllerMounted() const { return _overrideHead; }

    private:
        // Device functions
        controller::Input::NamedVector getAvailableInputs() const override;
@@ -162,6 +169,7 @@ private:
        FilteredStick _filteredLeftStick;
        FilteredStick _filteredRightStick;
        std::string _headsetName {""};
        OutOfRangeDataStrategy _outOfRangeDataStrategy { OutOfRangeDataStrategy::Drop };

        std::vector<PuckPosePair> _validTrackedObjects;
        std::map<uint32_t, glm::mat4> _pucksPostOffset;
scripts/developer/accelerationFilterApp.js (new file, 222 lines)
@@ -0,0 +1,222 @@
var LEFT_HAND_INDEX = 0;
var RIGHT_HAND_INDEX = 1;
var LEFT_FOOT_INDEX = 2;
var RIGHT_FOOT_INDEX = 3;
var HIPS_INDEX = 4;
var SPINE2_INDEX = 5;

var mappingJson = {
    name: "com.highfidelity.testing.accelerationTest",
    channels: [
        {
            from: "Standard.LeftHand",
            to: "Actions.LeftHand",
            filters: [
                {
                    type: "accelerationLimiter",
                    rotationAccelerationLimit: 2000.0,
                    translationAccelerationLimit: 100.0,
                }
            ]
        },
        {
            from: "Standard.RightHand",
            to: "Actions.RightHand",
            filters: [
                {
                    type: "accelerationLimiter",
                    rotationAccelerationLimit: 2000.0,
                    translationAccelerationLimit: 100.0,
                }
            ]
        },
        {
            from: "Standard.LeftFoot",
            to: "Actions.LeftFoot",
            filters: [
                {
                    type: "accelerationLimiter",
                    rotationAccelerationLimit: 2000.0,
                    translationAccelerationLimit: 100.0,
                }
            ]
        },
        {
            from: "Standard.RightFoot",
            to: "Actions.RightFoot",
            filters: [
                {
                    type: "accelerationLimiter",
                    rotationAccelerationLimit: 2000.0,
                    translationAccelerationLimit: 100.0,
                }
            ]
        },
        {
            from: "Standard.Hips",
            to: "Actions.Hips",
            filters: [
                {
                    type: "accelerationLimiter",
                    rotationAccelerationLimit: 2000.0,
                    translationAccelerationLimit: 100.0,
                }
            ]
        },
        {
            from: "Standard.Spine2",
            to: "Actions.Spine2",
            filters: [
                {
                    type: "accelerationLimiter",
                    rotationAccelerationLimit: 2000.0,
                    translationAccelerationLimit: 100.0,
                }
            ]
        }
    ]
};

//
// tablet app boiler plate
//

var TABLET_BUTTON_NAME = "ACCFILT";
var HTML_URL = "https://s3.amazonaws.com/hifi-public/tony/html/accelerationFilterApp.html?2";

var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var tabletButton = tablet.addButton({
    text: TABLET_BUTTON_NAME,
    icon: "https://s3.amazonaws.com/hifi-public/tony/icons/tpose-i.svg",
    activeIcon: "https://s3.amazonaws.com/hifi-public/tony/icons/tpose-a.svg"
});

tabletButton.clicked.connect(function () {
    if (shown) {
        tablet.gotoHomeScreen();
    } else {
        tablet.gotoWebScreen(HTML_URL);
    }
});

var shown = false;

function onScreenChanged(type, url) {
    if (type === "Web" && url === HTML_URL) {
        tabletButton.editProperties({isActive: true});
        if (!shown) {
            // hook up to event bridge
            tablet.webEventReceived.connect(onWebEventReceived);
            shownChanged(true);
        }
        shown = true;
    } else {
        tabletButton.editProperties({isActive: false});
        if (shown) {
            // disconnect from event bridge
            tablet.webEventReceived.disconnect(onWebEventReceived);
            shownChanged(false);
        }
        shown = false;
    }
}

function getTranslationAccelerationLimit(i) {
    return mappingJson.channels[i].filters[0].translationAccelerationLimit;
}
function setTranslationAccelerationLimit(i, value) {
    mappingJson.channels[i].filters[0].translationAccelerationLimit = value;
    mappingChanged();
}
function getRotationAccelerationLimit(i) {
    return mappingJson.channels[i].filters[0].rotationAccelerationLimit;
}
function setRotationAccelerationLimit(i, value) {
    mappingJson.channels[i].filters[0].rotationAccelerationLimit = value; mappingChanged();
}

function onWebEventReceived(msg) {
    if (msg.name === "init-complete") {
        var values = [
            {name: "left-hand-translation-acceleration-limit", val: getTranslationAccelerationLimit(LEFT_HAND_INDEX), checked: false},
            {name: "left-hand-rotation-acceleration-limit", val: getRotationAccelerationLimit(LEFT_HAND_INDEX), checked: false},
            {name: "right-hand-translation-acceleration-limit", val: getTranslationAccelerationLimit(RIGHT_HAND_INDEX), checked: false},
            {name: "right-hand-rotation-acceleration-limit", val: getRotationAccelerationLimit(RIGHT_HAND_INDEX), checked: false},
            {name: "left-foot-translation-acceleration-limit", val: getTranslationAccelerationLimit(LEFT_FOOT_INDEX), checked: false},
            {name: "left-foot-rotation-acceleration-limit", val: getRotationAccelerationLimit(LEFT_FOOT_INDEX), checked: false},
            {name: "right-foot-translation-acceleration-limit", val: getTranslationAccelerationLimit(RIGHT_FOOT_INDEX), checked: false},
            {name: "right-foot-rotation-acceleration-limit", val: getRotationAccelerationLimit(RIGHT_FOOT_INDEX), checked: false},
            {name: "hips-translation-acceleration-limit", val: getTranslationAccelerationLimit(HIPS_INDEX), checked: false},
            {name: "hips-rotation-acceleration-limit", val: getRotationAccelerationLimit(HIPS_INDEX), checked: false},
            {name: "spine2-translation-acceleration-limit", val: getTranslationAccelerationLimit(SPINE2_INDEX), checked: false},
            {name: "spine2-rotation-acceleration-limit", val: getRotationAccelerationLimit(SPINE2_INDEX), checked: false}
        ];
        tablet.emitScriptEvent(JSON.stringify(values));
    } else if (msg.name === "left-hand-translation-acceleration-limit") {
        setTranslationAccelerationLimit(LEFT_HAND_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "left-hand-rotation-acceleration-limit") {
        setRotationAccelerationLimit(LEFT_HAND_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "right-hand-translation-acceleration-limit") {
        setTranslationAccelerationLimit(RIGHT_HAND_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "right-hand-rotation-acceleration-limit") {
        setRotationAccelerationLimit(RIGHT_HAND_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "left-foot-translation-acceleration-limit") {
        setTranslationAccelerationLimit(LEFT_FOOT_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "left-foot-rotation-acceleration-limit") {
        setRotationAccelerationLimit(LEFT_FOOT_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "right-foot-translation-acceleration-limit") {
        setTranslationAccelerationLimit(RIGHT_FOOT_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "right-foot-rotation-acceleration-limit") {
        setRotationAccelerationLimit(RIGHT_FOOT_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "hips-translation-acceleration-limit") {
        setTranslationAccelerationLimit(HIPS_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "hips-rotation-acceleration-limit") {
        setRotationAccelerationLimit(HIPS_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "spine2-translation-acceleration-limit") {
        setTranslationAccelerationLimit(SPINE2_INDEX, parseInt(msg.val, 10));
    } else if (msg.name === "spine2-rotation-acceleration-limit") {
        setRotationAccelerationLimit(SPINE2_INDEX, parseInt(msg.val, 10));
    }
}

tablet.screenChanged.connect(onScreenChanged);

function shutdownTabletApp() {
    tablet.removeButton(tabletButton);
    if (shown) {
        tablet.webEventReceived.disconnect(onWebEventReceived);
        tablet.gotoHomeScreen();
    }
    tablet.screenChanged.disconnect(onScreenChanged);
}

//
// end tablet app boiler plate
//

var mapping;
function mappingChanged() {
    if (mapping) {
        mapping.disable();
    }
    mapping = Controller.parseMapping(JSON.stringify(mappingJson));
    mapping.enable();
}

function shownChanged(newShown) {
    if (newShown) {
        mappingChanged();
    } else {
        mapping.disable();
    }
}

mappingChanged();

Script.scriptEnding.connect(function() {
    if (mapping) {
        mapping.disable();
    }
    tablet.removeButton(tabletButton);
});
scripts/developer/exponentialFilterApp.js (new file, 240 lines)
@ -0,0 +1,240 @@
|
|||
var LEFT_HAND_INDEX = 0;
var RIGHT_HAND_INDEX = 1;
var LEFT_FOOT_INDEX = 2;
var RIGHT_FOOT_INDEX = 3;
var HIPS_INDEX = 4;
var SPINE2_INDEX = 5;

var HAND_SMOOTHING_TRANSLATION = 0.3;
var HAND_SMOOTHING_ROTATION = 0.15;
var FOOT_SMOOTHING_TRANSLATION = 0.3;
var FOOT_SMOOTHING_ROTATION = 0.15;
var TORSO_SMOOTHING_TRANSLATION = 0.3;
var TORSO_SMOOTHING_ROTATION = 0.16;

var mappingJson = {
    name: "com.highfidelity.testing.exponentialFilterApp",
    channels: [
        {
            from: "Standard.LeftHand",
            to: "Actions.LeftHand",
            filters: [
                {
                    type: "exponentialSmoothing",
                    translation: HAND_SMOOTHING_TRANSLATION,
                    rotation: HAND_SMOOTHING_ROTATION
                }
            ]
        },
        {
            from: "Standard.RightHand",
            to: "Actions.RightHand",
            filters: [
                {
                    type: "exponentialSmoothing",
                    translation: HAND_SMOOTHING_TRANSLATION,
                    rotation: HAND_SMOOTHING_ROTATION
                }
            ]
        },
        {
            from: "Standard.LeftFoot",
            to: "Actions.LeftFoot",
            filters: [
                {
                    type: "exponentialSmoothing",
                    translation: FOOT_SMOOTHING_TRANSLATION,
                    rotation: FOOT_SMOOTHING_ROTATION
                }
            ]
        },
        {
            from: "Standard.RightFoot",
            to: "Actions.RightFoot",
            filters: [
                {
                    type: "exponentialSmoothing",
                    translation: FOOT_SMOOTHING_TRANSLATION,
                    rotation: FOOT_SMOOTHING_ROTATION
                }
            ]
        },
        {
            from: "Standard.Hips",
            to: "Actions.Hips",
            filters: [
                {
                    type: "exponentialSmoothing",
                    translation: TORSO_SMOOTHING_TRANSLATION,
                    rotation: TORSO_SMOOTHING_ROTATION
                }
            ]
        },
        {
            from: "Standard.Spine2",
            to: "Actions.Spine2",
            filters: [
                {
                    type: "exponentialSmoothing",
                    translation: TORSO_SMOOTHING_TRANSLATION,
                    rotation: TORSO_SMOOTHING_ROTATION
                }
            ]
        }
    ]
};

//
// tablet app boiler plate
//

var TABLET_BUTTON_NAME = "EXPFILT";
var HTML_URL = "https://s3.amazonaws.com/hifi-public/tony/html/exponentialFilterApp.html?7";

var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var tabletButton = tablet.addButton({
    text: TABLET_BUTTON_NAME,
    icon: "https://s3.amazonaws.com/hifi-public/tony/icons/tpose-i.svg",
    activeIcon: "https://s3.amazonaws.com/hifi-public/tony/icons/tpose-a.svg"
});

tabletButton.clicked.connect(function () {
    if (shown) {
        tablet.gotoHomeScreen();
    } else {
        tablet.gotoWebScreen(HTML_URL);
    }
});

var shown = false;

function onScreenChanged(type, url) {
    if (type === "Web" && url === HTML_URL) {
        tabletButton.editProperties({isActive: true});
        if (!shown) {
            // hook up to event bridge
            tablet.webEventReceived.connect(onWebEventReceived);
            shownChanged(true);
        }
        shown = true;
    } else {
        tabletButton.editProperties({isActive: false});
        if (shown) {
            // disconnect from event bridge
            tablet.webEventReceived.disconnect(onWebEventReceived);
            shownChanged(false);
        }
        shown = false;
    }
}

function getTranslation(i) {
    return mappingJson.channels[i].filters[0].translation;
}
function setTranslation(i, value) {
    mappingJson.channels[i].filters[0].translation = value;
    mappingChanged();
}
function getRotation(i) {
    return mappingJson.channels[i].filters[0].rotation;
}
function setRotation(i, value) {
    mappingJson.channels[i].filters[0].rotation = value;
    mappingChanged();
}

function onWebEventReceived(msg) {
    if (msg.name === "init-complete") {
        var values = [
            {name: "enable-filtering", val: filterEnabled ? "on" : "off", checked: false},
            {name: "left-hand-translation", val: getTranslation(LEFT_HAND_INDEX), checked: false},
            {name: "left-hand-rotation", val: getRotation(LEFT_HAND_INDEX), checked: false},
            {name: "right-hand-translation", val: getTranslation(RIGHT_HAND_INDEX), checked: false},
            {name: "right-hand-rotation", val: getRotation(RIGHT_HAND_INDEX), checked: false},
            {name: "left-foot-translation", val: getTranslation(LEFT_FOOT_INDEX), checked: false},
            {name: "left-foot-rotation", val: getRotation(LEFT_FOOT_INDEX), checked: false},
            {name: "right-foot-translation", val: getTranslation(RIGHT_FOOT_INDEX), checked: false},
            {name: "right-foot-rotation", val: getRotation(RIGHT_FOOT_INDEX), checked: false},
            {name: "hips-translation", val: getTranslation(HIPS_INDEX), checked: false},
            {name: "hips-rotation", val: getRotation(HIPS_INDEX), checked: false},
            {name: "spine2-translation", val: getTranslation(SPINE2_INDEX), checked: false},
            {name: "spine2-rotation", val: getRotation(SPINE2_INDEX), checked: false}
        ];
        tablet.emitScriptEvent(JSON.stringify(values));
    } else if (msg.name === "enable-filtering") {
        if (msg.val === "on") {
            filterEnabled = true;
        } else if (msg.val === "off") {
            filterEnabled = false;
        }
        mappingChanged();
    } else if (msg.name === "left-hand-translation") {
        setTranslation(LEFT_HAND_INDEX, Number(msg.val));
    } else if (msg.name === "left-hand-rotation") {
        setRotation(LEFT_HAND_INDEX, Number(msg.val));
    } else if (msg.name === "right-hand-translation") {
        setTranslation(RIGHT_HAND_INDEX, Number(msg.val));
    } else if (msg.name === "right-hand-rotation") {
        setRotation(RIGHT_HAND_INDEX, Number(msg.val));
    } else if (msg.name === "left-foot-translation") {
        setTranslation(LEFT_FOOT_INDEX, Number(msg.val));
    } else if (msg.name === "left-foot-rotation") {
        setRotation(LEFT_FOOT_INDEX, Number(msg.val));
    } else if (msg.name === "right-foot-translation") {
        setTranslation(RIGHT_FOOT_INDEX, Number(msg.val));
    } else if (msg.name === "right-foot-rotation") {
        setRotation(RIGHT_FOOT_INDEX, Number(msg.val));
    } else if (msg.name === "hips-translation") {
        setTranslation(HIPS_INDEX, Number(msg.val));
    } else if (msg.name === "hips-rotation") {
        setRotation(HIPS_INDEX, Number(msg.val));
    } else if (msg.name === "spine2-translation") {
        setTranslation(SPINE2_INDEX, Number(msg.val));
    } else if (msg.name === "spine2-rotation") {
        setRotation(SPINE2_INDEX, Number(msg.val));
    }
}

tablet.screenChanged.connect(onScreenChanged);

function shutdownTabletApp() {
    tablet.removeButton(tabletButton);
    if (shown) {
        tablet.webEventReceived.disconnect(onWebEventReceived);
        tablet.gotoHomeScreen();
    }
    tablet.screenChanged.disconnect(onScreenChanged);
}

//
// end tablet app boiler plate
//

var filterEnabled = true;
var mapping;
function mappingChanged() {
    if (mapping) {
        mapping.disable();
    }
    if (filterEnabled) {
        mapping = Controller.parseMapping(JSON.stringify(mappingJson));
        mapping.enable();
    }
}

function shownChanged(newShown) {
    if (newShown) {
        mappingChanged();
    } else {
        mapping.disable();
    }
}

mappingChanged();

Script.scriptEnding.connect(function() {
    if (mapping) {
        mapping.disable();
    }
    tablet.removeButton(tabletButton);
});
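The "exponentialSmoothing" filter configured by this script is implemented in the interface's native controller-filter code, which is not part of this diff, so the sketch below is only an assumed model of its per-frame behaviour, expressed with the public Vec3/Quat script API. A constant of 1.0 passes the pose through unchanged, while smaller constants follow the target more slowly (more smoothing, more latency). The function name and variables are illustrative, not part of the repository.

// Illustrative sketch only -- assumed behaviour of an exponentialSmoothing pose filter.
function smoothPose(previousOutput, newSample, translationConstant, rotationConstant) {
    return {
        translation: Vec3.mix(previousOutput.translation, newSample.translation, translationConstant),
        rotation: Quat.slerp(previousOutput.rotation, newSample.rotation, rotationConstant)
    };
}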
@@ -21,16 +21,14 @@ function shutdown() {
var BLUE = {x: 0, y: 0, z: 1, w: 1};

function update(dt) {
    if (Controller.Hardware.Vive) {
        TRACKED_OBJECT_POSES.forEach(function (key) {
            var pose = Controller.getPoseValue(Controller.Standard[key]);
            if (pose.valid) {
                DebugDraw.addMyAvatarMarker(key, pose.rotation, pose.translation, BLUE);
            } else {
                DebugDraw.removeMyAvatarMarker(key);
            }
        });
    }
    TRACKED_OBJECT_POSES.forEach(function (key) {
        var pose = Controller.getPoseValue(Controller.Standard[key]);
        if (pose.valid) {
            DebugDraw.addMyAvatarMarker(key, pose.rotation, pose.translation, BLUE);
        } else {
            DebugDraw.removeMyAvatarMarker(key);
        }
    });
}

init();
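The hunk above relies on TRACKED_OBJECT_POSES, which is declared earlier in that script and does not appear in this diff. Purely for orientation, a definition consistent with the TrackedObject00 through TrackedObject15 action names used elsewhere in this pull request might look like the following; this is a hypothetical reconstruction, not the file's actual code.

// Hypothetical reconstruction for context only.
var TRACKED_OBJECT_POSES = [];
for (var i = 0; i < 16; i++) {
    TRACKED_OBJECT_POSES.push("TrackedObject" + ("0" + i).slice(-2));
}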
438  scripts/developer/tests/filtered-puck-attach.js  Normal file

@@ -0,0 +1,438 @@
//
// Created by Anthony J. Thibault on 2017/06/20
// Modified by Robbie Uvanni to support multiple pucks and easier placement of pucks on entities, on 2017/08/01
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// When this script is running, a new app button, named "PUCKATTACH", will be added to the toolbar/tablet.
// Click this app to bring up the puck attachment panel.
//

/* eslint indent: ["error", 4, { "outerIIFEBody": 0 }] */
/* global Xform */
Script.include("/~/system/libraries/Xform.js");

(function() { // BEGIN LOCAL_SCOPE

var TABLET_BUTTON_NAME = "PUCKATTACH";
var TABLET_APP_URL = "https://s3.amazonaws.com/hifi-public/tony/html/filtered-puck-attach.html?2";
var NUM_TRACKED_OBJECTS = 16;

var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
var tabletButton = tablet.addButton({
    text: TABLET_BUTTON_NAME,
    icon: "https://s3.amazonaws.com/hifi-public/tony/icons/puck-i.svg",
    activeIcon: "https://s3.amazonaws.com/hifi-public/tony/icons/puck-a.svg"
});

var shown = false;
function onScreenChanged(type, url) {
    if (type === "Web" && url === TABLET_APP_URL) {
        tabletButton.editProperties({isActive: true});
        if (!shown) {
            // hook up to event bridge
            tablet.webEventReceived.connect(onWebEventReceived);
            shownChanged(true);
        }
        shown = true;
    } else {
        tabletButton.editProperties({isActive: false});
        if (shown) {
            // disconnect from event bridge
            tablet.webEventReceived.disconnect(onWebEventReceived);
            shownChanged(false);
        }
        shown = false;
    }
}
tablet.screenChanged.connect(onScreenChanged);

function pad(num, size) {
    var tempString = "000000000" + num;
    return tempString.substr(tempString.length - size);
}
function indexToTrackedObjectName(index) {
    return "TrackedObject" + pad(index, 2);
}
function getAvailableTrackedObjects() {
    var available = [];
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        var key = indexToTrackedObjectName(i);
        var pose = Controller.getPoseValue(Controller.Standard[key]);
        if (pose && pose.valid) {
            available.push(i);
        }
    }
    return available;
}
function sendAvailableTrackedObjects() {
    tablet.emitScriptEvent(JSON.stringify({
        pucks: getAvailableTrackedObjects(),
        selectedPuck: ((lastPuck === undefined) ? -1 : lastPuck.name)
    }));
}

function getRelativePosition(origin, rotation, offset) {
    var relativeOffset = Vec3.multiplyQbyV(rotation, offset);
    var worldPosition = Vec3.sum(origin, relativeOffset);
    return worldPosition;
}
function getPropertyForEntity(entityID, propertyName) {
    return Entities.getEntityProperties(entityID, [propertyName])[propertyName];
}
function entityExists(entityID) {
    return Object.keys(Entities.getEntityProperties(entityID)).length > 0;
}

var VIVE_PUCK_MODEL = "https://s3.amazonaws.com/hifi-public/tony/vive_tracker_puck_y180z180.obj";
var VIVE_PUCK_DIMENSIONS = { x: 0.0945, y: 0.0921, z: 0.0423 }; // 1/1000th scale of model
var VIVE_PUCK_SEARCH_DISTANCE = 1.5; // metres
var VIVE_PUCK_SPAWN_DISTANCE = 0.5; // metres
var VIVE_PUCK_TRACKED_OBJECT_MAX_DISTANCE = 10.0; // metres
var VIVE_PUCK_NAME = "Tracked Puck";

var trackedPucks = { };
var lastPuck;

var DEFAULT_ROTATION_SMOOTHING_CONSTANT = 1.0; // no smoothing
var DEFAULT_TRANSLATION_SMOOTHING_CONSTANT = 1.0; // no smoothing
var DEFAULT_TRANSLATION_ACCELERATION_LIMIT = 1000; // only extreme accelerations are smoothed
var DEFAULT_ROTATION_ACCELERATION_LIMIT = 10000; // only extreme accelerations are smoothed
var DEFAULT_TRANSLATION_SNAP_THRESHOLD = 0; // no snapping
var DEFAULT_ROTATION_SNAP_THRESHOLD = 0; // no snapping

function buildMappingJson() {
    var obj = {name: "com.highfidelity.testing.filteredPuckAttach", channels: []};
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        obj.channels.push({
            from: "Vive." + indexToTrackedObjectName(i),
            to: "Standard." + indexToTrackedObjectName(i),
            filters: [
                {
                    type: "accelerationLimiter",
                    translationAccelerationLimit: DEFAULT_TRANSLATION_ACCELERATION_LIMIT,
                    rotationAccelerationLimit: DEFAULT_ROTATION_ACCELERATION_LIMIT,
                    translationSnapThreshold: DEFAULT_TRANSLATION_SNAP_THRESHOLD,
                    rotationSnapThreshold: DEFAULT_ROTATION_SNAP_THRESHOLD
                },
                {
                    type: "exponentialSmoothing",
                    translation: DEFAULT_TRANSLATION_SMOOTHING_CONSTANT,
                    rotation: DEFAULT_ROTATION_SMOOTHING_CONSTANT
                }
            ]
        });
    }
    return obj;
}
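// Editorial note (not part of the original file): each channel above chains two filters.
// The "accelerationLimiter" is assumed to reject samples whose implied acceleration
// exceeds the configured limit, snapping back to the raw data once the error grows past
// the snap threshold; "exponentialSmoothing" then blends whatever survives. A rough,
// hypothetical model of the translation half, for orientation only:
//
//     function limitTranslation(prevPrev, prev, next, dt, limit) {
//         // second difference approximates acceleration: (next - 2*prev + prevPrev) / dt^2
//         var accel = Vec3.multiply(1.0 / (dt * dt),
//             Vec3.sum(next, Vec3.sum(Vec3.multiply(-2.0, prev), prevPrev)));
//         return (Vec3.length(accel) > limit) ? prev : next; // hold the last good sample
//     }
//
// With the defaults above (limits of 1000 and 10000, snap thresholds of 0, smoothing
// constants of 1.0) the chain is effectively pass-through until the web UI lowers them.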
var mappingJson = buildMappingJson();

var mapping;
function mappingChanged() {
    if (mapping) {
        mapping.disable();
    }
    mapping = Controller.parseMapping(JSON.stringify(mappingJson));
    mapping.enable();
}

function shownChanged(newShown) {
    if (newShown) {
        mappingChanged();
    } else {
        mapping.disable();
    }
}

function setTranslationAccelerationLimit(value) {
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        mappingJson.channels[i].filters[0].translationAccelerationLimit = value;
    }
    mappingChanged();
}

function setTranslationSnapThreshold(value) {
    // convert from mm to metres
    var MM_PER_M = 1000;
    var meters = value / MM_PER_M;
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        mappingJson.channels[i].filters[0].translationSnapThreshold = meters;
    }
    mappingChanged();
}

function setRotationAccelerationLimit(value) {
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        mappingJson.channels[i].filters[0].rotationAccelerationLimit = value;
    }
    mappingChanged();
}

function setRotationSnapThreshold(value) {
    // convert from degrees to radians
    var PI_IN_DEGREES = 180;
    var radians = value * (Math.PI / PI_IN_DEGREES);
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        mappingJson.channels[i].filters[0].rotationSnapThreshold = radians;
    }
    mappingChanged();
}

function setTranslationSmoothingConstant(value) {
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        mappingJson.channels[i].filters[1].translation = value;
    }
    mappingChanged();
}

function setRotationSmoothingConstant(value) {
    var i;
    for (i = 0; i < NUM_TRACKED_OBJECTS; i++) {
        mappingJson.channels[i].filters[1].rotation = value;
    }
    mappingChanged();
}

function createPuck(puck) {
    // create a puck entity and add it to our list of pucks
    var action = indexToTrackedObjectName(puck.puckno);
    var pose = Controller.getPoseValue(Controller.Standard[action]);

    if (pose && pose.valid) {
        var spawnOffset = Vec3.multiply(Vec3.FRONT, VIVE_PUCK_SPAWN_DISTANCE);
        var spawnPosition = getRelativePosition(MyAvatar.position, MyAvatar.orientation, spawnOffset);

        // should be an overlay
        var puckEntityProperties = {
            name: VIVE_PUCK_NAME,
            type: "Model",
            modelURL: VIVE_PUCK_MODEL,
            dimensions: VIVE_PUCK_DIMENSIONS,
            position: spawnPosition,
            userData: '{ "grabbableKey": { "grabbable": true, "kinematic": false } }'
        };

        var puckEntityID = Entities.addEntity(puckEntityProperties);

        // if we've already created this puck, destroy it
        if (trackedPucks.hasOwnProperty(puck.puckno)) {
            destroyPuck(puck.puckno);
        }
        // if we had an unfinalized puck, destroy it
        if (lastPuck !== undefined) {
            destroyPuck(lastPuck.name);
        }
        // create our new unfinalized puck
        trackedPucks[puck.puckno] = {
            puckEntityID: puckEntityID,
            trackedEntityID: ""
        };
        lastPuck = trackedPucks[puck.puckno];
        lastPuck.name = Number(puck.puckno);
    }
}
function finalizePuck(puckName) {
    // find the nearest entity and parent it to the puck

    if (!trackedPucks.hasOwnProperty(puckName)) {
        print('finalizePuck: unknown puck ' + puckName);
        return;
    }
    if (lastPuck === undefined) {
        print('finalizePuck: there is no unfinalized puck');
        return;
    }
    if (lastPuck.name !== Number(puckName)) {
        print('finalizePuck: ' + puckName + ' is not the unfinalized puck');
        return;
    }

    var puckPosition = getPropertyForEntity(lastPuck.puckEntityID, "position");
    var foundEntities = Entities.findEntities(puckPosition, VIVE_PUCK_SEARCH_DISTANCE);

    var foundEntity;
    var leastDistance = Number.MAX_VALUE;

    for (var i = 0; i < foundEntities.length; i++) {
        var entity = foundEntities[i];

        if (getPropertyForEntity(entity, "name") !== VIVE_PUCK_NAME) {
            var entityPosition = getPropertyForEntity(entity, "position");
            var d = Vec3.distance(entityPosition, puckPosition);

            if (d < leastDistance) {
                leastDistance = d;
                foundEntity = entity;
            }
        }
    }

    if (foundEntity) {
        lastPuck.trackedEntityID = foundEntity;
        // remember the userData and collisionless flag of the tracked entity, since
        // we're about to overwrite them to make it ungrabbable and collisionless
        lastPuck.trackedEntityUserData = getPropertyForEntity(foundEntity, "userData");
        lastPuck.trackedEntityCollisionFlag = getPropertyForEntity(foundEntity, "collisionless");
        // update properties of the tracked entity
        Entities.editEntity(lastPuck.trackedEntityID, {
            "parentID": lastPuck.puckEntityID,
            "userData": '{ "grabbableKey": { "grabbable": false } }',
            "collisionless": 1
        });
        // remove the reference to the puck since it is now calibrated and finalized
        lastPuck = undefined;
    }
}
function updatePucks() {
    // for each puck, update its position and orientation
    for (var puckName in trackedPucks) {
        if (!trackedPucks.hasOwnProperty(puckName)) {
            continue;
        }
        var action = indexToTrackedObjectName(puckName);
        var pose = Controller.getPoseValue(Controller.Standard[action]);
        if (pose && pose.valid) {
            var puck = trackedPucks[puckName];
            if (puck.trackedEntityID) {
                if (entityExists(puck.trackedEntityID)) {
                    var avatarXform = new Xform(MyAvatar.orientation, MyAvatar.position);
                    var puckXform = new Xform(pose.rotation, pose.translation);
                    var finalXform = Xform.mul(avatarXform, puckXform);

                    var d = Vec3.distance(MyAvatar.position, finalXform.pos);
                    if (d > VIVE_PUCK_TRACKED_OBJECT_MAX_DISTANCE) {
                        print('tried to move tracked object too far away: ' + d);
                        return;
                    }

                    Entities.editEntity(puck.puckEntityID, {
                        position: finalXform.pos,
                        rotation: finalXform.rot
                    });

                    // in case someone grabbed both entities and destroyed the
                    // child/parent relationship
                    Entities.editEntity(puck.trackedEntityID, {
                        parentID: puck.puckEntityID
                    });
                } else {
                    destroyPuck(puckName);
                }
            }
        }
    }
}
function destroyPuck(puckName) {
    // unparent the tracked entity and delete the puck that was its parent
    if (!trackedPucks.hasOwnProperty(puckName)) {
        return;
    }

    var puck = trackedPucks[puckName];
    var puckEntityID = puck.puckEntityID;
    var trackedEntityID = puck.trackedEntityID;

    // remove the puck as a parent entity and restore the tracked entity's
    // former userData and collisionless flag
    Entities.editEntity(trackedEntityID, {
        "parentID": "{00000000-0000-0000-0000-000000000000}",
        "userData": puck.trackedEntityUserData,
        "collisionless": puck.trackedEntityCollisionFlag
    });

    delete trackedPucks[puckName];

    // in some cases the entity deletion may be processed before the parent change,
    // which would delete both the puck and the tracked entity, so we wait 100 ms
    // before deleting the puck, assuming the parent change has occurred by then
    var DELETE_TIMEOUT = 100; // ms
    Script.setTimeout(function() {
        // delete the puck
        Entities.deleteEntity(puckEntityID);
    }, DELETE_TIMEOUT);
}
function destroyPucks() {
    // remove all pucks and unparent their entities
    for (var puckName in trackedPucks) {
        if (trackedPucks.hasOwnProperty(puckName)) {
            destroyPuck(puckName);
        }
    }
}

function onWebEventReceived(msg) {
    var obj = {};

    try {
        obj = JSON.parse(msg);
    } catch (err) {
        return;
    }

    switch (obj.cmd) {
    case "ready":
        sendAvailableTrackedObjects();
        break;
    case "create":
        createPuck(obj);
        break;
    case "finalize":
        finalizePuck(obj.puckno);
        break;
    case "destroy":
        destroyPuck(obj.puckno);
        break;
    case "translation-acceleration-limit":
        setTranslationAccelerationLimit(Number(obj.val));
        break;
    case "translation-snap-threshold":
        setTranslationSnapThreshold(Number(obj.val));
        break;
    case "rotation-acceleration-limit":
        setRotationAccelerationLimit(Number(obj.val));
        break;
    case "rotation-snap-threshold":
        setRotationSnapThreshold(Number(obj.val));
        break;
    case "translation-smoothing-constant":
        setTranslationSmoothingConstant(Number(obj.val));
        break;
    case "rotation-smoothing-constant":
        setRotationSmoothingConstant(Number(obj.val));
        break;
    }
}

Script.update.connect(updatePucks);
Script.scriptEnding.connect(function () {
    tablet.removeButton(tabletButton);
    destroyPucks();
    if (shown) {
        tablet.webEventReceived.disconnect(onWebEventReceived);
        tablet.gotoHomeScreen();
    }
    tablet.screenChanged.disconnect(onScreenChanged);
    if (mapping) {
        mapping.disable();
    }
});
tabletButton.clicked.connect(function () {
    if (shown) {
        tablet.gotoHomeScreen();
    } else {
        tablet.gotoWebScreen(TABLET_APP_URL);
    }
});
}()); // END LOCAL_SCOPE
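The companion HTML page (filtered-puck-attach.html) is not included in this pull request, so the exact web-side wiring is an assumption; a setting change would presumably be sent from the tablet page roughly as follows, with cmd matching one of the cases handled by onWebEventReceived() above.

// Hypothetical web-side snippet (runs in the tablet page, not in this script):
EventBridge.emitWebEvent(JSON.stringify({ cmd: "translation-acceleration-limit", val: "5" }));
EventBridge.emitWebEvent(JSON.stringify({ cmd: "rotation-smoothing-constant", val: "0.5" }));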
@@ -85,7 +85,7 @@ function entityExists(entityID) {
    return Object.keys(Entities.getEntityProperties(entityID)).length > 0;
}

var VIVE_PUCK_MODEL = "http://content.highfidelity.com/seefo/production/puck-attach/vive_tracker_puck.obj";
var VIVE_PUCK_MODEL = "https://s3.amazonaws.com/hifi-public/tony/vive_tracker_puck_y180z180.obj";
var VIVE_PUCK_DIMENSIONS = { x: 0.0945, y: 0.0921, z: 0.0423 }; // 1/1000th scale of model
var VIVE_PUCK_SEARCH_DISTANCE = 1.5; // metres
var VIVE_PUCK_SPAWN_DISTANCE = 0.5; // metres
@@ -304,4 +304,4 @@ tabletButton.clicked.connect(function () {
        tablet.gotoWebScreen(TABLET_APP_URL);
    }
});
}()); // END LOCAL_SCOPE
}()); // END LOCAL_SCOPE