From 15cf008f130ea29e1b91576f3e790bb2636d494a Mon Sep 17 00:00:00 2001
From: "Anthony J. Thibault"
Date: Tue, 7 Jul 2015 15:15:35 -0700
Subject: [PATCH] hand controllers and arrow keys now work

---
 interface/src/Application.cpp                | 19 ++++++--
 interface/src/Util.cpp                       | 27 +++++++++++
 interface/src/avatar/MyAvatar.cpp            | 45 ++++---------------
 .../openvr/OpenVrDisplayPlugin.cpp           | 42 -----------------
 .../input-plugins/ViveControllerManager.cpp  | 11 ++---
 5 files changed, 58 insertions(+), 86 deletions(-)

diff --git a/interface/src/Application.cpp b/interface/src/Application.cpp
index e0eb5702d5..af0e567768 100644
--- a/interface/src/Application.cpp
+++ b/interface/src/Application.cpp
@@ -966,7 +966,7 @@ void Application::paintGL() {
         //_myCamera.setRotation(_myAvatar->getWorldAlignedOrientation());

         // AJT: no actually we do want the roll and pitch
-        _myCamera.setRotation(getHeadOrientation());
+        _myCamera.setRotation(glm::quat_cast(_myAvatar->getSensorToWorldMat()) * getHeadOrientation());
     }

     /*
@@ -2788,6 +2788,11 @@ void Application::update(float deltaTime) {
     }
 }

+// AJT: hack
+extern const int NUM_MARKERS;
+extern glm::mat4 markerMats[];
+extern glm::vec4 markerColors[];
+
 void Application::setPalmData(Hand* hand, UserInputMapper::PoseValue pose, int index) {
     PalmData* palm;
     bool foundHand = false;
@@ -2813,8 +2818,16 @@ void Application::setPalmData(Hand* hand, UserInputMapper::PoseValue pose, int i

     // TODO: velocity filters, tip velocities, et.c
     // see SixenseManager
-    palm->setRawPosition(pose.getTranslation());
-    palm->setRawRotation(pose.getRotation());
+    // transform from sensor space, to world space, to avatar model space.
+    glm::mat4 poseMat = createMatFromQuatAndPos(pose.getRotation(), pose.getTranslation());
+    glm::mat4 sensorToWorldMat = _myAvatar->getSensorToWorldMat();
+    glm::mat4 modelMat = createMatFromQuatAndPos(_myAvatar->getOrientation(), _myAvatar->getPosition());
+    glm::mat4 objectPose = glm::inverse(modelMat) * sensorToWorldMat * poseMat;
+
+    palm->setRawPosition(extractTranslation(objectPose));
+    palm->setRawRotation(glm::quat_cast(objectPose));
+
+    markerMats[index] = sensorToWorldMat * poseMat;
 }

 void Application::emulateMouse(Hand* hand, float click, float shift, int index) {
diff --git a/interface/src/Util.cpp b/interface/src/Util.cpp
index bf4df3f3d2..99b58edaa5 100644
--- a/interface/src/Util.cpp
+++ b/interface/src/Util.cpp
@@ -33,6 +33,13 @@

 using namespace std;

+// AJT HACK, I'm using these markers for debugging.
+// extern them and set them in other cpp files and they will be rendered
+// with the world box.
+const int NUM_MARKERS = 4;
+glm::mat4 markerMats[NUM_MARKERS];
+glm::vec4 markerColors[NUM_MARKERS] = {{1, 0, 0, 1}, {0, 1, 0, 1}, {0, 0, 1, 1}, {0, 1, 1, 1}};
+
 void renderWorldBox(gpu::Batch& batch) {
     auto geometryCache = DependencyManager::get<GeometryCache>();

@@ -71,6 +78,26 @@ void renderWorldBox(gpu::Batch& batch) {
     transform.setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE));
     batch.setModelTransform(transform);
     geometryCache->renderSphere(batch, MARKER_RADIUS, 10, 10, grey);
+
+    // draw marker spheres
+    for (int i = 0; i < NUM_MARKERS; i++) {
+        transform.setTranslation(extractTranslation(markerMats[i]));
+        batch.setModelTransform(transform);
+        geometryCache->renderSphere(batch, 0.02f, 10, 10, markerColors[i]);
+    }
+
+    // draw marker axes
+    auto identity = Transform{};
+    batch.setModelTransform(identity);
+    for (int i = 0; i < NUM_MARKERS; i++) {
+        glm::vec3 base = extractTranslation(markerMats[i]);
+        glm::vec3 xAxis = transformPoint(markerMats[i], glm::vec3(1, 0, 0));
+        glm::vec3 yAxis = transformPoint(markerMats[i], glm::vec3(0, 1, 0));
+        glm::vec3 zAxis = transformPoint(markerMats[i], glm::vec3(0, 0, 1));
+        geometryCache->renderLine(batch, base, xAxis, red);
+        geometryCache->renderLine(batch, base, yAxis, green);
+        geometryCache->renderLine(batch, base, zAxis, blue);
+    }
 }

 // Return a random vector of average length 1
diff --git a/interface/src/avatar/MyAvatar.cpp b/interface/src/avatar/MyAvatar.cpp
index ddee3881a9..5c307452cd 100644
--- a/interface/src/avatar/MyAvatar.cpp
+++ b/interface/src/avatar/MyAvatar.cpp
@@ -155,13 +155,6 @@ void MyAvatar::reset() {

 void MyAvatar::update(float deltaTime) {

-    qCDebug(interfaceapp, "update() *************");
-    glm::vec3 pos = getPosition();
-    qCDebug(interfaceapp, "\tpos = (%.5f, %.5f, %.5f)", pos.x, pos.y, pos.z);
-    glm::vec3 axis = glm::axis(getOrientation());
-    float angle = glm::angle(getOrientation());
-    qCDebug(interfaceapp, "\trot = axis = (%.5f, %.5f, %.5f), angle = %.5f", axis.x, axis.y, axis.z, angle);
-
     if (_referential) {
         _referential->update();
     }
@@ -947,13 +940,6 @@ bool MyAvatar::isLookingAtLeftEye() {
 }

 glm::vec3 MyAvatar::getDefaultEyePosition() const {
-    /*
-    qCDebug(interfaceapp, "getDefaultEyePosition()");
-    glm::vec3 e = _skeletonModel.getDefaultEyeModelPosition();
-    qCDebug(interfaceapp, "\teye pos = (%.5f, %.5f, %.5f)", e.x, e.y, e.z);
-    glm::vec3 p = getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
-    qCDebug(interfaceapp, "\tworld pos = (%.5f, %.5f, %.5f)", p.x, p.y, p.z);
-    */
     return getPosition() + getWorldAlignedOrientation() * _skeletonModel.getDefaultEyeModelPosition();
 }

@@ -1300,12 +1286,13 @@ void MyAvatar::updateOrientation(float deltaTime) {
     _bodyYawDelta += _driveKeys[ROT_LEFT] * YAW_SPEED * deltaTime;
     getHead()->setBasePitch(getHead()->getBasePitch() + (_driveKeys[ROT_UP] - _driveKeys[ROT_DOWN]) * PITCH_SPEED * deltaTime);

-    // AJT: disable arrow key movement.
-/*
-    // update body orientation by movement inputs
-    setOrientation(getOrientation() *
-        glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta, 0.0f) * deltaTime)));
-*/
+    glm::quat twist = glm::quat(glm::radians(glm::vec3(0.0f, _bodyYawDelta, 0.0f) * deltaTime));
+
+    glm::vec3 bodyPosition = calcBodyPositionFromSensors();
+    glm::quat bodyOrientation = calcBodyOrientationFromSensors();
+    glm::mat4 bodyMat = createMatFromQuatAndPos(bodyOrientation, bodyPosition);
+    glm::mat4 sensorOffset = bodyMat * glm::mat4_cast(twist) * glm::inverse(bodyMat);
+    _sensorToWorldMat = sensorOffset * _sensorToWorldMat;

     // decay body rotation momentum
     const float BODY_SPIN_FRICTION = 7.5f;
@@ -1586,6 +1573,8 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
         << newPosition.y << ", " << newPosition.z;

     glm::mat4 m;
+    m[3] = glm::vec4(newPosition, 1);
+    _sensorToWorldMat = m;

     // AJT: FIXME, goToLocation doens't work with orientation.
     /*
@@ -1602,13 +1591,6 @@ void MyAvatar::goToLocation(const glm::vec3& newPosition,
         m = glm::mat4_cast(newOrientation);
     }
     */
-    m[3] = glm::vec4(newPosition, 1);
-    _sensorToWorldMat = m;
-
-    qCDebug(interfaceapp, "\tsensorMat = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
-    qCDebug(interfaceapp, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
-    qCDebug(interfaceapp, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
-    qCDebug(interfaceapp, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);

     /*
     glm::vec3 shiftedPosition = newPosition;
@@ -1706,20 +1688,15 @@ void MyAvatar::relayDriveKeysToCharacterController() {
 }

 void MyAvatar::setAvatarPosition(glm::vec3 pos) {
-    qCDebug(interfaceapp, "setAvatarPosition = (%.5f, %.5f, %.5f)", pos.x, pos.y, pos.z);
     Avatar::setPosition(pos);
 }

 void MyAvatar::setAvatarOrientation(glm::quat quat) {
-    glm::vec3 axis = glm::axis(quat);
-    float angle = glm::angle(quat);
-    qCDebug(interfaceapp, "setAvatarOrientation axis = (%.5f, %.5f, %.5f), theta = %.5f", axis.x, axis.y, axis.z, angle);
     Avatar::setOrientation(quat);
 }

 // these are overriden, because they must move the sensor mat, such that the avatar will be at the given location.
 void MyAvatar::setPosition(const glm::vec3 position, bool overideReferential) {
-    qCDebug(interfaceapp, "setPosition = (%.5f, %.5f, %.5f)", position.x, position.y, position.z);
     glm::vec3 bodyPos = calcBodyPositionFromSensors();
     glm::vec3 desiredPos = position;
     glm::vec3 sensorPos(_sensorToWorldMat[3]);
@@ -1728,10 +1705,6 @@ void MyAvatar::setPosition(const glm::vec3 position, bool overideReferential) {
 }

 void MyAvatar::setOrientation(const glm::quat& orientation, bool overideReferential) {
-    glm::vec3 axis = glm::axis(orientation);
-    float angle = glm::angle(orientation);
-    qCDebug(interfaceapp, "setOrientation axis = (%.5f, %.5f, %.5f), theta = %.5f", axis.x, axis.y, axis.z, angle);
-
     glm::vec3 bodyPos = calcBodyPositionFromSensors();
     glm::quat bodyOrientation = calcBodyOrientationFromSensors();
     glm::mat4 bodyMat = createMatFromQuatAndPos(bodyOrientation, bodyPos);
diff --git a/libraries/display-plugins/src/display-plugins/openvr/OpenVrDisplayPlugin.cpp b/libraries/display-plugins/src/display-plugins/openvr/OpenVrDisplayPlugin.cpp
index cf96fa70b0..8e43cd2b65 100644
--- a/libraries/display-plugins/src/display-plugins/openvr/OpenVrDisplayPlugin.cpp
+++ b/libraries/display-plugins/src/display-plugins/openvr/OpenVrDisplayPlugin.cpp
@@ -133,45 +133,6 @@ mat4 OpenVrDisplayPlugin::getProjection(Eye eye, const mat4& baseProjection) con
 }

 glm::mat4 OpenVrDisplayPlugin::getModelview(Eye eye, const mat4& baseModelview) const {
-    /*
-    qCDebug(displayplugins, displayplugins, "getModelView(%d)\n", eye);
-    glm::mat4 m = baseModelview;
-    qCDebug(displayplugins, "\tbaseModelView = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
-
-    m = _eyesData[eye]._eyeOffset;
-    qCDebug(displayplugins, "\teyeOffset = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
-
-    m = glm::inverse(_eyesData[eye]._eyeOffset);
-    qCDebug(displayplugins, "\teyeOffsetInv = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
-
-    m = _eyesData[eye]._pose;
-    qCDebug(displayplugins, "\tpose = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]);
-
-    m = glm::inverse(_eyesData[eye]._eyeOffset) * baseModelview;
-    qCDebug(displayplugins, "\tbroken modelView = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]);
-    qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]);
%10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]); - qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]); - qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]); - - m = baseModelview * _eyesData[eye]._pose; - qCDebug(displayplugins, "\tworking modelView = | %10.4f %10.4f %10.4f %10.4f |", m[0][0], m[1][0], m[2][0], m[3][0]); - qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][1], m[1][1], m[2][1], m[3][1]); - qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][2], m[1][2], m[2][2], m[3][2]); - qCDebug(displayplugins, "\t | %10.4f %10.4f %10.4f %10.4f |", m[0][3], m[1][3], m[2][3], m[3][3]); - */ - return baseModelview * _eyesData[eye]._eyeOffset; } @@ -210,9 +171,6 @@ void OpenVrDisplayPlugin::finishFrame() { openvr_for_each_eye([&](vr::Hmd_Eye eye) { _eyesData[eye]._pose = _trackedDevicePoseMat4[0]; }); - - glm::vec3 p(_trackedDevicePoseMat4[0][3]); - qCDebug(displayplugins, "trackPos = (%.5f, %.5f, %.5f)", p.x, p.y, p.z); }; #endif diff --git a/libraries/input-plugins/src/input-plugins/ViveControllerManager.cpp b/libraries/input-plugins/src/input-plugins/ViveControllerManager.cpp index 79735c8cec..11a738d69e 100644 --- a/libraries/input-plugins/src/input-plugins/ViveControllerManager.cpp +++ b/libraries/input-plugins/src/input-plugins/ViveControllerManager.cpp @@ -134,6 +134,7 @@ void ViveControllerManager::focusOutEvent() { }; void ViveControllerManager::handleAxisEvent(Axis axis, float x, float y, int index) { + /* if (axis == TRACKPAD_AXIS) { _axisStateMap[makeInput(AXIS_Y_POS, index).getChannel()] = (y > 0.0f) ? y : 0.0f; _axisStateMap[makeInput(AXIS_Y_NEG, index).getChannel()] = (y < 0.0f) ? -y : 0.0f; @@ -142,9 +143,11 @@ void ViveControllerManager::handleAxisEvent(Axis axis, float x, float y, int ind } else if (axis == TRIGGER_AXIS) { _axisStateMap[makeInput(BACK_TRIGGER, index).getChannel()] = x; } + */ } void ViveControllerManager::handleButtonEvent(uint64_t buttons, int index) { + /* if (buttons & VR_BUTTON_A) { _buttonPressedMap.insert(makeInput(BUTTON_A, index).getChannel()); } @@ -157,13 +160,11 @@ void ViveControllerManager::handleButtonEvent(uint64_t buttons, int index) { if (buttons & VR_TRIGGER_BUTTON) { _buttonPressedMap.insert(makeInput(TRIGGER_BUTTON, index).getChannel()); } + */ } void ViveControllerManager::handlePoseEvent(const mat4& mat, int index) { - glm::vec4 p = _trackedDevicePoseMat4[vr::k_unTrackedDeviceIndex_Hmd][3]; - glm::vec3 headPos(p.x, p.y, p.z); - glm::vec3 position = glm::vec3(mat[3][0], mat[3][1], mat[3][2]) - headPos + vec3(0, 0.6f, 0); // figure out why this offset is necessary - + glm::vec3 position = extractTranslation(mat); glm::quat rotation = glm::quat_cast(mat); // Flip the rotation appropriately for each hand @@ -173,7 +174,7 @@ void ViveControllerManager::handlePoseEvent(const mat4& mat, int index) { rotation = rotation * glm::angleAxis(PI, glm::vec3(1.0f, 0.0f, 0.0f)) * glm::angleAxis(PI + PI_OVER_TWO, glm::vec3(0.0f, 0.0f, 1.0f)); } - _poseStateMap[makeInput(JointChannel(index)).getChannel()] = UserInputMapper::PoseValue(position, - rotation); + _poseStateMap[makeInput(JointChannel(index)).getChannel()] = UserInputMapper::PoseValue(position, rotation); } void ViveControllerManager::registerToUserInputMapper(UserInputMapper& mapper) {