Eye and hand tracking for the HTC Vive Pro

This commit is contained in:
Seth Alves 2019-08-09 09:28:58 -07:00
parent 07034721c7
commit a0031c6f10
4 changed files with 570 additions and 5 deletions

View file

@ -1,4 +1,4 @@
Source: hifi-client-deps
Version: 0.1
Description: Collected dependencies for High Fidelity applications
Build-Depends: hifi-deps, aristo (windows), glslang, liblo (windows), nlohmann-json, openvr (windows), quazip (!android), sdl2 (!android), spirv-cross (!android), spirv-tools (!android), sranipal (windows), vulkanmemoryallocator
Build-Depends: hifi-deps, glslang, nlohmann-json, openvr (windows), sdl2 (!android), spirv-cross (!android), spirv-tools (!android), vulkanmemoryallocator, aristo (windows), sranipal (windows)

View file

@ -15,5 +15,7 @@ if (WIN32 AND (NOT USE_GLES))
include_hifi_library_headers(octree)
target_openvr()
target_sranipal()
target_aristo()
target_link_libraries(${TARGET_NAME} Winmm.lib)
endif()

View file

@ -13,6 +13,21 @@
#include <algorithm>
#include <atomic>
#include <string>

#ifdef _WIN32
#pragma warning( push )
#pragma warning( disable : 4091 )
#pragma warning( disable : 4334 )
#endif
#include <SRanipal.h>
#include <SRanipal_Eye.h>
#include <SRanipal_Enums.h>
#include <interface_gesture.hpp>
#ifdef _WIN32
#pragma warning( pop )
#endif

#include <PerfStat.h>
#include <PathUtils.h>
#include <GeometryCache.h>
@ -37,6 +52,8 @@
#include <Plugins/InputConfiguration.h>
#include <controllers/StandardControls.h>
#include "OpenVrDisplayPlugin.h"
extern PoseData _nextSimPoseData;
vr::IVRSystem* acquireOpenVrSystem();
@ -130,6 +147,51 @@ static glm::mat4 calculateResetMat() {
return glm::mat4();
}
// Polls SRanipal eye-tracking data on a dedicated thread (GetEyeData blocks for
// several milliseconds) and publishes the most recent sample into eyeDataBuffer
// under eyeDataMutex for the plugin thread to copy out.
class ViveProEyeReadThread : public QThread {
public:
    ViveProEyeReadThread() {
        setObjectName("OpenVR ViveProEye Read Thread");
    }
    void run() override {
        while (!quit) {
            ViveSR::anipal::Eye::EyeData eyeData;
            int result = ViveSR::anipal::Eye::GetEyeData(&eyeData);
            {
                QMutexLocker locker(&eyeDataMutex);
                eyeDataBuffer.getEyeDataResult = result;
                if (result == ViveSR::Error::WORK) {
                    // Per-eye validity bitmasks; each bit flags one sub-field as usable.
                    uint64_t leftValids = eyeData.verbose_data.left.eye_data_validata_bit_mask;
                    uint64_t rightValids = eyeData.verbose_data.right.eye_data_validata_bit_mask;
                    eyeDataBuffer.leftDirectionValid =
                        (leftValids & (uint64_t)ViveSR::anipal::Eye::SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY) > (uint64_t)0;
                    eyeDataBuffer.rightDirectionValid =
                        (rightValids & (uint64_t)ViveSR::anipal::Eye::SINGLE_EYE_DATA_GAZE_DIRECTION_VALIDITY) > (uint64_t)0;
                    eyeDataBuffer.leftOpennessValid =
                        (leftValids & (uint64_t)ViveSR::anipal::Eye::SINGLE_EYE_DATA_EYE_OPENNESS_VALIDITY) > (uint64_t)0;
                    eyeDataBuffer.rightOpennessValid =
                        (rightValids & (uint64_t)ViveSR::anipal::Eye::SINGLE_EYE_DATA_EYE_OPENNESS_VALIDITY) > (uint64_t)0;

                    float* leftGaze = eyeData.verbose_data.left.gaze_direction_normalized.elem_;
                    float* rightGaze = eyeData.verbose_data.right.gaze_direction_normalized.elem_;
                    eyeDataBuffer.leftEyeGaze = glm::vec3(leftGaze[0], leftGaze[1], leftGaze[2]);
                    eyeDataBuffer.rightEyeGaze = glm::vec3(rightGaze[0], rightGaze[1], rightGaze[2]);

                    eyeDataBuffer.leftEyeOpenness = eyeData.verbose_data.left.eye_openness;
                    eyeDataBuffer.rightEyeOpenness = eyeData.verbose_data.right.eye_openness;
                }
            }
        }
    }

    // Set from the main thread (deactivate) to stop the loop.  Atomic because it
    // is read here without holding any lock; a plain bool would be a data race
    // and the compiler could legally hoist the load out of the loop.
    std::atomic<bool> quit { false };

    // mutex and buffer for moving data from this thread to the other one
    QMutex eyeDataMutex;
    EyeDataBuffer eyeDataBuffer;
};
static QString outOfRangeDataStrategyToString(ViveControllerManager::OutOfRangeDataStrategy strategy) {
switch (strategy) {
default:
@ -211,6 +273,81 @@ QString ViveControllerManager::configurationLayout() {
return OPENVR_LAYOUT;
}
// True when the given tracked-device index refers to a connected controller
// that the user is actively interacting with.
bool isDeviceIndexActive(vr::IVRSystem*& system, uint32_t deviceIndex) {
    if (!system) {
        return false;
    }
    if (deviceIndex == vr::k_unTrackedDeviceIndexInvalid) {
        return false;
    }
    bool connectedController =
        system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_Controller &&
        system->IsTrackedDeviceConnected(deviceIndex);
    if (!connectedController) {
        return false;
    }
    return system->GetTrackedDeviceActivityLevel(deviceIndex) == vr::k_EDeviceActivityLevel_UserInteraction;
}
// True when the controller filling the given role (left/right hand) is
// connected and actively in use.
bool isHandControllerActive(vr::IVRSystem*& system, vr::ETrackedControllerRole deviceRole) {
    if (!system) {
        return false;
    }
    return isDeviceIndexActive(system, system->GetTrackedDeviceIndexForControllerRole(deviceRole));
}
// True only when the left- and right-hand controllers are both active.
bool areBothHandControllersActive(vr::IVRSystem*& system) {
    bool leftActive = isHandControllerActive(system, vr::TrackedControllerRole_LeftHand);
    bool rightActive = isHandControllerActive(system, vr::TrackedControllerRole_RightHand);
    return leftActive && rightActive;
}
// Start the camera-based hand (gesture) tracker, if it isn't already running
// and the headset is a Vive Pro Eye.  Sets _viveCameraHandTracker on success.
void ViveControllerManager::enableGestureDetection() {
    if (_viveCameraHandTracker) {
        return; // already running
    }
    if (!ViveSR::anipal::Eye::IsViveProEye()) {
        return; // camera hand tracking only supported on this hardware
    }

// #define HAND_TRACKER_USE_EXTERNAL_TRANSFORM 1

#ifdef HAND_TRACKER_USE_EXTERNAL_TRANSFORM
    UseExternalTransform(true); // camera hand tracker results are in HMD frame
#else
    UseExternalTransform(false); // camera hand tracker results are in sensor frame
#endif
    GestureOption options; // defaults are GestureBackendAuto and GestureModeSkeleton
    GestureFailure gestureFailure = StartGestureDetection(&options);

    switch (gestureFailure) {
        case GestureFailureNone:
            qDebug() << "StartGestureDetection success";
            _viveCameraHandTracker = true;
            break;
        case GestureFailureOpenCL:
            qDebug() << "StartGestureDetection (Only on Windows) OpenCL is not supported on the machine";
            break;
        case GestureFailureCamera:
            qDebug() << "StartGestureDetection Start camera failed";
            break;
        case GestureFailureInternal:
            qDebug() << "StartGestureDetection Internal errors";
            break;
        case GestureFailureCPUOnPC:
            qDebug() << "StartGestureDetection CPU backend is not supported on Windows";
            break;
        default:
            // SDK may grow new failure codes; don't drop them silently.
            qDebug() << "StartGestureDetection failed with unrecognized error code:" << gestureFailure;
            break;
    }
}
// Stop the camera-based hand tracker if it is currently running.
void ViveControllerManager::disableGestureDetection() {
    if (_viveCameraHandTracker) {
        StopGestureDetection();
        _viveCameraHandTracker = false;
    }
}
bool ViveControllerManager::activate() {
InputPlugin::activate();
@ -230,6 +367,28 @@ bool ViveControllerManager::activate() {
auto userInputMapper = DependencyManager::get<controller::UserInputMapper>();
userInputMapper->registerDevice(_inputDevice);
_registeredWithInputMapper = true;
if (ViveSR::anipal::Eye::IsViveProEye()) {
qDebug() << "Vive Pro eye-tracking detected";
int error = ViveSR::anipal::Initial(ViveSR::anipal::Eye::ANIPAL_TYPE_EYE, NULL);
if (error == ViveSR::Error::WORK) {
_viveProEye = true;
qDebug() << "Successfully initialize Eye engine.";
} else if (error == ViveSR::Error::RUNTIME_NOT_FOUND) {
_viveProEye = false;
qDebug() << "please follows SRanipal SDK guide to install SR_Runtime first";
} else {
_viveProEye = false;
qDebug() << "Failed to initialize Eye engine. please refer to ViveSR error code:" << error;
}
if (_viveProEye) {
_viveProEyeReadThread = std::make_shared<ViveProEyeReadThread>();
_viveProEyeReadThread->start(QThread::HighPriority);
}
}
return true;
}
@ -251,6 +410,13 @@ void ViveControllerManager::deactivate() {
userInputMapper->removeDevice(_inputDevice->_deviceID);
_registeredWithInputMapper = false;
if (_viveProEyeReadThread) {
_viveProEyeReadThread->quit = true;
_viveProEyeReadThread->wait();
_viveProEyeReadThread = nullptr;
ViveSR::anipal::Release(ViveSR::anipal::Eye::ANIPAL_TYPE_EYE);
}
saveSettings();
}
@ -262,6 +428,311 @@ bool ViveControllerManager::isHeadControllerMounted() const {
return activityLevel == vr::k_EDeviceActivityLevel_UserInteraction;
}
// Mark all eye-tracking derived inputs (gaze poses and blink axes) as invalid.
void ViveControllerManager::invalidateEyeInputs() {
    auto& poses = _inputDevice->_poseStateMap;
    auto& axes = _inputDevice->_axisStateMap;
    poses[controller::LEFT_EYE].valid = false;
    poses[controller::RIGHT_EYE].valid = false;
    axes[controller::LEFT_EYE_BLINK].valid = false;
    axes[controller::RIGHT_EYE_BLINK].valid = false;
}
// Copy the latest eye sample from the read thread, smooth over invalid fields
// by holding the previous frame's values, and publish gaze poses and blink
// axes into the input device's state maps.
void ViveControllerManager::updateEyeTracker(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
    if (!isHeadControllerMounted()) {
        invalidateEyeInputs();
        return;
    }

    // The read thread only exists when SRanipal initialized successfully;
    // guard so a call without it can't dereference null.
    if (!_viveProEyeReadThread) {
        invalidateEyeInputs();
        return;
    }

    EyeDataBuffer eyeDataBuffer;
    {
        // GetEyeData takes around 4ms to finish, so we run it on a thread.
        QMutexLocker locker(&_viveProEyeReadThread->eyeDataMutex);
        eyeDataBuffer = _viveProEyeReadThread->eyeDataBuffer;
    }

    if (eyeDataBuffer.getEyeDataResult != ViveSR::Error::WORK) {
        invalidateEyeInputs();
        return;
    }

    // only update from buffer values if the new data is "valid"; otherwise
    // carry the previous frame's value (and its validity) forward.
    if (!eyeDataBuffer.leftDirectionValid) {
        eyeDataBuffer.leftEyeGaze = _prevEyeData.leftEyeGaze;
        eyeDataBuffer.leftDirectionValid = _prevEyeData.leftDirectionValid;
    }
    if (!eyeDataBuffer.rightDirectionValid) {
        eyeDataBuffer.rightEyeGaze = _prevEyeData.rightEyeGaze;
        eyeDataBuffer.rightDirectionValid = _prevEyeData.rightDirectionValid;
    }
    if (!eyeDataBuffer.leftOpennessValid) {
        eyeDataBuffer.leftEyeOpenness = _prevEyeData.leftEyeOpenness;
        eyeDataBuffer.leftOpennessValid = _prevEyeData.leftOpennessValid;
    }
    if (!eyeDataBuffer.rightOpennessValid) {
        eyeDataBuffer.rightEyeOpenness = _prevEyeData.rightEyeOpenness;
        eyeDataBuffer.rightOpennessValid = _prevEyeData.rightOpennessValid;
    }
    _prevEyeData = eyeDataBuffer;

    // transform data into what the controller system expects.
    // in the data from sranipal, left=+x, up=+y, forward=+z
    mat4 localLeftEyeMat = glm::lookAt(vec3(0.0f, 0.0f, 0.0f),
                                       glm::vec3(-eyeDataBuffer.leftEyeGaze[0],
                                                 eyeDataBuffer.leftEyeGaze[1],
                                                 eyeDataBuffer.leftEyeGaze[2]),
                                       vec3(0.0f, 1.0f, 0.0f));
    quat localLeftEyeRot = glm::quat_cast(localLeftEyeMat);
    quat avatarLeftEyeRot = _inputDevice->_poseStateMap[controller::HEAD].rotation * localLeftEyeRot;

    mat4 localRightEyeMat = glm::lookAt(vec3(0.0f, 0.0f, 0.0f),
                                        glm::vec3(-eyeDataBuffer.rightEyeGaze[0],
                                                  eyeDataBuffer.rightEyeGaze[1],
                                                  eyeDataBuffer.rightEyeGaze[2]),
                                        vec3(0.0f, 1.0f, 0.0f));
    quat localRightEyeRot = glm::quat_cast(localRightEyeMat);
    quat avatarRightEyeRot = _inputDevice->_poseStateMap[controller::HEAD].rotation * localRightEyeRot;

    // TODO -- figure out translations for eyes
    if (eyeDataBuffer.leftDirectionValid) {
        _inputDevice->_poseStateMap[controller::LEFT_EYE] = controller::Pose(glm::vec3(), avatarLeftEyeRot);
        _inputDevice->_poseStateMap[controller::LEFT_EYE].valid = true;
    } else {
        _inputDevice->_poseStateMap[controller::LEFT_EYE].valid = false;
    }
    if (eyeDataBuffer.rightDirectionValid) {
        _inputDevice->_poseStateMap[controller::RIGHT_EYE] = controller::Pose(glm::vec3(), avatarRightEyeRot);
        _inputDevice->_poseStateMap[controller::RIGHT_EYE].valid = true;
    } else {
        _inputDevice->_poseStateMap[controller::RIGHT_EYE].valid = false;
    }

    quint64 now = usecTimestampNow();

    // in hifi, 0 is open 1 is closed.  in SRanipal 1 is open, 0 is closed.
    if (eyeDataBuffer.leftOpennessValid) {
        _inputDevice->_axisStateMap[controller::LEFT_EYE_BLINK] =
            controller::AxisValue(1.0f - eyeDataBuffer.leftEyeOpenness, now);
    } else {
        // fix: blink lives in the axis map, not the pose map
        _inputDevice->_axisStateMap[controller::LEFT_EYE_BLINK].valid = false;
    }
    if (eyeDataBuffer.rightOpennessValid) {
        _inputDevice->_axisStateMap[controller::RIGHT_EYE_BLINK] =
            controller::AxisValue(1.0f - eyeDataBuffer.rightEyeOpenness, now);
    } else {
        // fix: blink lives in the axis map, not the pose map
        _inputDevice->_axisStateMap[controller::RIGHT_EYE_BLINK].valid = false;
    }
}
// Average the given hand landmark over the last
// NUMBER_OF_HAND_TRACKER_SMOOTHING_FRAMES frames to reduce jitter.
// (Removed a dead `#if 0` debug path that bypassed the smoothing.)
glm::vec3 ViveControllerManager::getRollingAverageHandPoint(int handIndex, int pointIndex) const {
    glm::vec3 sum;
    for (int s = 0; s < NUMBER_OF_HAND_TRACKER_SMOOTHING_FRAMES; s++) {
        sum += _handPoints[s][handIndex][pointIndex];
    }
    return sum / static_cast<float>(NUMBER_OF_HAND_TRACKER_SMOOTHING_FRAMES);
}
// Build a controller pose for one hand from two smoothed landmarks: position
// comes from the near point; orientation (when near != far) points y along
// near->far with z given by the palm normal (mirrored for the right hand).
// Returns an invalid pose if the derived rotation contains NaNs.
controller::Pose ViveControllerManager::trackedHandDataToPose(int hand, const glm::vec3& palmFacing,
                                                              int nearHandPositionIndex, int farHandPositionIndex) {
    glm::vec3 position = getRollingAverageHandPoint(hand, nearHandPositionIndex);

    glm::quat orientation;
    if (farHandPositionIndex != nearHandPositionIndex) {
        glm::vec3 tip = getRollingAverageHandPoint(hand, farHandPositionIndex);
        glm::vec3 yAxis = tip - position; // pointing direction
        glm::vec3 xAxis = glm::cross(yAxis, palmFacing);
        glm::vec3 zAxis = palmFacing * (hand == 0 ? 1.0f : -1.0f);
        glm::mat4 frame = glm::mat4(glm::vec4(xAxis, 0.0f),
                                    glm::vec4(yAxis, 0.0f),
                                    glm::vec4(zAxis, 0.0f),
                                    glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
        orientation = glm::normalize(glmExtractRotation(frame));
    }

    if (isNaN(orientation)) {
        controller::Pose invalidPose;
        invalidPose.valid = false;
        return invalidPose;
    }
    return controller::Pose(position, orientation);
}
void ViveControllerManager::trackFinger(int hand, int jointIndex1, int jointIndex2, int jointIndex3, int jointIndex4,
controller::StandardPoseChannel joint1, controller::StandardPoseChannel joint2,
controller::StandardPoseChannel joint3, controller::StandardPoseChannel joint4) {
glm::vec3 point1 = getRollingAverageHandPoint(hand, jointIndex1);
glm::vec3 point2 = getRollingAverageHandPoint(hand, jointIndex2);
glm::vec3 point3 = getRollingAverageHandPoint(hand, jointIndex3);
glm::vec3 point4 = getRollingAverageHandPoint(hand, jointIndex4);
glm::vec3 wristPos = getRollingAverageHandPoint(hand, 0);
glm::vec3 thumb2 = getRollingAverageHandPoint(hand, 2);
glm::vec3 pinkie1 = getRollingAverageHandPoint(hand, 17);
// 1st
glm::vec3 palmFacing = glm::normalize(glm::cross(pinkie1 - wristPos, thumb2 - wristPos));
glm::vec3 handForward = glm::normalize(point1 - wristPos);
glm::vec3 x = glm::normalize(glm::cross(palmFacing, handForward));
glm::vec3 y = glm::normalize(point2 - point1);
glm::vec3 z = (hand == 0) ? glm::cross(y, x) : glm::cross(x, y);
glm::mat4 rotMat1 = glm::mat4(glm::vec4(x, 0.0f),
glm::vec4(y, 0.0f),
glm::vec4(z, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::quat rot1 = glm::normalize(glmExtractRotation(rotMat1));
if (!isNaN(rot1)) {
_inputDevice->_poseStateMap[joint1] = controller::Pose(point1, rot1);
}
// 2nd
glm::vec3 x2 = x; // glm::normalize(glm::cross(point3 - point2, point2 - point1));
glm::vec3 y2 = glm::normalize(point3 - point2);
glm::vec3 z2 = (hand == 0) ? glm::cross(y2, x2) : glm::cross(x2, y2);
glm::mat4 rotMat2 = glm::mat4(glm::vec4(x2, 0.0f),
glm::vec4(y2, 0.0f),
glm::vec4(z2, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::quat rot2 = glm::normalize(glmExtractRotation(rotMat2));
if (!isNaN(rot2)) {
_inputDevice->_poseStateMap[joint2] = controller::Pose(point2, rot2);
}
// 3rd
glm::vec3 x3 = x; // glm::normalize(glm::cross(point4 - point3, point3 - point1));
glm::vec3 y3 = glm::normalize(point4 - point3);
glm::vec3 z3 = (hand == 0) ? glm::cross(y3, x3) : glm::cross(x3, y3);
glm::mat4 rotMat3 = glm::mat4(glm::vec4(x3, 0.0f),
glm::vec4(y3, 0.0f),
glm::vec4(z3, 0.0f),
glm::vec4(0.0f, 0.0f, 0.0f, 1.0f));
glm::quat rot3 = glm::normalize(glmExtractRotation(rotMat3));
if (!isNaN(rot3)) {
_inputDevice->_poseStateMap[joint3] = controller::Pose(point3, rot3);
}
// 4th
glm::quat rot4 = rot3;
if (!isNaN(rot4)) {
_inputDevice->_poseStateMap[joint4] = controller::Pose(point4, rot4);
}
}
// Per-frame update of camera-based hand tracking: enables/disables the
// gesture tracker depending on physical controller use, then converts the
// latest tracked hand skeleton into avatar-space joint poses.
void ViveControllerManager::updateCameraHandTracker(float deltaTime,
                                                    const controller::InputCalibrationData& inputCalibrationData) {

    if (areBothHandControllersActive(_system)) {
        // if both hand-controllers are in use, don't do camera hand tracking
        disableGestureDetection();
    } else {
        enableGestureDetection();
    }

    if (!_viveCameraHandTracker) {
        return;
    }

    const GestureResult* results = NULL;
    int handTrackerFrameIndex { -1 };
    int resultsHandCount = GetGestureResult(&results, &handTrackerFrameIndex);

    // NOTE(review): duplicate-frame suppression is commented out, so the same
    // tracker frame may be reprocessed; _lastHandTrackerFrameIndex is still
    // recorded below -- confirm whether re-enabling the check is intended.
    if (handTrackerFrameIndex >= 0 /* && handTrackerFrameIndex != _lastHandTrackerFrameIndex */) {
#ifdef HAND_TRACKER_USE_EXTERNAL_TRANSFORM
        // Tracker results are in the HMD frame; map HMD -> sensor -> world -> avatar.
        glm::mat4 trackedHandToAvatar =
            glm::inverse(inputCalibrationData.avatarMat) *
            inputCalibrationData.sensorToWorldMat *
            inputCalibrationData.hmdSensorMat;
        // glm::mat4 trackedHandToAvatar = _inputDevice->_poseStateMap[controller::HEAD].getMatrix() * Matrices::Y_180;
#else
        // Tracker results are in the sensor frame; include the display plugin's
        // sensor-reset matrix so a recenter is reflected in hand positions.
        DisplayPluginPointer displayPlugin = _container->getActiveDisplayPlugin();
        std::shared_ptr<OpenVrDisplayPlugin> openVRDisplayPlugin =
            std::dynamic_pointer_cast<OpenVrDisplayPlugin>(displayPlugin);
        glm::mat4 sensorResetMatrix;
        if (openVRDisplayPlugin) {
            sensorResetMatrix = openVRDisplayPlugin->getSensorResetMatrix();
        }
        glm::mat4 trackedHandToAvatar =
            glm::inverse(inputCalibrationData.avatarMat) *
            inputCalibrationData.sensorToWorldMat *
            sensorResetMatrix;
#endif

        // roll all the old points in the rolling average
        memmove(&(_handPoints[1]),
                &(_handPoints[0]),
                sizeof(_handPoints[0]) * (NUMBER_OF_HAND_TRACKER_SMOOTHING_FRAMES - 1));

        for (int handIndex = 0; handIndex < resultsHandCount; handIndex++) {
            bool isLeftHand = results[handIndex].isLeft;

            vr::ETrackedControllerRole controllerRole =
                isLeftHand ? vr::TrackedControllerRole_LeftHand : vr::TrackedControllerRole_RightHand;
            if (isHandControllerActive(_system, controllerRole)) {
                continue; // if the controller for this hand is tracked, ignore camera hand tracking
            }

            int hand = isLeftHand ? 0 : 1;
            // Copy this frame's 21 points into slot 0 of the smoothing buffer,
            // negating z (tracker and avatar frames disagree on z direction --
            // presumably left- vs right-handed; confirm against the SDK docs).
            for (int pointIndex = 0; pointIndex < NUMBER_OF_HAND_POINTS; pointIndex++) {
                glm::vec3 pos(results[handIndex].points[3 * pointIndex],
                              results[handIndex].points[3 * pointIndex + 1],
                              -results[handIndex].points[3 * pointIndex + 2]);
                _handPoints[0][hand][pointIndex] = transformPoint(trackedHandToAvatar, pos);
            }

            // Palm normal from wrist (0), thumb-2 (2) and pinky-1 (17) landmarks.
            glm::vec3 wristPos = getRollingAverageHandPoint(hand, 0);
            glm::vec3 thumb2 = getRollingAverageHandPoint(hand, 2);
            glm::vec3 pinkie1 = getRollingAverageHandPoint(hand, 17);
            glm::vec3 palmFacing = glm::cross(pinkie1 - wristPos, thumb2 - wristPos); // z axis

            // Whole-hand pose spans wrist (0) to middle-finger base (9);
            // then each finger is four consecutive landmark indices.
            _inputDevice->_poseStateMap[isLeftHand ? controller::LEFT_HAND : controller::RIGHT_HAND] =
                trackedHandDataToPose(hand, palmFacing, 0, 9);
            trackFinger(hand, 1, 2, 3, 4,
                        isLeftHand ? controller::LEFT_HAND_THUMB1 : controller::RIGHT_HAND_THUMB1,
                        isLeftHand ? controller::LEFT_HAND_THUMB2 : controller::RIGHT_HAND_THUMB2,
                        isLeftHand ? controller::LEFT_HAND_THUMB3 : controller::RIGHT_HAND_THUMB3,
                        isLeftHand ? controller::LEFT_HAND_THUMB4 : controller::RIGHT_HAND_THUMB4);
            trackFinger(hand, 5, 6, 7, 8,
                        isLeftHand ? controller::LEFT_HAND_INDEX1 : controller::RIGHT_HAND_INDEX1,
                        isLeftHand ? controller::LEFT_HAND_INDEX2 : controller::RIGHT_HAND_INDEX2,
                        isLeftHand ? controller::LEFT_HAND_INDEX3 : controller::RIGHT_HAND_INDEX3,
                        isLeftHand ? controller::LEFT_HAND_INDEX4 : controller::RIGHT_HAND_INDEX4);
            trackFinger(hand, 9, 10, 11, 12,
                        isLeftHand ? controller::LEFT_HAND_MIDDLE1 : controller::RIGHT_HAND_MIDDLE1,
                        isLeftHand ? controller::LEFT_HAND_MIDDLE2 : controller::RIGHT_HAND_MIDDLE2,
                        isLeftHand ? controller::LEFT_HAND_MIDDLE3 : controller::RIGHT_HAND_MIDDLE3,
                        isLeftHand ? controller::LEFT_HAND_MIDDLE4 : controller::RIGHT_HAND_MIDDLE4);
            trackFinger(hand, 13, 14, 15, 16,
                        isLeftHand ? controller::LEFT_HAND_RING1 : controller::RIGHT_HAND_RING1,
                        isLeftHand ? controller::LEFT_HAND_RING2 : controller::RIGHT_HAND_RING2,
                        isLeftHand ? controller::LEFT_HAND_RING3 : controller::RIGHT_HAND_RING3,
                        isLeftHand ? controller::LEFT_HAND_RING4 : controller::RIGHT_HAND_RING4);
            trackFinger(hand, 17, 18, 19, 20,
                        isLeftHand ? controller::LEFT_HAND_PINKY1 : controller::RIGHT_HAND_PINKY1,
                        isLeftHand ? controller::LEFT_HAND_PINKY2 : controller::RIGHT_HAND_PINKY2,
                        isLeftHand ? controller::LEFT_HAND_PINKY3 : controller::RIGHT_HAND_PINKY3,
                        isLeftHand ? controller::LEFT_HAND_PINKY4 : controller::RIGHT_HAND_PINKY4);
        }
    }
    _lastHandTrackerFrameIndex = handTrackerFrameIndex;
}
void ViveControllerManager::pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) {
if (!_system) {
@ -297,6 +768,12 @@ void ViveControllerManager::pluginUpdate(float deltaTime, const controller::Inpu
userInputMapper->registerDevice(_inputDevice);
_registeredWithInputMapper = true;
}
if (_viveProEye) {
updateEyeTracker(deltaTime, inputCalibrationData);
}
updateCameraHandTracker(deltaTime, inputCalibrationData);
}
void ViveControllerManager::loadSettings() {
@ -830,9 +1307,7 @@ void ViveControllerManager::InputDevice::handleHmd(uint32_t deviceIndex, const c
void ViveControllerManager::InputDevice::handleHandController(float deltaTime, uint32_t deviceIndex, const controller::InputCalibrationData& inputCalibrationData, bool isLeftHand) {
if (_system->IsTrackedDeviceConnected(deviceIndex) &&
_system->GetTrackedDeviceClass(deviceIndex) == vr::TrackedDeviceClass_Controller &&
_nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) {
if (isDeviceIndexActive(_system, deviceIndex) && _nextSimPoseData.vrPoses[deviceIndex].bPoseIsValid) {
// process pose
const mat4& mat = _nextSimPoseData.poses[deviceIndex];
@ -1401,9 +1876,52 @@ controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableI
makePair(LEFT_GRIP, "LeftGrip"),
makePair(RIGHT_GRIP, "RightGrip"),
// 3d location of controller
// 3d location of left controller and fingers
makePair(LEFT_HAND, "LeftHand"),
makePair(LEFT_HAND_THUMB1, "LeftHandThumb1"),
makePair(LEFT_HAND_THUMB2, "LeftHandThumb2"),
makePair(LEFT_HAND_THUMB3, "LeftHandThumb3"),
makePair(LEFT_HAND_THUMB4, "LeftHandThumb4"),
makePair(LEFT_HAND_INDEX1, "LeftHandIndex1"),
makePair(LEFT_HAND_INDEX2, "LeftHandIndex2"),
makePair(LEFT_HAND_INDEX3, "LeftHandIndex3"),
makePair(LEFT_HAND_INDEX4, "LeftHandIndex4"),
makePair(LEFT_HAND_MIDDLE1, "LeftHandMiddle1"),
makePair(LEFT_HAND_MIDDLE2, "LeftHandMiddle2"),
makePair(LEFT_HAND_MIDDLE3, "LeftHandMiddle3"),
makePair(LEFT_HAND_MIDDLE4, "LeftHandMiddle4"),
makePair(LEFT_HAND_RING1, "LeftHandRing1"),
makePair(LEFT_HAND_RING2, "LeftHandRing2"),
makePair(LEFT_HAND_RING3, "LeftHandRing3"),
makePair(LEFT_HAND_RING4, "LeftHandRing4"),
makePair(LEFT_HAND_PINKY1, "LeftHandPinky1"),
makePair(LEFT_HAND_PINKY2, "LeftHandPinky2"),
makePair(LEFT_HAND_PINKY3, "LeftHandPinky3"),
makePair(LEFT_HAND_PINKY4, "LeftHandPinky4"),
// 3d location of right controller and fingers
makePair(RIGHT_HAND, "RightHand"),
makePair(RIGHT_HAND_THUMB1, "RightHandThumb1"),
makePair(RIGHT_HAND_THUMB2, "RightHandThumb2"),
makePair(RIGHT_HAND_THUMB3, "RightHandThumb3"),
makePair(RIGHT_HAND_THUMB4, "RightHandThumb4"),
makePair(RIGHT_HAND_INDEX1, "RightHandIndex1"),
makePair(RIGHT_HAND_INDEX2, "RightHandIndex2"),
makePair(RIGHT_HAND_INDEX3, "RightHandIndex3"),
makePair(RIGHT_HAND_INDEX4, "RightHandIndex4"),
makePair(RIGHT_HAND_MIDDLE1, "RightHandMiddle1"),
makePair(RIGHT_HAND_MIDDLE2, "RightHandMiddle2"),
makePair(RIGHT_HAND_MIDDLE3, "RightHandMiddle3"),
makePair(RIGHT_HAND_MIDDLE4, "RightHandMiddle4"),
makePair(RIGHT_HAND_RING1, "RightHandRing1"),
makePair(RIGHT_HAND_RING2, "RightHandRing2"),
makePair(RIGHT_HAND_RING3, "RightHandRing3"),
makePair(RIGHT_HAND_RING4, "RightHandRing4"),
makePair(RIGHT_HAND_PINKY1, "RightHandPinky1"),
makePair(RIGHT_HAND_PINKY2, "RightHandPinky2"),
makePair(RIGHT_HAND_PINKY3, "RightHandPinky3"),
makePair(RIGHT_HAND_PINKY4, "RightHandPinky4"),
makePair(LEFT_FOOT, "LeftFoot"),
makePair(RIGHT_FOOT, "RightFoot"),
makePair(HIPS, "Hips"),
@ -1411,6 +1929,10 @@ controller::Input::NamedVector ViveControllerManager::InputDevice::getAvailableI
makePair(HEAD, "Head"),
makePair(LEFT_ARM, "LeftArm"),
makePair(RIGHT_ARM, "RightArm"),
makePair(LEFT_EYE, "LeftEye"),
makePair(RIGHT_EYE, "RightEye"),
makePair(LEFT_EYE_BLINK, "LeftEyeBlink"),
makePair(RIGHT_EYE_BLINK, "RightEyeBlink"),
// 16 tracked poses
makePair(TRACKED_OBJECT_00, "TrackedObject00"),

View file

@ -31,6 +31,23 @@ namespace vr {
class IVRSystem;
}
class ViveProEyeReadThread;
// Snapshot of one SRanipal eye-tracking sample, handed from the read thread
// to the plugin thread.  Kept a plain trivially-copyable value type because
// it is copied wholesale (including via memcpy) between threads.
class EyeDataBuffer {
public:
    // Raw result code from the last GetEyeData call; compared against
    // ViveSR::Error::WORK (success) by consumers.
    int getEyeDataResult { 0 };
    // Per-eye validity of the gaze-direction fields below.
    bool leftDirectionValid { false };
    bool rightDirectionValid { false };
    // Per-eye validity of the eye-openness fields below.
    bool leftOpennessValid { false };
    bool rightOpennessValid { false };
    // Normalized gaze directions (SRanipal convention: left=+x, up=+y, forward=+z).
    glm::vec3 leftEyeGaze;
    glm::vec3 rightEyeGaze;
    // Openness in SRanipal convention: 1 is open, 0 is closed.
    float leftEyeOpenness { 0.0f };
    float rightEyeOpenness { 0.0f };
};
class ViveControllerManager : public InputPlugin {
Q_OBJECT
public:
@ -49,12 +66,18 @@ public:
bool isHeadController() const override { return true; }
bool isHeadControllerMounted() const;
void enableGestureDetection();
void disableGestureDetection();
bool activate() override;
void deactivate() override;
QString getDeviceName() { return QString::fromStdString(_inputDevice->_headsetName); }
void pluginFocusOutEvent() override { _inputDevice->focusOutEvent(); }
void invalidateEyeInputs();
void updateEyeTracker(float deltaTime, const controller::InputCalibrationData& inputCalibrationData);
void updateCameraHandTracker(float deltaTime, const controller::InputCalibrationData& inputCalibrationData);
void pluginUpdate(float deltaTime, const controller::InputCalibrationData& inputCalibrationData) override;
virtual void saveSettings() const override;
@ -229,6 +252,24 @@ private:
vr::IVRSystem* _system { nullptr };
std::shared_ptr<InputDevice> _inputDevice { std::make_shared<InputDevice>(_system) };
bool _viveProEye { false };
mutable std::recursive_mutex _getEyeDataLock;
std::shared_ptr<ViveProEyeReadThread> _viveProEyeReadThread;
EyeDataBuffer _prevEyeData;
bool _viveCameraHandTracker { false };
int _lastHandTrackerFrameIndex { -1 };
const static int NUMBER_OF_HAND_TRACKER_SMOOTHING_FRAMES { 6 };
const static int NUMBER_OF_HAND_POINTS { 21 };
glm::vec3 _handPoints[NUMBER_OF_HAND_TRACKER_SMOOTHING_FRAMES][2][NUMBER_OF_HAND_POINTS]; // 2 for number of hands
glm::vec3 getRollingAverageHandPoint(int handIndex, int pointIndex) const;
controller::Pose trackedHandDataToPose(int hand, const glm::vec3& palmFacing,
int nearHandPositionIndex, int farHandPositionIndex);
void trackFinger(int hand, int jointIndex1, int jointIndex2, int jointIndex3, int jointIndex4,
controller::StandardPoseChannel joint1, controller::StandardPoseChannel joint2,
controller::StandardPoseChannel joint3, controller::StandardPoseChannel joint4);
static const char* NAME;
};