Merge pull request #16157 from sethalves/vive-pro-groundwork
DEV-605: Vive pro groundwork
Commit: e7001c3f1b
54 changed files with 596 additions and 698 deletions

FindiViewHMD.cmake (deleted)
@@ -1,38 +0,0 @@
#
# FindiViewHMD.cmake
#
# Try to find the SMI iViewHMD eye tracker library
#
# You must provide a IVIEWHMD_ROOT_DIR which contains 3rdParty, include, and libs directories
#
# Once done this will define
#
# IVIEWHMD_FOUND - system found iViewHMD
# IVIEWHMD_INCLUDE_DIRS - the iViewHMD include directory
# IVIEWHMD_LIBRARIES - link this to use iViewHMD
#
# Created on 27 Jul 2015 by David Rowe
# Copyright 2015 High Fidelity, Inc.
#

if (WIN32)

    include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
    hifi_library_search_hints("iViewHMD")

    find_path(IVIEWHMD_INCLUDE_DIRS iViewHMDAPI.h PATH_SUFFIXES include HINTS ${IVIEWHMD_SEARCH_DIRS})
    find_library(IVIEWHMD_LIBRARIES NAMES iViewHMDAPI PATH_SUFFIXES libs/x86 HINTS ${IVIEWHMD_SEARCH_DIRS})
    find_path(IVIEWHMD_API_DLL_PATH iViewHMDAPI.dll PATH_SUFFIXES libs/x86 HINTS ${IVIEWHMD_SEARCH_DIRS})
    list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_API_DLL_PATH)

    find_path(IVIEWHMD_DLL_PATH_3RD_PARTY libiViewNG.dll PATH_SUFFIXES 3rdParty HINTS ${IVIEWHMD_SEARCH_DIRS})
    list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_DLL_PATH_3RD_PARTY)

    include(FindPackageHandleStandardArgs)
    find_package_handle_standard_args(IVIEWHMD DEFAULT_MSG ${IVIEWHMD_REQUIREMENTS})

    add_paths_to_fixup_libs(${IVIEWHMD_API_DLL_PATH} ${IVIEWHMD_DLL_PATH_3RD_PARTY})

    mark_as_advanced(IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_SEARCH_DIRS)

endif()

interface/external/iViewHMD/readme.txt (vendored, deleted)
@@ -1,14 +0,0 @@
Instructions for adding SMI HMD Eye Tracking to Interface on Windows
David Rowe, 27 Jul 2015.

1. Download and install the SMI HMD Eye Tracking software from http://update.smivision.com/iViewNG-HMD.exe.

2. Copy the SDK folders (3rdParty, include, libs) from the SDK installation folder, C:\Program Files (x86)\SMI\iViewNG-HMD\SDK,
   into the interface/externals/iViewHMD folder. This readme.txt should be there as well.

   You may optionally copy the SDK folders to a location outside the repository (so that you can re-use them with different
   checkouts and different projects). If so, set the environment variable "HIFI_LIB_DIR" to a directory containing a
   subfolder "iViewHMD" that contains the folders mentioned above.

3. Clear your build directory, run cmake and build, and you should be all set.

controller mapping JSON
@@ -162,7 +162,14 @@
    { "from": "Standard.Head", "to": "Actions.Head" },
    { "from": "Standard.LeftArm", "to": "Actions.LeftArm" },
    { "from": "Standard.RightArm", "to": "Actions.RightArm" },

    { "from": "Standard.LeftEye", "to": "Actions.LeftEye" },
    { "from": "Standard.RightEye", "to": "Actions.RightEye" },

    { "from": "Standard.LeftEyeBlink", "to": "Actions.LeftEyeBlink" },
    { "from": "Standard.RightEyeBlink", "to": "Actions.RightEyeBlink" },

    { "from": "Standard.TrackedObject00", "to" : "Actions.TrackedObject00" },
    { "from": "Standard.TrackedObject01", "to" : "Actions.TrackedObject01" },
    { "from": "Standard.TrackedObject02", "to" : "Actions.TrackedObject02" },

controller mapping JSON
@@ -57,7 +57,13 @@
    { "from": "Standard.Head", "to": "Actions.Head" },
    { "from": "Standard.LeftArm", "to": "Actions.LeftArm" },
    { "from": "Standard.RightArm", "to": "Actions.RightArm" },

    { "from": "Standard.LeftEye", "to": "Actions.LeftEye" },
    { "from": "Standard.RightEye", "to": "Actions.RightEye" },

    { "from": "Standard.LeftEyeBlink", "to": "Actions.LeftEyeBlink" },
    { "from": "Standard.RightEyeBlink", "to": "Actions.RightEyeBlink" },

    { "from": "Standard.TrackedObject00", "to" : "Actions.TrackedObject00" },
    { "from": "Standard.TrackedObject01", "to" : "Actions.TrackedObject01" },
    { "from": "Standard.TrackedObject02", "to" : "Actions.TrackedObject02" },

vive.json
@@ -54,8 +54,52 @@
    { "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },

    { "from": "Vive.LeftHand", "to": "Standard.LeftHand" },
    { "from": "Vive.LeftHandThumb1", "to": "Standard.LeftHandThumb1"},
    { "from": "Vive.LeftHandThumb2", "to": "Standard.LeftHandThumb2"},
    { "from": "Vive.LeftHandThumb3", "to": "Standard.LeftHandThumb3"},
    { "from": "Vive.LeftHandThumb4", "to": "Standard.LeftHandThumb4"},
    { "from": "Vive.LeftHandIndex1", "to": "Standard.LeftHandIndex1"},
    { "from": "Vive.LeftHandIndex2", "to": "Standard.LeftHandIndex2"},
    { "from": "Vive.LeftHandIndex3", "to": "Standard.LeftHandIndex3"},
    { "from": "Vive.LeftHandIndex4", "to": "Standard.LeftHandIndex4"},
    { "from": "Vive.LeftHandMiddle1", "to": "Standard.LeftHandMiddle1"},
    { "from": "Vive.LeftHandMiddle2", "to": "Standard.LeftHandMiddle2"},
    { "from": "Vive.LeftHandMiddle3", "to": "Standard.LeftHandMiddle3"},
    { "from": "Vive.LeftHandMiddle4", "to": "Standard.LeftHandMiddle4"},
    { "from": "Vive.LeftHandRing1", "to": "Standard.LeftHandRing1"},
    { "from": "Vive.LeftHandRing2", "to": "Standard.LeftHandRing2"},
    { "from": "Vive.LeftHandRing3", "to": "Standard.LeftHandRing3"},
    { "from": "Vive.LeftHandRing4", "to": "Standard.LeftHandRing4"},
    { "from": "Vive.LeftHandPinky1", "to": "Standard.LeftHandPinky1"},
    { "from": "Vive.LeftHandPinky2", "to": "Standard.LeftHandPinky2"},
    { "from": "Vive.LeftHandPinky3", "to": "Standard.LeftHandPinky3"},
    { "from": "Vive.LeftHandPinky4", "to": "Standard.LeftHandPinky4"},
    { "from": "Vive.RightHand", "to": "Standard.RightHand" },
    { "from": "Vive.RightHandThumb1", "to": "Standard.RightHandThumb1"},
    { "from": "Vive.RightHandThumb2", "to": "Standard.RightHandThumb2"},
    { "from": "Vive.RightHandThumb3", "to": "Standard.RightHandThumb3"},
    { "from": "Vive.RightHandThumb4", "to": "Standard.RightHandThumb4"},
    { "from": "Vive.RightHandIndex1", "to": "Standard.RightHandIndex1"},
    { "from": "Vive.RightHandIndex2", "to": "Standard.RightHandIndex2"},
    { "from": "Vive.RightHandIndex3", "to": "Standard.RightHandIndex3"},
    { "from": "Vive.RightHandIndex4", "to": "Standard.RightHandIndex4"},
    { "from": "Vive.RightHandMiddle1", "to": "Standard.RightHandMiddle1"},
    { "from": "Vive.RightHandMiddle2", "to": "Standard.RightHandMiddle2"},
    { "from": "Vive.RightHandMiddle3", "to": "Standard.RightHandMiddle3"},
    { "from": "Vive.RightHandMiddle4", "to": "Standard.RightHandMiddle4"},
    { "from": "Vive.RightHandRing1", "to": "Standard.RightHandRing1"},
    { "from": "Vive.RightHandRing2", "to": "Standard.RightHandRing2"},
    { "from": "Vive.RightHandRing3", "to": "Standard.RightHandRing3"},
    { "from": "Vive.RightHandRing4", "to": "Standard.RightHandRing4"},
    { "from": "Vive.RightHandPinky1", "to": "Standard.RightHandPinky1"},
    { "from": "Vive.RightHandPinky2", "to": "Standard.RightHandPinky2"},
    { "from": "Vive.RightHandPinky3", "to": "Standard.RightHandPinky3"},
    { "from": "Vive.RightHandPinky4", "to": "Standard.RightHandPinky4"},
    { "from": "Vive.Head", "to" : "Standard.Head" },
    { "from": "Vive.LeftEye", "to" : "Standard.LeftEye" },
    { "from": "Vive.RightEye", "to" : "Standard.RightEye" },
    { "from": "Vive.LeftEyeBlink", "to" : "Standard.LeftEyeBlink" },
    { "from": "Vive.RightEyeBlink", "to" : "Standard.RightEyeBlink" },

    {
        "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot",

Application.cpp
@@ -60,6 +60,7 @@
#include <shared/QtHelpers.h>
#include <shared/PlatformHelper.h>
#include <shared/GlobalAppProperties.h>
#include <GeometryUtil.h>
#include <StatTracker.h>
#include <Trace.h>
#include <ResourceScriptingInterface.h>

@@ -154,7 +155,6 @@
#include <display-plugins/CompositorHelper.h>
#include <display-plugins/hmd/HmdDisplayPlugin.h>
#include <display-plugins/RefreshRateController.h>
#include <trackers/EyeTracker.h>
#include <avatars-renderer/ScriptAvatar.h>
#include <RenderableEntityItem.h>
#include <RenderableTextEntityItem.h>

@@ -878,7 +878,6 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
    DependencyManager::set<DdeFaceTracker>();
#endif

    DependencyManager::set<EyeTracker>();
    DependencyManager::set<AudioClient>();
    DependencyManager::set<AudioScope>();
    DependencyManager::set<DeferredLightingEffect>();

@@ -1997,12 +1996,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
    connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
#endif

#ifdef HAVE_IVIEWHMD
    auto eyeTracker = DependencyManager::get<EyeTracker>();
    eyeTracker->init();
    setActiveEyeTracker();
#endif

    // If launched from Steam, let it handle updates
    const QString HIFI_NO_UPDATER_COMMAND_LINE_KEY = "--no-updater";
    bool noUpdater = arguments().indexOf(HIFI_NO_UPDATER_COMMAND_LINE_KEY) != -1;

@@ -2746,9 +2739,6 @@ void Application::cleanupBeforeQuit() {
    // Stop third party processes so that they're not left running in the event of a subsequent shutdown crash.
#ifdef HAVE_DDE
    DependencyManager::get<DdeFaceTracker>()->setEnabled(false);
#endif
#ifdef HAVE_IVIEWHMD
    DependencyManager::get<EyeTracker>()->setEnabled(false, true);
#endif
    AnimDebugDraw::getInstance().shutdown();

@@ -2823,9 +2813,6 @@ void Application::cleanupBeforeQuit() {
#ifdef HAVE_DDE
    DependencyManager::destroy<DdeFaceTracker>();
#endif
#ifdef HAVE_IVIEWHMD
    DependencyManager::destroy<EyeTracker>();
#endif

    DependencyManager::destroy<ContextOverlayInterface>(); // Must be destroyed before TabletScriptingInterface

@@ -2834,7 +2821,7 @@ void Application::cleanupBeforeQuit() {
    DependencyManager::destroy<TabletScriptingInterface>();
    DependencyManager::destroy<ToolbarScriptingInterface>();
    DependencyManager::destroy<OffscreenUi>();

    DependencyManager::destroy<OffscreenQmlSurfaceCache>();

    _snapshotSoundInjector = nullptr;

@@ -5329,35 +5316,6 @@ void Application::setActiveFaceTracker() const {
#endif
}

#ifdef HAVE_IVIEWHMD
void Application::setActiveEyeTracker() {
    auto eyeTracker = DependencyManager::get<EyeTracker>();
    if (!eyeTracker->isInitialized()) {
        return;
    }

    bool isEyeTracking = Menu::getInstance()->isOptionChecked(MenuOption::SMIEyeTracking);
    bool isSimulating = Menu::getInstance()->isOptionChecked(MenuOption::SimulateEyeTracking);
    eyeTracker->setEnabled(isEyeTracking, isSimulating);

    Menu::getInstance()->getActionForOption(MenuOption::OnePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
    Menu::getInstance()->getActionForOption(MenuOption::ThreePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
    Menu::getInstance()->getActionForOption(MenuOption::FivePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
}

void Application::calibrateEyeTracker1Point() {
    DependencyManager::get<EyeTracker>()->calibrate(1);
}

void Application::calibrateEyeTracker3Points() {
    DependencyManager::get<EyeTracker>()->calibrate(3);
}

void Application::calibrateEyeTracker5Points() {
    DependencyManager::get<EyeTracker>()->calibrate(5);
}
#endif

bool Application::exportEntities(const QString& filename,
                                 const QVector<QUuid>& entityIDs,
                                 const glm::vec3* givenOffset) {

@@ -5831,8 +5789,8 @@ void Application::pushPostUpdateLambda(void* key, const std::function<void()>& f
    _postUpdateLambdas[key] = func;
}

// Called during Application::update immediately before AvatarManager::updateMyAvatar, updating my data that is then sent to everyone.
// (Maybe this code should be moved there?)
// Called during Application::update immediately before AvatarManager::updateMyAvatar, updating my data that is then sent
// to everyone.
// The principal result is to call updateLookAtTargetAvatar() and then setLookAtPosition().
// Note that it is called BEFORE we update position or joints based on sensors, etc.
void Application::updateMyAvatarLookAtPosition() {

@@ -5841,91 +5799,8 @@ void Application::updateMyAvatarLookAtPosition() {
    PerformanceWarning warn(showWarnings, "Application::updateMyAvatarLookAtPosition()");

    auto myAvatar = getMyAvatar();
    myAvatar->updateLookAtTargetAvatar();
    FaceTracker* faceTracker = getActiveFaceTracker();
    auto eyeTracker = DependencyManager::get<EyeTracker>();

    bool isLookingAtSomeone = false;
    bool isHMD = qApp->isHMDMode();
    glm::vec3 lookAtSpot;
    if (eyeTracker->isTracking() && (isHMD || eyeTracker->isSimulating())) {
        // Look at the point that the user is looking at.
        glm::vec3 lookAtPosition = eyeTracker->getLookAtPosition();
        if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
            lookAtPosition.x = -lookAtPosition.x;
        }
        if (isHMD) {
            // TODO -- this code is probably wrong, getHeadPose() returns something in sensor frame, not avatar
            glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
            glm::quat hmdRotation = glm::quat_cast(headPose);
            lookAtSpot = _myCamera.getPosition() + myAvatar->getWorldOrientation() * (hmdRotation * lookAtPosition);
        } else {
            lookAtSpot = myAvatar->getHead()->getEyePosition()
                + (myAvatar->getHead()->getFinalOrientationInWorldFrame() * lookAtPosition);
        }
    } else {
        AvatarSharedPointer lookingAt = myAvatar->getLookAtTargetAvatar().lock();
        bool haveLookAtCandidate = lookingAt && myAvatar.get() != lookingAt.get();
        auto avatar = static_pointer_cast<Avatar>(lookingAt);
        bool mutualLookAtSnappingEnabled = avatar && avatar->getLookAtSnappingEnabled() && myAvatar->getLookAtSnappingEnabled();
        if (haveLookAtCandidate && mutualLookAtSnappingEnabled) {
            // If I am looking at someone else, look directly at one of their eyes
            isLookingAtSomeone = true;
            auto lookingAtHead = avatar->getHead();

            const float MAXIMUM_FACE_ANGLE = 65.0f * RADIANS_PER_DEGREE;
            glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FORWARD;
            glm::vec3 fromLookingAtToMe = glm::normalize(myAvatar->getHead()->getEyePosition()
                - lookingAtHead->getEyePosition());
            float faceAngle = glm::angle(lookingAtFaceOrientation, fromLookingAtToMe);

            if (faceAngle < MAXIMUM_FACE_ANGLE) {
                // Randomly look back and forth between look targets
                eyeContactTarget target = Menu::getInstance()->isOptionChecked(MenuOption::FixGaze) ?
                    LEFT_EYE : myAvatar->getEyeContactTarget();
                switch (target) {
                    case LEFT_EYE:
                        lookAtSpot = lookingAtHead->getLeftEyePosition();
                        break;
                    case RIGHT_EYE:
                        lookAtSpot = lookingAtHead->getRightEyePosition();
                        break;
                    case MOUTH:
                        lookAtSpot = lookingAtHead->getMouthPosition();
                        break;
                }
            } else {
                // Just look at their head (mid point between eyes)
                lookAtSpot = lookingAtHead->getEyePosition();
            }
        } else {
            // I am not looking at anyone else, so just look forward
            auto headPose = myAvatar->getControllerPoseInWorldFrame(controller::Action::HEAD);
            if (headPose.isValid()) {
                lookAtSpot = transformPoint(headPose.getMatrix(), glm::vec3(0.0f, 0.0f, TREE_SCALE));
            } else {
                lookAtSpot = myAvatar->getHead()->getEyePosition() +
                    (myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
            }
        }

        // Deflect the eyes a bit to match the detected gaze from the face tracker if active.
        if (faceTracker && !faceTracker->isMuted()) {
            float eyePitch = faceTracker->getEstimatedEyePitch();
            float eyeYaw = faceTracker->getEstimatedEyeYaw();
            const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
            glm::vec3 origin = myAvatar->getHead()->getEyePosition();
            float deflection = faceTracker->getEyeDeflection();
            if (isLookingAtSomeone) {
                deflection *= GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT;
            }
            lookAtSpot = origin + _myCamera.getOrientation() * glm::quat(glm::radians(glm::vec3(
                eyePitch * deflection, eyeYaw * deflection, 0.0f))) *
                glm::inverse(_myCamera.getOrientation()) * (lookAtSpot - origin);
        }
    }

    myAvatar->getHead()->setLookAtPosition(lookAtSpot);
    myAvatar->updateLookAtPosition(faceTracker, _myCamera);
}

void Application::updateThreads(float deltaTime) {
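Note: the one piece of the removed body that survives unchanged in the new MyAvatar::updateLookAtPosition() is the face-tracker gaze deflection. It applies the small pitch/yaw rotation q in the camera's frame by conjugation; with camera orientation R,

    lookAtSpot = origin + R * q * inverse(R) * (lookAtSpot - origin)

so the look-at point pivots around the eye origin by q expressed in camera coordinates rather than in world coordinates.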

@@ -6497,7 +6372,10 @@
        controller::Action::LEFT_UP_LEG,
        controller::Action::RIGHT_UP_LEG,
        controller::Action::LEFT_TOE_BASE,
        controller::Action::RIGHT_TOE_BASE
        controller::Action::RIGHT_TOE_BASE,
        controller::Action::LEFT_EYE,
        controller::Action::RIGHT_EYE
    };

    // copy controller poses from userInputMapper to myAvatar.

@@ -7172,8 +7050,7 @@ void Application::resetSensors(bool andReload) {
#ifdef HAVE_DDE
    DependencyManager::get<DdeFaceTracker>()->reset();
#endif

    DependencyManager::get<EyeTracker>()->reset();

    _overlayConductor.centerUI();
    getActiveDisplayPlugin()->resetSensors();
    getMyAvatar()->reset(true, andReload);

Application.h
@@ -437,13 +437,6 @@ public slots:
    void sendWrongProtocolVersionsSignature(bool checked) { ::sendWrongProtocolVersionsSignature(checked); }
#endif

#ifdef HAVE_IVIEWHMD
    void setActiveEyeTracker();
    void calibrateEyeTracker1Point();
    void calibrateEyeTracker3Points();
    void calibrateEyeTracker5Points();
#endif

    static void showHelp();

    void cycleCamera();

Menu.cpp
@@ -534,32 +534,18 @@ Menu::Menu() {
    addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
#endif

#ifdef HAVE_IVIEWHMD
    // Developer > Avatar > Eye Tracking
    MenuWrapper* eyeTrackingMenu = avatarDebugMenu->addMenu("Eye Tracking");
    addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SMIEyeTracking, 0, false,
        qApp, SLOT(setActiveEyeTracker()));
    {
        MenuWrapper* calibrateEyeTrackingMenu = eyeTrackingMenu->addMenu("Calibrate");
        addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::OnePointCalibration, 0,
            qApp, SLOT(calibrateEyeTracker1Point()));
        addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::ThreePointCalibration, 0,
            qApp, SLOT(calibrateEyeTracker3Points()));
        addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::FivePointCalibration, 0,
            qApp, SLOT(calibrateEyeTracker5Points()));
    }
    addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SimulateEyeTracking, 0, false,
        qApp, SLOT(setActiveEyeTracker()));
#endif

    action = addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false);
    connect(action, &QAction::triggered, [this]{ Avatar::setShowReceiveStats(isOptionChecked(MenuOption::AvatarReceiveStats)); });
    action = addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowBoundingCollisionShapes, 0, false);
    connect(action, &QAction::triggered, [this]{ Avatar::setShowCollisionShapes(isOptionChecked(MenuOption::ShowBoundingCollisionShapes)); });
    action = addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowMyLookAtVectors, 0, false);
    connect(action, &QAction::triggered, [this]{ Avatar::setShowMyLookAtVectors(isOptionChecked(MenuOption::ShowMyLookAtVectors)); });
    action = addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowMyLookAtTarget, 0, false);
    connect(action, &QAction::triggered, [this]{ Avatar::setShowMyLookAtTarget(isOptionChecked(MenuOption::ShowMyLookAtTarget)); });
    action = addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowOtherLookAtVectors, 0, false);
    connect(action, &QAction::triggered, [this]{ Avatar::setShowOtherLookAtVectors(isOptionChecked(MenuOption::ShowOtherLookAtVectors)); });
    action = addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowOtherLookAtTarget, 0, false);
    connect(action, &QAction::triggered, [this]{ Avatar::setShowOtherLookAtTarget(isOptionChecked(MenuOption::ShowOtherLookAtTarget)); });

    auto avatarManager = DependencyManager::get<AvatarManager>();
    auto avatar = avatarManager->getMyAvatar();

Menu.h
@@ -188,7 +188,9 @@ namespace MenuOption {
    const QString ShowBoundingCollisionShapes = "Show Bounding Collision Shapes";
    const QString ShowDSConnectTable = "Show Domain Connection Timing";
    const QString ShowMyLookAtVectors = "Show My Eye Vectors";
    const QString ShowMyLookAtTarget = "Show My Look-At Target";
    const QString ShowOtherLookAtVectors = "Show Other Eye Vectors";
    const QString ShowOtherLookAtTarget = "Show Other Look-At Target";
    const QString EnableLookAtSnapping = "Enable LookAt Snapping";
    const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
    const QString SimulateEyeTracking = "Simulate";

MyAvatar.cpp
@@ -772,6 +772,18 @@ void MyAvatar::update(float deltaTime) {
    emit energyChanged(currentEnergy);

    updateEyeContactTarget(deltaTime);

    // if we're getting eye rotations from a tracker, disable observer-side procedural eye motions
    auto userInputMapper = DependencyManager::get<UserInputMapper>();
    bool eyesTracked =
        userInputMapper->getPoseState(controller::Action::LEFT_EYE).valid &&
        userInputMapper->getPoseState(controller::Action::RIGHT_EYE).valid;

    int leftEyeJointIndex = getJointIndex("LeftEye");
    int rightEyeJointIndex = getJointIndex("RightEye");
    bool eyesAreOverridden = getIsJointOverridden(leftEyeJointIndex) || getIsJointOverridden(rightEyeJointIndex);

    _headData->setHasProceduralEyeMovement(!(eyesTracked || eyesAreOverridden));
}

void MyAvatar::updateEyeContactTarget(float deltaTime) {

@@ -1454,8 +1466,50 @@ void MyAvatar::setEnableDebugDrawHandControllers(bool isEnabled) {
    _enableDebugDrawHandControllers = isEnabled;

    if (!isEnabled) {
        DebugDraw::getInstance().removeMarker("leftHandController");
        DebugDraw::getInstance().removeMarker("rightHandController");
        DebugDraw::getInstance().removeMarker("LEFT_HAND");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND");

        DebugDraw::getInstance().removeMarker("LEFT_HAND_THUMB1");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_THUMB2");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_THUMB3");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_THUMB4");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_INDEX1");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_INDEX2");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_INDEX3");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_INDEX4");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_MIDDLE1");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_MIDDLE2");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_MIDDLE3");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_MIDDLE4");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_RING1");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_RING2");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_RING3");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_RING4");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_PINKY1");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_PINKY2");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_PINKY3");
        DebugDraw::getInstance().removeMarker("LEFT_HAND_PINKY4");

        DebugDraw::getInstance().removeMarker("RIGHT_HAND_THUMB1");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_THUMB2");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_THUMB3");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_THUMB4");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_INDEX1");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_INDEX2");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_INDEX3");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_INDEX4");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_MIDDLE1");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_MIDDLE2");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_MIDDLE3");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_MIDDLE4");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_RING1");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_RING2");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_RING3");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_RING4");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_PINKY1");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_PINKY2");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_PINKY3");
        DebugDraw::getInstance().removeMarker("RIGHT_HAND_PINKY4");
    }
}

@@ -3097,6 +3151,16 @@
    disconnect(&(_skeletonModel->getRig()), SIGNAL(onLoadComplete()), this, SLOT(animGraphLoaded()));
}

void MyAvatar::debugDrawPose(controller::Action action, const char* channelName, float size) {
    auto pose = getControllerPoseInWorldFrame(action);
    if (pose.isValid()) {
        DebugDraw::getInstance().addMarker(channelName, pose.getRotation(), pose.getTranslation(), glm::vec4(1), size);
    } else {
        DebugDraw::getInstance().removeMarker(channelName);
    }
}

void MyAvatar::postUpdate(float deltaTime, const render::ScenePointer& scene) {

    Avatar::postUpdate(deltaTime, scene);
|
|||
}
|
||||
|
||||
if (_enableDebugDrawHandControllers) {
|
||||
auto leftHandPose = getControllerPoseInWorldFrame(controller::Action::LEFT_HAND);
|
||||
auto rightHandPose = getControllerPoseInWorldFrame(controller::Action::RIGHT_HAND);
|
||||
debugDrawPose(controller::Action::LEFT_HAND, "LEFT_HAND", 1.0);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND, "RIGHT_HAND", 1.0);
|
||||
|
||||
if (leftHandPose.isValid()) {
|
||||
DebugDraw::getInstance().addMarker("leftHandController", leftHandPose.getRotation(), leftHandPose.getTranslation(), glm::vec4(1));
|
||||
} else {
|
||||
DebugDraw::getInstance().removeMarker("leftHandController");
|
||||
}
|
||||
debugDrawPose(controller::Action::LEFT_HAND_THUMB1, "LEFT_HAND_THUMB1", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_THUMB2, "LEFT_HAND_THUMB2", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_THUMB3, "LEFT_HAND_THUMB3", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_THUMB4, "LEFT_HAND_THUMB4", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_INDEX1, "LEFT_HAND_INDEX1", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_INDEX2, "LEFT_HAND_INDEX2", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_INDEX3, "LEFT_HAND_INDEX3", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_INDEX4, "LEFT_HAND_INDEX4", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_MIDDLE1, "LEFT_HAND_MIDDLE1", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_MIDDLE2, "LEFT_HAND_MIDDLE2", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_MIDDLE3, "LEFT_HAND_MIDDLE3", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_MIDDLE4, "LEFT_HAND_MIDDLE4", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_RING1, "LEFT_HAND_RING1", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_RING2, "LEFT_HAND_RING2", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_RING3, "LEFT_HAND_RING3", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_RING4, "LEFT_HAND_RING4", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_PINKY1, "LEFT_HAND_PINKY1", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_PINKY2, "LEFT_HAND_PINKY2", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_PINKY3, "LEFT_HAND_PINKY3", 0.1f);
|
||||
debugDrawPose(controller::Action::LEFT_HAND_PINKY4, "LEFT_HAND_PINKY4", 0.1f);
|
||||
|
||||
if (rightHandPose.isValid()) {
|
||||
DebugDraw::getInstance().addMarker("rightHandController", rightHandPose.getRotation(), rightHandPose.getTranslation(), glm::vec4(1));
|
||||
} else {
|
||||
DebugDraw::getInstance().removeMarker("rightHandController");
|
||||
}
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_THUMB1, "RIGHT_HAND_THUMB1", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_THUMB2, "RIGHT_HAND_THUMB2", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_THUMB3, "RIGHT_HAND_THUMB3", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_THUMB4, "RIGHT_HAND_THUMB4", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_INDEX1, "RIGHT_HAND_INDEX1", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_INDEX2, "RIGHT_HAND_INDEX2", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_INDEX3, "RIGHT_HAND_INDEX3", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_INDEX4, "RIGHT_HAND_INDEX4", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_MIDDLE1, "RIGHT_HAND_MIDDLE1", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_MIDDLE2, "RIGHT_HAND_MIDDLE2", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_MIDDLE3, "RIGHT_HAND_MIDDLE3", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_MIDDLE4, "RIGHT_HAND_MIDDLE4", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_RING1, "RIGHT_HAND_RING1", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_RING2, "RIGHT_HAND_RING2", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_RING3, "RIGHT_HAND_RING3", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_RING4, "RIGHT_HAND_RING4", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_PINKY1, "RIGHT_HAND_PINKY1", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_PINKY2, "RIGHT_HAND_PINKY2", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_PINKY3, "RIGHT_HAND_PINKY3", 0.1f);
|
||||
debugDrawPose(controller::Action::RIGHT_HAND_PINKY4, "RIGHT_HAND_PINKY4", 0.1f);
|
||||
}
|
||||
|
||||
DebugDraw::getInstance().updateMyAvatarPos(getWorldPosition());
|
||||
|

@@ -6296,3 +6390,125 @@ void MyAvatar::endSit(const glm::vec3& position, const glm::quat& rotation) {
        setSitDriveKeysStatus(true);
    }
}

bool MyAvatar::getIsJointOverridden(int jointIndex) const {
    // has this joint been set by a script?
    return _skeletonModel->getIsJointOverridden(jointIndex);
}

void MyAvatar::updateLookAtPosition(FaceTracker* faceTracker, Camera& myCamera) {

    updateLookAtTargetAvatar();

    bool isLookingAtSomeone = false;
    glm::vec3 lookAtSpot;

    const MyHead* myHead = getMyHead();

    int leftEyeJointIndex = getJointIndex("LeftEye");
    int rightEyeJointIndex = getJointIndex("RightEye");
    bool eyesAreOverridden = getIsJointOverridden(leftEyeJointIndex) ||
        getIsJointOverridden(rightEyeJointIndex);
    if (eyesAreOverridden) {
        // A script has set the eye rotations, so use these to set lookAtSpot
        glm::quat leftEyeRotation = getAbsoluteJointRotationInObjectFrame(leftEyeJointIndex);
        glm::quat rightEyeRotation = getAbsoluteJointRotationInObjectFrame(rightEyeJointIndex);
        glm::vec3 leftVec = getWorldOrientation() * leftEyeRotation * IDENTITY_FORWARD;
        glm::vec3 rightVec = getWorldOrientation() * rightEyeRotation * IDENTITY_FORWARD;
        glm::vec3 leftEyePosition = myHead->getLeftEyePosition();
        glm::vec3 rightEyePosition = myHead->getRightEyePosition();
        float t1, t2;
        bool success = findClosestApproachOfLines(leftEyePosition, leftVec, rightEyePosition, rightVec, t1, t2);
        if (success) {
            glm::vec3 leftFocus = leftEyePosition + leftVec * t1;
            glm::vec3 rightFocus = rightEyePosition + rightVec * t2;
            lookAtSpot = (leftFocus + rightFocus) / 2.0f; // average
        } else {
            lookAtSpot = myHead->getEyePosition() + glm::normalize(leftVec) * 1000.0f;
        }
    } else {
        controller::Pose leftEyePose = getControllerPoseInAvatarFrame(controller::Action::LEFT_EYE);
        controller::Pose rightEyePose = getControllerPoseInAvatarFrame(controller::Action::RIGHT_EYE);
        if (leftEyePose.isValid() && rightEyePose.isValid()) {
            // an eye tracker is in use, set lookAtSpot from this
            glm::vec3 leftVec = getWorldOrientation() * leftEyePose.rotation * glm::vec3(0.0f, 0.0f, -1.0f);
            glm::vec3 rightVec = getWorldOrientation() * rightEyePose.rotation * glm::vec3(0.0f, 0.0f, -1.0f);

            glm::vec3 leftEyePosition = myHead->getLeftEyePosition();
            glm::vec3 rightEyePosition = myHead->getRightEyePosition();
            float t1, t2;
            bool success = findClosestApproachOfLines(leftEyePosition, leftVec, rightEyePosition, rightVec, t1, t2);
            if (success) {
                glm::vec3 leftFocus = leftEyePosition + leftVec * t1;
                glm::vec3 rightFocus = rightEyePosition + rightVec * t2;
                lookAtSpot = (leftFocus + rightFocus) / 2.0f; // average
            } else {
                lookAtSpot = myHead->getEyePosition() + glm::normalize(leftVec) * 1000.0f;
            }
        } else {
            // no script override, no eye tracker, so do procedural eye motion
            AvatarSharedPointer lookingAt = getLookAtTargetAvatar().lock();
            bool haveLookAtCandidate = lookingAt && this != lookingAt.get();
            auto avatar = static_pointer_cast<Avatar>(lookingAt);
            bool mutualLookAtSnappingEnabled =
                avatar && avatar->getLookAtSnappingEnabled() && getLookAtSnappingEnabled();
            if (haveLookAtCandidate && mutualLookAtSnappingEnabled) {
                // If I am looking at someone else, look directly at one of their eyes
                isLookingAtSomeone = true;
                auto lookingAtHead = avatar->getHead();

                const float MAXIMUM_FACE_ANGLE = 65.0f * RADIANS_PER_DEGREE;
                glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FORWARD;
                glm::vec3 fromLookingAtToMe = glm::normalize(getHead()->getEyePosition()
                    - lookingAtHead->getEyePosition());
                float faceAngle = glm::angle(lookingAtFaceOrientation, fromLookingAtToMe);

                if (faceAngle < MAXIMUM_FACE_ANGLE) {
                    // Randomly look back and forth between look targets
                    eyeContactTarget target = Menu::getInstance()->isOptionChecked(MenuOption::FixGaze) ?
                        LEFT_EYE : getEyeContactTarget();
                    switch (target) {
                        case LEFT_EYE:
                            lookAtSpot = lookingAtHead->getLeftEyePosition();
                            break;
                        case RIGHT_EYE:
                            lookAtSpot = lookingAtHead->getRightEyePosition();
                            break;
                        case MOUTH:
                            lookAtSpot = lookingAtHead->getMouthPosition();
                            break;
                    }
                } else {
                    // Just look at their head (mid point between eyes)
                    lookAtSpot = lookingAtHead->getEyePosition();
                }
            } else {
                // I am not looking at anyone else, so just look forward
                auto headPose = getControllerPoseInWorldFrame(controller::Action::HEAD);
                if (headPose.isValid()) {
                    lookAtSpot = transformPoint(headPose.getMatrix(), glm::vec3(0.0f, 0.0f, TREE_SCALE));
                } else {
                    lookAtSpot = myHead->getEyePosition() +
                        (getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
                }
            }

            // Deflect the eyes a bit to match the detected gaze from the face tracker if active.
            if (faceTracker && !faceTracker->isMuted()) {
                float eyePitch = faceTracker->getEstimatedEyePitch();
                float eyeYaw = faceTracker->getEstimatedEyeYaw();
                const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
                glm::vec3 origin = myHead->getEyePosition();
                float deflection = faceTracker->getEyeDeflection();
                if (isLookingAtSomeone) {
                    deflection *= GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT;
                }
                lookAtSpot = origin + myCamera.getOrientation() * glm::quat(glm::radians(glm::vec3(
                    eyePitch * deflection, eyeYaw * deflection, 0.0f))) *
                    glm::inverse(myCamera.getOrientation()) * (lookAtSpot - origin);
            }
        }
    }

    getHead()->setLookAtPosition(lookAtSpot);
}
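Note: both tracked-eye branches above converge the two gaze rays with findClosestApproachOfLines(), which this diff does not include. For reference, here is a minimal sketch of the standard closest-approach-of-two-lines computation with a matching signature; the engine's actual helper (in GeometryUtil) may differ in details:

    // Sketch only: parameters t1, t2 of the closest points on the lines
    // p1 + t1 * d1 and p2 + t2 * d2 (the standard least-squares formulation).
    bool findClosestApproachOfLines(glm::vec3 p1, glm::vec3 d1,
                                    glm::vec3 p2, glm::vec3 d2,
                                    float& t1, float& t2) {
        const float PARALLEL_EPSILON = 1.0e-6f;
        glm::vec3 w = p1 - p2;
        float a = glm::dot(d1, d1);
        float b = glm::dot(d1, d2);
        float c = glm::dot(d2, d2);
        float d = glm::dot(d1, w);
        float e = glm::dot(d2, w);
        float denominator = a * c - b * b;  // ~0 when the rays are (near) parallel
        if (denominator < PARALLEL_EPSILON) {
            return false;  // no unique closest pair of points
        }
        t1 = (b * e - c * d) / denominator;
        t2 = (a * e - b * d) / denominator;
        return true;
    }

The midpoint of the two per-eye focus points becomes lookAtSpot; the parallel-ray fallback (1000 m straight ahead) covers the common case of both eyes looking directly forward.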

MyAvatar.h
@@ -29,10 +29,12 @@
#include <ScriptEngine.h>
#include <SettingHandle.h>
#include <Sound.h>
#include <shared/Camera.h>

#include "AtRestDetector.h"
#include "MyCharacterController.h"
#include "RingBufferHistory.h"
#include "devices/DdeFaceTracker.h"

class AvatarActionHold;
class ModelItemID;

@@ -1864,6 +1866,8 @@ public:
    bool getFlowActive() const;
    bool getNetworkGraphActive() const;

    void updateLookAtPosition(FaceTracker* faceTracker, Camera& myCamera);

    // sets the reaction enabled and triggered parameters of the passed in params
    // also clears internal reaction triggers
    void updateRigControllerParameters(Rig::ControllerParameters& params);

@@ -1871,6 +1875,10 @@ public:
    // Don't substitute verify-fail:
    virtual const QUrl& getSkeletonModelURL() const override { return _skeletonModelURL; }

    void debugDrawPose(controller::Action action, const char* channelName, float size);

    bool getIsJointOverridden(int jointIndex) const;

public slots:

    /**jsdoc

MyHead.cpp
@@ -15,7 +15,7 @@
#include <recording/Deck.h>
#include <Rig.h>
#include <trackers/FaceTracker.h>
#include <trackers/EyeTracker.h>
#include <FaceshiftConstants.h>

#include "devices/DdeFaceTracker.h"
#include "Application.h"

@@ -46,18 +46,37 @@ void MyHead::simulate(float deltaTime) {
    auto player = DependencyManager::get<recording::Deck>();
    // Only use face trackers when not playing back a recording.
    if (!player->isPlaying()) {
        auto faceTracker = qApp->getActiveFaceTracker();
        const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
        _isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
        if (_isFaceTrackerConnected) {
            if (hasActualFaceTrackerConnected) {
                _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
            }
        }
        // TODO -- finish removing face-tracker specific code. To do this, add input channels for
        // each blendshape-coefficient and update the various json files to relay them in a useful way.
        // After that, input plugins can be used to drive the avatar's face, and the various "DDE" files
        // can be ported into the plugin and removed.
        //
        // auto faceTracker = qApp->getActiveFaceTracker();
        // const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
        // _isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
        // if (_isFaceTrackerConnected) {
        //     if (hasActualFaceTrackerConnected) {
        //         _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
        //     }
        // }

        auto eyeTracker = DependencyManager::get<EyeTracker>();
        _isEyeTrackerConnected = eyeTracker->isTracking();
        // if eye tracker is connected we should get the data here.
        auto userInputMapper = DependencyManager::get<UserInputMapper>();
        bool eyeLidsTracked =
            userInputMapper->getActionStateValid(controller::Action::LEFT_EYE_BLINK) &&
            userInputMapper->getActionStateValid(controller::Action::RIGHT_EYE_BLINK);
        setFaceTrackerConnected(eyeLidsTracked);
        if (eyeLidsTracked) {
            float leftEyeBlink = userInputMapper->getActionState(controller::Action::LEFT_EYE_BLINK);
            float rightEyeBlink = userInputMapper->getActionState(controller::Action::RIGHT_EYE_BLINK);
            _blendshapeCoefficients.resize(std::max(_blendshapeCoefficients.size(), 2));
            _blendshapeCoefficients[EYE_BLINK_INDICES[0]] = leftEyeBlink;
            _blendshapeCoefficients[EYE_BLINK_INDICES[1]] = rightEyeBlink;
        } else {
            const float FULLY_OPEN = 0.0f;
            _blendshapeCoefficients.resize(std::max(_blendshapeCoefficients.size(), 2));
            _blendshapeCoefficients[EYE_BLINK_INDICES[0]] = FULLY_OPEN;
            _blendshapeCoefficients[EYE_BLINK_INDICES[1]] = FULLY_OPEN;
        }
    }
    Parent::simulate(deltaTime);
}
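Note on conventions in the new blink path: the blendshape value is closedness (FULLY_OPEN = 0.0f here; Head.cpp uses FULLY_CLOSED = 1.0f). Eye-tracking runtimes commonly report per-eye openness instead, so whatever input plugin ends up driving LEFT_EYE_BLINK / RIGHT_EYE_BLINK would invert the value first. A hypothetical conversion, not part of this diff:

    // Hypothetical helper in a driving input plugin: map a runtime's per-eye
    // *openness* in [0, 1] to the avatar's blink *closedness* coefficient.
    float toBlinkCoefficient(float eyeOpenness) {
        return 1.0f - glm::clamp(eyeOpenness, 0.0f, 1.0f);
    }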

MySkeletonModel.cpp
@@ -114,13 +114,12 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {

    Head* head = _owningAvatar->getHead();

    // make sure lookAt is not too close to face (avoid crosseyes)
    glm::vec3 lookAt = head->getLookAtPosition();
    glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
    float focusDistance = glm::length(focusOffset);
    const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
    if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
        lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
    bool eyePosesValid = !head->getHasProceduralEyeMovement();
    glm::vec3 lookAt;
    if (eyePosesValid) {
        lookAt = head->getLookAtPosition(); // don't apply no-crosseyes code when eyes are being tracked
    } else {
        lookAt = avoidCrossedEyes(head->getLookAtPosition());
    }

    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);

Rig.cpp
@@ -715,7 +715,7 @@ void Rig::reset(const HFMModel& hfmModel) {
    }
}

bool Rig::jointStatesEmpty() {
bool Rig::jointStatesEmpty() const {
    return _internalPoseSet._relativePoses.empty();
}

@@ -878,6 +878,20 @@ void Rig::setJointRotation(int index, bool valid, const glm::quat& rotation, flo
    }
}

bool Rig::getIsJointOverridden(int jointIndex) const {
    if (QThread::currentThread() == thread()) {
        if (isIndexValid(jointIndex)) {
            return _internalPoseSet._overrideFlags[jointIndex];
        }
    } else {
        QReadLocker readLock(&_externalPoseSetLock);
        if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._overrideFlags.size()) {
            return _externalPoseSet._overrideFlags[jointIndex];
        }
    }
    return false;
}

bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm::vec3 translation, glm::quat rotation) const {
    bool success { false };
    glm::vec3 originalPosition = position;
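Note: getIsJointOverridden() follows the Rig's usual double-buffer pattern; the owning thread reads _internalPoseSet directly, while any other thread takes the read lock and consults the copied _externalPoseSet, so the query is safe to call from script or render threads. A small usage sketch, assuming a Rig& named rig is in scope:

    // Safe from any thread; Rig picks the correct pose set internally.
    int leftEyeIndex = rig.indexOfJoint("LeftEye");   // -1 if the skeleton lacks the joint
    bool overridden = (leftEyeIndex >= 0) && rig.getIsJointOverridden(leftEyeIndex);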

Rig.h
@@ -135,7 +135,7 @@ public:

    void initJointStates(const HFMModel& hfmModel, const glm::mat4& modelOffset);
    void reset(const HFMModel& hfmModel);
    bool jointStatesEmpty();
    bool jointStatesEmpty() const;
    int getJointStateCount() const;
    int indexOfJoint(const QString& jointName) const;
    QString nameOfJoint(int jointIndex) const;

@@ -163,6 +163,8 @@ public:
    void setJointTranslation(int index, bool valid, const glm::vec3& translation, float priority);
    void setJointRotation(int index, bool valid, const glm::quat& rotation, float priority);

    bool getIsJointOverridden(int jointIndex) const;

    // if translation and rotation is identity, position will be in rig space
    bool getJointPositionInWorldFrame(int jointIndex, glm::vec3& position,
                                      glm::vec3 translation, glm::quat rotation) const;

Avatar.cpp
@@ -108,11 +108,21 @@ void Avatar::setShowMyLookAtVectors(bool showMine) {
    showMyLookAtVectors = showMine;
}

static bool showMyLookAtTarget = false;
void Avatar::setShowMyLookAtTarget(bool showMine) {
    showMyLookAtTarget = showMine;
}

static bool showOtherLookAtVectors = false;
void Avatar::setShowOtherLookAtVectors(bool showOthers) {
    showOtherLookAtVectors = showOthers;
}

static bool showOtherLookAtTarget = false;
void Avatar::setShowOtherLookAtTarget(bool showOthers) {
    showOtherLookAtTarget = showOthers;
}

static bool showCollisionShapes = false;
void Avatar::setShowCollisionShapes(bool render) {
    showCollisionShapes = render;

@@ -711,6 +721,14 @@ void Avatar::updateRenderItem(render::Transaction& transaction) {

void Avatar::postUpdate(float deltaTime, const render::ScenePointer& scene) {

    if (isMyAvatar() ? showMyLookAtTarget : showOtherLookAtTarget) {
        glm::vec3 lookAtTarget = getHead()->getLookAtPosition();
        DebugDraw::getInstance().addMarker(QString("look-at-") + getID().toString(),
                                           glm::quat(), lookAtTarget, glm::vec4(1), 1.0f);
    } else {
        DebugDraw::getInstance().removeMarker(QString("look-at-") + getID().toString());
    }

    if (isMyAvatar() ? showMyLookAtVectors : showOtherLookAtVectors) {
        const float EYE_RAY_LENGTH = 10.0;
        const glm::vec4 BLUE(0.0f, 0.0f, _lookAtSnappingEnabled ? 1.0f : 0.25f, 1.0f);

Avatar.h
@@ -140,7 +140,9 @@ public:
    static void setShowAvatars(bool render);
    static void setShowReceiveStats(bool receiveStats);
    static void setShowMyLookAtVectors(bool showMine);
    static void setShowMyLookAtTarget(bool showMine);
    static void setShowOtherLookAtVectors(bool showOthers);
    static void setShowOtherLookAtTarget(bool showOthers);
    static void setShowCollisionShapes(bool render);
    static void setShowNamesAboveHeads(bool show);

Head.cpp
@@ -17,7 +17,6 @@
#include <DependencyManager.h>
#include <GeometryUtil.h>
#include <trackers/FaceTracker.h>
#include <trackers/EyeTracker.h>
#include <Rig.h>
#include "Logging.h"

@@ -58,7 +57,7 @@ void Head::simulate(float deltaTime) {
        _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
    }

    if (!_isEyeTrackerConnected) {
    if (getHasProceduralEyeMovement()) {
        // Update eye saccades
        const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
        const float AVERAGE_SACCADE_INTERVAL = 6.0f;

@@ -82,6 +81,7 @@ void Head::simulate(float deltaTime) {
    const float FULLY_OPEN = 0.0f;
    const float FULLY_CLOSED = 1.0f;
    if (getHasProceduralBlinkFaceMovement()) {
        // handle automatic blinks
        // Detect transition from talking to not; force blink after that and a delay
        bool forceBlink = false;
        const float TALKING_LOUDNESS = 150.0f;

@@ -129,7 +129,7 @@ void Head::simulate(float deltaTime) {
        _leftEyeBlink = FULLY_OPEN;
    }

    // use data to update fake Faceshift blendshape coefficients
    // use data to update fake Faceshift blendshape coefficients
    if (getHasAudioEnabledFaceMovement()) {
        // Update audio attack data for facial animation (eyebrows and mouth)
        float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz

@@ -152,7 +152,8 @@ void Head::simulate(float deltaTime) {
        _mouthTime = 0.0f;
    }

    FaceTracker::updateFakeCoefficients(_leftEyeBlink,
    FaceTracker::updateFakeCoefficients(
        _leftEyeBlink,
        _rightEyeBlink,
        _browAudioLift,
        _audioJawOpen,

@@ -162,6 +163,8 @@ void Head::simulate(float deltaTime) {
        _transientBlendshapeCoefficients);

    if (getHasProceduralEyeFaceMovement()) {
        // This controls two things, the eye brow and the upper eye lid, it is driven by the vertical up/down angle of the
        // eyes relative to the head. This is to try to help prevent sleepy eyes/crazy eyes.
        applyEyelidOffset(getOrientation());
    }

@@ -292,7 +295,7 @@ glm::quat Head::getFinalOrientationInLocalFrame() const {
}

// Everyone else's head keeps track of a lookAtPosition that everybody sees the same, and refers to where that head
// is looking in model space -- e.g., at someone's eyeball, or between their eyes, or mouth, etc. Everyon's Interface
// is looking in model space -- e.g., at someone's eyeball, or between their eyes, or mouth, etc. Everyone's Interface
// will have the same value for the lookAtPosition of any given head.
//
// Everyone else's head also keeps track of a correctedLookAtPosition that may be different for the same head within

SkeletonModel.cpp
@@ -93,19 +93,30 @@ void SkeletonModel::initJointStates() {
    emit skeletonLoaded();
}

glm::vec3 SkeletonModel::avoidCrossedEyes(const glm::vec3& lookAt) {
    // make sure lookAt is not too close to face (avoid crosseyes)
    glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
    float focusDistance = glm::length(focusOffset);
    const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
    if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
        return _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
    } else {
        return lookAt;
    }
}

// Called within Model::simulate call, below.
void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    assert(!_owningAvatar->isMyAvatar());

    Head* head = _owningAvatar->getHead();

    // make sure lookAt is not too close to face (avoid crosseyes)
    glm::vec3 lookAt = head->getCorrectedLookAtPosition();
    glm::vec3 focusOffset = lookAt - _owningAvatar->getHead()->getEyePosition();
    float focusDistance = glm::length(focusOffset);
    const float MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f;
    if (focusDistance < MIN_LOOK_AT_FOCUS_DISTANCE && focusDistance > EPSILON) {
        lookAt = _owningAvatar->getHead()->getEyePosition() + (MIN_LOOK_AT_FOCUS_DISTANCE / focusDistance) * focusOffset;
    bool eyePosesValid = !head->getHasProceduralEyeMovement();
    glm::vec3 lookAt;
    if (eyePosesValid) {
        lookAt = head->getLookAtPosition(); // don't apply no-crosseyes code etc when eyes are being tracked
    } else {
        lookAt = avoidCrossedEyes(head->getCorrectedLookAtPosition());
    }

    // no need to call Model::updateRig() because otherAvatars get their joint state
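Note: a quick worked example of the clamp that avoidCrossedEyes() factors out. With MIN_LOOK_AT_FOCUS_DISTANCE = 1.0f, a target closer than one meter is pushed out along its own direction to exactly one meter; targets at or beyond one meter (or degenerate ones within EPSILON of the eyes) pass through unchanged:

    // Worked example of the avoidCrossedEyes() scaling:
    glm::vec3 eyePosition(0.0f);              // assume the eyes sit at the origin
    glm::vec3 lookAt(0.0f, 0.0f, -0.2f);      // target 0.2 m in front of the face
    // focusDistance = 0.2 < 1.0, so the offset is rescaled by 1.0 / 0.2 = 5:
    // result = eyePosition + 5.0f * (lookAt - eyePosition) = (0, 0, -1), 1 m away.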

@@ -288,6 +299,15 @@ bool SkeletonModel::getEyeModelPositions(glm::vec3& firstEyePosition, glm::vec3&
    return false;
}

bool SkeletonModel::getIsJointOverridden(int jointIndex) const {
    // has this joint been set by a script?
    if (!isLoaded() || _rig.jointStatesEmpty()) {
        return false;
    }
    return _rig.getIsJointOverridden(jointIndex);
}

bool SkeletonModel::getEyePositions(glm::vec3& firstEyePosition, glm::vec3& secondEyePosition) const {
    if (getEyeModelPositions(firstEyePosition, secondEyePosition)) {
        firstEyePosition = _translation + _rotation * firstEyePosition;

@@ -352,4 +372,3 @@ bool SkeletonModel::hasSkeleton() {

void SkeletonModel::onInvalidate() {
}

SkeletonModel.h
@@ -37,9 +37,12 @@ public:
    void initJointStates() override;

    void simulate(float deltaTime, bool fullUpdate = true) override;
    glm::vec3 avoidCrossedEyes(const glm::vec3& lookAt);
    void updateRig(float deltaTime, glm::mat4 parentTransform) override;
    void updateAttitude(const glm::quat& orientation);

    bool getIsJointOverridden(int jointIndex) const;

    /// Returns the index of the left hand joint, or -1 if not found.
    int getLeftHandJointIndex() const { return isActive() ? _rig.indexOfJoint("LeftHand") : -1; }

AvatarData.cpp
@@ -245,9 +245,10 @@ QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail, bool dro
}

QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime,
                                   const QVector<JointData>& lastSentJointData,
                                   AvatarDataPacket::SendStatus& sendStatus, bool dropFaceTracking, bool distanceAdjust,
                                   glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, int maxDataSize, AvatarDataRate* outboundDataRateOut) const {
                                   const QVector<JointData>& lastSentJointData, AvatarDataPacket::SendStatus& sendStatus,
                                   bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
                                   QVector<JointData>* sentJointDataOut,
                                   int maxDataSize, AvatarDataRate* outboundDataRateOut) const {

    bool cullSmallChanges = (dataDetail == CullSmallData);
    bool sendAll = (dataDetail == SendAllData);

@@ -532,7 +533,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);
    }
    // eye tracker state
    if (_headData->_isEyeTrackerConnected) {
    if (!_headData->_hasProceduralEyeMovement) {
        setAtBit16(flags, IS_EYE_TRACKER_CONNECTED);
    }
    // referential state

@@ -1150,7 +1151,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
        + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);

    auto newFaceTrackerConnected = oneAtBit16(bitItems, IS_FACE_TRACKER_CONNECTED);
    auto newEyeTrackerConnected = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);
    auto newHasntProceduralEyeMovement = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);

    auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
    auto newHasProceduralEyeFaceMovement = oneAtBit16(bitItems, PROCEDURAL_EYE_FACE_MOVEMENT);

@@ -1161,7 +1162,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    bool keyStateChanged = (_keyState != newKeyState);
    bool handStateChanged = (_handState != newHandState);
    bool faceStateChanged = (_headData->_isFaceTrackerConnected != newFaceTrackerConnected);
    bool eyeStateChanged = (_headData->_isEyeTrackerConnected != newEyeTrackerConnected);
    bool eyeStateChanged = (_headData->_hasProceduralEyeMovement == newHasntProceduralEyeMovement);
    bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
    bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
    bool proceduralBlinkFaceMovementChanged = (_headData->getHasProceduralBlinkFaceMovement() != newHasProceduralBlinkFaceMovement);
@@ -1174,7 +1175,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    _keyState = newKeyState;
    _handState = newHandState;
    _headData->_isFaceTrackerConnected = newFaceTrackerConnected;
-   _headData->_isEyeTrackerConnected = newEyeTrackerConnected;
+   _headData->setHasProceduralEyeMovement(!newHasntProceduralEyeMovement);
    _headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
    _headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
    _headData->setHasProceduralBlinkFaceMovement(newHasProceduralBlinkFaceMovement);
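Note that the wire format reuses the existing IS_EYE_TRACKER_CONNECTED bit rather than adding a new one: the sender now sets it when procedural eye movement is disabled, and the receiver negates it back via setHasProceduralEyeMovement(). A minimal standalone sketch of that round trip (the flag helpers and bit position are stand-ins, not the real AvatarDataPacket code):

#include <cassert>
#include <cstdint>

// Stand-in for the packet helpers: the real code uses setAtBit16()/oneAtBit16()
// on the packed flags; the bit position here is illustrative, not the wire value.
constexpr uint16_t IS_EYE_TRACKER_CONNECTED_BIT = 1 << 5;

int main() {
    bool hasProceduralEyeMovement = false; // e.g. real tracked-eye data is driving the eyes

    // Sender (toByteArray): the legacy "eye tracker connected" bit now means
    // "procedural eye movement is disabled".
    uint16_t flags = 0;
    if (!hasProceduralEyeMovement) {
        flags |= IS_EYE_TRACKER_CONNECTED_BIT;
    }

    // Receiver (parseDataFromBuffer): negate the bit to recover the state.
    bool newHasntProceduralEyeMovement = (flags & IS_EYE_TRACKER_CONNECTED_BIT) != 0;
    assert(hasProceduralEyeMovement == !newHasntProceduralEyeMovement);
    return 0;
}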
@@ -196,3 +196,40 @@ void HeadData::fromJson(const QJsonObject& json) {
        setHeadOrientation(quatFromJsonValue(json[JSON_AVATAR_HEAD_ROTATION]));
    }
}
+
+bool HeadData::getHasProceduralEyeFaceMovement() const {
+    return _hasProceduralEyeFaceMovement;
+}
+
+void HeadData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
+    _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
+}
+
+bool HeadData::getHasProceduralBlinkFaceMovement() const {
+    // return _hasProceduralBlinkFaceMovement;
+    return _hasProceduralBlinkFaceMovement && !_isFaceTrackerConnected;
+}
+
+void HeadData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
+    _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
+}
+
+bool HeadData::getHasAudioEnabledFaceMovement() const {
+    return _hasAudioEnabledFaceMovement;
+}
+
+void HeadData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
+    _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
+}
+
+bool HeadData::getHasProceduralEyeMovement() const {
+    return _hasProceduralEyeMovement;
+}
+
+void HeadData::setHasProceduralEyeMovement(bool hasProceduralEyeMovement) {
+    _hasProceduralEyeMovement = hasProceduralEyeMovement;
+}
+
+void HeadData::setFaceTrackerConnected(bool value) {
+    _isFaceTrackerConnected = value;
+}
@@ -72,23 +72,17 @@ public:
    }
    bool lookAtPositionChangedSince(quint64 time) { return _lookAtPositionChanged >= time; }

-   bool getHasProceduralEyeFaceMovement() const { return _hasProceduralEyeFaceMovement; }
+   bool getHasProceduralEyeFaceMovement() const;
+   void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
+   bool getHasProceduralBlinkFaceMovement() const;
+   void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
+   bool getHasAudioEnabledFaceMovement() const;
+   void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
+   bool getHasProceduralEyeMovement() const;
+   void setHasProceduralEyeMovement(bool hasProceduralEyeMovement);
-
-   void setHasProceduralEyeFaceMovement(const bool hasProceduralEyeFaceMovement) {
-       _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
-   }
-
-   bool getHasProceduralBlinkFaceMovement() const { return _hasProceduralBlinkFaceMovement; }
-
-   void setHasProceduralBlinkFaceMovement(const bool hasProceduralBlinkFaceMovement) {
-       _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
-   }
-
-   bool getHasAudioEnabledFaceMovement() const { return _hasAudioEnabledFaceMovement; }
-
-   void setHasAudioEnabledFaceMovement(const bool hasAudioEnabledFaceMovement) {
-       _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
-   }
+   void setFaceTrackerConnected(bool value);
+   bool getFaceTrackerConnected() const { return _isFaceTrackerConnected; }

    friend class AvatarData;
@@ -107,8 +101,10 @@ protected:
    bool _hasAudioEnabledFaceMovement { true };
    bool _hasProceduralBlinkFaceMovement { true };
    bool _hasProceduralEyeFaceMovement { true };
+   bool _hasProceduralEyeMovement { true };

    bool _isFaceTrackerConnected { false };
    bool _isEyeTrackerConnected { false };
+
    float _leftEyeBlink { 0.0f };
    float _rightEyeBlink { 0.0f };
    float _averageLoudness { 0.0f };
@@ -347,6 +347,10 @@ namespace controller {
            makePosePair(Action::HIPS, "Hips"),
            makePosePair(Action::SPINE2, "Spine2"),
            makePosePair(Action::HEAD, "Head"),
+           makePosePair(Action::LEFT_EYE, "LeftEye"),
+           makePosePair(Action::RIGHT_EYE, "RightEye"),
+           makeAxisPair(Action::LEFT_EYE_BLINK, "LeftEyeBlink"),
+           makeAxisPair(Action::RIGHT_EYE_BLINK, "RightEyeBlink"),

            makePosePair(Action::LEFT_HAND_THUMB1, "LeftHandThumb1"),
            makePosePair(Action::LEFT_HAND_THUMB2, "LeftHandThumb2"),
@@ -181,6 +181,11 @@ enum class Action {
    TRACKED_OBJECT_15,
    SPRINT,

+   LEFT_EYE,
+   RIGHT_EYE,
+   LEFT_EYE_BLINK,
+   RIGHT_EYE_BLINK,
+
    NUM_ACTIONS
};
@@ -12,10 +12,13 @@

namespace controller {

-   AxisValue::AxisValue(const float value, const quint64 timestamp) :
-       value(value), timestamp(timestamp) { }
+   AxisValue::AxisValue(const float value, const quint64 timestamp, bool valid) :
+       value(value), timestamp(timestamp), valid(valid) {
+   }

    bool AxisValue::operator==(const AxisValue& right) const {
-       return value == right.value && timestamp == right.timestamp;
+       return value == right.value &&
+           timestamp == right.timestamp &&
+           valid == right.valid;
    }
}
@@ -21,14 +21,14 @@ namespace controller {
    float value { 0.0f };
    // The value can be timestamped to determine if consecutive identical values should be output (e.g., mouse movement).
    quint64 timestamp { 0 };
+   bool valid { false };

    AxisValue() {}
-   AxisValue(const float value, const quint64 timestamp);
+   AxisValue(const float value, const quint64 timestamp, bool valid = true);

    bool operator ==(const AxisValue& right) const;
    bool operator !=(const AxisValue& right) const { return !(*this == right); }
};

}

#endif // hifi_controllers_AxisValue_h
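Taken together, the AxisValue changes mean a default-constructed value is explicitly invalid, an explicitly constructed one is valid unless stated otherwise, and equality now compares all three fields. A standalone sketch mirroring the struct (quint64 swapped for uint64_t so it builds without Qt):

#include <cassert>
#include <cstdint>

// Minimal standalone mirror of controller::AxisValue, for illustration only.
struct AxisValue {
    float value { 0.0f };
    uint64_t timestamp { 0 };
    bool valid { false };

    AxisValue() {}
    AxisValue(float value, uint64_t timestamp, bool valid = true)
        : value(value), timestamp(timestamp), valid(valid) {}

    bool operator==(const AxisValue& right) const {
        return value == right.value && timestamp == right.timestamp && valid == right.valid;
    }
};

int main() {
    AxisValue unset;              // default-constructed values are invalid
    AxisValue blink(0.75f, 1234); // explicit values default to valid
    assert(!unset.valid);
    assert(blink.valid);
    assert(!(unset == AxisValue(0.0f, 0, true))); // validity now participates in equality
    return 0;
}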
@@ -77,13 +77,13 @@ namespace controller {
            return { getButton(channel), 0 };

        case ChannelType::POSE:
-           return { getPose(channel).valid ? 1.0f : 0.0f, 0 };
+           return { getPose(channel).valid ? 1.0f : 0.0f, 0, getPose(channel).valid };

        default:
            break;
    }

-   return { 0.0f, 0 };
+   return { 0.0f, 0, false };
}

AxisValue InputDevice::getValue(const Input& input) const {
@@ -354,6 +354,10 @@ Input::NamedVector StandardController::getAvailableInputs() const {
        makePair(HIPS, "Hips"),
        makePair(SPINE2, "Spine2"),
        makePair(HEAD, "Head"),
+       makePair(LEFT_EYE, "LeftEye"),
+       makePair(RIGHT_EYE, "RightEye"),
+       makePair(LEFT_EYE_BLINK, "LeftEyeBlink"),
+       makePair(RIGHT_EYE_BLINK, "RightEyeBlink"),

        // Aliases, PlayStation style names
        makePair(LB, "L1"),
@@ -90,6 +90,8 @@ namespace controller {
        // Grips
        LEFT_GRIP,
        RIGHT_GRIP,
+       LEFT_EYE_BLINK,
+       RIGHT_EYE_BLINK,
        NUM_STANDARD_AXES,
        LZ = LT,
        RZ = RT

@@ -174,6 +176,8 @@ namespace controller {
        TRACKED_OBJECT_13,
        TRACKED_OBJECT_14,
        TRACKED_OBJECT_15,
+       LEFT_EYE,
+       RIGHT_EYE,
        NUM_STANDARD_POSES
    };
@@ -256,6 +256,9 @@ void UserInputMapper::update(float deltaTime) {
    for (auto& channel : _actionStates) {
        channel = 0.0f;
    }
+   for (unsigned int i = 0; i < _actionStatesValid.size(); i++) {
+       _actionStatesValid[i] = true;
+   }

    for (auto& channel : _poseStates) {
        channel = Pose();

@@ -1233,5 +1236,17 @@ void UserInputMapper::disableMapping(const Mapping::Pointer& mapping) {
    }
}

+void UserInputMapper::setActionState(Action action, float value, bool valid) {
+    _actionStates[toInt(action)] = value;
+    _actionStatesValid[toInt(action)] = valid;
+}
+
+void UserInputMapper::deltaActionState(Action action, float delta, bool valid) {
+    _actionStates[toInt(action)] += delta;
+    bool wasValid = _actionStatesValid[toInt(action)];
+    _actionStatesValid[toInt(action)] = wasValid & valid;
+}
+
}
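The companion _actionStatesValid vector gives every action a validity flag alongside its float state: update() resets each flag to true at the start of the frame, setActionState() overwrites it, and deltaActionState() AND-accumulates it, so a single invalid contributor (for example, an eye-blink axis with no tracker behind it) marks the whole action invalid for that frame. A minimal standalone sketch of the accumulation rule, with plain vectors standing in for the members:

#include <cassert>
#include <vector>

int main() {
    // Stand-ins for _actionStates/_actionStatesValid, reset as update() does each frame.
    std::vector<float> actionStates(4, 0.0f);
    std::vector<bool> actionStatesValid(4, true);

    auto deltaActionState = [&](int action, float delta, bool valid) {
        actionStates[action] += delta;
        // One invalid source poisons the action for the rest of the frame.
        actionStatesValid[action] = actionStatesValid[action] && valid;
    };

    deltaActionState(2, 0.5f, true);
    deltaActionState(2, 0.25f, false); // e.g. a route whose device has no valid data
    assert(actionStates[2] == 0.75f);
    assert(!actionStatesValid[2]);
    return 0;
}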
@@ -82,13 +82,14 @@ namespace controller {
        QString getActionName(Action action) const;
        QString getStandardPoseName(uint16_t pose);
        float getActionState(Action action) const { return _actionStates[toInt(action)]; }
+       bool getActionStateValid(Action action) const { return _actionStatesValid[toInt(action)]; }
        Pose getPoseState(Action action) const;
        int findAction(const QString& actionName) const;
        QVector<QString> getActionNames() const;
        Input inputFromAction(Action action) const { return getActionInputs()[toInt(action)].first; }

-       void setActionState(Action action, float value) { _actionStates[toInt(action)] = value; }
-       void deltaActionState(Action action, float delta) { _actionStates[toInt(action)] += delta; }
+       void setActionState(Action action, float value, bool valid = true);
+       void deltaActionState(Action action, float delta, bool valid = true);
        void setActionState(Action action, const Pose& value) { _poseStates[toInt(action)] = value; }
        bool triggerHapticPulse(float strength, float duration, controller::Hand hand);
        bool triggerHapticPulseOnDevice(uint16 deviceID, float strength, float duration, controller::Hand hand);

@@ -146,6 +147,7 @@ namespace controller {
        std::vector<float> _actionStates = std::vector<float>(toInt(Action::NUM_ACTIONS), 0.0f);
        std::vector<float> _actionScales = std::vector<float>(toInt(Action::NUM_ACTIONS), 1.0f);
        std::vector<float> _lastActionStates = std::vector<float>(toInt(Action::NUM_ACTIONS), 0.0f);
+       std::vector<bool> _actionStatesValid = std::vector<bool>(toInt(Action::NUM_ACTIONS), false);
        std::vector<Pose> _poseStates = std::vector<Pose>(toInt(Action::NUM_ACTIONS));
        std::vector<AxisValue> _lastStandardStates = std::vector<AxisValue>();

@@ -167,7 +169,7 @@ namespace controller {
        ConditionalPointer conditionalFor(const QJSValue& endpoint);
        ConditionalPointer conditionalFor(const QScriptValue& endpoint);
        ConditionalPointer conditionalFor(const Input& endpoint) const;


        MappingPointer parseMapping(const QJsonValue& json);
        RoutePointer parseRoute(const QJsonValue& value);
        EndpointPointer parseDestination(const QJsonValue& value);
@@ -100,7 +100,7 @@ namespace controller {
            _currentPose = value;
        }
    protected:
-       AxisValue _currentValue { 0.0f, 0 };
+       AxisValue _currentValue { 0.0f, 0, false };
        Pose _currentPose {};
    };
@@ -26,7 +26,7 @@ void ActionEndpoint::apply(AxisValue newValue, const Pointer& source) {
    _currentValue.value += newValue.value;

    if (_input != Input::INVALID_INPUT) {
-       userInputMapper->deltaActionState(Action(_input.getChannel()), newValue.value);
+       userInputMapper->deltaActionState(Action(_input.getChannel()), newValue.value, newValue.valid);
    }
}
@@ -32,7 +32,7 @@ public:
    virtual void reset() override;

private:
-   AxisValue _currentValue { 0.0f, 0 };
+   AxisValue _currentValue { 0.0f, 0, false };
    Pose _currentPose{};
};
@@ -27,7 +27,9 @@ bool CompositeEndpoint::readable() const {
AxisValue CompositeEndpoint::peek() const {
    auto negative = first->peek();
    auto positive = second->peek();
-   auto result = AxisValue(positive.value - negative.value, std::max(positive.timestamp, negative.timestamp));
+   auto result = AxisValue(positive.value - negative.value,
+       std::max(positive.timestamp, negative.timestamp),
+       negative.valid && positive.valid);
    return result;
}

@@ -35,7 +37,9 @@ AxisValue CompositeEndpoint::peek() const {
AxisValue CompositeEndpoint::value() {
    auto negative = first->value();
    auto positive = second->value();
-   auto result = AxisValue(positive.value - negative.value, std::max(positive.timestamp, negative.timestamp));
+   auto result = AxisValue(positive.value - negative.value,
+       std::max(positive.timestamp, negative.timestamp),
+       negative.valid && positive.valid);
    return result;
}
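A composite endpoint folds a negative/positive pair into one axis; with the new field, its result is valid only when both halves are. A standalone sketch of the combine rule (the AxisValue here is a minimal mirror, not the controllers header):

#include <algorithm>
#include <cassert>
#include <cstdint>

// Minimal mirror of controller::AxisValue for illustration.
struct AxisValue {
    float value;
    uint64_t timestamp;
    bool valid;
};

// Sketch of the rule above: positive minus negative, newest timestamp,
// and valid only when both halves are valid.
AxisValue combine(const AxisValue& negative, const AxisValue& positive) {
    return { positive.value - negative.value,
             std::max(positive.timestamp, negative.timestamp),
             negative.valid && positive.valid };
}

int main() {
    AxisValue result = combine({ 0.25f, 10, true }, { 1.0f, 12, false });
    assert(result.value == 0.75f);
    assert(result.timestamp == 12);
    assert(!result.valid); // one stale half invalidates the pair
    return 0;
}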
@@ -16,7 +16,7 @@ using namespace controller;

AxisValue InputEndpoint::peek() const {
    if (isPose()) {
-       return peekPose().valid ? AxisValue(1.0f, 0) : AxisValue(0.0f, 0);
+       return peekPose().valid ? AxisValue(1.0f, 0) : AxisValue(0.0f, 0, false);
    }
    auto userInputMapper = DependencyManager::get<UserInputMapper>();
    auto deviceProxy = userInputMapper->getDevice(_input);
@@ -41,7 +41,7 @@ protected:
private:
    QScriptValue _callable;
    float _lastValueRead { 0.0f };
-   AxisValue _lastValueWritten { 0.0f, 0 };
+   AxisValue _lastValueWritten { 0.0f, 0, false };

    bool _returnPose { false };
    Pose _lastPoseRead;
@@ -19,7 +19,7 @@ class ClampFilter : public Filter {
public:
    ClampFilter(float min = 0.0, float max = 1.0) : _min(min), _max(max) {};
    virtual AxisValue apply(AxisValue value) const override {
-       return { glm::clamp(value.value, _min, _max), value.timestamp };
+       return { glm::clamp(value.value, _min, _max), value.timestamp, value.valid };
    }

    virtual Pose apply(Pose value) const override { return value; }
@@ -20,7 +20,7 @@ public:
    ConstrainToIntegerFilter() = default;

    virtual AxisValue apply(AxisValue value) const override {
-       return { glm::sign(value.value), value.timestamp };
+       return { glm::sign(value.value), value.timestamp, value.valid };
    }

    virtual Pose apply(Pose value) const override { return value; }
@@ -20,7 +20,7 @@ public:
    ConstrainToPositiveIntegerFilter() = default;

    virtual AxisValue apply(AxisValue value) const override {
-       return { (value.value <= 0.0f) ? 0.0f : 1.0f, value.timestamp };
+       return { (value.value <= 0.0f) ? 0.0f : 1.0f, value.timestamp, value.valid };
    }

    virtual Pose apply(Pose value) const override { return value; }
@@ -18,7 +18,7 @@ AxisValue DeadZoneFilter::apply(AxisValue value) const {
    if (magnitude < _min) {
        return { 0.0f, value.timestamp };
    }
-   return { (magnitude - _min) * scale, value.timestamp };
+   return { (magnitude - _min) * scale, value.timestamp, value.valid };
}

bool DeadZoneFilter::parseParameters(const QJsonValue& parameters) {
@@ -19,7 +19,6 @@ HysteresisFilter::HysteresisFilter(float min, float max) : _min(min), _max(max)
    }
};
-

AxisValue HysteresisFilter::apply(AxisValue value) const {
    if (_signaled) {
        if (value.value <= _min) {

@@ -30,7 +29,7 @@ AxisValue HysteresisFilter::apply(AxisValue value) const {
            _signaled = true;
        }
    }
-   return { _signaled ? 1.0f : 0.0f, value.timestamp };
+   return { _signaled ? 1.0f : 0.0f, value.timestamp, value.valid };
}

bool HysteresisFilter::parseParameters(const QJsonValue& parameters) {
@@ -6,5 +6,5 @@ NotFilter::NotFilter() {
}

AxisValue NotFilter::apply(AxisValue value) const {
-   return { (value.value == 0.0f) ? 1.0f : 0.0f, value.timestamp };
+   return { (value.value == 0.0f) ? 1.0f : 0.0f, value.timestamp, value.valid };
}
@@ -29,7 +29,7 @@ AxisValue PulseFilter::apply(AxisValue value) const {
        _lastEmitTime = DEFAULT_LAST_EMIT_TIME;
    }

-   return { result, value.timestamp };
+   return { result, value.timestamp, value.valid };
}

bool PulseFilter::parseParameters(const QJsonValue& parameters) {
@@ -23,7 +23,7 @@ public:
    ScaleFilter(float scale) : _scale(scale) {}

    virtual AxisValue apply(AxisValue value) const override {
-       return { value.value * _scale, value.timestamp };
+       return { value.value * _scale, value.timestamp, value.valid };
    }

    virtual Pose apply(Pose value) const override {
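All of the scalar filters above follow the same pattern: transform value, keep timestamp, and forward value.valid untouched, so validity survives an arbitrary filter chain. A standalone sketch of two such filters composed (again with a minimal AxisValue mirror; std::clamp requires C++17):

#include <algorithm>
#include <cassert>
#include <cstdint>

struct AxisValue {
    float value;
    uint64_t timestamp;
    bool valid;
};

// Sketches of two of the filters above; each transforms value and passes valid through.
AxisValue clampFilter(AxisValue v, float min, float max) {
    return { std::clamp(v.value, min, max), v.timestamp, v.valid };
}
AxisValue scaleFilter(AxisValue v, float scale) {
    return { v.value * scale, v.timestamp, v.valid };
}

int main() {
    AxisValue in { 1.8f, 42, false };
    AxisValue out = scaleFilter(clampFilter(in, 0.0f, 1.0f), 0.5f);
    assert(out.value == 0.5f);
    assert(!out.valid); // an invalid sample stays invalid through the whole chain
    return 0;
}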
@@ -393,7 +393,7 @@ void AnimDebugDraw::update() {
            glm::quat rot = std::get<0>(iter.second);
            glm::vec3 pos = std::get<1>(iter.second);
            glm::vec4 color = std::get<2>(iter.second);
-           const float radius = POSE_RADIUS;
+           const float radius = std::get<3>(iter.second) * POSE_RADIUS;
            addBone(AnimPose::identity, AnimPose(glm::vec3(1), rot, pos), radius, color, v);
        }

@@ -402,7 +402,7 @@ void AnimDebugDraw::update() {
            glm::quat rot = std::get<0>(iter.second);
            glm::vec3 pos = std::get<1>(iter.second);
            glm::vec4 color = std::get<2>(iter.second);
-           const float radius = POSE_RADIUS;
+           const float radius = std::get<3>(iter.second) * POSE_RADIUS;
            addBone(myAvatarPose, AnimPose(glm::vec3(1), rot, pos), radius, color, v);
        }
@@ -31,9 +31,10 @@ void DebugDraw::drawRay(const glm::vec3& start, const glm::vec3& end, const glm:
    _rays.push_back(Ray(start, end, color));
}

-void DebugDraw::addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
+void DebugDraw::addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position,
+                          const glm::vec4& color, float size) {
    Lock lock(_mapMutex);
-   _markers[key] = MarkerInfo(rotation, position, color);
+   _markers[key] = MarkerInfo(rotation, position, color, size);
}

void DebugDraw::removeMarker(const QString& key) {

@@ -41,9 +42,10 @@ void DebugDraw::removeMarker(const QString& key) {
    _markers.erase(key);
}

-void DebugDraw::addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color) {
+void DebugDraw::addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position,
+                                  const glm::vec4& color, float size) {
    Lock lock(_mapMutex);
-   _myAvatarMarkers[key] = MarkerInfo(rotation, position, color);
+   _myAvatarMarkers[key] = MarkerInfo(rotation, position, color, size);
}

void DebugDraw::removeMyAvatarMarker(const QString& key) {

@@ -83,4 +85,4 @@ void DebugDraw::drawRays(const std::vector<std::pair<glm::vec3, glm::vec3>>& lin
        auto point2 = translation + rotation * line.second;
        _rays.push_back(Ray(point1, point2, color));
    }
}
}
@@ -95,19 +95,22 @@ public:
     * @param {Quat} rotation - The orientation of the marker in world coordinates.
     * @param {Vec3} position - The position of the market in world coordinates.
     * @param {Vec4} color - The color of the marker.
+    * @param {float} size - A float between 0.0 and 1.0 (10 cm) to control the size of the marker.
     * @example <caption>Briefly draw a debug marker in front of your avatar, in world coordinates.</caption>
     * var MARKER_NAME = "my marker";
     * DebugDraw.addMarker(
     *     MARKER_NAME,
     *     Quat.ZERO,
     *     Vec3.sum(MyAvatar.position, Vec3.multiplyQbyV(MyAvatar.orientation, { x: 0, y: 0, z: -5})),
-    *     { red: 255, green: 0, blue: 0 }
+    *     { red: 255, green: 0, blue: 0 },
+    *     1.0
     * );
     * Script.setTimeout(function () {
     *     DebugDraw.removeMarker(MARKER_NAME);
     * }, 5000);
     */
-   Q_INVOKABLE void addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
+   Q_INVOKABLE void addMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position,
+                              const glm::vec4& color, float size = 1.0f);

    /**jsdoc
     * Removes a debug marker that was added in world coordinates.

@@ -125,19 +128,22 @@ public:
     * @param {Quat} rotation - The orientation of the marker in avatar coordinates.
     * @param {Vec3} position - The position of the market in avatar coordinates.
     * @param {Vec4} color - color of the marker.
+    * @param {float} size - A float between 0.0 and 1.0 (10 cm) to control the size of the marker.
     * @example <caption>Briefly draw a debug marker in front of your avatar, in avatar coordinates.</caption>
     * var MARKER_NAME = "My avatar marker";
     * DebugDraw.addMyAvatarMarker(
     *     MARKER_NAME,
     *     Quat.ZERO,
     *     { x: 0, y: 0, z: -5 },
-    *     { red: 255, green: 0, blue: 0 }
+    *     { red: 255, green: 0, blue: 0 },
+    *     1.0
     * );
     * Script.setTimeout(function () {
     *     DebugDraw.removeMyAvatarMarker(MARKER_NAME);
     * }, 5000);
     */
-   Q_INVOKABLE void addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position, const glm::vec4& color);
+   Q_INVOKABLE void addMyAvatarMarker(const QString& key, const glm::quat& rotation, const glm::vec3& position,
+                                      const glm::vec4& color, float size = 1.0f);

    /**jsdoc
     * Removes a debug marker that was added in avatar coordinates.

@@ -146,7 +152,7 @@ public:
     */
    Q_INVOKABLE void removeMyAvatarMarker(const QString& key);

-   using MarkerInfo = std::tuple<glm::quat, glm::vec3, glm::vec4>;
+   using MarkerInfo = std::tuple<glm::quat, glm::vec3, glm::vec4, float>;
    using MarkerMap = std::map<QString, MarkerInfo>;
    using Ray = std::tuple<glm::vec3, glm::vec3, glm::vec4>;
    using Rays = std::vector<Ray>;
@@ -547,6 +547,28 @@ bool doLineSegmentsIntersect(glm::vec2 r1p1, glm::vec2 r1p2, glm::vec2 r2p1, glm
        (d4 == 0 && isOnSegment(r1p1.x, r1p1.y, r1p2.x, r1p2.y, r2p2.x, r2p2.y));
}

+bool findClosestApproachOfLines(glm::vec3 p1, glm::vec3 d1, glm::vec3 p2, glm::vec3 d2,
+                                // return values...
+                                float& t1, float& t2) {
+    // https://math.stackexchange.com/questions/1993953/closest-points-between-two-lines/1993990#1993990
+    // https://en.wikipedia.org/wiki/Skew_lines#Nearest_Points
+    glm::vec3 n1 = glm::cross(d1, glm::cross(d2, d1));
+    glm::vec3 n2 = glm::cross(d2, glm::cross(d1, d2));
+
+    float denom1 = glm::dot(d1, n2);
+    float denom2 = glm::dot(d2, n1);
+
+    if (denom1 != 0.0f && denom2 != 0.0f) {
+        t1 = glm::dot((p2 - p1), n2) / denom1;
+        t2 = glm::dot((p1 - p2), n1) / denom2;
+        return true;
+    } else {
+        t1 = 0.0f;
+        t2 = 0.0f;
+        return false;
+    }
+}
+
bool isOnSegment(float xi, float yi, float xj, float yj, float xk, float yk) {
    return (xi <= xk || xj <= xk) && (xk <= xi || xk <= xj) &&
        (yi <= yk || yj <= yk) && (yk <= yi || yk <= yj);

@@ -1813,4 +1835,4 @@ bool solve_quartic(float a, float b, float c, float d, glm::vec4& roots) {

bool computeRealQuarticRoots(float a, float b, float c, float d, float e, glm::vec4& roots) {
    return solve_quartic(b / a, c / a, d / a, e / a, roots);
}

@@ -150,6 +150,7 @@ int clipTriangleWithPlane(const Triangle& triangle, const Plane& plane, Triangle
int clipTriangleWithPlanes(const Triangle& triangle, const Plane* planes, int planeCount, Triangle* clippedTriangles, int maxClippedTriangleCount);

bool doLineSegmentsIntersect(glm::vec2 r1p1, glm::vec2 r1p2, glm::vec2 r2p1, glm::vec2 r2p2);
+bool findClosestApproachOfLines(glm::vec3 p1, glm::vec3 d1, glm::vec3 p2, glm::vec3 d2, float& t1, float& t2);
bool isOnSegment(float xi, float yi, float xj, float yj, float xk, float yk);
int computeDirection(float xi, float yi, float xj, float yj, float xk, float yk);
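For context, findClosestApproachOfLines() is the textbook nearest-points construction for two lines x = p_1 + t_1 d_1 and x = p_2 + t_2 d_2, the same one given by the Wikipedia page cited in the code. In the code's notation:

n_1 = d_1 \times (d_2 \times d_1), \qquad n_2 = d_2 \times (d_1 \times d_2),

t_1 = \frac{(p_2 - p_1) \cdot n_2}{d_1 \cdot n_2}, \qquad t_2 = \frac{(p_1 - p_2) \cdot n_1}{d_2 \cdot n_1}.

Both denominators vanish exactly when d_1 and d_2 are parallel, which is the branch that zeroes t_1 and t_2 and returns false.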
@@ -1,307 +0,0 @@
-//
-// Created by David Rowe on 27 Jul 2015.
-// Copyright 2015 High Fidelity, Inc.
-//
-// Distributed under the Apache License, Version 2.0.
-// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#include "EyeTracker.h"
-
-#include <QFuture>
-#include <QtConcurrent/QtConcurrentRun>
-
-#include <SharedUtil.h>
-
-#include "Logging.h"
-#include <OctreeConstants.h>
-
-#ifdef HAVE_IVIEWHMD
-char* HIGH_FIDELITY_EYE_TRACKER_CALIBRATION = "HighFidelityEyeTrackerCalibration";
-#endif
-
-#ifdef HAVE_IVIEWHMD
-static void CALLBACK eyeTrackerCallback(smi_CallbackDataStruct* data) {
-    auto eyeTracker = DependencyManager::get<EyeTracker>();
-    if (eyeTracker) { // Guard against a few callbacks that continue to be received after smi_quit().
-        eyeTracker->processData(data);
-    }
-}
-#endif
-
-EyeTracker::~EyeTracker() {
-#ifdef HAVE_IVIEWHMD
-    if (_isStreaming) {
-        int result = smi_quit();
-        if (result != SMI_RET_SUCCESS) {
-            qCWarning(interfaceapp) << "Eye Tracker: Error terminating tracking:" << smiReturnValueToString(result);
-        }
-    }
-#endif
-}
-
-#ifdef HAVE_IVIEWHMD
-void EyeTracker::processData(smi_CallbackDataStruct* data) {
-    _lastProcessDataTimestamp = usecTimestampNow();
-
-    if (!_isEnabled) {
-        return;
-    }
-
-    if (data->type == SMI_SIMPLE_GAZE_SAMPLE) {
-        // Calculate the intersections of the left and right eye look-at vectors with a vertical plane along the monocular
-        // gaze direction. Average these positions to give the look-at point.
-        // If the eyes are parallel or diverged, gaze at a distant look-at point calculated the same as for non eye tracking.
-        // Line-plane intersection: https://en.wikipedia.org/wiki/Line%E2%80%93plane_intersection
-
-        smi_SampleHMDStruct* sample = (smi_SampleHMDStruct*)data->result;
-        // The iViewHMD coordinate system has x and z axes reversed compared to Interface, i.e., wearing the HMD:
-        // - x is left
-        // - y is up
-        // - z is forwards
-
-        // Plane
-        smi_Vec3d point = sample->gazeBasePoint; // mm
-        smi_Vec3d direction = sample->gazeDirection;
-        glm::vec3 planePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
-        glm::vec3 planeNormal = glm::vec3(-direction.z, 0.0f, direction.x);
-        glm::vec3 monocularDirection = glm::vec3(-direction.x, direction.y, -direction.z);
-
-        // Left eye
-        point = sample->left.gazeBasePoint; // mm
-        direction = sample->left.gazeDirection;
-        glm::vec3 leftLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
-        glm::vec3 leftLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
-
-        // Right eye
-        point = sample->right.gazeBasePoint; // mm
-        direction = sample->right.gazeDirection;
-        glm::vec3 rightLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
-        glm::vec3 rightLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
-
-        // Plane - line dot products
-        float leftLinePlaneDotProduct = glm::dot(leftLineDirection, planeNormal);
-        float rightLinePlaneDotProduct = glm::dot(rightLineDirection, planeNormal);
-
-        // Gaze into distance if eyes are parallel or diverged; otherwise the look-at is the average of look-at points
-        glm::vec3 lookAtPosition;
-        if (abs(leftLinePlaneDotProduct) <= FLT_EPSILON || abs(rightLinePlaneDotProduct) <= FLT_EPSILON) {
-            lookAtPosition = monocularDirection * (float)TREE_SCALE;
-        } else {
-            float leftDistance = glm::dot(planePoint - leftLinePoint, planeNormal) / leftLinePlaneDotProduct;
-            float rightDistance = glm::dot(planePoint - rightLinePoint, planeNormal) / rightLinePlaneDotProduct;
-            if (leftDistance <= 0.0f || rightDistance <= 0.0f
-                || leftDistance > (float)TREE_SCALE || rightDistance > (float)TREE_SCALE) {
-                lookAtPosition = monocularDirection * (float)TREE_SCALE;
-            } else {
-                glm::vec3 leftIntersectionPoint = leftLinePoint + leftDistance * leftLineDirection;
-                glm::vec3 rightIntersectionPoint = rightLinePoint + rightDistance * rightLineDirection;
-                lookAtPosition = (leftIntersectionPoint + rightIntersectionPoint) / 2.0f;
-            }
-        }
-
-        if (glm::isnan(lookAtPosition.x) || glm::isnan(lookAtPosition.y) || glm::isnan(lookAtPosition.z)) {
-            return;
-        }
-
-        _lookAtPosition = lookAtPosition;
-    }
-}
-#endif
-
-void EyeTracker::init() {
-    if (_isInitialized) {
-        qCWarning(trackers) << "Eye Tracker: Already initialized";
-        return;
-    }
-}
-
-#ifdef HAVE_IVIEWHMD
-int EyeTracker::startStreaming(bool simulate) {
-    return smi_startStreaming(simulate); // This call blocks execution.
-}
-#endif
-
-#ifdef HAVE_IVIEWHMD
-void EyeTracker::onStreamStarted() {
-    if (!_isInitialized) {
-        return;
-    }
-
-    int result = _startStreamingWatcher.result();
-    _isStreaming = (result == SMI_RET_SUCCESS);
-
-    if (result != SMI_RET_SUCCESS) {
-        qCWarning(interfaceapp) << "Eye Tracker: Error starting streaming:" << smiReturnValueToString(result);
-        // Display error dialog unless SMI SDK has already displayed an error message.
-        if (result != SMI_ERROR_HMD_NOT_SUPPORTED) {
-            OffscreenUi::asyncWarning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
-        }
-    } else {
-        qCDebug(interfaceapp) << "Eye Tracker: Started streaming";
-    }
-
-    if (_isStreaming) {
-        // Automatically load calibration if one has been saved.
-        QString availableCalibrations = QString(smi_getAvailableCalibrations());
-        if (availableCalibrations.contains(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION)) {
-            result = smi_loadCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
-            if (result != SMI_RET_SUCCESS) {
-                qCWarning(interfaceapp) << "Eye Tracker: Error loading calibration:" << smiReturnValueToString(result);
-                OffscreenUi::asyncWarning(nullptr, "Eye Tracker Error", "Error loading calibration"
-                    + smiReturnValueToString(result));
-            } else {
-                qCDebug(interfaceapp) << "Eye Tracker: Loaded calibration";
-            }
-        }
-    }
-}
-#endif
-
-void EyeTracker::setEnabled(bool enabled, bool simulate) {
-    if (enabled && !_isInitialized) {
-#ifdef HAVE_IVIEWHMD
-        int result = smi_setCallback(eyeTrackerCallback);
-        if (result != SMI_RET_SUCCESS) {
-            qCWarning(interfaceapp) << "Eye Tracker: Error setting callback:" << smiReturnValueToString(result);
-            OffscreenUi::asyncWarning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
-        } else {
-            _isInitialized = true;
-        }
-
-        connect(&_startStreamingWatcher, SIGNAL(finished()), this, SLOT(onStreamStarted()));
-#endif
-    }
-
-    if (!_isInitialized) {
-        return;
-    }
-
-#ifdef HAVE_IVIEWHMD
-    qCDebug(interfaceapp) << "Eye Tracker: Set enabled =" << enabled << ", simulate =" << simulate;
-
-    // There is no smi_stopStreaming() method and after an smi_quit(), streaming cannot be restarted (at least not for
-    // simulated data). So keep streaming once started in case tracking is re-enabled after stopping.
-
-    // Try to stop streaming if changing whether simulating or not.
-    if (enabled && _isStreaming && _isStreamSimulating != simulate) {
-        int result = smi_quit();
-        if (result != SMI_RET_SUCCESS) {
-            qCWarning(interfaceapp) << "Eye Tracker: Error stopping streaming:" << smiReturnValueToString(result);
-        }
-        _isStreaming = false;
-    }
-
-    if (enabled && !_isStreaming) {
-        // Start SMI streaming in a separate thread because it blocks.
-        QFuture<int> future = QtConcurrent::run(this, &EyeTracker::startStreaming, simulate);
-        _startStreamingWatcher.setFuture(future);
-        _isStreamSimulating = simulate;
-    }
-
-    _isEnabled = enabled;
-    _isSimulating = simulate;
-#endif
-}
-
-void EyeTracker::reset() {
-    // Nothing to do.
-}
-
-bool EyeTracker::isTracking() const {
-    static const quint64 ACTIVE_TIMEOUT_USECS = 2000000; // 2 secs
-    return _isEnabled && (usecTimestampNow() - _lastProcessDataTimestamp < ACTIVE_TIMEOUT_USECS);
-}
-
-#ifdef HAVE_IVIEWHMD
-void EyeTracker::calibrate(int points) {
-
-    if (!_isStreaming) {
-        qCWarning(interfaceapp) << "Eye Tracker: Cannot calibrate because not streaming";
-        return;
-    }
-
-    smi_CalibrationHMDStruct* calibrationHMDStruct;
-    smi_createCalibrationHMDStruct(&calibrationHMDStruct);
-
-    smi_CalibrationTypeEnum calibrationType;
-    switch (points) {
-        case 1:
-            calibrationType = SMI_ONE_POINT_CALIBRATION;
-            qCDebug(interfaceapp) << "Eye Tracker: One point calibration";
-            break;
-        case 3:
-            calibrationType = SMI_THREE_POINT_CALIBRATION;
-            qCDebug(interfaceapp) << "Eye Tracker: Three point calibration";
-            break;
-        case 5:
-            calibrationType = SMI_FIVE_POINT_CALIBRATION;
-            qCDebug(interfaceapp) << "Eye Tracker: Five point calibration";
-            break;
-        default:
-            qCWarning(interfaceapp) << "Eye Tracker: Invalid calibration specified";
-            return;
-    }
-
-    calibrationHMDStruct->type = calibrationType;
-    calibrationHMDStruct->backgroundColor->blue = 0.5;
-    calibrationHMDStruct->backgroundColor->green = 0.5;
-    calibrationHMDStruct->backgroundColor->red = 0.5;
-    calibrationHMDStruct->foregroundColor->blue = 1.0;
-    calibrationHMDStruct->foregroundColor->green = 1.0;
-    calibrationHMDStruct->foregroundColor->red = 1.0;
-
-    int result = smi_setupCalibration(calibrationHMDStruct);
-    if (result != SMI_RET_SUCCESS) {
-        qCWarning(interfaceapp) << "Eye Tracker: Error setting up calibration:" << smiReturnValueToString(result);
-        return;
-    } else {
-        result = smi_calibrate();
-        if (result != SMI_RET_SUCCESS) {
-            qCWarning(interfaceapp) << "Eye Tracker: Error performing calibration:" << smiReturnValueToString(result);
-        } else {
-            result = smi_saveCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
-            if (result != SMI_RET_SUCCESS) {
-                qCWarning(interfaceapp) << "Eye Tracker: Error saving calibration:" << smiReturnValueToString(result);
-            }
-        }
-    }
-
-    if (result != SMI_RET_SUCCESS) {
-        OffscreenUi::asyncWarning(nullptr, "Eye Tracker Error", "Calibration error: " + smiReturnValueToString(result));
-    }
-}
-#endif
-
-#ifdef HAVE_IVIEWHMD
-QString EyeTracker::smiReturnValueToString(int value) {
-    switch (value)
-    {
-        case smi_ErrorReturnValue::SMI_ERROR_NO_CALLBACK_SET:
-            return "No callback set";
-        case smi_ErrorReturnValue::SMI_ERROR_CONNECTING_TO_HMD:
-            return "Error connecting to HMD";
-        case smi_ErrorReturnValue::SMI_ERROR_HMD_NOT_SUPPORTED:
-            return "HMD not supported";
-        case smi_ErrorReturnValue::SMI_ERROR_NOT_IMPLEMENTED:
-            return "Not implmented";
-        case smi_ErrorReturnValue::SMI_ERROR_INVALID_PARAMETER:
-            return "Invalid parameter";
-        case smi_ErrorReturnValue::SMI_ERROR_EYECAMERAS_NOT_AVAILABLE:
-            return "Eye cameras not available";
-        case smi_ErrorReturnValue::SMI_ERROR_OCULUS_RUNTIME_NOT_SUPPORTED:
-            return "Oculus runtime not supported";
-        case smi_ErrorReturnValue::SMI_ERROR_FILE_NOT_FOUND:
-            return "File not found";
-        case smi_ErrorReturnValue::SMI_ERROR_FILE_EMPTY:
-            return "File empty";
-        case smi_ErrorReturnValue::SMI_ERROR_UNKNOWN:
-            return "Unknown error";
-        default:
-            QString number;
-            number.setNum(value);
-            return number;
-    }
-}
-#endif
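For reference, the gaze convergence in the removed EyeTracker::processData() is the standard line-plane intersection it links to: each eye ray x = l_0 + t\,\ell is intersected with the vertical plane through the monocular gaze ray (point p_0, normal n) at

t = \frac{(p_0 - l_0) \cdot n}{\ell \cdot n},

and the look-at point is the average of the left and right intersections. When either \ell \cdot n is near zero (parallel or diverged eyes), or an intersection falls behind the eye or beyond TREE_SCALE, the code falls back to a distant point along the monocular direction.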
@@ -1,68 +0,0 @@
-//
-// Created by David Rowe on 27 Jul 2015.
-// Copyright 2015 High Fidelity, Inc.
-//
-// Distributed under the Apache License, Version 2.0.
-// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#ifndef hifi_EyeTracker_h
-#define hifi_EyeTracker_h
-
-#include <QObject>
-#include <QFutureWatcher>
-
-#include <glm/glm.hpp>
-
-#include <DependencyManager.h>
-#ifdef HAVE_IVIEWHMD
-#include <iViewHMDAPI.h>
-#endif
-
-
-class EyeTracker : public QObject, public Dependency {
-    Q_OBJECT
-    SINGLETON_DEPENDENCY
-
-public:
-    ~EyeTracker();
-
-    void init();
-    void setEnabled(bool enabled, bool simulate);
-    void reset();
-
-    bool isInitialized() const { return _isInitialized; }
-    bool isEnabled() const { return _isEnabled; }
-    bool isTracking() const;
-    bool isSimulating() const { return _isSimulating; }
-
-    glm::vec3 getLookAtPosition() const { return _lookAtPosition; } // From mid eye point in head frame.
-
-#ifdef HAVE_IVIEWHMD
-    void processData(smi_CallbackDataStruct* data);
-
-    void calibrate(int points);
-
-    int startStreaming(bool simulate);
-
-private slots:
-    void onStreamStarted();
-#endif
-
-private:
-    QString smiReturnValueToString(int value);
-
-    bool _isInitialized = false;
-    bool _isEnabled = false;
-    bool _isSimulating = false;
-    bool _isStreaming = false;
-    bool _isStreamSimulating = false;
-
-    quint64 _lastProcessDataTimestamp;
-
-    glm::vec3 _lookAtPosition;
-
-    QFutureWatcher<int> _startStreamingWatcher;
-};
-
-#endif // hifi_EyeTracker_h
@@ -76,6 +76,8 @@ public:
                            int visionSqueezePerEye, float visionSqueezeGroundPlaneY,
                            float visionSqueezeSpotlightSize) override;

+   glm::mat4 getSensorResetMatrix() const { return _sensorResetMat; }
+
protected:
    bool internalActivate() override;
    void internalDeactivate() override;