Merge pull request #5458 from ctrlaltdavid/20639
CR for 20639 - Add SMI Rift eye tracker support
commit 9308e9df4f
19 changed files with 660 additions and 35 deletions
cmake/modules/FindiViewHMD.cmake (new file, 66 lines)
@@ -0,0 +1,66 @@
+#
+#  FindiViewHMD.cmake
+#
+#  Try to find the SMI iViewHMD eye tracker library
+#
+#  You must provide a IVIEWHMD_ROOT_DIR which contains 3rdParty, include, and libs directories
+#
+#  Once done this will define
+#
+#  IVIEWHMD_FOUND - system found iViewHMD
+#  IVIEWHMD_INCLUDE_DIRS - the iViewHMD include directory
+#  IVIEWHMD_LIBRARIES - link this to use iViewHMD
+#
+#  Created on 27 Jul 2015 by David Rowe
+#  Copyright 2015 High Fidelity, Inc.
+#
+
+if (WIN32)
+
+    include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
+    hifi_library_search_hints("iViewHMD")
+
+    find_path(IVIEWHMD_INCLUDE_DIRS iViewHMDAPI.h PATH_SUFFIXES include HINTS ${IVIEWHMD_SEARCH_DIRS})
+    find_library(IVIEWHMD_LIBRARIES NAMES iViewHMDAPI PATH_SUFFIXES libs HINTS ${IVIEWHMD_SEARCH_DIRS})
+    find_path(IVIEWHMD_API_DLL_PATH iViewHMDAPI.dll PATH_SUFFIXES libs HINTS ${IVIEWHMD_SEARCH_DIRS})
+    list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_API_DLL_PATH)
+
+    set(IVIEWHMD_DLLS
+        avcodec-53.dll
+        avformat-53.dll
+        avutil-51.dll
+        libboost_filesystem-mgw45-mt-1_49.dll
+        libboost_system-mgw45-mt-1_49.dll
+        libboost_thread-mgw45-mt-1_49.dll
+        libgcc_s_dw2-1.dll
+        libiViewNG-LibCore.dll
+        libopencv_calib3d244.dll
+        libopencv_core244.dll
+        libopencv_features2d244.dll
+        libopencv_flann244.dll
+        libopencv_highgui244.dll
+        libopencv_imgproc244.dll
+        libopencv_legacy244.dll
+        libopencv_ml244.dll
+        libopencv_video244.dll
+        libstdc++-6.dll
+        opencv_core220.dll
+        opencv_highgui220.dll
+        opencv_imgproc220.dll
+        swscale-2.dll
+    )
+
+    foreach(IVIEWHMD_DLL ${IVIEWHMD_DLLS})
+        find_path(IVIEWHMD_DLL_PATH ${IVIEWHMD_DLL} PATH_SUFFIXES 3rdParty HINTS ${IVIEWHMD_SEARCH_DIRS})
+        list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_DLL_PATH)
+        list(APPEND IVIEWHMD_DLL_PATHS ${IVIEWHMD_DLL_PATH})
+    endforeach()
+
+    include(FindPackageHandleStandardArgs)
+    find_package_handle_standard_args(IVIEWHMD DEFAULT_MSG ${IVIEWHMD_REQUIREMENTS})
+
+    add_paths_to_fixup_libs(${IVIEWHMD_API_DLL_PATH} ${IVIEWHMD_DLL_PATHS})
+
+    mark_as_advanced(IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_SEARCH_DIRS)
+
+endif()
interface/CMakeLists.txt
@@ -2,7 +2,7 @@ set(TARGET_NAME interface)
 project(${TARGET_NAME})
 
 # set a default root dir for each of our optional externals if it was not passed
-set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient")
+set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient" "iViewHMD")
 foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
   string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
   if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)
interface/external/iViewHMD/readme.txt (vendored, new file, 14 lines)
@@ -0,0 +1,14 @@
+
+Instructions for adding SMI HMD Eye Tracking to Interface on Windows
+David Rowe, 27 Jul 2015.
+
+1. Download and install the SMI HMD Eye Tracking software from http://update.smivision.com/iViewNG-HMD.exe.
+
+2. Copy the SDK folders (3rdParty, include, libs) from the SDK installation folder C:\Program Files (x86)\SMI\iViewNG-HMD\SDK
+   into the interface/externals/iViewHMD folder. This readme.txt should be there as well.
+
+   You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different
+   checkouts and different projects). If so, set the ENV variable "HIFI_LIB_DIR" to a directory containing a subfolder
+   "iViewHMD" that contains the folders mentioned above.
+
+3. Clear your build directory, run cmake and build, and you should be all set.
interface/src/Application.cpp
@@ -113,6 +113,7 @@
 #include "audio/AudioScope.h"
 
 #include "devices/DdeFaceTracker.h"
+#include "devices/EyeTracker.h"
 #include "devices/Faceshift.h"
 #include "devices/Leapmotion.h"
 #include "devices/RealSense.h"
@@ -265,14 +266,14 @@ bool setupEssentials(int& argc, char** argv) {
     auto scriptCache = DependencyManager::set<ScriptCache>();
     auto soundCache = DependencyManager::set<SoundCache>();
     auto faceshift = DependencyManager::set<Faceshift>();
+    auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
+    auto eyeTracker = DependencyManager::set<EyeTracker>();
     auto audio = DependencyManager::set<AudioClient>();
     auto audioScope = DependencyManager::set<AudioScope>();
     auto deferredLightingEffect = DependencyManager::set<DeferredLightingEffect>();
     auto textureCache = DependencyManager::set<TextureCache>();
     auto framebufferCache = DependencyManager::set<FramebufferCache>();
 
     auto animationCache = DependencyManager::set<AnimationCache>();
-    auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
     auto modelBlender = DependencyManager::set<ModelBlender>();
     auto avatarManager = DependencyManager::set<AvatarManager>();
     auto lodManager = DependencyManager::set<LODManager>();
@@ -641,6 +642,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
     connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
 #endif
 
+#ifdef HAVE_IVIEWHMD
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
+    eyeTracker->init();
+    setActiveEyeTracker();
+#endif
+
     _oldHandMouseX[0] = -1;
     _oldHandMouseY[0] = -1;
     _oldHandMouseX[1] = -1;
@@ -719,6 +726,9 @@ void Application::cleanupBeforeQuit() {
 #ifdef HAVE_DDE
     DependencyManager::destroy<DdeFaceTracker>();
 #endif
+#ifdef HAVE_IVIEWHMD
+    DependencyManager::destroy<EyeTracker>();
+#endif
 }
 
 void Application::emptyLocalCache() {
@@ -2068,6 +2078,44 @@ void Application::setActiveFaceTracker() {
 #endif
 }
 
+void Application::setActiveEyeTracker() {
+#ifdef HAVE_IVIEWHMD
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
+    if (!eyeTracker->isInitialized()) {
+        return;
+    }
+
+    bool isEyeTracking = Menu::getInstance()->isOptionChecked(MenuOption::SMIEyeTracking);
+    bool isSimulating = Menu::getInstance()->isOptionChecked(MenuOption::SimulateEyeTracking);
+    eyeTracker->setEnabled(isEyeTracking, isSimulating);
+
+    Menu::getInstance()->getActionForOption(MenuOption::OnePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
+    Menu::getInstance()->getActionForOption(MenuOption::ThreePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
+    Menu::getInstance()->getActionForOption(MenuOption::FivePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
+#endif
+}
+
+void Application::calibrateEyeTracker1Point() {
+#ifdef HAVE_IVIEWHMD
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
+    eyeTracker->calibrate(1);
+#endif
+}
+
+void Application::calibrateEyeTracker3Points() {
+#ifdef HAVE_IVIEWHMD
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
+    eyeTracker->calibrate(3);
+#endif
+}
+
+void Application::calibrateEyeTracker5Points() {
+#ifdef HAVE_IVIEWHMD
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
+    eyeTracker->calibrate(5);
+#endif
+}
+
 bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) {
     QVector<EntityItemPointer> entities;
 
@@ -2310,7 +2358,8 @@ void Application::updateMyAvatarLookAtPosition() {
     PerformanceWarning warn(showWarnings, "Application::updateMyAvatarLookAtPosition()");
 
     _myAvatar->updateLookAtTargetAvatar();
-    FaceTracker* tracker = getActiveFaceTracker();
+    FaceTracker* faceTracker = getActiveFaceTracker();
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
 
     bool isLookingAtSomeone = false;
     glm::vec3 lookAtSpot;
@@ -2322,6 +2371,17 @@ void Application::updateMyAvatarLookAtPosition() {
         } else {
             lookAtSpot = _myCamera.getPosition() + transformPoint(_myAvatar->getSensorToWorldMatrix(), extractTranslation(getHMDSensorPose()));
         }
+    } else if (eyeTracker->isTracking() && (isHMDMode() || eyeTracker->isSimulating())) {
+        //  Look at the point that the user is looking at.
+        if (isHMDMode()) {
+            glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
+            glm::quat hmdRotation = glm::quat_cast(headPose);
+            lookAtSpot = _myCamera.getPosition() +
+                _myAvatar->getOrientation() * (hmdRotation * eyeTracker->getLookAtPosition());
+        } else {
+            lookAtSpot = _myAvatar->getHead()->getEyePosition() +
+                (_myAvatar->getHead()->getFinalOrientationInWorldFrame() * eyeTracker->getLookAtPosition());
+        }
     } else {
         AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock();
         if (lookingAt && _myAvatar != lookingAt.get()) {
@@ -2354,17 +2414,24 @@ void Application::updateMyAvatarLookAtPosition() {
         }
     } else {
         //  I am not looking at anyone else, so just look forward
-        lookAtSpot = _myAvatar->getHead()->getEyePosition() +
-            (_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
+        if (isHMDMode()) {
+            glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
+            glm::quat headRotation = glm::quat_cast(headPose);
+            lookAtSpot = _myCamera.getPosition() +
+                _myAvatar->getOrientation() * (headRotation * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
+        } else {
+            lookAtSpot = _myAvatar->getHead()->getEyePosition() +
+                (_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
+        }
     }
 
     // Deflect the eyes a bit to match the detected gaze from the face tracker if active.
-    if (tracker && !tracker->isMuted()) {
-        float eyePitch = tracker->getEstimatedEyePitch();
-        float eyeYaw = tracker->getEstimatedEyeYaw();
+    if (faceTracker && !faceTracker->isMuted()) {
+        float eyePitch = faceTracker->getEstimatedEyePitch();
+        float eyeYaw = faceTracker->getEstimatedEyeYaw();
         const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
         glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
-        float deflection = tracker->getEyeDeflection();
+        float deflection = faceTracker->getEyeDeflection();
         if (isLookingAtSomeone) {
             deflection *= GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT;
         }
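Note: EyeTracker::getLookAtPosition() is reported in the head frame, from the mid eye point, so the HMD branches above compose the HMD head rotation with the avatar's orientation to land in world space. A minimal standalone sketch of that composition; the free function and its parameter names are illustrative stand-ins for _myCamera.getPosition(), _myAvatar->getOrientation(), getActiveDisplayPlugin()->getHeadPose(), and eyeTracker->getLookAtPosition():

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    // Sketch only: lift a head-frame gaze point into world space.
    glm::vec3 worldLookAt(const glm::vec3& cameraPosition, const glm::quat& avatarOrientation,
                          const glm::mat4& headPose, const glm::vec3& gazeInHeadFrame) {
        glm::quat hmdRotation = glm::quat_cast(headPose);  // head rotation taken from the display plugin pose
        return cameraPosition + avatarOrientation * (hmdRotation * gazeInHeadFrame);  // head -> avatar -> world
    }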
@@ -3434,6 +3501,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
 void Application::resetSensors() {
     DependencyManager::get<Faceshift>()->reset();
     DependencyManager::get<DdeFaceTracker>()->reset();
+    DependencyManager::get<EyeTracker>()->reset();
 
     getActiveDisplayPlugin()->resetSensors();
interface/src/Application.h
@@ -292,9 +292,10 @@ public:
     virtual QGLWidget* getPrimarySurface() override;
 
     void setActiveDisplayPlugin(const QString& pluginName);
-private:
+
     DisplayPlugin * getActiveDisplayPlugin();
     const DisplayPlugin * getActiveDisplayPlugin() const;
+
 public:
 
     FileLogger* getLogger() { return _logger; }
@@ -428,6 +429,11 @@ public slots:
     void resetSensors();
     void setActiveFaceTracker();
 
+    void setActiveEyeTracker();
+    void calibrateEyeTracker1Point();
+    void calibrateEyeTracker3Points();
+    void calibrateEyeTracker5Points();
+
     void aboutApp();
     void showEditEntitiesHelp();
interface/src/Menu.cpp
@@ -425,6 +425,23 @@ Menu::Menu() {
     addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
 #endif
 
+#ifdef HAVE_IVIEWHMD
+    MenuWrapper* eyeTrackingMenu = avatarDebugMenu->addMenu("Eye Tracking");
+    addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SMIEyeTracking, 0, false,
+        qApp, SLOT(setActiveEyeTracker()));
+    {
+        MenuWrapper* calibrateEyeTrackingMenu = eyeTrackingMenu->addMenu("Calibrate");
+        addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::OnePointCalibration, 0,
+            qApp, SLOT(calibrateEyeTracker1Point()));
+        addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::ThreePointCalibration, 0,
+            qApp, SLOT(calibrateEyeTracker3Points()));
+        addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::FivePointCalibration, 0,
+            qApp, SLOT(calibrateEyeTracker5Points()));
+    }
+    addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SimulateEyeTracking, 0, false,
+        qApp, SLOT(setActiveEyeTracker()));
+#endif
+
     auto avatarManager = DependencyManager::get<AvatarManager>();
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
         avatarManager.data(), SLOT(setShouldShowReceiveStats(bool)));
@@ -433,6 +450,7 @@ Menu::Menu() {
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderHeadCollisionShapes);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
+    addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtTargets, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
     addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
interface/src/Menu.h
@@ -192,6 +192,7 @@ namespace MenuOption {
     const QString ExpandUpdateTiming = "Expand /update";
     const QString Faceshift = "Faceshift";
     const QString FirstPerson = "First Person";
+    const QString FivePointCalibration = "5 Point Calibration";
     const QString Forward = "Forward";
     const QString FrameTimer = "Show Timer";
     const QString FullscreenMirror = "Fullscreen Mirror";
@@ -218,6 +219,7 @@ namespace MenuOption {
     const QString NamesAboveHeads = "Names Above Heads";
     const QString NoFaceTracking = "None";
     const QString OctreeStats = "Entity Statistics";
+    const QString OnePointCalibration = "1 Point Calibration";
     const QString OnlyDisplayTopTen = "Only Display Top Ten";
     const QString OutputMenu = "Display>Mode";
     const QString PackageModel = "Package Model...";
@@ -232,6 +234,7 @@ namespace MenuOption {
     const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
     const QString RenderFocusIndicator = "Show Eye Focus";
     const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
+    const QString RenderLookAtTargets = "Show Look-at Targets";
     const QString RenderLookAtVectors = "Show Look-at Vectors";
     const QString RenderSkeletonCollisionShapes = "Show Skeleton Collision Shapes";
     const QString RenderTargetFramerate = "Framerate";
@@ -271,12 +274,15 @@ namespace MenuOption {
     const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
     const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
     const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
+    const QString SimulateEyeTracking = "Simulate";
+    const QString SMIEyeTracking = "SMI Eye Tracking";
     const QString Stars = "Stars";
     const QString Stats = "Stats";
     const QString StopAllScripts = "Stop All Scripts";
     const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
     const QString TestPing = "Test Ping";
     const QString ThirdPerson = "Third Person";
+    const QString ThreePointCalibration = "3 Point Calibration";
     const QString ThrottleFPSIfNotFocus = "Throttle FPS If Not Focus";
     const QString ToolWindow = "Tool Window";
     const QString TransmitterDrive = "Transmitter Drive";
interface/src/avatar/Avatar.cpp
@@ -69,6 +69,8 @@ namespace render {
     auto avatarPtr = static_pointer_cast<Avatar>(avatar);
     bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors);
     avatarPtr->setDisplayingLookatVectors(renderLookAtVectors);
+    bool renderLookAtTarget = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtTargets);
+    avatarPtr->setDisplayingLookatTarget(renderLookAtTarget);
 
     if (avatarPtr->isInitialized() && args) {
         avatarPtr->render(args, Application::getInstance()->getCamera()->getPosition());
@@ -601,7 +603,9 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, floa
         getHand()->render(renderArgs, false);
     }
 
     getHead()->render(renderArgs, 1.0f, renderFrustum);
+
+    getHead()->renderLookAts(renderArgs);
 }
 
 bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {
interface/src/avatar/Avatar.h
@@ -91,6 +91,7 @@ public:
 
     //setters
     void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
+    void setDisplayingLookatTarget(bool displayingLookatTarget) { getHead()->setRenderLookatTarget(displayingLookatTarget); }
     void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
     bool getIsLookAtTarget() const { return _isLookAtTarget; }
     //getters
interface/src/avatar/Head.cpp
@@ -17,11 +17,13 @@
 
 #include "Application.h"
 #include "Avatar.h"
+#include "DependencyManager.h"
 #include "GeometryUtil.h"
 #include "Head.h"
 #include "Menu.h"
 #include "Util.h"
 #include "devices/DdeFaceTracker.h"
+#include "devices/EyeTracker.h"
 #include "devices/Faceshift.h"
 #include "AvatarRig.h"
 
@@ -44,6 +46,7 @@ Head::Head(Avatar* owningAvatar) :
     _mouth3(0.0f),
     _mouth4(0.0f),
     _renderLookatVectors(false),
+    _renderLookatTarget(false),
     _saccade(0.0f, 0.0f, 0.0f),
     _saccadeTarget(0.0f, 0.0f, 0.0f),
     _leftEyeBlinkVelocity(0.0f),
@@ -116,6 +119,9 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
                 applyEyelidOffset(getFinalOrientationInWorldFrame());
             }
         }
+
+        auto eyeTracker = DependencyManager::get<EyeTracker>();
+        _isEyeTrackerConnected = eyeTracker->isTracking();
     }
 
     if (!myAvatar->getStandingHMDSensorMode()) {
@@ -129,19 +135,24 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
     }
 
-    if (!(_isFaceTrackerConnected || billboard)) {
-        // Update eye saccades
-        const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
-        const float AVERAGE_SACCADE_INTERVAL = 6.0f;
-        const float MICROSACCADE_MAGNITUDE = 0.002f;
-        const float SACCADE_MAGNITUDE = 0.04f;
-        const float NOMINAL_FRAME_RATE = 60.0f;
-
-        if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
-            _saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
-        } else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
-            _saccadeTarget = SACCADE_MAGNITUDE * randVector();
+    if (!_isEyeTrackerConnected) {
+        // Update eye saccades
+        const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
+        const float AVERAGE_SACCADE_INTERVAL = 6.0f;
+        const float MICROSACCADE_MAGNITUDE = 0.002f;
+        const float SACCADE_MAGNITUDE = 0.04f;
+        const float NOMINAL_FRAME_RATE = 60.0f;
+
+        if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
+            _saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
+        } else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
+            _saccadeTarget = SACCADE_MAGNITUDE * randVector();
+        }
+        _saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
+    } else {
+        _saccade = glm::vec3();
     }
-    _saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
 
     // Detect transition from talking to not; force blink after that and a delay
     bool forceBlink = false;
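For reference, the saccade update kept above is an exponential pull toward the target, s += (s_target - s) * 0.5^(NOMINAL_FRAME_RATE * deltaTime), so at the nominal 60 fps (deltaTime = 1/60 s) the blend factor is 0.5 and the eye moves halfway to the saccade target each frame. With the eye tracker connected the procedural saccade is simply zeroed, since real gaze data replaces the synthetic jitter.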
@@ -300,8 +311,18 @@ void Head::relaxLean(float deltaTime) {
 }
 
 void Head::render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum) {
+}
+
+void Head::renderLookAts(RenderArgs* renderArgs) {
+    renderLookAts(renderArgs, _leftEyePosition, _rightEyePosition);
+}
+
+void Head::renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition) {
     if (_renderLookatVectors) {
-        renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
+        renderLookatVectors(renderArgs, leftEyePosition, rightEyePosition, getCorrectedLookAtPosition());
     }
+    if (_renderLookatTarget) {
+        renderLookatTarget(renderArgs, getCorrectedLookAtPosition());
+    }
 }
 
@@ -418,4 +439,17 @@ void Head::renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition
     geometryCache->renderLine(batch, rightEyePosition, lookatPosition, startColor, endColor, _rightEyeLookAtID);
 }
 
+void Head::renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition) {
+    auto& batch = *renderArgs->_batch;
+    auto transform = Transform{};
+    transform.setTranslation(lookatPosition);
+    batch.setModelTransform(transform);
+
+    auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
+    deferredLighting->bindSimpleProgram(batch);
+
+    auto geometryCache = DependencyManager::get<GeometryCache>();
+    const float LOOK_AT_TARGET_RADIUS = 0.075f;
+    const glm::vec4 LOOK_AT_TARGET_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f };
+    geometryCache->renderSphere(batch, LOOK_AT_TARGET_RADIUS, 15, 15, LOOK_AT_TARGET_COLOR, true);
+}
interface/src/avatar/Head.h
@@ -39,6 +39,9 @@ public:
     void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
     void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
     void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
+    void setRenderLookatTarget(bool onOff) { _renderLookatTarget = onOff; }
+    void renderLookAts(RenderArgs* renderArgs);
+    void renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition);
 
     /// \return orientationBase+Delta
     glm::quat getFinalOrientationInLocalFrame() const;
@@ -123,6 +126,7 @@ private:
     float _mouth3;
     float _mouth4;
     bool _renderLookatVectors;
+    bool _renderLookatTarget;
     glm::vec3 _saccade;
     glm::vec3 _saccadeTarget;
     float _leftEyeBlinkVelocity;
@@ -151,6 +155,7 @@ private:
 
     // private methods
     void renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition);
+    void renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition);
     void calculateMouthShapes();
     void applyEyelidOffset(glm::quat headOrientation);
interface/src/avatar/MyAvatar.cpp
@@ -24,6 +24,7 @@
 #include <AnimationHandle.h>
 #include <AudioClient.h>
 #include <DependencyManager.h>
+#include <display-plugins/DisplayPlugin.h>
 #include <GeometryUtil.h>
 #include <NodeList.h>
 #include <udt/PacketHeaders.h>
@@ -931,17 +932,13 @@ void MyAvatar::updateLookAtTargetAvatar() {
             const float HUMAN_EYE_SEPARATION = 0.065f;
             float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
             gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
 
-            if (Application::getInstance()->isHMDMode()) {
-                avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
-                    + glm::vec3(qApp->getHMDSensorPose()[3]) + gazeOffset);
-            } else {
-                avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
-                    + gazeOffset);
-            }
+            avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+                + gazeOffset);
         } else {
             avatar->getHead()->clearCorrectedLookAtPosition();
         }
     } else {
         avatar->getHead()->clearCorrectedLookAtPosition();
     }
 }
 auto avatarPointer = _lookAtTargetAvatar.lock();
@@ -1249,6 +1246,24 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
     if (shouldRenderHead(renderArgs)) {
         getHead()->render(renderArgs, 1.0f, renderFrustum);
     }
+
+    if (qApp->isHMDMode()) {
+        glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
+
+        glm::mat4 leftEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Left);
+        glm::vec3 leftEyePosition = glm::vec3(leftEyePose[3]);
+        glm::mat4 rightEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Right);
+        glm::vec3 rightEyePosition = glm::vec3(rightEyePose[3]);
+        glm::mat4 headPose = Application::getInstance()->getActiveDisplayPlugin()->getHeadPose();
+        glm::vec3 headPosition = glm::vec3(headPose[3]);
+
+        getHead()->renderLookAts(renderArgs,
+            cameraPosition + getOrientation() * (leftEyePosition - headPosition),
+            cameraPosition + getOrientation() * (rightEyePosition - headPosition));
+    } else {
+        getHead()->renderLookAts(renderArgs);
+    }
+
     getHand()->render(renderArgs, true);
 }
interface/src/devices/EyeTracker.cpp (new file, 308 lines)
@@ -0,0 +1,308 @@
+//
+//  EyeTracker.cpp
+//  interface/src/devices
+//
+//  Created by David Rowe on 27 Jul 2015.
+//  Copyright 2015 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#include "EyeTracker.h"
+
+#include <QFuture>
+#include <QMessageBox>
+#include <QtConcurrent/QtConcurrentRun>
+
+#include <SharedUtil.h>
+
+#include "InterfaceLogging.h"
+#include "OctreeConstants.h"
+
+#ifdef HAVE_IVIEWHMD
+char* HIGH_FIDELITY_EYE_TRACKER_CALIBRATION = "HighFidelityEyeTrackerCalibration";
+#endif
+
+#ifdef HAVE_IVIEWHMD
+static void CALLBACK eyeTrackerCallback(smi_CallbackDataStruct* data) {
+    auto eyeTracker = DependencyManager::get<EyeTracker>();
+    if (eyeTracker) {  // Guard against a few callbacks that continue to be received after smi_quit().
+        eyeTracker->processData(data);
+    }
+}
+#endif
+
+EyeTracker::~EyeTracker() {
+#ifdef HAVE_IVIEWHMD
+    if (_isStreaming) {
+        int result = smi_quit();
+        if (result != SMI_RET_SUCCESS) {
+            qCWarning(interfaceapp) << "Eye Tracker: Error terminating tracking:" << smiReturnValueToString(result);
+        }
+    }
+#endif
+}
+
+#ifdef HAVE_IVIEWHMD
+void EyeTracker::processData(smi_CallbackDataStruct* data) {
+    _lastProcessDataTimestamp = usecTimestampNow();
+
+    if (!_isEnabled) {
+        return;
+    }
+
+    if (data->type == SMI_SIMPLE_GAZE_SAMPLE) {
+        // Calculate the intersections of the left and right eye look-at vectors with a vertical plane along the monocular
+        // gaze direction. Average these positions to give the look-at point.
+        // If the eyes are parallel or diverged, gaze at a distant look-at point calculated the same as for non eye tracking.
+        // Line-plane intersection: https://en.wikipedia.org/wiki/Line%E2%80%93plane_intersection
+
+        smi_SampleHMDStruct* sample = (smi_SampleHMDStruct*)data->result;
+        // The iViewHMD coordinate system has x and z axes reversed compared to Interface, i.e., wearing the HMD:
+        // - x is left
+        // - y is up
+        // - z is forwards
+
+        // Plane
+        smi_Vec3d point = sample->gazeBasePoint;  // mm
+        smi_Vec3d direction = sample->gazeDirection;
+        glm::vec3 planePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
+        glm::vec3 planeNormal = glm::vec3(-direction.z, 0.0f, direction.x);
+        glm::vec3 monocularDirection = glm::vec3(-direction.x, direction.y, -direction.z);
+
+        // Left eye
+        point = sample->left.gazeBasePoint;  // mm
+        direction = sample->left.gazeDirection;
+        glm::vec3 leftLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
+        glm::vec3 leftLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
+
+        // Right eye
+        point = sample->right.gazeBasePoint;  // mm
+        direction = sample->right.gazeDirection;
+        glm::vec3 rightLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
+        glm::vec3 rightLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
+
+        // Plane - line dot products
+        float leftLinePlaneDotProduct = glm::dot(leftLineDirection, planeNormal);
+        float rightLinePlaneDotProduct = glm::dot(rightLineDirection, planeNormal);
+
+        // Gaze into distance if eyes are parallel or diverged; otherwise the look-at is the average of look-at points
+        glm::vec3 lookAtPosition;
+        if (abs(leftLinePlaneDotProduct) <= FLT_EPSILON || abs(rightLinePlaneDotProduct) <= FLT_EPSILON) {
+            lookAtPosition = monocularDirection * (float)TREE_SCALE;
+        } else {
+            float leftDistance = glm::dot(planePoint - leftLinePoint, planeNormal) / leftLinePlaneDotProduct;
+            float rightDistance = glm::dot(planePoint - rightLinePoint, planeNormal) / rightLinePlaneDotProduct;
+            if (leftDistance <= 0.0f || rightDistance <= 0.0f
+                || leftDistance > (float)TREE_SCALE || rightDistance > (float)TREE_SCALE) {
+                lookAtPosition = monocularDirection * (float)TREE_SCALE;
+            } else {
+                glm::vec3 leftIntersectionPoint = leftLinePoint + leftDistance * leftLineDirection;
+                glm::vec3 rightIntersectionPoint = rightLinePoint + rightDistance * rightLineDirection;
+                lookAtPosition = (leftIntersectionPoint + rightIntersectionPoint) / 2.0f;
+            }
+        }
+
+        if (glm::isnan(lookAtPosition.x) || glm::isnan(lookAtPosition.y) || glm::isnan(lookAtPosition.z)) {
+            return;
+        }
+
+        _lookAtPosition = lookAtPosition;
+    }
+}
+#endif
+
+void EyeTracker::init() {
+    if (_isInitialized) {
+        qCWarning(interfaceapp) << "Eye Tracker: Already initialized";
+        return;
+    }
+
+#ifdef HAVE_IVIEWHMD
+    int result = smi_setCallback(eyeTrackerCallback);
+    if (result != SMI_RET_SUCCESS) {
+        qCWarning(interfaceapp) << "Eye Tracker: Error setting callback:" << smiReturnValueToString(result);
+        QMessageBox::warning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
+    } else {
+        _isInitialized = true;
+    }
+
+    connect(&_startStreamingWatcher, SIGNAL(finished()), this, SLOT(onStreamStarted()));
+#endif
+}
+
+#ifdef HAVE_IVIEWHMD
+int EyeTracker::startStreaming(bool simulate) {
+    return smi_startStreaming(simulate);  // This call blocks execution.
+}
+#endif
+
+#ifdef HAVE_IVIEWHMD
+void EyeTracker::onStreamStarted() {
+    int result = _startStreamingWatcher.result();
+    _isStreaming = (result == SMI_RET_SUCCESS);
+
+    if (result != SMI_RET_SUCCESS) {
+        qCWarning(interfaceapp) << "Eye Tracker: Error starting streaming:" << smiReturnValueToString(result);
+        // Display error dialog unless SMI SDK has already displayed an error message.
+        if (result != SMI_ERROR_HMD_NOT_SUPPORTED) {
+            QMessageBox::warning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
+        }
+    } else {
+        qCDebug(interfaceapp) << "Eye Tracker: Started streaming";
+    }
+
+    // TODO: Re-enable once saving / loading calibrations is working
+    //if (_isStreaming) {
+    //    // Automatically load calibration if one has been saved.
+    //    QString availableCalibrations = QString(smi_getAvailableCalibrations());
+    //    if (availableCalibrations.contains(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION)) {
+    //        result = smi_loadCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
+    //        if (result != SMI_RET_SUCCESS) {
+    //            qCWarning(interfaceapp) << "Eye Tracker: Error loading calibration:" << smiReturnValueToString(result);
+    //            QMessageBox::warning(nullptr, "Eye Tracker Error", "Error loading calibration"
+    //                + smiReturnValueToString(result));
+    //        } else {
+    //            qCDebug(interfaceapp) << "Eye Tracker: Loaded calibration";
+    //        }
+    //    }
+    //}
+}
+#endif
+
+void EyeTracker::setEnabled(bool enabled, bool simulate) {
+    if (!_isInitialized) {
+        return;
+    }
+
+#ifdef HAVE_IVIEWHMD
+    qCDebug(interfaceapp) << "Eye Tracker: Set enabled =" << enabled << ", simulate =" << simulate;
+
+    // There is no smi_stopStreaming() method and after an smi_quit(), streaming cannot be restarted (at least not for
+    // simulated data). So keep streaming once started in case tracking is re-enabled after stopping.
+
+    // Try to stop streaming if changing whether simulating or not.
+    if (enabled && _isStreaming && _isStreamSimulating != simulate) {
+        int result = smi_quit();
+        if (result != SMI_RET_SUCCESS) {
+            qCWarning(interfaceapp) << "Eye Tracker: Error stopping streaming:" << smiReturnValueToString(result);
+        }
+        _isStreaming = false;
+    }
+
+    if (enabled && !_isStreaming) {
+        // Start SMI streaming in a separate thread because it blocks.
+        QFuture<int> future = QtConcurrent::run(this, &EyeTracker::startStreaming, simulate);
+        _startStreamingWatcher.setFuture(future);
+        _isStreamSimulating = simulate;
+    }
+
+    _isEnabled = enabled;
+    _isSimulating = simulate;
+#endif
+}
+
+void EyeTracker::reset() {
+    // Nothing to do.
+}
+
+bool EyeTracker::isTracking() const {
+    static const quint64 ACTIVE_TIMEOUT_USECS = 2000000;  // 2 secs
+    return _isEnabled && (usecTimestampNow() - _lastProcessDataTimestamp < ACTIVE_TIMEOUT_USECS);
+}
+
+#ifdef HAVE_IVIEWHMD
+void EyeTracker::calibrate(int points) {
+
+    if (!_isStreaming) {
+        qCWarning(interfaceapp) << "Eye Tracker: Cannot calibrate because not streaming";
+        return;
+    }
+
+    smi_CalibrationHMDStruct* calibrationHMDStruct;
+    smi_createCalibrationHMDStruct(&calibrationHMDStruct);
+
+    smi_CalibrationTypeEnum calibrationType;
+    switch (points) {
+        case 1:
+            calibrationType = SMI_ONE_POINT_CALIBRATION;
+            qCDebug(interfaceapp) << "Eye Tracker: One point calibration";
+            break;
+        case 3:
+            calibrationType = SMI_THREE_POINT_CALIBRATION;
+            qCDebug(interfaceapp) << "Eye Tracker: Three point calibration";
+            break;
+        case 5:
+            calibrationType = SMI_FIVE_POINT_CALIBRATION;
+            qCDebug(interfaceapp) << "Eye Tracker: Five point calibration";
+            break;
+        default:
+            qCWarning(interfaceapp) << "Eye Tracker: Invalid calibration specified";
+            return;
+    }
+
+    calibrationHMDStruct->type = calibrationType;
+    calibrationHMDStruct->backgroundColor->blue = 0.5;
+    calibrationHMDStruct->backgroundColor->green = 0.5;
+    calibrationHMDStruct->backgroundColor->red = 0.5;
+    calibrationHMDStruct->foregroundColor->blue = 1.0;
+    calibrationHMDStruct->foregroundColor->green = 1.0;
+    calibrationHMDStruct->foregroundColor->red = 1.0;
+
+    int result = smi_setupCalibration(calibrationHMDStruct);
+    if (result != SMI_RET_SUCCESS) {
+        qCWarning(interfaceapp) << "Eye Tracker: Error setting up calibration:" << smiReturnValueToString(result);
+        return;
+    } else {
+        result = smi_calibrate();
+        if (result != SMI_RET_SUCCESS) {
+            qCWarning(interfaceapp) << "Eye Tracker: Error performing calibration:" << smiReturnValueToString(result);
+        } else {
+            // TODO: Re-enable once saving / loading calibrations is working
+            //result = smi_saveCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
+            //if (result != SMI_RET_SUCCESS) {
+            //    qCWarning(interfaceapp) << "Eye Tracker: Error saving calibration:" << smiReturnValueToString(result);
+            //}
+        }
+    }
+
+    if (result != SMI_RET_SUCCESS) {
+        QMessageBox::warning(nullptr, "Eye Tracker Error", "Calibration error: " + smiReturnValueToString(result));
+    }
+}
+#endif
+
+#ifdef HAVE_IVIEWHMD
+QString EyeTracker::smiReturnValueToString(int value) {
+    switch (value)
+    {
+        case smi_ErrorReturnValue::SMI_ERROR_NO_CALLBACK_SET:
+            return "No callback set";
+        case smi_ErrorReturnValue::SMI_ERROR_CONNECTING_TO_HMD:
+            return "Error connecting to HMD";
+        case smi_ErrorReturnValue::SMI_ERROR_HMD_NOT_SUPPORTED:
+            return "HMD not supported";
+        case smi_ErrorReturnValue::SMI_ERROR_NOT_IMPLEMENTED:
+            return "Not implmented";
+        case smi_ErrorReturnValue::SMI_ERROR_INVALID_PARAMETER:
+            return "Invalid parameter";
+        case smi_ErrorReturnValue::SMI_ERROR_EYECAMERAS_NOT_AVAILABLE:
+            return "Eye cameras not available";
+        case smi_ErrorReturnValue::SMI_ERROR_OCULUS_RUNTIME_NOT_SUPPORTED:
+            return "Oculus runtime not supported";
+        // TODO: Re-enable once saving / loading calibrations is working
+        //case smi_ErrorReturnValue::SMI_ERROR_FILE_NOT_FOUND:
+        //    return "File not found";
+        //case smi_ErrorReturnValue::SMI_ERROR_FILE_EMPTY:
+        //    return "File empty";
+        case smi_ErrorReturnValue::SMI_ERROR_UNKNOWN:
+            return "Unknown error";
+        default:
+            QString number;
+            number.setNum(value);
+            return number;
+    }
+}
+#endif
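The gaze solve in processData() above is the textbook line-plane intersection the code comment links to. In the code's variables, with plane point p0 = planePoint, plane normal n = planeNormal, and each eye's ray starting at l0 (leftLinePoint / rightLinePoint) with direction l (leftLineDirection / rightLineDirection):

    d = dot(p0 - l0, n) / dot(l, n)
    intersection = l0 + d * l

When dot(l, n) is within FLT_EPSILON of zero (eye ray parallel to the plane) or d falls outside (0, TREE_SCALE], the code falls back to a distant point along the monocular gaze direction; otherwise the look-at is the average of the left and right eye intersections.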
interface/src/devices/EyeTracker.h (new file, 71 lines)
@@ -0,0 +1,71 @@
+//
+//  EyeTracker.h
+//  interface/src/devices
+//
+//  Created by David Rowe on 27 Jul 2015.
+//  Copyright 2015 High Fidelity, Inc.
+//
+//  Distributed under the Apache License, Version 2.0.
+//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
+//
+
+#ifndef hifi_EyeTracker_h
+#define hifi_EyeTracker_h
+
+#include <QObject>
+#include <QFutureWatcher>
+
+#include <glm/glm.hpp>
+
+#include <DependencyManager.h>
+#ifdef HAVE_IVIEWHMD
+#include <iViewHMDAPI.h>
+#endif
+
+
+class EyeTracker : public QObject, public Dependency {
+    Q_OBJECT
+    SINGLETON_DEPENDENCY
+
+public:
+    ~EyeTracker();
+
+    void init();
+    void setEnabled(bool enabled, bool simulate);
+    void reset();
+
+    bool isInitialized() const { return _isInitialized; }
+    bool isEnabled() const { return _isEnabled; }
+    bool isTracking() const;
+    bool isSimulating() const { return _isSimulating; }
+
+    glm::vec3 getLookAtPosition() const { return _lookAtPosition; }  // From mid eye point in head frame.
+
+#ifdef HAVE_IVIEWHMD
+    void processData(smi_CallbackDataStruct* data);
+
+    void calibrate(int points);
+
+    int startStreaming(bool simulate);
+
+private slots:
+    void onStreamStarted();
+#endif
+
+private:
+    QString smiReturnValueToString(int value);
+
+    bool _isInitialized = false;
+    bool _isEnabled = false;
+    bool _isSimulating = false;
+    bool _isStreaming = false;
+    bool _isStreamSimulating = false;
+
+    quint64 _lastProcessDataTimestamp;
+
+    glm::vec3 _lookAtPosition;
+
+    QFutureWatcher<int> _startStreamingWatcher;
+};
+
+#endif // hifi_EyeTracker_h
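For orientation when reading the Application hunks: EyeTracker is a DependencyManager singleton, like the other trackers. A minimal lifecycle sketch assembled from calls that all appear in this diff; the wrapper function itself is hypothetical:

    #include <glm/glm.hpp>

    #include <DependencyManager.h>

    #include "devices/EyeTracker.h"

    // Hypothetical wrapper; every call inside appears in the Application.cpp hunks above.
    void eyeTrackerLifecycleSketch() {
        auto eyeTracker = DependencyManager::set<EyeTracker>();      // registered in setupEssentials()
        eyeTracker->init();                                          // installs the SMI sample callback
        eyeTracker->setEnabled(true, false);                         // enable real (non-simulated) streaming

        if (eyeTracker->isTracking()) {                              // samples arrived within the last 2 seconds
            glm::vec3 gazeInHead = eyeTracker->getLookAtPosition();  // head frame, from the mid eye point
            (void)gazeInHead;                                        // consumers transform this into world space
        }

        DependencyManager::destroy<EyeTracker>();                    // torn down in cleanupBeforeQuit()
    }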
libraries/avatars/src/AvatarData.cpp
@@ -184,6 +184,11 @@ QByteArray AvatarData::toByteArray() {
     if (_headData->_isFaceTrackerConnected) {
         setAtBit(bitItems, IS_FACESHIFT_CONNECTED);
     }
+    // eye tracker state
+    if (_headData->_isEyeTrackerConnected) {
+        setAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
+    }
     // referential state
     if (_referential != NULL && _referential->isValid()) {
         setAtBit(bitItems, HAS_REFERENTIAL);
     }
@@ -383,6 +388,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
         + (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
 
     _headData->_isFaceTrackerConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
+    _headData->_isEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
     bool hasReferential = oneAtBit(bitItems, HAS_REFERENTIAL);
 
     // Referential
libraries/avatars/src/AvatarData.h
@@ -76,21 +76,21 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
 
 const qint64 AVATAR_SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND;
 
-// Bitset of state flags - we store the key state, hand state, faceshift, chat circling, and existance of
+// Bitset of state flags - we store the key state, hand state, Faceshift, eye tracking, and existence of
 // referential data in this bit set. The hand state is an octal, but is split into two sections to maintain
 // backward compatibility. The bits are ordered as such (0-7 left to right).
 // +-----+-----+-+-+-+--+
-// |K0,K1|H0,H1|F|C|R|H2|
+// |K0,K1|H0,H1|F|E|R|H2|
 // +-----+-----+-+-+-+--+
 // Key state - K0,K1 is found in the 1st and 2nd bits
 // Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
 // Faceshift - F is found in the 5th bit
-// Chat Circling - C is found in the 6th bit
+// Eye tracker - E is found in the 6th bit
 // Referential Data - R is found in the 7th bit
 const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
 const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
 const int IS_FACESHIFT_CONNECTED = 4; // 5th bit
-const int UNUSED_AVATAR_STATE_BIT_5 = 5; // 6th bit (was CHAT_CIRCLING)
+const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_REFERENTIAL = 6; // 7th bit
 const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
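Because E reuses the bit freed by the old chat-circling flag, the packed state byte keeps its wire layout. A small sketch of the round trip, using the same setAtBit()/oneAtBit() helpers the AvatarData.cpp hunks call (sketch only):

    unsigned char bitItems = 0;
    setAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);        // sender, in toByteArray(): set the 6th bit
    bool isEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);  // receiver, in parseDataFromBuffer()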
libraries/avatars/src/HeadData.cpp
@@ -32,6 +32,7 @@ HeadData::HeadData(AvatarData* owningAvatar) :
     _lookAtPosition(0.0f, 0.0f, 0.0f),
     _audioLoudness(0.0f),
     _isFaceTrackerConnected(false),
+    _isEyeTrackerConnected(false),
     _leftEyeBlink(0.0f),
     _rightEyeBlink(0.0f),
     _averageLoudness(0.0f),
libraries/avatars/src/HeadData.h
@@ -93,6 +93,7 @@ protected:
     glm::vec3 _lookAtPosition;
     float _audioLoudness;
     bool _isFaceTrackerConnected;
+    bool _isEyeTrackerConnected;
     float _leftEyeBlink;
     float _rightEyeBlink;
     float _averageLoudness;
@@ -81,6 +81,7 @@ class MenuConstants : public QObject{
 
 public:
     enum Item {
+        RenderLookAtTargets,
     };
 
 public: