Merge branch 'master' of https://github.com/highfidelity/hifi into centerOrigin

This commit is contained in:
ZappoMan 2015-08-14 09:33:53 -07:00
commit 5fdd7304aa
25 changed files with 2214 additions and 142 deletions

View file

@ -0,0 +1,66 @@
#
#  FindiViewHMD.cmake
#
#  Try to find the SMI iViewHMD eye tracker library
#
#  You must provide a IVIEWHMD_ROOT_DIR which contains 3rdParty, include, and libs directories
#
#  Once done this will define
#
#  IVIEWHMD_FOUND - system found iViewHMD
#  IVIEWHMD_INCLUDE_DIRS - the iViewHMD include directory
#  IVIEWHMD_LIBRARIES - link this to use iViewHMD
#
#  Created on 27 Jul 2015 by David Rowe
#  Copyright 2015 High Fidelity, Inc.
#

if (WIN32)

    include("${MACRO_DIR}/HifiLibrarySearchHints.cmake")
    hifi_library_search_hints("iViewHMD")

    find_path(IVIEWHMD_INCLUDE_DIRS iViewHMDAPI.h PATH_SUFFIXES include HINTS ${IVIEWHMD_SEARCH_DIRS})
    find_library(IVIEWHMD_LIBRARIES NAMES iViewHMDAPI PATH_SUFFIXES libs HINTS ${IVIEWHMD_SEARCH_DIRS})
    find_path(IVIEWHMD_API_DLL_PATH iViewHMDAPI.dll PATH_SUFFIXES libs HINTS ${IVIEWHMD_SEARCH_DIRS})

    list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_API_DLL_PATH)

    # Runtime DLLs shipped in the SDK's 3rdParty directory that must be found
    # and copied alongside the interface executable.
    set(IVIEWHMD_DLLS
        avcodec-53.dll
        avformat-53.dll
        avutil-51.dll
        libboost_filesystem-mgw45-mt-1_49.dll
        libboost_system-mgw45-mt-1_49.dll
        libboost_thread-mgw45-mt-1_49.dll
        libgcc_s_dw2-1.dll
        libiViewNG-LibCore.dll
        libopencv_calib3d244.dll
        libopencv_core244.dll
        libopencv_features2d244.dll
        libopencv_flann244.dll
        libopencv_highgui244.dll
        libopencv_imgproc244.dll
        libopencv_legacy244.dll
        libopencv_ml244.dll
        libopencv_video244.dll
        libstdc++-6.dll
        opencv_core220.dll
        opencv_highgui220.dll
        opencv_imgproc220.dll
        swscale-2.dll
    )

    # BUG FIX: find_path() stores its result in a CACHE variable and is a no-op
    # once that variable is set. The original loop reused the single name
    # IVIEWHMD_DLL_PATH for every DLL, so only the first DLL was actually
    # searched for; every later iteration silently reused the cached path.
    # Use a unique, per-DLL cache variable instead.
    foreach(IVIEWHMD_DLL ${IVIEWHMD_DLLS})
        string(REGEX REPLACE "[^A-Za-z0-9]" "_" _dll_var "${IVIEWHMD_DLL}")
        find_path(IVIEWHMD_${_dll_var}_PATH ${IVIEWHMD_DLL} PATH_SUFFIXES 3rdParty HINTS ${IVIEWHMD_SEARCH_DIRS})
        list(APPEND IVIEWHMD_REQUIREMENTS IVIEWHMD_${_dll_var}_PATH)
        list(APPEND IVIEWHMD_DLL_PATHS ${IVIEWHMD_${_dll_var}_PATH})
    endforeach()
    # Most DLLs live in the same directory; collapse the repeats.
    list(REMOVE_DUPLICATES IVIEWHMD_DLL_PATHS)

    include(FindPackageHandleStandardArgs)
    find_package_handle_standard_args(IVIEWHMD DEFAULT_MSG ${IVIEWHMD_REQUIREMENTS})

    add_paths_to_fixup_libs(${IVIEWHMD_API_DLL_PATH} ${IVIEWHMD_DLL_PATHS})

    mark_as_advanced(IVIEWHMD_INCLUDE_DIRS IVIEWHMD_LIBRARIES IVIEWHMD_SEARCH_DIRS)

endif()

View file

@ -0,0 +1,439 @@
//
// widgets-example.js
// games
//
// Copyright 2015 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// --- Layout constants for the example UI ---
var paddingX = 8;
var paddingY = 8;
var buttonWidth = 30;
var buttonHeight = 30;
// Base URL for the toolbar icon images; addImage() appends '<iconId>.svg'.
var ICONS_URL = 'https://s3.amazonaws.com/hifi-public/marketplace/hificontent/Scripts/planets/images/';
var panelX = 1250;
var panelY = 500;
var panelWidth = 50;
var panelHeight = 210;
// Earlier UIPanel-based prototype of the same toolbar, kept for reference:
// var mainPanel = new UIPanel(panelX, panelY, panelWidth, panelHeight);
// var systemViewButton = mainPanel.addImage('solarsystems');
// var zoomButton = mainPanel.addImage('magnifier');
// var satelliteButton = mainPanel.addImage('satellite');
// var settingsButton = mainPanel.addImage('settings');
// var stopButton = mainPanel.addImage('close');
//
// mainPanel.show();
//
// var systemViewPanel = new UIPanel(panelX - 120, panelY, 120, 40);
// var reverseButton = systemViewPanel.addImage('reverse');
// var pauseButton = systemViewPanel.addImage('playpause');
// var forwardButton = systemViewPanel.addImage('forward');
//
// var zoomPanel = new UIPanel(panelX - 60, panelY + buttonHeight + paddingY, 650, 50);
// for (var i = 0; i < planets.length; ++i) {
// zoomPanel.addText(planets[i].name);
// }
// Pull in the widget toolkit this example demonstrates.
Script.include('../libraries/uiwidgets.js');
UI.setDefaultVisibility(true);
// On any UI error, tear down the script's overlays rather than leaving them up.
UI.setErrorHandler(function(err) {
teardown();
// print(err);
// Script.stop();
});
// Controller.mouseMoveEvent.connect(function panelMouseMoveEvent(event) { return settings.mouseMoveEvent(event); });
// Controller.mousePressEvent.connect( function panelMousePressEvent(event) { return settings.mousePressEvent(event); });
// Controller.mouseDoublePressEvent.connect( function panelMouseDoublePressEvent(event) { return settings.mouseDoublePressEvent(event); });
// Controller.mouseReleaseEvent.connect(function(event) { return settings.mouseReleaseEvent(event); });
// Controller.keyPressEvent.connect(function(event) { return settings.keyPressEvent(event); });
// var ICON_WIDTH = 50.0;
// var ICON_HEIGHT = 50.0;
// Icon dimensions and colors shared by all toolbar buttons.
var ICON_WIDTH = 40.0;
var ICON_HEIGHT = 40.0;
var ICON_COLOR = UI.rgba(45, 45, 45, 0.7);
var FOCUSED_COLOR = UI.rgba(250, 250, 250, 1.0);
var PANEL_BACKGROUND_COLOR = UI.rgba(50, 50, 50, 0.7);
var PANEL_PADDING = 7.0;
var PANEL_BORDER = 12.0;
var SUBPANEL_GAP = 1.0;
// All icon widgets created via addImage(); used later for hover highlighting.
var icons = [];
// Build a UI.Image for the named icon, attach it to `panel`, remember it in
// the global `icons` list (used for hover highlighting), and return it.
function addImage(panel, iconId) {
    var properties = {
        'imageURL': ICONS_URL + iconId + '.svg',
        'width': ICON_WIDTH,
        'height': ICON_HEIGHT,
        'color': ICON_COLOR,
        'alpha': ICON_COLOR.a
    };
    var image = panel.add(new UI.Image(properties));
    icons.push(image);
    return image;
}
// Every panel created via addPanel(); used later to make them all draggable.
var panels = [];

// Create a UI.WidgetStack from `properties`, filling in default background
// color/alpha, padding, and border where the caller did not supply them.
// The new panel is recorded in `panels` and returned.
function addPanel (properties) {
    if (!properties.background) {
        properties.background = {};
    }
    if (!properties.background.backgroundColor) {
        properties.background.backgroundColor = PANEL_BACKGROUND_COLOR;
    }
    if (!properties.background.backgroundAlpha) {
        properties.background.backgroundAlpha = PANEL_BACKGROUND_COLOR.a;
    }
    if (!properties.padding) {
        properties.padding = { x: PANEL_PADDING, y: PANEL_PADDING };
    }
    if (!properties.border) {
        properties.border = { x: PANEL_BORDER, y: PANEL_BORDER };
    }
    var panel = new UI.WidgetStack(properties);
    panels.push(panel);
    return panel;
}
// Make `panel` draggable: drag events received on `panel` move `target`
// (defaults to `panel` itself) by the cursor's delta since the drag began.
function makeDraggable (panel, target) {
    if (!target)
        target = panel;

    var dragStart = null;   // cursor position when the drag began
    var initialPos = null;  // target position when the drag began

    panel.addAction('onDragBegin', function (event) {
        dragStart = { x: event.x, y: event.y };
        initialPos = { x: target.position.x, y: target.position.y };
    });
    panel.addAction('onDragUpdate', function (event) {
        target.setPosition(
            initialPos.x + event.x - dragStart.x,
            initialPos.y + event.y - dragStart.y
        );
        UI.updateLayout();
    });
    panel.addAction('onDragEnd', function () {
        // BUG FIX: the original wrote `dragStart = dragEnd = null;`, which
        // assigned to an undeclared `dragEnd` (an accidental global, and a
        // ReferenceError in strict mode) and never cleared initialPos.
        dragStart = null;
        initialPos = null;
    });
}
// var panelContainer = new UI.WidgetContainer();
// panelContainer.setPosition(500, 250);
// panelContainer.setVisible(true);
// Demo pane: a label that echoes UI events plus a clickable button.
var demoPane = addPanel({ dir: '+y' });
var demoLabel = demoPane.add(new UI.Label({
text: "< no events >",
width: 400, height: 20
}));
var demoButton = demoPane.add(new UI.Box({
width: 200, height: 80,
text: "Button"
}));
// Build an event handler that writes `text` into the demo label and
// re-lays-out the UI. Used to echo UI events for debugging.
function setText(text) {
    var handler = function () {
        demoLabel.setText(text);
        UI.updateLayout();
    };
    return handler;
}
// For each action name in `actions`, attach a handler to `widget` that
// echoes "<action> <msg><widget>" into the demo label via setText().
function addDebugActions(widget, msg, actions) {
    for (var i = 0; i < actions.length; ++i) {
        widget.addAction(actions[i], setText(actions[i] + " " + msg + widget));
    }
}
// Every UI event the demo widgets echo to the demo label.
var debugEvents = [
'onMouseOver',
'onMouseExit',
'onMouseDown',
'onMouseUp',
'onDragBegin',
'onDragEnd',
'onDragUpdate'
];
addDebugActions(demoPane, "(container) ", debugEvents);
addDebugActions(demoButton, "(button) ", debugEvents);
addDebugActions(demoLabel, "(label) ", debugEvents);
// demoPane.addAction('onMouseOver', setText("onMouseOver " + demoPane));
// demoPane.addAction('onMouseExit', setText("onMouseExit " + demoPane));
// demoPane.addAction('onMouseDown', setText("onMouseDown " + demoPane));
// demoPane.addAction('onMouseUp', setText("onMouseUp " + demoPane));
makeDraggable(demoPane, demoPane);
demoPane.setPosition(600, 200);
// demoButton.addAction('onMouseOver', setText("onMouseOver " + demoButton));
// demoButton.addAction('onMouseExit', setText("onMouseExit " + demoButton));
// demoButton.addAction()
// var resizablePanel = new UI.Label({
// text: "Resizable panel",
// width: 200, height: 200,
// backgroundAlpha: 0.5
// });
// resizablePanel.setPosition(1100, 200);
// Toggle button for the widget toolkit's built-in debug overlay.
var debugToggle = new UI.Box({
text: "debug", width: 150, height: 20
});
debugToggle.setPosition(200, 0);
debugToggle.addAction('onClick', function () {
UI.debug.setVisible(!UI.debug.isVisible());
});
// debugEvents.forEach(function (action) {
// resizablePanel.addAction(action, function (event, widget) {
// widget.setText(action + " " + widget);
// });
// })
// Render an object's enumerable properties as a multi-line "{ k: v, ... }"
// string (one property per line); an empty object renders as "{}".
function join(obj) {
    var s = "{";
    var sep = "\n";
    for (var k in obj) {
        // NOTE: the original applied ("" + obj[k]).replace("\n", "\n") here —
        // a no-op that replaced a newline with itself (presumably intended to
        // indent continuation lines). The dead call is removed; output is
        // unchanged.
        s += sep + k + ": " + ("" + obj[k]);
        sep = ",\n";
    }
    if (s.length > 1)
        return s + " }";
    return s + "}";
}
// resizablePanel.getOverlay().update({
// text: "" + join(resizablePanel.actions)
// });
// Release the debug helpers now that wiring is done.
setText = addDebugActions = undefined;
// Single shared tooltip label, repositioned next to whichever widget is hovered.
var tooltipWidget = new UI.Label({
text: "<tooltip>",
width: 500, height: 20,
visible: false
});
// Show the shared tooltip (with `text`) just to the right of `widget` while
// the mouse hovers over it; hide it again on exit.
function addTooltip (widget, text) {
    var show = function (event, hovered) {
        tooltipWidget.setVisible(true);
        tooltipWidget.setPosition(hovered.position.x + hovered.getWidth() + 20, hovered.position.y);
        tooltipWidget.setText(text);
        UI.updateLayout();
    };
    var hide = function () {
        tooltipWidget.setVisible(false);
        UI.updateLayout();
    };
    widget.addAction('onMouseOver', show);
    widget.addAction('onMouseExit', hide);
}
// Main toolbar: a vertical stack of icon buttons with hover tooltips.
var mainPanel = addPanel({ dir: '+y' });
mainPanel.setPosition(500, 250);
mainPanel.setVisible(true);

var systemViewButton = addImage(mainPanel, 'solarsystems');
var zoomButton = addImage(mainPanel, 'magnifier');
var satelliteButton = addImage(mainPanel, 'satellite');
var settingsButton = addImage(mainPanel, 'settings');
var stopButton = addImage(mainPanel, 'close');

addTooltip(systemViewButton, "system view");
addTooltip(zoomButton, "zoom");
// Typo fix: the original tooltip read "satelite view".
addTooltip(satelliteButton, "satellite view");
addTooltip(settingsButton, "settings");
addTooltip(stopButton, "exit");
var systemViewPanel = addPanel({ dir: '+x', visible: false });
var reverseButton = addImage(systemViewPanel, 'reverse');
var pauseButton = addImage(systemViewPanel, 'playpause');
var forwardButton = addImage(systemViewPanel, 'forward');
var zoomPanel = addPanel({ dir: '+y', visible: true });
var label = new UI.Label({
text: "Foo",
width: 120,
height: 15,
color: UI.rgb(245, 290, 20),
alpha: 1.0,
backgroundColor: UI.rgb(10, 10, 10),
backgroundAlpha: 0.0
});
zoomPanel.add(label);
label.addAction('onMouseOver', function () {
label.setText("Bar");
UI.updateLayout();
});
label.addAction('onMouseExit', function () {
label.setText("Foo");
UI.updateLayout();
});
label.setText("Label id: " + label.id + ", parent id " + label.parent.id);
label.parent.addAction('onMouseOver', function () {
label.setText("on parent");
UI.updateLayout();
});
label.parent.addAction('onMouseExit', function () {
label.setText('exited parent');
UI.updateLayout();
});
var sliderLayout = zoomPanel.add(new UI.WidgetStack({
dir: '+x', visible: true, backgroundAlpha: 0.0
}));
var sliderLabel = sliderLayout.add(new UI.Label({
text: " ", width: 45, height: 20
}));
var slider = sliderLayout.add(new UI.Slider({
value: 10, maxValue: 100, minValue: 0,
width: 300, height: 20,
backgroundColor: UI.rgb(10, 10, 10),
backgroundAlpha: 1.0,
slider: { // slider knob
width: 30,
height: 18,
backgroundColor: UI.rgb(120, 120, 120),
backgroundAlpha: 1.0
}
}));
sliderLabel.setText("" + (+slider.getValue().toFixed(1)));
slider.onValueChanged = function (value) {
sliderLabel.setText("" + (+value.toFixed(1)));
UI.updateLayout();
}
var checkBoxLayout = zoomPanel.add(new UI.WidgetStack({
dir: '+x', visible: true, backgroundAlpha: 0.0
}));
// var padding = checkBoxLayout.add(new UI.Label({
// text: " ", width: 45, height: 20
// }));
var checkBoxLabel = checkBoxLayout.add(new UI.Label({
text: "set red", width: 60, height: 20,
backgroundAlpha: 0.0
}));
checkBoxLabel.setText("set red");
var defaultColor = UI.rgb(10, 10, 10);
var redColor = UI.rgb(210, 80, 80);
var checkbox = checkBoxLayout.add(new UI.Checkbox({
width: 20, height: 20, padding: { x: 3, y: 3 },
backgroundColor: defaultColor,
backgroundAlpha: 0.9,
checked: false,
onValueChanged: function (red) {
zoomPanel.getOverlay().update({
// backgroundAlpha: 0.1,
backgroundColor: red ? redColor : defaultColor
});
}
}));
addImage(zoomPanel, 'reverse');
UI.updateLayout();
// The sub-panels that behave as a mutually-exclusive group.
var subpanels = [ systemViewPanel, zoomPanel ];

// Hide every sub-panel other than `panel` (pass a non-member, e.g. null,
// to hide them all).
function hideSubpanelsExcept (panel) {
    for (var i = 0; i < subpanels.length; ++i) {
        if (subpanels[i] != panel) {
            subpanels[i].setVisible(false);
        }
    }
}
// Wire `button` to toggle `panel`: clicking hides the other sub-panels and
// flips this panel's visibility. Also registers a layout attachment that
// keeps the panel positioned just left of its button.
function attachPanel (panel, button) {
    var toggle = function () {
        hideSubpanelsExcept(panel);
        panel.setVisible(!panel.isVisible());
        UI.updateLayout();
    };
    var reposition = function (target, rel) {
        var x = rel.position.x - (target.getWidth() + target.border.x + SUBPANEL_GAP);
        var y = rel.position.y - target.border.y;
        target.setPosition(x, y);
    };
    button.addAction('onClick', toggle);
    UI.addAttachment(panel, button, reposition);
}
attachPanel(systemViewPanel, systemViewButton);
attachPanel(zoomPanel, zoomButton);
// Highlight `widget` while hovered: brighten on mouse-over, restore the
// default icon color on mouse-exit.
var addColorToggle = function (widget) {
    function focus () {
        widget.setColor(FOCUSED_COLOR);
    }
    function unfocus () {
        widget.setColor(ICON_COLOR);
    }
    widget.addAction('onMouseOver', focus);
    widget.addAction('onMouseExit', unfocus);
}
reverseButton.addAction('onClick', function() {});

// BUG FIX: the original handlers called hideSubpanels(), a function that is
// not defined anywhere in this script (the defined function is
// hideSubpanelsExcept), so triggering them threw a ReferenceError. Call
// hideSubpanelsExcept instead, keeping the hovered/clicked panel's state.
systemViewPanel.addAction('onMouseOver', function() {
    hideSubpanelsExcept(systemViewPanel);
    UI.updateLayout();
});
zoomButton.addAction('onClick', function() {
    hideSubpanelsExcept(zoomPanel);
    UI.updateLayout();
});
UI.updateLayout();

// Exit button: tear down all script-created overlays.
stopButton.addAction('onClick', function() {
    // Script.stop();
    teardown();
});
// Panel drag behavior
// (click + drag on border to drag)
// NOTE(review): in this IIFE, `this` is the global object only in non-strict
// mode, so startDrag/updateDrag/clearDrag become globals (inputHandler below
// relies on updateDrag/clearDrag). In strict mode `this` would be undefined
// and this would throw — confirm the runtime executes scripts non-strict.
// startDrag does not appear to be called anywhere in this file.
(function () {
// The drag action currently in progress, or null.
var dragged = null;
this.startDrag = function (dragAction) {
dragged = dragAction;
}
this.updateDrag = function (event) {
if (dragged) {
print("Update drag");
dragged.updateDrag(event);
}
}
// Clears the current drag; `dragged = null` runs unconditionally (the `if`
// guards only the print).
this.clearDrag = function (event) {
if (dragged)
print("End drag");
dragged = null;
}
})();
// Hover highlighting for every toolbar icon, and dragging for every panel
// (dragging any panel moves mainPanel). `.map` is used here purely for its
// side effects.
var buttons = icons;
buttons.map(addColorToggle);
panels.map(function (panel) { makeDraggable(panel, mainPanel); });
// Cleanup script resources
function teardown() {
UI.teardown();
// etc...
};
// Routes controller mouse events into the UI toolkit; updateDrag/clearDrag
// are the globals installed by the IIFE above.
var inputHandler = {
onMouseMove: function (event) {
updateDrag(event);
UI.handleMouseMove(event);
},
onMousePress: function (event) {
UI.handleMousePress(event);
},
onMouseRelease: function (event) {
clearDrag(event);
UI.handleMouseRelease(event);
}
};
Controller.mousePressEvent.connect(inputHandler.onMousePress);
Controller.mouseMoveEvent.connect(inputHandler.onMouseMove);
Controller.mouseReleaseEvent.connect(inputHandler.onMouseRelease);
// Ensure overlays are removed when the script stops.
Script.scriptEnding.connect(teardown);

File diff suppressed because it is too large Load diff

View file

@ -2,7 +2,7 @@ set(TARGET_NAME interface)
project(${TARGET_NAME})
# set a default root dir for each of our optional externals if it was not passed
set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient")
set(OPTIONAL_EXTERNALS "Faceshift" "LeapMotion" "RtMidi" "RSSDK" "3DConnexionClient" "iViewHMD")
foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
string(TOUPPER ${EXTERNAL} ${EXTERNAL}_UPPERCASE)
if (NOT ${${EXTERNAL}_UPPERCASE}_ROOT_DIR)

14
interface/external/iViewHMD/readme.txt vendored Normal file
View file

@ -0,0 +1,14 @@
Instructions for adding SMI HMD Eye Tracking to Interface on Windows
David Rowe, 27 Jul 2015.
1. Download and install the SMI HMD Eye Tracking software from http://update.smivision.com/iViewNG-HMD.exe.
2. Copy the SDK folders (3rdParty, include, libs) from the SDK installation folder C:\Program Files (x86)\SMI\iViewNG-HMD\SDK
into the interface/external/iViewHMD folder. This readme.txt should be there as well.
You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different
checkouts and different projects). If so, set the ENV variable "HIFI_LIB_DIR" to a directory containing a subfolder
"iViewHMD" that contains the folders mentioned above.
3. Clear your build directory, run cmake and build, and you should be all set.

View file

@ -113,6 +113,7 @@
#include "audio/AudioScope.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include "devices/Leapmotion.h"
#include "devices/RealSense.h"
@ -265,14 +266,14 @@ bool setupEssentials(int& argc, char** argv) {
auto scriptCache = DependencyManager::set<ScriptCache>();
auto soundCache = DependencyManager::set<SoundCache>();
auto faceshift = DependencyManager::set<Faceshift>();
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
auto eyeTracker = DependencyManager::set<EyeTracker>();
auto audio = DependencyManager::set<AudioClient>();
auto audioScope = DependencyManager::set<AudioScope>();
auto deferredLightingEffect = DependencyManager::set<DeferredLightingEffect>();
auto textureCache = DependencyManager::set<TextureCache>();
auto framebufferCache = DependencyManager::set<FramebufferCache>();
auto animationCache = DependencyManager::set<AnimationCache>();
auto ddeFaceTracker = DependencyManager::set<DdeFaceTracker>();
auto modelBlender = DependencyManager::set<ModelBlender>();
auto avatarManager = DependencyManager::set<AvatarManager>();
auto lodManager = DependencyManager::set<LODManager>();
@ -641,6 +642,12 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
#endif
#ifdef HAVE_IVIEWHMD
auto eyeTracker = DependencyManager::get<EyeTracker>();
eyeTracker->init();
setActiveEyeTracker();
#endif
_oldHandMouseX[0] = -1;
_oldHandMouseY[0] = -1;
_oldHandMouseX[1] = -1;
@ -719,6 +726,9 @@ void Application::cleanupBeforeQuit() {
#ifdef HAVE_DDE
DependencyManager::destroy<DdeFaceTracker>();
#endif
#ifdef HAVE_IVIEWHMD
DependencyManager::destroy<EyeTracker>();
#endif
}
void Application::emptyLocalCache() {
@ -2068,6 +2078,44 @@ void Application::setActiveFaceTracker() {
#endif
}
void Application::setActiveEyeTracker() {
#ifdef HAVE_IVIEWHMD
auto eyeTracker = DependencyManager::get<EyeTracker>();
if (!eyeTracker->isInitialized()) {
return;
}
bool isEyeTracking = Menu::getInstance()->isOptionChecked(MenuOption::SMIEyeTracking);
bool isSimulating = Menu::getInstance()->isOptionChecked(MenuOption::SimulateEyeTracking);
eyeTracker->setEnabled(isEyeTracking, isSimulating);
Menu::getInstance()->getActionForOption(MenuOption::OnePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
Menu::getInstance()->getActionForOption(MenuOption::ThreePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
Menu::getInstance()->getActionForOption(MenuOption::FivePointCalibration)->setEnabled(isEyeTracking && !isSimulating);
#endif
}
void Application::calibrateEyeTracker1Point() {
#ifdef HAVE_IVIEWHMD
auto eyeTracker = DependencyManager::get<EyeTracker>();
eyeTracker->calibrate(1);
#endif
}
void Application::calibrateEyeTracker3Points() {
#ifdef HAVE_IVIEWHMD
auto eyeTracker = DependencyManager::get<EyeTracker>();
eyeTracker->calibrate(3);
#endif
}
void Application::calibrateEyeTracker5Points() {
#ifdef HAVE_IVIEWHMD
auto eyeTracker = DependencyManager::get<EyeTracker>();
eyeTracker->calibrate(5);
#endif
}
bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) {
QVector<EntityItemPointer> entities;
@ -2310,7 +2358,8 @@ void Application::updateMyAvatarLookAtPosition() {
PerformanceWarning warn(showWarnings, "Application::updateMyAvatarLookAtPosition()");
_myAvatar->updateLookAtTargetAvatar();
FaceTracker* tracker = getActiveFaceTracker();
FaceTracker* faceTracker = getActiveFaceTracker();
auto eyeTracker = DependencyManager::get<EyeTracker>();
bool isLookingAtSomeone = false;
glm::vec3 lookAtSpot;
@ -2322,6 +2371,17 @@ void Application::updateMyAvatarLookAtPosition() {
} else {
lookAtSpot = _myCamera.getPosition() + transformPoint(_myAvatar->getSensorToWorldMatrix(), extractTranslation(getHMDSensorPose()));
}
} else if (eyeTracker->isTracking() && (isHMDMode() || eyeTracker->isSimulating())) {
// Look at the point that the user is looking at.
if (isHMDMode()) {
glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
glm::quat hmdRotation = glm::quat_cast(headPose);
lookAtSpot = _myCamera.getPosition() +
_myAvatar->getOrientation() * (hmdRotation * eyeTracker->getLookAtPosition());
} else {
lookAtSpot = _myAvatar->getHead()->getEyePosition() +
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * eyeTracker->getLookAtPosition());
}
} else {
AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock();
if (lookingAt && _myAvatar != lookingAt.get()) {
@ -2337,7 +2397,9 @@ void Application::updateMyAvatarLookAtPosition() {
if (faceAngle < MAXIMUM_FACE_ANGLE) {
// Randomly look back and forth between look targets
switch (_myAvatar->getEyeContactTarget()) {
eyeContactTarget target = Menu::getInstance()->isOptionChecked(MenuOption::FixGaze) ?
LEFT_EYE : _myAvatar->getEyeContactTarget();
switch (target) {
case LEFT_EYE:
lookAtSpot = lookingAtHead->getLeftEyePosition();
break;
@ -2354,17 +2416,24 @@ void Application::updateMyAvatarLookAtPosition() {
}
} else {
// I am not looking at anyone else, so just look forward
lookAtSpot = _myAvatar->getHead()->getEyePosition() +
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
if (isHMDMode()) {
glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
glm::quat headRotation = glm::quat_cast(headPose);
lookAtSpot = _myCamera.getPosition() +
_myAvatar->getOrientation() * (headRotation * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
} else {
lookAtSpot = _myAvatar->getHead()->getEyePosition() +
(_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
}
}
// Deflect the eyes a bit to match the detected gaze from the face tracker if active.
if (tracker && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw();
if (faceTracker && !faceTracker->isMuted()) {
float eyePitch = faceTracker->getEstimatedEyePitch();
float eyeYaw = faceTracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
float deflection = tracker->getEyeDeflection();
float deflection = faceTracker->getEyeDeflection();
if (isLookingAtSomeone) {
deflection *= GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT;
}
@ -3434,6 +3503,7 @@ void Application::renderRearViewMirror(RenderArgs* renderArgs, const QRect& regi
void Application::resetSensors() {
DependencyManager::get<Faceshift>()->reset();
DependencyManager::get<DdeFaceTracker>()->reset();
DependencyManager::get<EyeTracker>()->reset();
getActiveDisplayPlugin()->resetSensors();

View file

@ -292,9 +292,10 @@ public:
virtual QGLWidget* getPrimarySurface() override;
void setActiveDisplayPlugin(const QString& pluginName);
private:
DisplayPlugin * getActiveDisplayPlugin();
const DisplayPlugin * getActiveDisplayPlugin() const;
public:
FileLogger* getLogger() { return _logger; }
@ -428,6 +429,11 @@ public slots:
void resetSensors();
void setActiveFaceTracker();
void setActiveEyeTracker();
void calibrateEyeTracker1Point();
void calibrateEyeTracker3Points();
void calibrateEyeTracker5Points();
void aboutApp();
void showEditEntitiesHelp();

View file

@ -425,6 +425,23 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
#endif
#ifdef HAVE_IVIEWHMD
MenuWrapper* eyeTrackingMenu = avatarDebugMenu->addMenu("Eye Tracking");
addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SMIEyeTracking, 0, false,
qApp, SLOT(setActiveEyeTracker()));
{
MenuWrapper* calibrateEyeTrackingMenu = eyeTrackingMenu->addMenu("Calibrate");
addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::OnePointCalibration, 0,
qApp, SLOT(calibrateEyeTracker1Point()));
addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::ThreePointCalibration, 0,
qApp, SLOT(calibrateEyeTracker3Points()));
addActionToQMenuAndActionHash(calibrateEyeTrackingMenu, MenuOption::FivePointCalibration, 0,
qApp, SLOT(calibrateEyeTracker5Points()));
}
addCheckableActionToQMenuAndActionHash(eyeTrackingMenu, MenuOption::SimulateEyeTracking, 0, false,
qApp, SLOT(setActiveEyeTracker()));
#endif
auto avatarManager = DependencyManager::get<AvatarManager>();
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
avatarManager.data(), SLOT(setShouldShowReceiveStats(bool)));
@ -433,8 +450,10 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderHeadCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtTargets, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderFocusIndicator, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::ShowWhosLookingAtMe, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::FixGaze, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu,
MenuOption::Connexion,
0, false,

View file

@ -192,6 +192,8 @@ namespace MenuOption {
const QString ExpandUpdateTiming = "Expand /update";
const QString Faceshift = "Faceshift";
const QString FirstPerson = "First Person";
const QString FivePointCalibration = "5 Point Calibration";
const QString FixGaze = "Fix Gaze (no saccade)";
const QString Forward = "Forward";
const QString FrameTimer = "Show Timer";
const QString FullscreenMirror = "Fullscreen Mirror";
@ -218,6 +220,7 @@ namespace MenuOption {
const QString NamesAboveHeads = "Names Above Heads";
const QString NoFaceTracking = "None";
const QString OctreeStats = "Entity Statistics";
const QString OnePointCalibration = "1 Point Calibration";
const QString OnlyDisplayTopTen = "Only Display Top Ten";
const QString OutputMenu = "Display>Mode";
const QString PackageModel = "Package Model...";
@ -232,6 +235,7 @@ namespace MenuOption {
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
const QString RenderLookAtTargets = "Show Look-at Targets";
const QString RenderLookAtVectors = "Show Look-at Vectors";
const QString RenderSkeletonCollisionShapes = "Show Skeleton Collision Shapes";
const QString RenderTargetFramerate = "Framerate";
@ -271,12 +275,15 @@ namespace MenuOption {
const QString ShowRealtimeEntityStats = "Show Realtime Entity Stats";
const QString ShowWhosLookingAtMe = "Show Who's Looking at Me";
const QString StandingHMDSensorMode = "Standing HMD Sensor Mode";
const QString SimulateEyeTracking = "Simulate";
const QString SMIEyeTracking = "SMI Eye Tracking";
const QString Stars = "Stars";
const QString Stats = "Stats";
const QString StopAllScripts = "Stop All Scripts";
const QString SuppressShortTimings = "Suppress Timings Less than 10ms";
const QString TestPing = "Test Ping";
const QString ThirdPerson = "Third Person";
const QString ThreePointCalibration = "3 Point Calibration";
const QString ThrottleFPSIfNotFocus = "Throttle FPS If Not Focus";
const QString ToolWindow = "Tool Window";
const QString TransmitterDrive = "Transmitter Drive";

View file

@ -69,6 +69,8 @@ namespace render {
auto avatarPtr = static_pointer_cast<Avatar>(avatar);
bool renderLookAtVectors = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtVectors);
avatarPtr->setDisplayingLookatVectors(renderLookAtVectors);
bool renderLookAtTarget = Menu::getInstance()->isOptionChecked(MenuOption::RenderLookAtTargets);
avatarPtr->setDisplayingLookatTarget(renderLookAtTarget);
if (avatarPtr->isInitialized() && args) {
avatarPtr->render(args, Application::getInstance()->getCamera()->getPosition());
@ -601,7 +603,9 @@ void Avatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, floa
getHand()->render(renderArgs, false);
}
getHead()->render(renderArgs, 1.0f, renderFrustum);
getHead()->renderLookAts(renderArgs);
}
bool Avatar::shouldRenderHead(const RenderArgs* renderArgs) const {

View file

@ -91,6 +91,7 @@ public:
//setters
void setDisplayingLookatVectors(bool displayingLookatVectors) { getHead()->setRenderLookatVectors(displayingLookatVectors); }
void setDisplayingLookatTarget(bool displayingLookatTarget) { getHead()->setRenderLookatTarget(displayingLookatTarget); }
void setIsLookAtTarget(const bool isLookAtTarget) { _isLookAtTarget = isLookAtTarget; }
bool getIsLookAtTarget() const { return _isLookAtTarget; }
//getters

View file

@ -17,11 +17,13 @@
#include "Application.h"
#include "Avatar.h"
#include "DependencyManager.h"
#include "GeometryUtil.h"
#include "Head.h"
#include "Menu.h"
#include "Util.h"
#include "devices/DdeFaceTracker.h"
#include "devices/EyeTracker.h"
#include "devices/Faceshift.h"
#include "AvatarRig.h"
@ -44,6 +46,7 @@ Head::Head(Avatar* owningAvatar) :
_mouth3(0.0f),
_mouth4(0.0f),
_renderLookatVectors(false),
_renderLookatTarget(false),
_saccade(0.0f, 0.0f, 0.0f),
_saccadeTarget(0.0f, 0.0f, 0.0f),
_leftEyeBlinkVelocity(0.0f),
@ -116,6 +119,9 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
applyEyelidOffset(getFinalOrientationInWorldFrame());
}
}
auto eyeTracker = DependencyManager::get<EyeTracker>();
_isEyeTrackerConnected = eyeTracker->isTracking();
}
if (!myAvatar->getStandingHMDSensorMode()) {
@ -129,19 +135,24 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
}
if (!(_isFaceTrackerConnected || billboard)) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
if (!_isEyeTrackerConnected) {
// Update eye saccades
const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
const float AVERAGE_SACCADE_INTERVAL = 6.0f;
const float MICROSACCADE_MAGNITUDE = 0.002f;
const float SACCADE_MAGNITUDE = 0.04f;
const float NOMINAL_FRAME_RATE = 60.0f;
if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
_saccadeTarget = MICROSACCADE_MAGNITUDE * randVector();
} else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
_saccadeTarget = SACCADE_MAGNITUDE * randVector();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
} else {
_saccade = glm::vec3();
}
_saccade += (_saccadeTarget - _saccade) * pow(0.5f, NOMINAL_FRAME_RATE * deltaTime);
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
@ -218,6 +229,9 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
} else {
_saccade = glm::vec3();
}
if (Menu::getInstance()->isOptionChecked(MenuOption::FixGaze)) { // if debug menu turns off, use no saccade
_saccade = glm::vec3();
}
if (!isMine) {
_faceModel.setLODDistance(static_cast<Avatar*>(_owningAvatar)->getLODDistance());
@ -263,7 +277,7 @@ void Head::calculateMouthShapes() {
void Head::applyEyelidOffset(glm::quat headOrientation) {
// Adjusts the eyelid blendshape coefficients so that the eyelid follows the iris as the head pitches.
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FRONT, getCorrectedLookAtPosition() - _eyePosition);
glm::quat eyeRotation = rotationBetween(headOrientation * IDENTITY_FRONT, getLookAtPosition() - _eyePosition);
eyeRotation = eyeRotation * glm::angleAxis(safeEulerAngles(headOrientation).y, IDENTITY_UP); // Rotation w.r.t. head
float eyePitch = safeEulerAngles(eyeRotation).x;
@ -300,8 +314,18 @@ void Head::relaxLean(float deltaTime) {
}
void Head::render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum) {
}
void Head::renderLookAts(RenderArgs* renderArgs) {
renderLookAts(renderArgs, _leftEyePosition, _rightEyePosition);
}
void Head::renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition) {
if (_renderLookatVectors) {
renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
renderLookatVectors(renderArgs, leftEyePosition, rightEyePosition, getCorrectedLookAtPosition());
}
if (_renderLookatTarget) {
renderLookatTarget(renderArgs, getCorrectedLookAtPosition());
}
}
@ -418,4 +442,17 @@ void Head::renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition
geometryCache->renderLine(batch, rightEyePosition, lookatPosition, startColor, endColor, _rightEyeLookAtID);
}
// Debug visualization: draw a small translucent red sphere at the given
// look-at position using the current render batch.
void Head::renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition) {
auto& batch = *renderArgs->_batch;
// Place the model transform at the look-at point before binding/drawing.
auto transform = Transform{};
transform.setTranslation(lookatPosition);
batch.setModelTransform(transform);
auto deferredLighting = DependencyManager::get<DeferredLightingEffect>();
deferredLighting->bindSimpleProgram(batch);
auto geometryCache = DependencyManager::get<GeometryCache>();
const float LOOK_AT_TARGET_RADIUS = 0.075f;
const glm::vec4 LOOK_AT_TARGET_COLOR = { 0.8f, 0.0f, 0.0f, 0.75f }; // translucent red
geometryCache->renderSphere(batch, LOOK_AT_TARGET_RADIUS, 15, 15, LOOK_AT_TARGET_COLOR, true);
}

View file

@ -39,6 +39,9 @@ public:
void setAverageLoudness(float averageLoudness) { _averageLoudness = averageLoudness; }
void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
void setRenderLookatVectors(bool onOff) { _renderLookatVectors = onOff; }
void setRenderLookatTarget(bool onOff) { _renderLookatTarget = onOff; }
void renderLookAts(RenderArgs* renderArgs);
void renderLookAts(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition);
/// \return orientationBase+Delta
glm::quat getFinalOrientationInLocalFrame() const;
@ -123,6 +126,7 @@ private:
float _mouth3;
float _mouth4;
bool _renderLookatVectors;
bool _renderLookatTarget;
glm::vec3 _saccade;
glm::vec3 _saccadeTarget;
float _leftEyeBlinkVelocity;
@ -151,6 +155,7 @@ private:
// private methods
void renderLookatVectors(RenderArgs* renderArgs, glm::vec3 leftEyePosition, glm::vec3 rightEyePosition, glm::vec3 lookatPosition);
void renderLookatTarget(RenderArgs* renderArgs, glm::vec3 lookatPosition);
void calculateMouthShapes();
void applyEyelidOffset(glm::quat headOrientation);

View file

@ -24,6 +24,7 @@
#include <AnimationHandle.h>
#include <AudioClient.h>
#include <DependencyManager.h>
#include <display-plugins/DisplayPlugin.h>
#include <GeometryUtil.h>
#include <NodeList.h>
#include <udt/PacketHeaders.h>
@ -931,17 +932,13 @@ void MyAvatar::updateLookAtTargetAvatar() {
const float HUMAN_EYE_SEPARATION = 0.065f;
float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
if (Application::getInstance()->isHMDMode()) {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ glm::vec3(qApp->getHMDSensorPose()[3]) + gazeOffset);
} else {
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
}
avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+ gazeOffset);
} else {
avatar->getHead()->clearCorrectedLookAtPosition();
}
} else {
avatar->getHead()->clearCorrectedLookAtPosition();
}
}
auto avatarPointer = _lookAtTargetAvatar.lock();
@ -1249,6 +1246,24 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
if (shouldRenderHead(renderArgs)) {
getHead()->render(renderArgs, 1.0f, renderFrustum);
}
if (qApp->isHMDMode()) {
glm::vec3 cameraPosition = Application::getInstance()->getCamera()->getPosition();
glm::mat4 leftEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Left);
glm::vec3 leftEyePosition = glm::vec3(leftEyePose[3]);
glm::mat4 rightEyePose = Application::getInstance()->getActiveDisplayPlugin()->getEyePose(Eye::Right);
glm::vec3 rightEyePosition = glm::vec3(rightEyePose[3]);
glm::mat4 headPose = Application::getInstance()->getActiveDisplayPlugin()->getHeadPose();
glm::vec3 headPosition = glm::vec3(headPose[3]);
getHead()->renderLookAts(renderArgs,
cameraPosition + getOrientation() * (leftEyePosition - headPosition),
cameraPosition + getOrientation() * (rightEyePosition - headPosition));
} else {
getHead()->renderLookAts(renderArgs);
}
getHand()->render(renderArgs, true);
}

View file

@ -0,0 +1,308 @@
//
// EyeTracker.cpp
// interface/src/devices
//
// Created by David Rowe on 27 Jul 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "EyeTracker.h"
#include <QFuture>
#include <QMessageBox>
#include <QtConcurrent/QtConcurrentRun>
#include <SharedUtil.h>
#include "InterfaceLogging.h"
#include "OctreeConstants.h"
#ifdef HAVE_IVIEWHMD
// Name under which the user's eye-tracker calibration is saved/loaded via the
// SMI SDK (currently only referenced from commented-out save/load code).
// Fix: binding a string literal to a mutable char* is deprecated/ill-formed in
// C++11 and later; the pointer and pointee are now const, and the symbol is
// file-local since nothing outside this translation unit uses it.
static const char* const HIGH_FIDELITY_EYE_TRACKER_CALIBRATION = "HighFidelityEyeTrackerCalibration";
#endif
#ifdef HAVE_IVIEWHMD
// SMI SDK data callback: forwards each sample to the EyeTracker singleton.
static void CALLBACK eyeTrackerCallback(smi_CallbackDataStruct* data) {
    auto tracker = DependencyManager::get<EyeTracker>();
    if (!tracker) {
        // A few stray callbacks can still arrive after smi_quit(); drop them.
        return;
    }
    tracker->processData(data);
}
#endif
// Destructor: shut down SMI streaming if it was ever started, logging any
// SDK error on the way out.
EyeTracker::~EyeTracker() {
#ifdef HAVE_IVIEWHMD
    if (!_isStreaming) {
        return;
    }
    const int result = smi_quit();
    if (result != SMI_RET_SUCCESS) {
        qCWarning(interfaceapp) << "Eye Tracker: Error terminating tracking:" << smiReturnValueToString(result);
    }
#endif
}
#ifdef HAVE_IVIEWHMD
// Handle one SMI callback payload. Always records the arrival time (so
// isTracking() can detect recent activity), then — when enabled — converts a
// simple gaze sample into a single look-at position stored in _lookAtPosition
// (meters, in the head frame, measured from the mid-eye point).
void EyeTracker::processData(smi_CallbackDataStruct* data) {
_lastProcessDataTimestamp = usecTimestampNow();
if (!_isEnabled) {
return;
}
if (data->type == SMI_SIMPLE_GAZE_SAMPLE) {
// Calculate the intersections of the left and right eye look-at vectors with a vertical plane along the monocular
// gaze direction. Average these positions to give the look-at point.
// If the eyes are parallel or diverged, gaze at a distant look-at point calculated the same as for non eye tracking.
// Line-plane intersection: https://en.wikipedia.org/wiki/Line%E2%80%93plane_intersection
smi_SampleHMDStruct* sample = (smi_SampleHMDStruct*)data->result;
// The iViewHMD coordinate system has x and z axes reversed compared to Interface, i.e., wearing the HMD:
// - x is left
// - y is up
// - z is forwards
// Plane
smi_Vec3d point = sample->gazeBasePoint; // mm
smi_Vec3d direction = sample->gazeDirection;
// Convert mm -> meters and flip x and z into Interface's frame.
glm::vec3 planePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
// Normal of the vertical plane that contains the monocular gaze direction.
glm::vec3 planeNormal = glm::vec3(-direction.z, 0.0f, direction.x);
glm::vec3 monocularDirection = glm::vec3(-direction.x, direction.y, -direction.z);
// Left eye
point = sample->left.gazeBasePoint; // mm
direction = sample->left.gazeDirection;
glm::vec3 leftLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
glm::vec3 leftLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
// Right eye
point = sample->right.gazeBasePoint; // mm
direction = sample->right.gazeDirection;
glm::vec3 rightLinePoint = glm::vec3(-point.x, point.y, -point.z) / 1000.0f;
glm::vec3 rightLineDirection = glm::vec3(-direction.x, direction.y, -direction.z);
// Plane - line dot products
float leftLinePlaneDotProduct = glm::dot(leftLineDirection, planeNormal);
float rightLinePlaneDotProduct = glm::dot(rightLineDirection, planeNormal);
// Gaze into distance if eyes are parallel or diverged; otherwise the look-at is the average of look-at points
glm::vec3 lookAtPosition;
// NOTE(review): bare abs() on a float may resolve to the C int overload unless
// std::abs from <cmath> is in scope here — confirm, or use fabsf()/glm::abs().
if (abs(leftLinePlaneDotProduct) <= FLT_EPSILON || abs(rightLinePlaneDotProduct) <= FLT_EPSILON) {
lookAtPosition = monocularDirection * (float)TREE_SCALE;
} else {
float leftDistance = glm::dot(planePoint - leftLinePoint, planeNormal) / leftLinePlaneDotProduct;
float rightDistance = glm::dot(planePoint - rightLinePoint, planeNormal) / rightLinePlaneDotProduct;
// Behind-the-eye or implausibly distant intersections also fall back to distant gaze.
if (leftDistance <= 0.0f || rightDistance <= 0.0f
|| leftDistance > (float)TREE_SCALE || rightDistance > (float)TREE_SCALE) {
lookAtPosition = monocularDirection * (float)TREE_SCALE;
} else {
glm::vec3 leftIntersectionPoint = leftLinePoint + leftDistance * leftLineDirection;
glm::vec3 rightIntersectionPoint = rightLinePoint + rightDistance * rightLineDirection;
lookAtPosition = (leftIntersectionPoint + rightIntersectionPoint) / 2.0f;
}
}
// Discard samples that produced NaNs, keeping the previous look-at position.
if (glm::isnan(lookAtPosition.x) || glm::isnan(lookAtPosition.y) || glm::isnan(lookAtPosition.z)) {
return;
}
_lookAtPosition = lookAtPosition;
}
}
#endif
// One-time initialization: registers the SMI data callback and wires up the
// watcher that reports when the (blocking, background) stream start finishes.
// Safe to call again; repeat calls just log a warning.
void EyeTracker::init() {
    if (_isInitialized) {
        qCWarning(interfaceapp) << "Eye Tracker: Already initialized";
        return;
    }
#ifdef HAVE_IVIEWHMD
    const int result = smi_setCallback(eyeTrackerCallback);
    if (result == SMI_RET_SUCCESS) {
        _isInitialized = true;
    } else {
        qCWarning(interfaceapp) << "Eye Tracker: Error setting callback:" << smiReturnValueToString(result);
        QMessageBox::warning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
    }
    // Streaming is started on a worker thread (see setEnabled()); get notified when it returns.
    connect(&_startStreamingWatcher, SIGNAL(finished()), this, SLOT(onStreamStarted()));
#endif
}
#ifdef HAVE_IVIEWHMD
// Start SMI data streaming (optionally with simulated data). Blocking SDK
// call — run on a worker thread (setEnabled() uses QtConcurrent::run).
// Returns the SMI result code.
int EyeTracker::startStreaming(bool simulate) {
return smi_startStreaming(simulate); // This call blocks execution.
}
#endif
#ifdef HAVE_IVIEWHMD
// Slot run when the background smi_startStreaming() call completes: records
// whether streaming is up, logs the outcome, and surfaces errors to the user.
void EyeTracker::onStreamStarted() {
int result = _startStreamingWatcher.result();
_isStreaming = (result == SMI_RET_SUCCESS);
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error starting streaming:" << smiReturnValueToString(result);
// Display error dialog unless SMI SDK has already displayed an error message.
if (result != SMI_ERROR_HMD_NOT_SUPPORTED) {
QMessageBox::warning(nullptr, "Eye Tracker Error", smiReturnValueToString(result));
}
} else {
qCDebug(interfaceapp) << "Eye Tracker: Started streaming";
}
// TODO: Re-enable once saving / loading calibrations is working
//if (_isStreaming) {
// // Automatically load calibration if one has been saved.
// QString availableCalibrations = QString(smi_getAvailableCalibrations());
// if (availableCalibrations.contains(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION)) {
// result = smi_loadCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
// if (result != SMI_RET_SUCCESS) {
// qCWarning(interfaceapp) << "Eye Tracker: Error loading calibration:" << smiReturnValueToString(result);
// QMessageBox::warning(nullptr, "Eye Tracker Error", "Error loading calibration"
// + smiReturnValueToString(result));
// } else {
// qCDebug(interfaceapp) << "Eye Tracker: Loaded calibration";
// }
// }
//}
}
#endif
// Enable or disable eye tracking, optionally using SMI's simulated-data mode.
// No-op until init() has succeeded. Starting the stream is asynchronous;
// onStreamStarted() reports the result.
void EyeTracker::setEnabled(bool enabled, bool simulate) {
if (!_isInitialized) {
return;
}
#ifdef HAVE_IVIEWHMD
qCDebug(interfaceapp) << "Eye Tracker: Set enabled =" << enabled << ", simulate =" << simulate;
// There is no smi_stopStreaming() method and after an smi_quit(), streaming cannot be restarted (at least not for
// simulated data). So keep streaming once started in case tracking is re-enabled after stopping.
// Try to stop streaming if changing whether simulating or not.
if (enabled && _isStreaming && _isStreamSimulating != simulate) {
int result = smi_quit();
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error stopping streaming:" << smiReturnValueToString(result);
}
_isStreaming = false;
}
if (enabled && !_isStreaming) {
// Start SMI streaming in a separate thread because it blocks.
QFuture<int> future = QtConcurrent::run(this, &EyeTracker::startStreaming, simulate);
_startStreamingWatcher.setFuture(future);
_isStreamSimulating = simulate;
}
_isEnabled = enabled;
_isSimulating = simulate;
#endif
}
// Part of the tracker interface; eye tracking keeps no per-session pose state
// that needs resetting, so this is intentionally empty.
void EyeTracker::reset() {
// Nothing to do.
}
bool EyeTracker::isTracking() const {
static const quint64 ACTIVE_TIMEOUT_USECS = 2000000; // 2 secs
return _isEnabled && (usecTimestampNow() - _lastProcessDataTimestamp < ACTIVE_TIMEOUT_USECS);
}
#ifdef HAVE_IVIEWHMD
// Run the SMI in-HMD calibration routine with 1, 3, or 5 points (any other
// count is rejected). Requires an active stream. SDK errors are logged and,
// at the end, surfaced to the user in a dialog.
void EyeTracker::calibrate(int points) {
if (!_isStreaming) {
qCWarning(interfaceapp) << "Eye Tracker: Cannot calibrate because not streaming";
return;
}
// NOTE(review): the result of smi_createCalibrationHMDStruct() is not checked —
// confirm it cannot fail and leave calibrationHMDStruct unset.
smi_CalibrationHMDStruct* calibrationHMDStruct;
smi_createCalibrationHMDStruct(&calibrationHMDStruct);
smi_CalibrationTypeEnum calibrationType;
switch (points) {
case 1:
calibrationType = SMI_ONE_POINT_CALIBRATION;
qCDebug(interfaceapp) << "Eye Tracker: One point calibration";
break;
case 3:
calibrationType = SMI_THREE_POINT_CALIBRATION;
qCDebug(interfaceapp) << "Eye Tracker: Three point calibration";
break;
case 5:
calibrationType = SMI_FIVE_POINT_CALIBRATION;
qCDebug(interfaceapp) << "Eye Tracker: Five point calibration";
break;
default:
qCWarning(interfaceapp) << "Eye Tracker: Invalid calibration specified";
return;
}
calibrationHMDStruct->type = calibrationType;
// Mid-grey background with white calibration targets.
calibrationHMDStruct->backgroundColor->blue = 0.5;
calibrationHMDStruct->backgroundColor->green = 0.5;
calibrationHMDStruct->backgroundColor->red = 0.5;
calibrationHMDStruct->foregroundColor->blue = 1.0;
calibrationHMDStruct->foregroundColor->green = 1.0;
calibrationHMDStruct->foregroundColor->red = 1.0;
int result = smi_setupCalibration(calibrationHMDStruct);
if (result != SMI_RET_SUCCESS) {
// Setup failure returns before the dialog below; only the log reports it.
qCWarning(interfaceapp) << "Eye Tracker: Error setting up calibration:" << smiReturnValueToString(result);
return;
} else {
result = smi_calibrate();
if (result != SMI_RET_SUCCESS) {
qCWarning(interfaceapp) << "Eye Tracker: Error performing calibration:" << smiReturnValueToString(result);
} else {
// TODO: Re-enable once saving / loading calibrations is working
//result = smi_saveCalibration(HIGH_FIDELITY_EYE_TRACKER_CALIBRATION);
//if (result != SMI_RET_SUCCESS) {
// qCWarning(interfaceapp) << "Eye Tracker: Error saving calibration:" << smiReturnValueToString(result);
//}
}
}
if (result != SMI_RET_SUCCESS) {
QMessageBox::warning(nullptr, "Eye Tracker Error", "Calibration error: " + smiReturnValueToString(result));
}
}
#endif
#ifdef HAVE_IVIEWHMD
// Map an SMI SDK return code to a human-readable message for logs and error
// dialogs. Unrecognized codes are rendered as their decimal value.
QString EyeTracker::smiReturnValueToString(int value) {
    switch (value) {
        case smi_ErrorReturnValue::SMI_ERROR_NO_CALLBACK_SET:
            return "No callback set";
        case smi_ErrorReturnValue::SMI_ERROR_CONNECTING_TO_HMD:
            return "Error connecting to HMD";
        case smi_ErrorReturnValue::SMI_ERROR_HMD_NOT_SUPPORTED:
            return "HMD not supported";
        case smi_ErrorReturnValue::SMI_ERROR_NOT_IMPLEMENTED:
            return "Not implemented"; // fixed typo: was "Not implmented"
        case smi_ErrorReturnValue::SMI_ERROR_INVALID_PARAMETER:
            return "Invalid parameter";
        case smi_ErrorReturnValue::SMI_ERROR_EYECAMERAS_NOT_AVAILABLE:
            return "Eye cameras not available";
        case smi_ErrorReturnValue::SMI_ERROR_OCULUS_RUNTIME_NOT_SUPPORTED:
            return "Oculus runtime not supported";
        // TODO: Re-enable once saving / loading calibrations is working
        //case smi_ErrorReturnValue::SMI_ERROR_FILE_NOT_FOUND:
        //    return "File not found";
        //case smi_ErrorReturnValue::SMI_ERROR_FILE_EMPTY:
        //    return "File empty";
        case smi_ErrorReturnValue::SMI_ERROR_UNKNOWN:
            return "Unknown error";
        default:
            QString number;
            number.setNum(value);
            return number;
    }
}
#endif

View file

@ -0,0 +1,71 @@
//
// EyeTracker.h
// interface/src/devices
//
// Created by David Rowe on 27 Jul 2015.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_EyeTracker_h
#define hifi_EyeTracker_h
#include <QObject>
#include <QFutureWatcher>
#include <glm/glm.hpp>
#include <DependencyManager.h>
#ifdef HAVE_IVIEWHMD
#include <iViewHMDAPI.h>
#endif
/// Singleton interface to the SMI iViewHMD eye tracker. Streaming is started
/// asynchronously (the SDK's start call blocks); gaze samples arrive via an
/// SDK callback and are reduced to a single look-at position.
class EyeTracker : public QObject, public Dependency {
    Q_OBJECT
    SINGLETON_DEPENDENCY

public:
    ~EyeTracker();

    void init();
    void setEnabled(bool enabled, bool simulate);
    void reset();

    bool isInitialized() const { return _isInitialized; }
    bool isEnabled() const { return _isEnabled; }
    bool isTracking() const;
    bool isSimulating() const { return _isSimulating; }

    glm::vec3 getLookAtPosition() const { return _lookAtPosition; }  // From mid eye point in head frame.

#ifdef HAVE_IVIEWHMD
    void processData(smi_CallbackDataStruct* data);
    void calibrate(int points);
    int startStreaming(bool simulate);

private slots:
    void onStreamStarted();
#endif

private:
    QString smiReturnValueToString(int value);

    bool _isInitialized = false;
    bool _isEnabled = false;
    bool _isSimulating = false;
    bool _isStreaming = false;
    bool _isStreamSimulating = false;

    // Fix: previously uninitialized — isTracking() subtracts this from the
    // current time, so an indeterminate value could spuriously report
    // "tracking" before any sample arrived. 0 means "no sample yet".
    quint64 _lastProcessDataTimestamp = 0;

    glm::vec3 _lookAtPosition;

    QFutureWatcher<int> _startStreamingWatcher;  // signals when blocking stream start returns
};
#endif // hifi_EyeTracker_h

View file

@ -161,16 +161,6 @@ QByteArray AvatarData::toByteArray() {
// Body scale
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);
// Head rotation
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalPitch());
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalYaw());
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->getFinalRoll());
// Body lean
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanForward);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_leanSideways);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _headData->_torsoTwist);
// Lookat Position
memcpy(destinationBuffer, &_headData->_lookAtPosition, sizeof(_headData->_lookAtPosition));
destinationBuffer += sizeof(_headData->_lookAtPosition);
@ -194,6 +184,11 @@ QByteArray AvatarData::toByteArray() {
if (_headData->_isFaceTrackerConnected) {
setAtBit(bitItems, IS_FACESHIFT_CONNECTED);
}
// eye tracker state
if (_headData->_isEyeTrackerConnected) {
setAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
}
// referential state
if (_referential != NULL && _referential->isValid()) {
setAtBit(bitItems, HAS_REFERENTIAL);
}
@ -278,25 +273,20 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
quint64 now = usecTimestampNow();
// The absolute minimum size of the update data is as follows:
// 50 bytes of "plain old data" {
// 36 bytes of "plain old data" {
// position = 12 bytes
// bodyYaw = 2 (compressed float)
// bodyPitch = 2 (compressed float)
// bodyRoll = 2 (compressed float)
// targetScale = 2 (compressed float)
// headPitch = 2 (compressed float)
// headYaw = 2 (compressed float)
// headRoll = 2 (compressed float)
// leanForward = 2 (compressed float)
// leanSideways = 2 (compressed float)
// torsoTwist = 2 (compressed float)
// lookAt = 12
// audioLoudness = 4
// }
// + 1 byte for varying data
// + 1 byte for pupilSize
// + 1 byte for numJoints (0)
// = 51 bytes
int minPossibleSize = 51;
// = 39 bytes
int minPossibleSize = 39;
int maxAvailableSize = buffer.size();
if (minPossibleSize > maxAvailableSize) {
@ -354,39 +344,6 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
_targetScale = scale;
} // 20 bytes
{ // Head rotation
//(NOTE: This needs to become a quaternion to save two bytes)
float headYaw, headPitch, headRoll;
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &headPitch);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &headYaw);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*) sourceBuffer, &headRoll);
if (glm::isnan(headYaw) || glm::isnan(headPitch) || glm::isnan(headRoll)) {
if (shouldLogError(now)) {
qCDebug(avatars) << "Discard nan AvatarData::headYaw,headPitch,headRoll; displayName = '" << _displayName << "'";
}
return maxAvailableSize;
}
_headData->setBasePitch(headPitch);
_headData->setBaseYaw(headYaw);
_headData->setBaseRoll(headRoll);
} // 6 bytes
{ // Head lean (relative to pelvis)
float leanForward, leanSideways, torsoTwist;
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &leanForward);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &leanSideways);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &torsoTwist);
if (glm::isnan(leanForward) || glm::isnan(leanSideways)) {
if (shouldLogError(now)) {
qCDebug(avatars) << "Discard nan AvatarData::leanForward,leanSideways,torsoTwise; displayName = '" << _displayName << "'";
}
return maxAvailableSize;
}
_headData->_leanForward = leanForward;
_headData->_leanSideways = leanSideways;
_headData->_torsoTwist = torsoTwist;
} // 6 bytes
{ // Lookat Position
glm::vec3 lookAt;
memcpy(&lookAt, sourceBuffer, sizeof(lookAt));
@ -431,6 +388,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
+ (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
_headData->_isFaceTrackerConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
_headData->_isEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);
bool hasReferential = oneAtBit(bitItems, HAS_REFERENTIAL);
// Referential

View file

@ -76,21 +76,21 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
const qint64 AVATAR_SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND;
// Bitset of state flags - we store the key state, hand state, faceshift, chat circling, and existance of
// Bitset of state flags - we store the key state, hand state, Faceshift, eye tracking, and existence of
// referential data in this bit set. The hand state is an octal, but is split into two sections to maintain
// backward compatibility. The bits are ordered as such (0-7 left to right).
// +-----+-----+-+-+-+--+
// |K0,K1|H0,H1|F|C|R|H2|
// |K0,K1|H0,H1|F|E|R|H2|
// +-----+-----+-+-+-+--+
// Key state - K0,K1 is found in the 1st and 2nd bits
// Hand state - H0,H1,H2 is found in the 3rd, 4th, and 8th bits
// Faceshift - F is found in the 5th bit
// Chat Circling - C is found in the 6th bit
// Eye tracker - E is found in the 6th bit
// Referential Data - R is found in the 7th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
const int IS_FACESHIFT_CONNECTED = 4; // 5th bit
const int UNUSED_AVATAR_STATE_BIT_5 = 5; // 6th bit (was CHAT_CIRCLING)
const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_REFERENTIAL = 6; // 7th bit
const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit

View file

@ -32,6 +32,7 @@ HeadData::HeadData(AvatarData* owningAvatar) :
_lookAtPosition(0.0f, 0.0f, 0.0f),
_audioLoudness(0.0f),
_isFaceTrackerConnected(false),
_isEyeTrackerConnected(false),
_leftEyeBlink(0.0f),
_rightEyeBlink(0.0f),
_averageLoudness(0.0f),

View file

@ -93,6 +93,7 @@ protected:
glm::vec3 _lookAtPosition;
float _audioLoudness;
bool _isFaceTrackerConnected;
bool _isEyeTrackerConnected;
float _leftEyeBlink;
float _rightEyeBlink;
float _averageLoudness;

View file

@ -1189,74 +1189,84 @@ void EntityItem::setDimensions(const glm::vec3& value) {
return;
}
_transform.setScale(value);
requiresRecalcBoxes();
}
/// The maximum bounding cube for the entity, independent of it's rotation.
/// This accounts for the registration point (upon which rotation occurs around).
///
AACube EntityItem::getMaximumAACube() const {
// * we know that the position is the center of rotation
glm::vec3 centerOfRotation = getPosition(); // also where _registration point is
const AACube& EntityItem::getMaximumAACube() const {
if (_recalcMaxAACube) {
// * we know that the position is the center of rotation
glm::vec3 centerOfRotation = getPosition(); // also where _registration point is
// * we know that the registration point is the center of rotation
// * we can calculate the length of the furthest extent from the registration point
// as the dimensions * max (registrationPoint, (1.0,1.0,1.0) - registrationPoint)
glm::vec3 registrationPoint = (getDimensions() * getRegistrationPoint());
glm::vec3 registrationRemainder = (getDimensions() * (glm::vec3(1.0f, 1.0f, 1.0f) - getRegistrationPoint()));
glm::vec3 furthestExtentFromRegistration = glm::max(registrationPoint, registrationRemainder);
// * we know that the registration point is the center of rotation
// * we can calculate the length of the furthest extent from the registration point
// as the dimensions * max (registrationPoint, (1.0,1.0,1.0) - registrationPoint)
glm::vec3 registrationPoint = (getDimensions() * getRegistrationPoint());
glm::vec3 registrationRemainder = (getDimensions() * (glm::vec3(1.0f, 1.0f, 1.0f) - getRegistrationPoint()));
glm::vec3 furthestExtentFromRegistration = glm::max(registrationPoint, registrationRemainder);
// * we know that if you rotate in any direction you would create a sphere
// that has a radius of the length of furthest extent from registration point
float radius = glm::length(furthestExtentFromRegistration);
// * we know that if you rotate in any direction you would create a sphere
// that has a radius of the length of furthest extent from registration point
float radius = glm::length(furthestExtentFromRegistration);
// * we know that the minimum bounding cube of this maximum possible sphere is
// (center - radius) to (center + radius)
glm::vec3 minimumCorner = centerOfRotation - glm::vec3(radius, radius, radius);
// * we know that the minimum bounding cube of this maximum possible sphere is
// (center - radius) to (center + radius)
glm::vec3 minimumCorner = centerOfRotation - glm::vec3(radius, radius, radius);
AACube boundingCube(minimumCorner, radius * 2.0f);
return boundingCube;
_maxAACube = AACube(minimumCorner, radius * 2.0f);
_recalcMaxAACube = false;
}
return _maxAACube;
}
/// The minimum bounding cube for the entity accounting for it's rotation.
/// This accounts for the registration point (upon which rotation occurs around).
///
AACube EntityItem::getMinimumAACube() const {
// _position represents the position of the registration point.
glm::vec3 registrationRemainder = glm::vec3(1.0f, 1.0f, 1.0f) - _registrationPoint;
const AACube& EntityItem::getMinimumAACube() const {
if (_recalcMinAACube) {
// _position represents the position of the registration point.
glm::vec3 registrationRemainder = glm::vec3(1.0f, 1.0f, 1.0f) - _registrationPoint;
glm::vec3 unrotatedMinRelativeToEntity = - (getDimensions() * getRegistrationPoint());
glm::vec3 unrotatedMaxRelativeToEntity = getDimensions() * registrationRemainder;
Extents unrotatedExtentsRelativeToRegistrationPoint = { unrotatedMinRelativeToEntity, unrotatedMaxRelativeToEntity };
Extents rotatedExtentsRelativeToRegistrationPoint = unrotatedExtentsRelativeToRegistrationPoint.getRotated(getRotation());
glm::vec3 unrotatedMinRelativeToEntity = - (getDimensions() * getRegistrationPoint());
glm::vec3 unrotatedMaxRelativeToEntity = getDimensions() * registrationRemainder;
Extents unrotatedExtentsRelativeToRegistrationPoint = { unrotatedMinRelativeToEntity, unrotatedMaxRelativeToEntity };
Extents rotatedExtentsRelativeToRegistrationPoint = unrotatedExtentsRelativeToRegistrationPoint.getRotated(getRotation());
// shift the extents to be relative to the position/registration point
rotatedExtentsRelativeToRegistrationPoint.shiftBy(getPosition());
// shift the extents to be relative to the position/registration point
rotatedExtentsRelativeToRegistrationPoint.shiftBy(getPosition());
// the cube that best encompasses extents is...
AABox box(rotatedExtentsRelativeToRegistrationPoint);
glm::vec3 centerOfBox = box.calcCenter();
float longestSide = box.getLargestDimension();
float halfLongestSide = longestSide / 2.0f;
glm::vec3 cornerOfCube = centerOfBox - glm::vec3(halfLongestSide, halfLongestSide, halfLongestSide);
// the cube that best encompasses extents is...
AABox box(rotatedExtentsRelativeToRegistrationPoint);
glm::vec3 centerOfBox = box.calcCenter();
float longestSide = box.getLargestDimension();
float halfLongestSide = longestSide / 2.0f;
glm::vec3 cornerOfCube = centerOfBox - glm::vec3(halfLongestSide, halfLongestSide, halfLongestSide);
// old implementation... not correct!!!
return AACube(cornerOfCube, longestSide);
_minAACube = AACube(cornerOfCube, longestSide);
_recalcMinAACube = false;
}
return _minAACube;
}
AABox EntityItem::getAABox() const {
// _position represents the position of the registration point.
glm::vec3 registrationRemainder = glm::vec3(1.0f, 1.0f, 1.0f) - _registrationPoint;
const AABox& EntityItem::getAABox() const {
if (_recalcAABox) {
// _position represents the position of the registration point.
glm::vec3 registrationRemainder = glm::vec3(1.0f, 1.0f, 1.0f) - _registrationPoint;
glm::vec3 unrotatedMinRelativeToEntity = - (getDimensions() * _registrationPoint);
glm::vec3 unrotatedMaxRelativeToEntity = getDimensions() * registrationRemainder;
Extents unrotatedExtentsRelativeToRegistrationPoint = { unrotatedMinRelativeToEntity, unrotatedMaxRelativeToEntity };
Extents rotatedExtentsRelativeToRegistrationPoint = unrotatedExtentsRelativeToRegistrationPoint.getRotated(getRotation());
glm::vec3 unrotatedMinRelativeToEntity = - (getDimensions() * _registrationPoint);
glm::vec3 unrotatedMaxRelativeToEntity = getDimensions() * registrationRemainder;
Extents unrotatedExtentsRelativeToRegistrationPoint = { unrotatedMinRelativeToEntity, unrotatedMaxRelativeToEntity };
Extents rotatedExtentsRelativeToRegistrationPoint = unrotatedExtentsRelativeToRegistrationPoint.getRotated(getRotation());
// shift the extents to be relative to the position/registration point
rotatedExtentsRelativeToRegistrationPoint.shiftBy(getPosition());
// shift the extents to be relative to the position/registration point
rotatedExtentsRelativeToRegistrationPoint.shiftBy(getPosition());
return AABox(rotatedExtentsRelativeToRegistrationPoint);
_cachedAABox = AABox(rotatedExtentsRelativeToRegistrationPoint);
_recalcAABox = false;
}
return _cachedAABox;
}
// NOTE: This should only be used in cases of old bitstreams which only contain radius data

View file

@ -214,14 +214,16 @@ public:
void setTranformToCenter(const Transform& transform);
inline const Transform& getTransform() const { return _transform; }
inline void setTransform(const Transform& transform) { _transform = transform; }
inline void setTransform(const Transform& transform) { _transform = transform; requiresRecalcBoxes(); }
/// Position in meters (0.0 - TREE_SCALE)
inline const glm::vec3& getPosition() const { return _transform.getTranslation(); }
inline void setPosition(const glm::vec3& value) { _transform.setTranslation(value); }
inline void setPosition(const glm::vec3& value) { _transform.setTranslation(value); requiresRecalcBoxes(); }
inline const glm::quat& getRotation() const { return _transform.getRotation(); }
inline void setRotation(const glm::quat& rotation) { _transform.setRotation(rotation); }
inline void setRotation(const glm::quat& rotation) { _transform.setRotation(rotation); requiresRecalcBoxes(); }
inline void requiresRecalcBoxes() { _recalcAABox = true; _recalcMinAACube = true; _recalcMaxAACube = true; }
// Hyperlink related getters and setters
QString getHref() const { return _href; }
@ -286,9 +288,9 @@ public:
quint64 getExpiry() const;
// position, size, and bounds related helpers
AACube getMaximumAACube() const;
AACube getMinimumAACube() const;
AABox getAABox() const; /// axis aligned bounding box in world-frame (meters)
const AACube& getMaximumAACube() const;
const AACube& getMinimumAACube() const;
const AABox& getAABox() const; /// axis aligned bounding box in world-frame (meters)
const QString& getScript() const { return _script; }
void setScript(const QString& value) { _script = value; }
@ -303,7 +305,7 @@ public:
/// registration point as ratio of entity
void setRegistrationPoint(const glm::vec3& value)
{ _registrationPoint = glm::clamp(value, 0.0f, 1.0f); }
{ _registrationPoint = glm::clamp(value, 0.0f, 1.0f); requiresRecalcBoxes(); }
const glm::vec3& getAngularVelocity() const { return _angularVelocity; }
void setAngularVelocity(const glm::vec3& value) { _angularVelocity = value; }
@ -435,6 +437,13 @@ protected:
quint64 _changedOnServer;
Transform _transform;
mutable AABox _cachedAABox;
mutable AACube _maxAACube;
mutable AACube _minAACube;
mutable bool _recalcAABox = true;
mutable bool _recalcMinAACube = true;
mutable bool _recalcMaxAACube = true;
float _glowLevel;
float _localRenderAlpha;
float _density = ENTITY_ITEM_DEFAULT_DENSITY; // kg/m^3

View file

@ -65,7 +65,14 @@ ViveControllerManager::ViveControllerManager() :
bool ViveControllerManager::isSupported() const {
#ifdef Q_OS_WIN
return vr::VR_IsHmdPresent();
bool success = vr::VR_IsHmdPresent();
if (success) {
vr::HmdError eError = vr::HmdError_None;
auto hmd = vr::VR_Init(&eError);
success = (hmd != nullptr);
vr::VR_Shutdown();
}
return success;
#else
return false;
#endif

View file

@ -68,6 +68,8 @@ PacketVersion versionForPacketType(PacketType::Value packetType) {
case EntityEdit:
case EntityData:
return VERSION_ENTITIES_CENTER_ORIGIN;
case AvatarData:
return 12;
default:
return 11;
}

View file

@ -81,6 +81,7 @@ class MenuConstants : public QObject{
public:
enum Item {
RenderLookAtTargets,
};
public: