Mirror of https://github.com/overte-org/overte.git (synced 2025-04-10 19:29:07 +02:00)

Merge remote-tracking branch 'upstream/master'

Commit a3f637dc0e: 46 changed files with 670 additions and 857 deletions
cmake/externals/faceshift/CMakeLists.txt (vendored, 47 lines deleted)
@@ -1,47 +0,0 @@
-set(EXTERNAL_NAME faceshift)
-
-include(ExternalProject)
-ExternalProject_Add(
-  ${EXTERNAL_NAME}
-  URL https://hifi-public.s3.amazonaws.com/dependencies/faceshift.zip
-  CMAKE_ARGS ${ANDROID_CMAKE_ARGS} -DCMAKE_INSTALL_PREFIX:PATH=<INSTALL_DIR>
-  BINARY_DIR ${EXTERNAL_PROJECT_PREFIX}/build
-  LOG_DOWNLOAD 1
-  LOG_CONFIGURE 1
-  LOG_BUILD 1
-)
-
-# URL_MD5 1bdcb8a0b8d5b1ede434cc41efade41d
-
-# Hide this external target (for ide users)
-set_target_properties(${EXTERNAL_NAME} PROPERTIES FOLDER "hidden/externals")
-
-ExternalProject_Get_Property(${EXTERNAL_NAME} INSTALL_DIR)
-
-string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
-set(${EXTERNAL_NAME_UPPER}_INCLUDE_DIRS ${INSTALL_DIR}/include CACHE FILEPATH "Path to Faceshift include directory")
-
-set(LIBRARY_DEBUG_PATH "lib/Debug")
-set(LIBRARY_RELEASE_PATH "lib/Release")
-
-if (WIN32)
-  set(LIBRARY_PREFIX "")
-  set(LIBRARY_EXT "lib")
-  # use selected configuration in release path when building on Windows
-  set(LIBRARY_RELEASE_PATH "$<$<CONFIG:RelWithDebInfo>:build/RelWithDebInfo>")
-  set(LIBRARY_RELEASE_PATH "${LIBRARY_RELEASE_PATH}$<$<CONFIG:MinSizeRel>:build/MinSizeRel>")
-  set(LIBRARY_RELEASE_PATH "${LIBRARY_RELEASE_PATH}$<$<OR:$<CONFIG:Release>,$<CONFIG:Debug>>:lib/Release>")
-elseif (APPLE)
-  set(LIBRARY_EXT "a")
-  set(LIBRARY_PREFIX "lib")
-
-  if (CMAKE_GENERATOR STREQUAL "Unix Makefiles")
-    set(LIBRARY_DEBUG_PATH "build")
-    set(LIBRARY_RELEASE_PATH "build")
-  endif ()
-endif()
-
-set(${EXTERNAL_NAME_UPPER}_LIBRARY_DEBUG
-  ${INSTALL_DIR}/${LIBRARY_DEBUG_PATH}/${LIBRARY_PREFIX}faceshift.${LIBRARY_EXT} CACHE FILEPATH "Faceshift libraries")
-set(${EXTERNAL_NAME_UPPER}_LIBRARY_RELEASE
-  ${INSTALL_DIR}/${LIBRARY_RELEASE_PATH}/${LIBRARY_PREFIX}faceshift.${LIBRARY_EXT} CACHE FILEPATH "Faceshift libraries")
@@ -1,14 +0,0 @@
-#
-# Copyright 2015 High Fidelity, Inc.
-# Created by Bradley Austin Davis on 2015/10/10
-#
-# Distributed under the Apache License, Version 2.0.
-# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-#
-macro(TARGET_FACESHIFT)
-  add_dependency_external_projects(faceshift)
-  find_package(Faceshift REQUIRED)
-  target_include_directories(${TARGET_NAME} PRIVATE ${FACESHIFT_INCLUDE_DIRS})
-  target_link_libraries(${TARGET_NAME} ${FACESHIFT_LIBRARIES})
-  add_definitions(-DHAVE_FACESHIFT)
-endmacro()
@@ -1,26 +0,0 @@
-#
-# FindFaceshift.cmake
-#
-# Try to find the Faceshift networking library
-#
-# You must provide a FACESHIFT_ROOT_DIR which contains lib and include directories
-#
-# Once done this will define
-#
-# FACESHIFT_FOUND - system found Faceshift
-# FACESHIFT_INCLUDE_DIRS - the Faceshift include directory
-# FACESHIFT_LIBRARIES - Link this to use Faceshift
-#
-# Created on 8/30/2013 by Andrzej Kapolka
-# Copyright 2013 High Fidelity, Inc.
-#
-# Distributed under the Apache License, Version 2.0.
-# See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-#
-
-include(SelectLibraryConfigurations)
-select_library_configurations(FACESHIFT)
-
-include(FindPackageHandleStandardArgs)
-find_package_handle_standard_args(Faceshift DEFAULT_MSG FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES)
-mark_as_advanced(FACESHIFT_INCLUDE_DIRS FACESHIFT_LIBRARIES FACESHIFT_SEARCH_DIRS)
@@ -194,7 +194,7 @@ link_hifi_libraries(
   recording fbx networking model-networking entities avatars
   audio audio-client animation script-engine physics
   render-utils entities-renderer avatars-renderer ui auto-updater
-  controllers plugins image
+  controllers plugins image trackers
   ui-plugins display-plugins input-plugins
   ${NON_ANDROID_LIBRARIES}
 )
@@ -202,7 +202,6 @@ link_hifi_libraries(
 # include the binary directory of render-utils for shader includes
 target_include_directories(${TARGET_NAME} PRIVATE "${CMAKE_BINARY_DIR}/libraries/render-utils")
 
-#fixme find a way to express faceshift as a plugin
 target_bullet()
 target_opengl()
 
@@ -210,10 +209,6 @@ if (NOT ANDROID)
   target_glew()
 endif ()
 
-if (WIN32 OR APPLE)
-  target_faceshift()
-endif()
-
 # perform standard include and linking for found externals
 foreach(EXTERNAL ${OPTIONAL_EXTERNALS})
 
@@ -61,6 +61,11 @@
     { "from": "Standard.RightHand", "to": "Actions.RightHand" },
 
     { "from": "Standard.LeftFoot", "to": "Actions.LeftFoot" },
-    { "from": "Standard.RightFoot", "to": "Actions.RightFoot" }
+    { "from": "Standard.RightFoot", "to": "Actions.RightFoot" },
+
+    { "from": "Standard.Hips", "to": "Actions.Hips" },
+    { "from": "Standard.Spine2", "to": "Actions.Spine2" },
+
+    { "from": "Standard.Head", "to": "Actions.Head" }
   ]
 }
@@ -19,7 +19,7 @@
 function shouldRaiseKeyboard() {
     var nodeName = document.activeElement.nodeName;
     var nodeType = document.activeElement.type;
-    if (nodeName === "INPUT" && (nodeType === "text" || nodeType === "number" || nodeType === "password")
+    if (nodeName === "INPUT" && ["email", "number", "password", "tel", "text", "url"].indexOf(nodeType) !== -1
         || document.activeElement.nodeName === "TEXTAREA") {
         return true;
     } else {
@@ -128,6 +128,7 @@
 #include <QmlWebWindowClass.h>
 #include <Preferences.h>
 #include <display-plugins/CompositorHelper.h>
+#include <trackers/EyeTracker.h>
 
 
 #include "AudioClient.h"
@@ -136,8 +137,6 @@
 #include "avatar/ScriptAvatar.h"
 #include "CrashHandler.h"
 #include "devices/DdeFaceTracker.h"
-#include "devices/EyeTracker.h"
-#include "devices/Faceshift.h"
 #include "devices/Leapmotion.h"
 #include "DiscoverabilityManager.h"
 #include "GLCanvas.h"
@@ -480,7 +479,6 @@ bool setupEssentials(int& argc, char** argv) {
     DependencyManager::set<ModelCache>();
     DependencyManager::set<ScriptCache>();
     DependencyManager::set<SoundCache>();
-    DependencyManager::set<Faceshift>();
     DependencyManager::set<DdeFaceTracker>();
     DependencyManager::set<EyeTracker>();
     DependencyManager::set<AudioClient>();
@@ -1210,10 +1208,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
 
     this->installEventFilter(this);
 
-    // initialize our face trackers after loading the menu settings
-    auto faceshiftTracker = DependencyManager::get<Faceshift>();
-    faceshiftTracker->init();
-    connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
 #ifdef HAVE_DDE
     auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
     ddeTracker->init();
@@ -3626,20 +3620,13 @@ ivec2 Application::getMouse() const {
 }
 
 FaceTracker* Application::getActiveFaceTracker() {
-    auto faceshift = DependencyManager::get<Faceshift>();
     auto dde = DependencyManager::get<DdeFaceTracker>();
 
-    return (dde->isActive() ? static_cast<FaceTracker*>(dde.data()) :
-            (faceshift->isActive() ? static_cast<FaceTracker*>(faceshift.data()) : nullptr));
+    return dde->isActive() ? static_cast<FaceTracker*>(dde.data()) : nullptr;
 }
 
 FaceTracker* Application::getSelectedFaceTracker() {
     FaceTracker* faceTracker = nullptr;
-#ifdef HAVE_FACESHIFT
-    if (Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)) {
-        faceTracker = DependencyManager::get<Faceshift>().data();
-    }
-#endif
 #ifdef HAVE_DDE
     if (Menu::getInstance()->isOptionChecked(MenuOption::UseCamera)) {
         faceTracker = DependencyManager::get<DdeFaceTracker>().data();
@@ -3649,15 +3636,8 @@ FaceTracker* Application::getSelectedFaceTracker() {
 }
 
 void Application::setActiveFaceTracker() const {
-#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
-    bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
-#endif
-#ifdef HAVE_FACESHIFT
-    auto faceshiftTracker = DependencyManager::get<Faceshift>();
-    faceshiftTracker->setIsMuted(isMuted);
-    faceshiftTracker->setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !isMuted);
-#endif
 #ifdef HAVE_DDE
+    bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
     bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
     Menu::getInstance()->getActionForOption(MenuOption::BinaryEyelidControl)->setVisible(isUsingDDE);
     Menu::getInstance()->getActionForOption(MenuOption::CoupleEyelids)->setVisible(isUsingDDE);
@@ -4375,7 +4355,13 @@ void Application::update(float deltaTime) {
         controller::InputCalibrationData calibrationData = {
             myAvatar->getSensorToWorldMatrix(),
             createMatFromQuatAndPos(myAvatar->getOrientation(), myAvatar->getPosition()),
-            myAvatar->getHMDSensorMatrix()
+            myAvatar->getHMDSensorMatrix(),
+            myAvatar->getCenterEyeCalibrationMat(),
+            myAvatar->getHeadCalibrationMat(),
+            myAvatar->getSpine2CalibrationMat(),
+            myAvatar->getHipsCalibrationMat(),
+            myAvatar->getLeftFootCalibrationMat(),
+            myAvatar->getRightFootCalibrationMat()
         };
 
         InputPluginPointer keyboardMousePlugin;
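Aside: a minimal sketch (not part of this commit) of the shape the receiving struct plausibly takes and how a calibration routine might consume the new default-pose matrices. The field names below are illustrative assumptions; only the initializer order in the hunk above is taken from the diff.

#include <glm/glm.hpp>

struct InputCalibrationDataSketch {
    glm::mat4 sensorToWorldMat;    // sensor space to world space
    glm::mat4 avatarMat;           // avatar space to world space
    glm::mat4 hmdSensorMat;        // HMD pose in sensor space
    glm::mat4 defaultCenterEyeMat; // the six default joint poses, avatar space
    glm::mat4 defaultHeadMat;
    glm::mat4 defaultSpine2Mat;
    glm::mat4 defaultHipsMat;
    glm::mat4 defaultLeftFootMat;
    glm::mat4 defaultRightFootMat;
};

// At calibration time a plugin could compute a fixed offset from a tracker
// pose to the joint it should drive, e.g. for the hips (hypothetical helper):
glm::mat4 trackerToHipsOffset(const InputCalibrationDataSketch& calib,
                              const glm::mat4& hipsTrackerInAvatarSpace) {
    return glm::inverse(hipsTrackerInAvatarSpace) * calib.defaultHipsMat;
}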
@@ -4423,6 +4409,13 @@ void Application::update(float deltaTime) {
         controller::Pose rightFootPose = userInputMapper->getPoseState(controller::Action::RIGHT_FOOT);
         myAvatar->setFootControllerPosesInSensorFrame(leftFootPose.transform(avatarToSensorMatrix), rightFootPose.transform(avatarToSensorMatrix));
 
+        controller::Pose hipsPose = userInputMapper->getPoseState(controller::Action::HIPS);
+        controller::Pose spine2Pose = userInputMapper->getPoseState(controller::Action::SPINE2);
+        myAvatar->setSpineControllerPosesInSensorFrame(hipsPose.transform(avatarToSensorMatrix), spine2Pose.transform(avatarToSensorMatrix));
+
+        controller::Pose headPose = userInputMapper->getPoseState(controller::Action::HEAD);
+        myAvatar->setHeadControllerPoseInSensorFrame(headPose.transform(avatarToSensorMatrix));
+
         updateThreads(deltaTime); // If running non-threaded, then give the threads some time to process...
         updateDialogs(deltaTime); // update various stats dialogs if present
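Aside: the hunk above funnels every new tracker pose through Pose::transform(avatarToSensorMatrix) before caching it. A minimal sketch (an assumption, not hifi's actual implementation) of what such a transform conceptually does:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

struct PoseSketch {
    glm::vec3 translation { 0.0f };
    glm::quat rotation { 1.0f, 0.0f, 0.0f, 0.0f };

    // Re-base the pose into the frame described by m: positions go through the
    // full matrix, orientations through its rotation part (assumes no scale).
    PoseSketch transform(const glm::mat4& m) const {
        PoseSketch result;
        result.translation = glm::vec3(m * glm::vec4(translation, 1.0f));
        result.rotation = glm::quat_cast(m) * rotation;
        return result;
    }
};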
@@ -5133,7 +5126,6 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
 }
 
 void Application::resetSensors(bool andReload) {
-    DependencyManager::get<Faceshift>()->reset();
     DependencyManager::get<DdeFaceTracker>()->reset();
     DependencyManager::get<EyeTracker>()->reset();
     getActiveDisplayPlugin()->resetSensors();
@@ -34,7 +34,6 @@
 #include "avatar/AvatarManager.h"
 #include "AvatarBookmarks.h"
 #include "devices/DdeFaceTracker.h"
-#include "devices/Faceshift.h"
 #include "MainWindow.h"
 #include "render/DrawStatus.h"
 #include "scripting/MenuScriptingInterface.h"
@@ -451,12 +450,6 @@ Menu::Menu() {
         qApp, SLOT(setActiveFaceTracker()));
     faceTrackerGroup->addAction(noFaceTracker);
 
-#ifdef HAVE_FACESHIFT
-    QAction* faceshiftFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::Faceshift,
-        0, false,
-        qApp, SLOT(setActiveFaceTracker()));
-    faceTrackerGroup->addAction(faceshiftFaceTracker);
-#endif
 #ifdef HAVE_DDE
     QAction* ddeFaceTracker = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::UseCamera,
         0, true,
@@ -477,11 +470,10 @@ Menu::Menu() {
     QAction* ddeCalibrate = addActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::CalibrateCamera, 0,
         DependencyManager::get<DdeFaceTracker>().data(), SLOT(calibrate()));
     ddeCalibrate->setVisible(true); // DDE face tracking is on by default
-#endif
-#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
     faceTrackingMenu->addSeparator();
     addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
-        Qt::CTRL | Qt::SHIFT | Qt::Key_F, true); // DDE face tracking is on by default
+        [](bool mute) { FaceTracker::setIsMuted(mute); },
+        Qt::CTRL | Qt::SHIFT | Qt::Key_F, FaceTracker::isMuted());
     addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::AutoMuteAudio, 0, false);
 #endif
 
@@ -105,7 +105,6 @@ namespace MenuOption {
     const QString ExpandPaintGLTiming = "Expand /paintGL";
     const QString ExpandPhysicsSimulationTiming = "Expand /physics";
     const QString ExpandUpdateTiming = "Expand /update";
-    const QString Faceshift = "Faceshift";
     const QString FirstPerson = "First Person";
     const QString FivePointCalibration = "5 Point Calibration";
     const QString FixGaze = "Fix Gaze (no saccade)";
@@ -13,6 +13,7 @@
 
 #include <NodeList.h>
 #include <recording/Deck.h>
+#include <trackers/EyeTracker.h>
 
 #include "Application.h"
 #include "Avatar.h"
@@ -22,8 +23,6 @@
 #include "Menu.h"
 #include "Util.h"
 #include "devices/DdeFaceTracker.h"
-#include "devices/EyeTracker.h"
-#include "devices/Faceshift.h"
 #include <Rig.h>
 
 using namespace std;
@@ -209,14 +208,14 @@ void Head::simulate(float deltaTime, bool isMine) {
 
         // use data to update fake Faceshift blendshape coefficients
         calculateMouthShapes(deltaTime);
-        DependencyManager::get<Faceshift>()->updateFakeCoefficients(_leftEyeBlink,
-                                                                    _rightEyeBlink,
-                                                                    _browAudioLift,
-                                                                    _audioJawOpen,
-                                                                    _mouth2,
-                                                                    _mouth3,
-                                                                    _mouth4,
-                                                                    _blendshapeCoefficients);
+        FaceTracker::updateFakeCoefficients(_leftEyeBlink,
+                                            _rightEyeBlink,
+                                            _browAudioLift,
+                                            _audioJawOpen,
+                                            _mouth2,
+                                            _mouth3,
+                                            _mouth4,
+                                            _blendshapeCoefficients);
 
         applyEyelidOffset(getOrientation());
@@ -41,9 +41,9 @@
 #include <recording/Clip.h>
 #include <recording/Frame.h>
 #include <RecordingScriptingInterface.h>
+#include <trackers/FaceTracker.h>
 
 #include "Application.h"
-#include "devices/Faceshift.h"
 #include "AvatarManager.h"
 #include "AvatarActionHold.h"
 #include "Menu.h"
@@ -82,6 +82,18 @@ const float MyAvatar::ZOOM_MIN = 0.5f;
 const float MyAvatar::ZOOM_MAX = 25.0f;
 const float MyAvatar::ZOOM_DEFAULT = 1.5f;
 
+// default values, used when avatar is missing joints... (avatar space)
+// static const glm::quat DEFAULT_AVATAR_MIDDLE_EYE_ROT { Quaternions::Y_180 };
+static const glm::vec3 DEFAULT_AVATAR_MIDDLE_EYE_POS { 0.0f, 0.6f, 0.0f };
+static const glm::vec3 DEFAULT_AVATAR_HEAD_POS { 0.0f, 0.53f, 0.0f };
+static const glm::vec3 DEFAULT_AVATAR_NECK_POS { 0.0f, 0.445f, 0.025f };
+static const glm::vec3 DEFAULT_AVATAR_SPINE2_POS { 0.0f, 0.32f, 0.02f };
+static const glm::vec3 DEFAULT_AVATAR_HIPS_POS { 0.0f, 0.0f, 0.0f };
+static const glm::vec3 DEFAULT_AVATAR_LEFTFOOT_POS { -0.08f, -0.96f, 0.029f };
+static const glm::quat DEFAULT_AVATAR_LEFTFOOT_ROT { -0.40167322754859924f, 0.9154590368270874f, -0.005437685176730156f, -0.023744143545627594f };
+static const glm::vec3 DEFAULT_AVATAR_RIGHTFOOT_POS { 0.08f, -0.96f, 0.029f };
+static const glm::quat DEFAULT_AVATAR_RIGHTFOOT_ROT { -0.4016716778278351f, 0.9154615998268127f, 0.0053307069465518f, 0.023696165531873703f };
+
 MyAvatar::MyAvatar(QThread* thread, RigPointer rig) :
     Avatar(thread, rig),
     _wasPushing(false),
@@ -650,18 +662,13 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
     }
 
     FaceTracker* tracker = qApp->getActiveFaceTracker();
-    bool inFacetracker = tracker && !tracker->isMuted();
+    bool inFacetracker = tracker && !FaceTracker::isMuted();
 
     if (inHmd) {
         estimatedPosition = extractTranslation(getHMDSensorMatrix());
         estimatedPosition.x *= -1.0f;
         _trackedHeadPosition = estimatedPosition;
-
-        const float OCULUS_LEAN_SCALE = 0.05f;
-        estimatedPosition /= OCULUS_LEAN_SCALE;
     } else if (inFacetracker) {
         estimatedPosition = tracker->getHeadTranslation();
         _trackedHeadPosition = estimatedPosition;
-        estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
     }
 
@@ -1378,6 +1385,65 @@ controller::Pose MyAvatar::getRightFootControllerPoseInAvatarFrame() const {
     return getRightFootControllerPoseInWorldFrame().transform(invAvatarMatrix);
 }
 
+void MyAvatar::setSpineControllerPosesInSensorFrame(const controller::Pose& hips, const controller::Pose& spine2) {
+    if (controller::InputDevice::getLowVelocityFilter()) {
+        auto oldHipsPose = getHipsControllerPoseInSensorFrame();
+        auto oldSpine2Pose = getSpine2ControllerPoseInSensorFrame();
+        _hipsControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldHipsPose, hips));
+        _spine2ControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldSpine2Pose, spine2));
+    } else {
+        _hipsControllerPoseInSensorFrameCache.set(hips);
+        _spine2ControllerPoseInSensorFrameCache.set(spine2);
+    }
+}
+
+controller::Pose MyAvatar::getHipsControllerPoseInSensorFrame() const {
+    return _hipsControllerPoseInSensorFrameCache.get();
+}
+
+controller::Pose MyAvatar::getSpine2ControllerPoseInSensorFrame() const {
+    return _spine2ControllerPoseInSensorFrameCache.get();
+}
+
+controller::Pose MyAvatar::getHipsControllerPoseInWorldFrame() const {
+    return _hipsControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
+}
+
+controller::Pose MyAvatar::getSpine2ControllerPoseInWorldFrame() const {
+    return _spine2ControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
+}
+
+controller::Pose MyAvatar::getHipsControllerPoseInAvatarFrame() const {
+    glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
+    return getHipsControllerPoseInWorldFrame().transform(invAvatarMatrix);
+}
+
+controller::Pose MyAvatar::getSpine2ControllerPoseInAvatarFrame() const {
+    glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
+    return getSpine2ControllerPoseInWorldFrame().transform(invAvatarMatrix);
+}
+
+void MyAvatar::setHeadControllerPoseInSensorFrame(const controller::Pose& head) {
+    if (controller::InputDevice::getLowVelocityFilter()) {
+        auto oldHeadPose = getHeadControllerPoseInSensorFrame();
+        _headControllerPoseInSensorFrameCache.set(applyLowVelocityFilter(oldHeadPose, head));
+    } else {
+        _headControllerPoseInSensorFrameCache.set(head);
+    }
+}
+
+controller::Pose MyAvatar::getHeadControllerPoseInSensorFrame() const {
+    return _headControllerPoseInSensorFrameCache.get();
+}
+
+controller::Pose MyAvatar::getHeadControllerPoseInWorldFrame() const {
+    return _headControllerPoseInSensorFrameCache.get().transform(getSensorToWorldMatrix());
+}
+
+controller::Pose MyAvatar::getHeadControllerPoseInAvatarFrame() const {
+    glm::mat4 invAvatarMatrix = glm::inverse(createMatFromQuatAndPos(getOrientation(), getPosition()));
+    return getHeadControllerPoseInWorldFrame().transform(invAvatarMatrix);
+}
+
 void MyAvatar::updateMotors() {
     _characterController.clearMotors();
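Aside: the accessors added above all derive from the single SENSOR-frame cache. A sketch of the conversion chain (hedged; it assumes the controller::Pose API used elsewhere in this diff):

// sensor -> world uses the avatar's sensor-to-world matrix; world -> avatar
// inverts the avatar's own transform, exactly as the *InAvatarFrame getters do.
controller::Pose sensorPoseToAvatarFrame(const controller::Pose& sensorPose,
                                         const glm::mat4& sensorToWorldMat,
                                         const glm::mat4& avatarMat) {
    return sensorPose.transform(sensorToWorldMat).transform(glm::inverse(avatarMat));
}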
@@ -2220,22 +2286,17 @@ glm::mat4 MyAvatar::deriveBodyFromHMDSensor() const {
     const glm::quat hmdOrientation = getHMDSensorOrientation();
     const glm::quat hmdOrientationYawOnly = cancelOutRollAndPitch(hmdOrientation);
 
-    // 2 meter tall dude (in rig coordinates)
-    const glm::vec3 DEFAULT_RIG_MIDDLE_EYE_POS(0.0f, 0.9f, 0.0f);
-    const glm::vec3 DEFAULT_RIG_NECK_POS(0.0f, 0.70f, 0.0f);
-    const glm::vec3 DEFAULT_RIG_HIPS_POS(0.0f, 0.05f, 0.0f);
-
     int rightEyeIndex = _rig->indexOfJoint("RightEye");
     int leftEyeIndex = _rig->indexOfJoint("LeftEye");
     int neckIndex = _rig->indexOfJoint("Neck");
     int hipsIndex = _rig->indexOfJoint("Hips");
 
-    glm::vec3 rigMiddleEyePos = DEFAULT_RIG_MIDDLE_EYE_POS;
+    glm::vec3 rigMiddleEyePos = DEFAULT_AVATAR_MIDDLE_EYE_POS;
     if (leftEyeIndex >= 0 && rightEyeIndex >= 0) {
         rigMiddleEyePos = (_rig->getAbsoluteDefaultPose(leftEyeIndex).trans() + _rig->getAbsoluteDefaultPose(rightEyeIndex).trans()) / 2.0f;
     }
-    glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_RIG_NECK_POS;
-    glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_RIG_HIPS_POS;
+    glm::vec3 rigNeckPos = neckIndex != -1 ? _rig->getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_AVATAR_NECK_POS;
+    glm::vec3 rigHipsPos = hipsIndex != -1 ? _rig->getAbsoluteDefaultPose(hipsIndex).trans() : DEFAULT_AVATAR_HIPS_POS;
 
     glm::vec3 localEyes = (rigMiddleEyePos - rigHipsPos);
     glm::vec3 localNeck = (rigNeckPos - rigHipsPos);
@@ -2599,6 +2660,79 @@ glm::vec3 MyAvatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
     }
 }
 
+glm::mat4 MyAvatar::getCenterEyeCalibrationMat() const {
+    // TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
+    int rightEyeIndex = _rig->indexOfJoint("RightEye");
+    int leftEyeIndex = _rig->indexOfJoint("LeftEye");
+    if (rightEyeIndex >= 0 && leftEyeIndex >= 0) {
+        auto centerEyePos = (getAbsoluteDefaultJointTranslationInObjectFrame(rightEyeIndex) + getAbsoluteDefaultJointTranslationInObjectFrame(leftEyeIndex)) * 0.5f;
+        auto centerEyeRot = Quaternions::Y_180;
+        return createMatFromQuatAndPos(centerEyeRot, centerEyePos);
+    } else {
+        return createMatFromQuatAndPos(DEFAULT_AVATAR_MIDDLE_EYE_POS, DEFAULT_AVATAR_MIDDLE_EYE_POS);
+    }
+}
+
+glm::mat4 MyAvatar::getHeadCalibrationMat() const {
+    // TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
+    int headIndex = _rig->indexOfJoint("Head");
+    if (headIndex >= 0) {
+        auto headPos = getAbsoluteDefaultJointTranslationInObjectFrame(headIndex);
+        auto headRot = getAbsoluteDefaultJointRotationInObjectFrame(headIndex);
+        return createMatFromQuatAndPos(headRot, headPos);
+    } else {
+        return createMatFromQuatAndPos(DEFAULT_AVATAR_HEAD_POS, DEFAULT_AVATAR_HEAD_POS);
+    }
+}
+
+glm::mat4 MyAvatar::getSpine2CalibrationMat() const {
+    // TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
+    int spine2Index = _rig->indexOfJoint("Spine2");
+    if (spine2Index >= 0) {
+        auto spine2Pos = getAbsoluteDefaultJointTranslationInObjectFrame(spine2Index);
+        auto spine2Rot = getAbsoluteDefaultJointRotationInObjectFrame(spine2Index);
+        return createMatFromQuatAndPos(spine2Rot, spine2Pos);
+    } else {
+        return createMatFromQuatAndPos(DEFAULT_AVATAR_SPINE2_POS, DEFAULT_AVATAR_SPINE2_POS);
+    }
+}
+
+glm::mat4 MyAvatar::getHipsCalibrationMat() const {
+    // TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
+    int hipsIndex = _rig->indexOfJoint("Hips");
+    if (hipsIndex >= 0) {
+        auto hipsPos = getAbsoluteDefaultJointTranslationInObjectFrame(hipsIndex);
+        auto hipsRot = getAbsoluteDefaultJointRotationInObjectFrame(hipsIndex);
+        return createMatFromQuatAndPos(hipsRot, hipsPos);
+    } else {
+        return createMatFromQuatAndPos(DEFAULT_AVATAR_HIPS_POS, DEFAULT_AVATAR_HIPS_POS);
+    }
+}
+
+glm::mat4 MyAvatar::getLeftFootCalibrationMat() const {
+    // TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
+    int leftFootIndex = _rig->indexOfJoint("LeftFoot");
+    if (leftFootIndex >= 0) {
+        auto leftFootPos = getAbsoluteDefaultJointTranslationInObjectFrame(leftFootIndex);
+        auto leftFootRot = getAbsoluteDefaultJointRotationInObjectFrame(leftFootIndex);
+        return createMatFromQuatAndPos(leftFootRot, leftFootPos);
+    } else {
+        return createMatFromQuatAndPos(DEFAULT_AVATAR_LEFTFOOT_POS, DEFAULT_AVATAR_LEFTFOOT_POS);
+    }
+}
+
+glm::mat4 MyAvatar::getRightFootCalibrationMat() const {
+    // TODO: as an optimization cache this computation, then invalidate the cache when the avatar model is changed.
+    int rightFootIndex = _rig->indexOfJoint("RightFoot");
+    if (rightFootIndex >= 0) {
+        auto rightFootPos = getAbsoluteDefaultJointTranslationInObjectFrame(rightFootIndex);
+        auto rightFootRot = getAbsoluteDefaultJointRotationInObjectFrame(rightFootIndex);
+        return createMatFromQuatAndPos(rightFootRot, rightFootPos);
+    } else {
+        return createMatFromQuatAndPos(DEFAULT_AVATAR_RIGHTFOOT_POS, DEFAULT_AVATAR_RIGHTFOOT_POS);
+    }
+}
+
 bool MyAvatar::pinJoint(int index, const glm::vec3& position, const glm::quat& orientation) {
     auto hipsIndex = getJointIndex("Hips");
     if (index != hipsIndex) {
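Aside: the six getters above repeat one pattern, prefer the avatar's default joint pose, else fall back to a canned constant. (Note the committed fallbacks pass a position vector where createMatFromQuatAndPos expects a quaternion; glm's Euler-angle quat constructor makes that compile, which may or may not be intended.) A hypothetical generic helper covering the common case, assuming only the accessors shown in this diff:

glm::mat4 MyAvatar::getJointCalibrationMat(const QString& jointName,
                                           const glm::quat& fallbackRot,
                                           const glm::vec3& fallbackPos) const {
    int index = _rig->indexOfJoint(jointName);
    if (index >= 0) {
        // default pose of the joint, in absolute avatar space
        return createMatFromQuatAndPos(getAbsoluteDefaultJointRotationInObjectFrame(index),
                                       getAbsoluteDefaultJointTranslationInObjectFrame(index));
    }
    return createMatFromQuatAndPos(fallbackRot, fallbackPos);
}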
@@ -353,7 +353,6 @@ public:
 
     eyeContactTarget getEyeContactTarget();
 
-    Q_INVOKABLE glm::vec3 getTrackedHeadPosition() const { return _trackedHeadPosition; }
     Q_INVOKABLE glm::vec3 getHeadPosition() const { return getHead()->getPosition(); }
     Q_INVOKABLE float getHeadFinalYaw() const { return getHead()->getFinalYaw(); }
     Q_INVOKABLE float getHeadFinalRoll() const { return getHead()->getFinalRoll(); }
@@ -453,6 +452,19 @@ public:
     controller::Pose getLeftFootControllerPoseInAvatarFrame() const;
     controller::Pose getRightFootControllerPoseInAvatarFrame() const;
 
+    void setSpineControllerPosesInSensorFrame(const controller::Pose& hips, const controller::Pose& spine2);
+    controller::Pose getHipsControllerPoseInSensorFrame() const;
+    controller::Pose getSpine2ControllerPoseInSensorFrame() const;
+    controller::Pose getHipsControllerPoseInWorldFrame() const;
+    controller::Pose getSpine2ControllerPoseInWorldFrame() const;
+    controller::Pose getHipsControllerPoseInAvatarFrame() const;
+    controller::Pose getSpine2ControllerPoseInAvatarFrame() const;
+
+    void setHeadControllerPoseInSensorFrame(const controller::Pose& head);
+    controller::Pose getHeadControllerPoseInSensorFrame() const;
+    controller::Pose getHeadControllerPoseInWorldFrame() const;
+    controller::Pose getHeadControllerPoseInAvatarFrame() const;
+
     bool hasDriveInput() const;
 
     Q_INVOKABLE void setCharacterControllerEnabled(bool enabled);
@@ -461,6 +473,14 @@ public:
     virtual glm::quat getAbsoluteJointRotationInObjectFrame(int index) const override;
     virtual glm::vec3 getAbsoluteJointTranslationInObjectFrame(int index) const override;
 
+    // all calibration matrices are in absolute avatar space.
+    glm::mat4 getCenterEyeCalibrationMat() const;
+    glm::mat4 getHeadCalibrationMat() const;
+    glm::mat4 getSpine2CalibrationMat() const;
+    glm::mat4 getHipsCalibrationMat() const;
+    glm::mat4 getLeftFootCalibrationMat() const;
+    glm::mat4 getRightFootCalibrationMat() const;
+
     void addHoldAction(AvatarActionHold* holdAction); // thread-safe
     void removeHoldAction(AvatarActionHold* holdAction); // thread-safe
     void updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose);
@@ -693,9 +713,11 @@ private:
     // These are stored in SENSOR frame
     ThreadSafeValueCache<controller::Pose> _leftHandControllerPoseInSensorFrameCache { controller::Pose() };
     ThreadSafeValueCache<controller::Pose> _rightHandControllerPoseInSensorFrameCache { controller::Pose() };
-
     ThreadSafeValueCache<controller::Pose> _leftFootControllerPoseInSensorFrameCache{ controller::Pose() };
     ThreadSafeValueCache<controller::Pose> _rightFootControllerPoseInSensorFrameCache{ controller::Pose() };
+    ThreadSafeValueCache<controller::Pose> _hipsControllerPoseInSensorFrameCache{ controller::Pose() };
+    ThreadSafeValueCache<controller::Pose> _spine2ControllerPoseInSensorFrameCache{ controller::Pose() };
+    ThreadSafeValueCache<controller::Pose> _headControllerPoseInSensorFrameCache{ controller::Pose() };
 
     bool _hmdLeanRecenterEnabled = true;
 
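Aside: the poses are cached in SENSOR frame because they are written by the input thread and read during avatar/render updates. A minimal sketch (an assumption, not hifi's actual implementation) of the ThreadSafeValueCache contract those members rely on:

#include <mutex>

template <typename T>
class ThreadSafeValueCacheSketch {
public:
    ThreadSafeValueCacheSketch(const T& v = T()) : _value(v) {}
    // set() from the input thread, get() from other threads, each under a mutex
    T get() const { std::lock_guard<std::mutex> lock(_mutex); return _value; }
    void set(const T& v) { std::lock_guard<std::mutex> lock(_mutex); _value = v; }
private:
    mutable std::mutex _mutex;
    T _value;
};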
@@ -107,33 +107,49 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
 
     Rig::HeadParameters headParams;
 
-    if (qApp->isHMDMode()) {
-        headParams.isInHMD = true;
-
-        // get HMD position from sensor space into world space, and back into rig space
-        glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
-        glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
-        glm::mat4 worldToRig = glm::inverse(rigToWorld);
-        glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
-
-        headParams.rigHeadPosition = extractTranslation(rigHMDMat);
-        headParams.rigHeadOrientation = extractRotation(rigHMDMat);
-        headParams.worldHeadOrientation = extractRotation(worldHMDMat);
-
-        // TODO: if hips target sensor is valid.
-        // Copy it into headParams.hipsMatrix, and set headParams.hipsEnabled to true.
-
-        headParams.hipsEnabled = false;
+    // input action is the highest priority source for head orientation.
+    auto avatarHeadPose = myAvatar->getHeadControllerPoseInAvatarFrame();
+    if (avatarHeadPose.isValid()) {
+        glm::mat4 rigHeadMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHeadPose.getRotation(), avatarHeadPose.getTranslation());
+        headParams.rigHeadPosition = extractTranslation(rigHeadMat);
+        headParams.rigHeadOrientation = glmExtractRotation(rigHeadMat);
+        headParams.headEnabled = true;
     } else {
-        headParams.hipsEnabled = false;
-        headParams.isInHMD = false;
-
-        // We don't have a valid localHeadPosition.
-        headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame();
-        headParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
+        if (qApp->isHMDMode()) {
+            // get HMD position from sensor space into world space, and back into rig space
+            glm::mat4 worldHMDMat = myAvatar->getSensorToWorldMatrix() * myAvatar->getHMDSensorMatrix();
+            glm::mat4 rigToWorld = createMatFromQuatAndPos(getRotation(), getTranslation());
+            glm::mat4 worldToRig = glm::inverse(rigToWorld);
+            glm::mat4 rigHMDMat = worldToRig * worldHMDMat;
+            _rig->computeHeadFromHMD(AnimPose(rigHMDMat), headParams.rigHeadPosition, headParams.rigHeadOrientation);
+            headParams.headEnabled = true;
+        } else {
+            // even though full head IK is disabled, the rig still needs the head orientation to rotate the head up and down in desktop mode.
+            // preMult 180 is necessary to convert from avatar to rig coordinates.
+            // postMult 180 is necessary to convert head from -z forward to z forward.
+            headParams.rigHeadOrientation = Quaternions::Y_180 * head->getFinalOrientationInLocalFrame() * Quaternions::Y_180;
+            headParams.headEnabled = false;
+        }
     }
 
+    auto avatarHipsPose = myAvatar->getHipsControllerPoseInAvatarFrame();
+    if (avatarHipsPose.isValid()) {
+        glm::mat4 rigHipsMat = Matrices::Y_180 * createMatFromQuatAndPos(avatarHipsPose.getRotation(), avatarHipsPose.getTranslation());
+        headParams.hipsMatrix = rigHipsMat;
+        headParams.hipsEnabled = true;
+    } else {
+        headParams.hipsEnabled = false;
+    }
+
+    auto avatarSpine2Pose = myAvatar->getSpine2ControllerPoseInAvatarFrame();
+    if (avatarSpine2Pose.isValid()) {
+        glm::mat4 rigSpine2Mat = Matrices::Y_180 * createMatFromQuatAndPos(avatarSpine2Pose.getRotation(), avatarSpine2Pose.getTranslation());
+        headParams.spine2Matrix = rigSpine2Mat;
+        headParams.spine2Enabled = true;
+    } else {
+        headParams.spine2Enabled = false;
+    }
+
     headParams.neckJointIndex = geometry.neckJointIndex;
     headParams.isTalking = head->getTimeWithoutTalking() <= 1.5f;
 
     _rig->updateFromHeadParameters(headParams, deltaTime);
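Aside: the repeated "Matrices::Y_180 *" pre-multiplications above implement the avatar-to-rig change of basis (rig space is rotated 180 degrees about Y relative to avatar space, as the desktop-mode comment in the hunk notes). A self-contained sketch of that conversion:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <glm/gtc/constants.hpp>

static const glm::quat Y_180 = glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));

// Build an avatar-space pose matrix, then re-express it in rig space.
glm::mat4 avatarPoseToRig(const glm::quat& rot, const glm::vec3& pos) {
    glm::mat4 poseInAvatar = glm::translate(glm::mat4(1.0f), pos) * glm::mat4_cast(rot);
    return glm::mat4_cast(Y_180) * poseInAvatar;
}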
@@ -193,7 +209,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     Model::updateRig(deltaTime, parentTransform);
 
     Rig::EyeParameters eyeParams;
-    eyeParams.worldHeadOrientation = headParams.worldHeadOrientation;
     eyeParams.eyeLookAt = lookAt;
     eyeParams.eyeSaccade = head->getSaccade();
     eyeParams.modelRotation = getRotation();
@@ -225,7 +240,6 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     head->setBaseRoll(glm::degrees(-eulers.z));
 
     Rig::EyeParameters eyeParams;
-    eyeParams.worldHeadOrientation = head->getFinalOrientationInWorldFrame();
     eyeParams.eyeLookAt = lookAt;
     eyeParams.eyeSaccade = glm::vec3(0.0f);
     eyeParams.modelRotation = getRotation();
@@ -22,7 +22,7 @@
 #include <DependencyManager.h>
 #include <ui/overlays/TextOverlay.h>
 
-#include "FaceTracker.h"
+#include <trackers/FaceTracker.h>
 
 class DdeFaceTracker : public FaceTracker, public Dependency {
     Q_OBJECT
@@ -1,310 +0,0 @@
-//
-//  Faceshift.cpp
-//  interface/src/devices
-//
-//  Created by Andrzej Kapolka on 9/3/13.
-//  Copyright 2013 High Fidelity, Inc.
-//
-//  Distributed under the Apache License, Version 2.0.
-//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#include <QTimer>
-
-#include <GLMHelpers.h>
-#include <NumericalConstants.h>
-#include <PerfStat.h>
-
-#include "Faceshift.h"
-#include "Menu.h"
-#include "Util.h"
-#include "InterfaceLogging.h"
-
-#ifdef HAVE_FACESHIFT
-using namespace fs;
-#endif
-
-using namespace std;
-
-const QString DEFAULT_FACESHIFT_HOSTNAME = "localhost";
-const quint16 FACESHIFT_PORT = 33433;
-
-Faceshift::Faceshift() :
-    _hostname("faceshiftHostname", DEFAULT_FACESHIFT_HOSTNAME)
-{
-#ifdef HAVE_FACESHIFT
-    connect(&_tcpSocket, SIGNAL(connected()), SLOT(noteConnected()));
-    connect(&_tcpSocket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
-    connect(&_tcpSocket, SIGNAL(readyRead()), SLOT(readFromSocket()));
-    connect(&_tcpSocket, SIGNAL(stateChanged(QAbstractSocket::SocketState)), SIGNAL(connectionStateChanged()));
-    connect(&_tcpSocket, SIGNAL(disconnected()), SLOT(noteDisconnected()));
-
-    connect(&_udpSocket, SIGNAL(readyRead()), SLOT(readPendingDatagrams()));
-
-    _udpSocket.bind(FACESHIFT_PORT);
-#endif
-}
-
-#ifdef HAVE_FACESHIFT
-void Faceshift::init() {
-    FaceTracker::init();
-    setEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift) && !_isMuted);
-}
-
-void Faceshift::update(float deltaTime) {
-    if (!isActive()) {
-        return;
-    }
-    FaceTracker::update(deltaTime);
-
-    // get the euler angles relative to the window
-    glm::vec3 eulers = glm::degrees(safeEulerAngles(_headRotation * glm::quat(glm::radians(glm::vec3(
-        (_eyeGazeLeftPitch + _eyeGazeRightPitch) / 2.0f, (_eyeGazeLeftYaw + _eyeGazeRightYaw) / 2.0f, 0.0f)))));
-
-    // compute and subtract the long term average
-    const float LONG_TERM_AVERAGE_SMOOTHING = 0.999f;
-    if (!_longTermAverageInitialized) {
-        _longTermAverageEyePitch = eulers.x;
-        _longTermAverageEyeYaw = eulers.y;
-        _longTermAverageInitialized = true;
-
-    } else {
-        _longTermAverageEyePitch = glm::mix(eulers.x, _longTermAverageEyePitch, LONG_TERM_AVERAGE_SMOOTHING);
-        _longTermAverageEyeYaw = glm::mix(eulers.y, _longTermAverageEyeYaw, LONG_TERM_AVERAGE_SMOOTHING);
-    }
-    _estimatedEyePitch = eulers.x - _longTermAverageEyePitch;
-    _estimatedEyeYaw = eulers.y - _longTermAverageEyeYaw;
-}
-
-void Faceshift::reset() {
-    if (_tcpSocket.state() == QAbstractSocket::ConnectedState) {
-        qCDebug(interfaceapp, "Faceshift: Reset");
-
-        FaceTracker::reset();
-
-        string message;
-        fsBinaryStream::encode_message(message, fsMsgCalibrateNeutral());
-        send(message);
-    }
-    _longTermAverageInitialized = false;
-}
-
-bool Faceshift::isActive() const {
-    const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
-    return (usecTimestampNow() - _lastReceiveTimestamp) < ACTIVE_TIMEOUT_USECS;
-}
-
-bool Faceshift::isTracking() const {
-    return isActive() && _tracking;
-}
-#endif
-
-bool Faceshift::isConnectedOrConnecting() const {
-    return _tcpSocket.state() == QAbstractSocket::ConnectedState ||
-        (_tcpRetryCount == 0 && _tcpSocket.state() != QAbstractSocket::UnconnectedState);
-}
-
-void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
-        float jawOpen, float mouth2, float mouth3, float mouth4, QVector<float>& coefficients) const {
-    const int MMMM_BLENDSHAPE = 34;
-    const int FUNNEL_BLENDSHAPE = 40;
-    const int SMILE_LEFT_BLENDSHAPE = 28;
-    const int SMILE_RIGHT_BLENDSHAPE = 29;
-    const int MAX_FAKE_BLENDSHAPE = 40; // Largest modified blendshape from above and below
-
-    coefficients.resize(max((int)coefficients.size(), MAX_FAKE_BLENDSHAPE + 1));
-    qFill(coefficients.begin(), coefficients.end(), 0.0f);
-    coefficients[_leftBlinkIndex] = leftBlink;
-    coefficients[_rightBlinkIndex] = rightBlink;
-    coefficients[_browUpCenterIndex] = browUp;
-    coefficients[_browUpLeftIndex] = browUp;
-    coefficients[_browUpRightIndex] = browUp;
-    coefficients[_jawOpenIndex] = jawOpen;
-    coefficients[SMILE_LEFT_BLENDSHAPE] = coefficients[SMILE_RIGHT_BLENDSHAPE] = mouth4;
-    coefficients[MMMM_BLENDSHAPE] = mouth2;
-    coefficients[FUNNEL_BLENDSHAPE] = mouth3;
-}
-
-void Faceshift::setEnabled(bool enabled) {
-    // Don't enable until have explicitly initialized
-    if (!_isInitialized) {
-        return;
-    }
-#ifdef HAVE_FACESHIFT
-    if ((_tcpEnabled = enabled)) {
-        connectSocket();
-    } else {
-        qCDebug(interfaceapp, "Faceshift: Disconnecting...");
-        _tcpSocket.disconnectFromHost();
-    }
-#endif
-}
-
-void Faceshift::connectSocket() {
-    if (_tcpEnabled) {
-        if (!_tcpRetryCount) {
-            qCDebug(interfaceapp, "Faceshift: Connecting...");
-        }
-
-        _tcpSocket.connectToHost(_hostname.get(), FACESHIFT_PORT);
-        _tracking = false;
-    }
-}
-
-void Faceshift::noteConnected() {
-#ifdef HAVE_FACESHIFT
-    qCDebug(interfaceapp, "Faceshift: Connected");
-    // request the list of blendshape names
-    string message;
-    fsBinaryStream::encode_message(message, fsMsgSendBlendshapeNames());
-    send(message);
-#endif
-}
-
-void Faceshift::noteDisconnected() {
-#ifdef HAVE_FACESHIFT
-    qCDebug(interfaceapp, "Faceshift: Disconnected");
-#endif
-}
-
-void Faceshift::noteError(QAbstractSocket::SocketError error) {
-    if (!_tcpRetryCount) {
-        // Only spam log with fail to connect the first time, so that we can keep waiting for server
-        qCWarning(interfaceapp) << "Faceshift: " << _tcpSocket.errorString();
-    }
-    // retry connection after a 2 second delay
-    if (_tcpEnabled) {
-        _tcpRetryCount++;
-        QTimer::singleShot(2000, this, SLOT(connectSocket()));
-    }
-}
-
-void Faceshift::readPendingDatagrams() {
-    QByteArray buffer;
-    while (_udpSocket.hasPendingDatagrams()) {
-        buffer.resize(_udpSocket.pendingDatagramSize());
-        _udpSocket.readDatagram(buffer.data(), buffer.size());
-        receive(buffer);
-    }
-}
-
-void Faceshift::readFromSocket() {
-    receive(_tcpSocket.readAll());
-}
-
-void Faceshift::send(const std::string& message) {
-    _tcpSocket.write(message.data(), message.size());
-}
-
-void Faceshift::receive(const QByteArray& buffer) {
-#ifdef HAVE_FACESHIFT
-    _lastReceiveTimestamp = usecTimestampNow();
-
-    _stream.received(buffer.size(), buffer.constData());
-    fsMsgPtr msg;
-    for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
-        switch (msg->id()) {
-            case fsMsg::MSG_OUT_TRACKING_STATE: {
-                const fsTrackingData& data = static_pointer_cast<fsMsgTrackingState>(msg)->tracking_data();
-                if ((_tracking = data.m_trackingSuccessful)) {
-                    glm::quat newRotation = glm::quat(data.m_headRotation.w, -data.m_headRotation.x,
-                        data.m_headRotation.y, -data.m_headRotation.z);
-                    // Compute angular velocity of the head
-                    glm::quat r = glm::normalize(newRotation * glm::inverse(_headRotation));
-                    float theta = 2 * acos(r.w);
-                    if (theta > EPSILON) {
-                        float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
-                        _headAngularVelocity = theta / _averageFrameTime * glm::vec3(r.x, r.y, r.z) / rMag;
-                    } else {
-                        _headAngularVelocity = glm::vec3(0, 0, 0);
-                    }
-                    const float ANGULAR_VELOCITY_FILTER_STRENGTH = 0.3f;
-                    _headRotation = safeMix(_headRotation, newRotation, glm::clamp(glm::length(_headAngularVelocity) *
-                        ANGULAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f));
-
-                    const float TRANSLATION_SCALE = 0.02f;
-                    glm::vec3 newHeadTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
-                        -data.m_headTranslation.z) * TRANSLATION_SCALE;
-
-                    _headLinearVelocity = (newHeadTranslation - _lastHeadTranslation) / _averageFrameTime;
-
-                    const float LINEAR_VELOCITY_FILTER_STRENGTH = 0.3f;
-                    float velocityFilter = glm::clamp(1.0f - glm::length(_headLinearVelocity) *
-                        LINEAR_VELOCITY_FILTER_STRENGTH, 0.0f, 1.0f);
-                    _filteredHeadTranslation = velocityFilter * _filteredHeadTranslation + (1.0f - velocityFilter) * newHeadTranslation;
-
-                    _lastHeadTranslation = newHeadTranslation;
-                    _headTranslation = _filteredHeadTranslation;
-
-                    _eyeGazeLeftPitch = -data.m_eyeGazeLeftPitch;
-                    _eyeGazeLeftYaw = data.m_eyeGazeLeftYaw;
-                    _eyeGazeRightPitch = -data.m_eyeGazeRightPitch;
-                    _eyeGazeRightYaw = data.m_eyeGazeRightYaw;
-                    _blendshapeCoefficients = QVector<float>::fromStdVector(data.m_coeffs);
-
-                    const float FRAME_AVERAGING_FACTOR = 0.99f;
-                    quint64 usecsNow = usecTimestampNow();
-                    if (_lastMessageReceived != 0) {
-                        _averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
-                            (1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
-                    }
-                    _lastMessageReceived = usecsNow;
-                }
-                break;
-            }
-            case fsMsg::MSG_OUT_BLENDSHAPE_NAMES: {
-                const vector<string>& names = static_pointer_cast<fsMsgBlendshapeNames>(msg)->blendshape_names();
-                for (int i = 0; i < (int)names.size(); i++) {
-                    if (names[i] == "EyeBlink_L") {
-                        _leftBlinkIndex = i;
-
-                    } else if (names[i] == "EyeBlink_R") {
-                        _rightBlinkIndex = i;
-
-                    } else if (names[i] == "EyeOpen_L") {
-                        _leftEyeOpenIndex = i;
-
-                    } else if (names[i] == "EyeOpen_R") {
-                        _rightEyeOpenIndex = i;
-
-                    } else if (names[i] == "BrowsD_L") {
-                        _browDownLeftIndex = i;
-
-                    } else if (names[i] == "BrowsD_R") {
-                        _browDownRightIndex = i;
-
-                    } else if (names[i] == "BrowsU_C") {
-                        _browUpCenterIndex = i;
-
-                    } else if (names[i] == "BrowsU_L") {
-                        _browUpLeftIndex = i;
-
-                    } else if (names[i] == "BrowsU_R") {
-                        _browUpRightIndex = i;
-
-                    } else if (names[i] == "JawOpen") {
-                        _jawOpenIndex = i;
-
-                    } else if (names[i] == "MouthSmile_L") {
-                        _mouthSmileLeftIndex = i;
-
-                    } else if (names[i] == "MouthSmile_R") {
-                        _mouthSmileRightIndex = i;
-                    }
-                }
-                break;
-            }
-            default:
-                break;
-        }
-    }
-#endif
-
-    FaceTracker::countFrame();
-}
-
-void Faceshift::setHostname(const QString& hostname) {
-    _hostname.set(hostname);
-}
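Aside: the deleted tracker derived head angular velocity from successive rotations. For a delta quaternion r between frames dt apart, the rotation angle is theta = 2*acos(r.w) and the axis is the normalized vector part, giving omega = (theta / dt) * axis. A self-contained sketch of that computation (with a clamp the original omitted):

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::vec3 angularVelocity(const glm::quat& prev, const glm::quat& curr, float dt) {
    // delta rotation taking prev to curr
    glm::quat r = glm::normalize(curr * glm::inverse(prev));
    float theta = 2.0f * std::acos(glm::clamp(r.w, -1.0f, 1.0f));
    glm::vec3 axis(r.x, r.y, r.z);
    float len = glm::length(axis);
    if (len < 1e-6f || dt <= 0.0f) {
        return glm::vec3(0.0f); // no measurable rotation
    }
    return (theta / dt) * (axis / len);
}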
@@ -1,155 +0,0 @@
-//
-//  Faceshift.h
-//  interface/src/devices
-//
-//  Created by Andrzej Kapolka on 9/3/13.
-//  Copyright 2013 High Fidelity, Inc.
-//
-//  Distributed under the Apache License, Version 2.0.
-//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
-//
-
-#ifndef hifi_Faceshift_h
-#define hifi_Faceshift_h
-
-#include <QTcpSocket>
-#include <QUdpSocket>
-
-#ifdef HAVE_FACESHIFT
-#include <fsbinarystream.h>
-#endif
-
-#include <DependencyManager.h>
-#include <SettingHandle.h>
-
-#include "FaceTracker.h"
-
-const float STARTING_FACESHIFT_FRAME_TIME = 0.033f;
-
-/// Handles interaction with the Faceshift software, which provides head position/orientation and facial features.
-class Faceshift : public FaceTracker, public Dependency {
-    Q_OBJECT
-    SINGLETON_DEPENDENCY
-
-public:
-#ifdef HAVE_FACESHIFT
-    // If we don't have faceshift, use the base class' methods
-    virtual void init() override;
-    virtual void update(float deltaTime) override;
-    virtual void reset() override;
-
-    virtual bool isActive() const override;
-    virtual bool isTracking() const override;
-#endif
-
-    bool isConnectedOrConnecting() const;
-
-    const glm::vec3& getHeadAngularVelocity() const { return _headAngularVelocity; }
-
-    // these pitch/yaw angles are in degrees
-    float getEyeGazeLeftPitch() const { return _eyeGazeLeftPitch; }
-    float getEyeGazeLeftYaw() const { return _eyeGazeLeftYaw; }
-
-    float getEyeGazeRightPitch() const { return _eyeGazeRightPitch; }
-    float getEyeGazeRightYaw() const { return _eyeGazeRightYaw; }
-
-    float getLeftBlink() const { return getBlendshapeCoefficient(_leftBlinkIndex); }
-    float getRightBlink() const { return getBlendshapeCoefficient(_rightBlinkIndex); }
-    float getLeftEyeOpen() const { return getBlendshapeCoefficient(_leftEyeOpenIndex); }
-    float getRightEyeOpen() const { return getBlendshapeCoefficient(_rightEyeOpenIndex); }
-
-    float getBrowDownLeft() const { return getBlendshapeCoefficient(_browDownLeftIndex); }
-    float getBrowDownRight() const { return getBlendshapeCoefficient(_browDownRightIndex); }
-    float getBrowUpCenter() const { return getBlendshapeCoefficient(_browUpCenterIndex); }
-    float getBrowUpLeft() const { return getBlendshapeCoefficient(_browUpLeftIndex); }
-    float getBrowUpRight() const { return getBlendshapeCoefficient(_browUpRightIndex); }
-
-    float getMouthSize() const { return getBlendshapeCoefficient(_jawOpenIndex); }
-    float getMouthSmileLeft() const { return getBlendshapeCoefficient(_mouthSmileLeftIndex); }
-    float getMouthSmileRight() const { return getBlendshapeCoefficient(_mouthSmileRightIndex); }
-
-    QString getHostname() { return _hostname.get(); }
-    void setHostname(const QString& hostname);
-
-    void updateFakeCoefficients(float leftBlink,
-        float rightBlink,
-        float browUp,
-        float jawOpen,
-        float mouth2,
-        float mouth3,
-        float mouth4,
-        QVector<float>& coefficients) const;
-
-signals:
-    void connectionStateChanged();
-
-public slots:
-    void setEnabled(bool enabled) override;
-
-private slots:
-    void connectSocket();
-    void noteConnected();
-    void noteError(QAbstractSocket::SocketError error);
-    void readPendingDatagrams();
-    void readFromSocket();
-    void noteDisconnected();
-
-private:
-    Faceshift();
-    virtual ~Faceshift() {}
-
-    void send(const std::string& message);
-    void receive(const QByteArray& buffer);
-
-    QTcpSocket _tcpSocket;
-    QUdpSocket _udpSocket;
-
-#ifdef HAVE_FACESHIFT
-    fs::fsBinaryStream _stream;
-#endif
-
-    bool _tcpEnabled = true;
-    int _tcpRetryCount = 0;
-    bool _tracking = false;
-    quint64 _lastReceiveTimestamp = 0;
-    quint64 _lastMessageReceived = 0;
-    float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;
-
-    glm::vec3 _headAngularVelocity = glm::vec3(0.0f);
-    glm::vec3 _headLinearVelocity = glm::vec3(0.0f);
-    glm::vec3 _lastHeadTranslation = glm::vec3(0.0f);
-    glm::vec3 _filteredHeadTranslation = glm::vec3(0.0f);
-
-    // degrees
-    float _eyeGazeLeftPitch = 0.0f;
-    float _eyeGazeLeftYaw = 0.0f;
-    float _eyeGazeRightPitch = 0.0f;
-    float _eyeGazeRightYaw = 0.0f;
-
-    // degrees
-    float _longTermAverageEyePitch = 0.0f;
-    float _longTermAverageEyeYaw = 0.0f;
-    bool _longTermAverageInitialized = false;
-
-    Setting::Handle<QString> _hostname;
-
-    // see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
-    int _leftBlinkIndex = 0;
-    int _rightBlinkIndex = 1;
-    int _leftEyeOpenIndex = 8;
-    int _rightEyeOpenIndex = 9;
-
-    // Brows
-    int _browDownLeftIndex = 14;
-    int _browDownRightIndex = 15;
-    int _browUpCenterIndex = 16;
-    int _browUpLeftIndex = 17;
-    int _browUpRightIndex = 18;
-
-    int _mouthSmileLeftIndex = 28;
-    int _mouthSmileRightIndex = 29;
-
-    int _jawOpenIndex = 21;
-};
-
-#endif // hifi_Faceshift_h
@@ -14,7 +14,7 @@
 
 #include <QDateTime>
 
-#include "MotionTracker.h"
+#include <trackers/MotionTracker.h>
 
 #ifdef HAVE_LEAPMOTION
 #include <Leap.h>
@@ -17,7 +17,7 @@
 #include <plugins/PluginManager.h>
 
 #include "Application.h"
-#include "devices/MotionTracker.h"
+#include <trackers/MotionTracker.h>
 
 void ControllerScriptingInterface::handleMetaEvent(HFMetaEvent* event) {
     if (event->type() == HFActionEvent::startType()) {
@@ -11,9 +11,9 @@
 
 #include <AudioClient.h>
 #include <SettingHandle.h>
+#include <trackers/FaceTracker.h>
 
 #include "Application.h"
-#include "devices/FaceTracker.h"
 #include "Menu.h"
 
 HIFI_QML_DEF(AvatarInputs)
@@ -11,7 +11,6 @@
 #include <AudioClient.h>
 #include <avatar/AvatarManager.h>
 #include <devices/DdeFaceTracker.h>
-#include <devices/Faceshift.h>
 #include <NetworkingConstants.h>
 #include <ScriptEngines.h>
 #include <OffscreenUi.h>
@@ -202,13 +201,6 @@ void setupPreferences() {
         auto setter = [](float value) { FaceTracker::setEyeDeflection(value); };
         preferences->addPreference(new SliderPreference(AVATAR_TUNING, "Face tracker eye deflection", getter, setter));
     }
-    {
-        auto getter = []()->QString { return DependencyManager::get<Faceshift>()->getHostname(); };
-        auto setter = [](const QString& value) { DependencyManager::get<Faceshift>()->setHostname(value); };
-        auto preference = new EditPreference(AVATAR_TUNING, "Faceshift hostname", getter, setter);
-        preference->setPlaceholderText("localhost");
-        preferences->addPreference(preference);
-    }
     {
         auto getter = [=]()->QString { return myAvatar->getAnimGraphOverrideUrl().toString(); };
         auto setter = [=](const QString& value) { myAvatar->setAnimGraphOverrideUrl(QUrl(value)); };
@@ -46,7 +46,6 @@ static bool isEqual(const glm::quat& p, const glm::quat& q) {
 const glm::vec3 DEFAULT_RIGHT_EYE_POS(-0.3f, 0.9f, 0.0f);
 const glm::vec3 DEFAULT_LEFT_EYE_POS(0.3f, 0.9f, 0.0f);
 const glm::vec3 DEFAULT_HEAD_POS(0.0f, 0.75f, 0.0f);
-const glm::vec3 DEFAULT_NECK_POS(0.0f, 0.70f, 0.0f);
 
 void Rig::overrideAnimation(const QString& url, float fps, bool loop, float firstFrame, float lastFrame) {
@@ -1020,7 +1019,7 @@ glm::quat Rig::getJointDefaultRotationInParentFrame(int jointIndex) {
 }
 
 void Rig::updateFromHeadParameters(const HeadParameters& params, float dt) {
-    updateNeckJoint(params.neckJointIndex, params);
+    updateHeadAnimVars(params);
 
     _animVars.set("isTalking", params.isTalking);
     _animVars.set("notIsTalking", !params.isTalking);
@ -1028,101 +1027,73 @@ void Rig::updateFromHeadParameters(const HeadParameters& params, float dt) {
|
|||
if (params.hipsEnabled) {
|
||||
_animVars.set("hipsType", (int)IKTarget::Type::RotationAndPosition);
|
||||
_animVars.set("hipsPosition", extractTranslation(params.hipsMatrix));
|
||||
_animVars.set("hipsRotation", glmExtractRotation(params.hipsMatrix) * Quaternions::Y_180);
|
||||
_animVars.set("hipsRotation", glmExtractRotation(params.hipsMatrix));
|
||||
} else {
|
||||
_animVars.set("hipsType", (int)IKTarget::Type::Unknown);
|
||||
}
|
||||
|
||||
// by default this IK target is disabled.
|
||||
_animVars.set("spine2Type", (int)IKTarget::Type::Unknown);
|
||||
if (params.spine2Enabled) {
|
||||
_animVars.set("spine2Type", (int)IKTarget::Type::RotationAndPosition);
|
||||
_animVars.set("spine2Position", extractTranslation(params.spine2Matrix));
|
||||
_animVars.set("spine2Rotation", glmExtractRotation(params.spine2Matrix));
|
||||
} else {
|
||||
_animVars.set("spine2Type", (int)IKTarget::Type::Unknown);
|
||||
}
|
||||
}
|
||||
|
||||
void Rig::updateFromEyeParameters(const EyeParameters& params) {
|
||||
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation,
|
||||
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
|
||||
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation,
|
||||
params.worldHeadOrientation, params.eyeLookAt, params.eyeSaccade);
|
||||
updateEyeJoint(params.leftEyeJointIndex, params.modelTranslation, params.modelRotation, params.eyeLookAt, params.eyeSaccade);
|
||||
updateEyeJoint(params.rightEyeJointIndex, params.modelTranslation, params.modelRotation, params.eyeLookAt, params.eyeSaccade);
|
||||
}
|
||||
|
||||
void Rig::computeHeadNeckAnimVars(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut,
                                  glm::vec3& neckPositionOut, glm::quat& neckOrientationOut) const {
void Rig::computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const {

    // the input hmd values are in avatar/rig space
    const glm::vec3& hmdPosition = hmdPose.trans();
    const glm::quat& hmdOrientation = hmdPose.rot();

    // the HMD looks down the negative z axis, but the head bone looks down the z axis, so apply a 180 degree rotation.
    const glm::quat& hmdOrientation = hmdPose.rot() * Quaternions::Y_180;

    // TODO: cache jointIndices
    int rightEyeIndex = indexOfJoint("RightEye");
    int leftEyeIndex = indexOfJoint("LeftEye");
    int headIndex = indexOfJoint("Head");
    int neckIndex = indexOfJoint("Neck");

    glm::vec3 absRightEyePos = rightEyeIndex != -1 ? getAbsoluteDefaultPose(rightEyeIndex).trans() : DEFAULT_RIGHT_EYE_POS;
    glm::vec3 absLeftEyePos = leftEyeIndex != -1 ? getAbsoluteDefaultPose(leftEyeIndex).trans() : DEFAULT_LEFT_EYE_POS;
    glm::vec3 absHeadPos = headIndex != -1 ? getAbsoluteDefaultPose(headIndex).trans() : DEFAULT_HEAD_POS;
    glm::vec3 absNeckPos = neckIndex != -1 ? getAbsoluteDefaultPose(neckIndex).trans() : DEFAULT_NECK_POS;

    glm::vec3 absCenterEyePos = (absRightEyePos + absLeftEyePos) / 2.0f;
    glm::vec3 eyeOffset = absCenterEyePos - absHeadPos;
    glm::vec3 headOffset = absHeadPos - absNeckPos;

    // apply simplistic head/neck model

    // head
    headPositionOut = hmdPosition - hmdOrientation * eyeOffset;

    headOrientationOut = hmdOrientation;

    // neck
    neckPositionOut = hmdPosition - hmdOrientation * (headOffset + eyeOffset);

    // slerp between default orientation and hmdOrientation
    neckOrientationOut = safeMix(hmdOrientation, _animSkeleton->getRelativeDefaultPose(neckIndex).rot(), 0.5f);
}

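In equation form, the simplistic head/neck model above reduces to

    \( p_{head} = p_{hmd} - R_{hmd}\,o_{eye} \qquad p_{neck} = p_{hmd} - R_{hmd}\,(o_{head} + o_{eye}) \)

where \(o_{eye}\) is the center-eye position relative to the head joint, \(o_{head}\) is the head position relative to the neck joint (both measured in the skeleton's default pose), and \(R_{hmd}\) is the Y-flipped HMD orientation; the neck orientation is then blended halfway back toward the default pose.
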
void Rig::updateNeckJoint(int index, const HeadParameters& params) {
    if (_animSkeleton && index >= 0 && index < _animSkeleton->getNumJoints()) {
        glm::quat yFlip180 = glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
        if (params.isInHMD) {
            glm::vec3 headPos, neckPos;
            glm::quat headRot, neckRot;

            AnimPose hmdPose(glm::vec3(1.0f), params.rigHeadOrientation * yFlip180, params.rigHeadPosition);
            computeHeadNeckAnimVars(hmdPose, headPos, headRot, neckPos, neckRot);

            // debug rendering
#ifdef DEBUG_RENDERING
            const glm::vec4 red(1.0f, 0.0f, 0.0f, 1.0f);
            const glm::vec4 green(0.0f, 1.0f, 0.0f, 1.0f);

            // transform from bone into avatar space
            AnimPose headPose(glm::vec3(1), headRot, headPos);
            DebugDraw::getInstance().addMyAvatarMarker("headTarget", headPose.rot, headPose.trans, red);

            // transform from bone into avatar space
            AnimPose neckPose(glm::vec3(1), neckRot, neckPos);
            DebugDraw::getInstance().addMyAvatarMarker("neckTarget", neckPose.rot, neckPose.trans, green);
#endif

            _animVars.set("headPosition", headPos);
            _animVars.set("headRotation", headRot);
            _animVars.set("headType", (int)IKTarget::Type::HmdHead);
            _animVars.set("neckPosition", neckPos);
            _animVars.set("neckRotation", neckRot);
            _animVars.set("neckType", (int)IKTarget::Type::Unknown); // 'Unknown' disables the target

void Rig::updateHeadAnimVars(const HeadParameters& params) {
    if (_animSkeleton) {
        if (params.headEnabled) {
            _animVars.set("headPosition", params.rigHeadPosition);
            _animVars.set("headRotation", params.rigHeadOrientation);
            if (params.hipsEnabled) {
                // Since there is an explicit hips IK target, switch the head to use the more generic RotationAndPosition IK chain type.
                // This will allow the spine to bend more, ensuring that it can reach the head target position.
                _animVars.set("headType", (int)IKTarget::Type::RotationAndPosition);
            } else {
                // When there is no hips IK target, use the HmdHead IK chain type. This will make the spine very stiff,
                // but because the IK _hipsOffset is enabled, the hips will naturally follow underneath the head.
                _animVars.set("headType", (int)IKTarget::Type::HmdHead);
            }
        } else {
            _animVars.unset("headPosition");
            _animVars.set("headRotation", params.rigHeadOrientation * yFlip180);
            _animVars.set("headAndNeckType", (int)IKTarget::Type::RotationOnly);
            _animVars.set("headRotation", params.rigHeadOrientation);
            _animVars.set("headType", (int)IKTarget::Type::RotationOnly);
            _animVars.unset("neckPosition");
            _animVars.unset("neckRotation");
            _animVars.set("neckType", (int)IKTarget::Type::RotationOnly);
        }
    }
}

void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {
void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::vec3& lookAtSpot, const glm::vec3& saccade) {

    // TODO: does not properly handle avatar scale.

@@ -42,18 +42,17 @@ public:
};

struct HeadParameters {
    glm::quat worldHeadOrientation = glm::quat(); // world space (-z forward)
    glm::quat rigHeadOrientation = glm::quat();   // rig space (-z forward)
    glm::vec3 rigHeadPosition = glm::vec3();      // rig space
    glm::mat4 hipsMatrix = glm::mat4();           // rig space
    glm::mat4 hipsMatrix = glm::mat4();           // rig space
    glm::mat4 spine2Matrix = glm::mat4();         // rig space
    glm::quat rigHeadOrientation = glm::quat();   // rig space (-z forward)
    glm::vec3 rigHeadPosition = glm::vec3();      // rig space
    bool hipsEnabled = false;
    bool isInHMD = false;
    int neckJointIndex = -1;
    bool headEnabled = false;
    bool spine2Enabled = false;
    bool isTalking = false;
};

struct EyeParameters {
    glm::quat worldHeadOrientation = glm::quat();
    glm::vec3 eyeLookAt = glm::vec3();   // world space
    glm::vec3 eyeSaccade = glm::vec3();  // world space
    glm::vec3 modelTranslation = glm::vec3();

@@ -230,6 +229,9 @@ public:

    void setEnableDebugDrawIKTargets(bool enableDebugDrawIKTargets) { _enableDebugDrawIKTargets = enableDebugDrawIKTargets; }

    // input assumed to be in rig space
    void computeHeadFromHMD(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut) const;

signals:
    void onLoadComplete();

@@ -239,10 +241,9 @@ protected:
    void applyOverridePoses();
    void buildAbsoluteRigPoses(const AnimPoseVec& relativePoses, AnimPoseVec& absolutePosesOut);

    void updateNeckJoint(int index, const HeadParameters& params);
    void computeHeadNeckAnimVars(const AnimPose& hmdPose, glm::vec3& headPositionOut, glm::quat& headOrientationOut,
                                 glm::vec3& neckPositionOut, glm::quat& neckOrientationOut) const;
    void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::quat& worldHeadOrientation, const glm::vec3& lookAt, const glm::vec3& saccade);
    void updateHeadAnimVars(const HeadParameters& params);

    void updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm::quat& modelRotation, const glm::vec3& lookAt, const glm::vec3& saccade);
    void calcAnimAlpha(float speed, const std::vector<float>& referenceSpeeds, float* alphaOut) const;

    AnimPose _modelOffset; // model to rig space
@@ -393,9 +393,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
    if (isFingerPointing) {
        setAtBit(flags, HAND_STATE_FINGER_POINTING_BIT);
    }
    // faceshift state
    // face tracker state
    if (_headData->_isFaceTrackerConnected) {
        setAtBit(flags, IS_FACESHIFT_CONNECTED);
        setAtBit(flags, IS_FACE_TRACKER_CONNECTED);
    }
    // eye tracker state
    if (_headData->_isEyeTrackerConnected) {

@@ -883,7 +883,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
        + (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);

    auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
    auto newFaceTrackerConnected = oneAtBit(bitItems, IS_FACE_TRACKER_CONNECTED);
    auto newEyeTrackerConnected = oneAtBit(bitItems, IS_EYE_TRACKER_CONNECTED);

    bool keyStateChanged = (_keyState != newKeyState);
@@ -99,7 +99,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
// Referential Data - R is found in the 7th bit
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
const int IS_FACESHIFT_CONNECTED = 4; // 5th bit
const int IS_FACE_TRACKER_CONNECTED = 4; // 5th bit
const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_REFERENTIAL = 6; // 7th bit
const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
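A minimal sketch of the renamed flag in use, mirroring the toByteArray/parseDataFromBuffer hunks above (`flags` and `faceTrackerConnected` are hypothetical locals, not code from this change):

    unsigned char flags = 0;
    if (faceTrackerConnected) {
        // record the face tracker in the 5th bit, as toByteArray does
        setAtBit(flags, IS_FACE_TRACKER_CONNECTED);
    }
    // on receipt, recover it the way parseDataFromBuffer does
    bool newFaceTrackerConnected = oneAtBit(flags, IS_FACE_TRACKER_CONNECTED);
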
@@ -218,7 +218,7 @@ namespace AvatarDataPacket {
    } PACKED_END;
    const size_t AVATAR_LOCAL_POSITION_SIZE = 12;

    // only present if IS_FACESHIFT_CONNECTED flag is set in AvatarInfo.flags
    // only present if IS_FACE_TRACKER_CONNECTED flag is set in AvatarInfo.flags
    PACKED_BEGIN struct FaceTrackerInfo {
        float leftEyeBlink;
        float rightEyeBlink;
@@ -53,6 +53,9 @@ namespace controller {
    makePosePair(Action::RIGHT_HAND, "RightHand"),
    makePosePair(Action::LEFT_FOOT, "LeftFoot"),
    makePosePair(Action::RIGHT_FOOT, "RightFoot"),
    makePosePair(Action::HIPS, "Hips"),
    makePosePair(Action::SPINE2, "Spine2"),
    makePosePair(Action::HEAD, "Head"),

    makeButtonPair(Action::LEFT_HAND_CLICK, "LeftHandClick"),
    makeButtonPair(Action::RIGHT_HAND_CLICK, "RightHandClick"),

@@ -44,6 +44,9 @@ enum class Action {
    RIGHT_HAND,
    LEFT_FOOT,
    RIGHT_FOOT,
    HIPS,
    SPINE2,
    HEAD,

    LEFT_HAND_CLICK,
    RIGHT_HAND_CLICK,
@@ -16,9 +16,15 @@
namespace controller {

    struct InputCalibrationData {
        glm::mat4 sensorToWorldMat;
        glm::mat4 avatarMat;
        glm::mat4 hmdSensorMat;
        glm::mat4 sensorToWorldMat;    // sensor to world
        glm::mat4 avatarMat;           // avatar to world
        glm::mat4 hmdSensorMat;        // hmd pos and orientation in sensor space
        glm::mat4 defaultCenterEyeMat; // default pose for the center of the eyes in avatar space
        glm::mat4 defaultHeadMat;      // default pose for head joint in avatar space
        glm::mat4 defaultSpine2;       // default pose for spine2 joint in avatar space
        glm::mat4 defaultHips;         // default pose for hips joint in avatar space
        glm::mat4 defaultLeftFoot;     // default pose for leftFoot joint in avatar space
        glm::mat4 defaultRightFoot;    // default pose for rightFoot joint in avatar space
    };

    enum class ChannelType {
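As an illustration of how these frames compose under the conventions in the comments (a sketch, not code from this change; `calibration` is a hypothetical InputCalibrationData instance):

    // avatarMat and sensorToWorldMat both map into world space, so the
    // HMD pose expressed in avatar space composes as:
    glm::mat4 hmdInAvatarSpace = glm::inverse(calibration.avatarMat) *
                                 calibration.sensorToWorldMat *
                                 calibration.hmdSensorMat;
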
@@ -104,6 +104,9 @@ Input::NamedVector StandardController::getAvailableInputs() const {
        makePair(RIGHT_HAND, "RightHand"),
        makePair(LEFT_FOOT, "LeftFoot"),
        makePair(RIGHT_FOOT, "RightFoot"),
        makePair(HIPS, "Hips"),
        makePair(SPINE2, "Spine2"),
        makePair(HEAD, "Head"),

        // Aliases, PlayStation style names
        makePair(LB, "L1"),
@@ -38,6 +38,11 @@ const quat Quaternions::X_180{ 0.0f, 1.0f, 0.0f, 0.0f };
const quat Quaternions::Y_180{ 0.0f, 0.0f, 1.0f, 0.0f };
const quat Quaternions::Z_180{ 0.0f, 0.0f, 0.0f, 1.0f };

const mat4 Matrices::IDENTITY { glm::mat4() };
const mat4 Matrices::X_180 { createMatFromQuatAndPos(Quaternions::X_180, Vectors::ZERO) };
const mat4 Matrices::Y_180 { createMatFromQuatAndPos(Quaternions::Y_180, Vectors::ZERO) };
const mat4 Matrices::Z_180 { createMatFromQuatAndPos(Quaternions::Z_180, Vectors::ZERO) };

// Safe version of glm::mix; based on the code in Nick Bobick's article,
// http://www.gamasutra.com/features/19980703/quaternions_01.htm (via Clyde,
// https://github.com/threerings/clyde/blob/master/src/main/java/com/threerings/math/Quaternion.java)
@@ -54,6 +54,13 @@ const glm::vec3 IDENTITY_FORWARD = glm::vec3( 0.0f, 0.0f,-1.0f);

glm::quat safeMix(const glm::quat& q1, const glm::quat& q2, float alpha);

class Matrices {
public:
    static const mat4 IDENTITY;
    static const mat4 X_180;
    static const mat4 Y_180;
    static const mat4 Z_180;
};

class Quaternions {
public:
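A minimal sketch of the antipodal-safe mix the declaration above refers to, assuming glm's quaternion helpers; the shipped implementation follows the Bobick/Clyde approach cited in the .cpp and may differ in detail:

    #include <glm/gtc/quaternion.hpp>

    glm::quat safeMixSketch(const glm::quat& q1, const glm::quat& q2, float alpha) {
        glm::quat q2adj = q2;
        // If q1 and q2 lie on opposite hemispheres, negate one so the
        // interpolation takes the shorter arc instead of spinning the long way.
        if (glm::dot(q1, q2adj) < 0.0f) {
            q2adj = -q2adj;
        }
        // Normalized linear blend; cheap and stable for small alphas.
        return glm::normalize(glm::lerp(q1, q2adj, alpha));
    }
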
6
libraries/trackers/CMakeLists.txt
Normal file

@@ -0,0 +1,6 @@
set(TARGET_NAME trackers)
setup_hifi_library()
GroupSources("src")
link_hifi_libraries(shared)

target_bullet()
@@ -1,7 +1,4 @@
//
//  DeviceTracker.cpp
//  interface/src/devices
//
//  Created by Sam Cake on 6/20/14.
//  Copyright 2014 High Fidelity, Inc.
//

@@ -1,7 +1,4 @@
//
//  DeviceTracker.h
//  interface/src/devices
//
//  Created by Sam Cake on 6/20/14.
//  Copyright 2014 High Fidelity, Inc.
//
@@ -1,7 +1,4 @@
//
//  EyeTracker.cpp
//  interface/src/devices
//
//  Created by David Rowe on 27 Jul 2015.
//  Copyright 2015 High Fidelity, Inc.
//

@@ -17,8 +14,8 @@

#include <SharedUtil.h>

#include "InterfaceLogging.h"
#include "OctreeConstants.h"
#include "Logging.h"
#include <OctreeConstants.h>

#ifdef HAVE_IVIEWHMD
char* HIGH_FIDELITY_EYE_TRACKER_CALIBRATION = "HighFidelityEyeTrackerCalibration";

@@ -115,7 +112,7 @@ void EyeTracker::processData(smi_CallbackDataStruct* data) {

void EyeTracker::init() {
    if (_isInitialized) {
        qCWarning(interfaceapp) << "Eye Tracker: Already initialized";
        qCWarning(trackers) << "Eye Tracker: Already initialized";
        return;
    }
}
@@ -1,7 +1,4 @@
//
//  EyeTracker.h
//  interface/src/devices
//
//  Created by David Rowe on 27 Jul 2015.
//  Copyright 2015 High Fidelity, Inc.
//
@@ -1,7 +1,4 @@
//
//  FaceTracker.cpp
//  interface/src/devices
//
//  Created by Andrzej Kapolka on 4/9/14.
//  Copyright 2014 High Fidelity, Inc.
//

@@ -9,22 +6,21 @@
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include <QTimer>

#include <GLMHelpers.h>

#include "FaceTracker.h"
#include "InterfaceLogging.h"
#include "Menu.h"

#include <QTimer>
#include <GLMHelpers.h>
#include "Logging.h"
//#include "Menu.h"

const int FPS_TIMER_DELAY = 2000; // ms
const int FPS_TIMER_DURATION = 2000; // ms

const float DEFAULT_EYE_DEFLECTION = 0.25f;
Setting::Handle<float> FaceTracker::_eyeDeflection("faceshiftEyeDeflection", DEFAULT_EYE_DEFLECTION);
bool FaceTracker::_isMuted { true };

void FaceTracker::init() {
    _isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
    _isInitialized = true; // FaceTracker can be used now
}

@@ -101,7 +97,7 @@ void FaceTracker::countFrame() {
}

void FaceTracker::finishFPSTimer() {
    qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
    qCDebug(trackers) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
    _isCalculatingFPS = false;
}

@@ -113,3 +109,25 @@ void FaceTracker::toggleMute() {
void FaceTracker::setEyeDeflection(float eyeDeflection) {
    _eyeDeflection.set(eyeDeflection);
}

void FaceTracker::updateFakeCoefficients(float leftBlink, float rightBlink, float browUp,
        float jawOpen, float mouth2, float mouth3, float mouth4, QVector<float>& coefficients) {
    const int MMMM_BLENDSHAPE = 34;
    const int FUNNEL_BLENDSHAPE = 40;
    const int SMILE_LEFT_BLENDSHAPE = 28;
    const int SMILE_RIGHT_BLENDSHAPE = 29;
    const int MAX_FAKE_BLENDSHAPE = 40; // Largest modified blendshape from above and below

    coefficients.resize(std::max((int)coefficients.size(), MAX_FAKE_BLENDSHAPE + 1));
    qFill(coefficients.begin(), coefficients.end(), 0.0f);
    coefficients[_leftBlinkIndex] = leftBlink;
    coefficients[_rightBlinkIndex] = rightBlink;
    coefficients[_browUpCenterIndex] = browUp;
    coefficients[_browUpLeftIndex] = browUp;
    coefficients[_browUpRightIndex] = browUp;
    coefficients[_jawOpenIndex] = jawOpen;
    coefficients[SMILE_LEFT_BLENDSHAPE] = coefficients[SMILE_RIGHT_BLENDSHAPE] = mouth4;
    coefficients[MMMM_BLENDSHAPE] = mouth2;
    coefficients[FUNNEL_BLENDSHAPE] = mouth3;
}
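A hypothetical call site for the static helper above, zeroing every blendshape and then faking a full blink with the jaw half open (the argument order comes straight from the signature in the hunk):

    QVector<float> coefficients;
    FaceTracker::updateFakeCoefficients(1.0f,   // leftBlink
                                        1.0f,   // rightBlink
                                        0.0f,   // browUp
                                        0.5f,   // jawOpen
                                        0.0f,   // mouth2
                                        0.0f,   // mouth3
                                        0.0f,   // mouth4
                                        coefficients);
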
@@ -1,7 +1,4 @@
//
//  FaceTracker.h
//  interface/src/devices
//
//  Created by Andrzej Kapolka on 4/9/14.
//  Copyright 2014 High Fidelity, Inc.
//

@@ -20,7 +17,7 @@

#include <SettingHandle.h>

/// Base class for face trackers (Faceshift, DDE).
/// Base class for face trackers (DDE, BinaryVR).
class FaceTracker : public QObject {
    Q_OBJECT

@@ -45,12 +42,21 @@ public:
    const QVector<float>& getBlendshapeCoefficients() const;
    float getBlendshapeCoefficient(int index) const;

    bool isMuted() const { return _isMuted; }
    void setIsMuted(bool isMuted) { _isMuted = isMuted; }
    static bool isMuted() { return _isMuted; }
    static void setIsMuted(bool isMuted) { _isMuted = isMuted; }

    static float getEyeDeflection() { return _eyeDeflection.get(); }
    static void setEyeDeflection(float eyeDeflection);

    static void updateFakeCoefficients(float leftBlink,
                                       float rightBlink,
                                       float browUp,
                                       float jawOpen,
                                       float mouth2,
                                       float mouth3,
                                       float mouth4,
                                       QVector<float>& coefficients);

signals:
    void muteToggled();

@@ -63,7 +69,7 @@ protected:
    virtual ~FaceTracker() {};

    bool _isInitialized = false;
    bool _isMuted = true;
    static bool _isMuted;

    glm::vec3 _headTranslation = glm::vec3(0.0f);
    glm::quat _headRotation = glm::quat();

@@ -84,6 +90,24 @@ private:
    bool _isCalculatingFPS = false;
    int _frameCount = 0;

    // see http://support.faceshift.com/support/articles/35129-export-of-blendshapes
    static const int _leftBlinkIndex = 0;
    static const int _rightBlinkIndex = 1;
    static const int _leftEyeOpenIndex = 8;
    static const int _rightEyeOpenIndex = 9;

    // Brows
    static const int _browDownLeftIndex = 14;
    static const int _browDownRightIndex = 15;
    static const int _browUpCenterIndex = 16;
    static const int _browUpLeftIndex = 17;
    static const int _browUpRightIndex = 18;

    static const int _mouthSmileLeftIndex = 28;
    static const int _mouthSmileRightIndex = 29;

    static const int _jawOpenIndex = 21;

    static Setting::Handle<float> _eyeDeflection;
};
11
libraries/trackers/src/trackers/Logging.cpp
Normal file

@@ -0,0 +1,11 @@
//
//  Created by Bradley Austin Davis on 2017/04/25
//  Copyright 2013-2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "Logging.h"

Q_LOGGING_CATEGORY(trackers, "hifi.trackers")

16
libraries/trackers/src/trackers/Logging.h
Normal file

@@ -0,0 +1,16 @@
//
//  Created by Bradley Austin Davis on 2017/04/25
//  Copyright 2013-2017 High Fidelity, Inc.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#ifndef hifi_TrackersLogging_h
#define hifi_TrackersLogging_h

#include <QtCore/QLoggingCategory>

Q_DECLARE_LOGGING_CATEGORY(trackers)

#endif // hifi_TrackersLogging_h
@@ -1,7 +1,4 @@
//
//  MotionTracker.cpp
//  interface/src/devices
//
//  Created by Sam Cake on 6/20/14.
//  Copyright 2014 High Fidelity, Inc.
//

@@ -10,8 +7,6 @@
//

#include "MotionTracker.h"
#include "GLMHelpers.h"


// glm::mult(mat43, mat43) is just the composition of the 2 matrices, assuming they are in fact mat44 with the last row = { 0, 0, 0, 1 }
namespace glm {
@@ -1,7 +1,4 @@
//
//  MotionTracker.h
//  interface/src/devices
//
//  Created by Sam Cake on 6/20/14.
//  Copyright 2014 High Fidelity, Inc.
//

@@ -14,20 +11,7 @@

#include "DeviceTracker.h"

#ifdef __GNUC__
#pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wsign-compare"
#endif

#include <glm/glm.hpp>

#ifdef __GNUC__
#pragma GCC diagnostic pop
#endif


#include <glm/gtc/quaternion.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <GLMHelpers.h>

/// Base class for device trackers.
class MotionTracker : public DeviceTracker {
@@ -223,6 +223,18 @@ QAction* Menu::addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
    return action;
}

QAction* Menu::addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
                                                      const QString& actionName,
                                                      const std::function<void(bool)>& handler,
                                                      const QKeySequence& shortcut,
                                                      const bool checked,
                                                      int menuItemLocation,
                                                      const QString& grouping) {
    auto action = addCheckableActionToQMenuAndActionHash(destinationMenu, actionName, shortcut, checked, nullptr, nullptr, menuItemLocation, grouping);
    connect(action, &QAction::triggered, handler);
    return action;
}

void Menu::removeAction(MenuWrapper* menu, const QString& actionName) {
    auto action = _actionHash.value(actionName);
    menu->removeAction(action);
@@ -9,6 +9,8 @@
#ifndef hifi_ui_Menu_h
#define hifi_ui_Menu_h

#include <functional>

#include <QtCore/QDir>
#include <QtCore/QPointer>
#include <QtCore/QStandardPaths>

@@ -90,6 +92,14 @@ public:
                                                   int menuItemLocation = UNSPECIFIED_POSITION,
                                                   const QString& grouping = QString());

    QAction* addCheckableActionToQMenuAndActionHash(MenuWrapper* destinationMenu,
                                                    const QString& actionName,
                                                    const std::function<void(bool)>& handler,
                                                    const QKeySequence& shortcut = 0,
                                                    const bool checked = false,
                                                    int menuItemLocation = UNSPECIFIED_POSITION,
                                                    const QString& grouping = QString());

    void removeAction(MenuWrapper* menu, const QString& actionName);

public slots:
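A hypothetical call site for the new handler-based overload (the menu pointer and action name are placeholders; the FaceTracker statics come from the FaceTracker changes above):

    menu->addCheckableActionToQMenuAndActionHash(avatarMenu, "Mute Face Tracking",
        [](bool checked) { FaceTracker::setIsMuted(checked); },
        0, FaceTracker::isMuted());

The design point of the overload: instead of routing every checkable item through a central slot, the caller hands the QAction::triggered connection a lambda directly.
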
105
scripts/developer/tests/hipsControllerTest.js
Normal file

@@ -0,0 +1,105 @@
//
//  hipsControllerTest.js
//
//  Created by Anthony Thibault on 4/24/17
//  Copyright 2017 High Fidelity, Inc.
//
//  Test procedural manipulation of the Avatar hips via the controller system.
//  Pull the left and right triggers on your hand controllers; your hips should begin to gyrate in an amusing manner.
//
//  Distributed under the Apache License, Version 2.0.
//  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html

/* global Xform */
Script.include("/~/system/libraries/Xform.js");

var triggerPressHandled = false;
var rightTriggerPressed = false;
var leftTriggerPressed = false;

var MAPPING_NAME = "com.highfidelity.hipsIkTest";

var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from([Controller.Standard.RTClick]).peek().to(function (value) {
    rightTriggerPressed = (value !== 0) ? true : false;
});
mapping.from([Controller.Standard.LTClick]).peek().to(function (value) {
    leftTriggerPressed = (value !== 0) ? true : false;
});

Controller.enableMapping(MAPPING_NAME);

var CONTROLLER_MAPPING_NAME = "com.highfidelity.hipsIkTest.controller";
var controllerMapping;

var ZERO = {x: 0, y: 0, z: 0};
var X_AXIS = {x: 1, y: 0, z: 0};
var Y_AXIS = {x: 0, y: 1, z: 0};
var Y_180 = {x: 0, y: 1, z: 0, w: 0};
var Y_180_XFORM = new Xform(Y_180, {x: 0, y: 0, z: 0});

var hips = undefined;

function computeCurrentXform(jointIndex) {
    var currentXform = new Xform(MyAvatar.getAbsoluteJointRotationInObjectFrame(jointIndex),
                                 MyAvatar.getAbsoluteJointTranslationInObjectFrame(jointIndex));
    return currentXform;
}

function calibrate() {
    hips = computeCurrentXform(MyAvatar.getJointIndex("Hips"));
}

function circleOffset(radius, theta, normal) {
    var pos = {x: radius * Math.cos(theta), y: radius * Math.sin(theta), z: 0};
    var lookAtRot = Quat.lookAt(normal, ZERO, X_AXIS);
    return Vec3.multiplyQbyV(lookAtRot, pos);
}

var calibrationCount = 0;

function update(dt) {
    if (rightTriggerPressed && leftTriggerPressed) {
        if (!triggerPressHandled) {
            triggerPressHandled = true;
            if (controllerMapping) {
                hips = undefined;
                Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
                controllerMapping = undefined;
            } else {
                calibrate();
                calibrationCount++;
                controllerMapping = Controller.newMapping(CONTROLLER_MAPPING_NAME + calibrationCount);

                var n = Y_AXIS;
                var t = 0;
                if (hips) {
                    controllerMapping.from(function () {
                        t += (1 / 60) * 4;
                        return {
                            valid: true,
                            translation: Vec3.sum(hips.pos, circleOffset(0.1, t, n)),
                            rotation: hips.rot,
                            velocity: ZERO,
                            angularVelocity: ZERO
                        };
                    }).to(Controller.Standard.Hips);
                }
                Controller.enableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
            }
        }
    } else {
        triggerPressHandled = false;
    }
}

Script.update.connect(update);

Script.scriptEnding.connect(function () {
    Controller.disableMapping(MAPPING_NAME);
    if (controllerMapping) {
        Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
    }
    Script.update.disconnect(update);
});
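For reference, circleOffset in the script above traces a circle of radius \(r\) in the plane perpendicular to `normal`:

    \( \mathrm{offset}(\theta) = R_{lookAt}\,(r\cos\theta,\; r\sin\theta,\; 0)^{T} \)

where \(R_{lookAt}\) rotates the local XY plane to face along `normal`; update() advances \(\theta\) by roughly 4 radians per second (assuming ~60 Hz script updates), which drives the gyration.
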
@@ -11,19 +11,22 @@ var TRACKED_OBJECT_POSES = [
var triggerPressHandled = false;
var rightTriggerPressed = false;
var leftTriggerPressed = false;
var calibrationCount = 0;

var MAPPING_NAME = "com.highfidelity.viveMotionCapture";

var mapping = Controller.newMapping(MAPPING_NAME);
mapping.from([Controller.Standard.RTClick]).peek().to(function (value) {
var TRIGGER_MAPPING_NAME = "com.highfidelity.viveMotionCapture.triggers";
var triggerMapping = Controller.newMapping(TRIGGER_MAPPING_NAME);
triggerMapping.from([Controller.Standard.RTClick]).peek().to(function (value) {
    rightTriggerPressed = (value !== 0) ? true : false;
});
mapping.from([Controller.Standard.LTClick]).peek().to(function (value) {
triggerMapping.from([Controller.Standard.LTClick]).peek().to(function (value) {
    leftTriggerPressed = (value !== 0) ? true : false;
});
Controller.enableMapping(TRIGGER_MAPPING_NAME);

Controller.enableMapping(MAPPING_NAME);
var CONTROLLER_MAPPING_NAME = "com.highfidelity.viveMotionCapture.controller";
var controllerMapping;

var head;
var leftFoot;
var rightFoot;
var hips;

@@ -75,8 +78,29 @@ function computeDefaultToReferenceXform() {
    }
}

function computeHeadOffsetXform() {
    var leftEyeIndex = MyAvatar.getJointIndex("LeftEye");
    var rightEyeIndex = MyAvatar.getJointIndex("RightEye");
    var headIndex = MyAvatar.getJointIndex("Head");
    if (leftEyeIndex > 0 && rightEyeIndex > 0 && headIndex > 0) {
        var defaultHeadXform = new Xform(MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(headIndex),
                                         MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(headIndex));
        var defaultLeftEyeXform = new Xform(MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(leftEyeIndex),
                                            MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(leftEyeIndex));
        var defaultRightEyeXform = new Xform(MyAvatar.getAbsoluteDefaultJointRotationInObjectFrame(rightEyeIndex),
                                             MyAvatar.getAbsoluteDefaultJointTranslationInObjectFrame(rightEyeIndex));
        var defaultCenterEyePos = Vec3.multiply(0.5, Vec3.sum(defaultLeftEyeXform.pos, defaultRightEyeXform.pos));
        var defaultCenterEyeXform = new Xform(defaultLeftEyeXform.rot, defaultCenterEyePos);

        return Xform.mul(defaultCenterEyeXform.inv(), defaultHeadXform);
    } else {
        return undefined;
    }
}

function calibrate() {

    head = undefined;
    leftFoot = undefined;
    rightFoot = undefined;
    hips = undefined;

@@ -84,6 +108,13 @@ function calibrate() {

    var defaultToReferenceXform = computeDefaultToReferenceXform();

    var headOffsetXform = computeHeadOffsetXform();
    print("AJT: computed headOffsetXform " + (headOffsetXform ? JSON.stringify(headOffsetXform) : "undefined"));

    if (headOffsetXform) {
        head = { offsetXform: headOffsetXform };
    }

    var poses = [];
    if (Controller.Hardware.Vive) {
        TRACKED_OBJECT_POSES.forEach(function (key) {

@@ -92,7 +123,8 @@ function calibrate() {
            if (pose.valid) {
                poses.push({
                    channel: channel,
                    pose: pose
                    pose: pose,
                    latestPose: pose
                });
            }
        });
@@ -177,85 +209,91 @@ var ikTypes = {

var handlerId;

function computeIKTargetXform(jointInfo) {
    var pose = Controller.getPoseValue(jointInfo.channel);
function convertJointInfoToPose(jointInfo) {
    var latestPose = jointInfo.latestPose;
    var offsetXform = jointInfo.offsetXform;
    return Xform.mul(Y_180_XFORM, Xform.mul(new Xform(pose.rotation, pose.translation), offsetXform));
    var xform = Xform.mul(new Xform(latestPose.rotation, latestPose.translation), offsetXform);
    return {
        valid: true,
        translation: xform.pos,
        rotation: xform.rot,
        velocity: Vec3.sum(latestPose.velocity, Vec3.cross(latestPose.angularVelocity, Vec3.subtract(xform.pos, latestPose.translation))),
        angularVelocity: latestPose.angularVelocity
    };
}

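The velocity line in convertJointInfoToPose is the standard rigid-body velocity transport: for a joint offset from the tracker,

    \( v_{joint} = v_{tracker} + \omega_{tracker} \times (p_{joint} - p_{tracker}) \)

so the linear velocity reported for the joint accounts for the tracker's angular velocity acting across the calibration offset.
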
function update(dt) {
    if (rightTriggerPressed && leftTriggerPressed) {
        if (!triggerPressHandled) {
            triggerPressHandled = true;
            if (handlerId) {
                print("AJT: UN-CALIBRATE!");
            if (controllerMapping) {

                // go back to normal, vive pucks will be ignored.
                print("AJT: UN-CALIBRATE!");

                head = undefined;
                leftFoot = undefined;
                rightFoot = undefined;
                hips = undefined;
                spine2 = undefined;
                if (handlerId) {
                    print("AJT: un-hooking animation state handler");
                    MyAvatar.removeAnimationStateHandler(handlerId);
                    handlerId = undefined;
                }

                Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
                controllerMapping = undefined;

            } else {
                print("AJT: CALIBRATE!");
                calibrate();
                calibrationCount++;

                var animVars = [];
                controllerMapping = Controller.newMapping(CONTROLLER_MAPPING_NAME + calibrationCount);

                if (head) {
                    controllerMapping.from(function () {
                        var worldToAvatarXform = (new Xform(MyAvatar.orientation, MyAvatar.position)).inv();
                        head.latestPose = {
                            valid: true,
                            translation: worldToAvatarXform.xformPoint(HMD.position),
                            rotation: Quat.multiply(worldToAvatarXform.rot, Quat.multiply(HMD.orientation, Y_180)), // postMult 180 rot flips head direction
                            velocity: {x: 0, y: 0, z: 0}, // TODO: currently this is unused anyway...
                            angularVelocity: {x: 0, y: 0, z: 0}
                        };
                        return convertJointInfoToPose(head);
                    }).to(Controller.Standard.Head);
                }

                if (leftFoot) {
                    animVars.push("leftFootType");
                    animVars.push("leftFootPosition");
                    animVars.push("leftFootRotation");
                    controllerMapping.from(leftFoot.channel).to(function (pose) {
                        leftFoot.latestPose = pose;
                    });
                    controllerMapping.from(function () {
                        return convertJointInfoToPose(leftFoot);
                    }).to(Controller.Standard.LeftFoot);
                }
                if (rightFoot) {
                    animVars.push("rightFootType");
                    animVars.push("rightFootPosition");
                    animVars.push("rightFootRotation");
                    controllerMapping.from(rightFoot.channel).to(function (pose) {
                        rightFoot.latestPose = pose;
                    });
                    controllerMapping.from(function () {
                        return convertJointInfoToPose(rightFoot);
                    }).to(Controller.Standard.RightFoot);
                }
                if (hips) {
                    animVars.push("hipsType");
                    animVars.push("hipsPosition");
                    animVars.push("hipsRotation");
                    controllerMapping.from(hips.channel).to(function (pose) {
                        hips.latestPose = pose;
                    });
                    controllerMapping.from(function () {
                        return convertJointInfoToPose(hips);
                    }).to(Controller.Standard.Hips);
                }
                if (spine2) {
                    animVars.push("spine2Type");
                    animVars.push("spine2Position");
                    animVars.push("spine2Rotation");
                    controllerMapping.from(spine2.channel).to(function (pose) {
                        spine2.latestPose = pose;
                    });
                    controllerMapping.from(function () {
                        return convertJointInfoToPose(spine2);
                    }).to(Controller.Standard.Spine2);
                }

                // hook up new anim state handler that maps vive pucks to ik system.
                handlerId = MyAvatar.addAnimationStateHandler(function (props) {
                    var result = {}, xform;
                    if (rightFoot) {
                        xform = computeIKTargetXform(rightFoot);
                        result.rightFootType = ikTypes.RotationAndPosition;
                        result.rightFootPosition = xform.pos;
                        result.rightFootRotation = xform.rot;
                    }
                    if (leftFoot) {
                        xform = computeIKTargetXform(leftFoot);
                        result.leftFootType = ikTypes.RotationAndPosition;
                        result.leftFootPosition = xform.pos;
                        result.leftFootRotation = xform.rot;
                    }
                    if (hips) {
                        xform = computeIKTargetXform(hips);
                        result.hipsType = ikTypes.RotationAndPosition;
                        result.hipsPosition = xform.pos;
                        result.hipsRotation = xform.rot;
                    }
                    if (spine2) {
                        xform = computeIKTargetXform(spine2);
                        result.spine2Type = ikTypes.RotationAndPosition;
                        result.spine2Position = xform.pos;
                        result.spine2Rotation = xform.rot;
                    }
                    return result;
                }, animVars);
                Controller.enableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
            }
        }
    } else {

@@ -301,7 +339,10 @@ function update(dt) {
Script.update.connect(update);

Script.scriptEnding.connect(function () {
    Controller.disableMapping(MAPPING_NAME);
    Controller.disableMapping(TRIGGER_MAPPING_NAME);
    if (controllerMapping) {
        Controller.disableMapping(CONTROLLER_MAPPING_NAME + calibrationCount);
    }
    Script.update.disconnect(update);
});

@@ -114,6 +114,12 @@ int main(int argc, char** argv) {
        last = now;

        InputCalibrationData calibrationData = {
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4()

@@ -130,6 +136,12 @@ int main(int argc, char** argv) {

    {
        InputCalibrationData calibrationData = {
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4(),
            glm::mat4()