Mirror of https://github.com/overte-org/overte.git, synced 2025-04-19 15:03:53 +02:00

Commit 6c8291cc7d

11 changed files with 402 additions and 24 deletions
6  .gitignore (vendored)
@@ -39,5 +39,11 @@ interface/external/Leap/util/
 interface/external/Sixense/include/
 interface/external/Sixense/lib/
 
+# Ignore Visage
+interface/external/visage/dependencies/
+interface/external/visage/include/
+interface/external/visage/lib/
+interface/resources/visage/
+
 # Ignore interfaceCache for Linux users
 interface/interfaceCache/
76  cmake/modules/FindVisage.cmake (new file)
@@ -0,0 +1,76 @@
# Try to find the Visage controller library
#
# You must provide a VISAGE_ROOT_DIR which contains lib and include directories
#
# Once done this will define
#
# VISAGE_FOUND - system found Visage
# VISAGE_INCLUDE_DIRS - the Visage include directory
# VISAGE_LIBRARIES - Link this to use Visage
#
# Created on 2/11/2014 by Andrzej Kapolka
# Copyright (c) 2014 High Fidelity
#

if (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)
  # in cache already
  set(VISAGE_FOUND TRUE)
else (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)
  find_path(VISAGE_INCLUDE_DIR VisageTracker2.h ${VISAGE_ROOT_DIR}/include)

  if (APPLE)
    find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h /usr/include/libxml2)
    find_path(VISAGE_OPENCV_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include)
    find_path(VISAGE_OPENCV2_INCLUDE_DIR opencv.hpp ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/include/opencv2)
    if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
      set(VISAGE_INCLUDE_DIRS
        "${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}"
        CACHE INTERNAL "Visage include dirs")
    endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)

    find_library(VISAGE_CORE_LIBRARY libvscore.a ${VISAGE_ROOT_DIR}/lib)
    find_library(VISAGE_VISION_LIBRARY libvsvision.a ${VISAGE_ROOT_DIR}/lib)
    find_library(VISAGE_OPENCV_LIBRARY libOpenCV.a ${VISAGE_ROOT_DIR}/dependencies/OpenCV_MacOSX/lib)
    if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
      set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}"
        CACHE INTERNAL "Visage libraries")
    endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)

  elseif (WIN32)
    find_path(VISAGE_XML_INCLUDE_DIR libxml/xmlreader.h ${VISAGE_ROOT_DIR}/dependencies/libxml2/include)
    find_path(VISAGE_OPENCV_INCLUDE_DIR opencv/cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include)
    find_path(VISAGE_OPENCV2_INCLUDE_DIR cv.h ${VISAGE_ROOT_DIR}/dependencies/OpenCV/include/opencv)
    if (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)
      set(VISAGE_INCLUDE_DIRS
        "${VISAGE_INCLUDE_DIR};${VISAGE_XML_INCLUDE_DIR};${VISAGE_OPENCV_INCLUDE_DIR};${VISAGE_OPENCV2_INCLUDE_DIR}"
        CACHE INTERNAL "Visage include dirs")
    endif (VISAGE_INCLUDE_DIR AND VISAGE_XML_INCLUDE_DIR AND VISAGE_OPENCV_INCLUDE_DIR AND VISAGE_OPENCV2_INCLUDE_DIR)

    find_library(VISAGE_CORE_LIBRARY vscore.lib ${VISAGE_ROOT_DIR}/lib)
    find_library(VISAGE_VISION_LIBRARY vsvision.lib ${VISAGE_ROOT_DIR}/lib)
    find_library(VISAGE_OPENCV_LIBRARY opencv_core243.lib ${VISAGE_ROOT_DIR}/dependencies/OpenCV/lib)
    if (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)
      set(VISAGE_LIBRARIES "${VISAGE_CORE_LIBRARY};${VISAGE_VISION_LIBRARY};${VISAGE_OPENCV_LIBRARY}"
        CACHE INTERNAL "Visage libraries")
    endif (VISAGE_CORE_LIBRARY AND VISAGE_VISION_LIBRARY AND VISAGE_OPENCV_LIBRARY)

  endif ()

  if (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES)
    set(VISAGE_FOUND TRUE)
  endif (VISAGE_INCLUDE_DIRS AND VISAGE_LIBRARIES)

  if (VISAGE_FOUND)
    if (NOT VISAGE_FIND_QUIETLY)
      message(STATUS "Found Visage: ${VISAGE_LIBRARIES}")
    endif (NOT VISAGE_FIND_QUIETLY)
  else (VISAGE_FOUND)
    if (VISAGE_FIND_REQUIRED)
      message(FATAL_ERROR "Could not find Visage")
    endif (VISAGE_FIND_REQUIRED)
  endif (VISAGE_FOUND)

  # show the VISAGE_INCLUDE_DIRS and VISAGE_LIBRARIES variables only in the advanced view
  mark_as_advanced(VISAGE_INCLUDE_DIRS VISAGE_LIBRARIES)

endif (VISAGE_LIBRARIES AND VISAGE_INCLUDE_DIRS)
@@ -11,6 +11,7 @@ set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} "${CMAKE_CURRENT_SOURCE_DIR}/../cmake
 set(FACESHIFT_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/faceshift)
 set(LIBOVR_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/LibOVR)
 set(SIXENSE_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/Sixense)
+set(VISAGE_ROOT_DIR ${CMAKE_CURRENT_SOURCE_DIR}/external/visage)
 
 if (DEFINED ENV{JOB_ID})
     set(BUILD_SEQ $ENV{JOB_ID})

@@ -138,9 +139,10 @@ find_package(Faceshift)
 find_package(GLM REQUIRED)
 find_package(LibOVR)
 find_package(Sixense)
+find_package(Visage)
 find_package(ZLIB)
 
-# likewise with Sixense library for Razer Hydra
+# include the Sixense library for Razer Hydra if available
 if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
     add_definitions(-DHAVE_SIXENSE)
     include_directories(SYSTEM ${SIXENSE_INCLUDE_DIRS})

@@ -150,6 +152,21 @@ if (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
     target_link_libraries(${TARGET_NAME} ${SIXENSE_LIBRARIES})
 endif (SIXENSE_FOUND AND NOT DISABLE_SIXENSE)
 
+# likewise with Visage library for webcam feature tracking
+if (VISAGE_FOUND AND NOT DISABLE_VISAGE)
+    add_definitions(-DHAVE_VISAGE -DVISAGE_STATIC)
+    include_directories(SYSTEM ${VISAGE_INCLUDE_DIRS})
+    if (APPLE)
+        add_definitions(-DMAC_OS_X)
+        SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-comment -isystem ${VISAGE_INCLUDE_DIRS}")
+        find_library(AVFoundation AVFoundation)
+        find_library(CoreMedia CoreMedia)
+        find_library(NEW_STD_LIBRARY libc++abi.dylib /usr/lib/)
+        target_link_libraries(${TARGET_NAME} ${AVFoundation} ${CoreMedia} ${NEW_STD_LIBRARY})
+    endif (APPLE)
+    target_link_libraries(${TARGET_NAME} ${VISAGE_LIBRARIES})
+endif (VISAGE_FOUND AND NOT DISABLE_VISAGE)
+
 # and with LibOVR for Oculus Rift
 if (LIBOVR_FOUND AND NOT DISABLE_LIBOVR)
     add_definitions(-DHAVE_LIBOVR)
14  interface/external/visage/readme.txt (vendored, new file)

@@ -0,0 +1,14 @@

Instructions for adding the Visage driver to Interface
Andrzej Kapolka, February 11, 2014

1. Copy the Visage sdk folders (lib, include, dependencies) into the interface/external/visage folder.
   This readme.txt should be there as well.

2. Copy the Visage configuration data folder (visageSDK-MacOS/Samples/MacOSX/data/) to interface/resources/visage
   (i.e., so that interface/resources/visage/candide3.wfm is accessible)

3. Copy the Visage license file to interface/resources/visage/license.vlc.

4. Delete your build directory, run cmake and build, and you should be all set.
@@ -2041,6 +2041,15 @@ void Application::updateFaceshift() {
     }
 }
 
+void Application::updateVisage() {
+
+    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
+    PerformanceWarning warn(showWarnings, "Application::updateVisage()");
+
+    // Update Visage
+    _visage.update();
+}
+
 void Application::updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot) {
 
     bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
@@ -2061,13 +2070,25 @@ void Application::updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot) {
         }
         lookAtSpot = _mouseRayOrigin + _mouseRayDirection * distance;
     }
+    bool trackerActive = false;
+    float eyePitch, eyeYaw;
     if (_faceshift.isActive()) {
+        eyePitch = _faceshift.getEstimatedEyePitch();
+        eyeYaw = _faceshift.getEstimatedEyeYaw();
+        trackerActive = true;
+
+    } else if (_visage.isActive()) {
+        eyePitch = _visage.getEstimatedEyePitch();
+        eyeYaw = _visage.getEstimatedEyeYaw();
+        trackerActive = true;
+    }
+    if (trackerActive) {
         // deflect using Faceshift gaze data
         glm::vec3 origin = _myAvatar->getHead().calculateAverageEyePosition();
         float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
         float deflection = Menu::getInstance()->getFaceshiftEyeDeflection();
         lookAtSpot = origin + _myCamera.getRotation() * glm::quat(glm::radians(glm::vec3(
-            _faceshift.getEstimatedEyePitch() * pitchSign * deflection, _faceshift.getEstimatedEyeYaw() * deflection, 0.0f))) *
+            eyePitch * pitchSign * deflection, eyeYaw * deflection, 0.0f))) *
             glm::inverse(_myCamera.getRotation()) * (lookAtSpot - origin);
     }
     _myAvatar->getHead().setLookAtPosition(lookAtSpot);
@@ -2318,6 +2339,7 @@ void Application::update(float deltaTime) {
         glm::vec3 lookAtSpot;
 
         updateFaceshift();
+        updateVisage();
         _myAvatar->updateLookAtTargetAvatar(lookAtSpot);
         updateMyAvatarLookAtPosition(lookAtSpot);
 

@@ -3821,6 +3843,7 @@ void Application::resetSensors() {
     _mouseY = _glWidget->height() / 2;
 
     _faceshift.reset();
+    _visage.reset();
 
     if (OculusManager::isConnected()) {
         OculusManager::reset();
@@ -54,6 +54,7 @@
 #include "avatar/Profile.h"
 #include "devices/Faceshift.h"
 #include "devices/SixenseManager.h"
+#include "devices/Visage.h"
 #include "renderer/AmbientOcclusionEffect.h"
 #include "renderer/GeometryCache.h"
 #include "renderer/GlowEffect.h"

@@ -160,6 +161,7 @@ public:
     const glm::vec3& getMouseRayOrigin() const { return _mouseRayOrigin; }
     const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
     Faceshift* getFaceshift() { return &_faceshift; }
+    Visage* getVisage() { return &_visage; }
     SixenseManager* getSixenseManager() { return &_sixenseManager; }
     BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; }
     QSettings* getSettings() { return _settings; }

@@ -283,6 +285,7 @@ private:
     // Various helper functions called during update()
     void updateMouseRay();
     void updateFaceshift();
+    void updateVisage();
     void updateMyAvatarLookAtPosition(glm::vec3& lookAtSpot);
     void updateHoverVoxels(float deltaTime, float& distance, BoxFace& face);
     void updateMouseVoxels(float deltaTime, float& distance, BoxFace& face);

@@ -382,6 +385,7 @@ private:
     Profile _profile; // The data-server linked profile for this user
 
     Faceshift _faceshift;
+    Visage _visage;
 
     SixenseManager _sixenseManager;
     QStringList _activeScripts;
@@ -62,24 +62,20 @@ void Head::simulate(float deltaTime, bool isMine) {
 
     // Update audio trailing average for rendering facial animations
     Faceshift* faceshift = Application::getInstance()->getFaceshift();
+    Visage* visage = Application::getInstance()->getVisage();
     if (isMine) {
-        _isFaceshiftConnected = faceshift->isActive();
+        _isFaceshiftConnected = false;
+        if (faceshift->isActive()) {
+            _blendshapeCoefficients = faceshift->getBlendshapeCoefficients();
+            _isFaceshiftConnected = true;
+
+        } else if (visage->isActive()) {
+            _blendshapeCoefficients = visage->getBlendshapeCoefficients();
+            _isFaceshiftConnected = true;
+        }
     }
 
-    if (isMine && faceshift->isActive()) {
-        const float EYE_OPEN_SCALE = 0.5f;
-        _leftEyeBlink = faceshift->getLeftBlink() - EYE_OPEN_SCALE * faceshift->getLeftEyeOpen();
-        _rightEyeBlink = faceshift->getRightBlink() - EYE_OPEN_SCALE * faceshift->getRightEyeOpen();
-
-        // set these values based on how they'll be used. if we use faceshift in the long term, we'll want a complete
-        // mapping between their blendshape coefficients and our avatar features
-        const float MOUTH_SIZE_SCALE = 2500.0f;
-        _averageLoudness = faceshift->getMouthSize() * faceshift->getMouthSize() * MOUTH_SIZE_SCALE;
-        const float BROW_HEIGHT_SCALE = 0.005f;
-        _browAudioLift = faceshift->getBrowUpCenter() * BROW_HEIGHT_SCALE;
-        _blendshapeCoefficients = faceshift->getBlendshapeCoefficients();
-
-    } else if (!_isFaceshiftConnected) {
+    if (!_isFaceshiftConnected) {
         // Update eye saccades
         const float AVERAGE_MICROSACCADE_INTERVAL = 0.50f;
         const float AVERAGE_SACCADE_INTERVAL = 4.0f;
@@ -337,22 +337,32 @@ const float MAX_PITCH = 90.0f;
 // Update avatar head rotation with sensor data
 void MyAvatar::updateFromGyros(float deltaTime) {
     Faceshift* faceshift = Application::getInstance()->getFaceshift();
+    Visage* visage = Application::getInstance()->getVisage();
     glm::vec3 estimatedPosition, estimatedRotation;
 
+    bool trackerActive = false;
     if (faceshift->isActive()) {
         estimatedPosition = faceshift->getHeadTranslation();
         estimatedRotation = safeEulerAngles(faceshift->getHeadRotation());
+        trackerActive = true;
+
+    } else if (visage->isActive()) {
+        estimatedPosition = visage->getHeadTranslation();
+        estimatedRotation = safeEulerAngles(visage->getHeadRotation());
+        trackerActive = true;
+    }
+    if (trackerActive) {
         // Rotate the body if the head is turned beyond the screen
         if (Menu::getInstance()->isOptionChecked(MenuOption::TurnWithHead)) {
-            const float FACESHIFT_YAW_TURN_SENSITIVITY = 0.5f;
-            const float FACESHIFT_MIN_YAW_TURN = 15.f;
-            const float FACESHIFT_MAX_YAW_TURN = 50.f;
-            if ( (fabs(estimatedRotation.y) > FACESHIFT_MIN_YAW_TURN) &&
-                 (fabs(estimatedRotation.y) < FACESHIFT_MAX_YAW_TURN) ) {
+            const float TRACKER_YAW_TURN_SENSITIVITY = 0.5f;
+            const float TRACKER_MIN_YAW_TURN = 15.f;
+            const float TRACKER_MAX_YAW_TURN = 50.f;
+            if ( (fabs(estimatedRotation.y) > TRACKER_MIN_YAW_TURN) &&
+                 (fabs(estimatedRotation.y) < TRACKER_MAX_YAW_TURN) ) {
                 if (estimatedRotation.y > 0.f) {
-                    _bodyYawDelta += (estimatedRotation.y - FACESHIFT_MIN_YAW_TURN) * FACESHIFT_YAW_TURN_SENSITIVITY;
+                    _bodyYawDelta += (estimatedRotation.y - TRACKER_MIN_YAW_TURN) * TRACKER_YAW_TURN_SENSITIVITY;
                 } else {
-                    _bodyYawDelta += (estimatedRotation.y + FACESHIFT_MIN_YAW_TURN) * FACESHIFT_YAW_TURN_SENSITIVITY;
+                    _bodyYawDelta += (estimatedRotation.y + TRACKER_MIN_YAW_TURN) * TRACKER_YAW_TURN_SENSITIVITY;
                 }
             }
         }
164  interface/src/devices/Visage.cpp (new file)

@@ -0,0 +1,164 @@
//
// Visage.cpp
// interface
//
// Created by Andrzej Kapolka on 2/11/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

#include <QHash>

#include <SharedUtil.h>

#ifdef HAVE_VISAGE
#include <VisageTracker2.h>
#endif

#include "Visage.h"
#include "renderer/FBXReader.h"

namespace VisageSDK {
#ifdef WIN32
    void __declspec(dllimport) initializeLicenseManager(char* licenseKeyFileName);
#else
    void initializeLicenseManager(char* licenseKeyFileName);
#endif
}

using namespace VisageSDK;

const glm::vec3 DEFAULT_HEAD_ORIGIN(0.0f, 0.0f, 0.7f);

Visage::Visage() :
    _active(false),
    _headOrigin(DEFAULT_HEAD_ORIGIN),
    _estimatedEyePitch(0.0f),
    _estimatedEyeYaw(0.0f),
    _leftInnerBrowIndex(0),
    _rightInnerBrowIndex(0) {

#ifdef HAVE_VISAGE
    switchToResourcesParentIfRequired();
    QByteArray licensePath = "resources/visage/license.vlc";
    initializeLicenseManager(licensePath.data());
    _tracker = new VisageTracker2("resources/visage/Facial Features Tracker - Asymmetric.cfg");
    if (_tracker->trackFromCam()) {
        _data = new FaceData();

    } else {
        delete _tracker;
        _tracker = NULL;
    }
#endif
}

Visage::~Visage() {
#ifdef HAVE_VISAGE
    if (_tracker) {
        _tracker->stop();
        delete _tracker;
        delete _data;
    }
#endif
}

static int leftEyeBlinkIndex = 0;
static int rightEyeBlinkIndex = 1;
static int centerBrowIndex = 16;

static QHash<QByteArray, int> createBlendshapeIndices() {
    QHash<QByteArray, QByteArray> blendshapeMap;
    blendshapeMap.insert("Sneer", "au_nose_wrinkler");
    blendshapeMap.insert("JawFwd", "au_jaw_z_push");
    blendshapeMap.insert("JawLeft", "au_jaw_x_push");
    blendshapeMap.insert("JawOpen", "au_jaw_drop");
    blendshapeMap.insert("LipsLowerDown", "au_lower_lip_drop");
    blendshapeMap.insert("LipsUpperUp", "au_upper_lip_raiser");
    blendshapeMap.insert("LipsStretch_L", "au_lip_stretcher_left");
    blendshapeMap.insert("BrowsU_L", "au_left_outer_brow_raiser");
    blendshapeMap.insert("BrowsU_C", "au_left_inner_brow_raiser");
    blendshapeMap.insert("BrowsD_L", "au_left_brow_lowerer");
    blendshapeMap.insert("LipsStretch_R", "au_lip_stretcher_right");
    blendshapeMap.insert("BrowsU_R", "au_right_outer_brow_raiser");
    blendshapeMap.insert("BrowsU_C", "au_right_inner_brow_raiser");
    blendshapeMap.insert("BrowsD_R", "au_right_brow_lowerer");

    QHash<QByteArray, int> blendshapeIndices;
    for (int i = 0;; i++) {
        QByteArray blendshape = FACESHIFT_BLENDSHAPES[i];
        if (blendshape.isEmpty()) {
            break;
        }
        if (blendshape == "EyeBlink_L") {
            leftEyeBlinkIndex = i;

        } else if (blendshape == "EyeBlink_R") {
            rightEyeBlinkIndex = i;

        } else if (blendshape == "BrowsU_C") {
            centerBrowIndex = i;
        }
        QByteArray mapping = blendshapeMap.value(blendshape);
        if (!mapping.isEmpty()) {
            blendshapeIndices.insert(mapping, i + 1);
        }
    }

    return blendshapeIndices;
}

static const QHash<QByteArray, int>& getBlendshapeIndices() {
    static QHash<QByteArray, int> blendshapeIndices = createBlendshapeIndices();
    return blendshapeIndices;
}

const float TRANSLATION_SCALE = 20.0f;

void Visage::update() {
#ifdef HAVE_VISAGE
    _active = (_tracker && _tracker->getTrackingData(_data) == TRACK_STAT_OK);
    if (!_active) {
        return;
    }
    _headRotation = glm::quat(glm::vec3(-_data->faceRotation[0], -_data->faceRotation[1], _data->faceRotation[2]));
    _headTranslation = (glm::vec3(_data->faceTranslation[0], _data->faceTranslation[1], _data->faceTranslation[2]) -
        _headOrigin) * TRANSLATION_SCALE;
    _estimatedEyePitch = glm::degrees(-_data->gazeDirection[1]);
    _estimatedEyeYaw = glm::degrees(-_data->gazeDirection[0]);

    if (_blendshapeIndices.isEmpty()) {
        _blendshapeIndices.resize(_data->actionUnitCount);
        int maxIndex = -1;
        for (int i = 0; i < _data->actionUnitCount; i++) {
            QByteArray name = _data->actionUnitsNames[i];
            if (name == "au_left_inner_brow_raiser") {
                _leftInnerBrowIndex = i;
            } else if (name == "au_right_inner_brow_raiser") {
                _rightInnerBrowIndex = i;
            }
            int index = getBlendshapeIndices().value(name) - 1;
            maxIndex = qMax(maxIndex, _blendshapeIndices[i] = index);
        }
        _blendshapeCoefficients.resize(maxIndex + 1);
    }

    qFill(_blendshapeCoefficients.begin(), _blendshapeCoefficients.end(), 0.0f);
    for (int i = 0; i < _data->actionUnitCount; i++) {
        if (!_data->actionUnitsUsed[i]) {
            continue;
        }
        int index = _blendshapeIndices.at(i);
        if (index != -1) {
            _blendshapeCoefficients[index] = _data->actionUnits[i];
        }
    }
    _blendshapeCoefficients[leftEyeBlinkIndex] = 1.0f - _data->eyeClosure[1];
    _blendshapeCoefficients[rightEyeBlinkIndex] = 1.0f - _data->eyeClosure[0];
    _blendshapeCoefficients[centerBrowIndex] = (_data->actionUnits[_leftInnerBrowIndex] +
        _data->actionUnits[_rightInnerBrowIndex]) * 0.5f;
#endif
}

void Visage::reset() {
    _headOrigin += _headTranslation / TRANSLATION_SCALE;
}
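In createBlendshapeIndices above, each index is stored as i + 1 and the lookup subtracts 1, so QHash's default value of 0 for an absent key naturally reads back as -1, meaning no mapping. A small self-contained illustration of that convention (a sketch, not code from the commit; the key names are arbitrary):

    #include <QByteArray>
    #include <QHash>

    int main() {
        QHash<QByteArray, int> indices;
        indices.insert("au_jaw_drop", 5 + 1);                 // blendshape index 5, stored biased by one

        int mapped = indices.value("au_jaw_drop") - 1;        // 5: key present
        int missing = indices.value("au_no_such_unit") - 1;   // -1: QHash returns 0 for absent keys
        return (mapped == 5 && missing == -1) ? 0 : 1;        // exits 0 if the convention holds
    }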
65  interface/src/devices/Visage.h (new file)

@@ -0,0 +1,65 @@
//
// Visage.h
// interface
//
// Created by Andrzej Kapolka on 2/11/14.
// Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
//

#ifndef __interface__Visage__
#define __interface__Visage__

#include <vector>

#include <QVector>

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

namespace VisageSDK {
    class VisageTracker2;
    struct FaceData;
}

/// Handles input from the Visage webcam feature tracking software.
class Visage {
public:

    Visage();
    ~Visage();

    bool isActive() const { return _active; }

    const glm::quat& getHeadRotation() const { return _headRotation; }
    const glm::vec3& getHeadTranslation() const { return _headTranslation; }

    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }

    const std::vector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }

    void update();
    void reset();

private:

    VisageSDK::VisageTracker2* _tracker;
    VisageSDK::FaceData* _data;

    bool _active;
    glm::quat _headRotation;
    glm::vec3 _headTranslation;

    glm::vec3 _headOrigin;

    float _estimatedEyePitch;
    float _estimatedEyeYaw;

    std::vector<float> _blendshapeCoefficients;

    QVector<int> _blendshapeIndices;
    int _leftInnerBrowIndex;
    int _rightInnerBrowIndex;
};

#endif /* defined(__interface__Visage__) */
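Taken together with the Application, Head, and MyAvatar changes above, the intended per-frame use of this class is: call update(), check isActive(), then read the head pose, gaze angles, and blendshape coefficients. A rough consumer sketch (pollVisage is a hypothetical driver function; only the Visage members come from the header above):

    #include <vector>

    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    #include "devices/Visage.h"

    // Hypothetical per-frame poll, mirroring how this commit wires the tracker
    // into Application::update() and the avatar classes.
    void pollVisage(Visage& visage) {
        visage.update();                 // pull the latest tracking data
        if (!visage.isActive()) {
            return;                      // no face tracked this frame
        }
        glm::quat headRotation = visage.getHeadRotation();
        glm::vec3 headTranslation = visage.getHeadTranslation();  // relative to the calibrated head origin
        float eyePitch = visage.getEstimatedEyePitch();           // degrees
        float eyeYaw = visage.getEstimatedEyeYaw();               // degrees
        const std::vector<float>& blendshapes = visage.getBlendshapeCoefficients();
        // feed the pose into the avatar, the gaze angles into look-at deflection,
        // and the coefficients into the face blendshapes (see the hunks above)
    }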
@@ -21,6 +21,9 @@ class FBXNode;
 
 typedef QList<FBXNode> FBXNodeList;
 
+/// The names of the blendshapes expected by Faceshift, terminated with an empty string.
+extern const char* FACESHIFT_BLENDSHAPES[];
+
 /// A node within an FBX document.
 class FBXNode {
 public:
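Because the list is terminated with an empty string, consumers can scan it without a separate length constant, which is how Visage.cpp above builds its index map. A tiny sketch of that pattern (findBlendshapeIndex is an illustrative helper, not part of the commit):

    #include <cstring>

    extern const char* FACESHIFT_BLENDSHAPES[];

    // Return the index of a named blendshape, or -1 if it is not in the list,
    // relying on the empty-string sentinel documented above.
    int findBlendshapeIndex(const char* name) {
        for (int i = 0; FACESHIFT_BLENDSHAPES[i][0] != '\0'; i++) {
            if (std::strcmp(FACESHIFT_BLENDSHAPES[i], name) == 0) {
                return i;
            }
        }
        return -1;
    }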