More work on Faceplus integration/cleaning up support for multiple face trackers.

parent 98011da1ef
commit 5b0e7f818d

11 changed files with 178 additions and 80 deletions
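Note: the core of this commit is a common FaceTracker base class (Faceshift, Visage, and the new Faceplus all derive from it) plus an Application::getActiveFaceTracker() accessor that returns whichever tracker is currently active, preferring Faceshift, then Faceplus, then Visage. The following is a minimal, self-contained sketch of that pattern, not the commit's code: the Qt/glm types, Menu options, and real SDK backends are stripped, the "Dummy" trackers are hypothetical, and isActive() is made virtual here purely for illustration (in the commit each subclass declares its own).

// Sketch only: mirrors the FaceTracker / getActiveFaceTracker() pattern.
#include <cstdio>

struct Vec3 { float x, y, z; };   // stand-in for glm::vec3

class FaceTracker {
public:
    virtual ~FaceTracker() {}
    virtual bool isActive() const = 0;   // per-subclass (non-virtual) in the actual commit
    const Vec3& getHeadTranslation() const { return _headTranslation; }
    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }

protected:
    Vec3 _headTranslation = {0.0f, 0.0f, 0.0f};
    float _estimatedEyePitch = 0.0f;
    float _estimatedEyeYaw = 0.0f;
};

// Hypothetical stand-ins for the Faceshift/Faceplus/Visage backends.
class DummyTrackerA : public FaceTracker {
public:
    bool isActive() const override { return false; }
};
class DummyTrackerB : public FaceTracker {
public:
    bool isActive() const override { return true; }
};

DummyTrackerA faceshiftLike;
DummyTrackerB faceplusLike;

// Same prioritized selection as Application::getActiveFaceTracker():
// the first active tracker wins; null if none is running.
FaceTracker* getActiveFaceTracker() {
    return faceshiftLike.isActive() ? static_cast<FaceTracker*>(&faceshiftLike) :
        (faceplusLike.isActive() ? static_cast<FaceTracker*>(&faceplusLike) : nullptr);
}

int main() {
    if (FaceTracker* tracker = getActiveFaceTracker()) {
        // Call sites no longer care which backend produced the data.
        std::printf("eye pitch %.1f, eye yaw %.1f\n",
                    tracker->getEstimatedEyePitch(), tracker->getEstimatedEyeYaw());
    }
    return 0;
}

The casts in the accessor exist because both branches of the conditional must share one type; returning the base pointer is what lets the look-at, camera-offset, and MyAvatar code below become backend-agnostic.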
interface/src/Application.cpp

@@ -1342,6 +1342,12 @@ glm::vec3 Application::getMouseVoxelWorldCoordinates(const VoxelDetail& mouseVoxel) {
         (mouseVoxel.z + mouseVoxel.s / 2.f) * TREE_SCALE);
 }
 
+FaceTracker* Application::getActiveFaceTracker() {
+    return _faceshift.isActive() ? static_cast<FaceTracker*>(&_faceshift) :
+        (_faceplus.isActive() ? static_cast<FaceTracker*>(&_faceplus) :
+            (_visage.isActive() ? static_cast<FaceTracker*>(&_visage) : NULL));
+}
+
 struct SendVoxelsOperationArgs {
     const unsigned char* newBaseOctCode;
 };

@@ -1561,7 +1567,8 @@ void Application::init() {
     }
     qDebug("Loaded settings");
 
-    // initialize Visage and Faceshift after loading the menu settings
+    // initialize our face trackers after loading the menu settings
+    _faceplus.init();
     _faceshift.init();
     _visage.init();
 

@@ -1682,6 +1689,15 @@ void Application::updateMouseRay() {
     _myAvatar->setMouseRay(_mouseRayOrigin, _mouseRayDirection);
 }
 
+void Application::updateFaceplus() {
+
+    bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
+    PerformanceWarning warn(showWarnings, "Application::updateFaceplus()");
+
+    // Update faceplus
+    _faceplus.update();
+}
+
 void Application::updateFaceshift() {
 
     bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);

@@ -1726,19 +1742,11 @@ void Application::updateMyAvatarLookAtPosition() {
             glm::distance(_mouseRayOrigin, _myAvatar->getHead()->calculateAverageEyePosition()));
         lookAtSpot = _mouseRayOrigin + _mouseRayDirection * qMax(minEyeDistance, distance);
     }
-    bool trackerActive = false;
-    float eyePitch, eyeYaw;
-    if (_faceshift.isActive()) {
-        eyePitch = _faceshift.getEstimatedEyePitch();
-        eyeYaw = _faceshift.getEstimatedEyeYaw();
-        trackerActive = true;
-
-    } else if (_visage.isActive()) {
-        eyePitch = _visage.getEstimatedEyePitch();
-        eyeYaw = _visage.getEstimatedEyeYaw();
-        trackerActive = true;
-    }
-    if (trackerActive) {
+    FaceTracker* tracker = getActiveFaceTracker();
+    if (tracker) {
+        float eyePitch = tracker->getEstimatedEyePitch();
+        float eyeYaw = tracker->getEstimatedEyeYaw();
+
         // deflect using Faceshift gaze data
         glm::vec3 origin = _myAvatar->getHead()->calculateAverageEyePosition();
         float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;

@@ -1824,15 +1832,15 @@ void Application::updateCamera(float deltaTime) {
     bool showWarnings = Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings);
     PerformanceWarning warn(showWarnings, "Application::updateCamera()");
 
-    if (!OculusManager::isConnected() && !TV3DManager::isConnected()) {
-        if (Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) {
-            float xSign = _myCamera.getMode() == CAMERA_MODE_MIRROR ? 1.0f : -1.0f;
-            if (_faceshift.isActive()) {
+    if (!OculusManager::isConnected() && !TV3DManager::isConnected() &&
+            Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) {
+        FaceTracker* tracker = getActiveFaceTracker();
+        if (tracker) {
             const float EYE_OFFSET_SCALE = 0.025f;
-            glm::vec3 position = _faceshift.getHeadTranslation() * EYE_OFFSET_SCALE;
-            _myCamera.setEyeOffsetPosition(glm::vec3(position.x * xSign, position.y, -position.z));
-            updateProjectionMatrix();
-            }
+            glm::vec3 position = tracker->getHeadTranslation() * EYE_OFFSET_SCALE;
+            float xSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? 1.0f : -1.0f;
+            _myCamera.setEyeOffsetPosition(glm::vec3(position.x * xSign, position.y, -position.z));
+            updateProjectionMatrix();
         }
     }
 }

@@ -1886,6 +1894,7 @@ void Application::update(float deltaTime) {
     // check what's under the mouse and update the mouse voxel
     updateMouseRay();
 
+    updateFaceplus();
     updateFaceshift();
     updateVisage();
     _myAvatar->updateLookAtTargetAvatar();

@@ -2936,6 +2945,7 @@ void Application::resetSensors() {
     _mouseX = _glWidget->width() / 2;
     _mouseY = _glWidget->height() / 2;
 
+    _faceplus.reset();
     _faceshift.reset();
     _visage.reset();
 
interface/src/Application.h

@@ -49,6 +49,7 @@
 #include "avatar/Avatar.h"
 #include "avatar/AvatarManager.h"
 #include "avatar/MyAvatar.h"
+#include "devices/Faceplus.h"
 #include "devices/Faceshift.h"
 #include "devices/SixenseManager.h"
 #include "devices/Visage.h"

@@ -173,8 +174,10 @@ public:
     bool isMouseHidden() const { return _mouseHidden; }
     const glm::vec3& getMouseRayOrigin() const { return _mouseRayOrigin; }
     const glm::vec3& getMouseRayDirection() const { return _mouseRayDirection; }
+    Faceplus* getFaceplus() { return &_faceplus; }
     Faceshift* getFaceshift() { return &_faceshift; }
     Visage* getVisage() { return &_visage; }
+    FaceTracker* getActiveFaceTracker();
     SixenseManager* getSixenseManager() { return &_sixenseManager; }
     BandwidthMeter* getBandwidthMeter() { return &_bandwidthMeter; }
     QUndoStack* getUndoStack() { return &_undoStack; }

@@ -313,6 +316,7 @@ private:
     // Various helper functions called during update()
     void updateLOD();
     void updateMouseRay();
+    void updateFaceplus();
     void updateFaceshift();
     void updateVisage();
     void updateMyAvatarLookAtPosition();

@@ -412,9 +416,10 @@ private:
     AvatarManager _avatarManager;
     MyAvatar* _myAvatar; // TODO: move this and relevant code to AvatarManager (or MyAvatar as the case may be)
 
+    Faceplus _faceplus;
     Faceshift _faceshift;
     Visage _visage;
 
     SixenseManager _sixenseManager;
 
     Camera _myCamera; // My view onto the world
interface/src/avatar/MyAvatar.cpp

@@ -309,24 +309,13 @@ void MyAvatar::simulate(float deltaTime) {
 
 // Update avatar head rotation with sensor data
 void MyAvatar::updateFromGyros(float deltaTime) {
-    Faceshift* faceshift = Application::getInstance()->getFaceshift();
-    Visage* visage = Application::getInstance()->getVisage();
     glm::vec3 estimatedPosition, estimatedRotation;
 
-    bool trackerActive = false;
-    if (faceshift->isActive()) {
-        estimatedPosition = faceshift->getHeadTranslation();
-        estimatedRotation = glm::degrees(safeEulerAngles(faceshift->getHeadRotation()));
-        trackerActive = true;
-
-    } else if (visage->isActive()) {
-        estimatedPosition = visage->getHeadTranslation();
-        estimatedRotation = glm::degrees(safeEulerAngles(visage->getHeadRotation()));
-        trackerActive = true;
-    }
-
-    Head* head = getHead();
-    if (trackerActive) {
+    FaceTracker* tracker = Application::getInstance()->getActiveFaceTracker();
+    if (tracker) {
+        estimatedPosition = tracker->getHeadTranslation();
+        estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));
+
         // Rotate the body if the head is turned beyond the screen
         if (Menu::getInstance()->isOptionChecked(MenuOption::TurnWithHead)) {
             const float TRACKER_YAW_TURN_SENSITIVITY = 0.5f;

@@ -341,13 +330,14 @@ void MyAvatar::updateFromGyros(float deltaTime) {
                 }
             }
         }
     }
 
     // Set the rotation of the avatar's head (as seen by others, not affecting view frustum)
     // to be scaled. Pitch is greater to emphasize nodding behavior / synchrony.
     const float AVATAR_HEAD_PITCH_MAGNIFY = 1.0f;
     const float AVATAR_HEAD_YAW_MAGNIFY = 1.0f;
     const float AVATAR_HEAD_ROLL_MAGNIFY = 1.0f;
+    Head* head = getHead();
     head->setDeltaPitch(estimatedRotation.x * AVATAR_HEAD_PITCH_MAGNIFY);
     head->setDeltaYaw(estimatedRotation.y * AVATAR_HEAD_YAW_MAGNIFY);
     head->setDeltaRoll(estimatedRotation.z * AVATAR_HEAD_ROLL_MAGNIFY);
interface/src/devices/FaceTracker.cpp (new file, 14 lines)

@@ -0,0 +1,14 @@
+//
+//  FaceTracker.cpp
+//  interface
+//
+//  Created by Andrzej Kapolka on 4/8/14.
+//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
+//
+
+#include "FaceTracker.h"
+
+FaceTracker::FaceTracker() :
+    _estimatedEyePitch(0.0f),
+    _estimatedEyeYaw(0.0f) {
+}
interface/src/devices/FaceTracker.h (new file, 43 lines)

@@ -0,0 +1,43 @@
+//
+//  FaceTracker.h
+//  interface
+//
+//  Created by Andrzej Kapolka on 4/8/14.
+//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
+//
+
+#ifndef __interface__FaceTracker__
+#define __interface__FaceTracker__
+
+#include <QObject>
+#include <QVector>
+
+#include <glm/glm.hpp>
+#include <glm/gtc/quaternion.hpp>
+
+/// Base class for face trackers (Faceshift, Visage, Faceplus).
+class FaceTracker : public QObject {
+    Q_OBJECT
+
+public:
+
+    FaceTracker();
+
+    const glm::vec3& getHeadTranslation() const { return _headTranslation; }
+    const glm::quat& getHeadRotation() const { return _headRotation; }
+
+    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
+    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }
+
+    const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
+
+protected:
+
+    glm::vec3 _headTranslation;
+    glm::quat _headRotation;
+    float _estimatedEyePitch;
+    float _estimatedEyeYaw;
+    QVector<float> _blendshapeCoefficients;
+};
+
+#endif /* defined(__interface__FaceTracker__) */
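Note: with the shared state (head translation/rotation, estimated eye angles, blendshape coefficients) living in FaceTracker's protected members, a concrete tracker only needs to write into those fields from its update path. The sketch below is a hypothetical subclass, not part of the commit; it skips the Q_OBJECT macro and the SDK plumbing the real Faceshift/Visage/Faceplus classes carry.

// Illustrative only: the contract a concrete tracker fulfils.
#include "FaceTracker.h"

class ExampleTracker : public FaceTracker {
public:
    ExampleTracker() : _active(false) {}

    bool isActive() const { return _active; }

    void update() {
        // A real tracker would poll its SDK here; this writes dummy values
        // into the members every consumer (look-at, camera, MyAvatar) reads.
        _headTranslation = glm::vec3(0.0f, 0.0f, 0.1f);
        _headRotation = glm::quat();   // identity
        _estimatedEyePitch = 2.0f;     // degrees
        _estimatedEyeYaw = -1.0f;
        _active = true;
    }

    void reset() { _active = false; }

private:
    bool _active;
};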
interface/src/devices/Faceplus.cpp (new file, 32 lines)

@@ -0,0 +1,32 @@
+//
+//  Faceplus.cpp
+//  interface
+//
+//  Created by Andrzej Kapolka on 4/8/14.
+//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
+//
+
+#ifdef HAVE_FACEPLUS
+#include <faceplus.h>
+#endif
+
+#include "Faceplus.h"
+
+Faceplus::Faceplus() :
+    _active(false) {
+}
+
+void Faceplus::init() {
+#ifdef HAVE_FACEPLUS
+    // these are ignored--any values will do
+    faceplus_log_in("username", "password");
+#endif
+}
+
+void Faceplus::update() {
+}
+
+void Faceplus::reset() {
+}
+
interface/src/devices/Faceplus.h (new file, 34 lines)

@@ -0,0 +1,34 @@
+//
+//  Faceplus.h
+//  interface
+//
+//  Created by Andrzej Kapolka on 4/8/14.
+//  Copyright (c) 2014 High Fidelity, Inc. All rights reserved.
+//
+
+#ifndef __interface__Faceplus__
+#define __interface__Faceplus__
+
+#include "FaceTracker.h"
+
+/// Interface for Mixamo FacePlus.
+class Faceplus : public FaceTracker {
+    Q_OBJECT
+
+public:
+
+    Faceplus();
+
+    void init();
+
+    bool isActive() const { return _active; }
+
+    void update();
+    void reset();
+
+private:
+
+    bool _active;
+};
+
+#endif /* defined(__interface__Faceplus__) */
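Note: Faceplus is still a stub at this point; everything SDK-specific sits behind HAVE_FACEPLUS, so the class compiles (and simply reports inactive) on machines without the Faceplus SDK. A generic sketch of that optional-dependency pattern follows; the header name, macro, and sdk_* call are placeholders, not a real SDK.

// Hypothetical example of the compile-time guard pattern used above.
#ifdef HAVE_SOME_SDK
#include <some_sdk.h>
#endif

class OptionalTracker {
public:
    OptionalTracker() : _active(false) {}

    void init() {
#ifdef HAVE_SOME_SDK
        _active = sdk_initialize();   // only attempted when the SDK was found at build time
#endif
    }

    // Without the SDK this stays false, so getActiveFaceTracker()-style
    // selection logic silently skips this backend.
    bool isActive() const { return _active; }

private:
    bool _active;
};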
interface/src/devices/Faceshift.cpp

@@ -42,9 +42,7 @@ Faceshift::Faceshift() :
     _jawOpenIndex(21),
     _longTermAverageEyePitch(0.0f),
     _longTermAverageEyeYaw(0.0f),
-    _longTermAverageInitialized(false),
-    _estimatedEyePitch(0.0f),
-    _estimatedEyeYaw(0.0f)
+    _longTermAverageInitialized(false)
 {
     connect(&_tcpSocket, SIGNAL(connected()), SLOT(noteConnected()));
     connect(&_tcpSocket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
interface/src/devices/Faceshift.h

@@ -17,8 +17,10 @@
 
 #include <fsbinarystream.h>
 
+#include "FaceTracker.h"
+
 /// Handles interaction with the Faceshift software, which provides head position/orientation and facial features.
-class Faceshift : public QObject {
+class Faceshift : public FaceTracker {
     Q_OBJECT
 
 public:

@@ -31,9 +33,7 @@ public:
 
     bool isActive() const;
 
-    const glm::quat& getHeadRotation() const { return _headRotation; }
     const glm::vec3& getHeadAngularVelocity() const { return _headAngularVelocity; }
-    const glm::vec3& getHeadTranslation() const { return _headTranslation; }
 
     // these pitch/yaw angles are in degrees
     float getEyeGazeLeftPitch() const { return _eyeGazeLeftPitch; }

@@ -42,11 +42,6 @@ public:
     float getEyeGazeRightPitch() const { return _eyeGazeRightPitch; }
     float getEyeGazeRightYaw() const { return _eyeGazeRightYaw; }
 
-    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
-    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }
-
-    const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
-
     float getLeftBlink() const { return getBlendshapeCoefficient(_leftBlinkIndex); }
     float getRightBlink() const { return getBlendshapeCoefficient(_rightBlinkIndex); }
     float getLeftEyeOpen() const { return getBlendshapeCoefficient(_leftEyeOpenIndex); }

@@ -99,9 +94,7 @@ private:
     bool _tracking;
     quint64 _lastTrackingStateReceived;
 
-    glm::quat _headRotation;
     glm::vec3 _headAngularVelocity;
-    glm::vec3 _headTranslation;
 
     // degrees
     float _eyeGazeLeftPitch;

@@ -109,8 +102,6 @@ private:
     float _eyeGazeRightPitch;
     float _eyeGazeRightYaw;
 
-    QVector<float> _blendshapeCoefficients;
-
     int _leftBlinkIndex;
     int _rightBlinkIndex;
     int _leftEyeOpenIndex;

@@ -132,10 +123,6 @@ private:
     float _longTermAverageEyePitch;
     float _longTermAverageEyeYaw;
     bool _longTermAverageInitialized;
-
-    // degrees
-    float _estimatedEyePitch;
-    float _estimatedEyeYaw;
 };
 
 #endif /* defined(__interface__Faceshift__) */
interface/src/devices/Visage.cpp

@@ -34,9 +34,7 @@ const glm::vec3 DEFAULT_HEAD_ORIGIN(0.0f, 0.0f, 0.7f);
 Visage::Visage() :
     _enabled(false),
     _active(false),
-    _headOrigin(DEFAULT_HEAD_ORIGIN),
-    _estimatedEyePitch(0.0f),
-    _estimatedEyeYaw(0.0f) {
+    _headOrigin(DEFAULT_HEAD_ORIGIN) {
 
 #ifdef HAVE_VISAGE
     QByteArray licensePath = Application::resourcesPath().toLatin1() + "visage/license.vlc";
interface/src/devices/Visage.h

@@ -16,13 +16,15 @@
 #include <glm/glm.hpp>
 #include <glm/gtc/quaternion.hpp>
 
+#include "FaceTracker.h"
+
 namespace VisageSDK {
     class VisageTracker2;
     struct FaceData;
 }
 
 /// Handles input from the Visage webcam feature tracking software.
-class Visage : public QObject {
+class Visage : public FaceTracker {
     Q_OBJECT
 
 public:

@@ -34,14 +36,6 @@ public:
 
     bool isActive() const { return _active; }
 
-    const glm::quat& getHeadRotation() const { return _headRotation; }
-    const glm::vec3& getHeadTranslation() const { return _headTranslation; }
-
-    float getEstimatedEyePitch() const { return _estimatedEyePitch; }
-    float getEstimatedEyeYaw() const { return _estimatedEyeYaw; }
-
-    const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
-
     void update();
     void reset();
 

@@ -61,15 +55,8 @@ private:
 
     bool _enabled;
     bool _active;
-    glm::quat _headRotation;
-    glm::vec3 _headTranslation;
-
     glm::vec3 _headOrigin;
 
-    float _estimatedEyePitch;
-    float _estimatedEyeYaw;
-
-    QVector<float> _blendshapeCoefficients;
 };
 
 #endif /* defined(__interface__Visage__) */