Add "Mute Face Tracking" menu item

This commit is contained in:
David Rowe 2015-04-29 21:23:47 -07:00
parent dc9e5cad24
commit d053379831
12 changed files with 87 additions and 22 deletions

View file

@ -585,6 +585,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// The offscreen UI needs to intercept the mouse and keyboard // The offscreen UI needs to intercept the mouse and keyboard
// events coming from the onscreen window // events coming from the onscreen window
_glWidget->installEventFilter(DependencyManager::get<OffscreenUi>().data()); _glWidget->installEventFilter(DependencyManager::get<OffscreenUi>().data());
// initialize our face trackers after loading the menu settings
auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->init();
connect(faceshiftTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
ddeTracker->init();
connect(ddeTracker.data(), &FaceTracker::muteToggled, this, &Application::faceTrackerMuteToggled);
} }
@ -911,6 +919,12 @@ void Application::audioMuteToggled() {
muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted()); muteAction->setChecked(DependencyManager::get<AudioClient>()->isMuted());
} }
void Application::faceTrackerMuteToggled() {
QAction* muteAction = Menu::getInstance()->getActionForOption(MenuOption::MuteFaceTracking);
Q_CHECK_PTR(muteAction);
muteAction->setChecked(getActiveFaceTracker()->isMuted());
}
void Application::aboutApp() { void Application::aboutApp() {
InfoView::forcedShow(INFO_HELP_PATH); InfoView::forcedShow(INFO_HELP_PATH);
} }
@ -1889,17 +1903,29 @@ FaceTracker* Application::getActiveFaceTracker() {
} }
void Application::setActiveFaceTracker() { void Application::setActiveFaceTracker() {
bool isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
#ifdef HAVE_FACESHIFT #ifdef HAVE_FACESHIFT
DependencyManager::get<Faceshift>()->setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)); auto faceshiftTracker = DependencyManager::get<Faceshift>();
faceshiftTracker->setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
faceshiftTracker->setIsMuted(isMuted);
#endif #endif
#ifdef HAVE_DDE #ifdef HAVE_DDE
bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera); bool isUsingDDE = Menu::getInstance()->isOptionChecked(MenuOption::UseCamera);
Menu::getInstance()->getActionForOption(MenuOption::UseAudioForMouth)->setVisible(isUsingDDE); Menu::getInstance()->getActionForOption(MenuOption::UseAudioForMouth)->setVisible(isUsingDDE);
Menu::getInstance()->getActionForOption(MenuOption::VelocityFilter)->setVisible(isUsingDDE); Menu::getInstance()->getActionForOption(MenuOption::VelocityFilter)->setVisible(isUsingDDE);
DependencyManager::get<DdeFaceTracker>()->setEnabled(isUsingDDE); auto ddeTracker = DependencyManager::get<DdeFaceTracker>();
ddeTracker->setEnabled(isUsingDDE);
ddeTracker->setIsMuted(isMuted);
#endif #endif
} }
void Application::toggleFaceTrackerMute() {
FaceTracker* faceTracker = getActiveFaceTracker();
if (faceTracker) {
faceTracker->toggleMute();
}
}
bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) { bool Application::exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs) {
QVector<EntityItem*> entities; QVector<EntityItem*> entities;
@ -2068,10 +2094,6 @@ void Application::init() {
SixenseManager::getInstance().toggleSixense(true); SixenseManager::getInstance().toggleSixense(true);
#endif #endif
// initialize our face trackers after loading the menu settings
DependencyManager::get<Faceshift>()->init();
DependencyManager::get<DdeFaceTracker>()->init();
Leapmotion::init(); Leapmotion::init();
RealSense::init(); RealSense::init();
@ -2209,7 +2231,7 @@ void Application::updateMyAvatarLookAtPosition() {
isLookingAtSomeone = true; isLookingAtSomeone = true;
// If I am looking at someone else, look directly at one of their eyes // If I am looking at someone else, look directly at one of their eyes
if (tracker) { if (tracker && !tracker->isMuted()) {
// If a face tracker is active, look at the eye for the side my gaze is biased toward // If a face tracker is active, look at the eye for the side my gaze is biased toward
if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) { if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
// Look at their right eye // Look at their right eye
@ -2235,7 +2257,7 @@ void Application::updateMyAvatarLookAtPosition() {
// //
// Deflect the eyes a bit to match the detected Gaze from 3D camera if active // Deflect the eyes a bit to match the detected Gaze from 3D camera if active
// //
if (tracker) { if (tracker && !tracker->isMuted()) {
float eyePitch = tracker->getEstimatedEyePitch(); float eyePitch = tracker->getEstimatedEyePitch();
float eyeYaw = tracker->getEstimatedEyeYaw(); float eyeYaw = tracker->getEstimatedEyeYaw();
const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f; const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
@ -2290,7 +2312,7 @@ void Application::updateCamera(float deltaTime) {
if (!OculusManager::isConnected() && !TV3DManager::isConnected() && if (!OculusManager::isConnected() && !TV3DManager::isConnected() &&
Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) { Menu::getInstance()->isOptionChecked(MenuOption::OffAxisProjection)) {
FaceTracker* tracker = getActiveFaceTracker(); FaceTracker* tracker = getActiveFaceTracker();
if (tracker) { if (tracker && !tracker->isMuted()) {
const float EYE_OFFSET_SCALE = 0.025f; const float EYE_OFFSET_SCALE = 0.025f;
glm::vec3 position = tracker->getHeadTranslation() * EYE_OFFSET_SCALE; glm::vec3 position = tracker->getHeadTranslation() * EYE_OFFSET_SCALE;
float xSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? 1.0f : -1.0f; float xSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? 1.0f : -1.0f;
@ -2353,7 +2375,7 @@ void Application::update(float deltaTime) {
PerformanceTimer perfTimer("devices"); PerformanceTimer perfTimer("devices");
DeviceTracker::updateAll(); DeviceTracker::updateAll();
FaceTracker* tracker = getActiveFaceTracker(); FaceTracker* tracker = getActiveFaceTracker();
if (tracker) { if (tracker && !tracker->isMuted()) {
tracker->update(deltaTime); tracker->update(deltaTime);
} }
SixenseManager::getInstance().update(deltaTime); SixenseManager::getInstance().update(deltaTime);

View file

@ -391,6 +391,7 @@ public slots:
void resetSensors(); void resetSensors();
void setActiveFaceTracker(); void setActiveFaceTracker();
void toggleFaceTrackerMute();
void aboutApp(); void aboutApp();
void showEditEntitiesHelp(); void showEditEntitiesHelp();
@ -432,6 +433,7 @@ private slots:
void runTests(); void runTests();
void audioMuteToggled(); void audioMuteToggled();
void faceTrackerMuteToggled();
void setCursorVisible(bool visible); void setCursorVisible(bool visible);

View file

@ -394,6 +394,12 @@ Menu::Menu() {
QAction* ddeFiltering = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::VelocityFilter, 0, true); QAction* ddeFiltering = addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::VelocityFilter, 0, true);
ddeFiltering->setVisible(false); ddeFiltering->setVisible(false);
#endif #endif
#if defined(HAVE_FACESHIFT) || defined(HAVE_DDE)
faceTrackingMenu->addSeparator();
addCheckableActionToQMenuAndActionHash(faceTrackingMenu, MenuOption::MuteFaceTracking,
0, false,
qApp, SLOT(toggleFaceTrackerMute()));
#endif
auto avatarManager = DependencyManager::get<AvatarManager>(); auto avatarManager = DependencyManager::get<AvatarManager>();
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false, addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,

View file

@ -211,6 +211,7 @@ namespace MenuOption {
const QString Mirror = "Mirror"; const QString Mirror = "Mirror";
const QString MuteAudio = "Mute Microphone"; const QString MuteAudio = "Mute Microphone";
const QString MuteEnvironment = "Mute Environment"; const QString MuteEnvironment = "Mute Environment";
const QString MuteFaceTracking = "Mute Face Tracking";
const QString NoFaceTracking = "None"; const QString NoFaceTracking = "None";
const QString OctreeStats = "Entity Statistics"; const QString OctreeStats = "Entity Statistics";
const QString OffAxisProjection = "Off-Axis Projection"; const QString OffAxisProjection = "Off-Axis Projection";

View file

@ -90,7 +90,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
// Only use face trackers when not playing back a recording. // Only use face trackers when not playing back a recording.
if (!myAvatar->isPlaying()) { if (!myAvatar->isPlaying()) {
FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker(); FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
_isFaceTrackerConnected = faceTracker != NULL; _isFaceTrackerConnected = faceTracker != NULL && !faceTracker->isMuted();
if (_isFaceTrackerConnected) { if (_isFaceTrackerConnected) {
_blendshapeCoefficients = faceTracker->getBlendshapeCoefficients(); _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();

View file

@ -243,7 +243,7 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
estimatedPosition /= OCULUS_LEAN_SCALE; estimatedPosition /= OCULUS_LEAN_SCALE;
} else { } else {
FaceTracker* tracker = Application::getInstance()->getActiveFaceTracker(); FaceTracker* tracker = Application::getInstance()->getActiveFaceTracker();
if (tracker) { if (tracker && !tracker->isMuted()) {
estimatedPosition = tracker->getHeadTranslation(); estimatedPosition = tracker->getHeadTranslation();
_trackedHeadPosition = estimatedPosition; _trackedHeadPosition = estimatedPosition;
estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation())); estimatedRotation = glm::degrees(safeEulerAngles(tracker->getHeadRotation()));

View file

@ -316,7 +316,13 @@ float DdeFaceTracker::getBlendshapeCoefficient(int index) const {
} }
void DdeFaceTracker::decodePacket(const QByteArray& buffer) { void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
if(buffer.size() > MIN_PACKET_SIZE) { _lastReceiveTimestamp = usecTimestampNow();
if (_isMuted) {
return;
}
if (buffer.size() > MIN_PACKET_SIZE) {
bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter); bool isFiltering = Menu::getInstance()->isOptionChecked(MenuOption::VelocityFilter);
Packet packet; Packet packet;
@ -328,7 +334,7 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
memcpy(&translation, packet.translation, sizeof(packet.translation)); memcpy(&translation, packet.translation, sizeof(packet.translation));
glm::quat rotation; glm::quat rotation;
memcpy(&rotation, &packet.rotation, sizeof(packet.rotation)); memcpy(&rotation, &packet.rotation, sizeof(packet.rotation));
if (_reset || (_lastReceiveTimestamp == 0)) { if (_reset || (_lastMessageReceived == 0)) {
memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3)); memcpy(&_referenceTranslation, &translation, sizeof(glm::vec3));
memcpy(&_referenceRotation, &rotation, sizeof(glm::quat)); memcpy(&_referenceRotation, &rotation, sizeof(glm::quat));
_reset = false; _reset = false;
@ -503,5 +509,4 @@ void DdeFaceTracker::decodePacket(const QByteArray& buffer) {
} else { } else {
qCWarning(interfaceapp) << "DDE Face Tracker: Decode error"; qCWarning(interfaceapp) << "DDE Face Tracker: Decode error";
} }
_lastReceiveTimestamp = usecTimestampNow();
} }

View file

@ -15,16 +15,22 @@
#include "FaceTracker.h" #include "FaceTracker.h"
#include "InterfaceLogging.h" #include "InterfaceLogging.h"
#include "Menu.h"
const int FPS_TIMER_DELAY = 2000; // ms const int FPS_TIMER_DELAY = 2000; // ms
const int FPS_TIMER_DURATION = 2000; // ms const int FPS_TIMER_DURATION = 2000; // ms
FaceTracker::FaceTracker() : FaceTracker::FaceTracker() :
_isCalculatingFPS(false), _isCalculatingFPS(false),
_frameCount(0) _frameCount(0),
_isMuted(false)
{ {
} }
// Initialize the tracker's mute state from the "Mute Face Tracking" menu
// setting. Called after menu settings are loaded (see Application's
// constructor), so the checkbox value reflects the saved preference.
void FaceTracker::init() {
_isMuted = Menu::getInstance()->isOptionChecked(MenuOption::MuteFaceTracking);
}
inline float FaceTracker::getBlendshapeCoefficient(int index) const { inline float FaceTracker::getBlendshapeCoefficient(int index) const {
return isValidBlendshapeIndex(index) ? glm::mix(0.0f, _blendshapeCoefficients[index], getFadeCoefficient()) return isValidBlendshapeIndex(index) ? glm::mix(0.0f, _blendshapeCoefficients[index], getFadeCoefficient())
: 0.0f; : 0.0f;
@ -101,3 +107,8 @@ void FaceTracker::finishFPSTimer() {
qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f); qCDebug(interfaceapp) << "Face tracker FPS =" << (float)_frameCount / ((float)FPS_TIMER_DURATION / 1000.0f);
_isCalculatingFPS = false; _isCalculatingFPS = false;
} }
// Flip the mute flag and notify listeners -- Application connects muteToggled
// to faceTrackerMuteToggled(), which syncs the menu item's checked state.
void FaceTracker::toggleMute() {
_isMuted = !_isMuted;
emit muteToggled();
}

View file

@ -26,7 +26,7 @@ public:
virtual bool isActive() const { return false; } virtual bool isActive() const { return false; }
virtual bool isTracking() const { return false; } virtual bool isTracking() const { return false; }
virtual void init() {} virtual void init();
virtual void update(float deltaTime); virtual void update(float deltaTime);
virtual void reset(); virtual void reset();
@ -42,6 +42,13 @@ public:
bool isValidBlendshapeIndex(int index) const { return index >= 0 && index < getNumBlendshapes(); } bool isValidBlendshapeIndex(int index) const { return index >= 0 && index < getNumBlendshapes(); }
const QVector<float>& getBlendshapeCoefficients() const; const QVector<float>& getBlendshapeCoefficients() const;
float getBlendshapeCoefficient(int index) const; float getBlendshapeCoefficient(int index) const;
bool isMuted() const { return _isMuted; }
void setIsMuted(bool isMuted) { _isMuted = isMuted; }
void toggleMute();
signals:
void muteToggled();
protected: protected:
FaceTracker(); FaceTracker();
@ -56,6 +63,8 @@ protected:
float _relaxationStatus = 0.0f; // Between 0.0f and 1.0f float _relaxationStatus = 0.0f; // Between 0.0f and 1.0f
float _fadeCoefficient = 0.0f; // Between 0.0f and 1.0f float _fadeCoefficient = 0.0f; // Between 0.0f and 1.0f
bool _isMuted;
void countFrame(); void countFrame();
private slots: private slots:

View file

@ -50,6 +50,7 @@ Faceshift::Faceshift() :
#ifdef HAVE_FACESHIFT #ifdef HAVE_FACESHIFT
void Faceshift::init() { void Faceshift::init() {
setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift)); setTCPEnabled(Menu::getInstance()->isOptionChecked(MenuOption::Faceshift));
FaceTracker::init();
} }
void Faceshift::update(float deltaTime) { void Faceshift::update(float deltaTime) {
@ -92,7 +93,7 @@ void Faceshift::reset() {
bool Faceshift::isActive() const { bool Faceshift::isActive() const {
const quint64 ACTIVE_TIMEOUT_USECS = 1000000; const quint64 ACTIVE_TIMEOUT_USECS = 1000000;
return (usecTimestampNow() - _lastTrackingStateReceived) < ACTIVE_TIMEOUT_USECS; return (usecTimestampNow() - _lastReceiveTimestamp) < ACTIVE_TIMEOUT_USECS;
} }
bool Faceshift::isTracking() const { bool Faceshift::isTracking() const {
@ -196,6 +197,12 @@ void Faceshift::send(const std::string& message) {
void Faceshift::receive(const QByteArray& buffer) { void Faceshift::receive(const QByteArray& buffer) {
#ifdef HAVE_FACESHIFT #ifdef HAVE_FACESHIFT
_lastReceiveTimestamp = usecTimestampNow();
if (_isMuted) {
return;
}
_stream.received(buffer.size(), buffer.constData()); _stream.received(buffer.size(), buffer.constData());
fsMsgPtr msg; fsMsgPtr msg;
for (fsMsgPtr msg; (msg = _stream.get_message()); ) { for (fsMsgPtr msg; (msg = _stream.get_message()); ) {
@ -240,11 +247,11 @@ void Faceshift::receive(const QByteArray& buffer) {
const float FRAME_AVERAGING_FACTOR = 0.99f; const float FRAME_AVERAGING_FACTOR = 0.99f;
quint64 usecsNow = usecTimestampNow(); quint64 usecsNow = usecTimestampNow();
if (_lastTrackingStateReceived != 0) { if (_lastMessageReceived != 0) {
_averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime + _averageFrameTime = FRAME_AVERAGING_FACTOR * _averageFrameTime +
(1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastTrackingStateReceived) / 1000000.0f; (1.0f - FRAME_AVERAGING_FACTOR) * (float)(usecsNow - _lastMessageReceived) / 1000000.0f;
} }
_lastTrackingStateReceived = usecsNow; _lastMessageReceived = usecsNow;
} }
break; break;
} }

View file

@ -114,7 +114,8 @@ private:
bool _tcpEnabled = true; bool _tcpEnabled = true;
int _tcpRetryCount = 0; int _tcpRetryCount = 0;
bool _tracking = false; bool _tracking = false;
quint64 _lastTrackingStateReceived = 0; quint64 _lastReceiveTimestamp = 0;
quint64 _lastMessageReceived = 0;
float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME; float _averageFrameTime = STARTING_FACESHIFT_FRAME_TIME;
glm::vec3 _headAngularVelocity = glm::vec3(0.0f); glm::vec3 _headAngularVelocity = glm::vec3(0.0f);

View file

@ -161,6 +161,7 @@ public:
Mirror, Mirror,
MuteAudio, MuteAudio,
MuteEnvironment, MuteEnvironment,
MuteFaceTracking,
NoFaceTracking, NoFaceTracking,
NoShadows, NoShadows,
OctreeStats, OctreeStats,