From d704af21acff0739d4b5537e127c5c31e3e897b9 Mon Sep 17 00:00:00 2001
From: David Rowe
Date: Wed, 18 Feb 2015 12:41:54 -0800
Subject: [PATCH] Generalize "faceshift" data handling to "facetracker"

DDE face tracking data is now broadcast to other avatars.
---
 assignment-client/src/Agent.cpp      |  2 +-
 interface/src/avatar/Head.cpp        | 13 ++++---------
 libraries/avatars/src/AvatarData.cpp | 14 +++++++-------
 libraries/avatars/src/AvatarData.h   |  4 ++--
 libraries/avatars/src/HeadData.cpp   |  2 +-
 libraries/avatars/src/HeadData.h     |  2 +-
 libraries/avatars/src/Player.cpp     | 10 +++++-----
 7 files changed, 21 insertions(+), 26 deletions(-)

diff --git a/assignment-client/src/Agent.cpp b/assignment-client/src/Agent.cpp
index 4755d9137a..323d5695ed 100644
--- a/assignment-client/src/Agent.cpp
+++ b/assignment-client/src/Agent.cpp
@@ -193,7 +193,7 @@ void Agent::run() {
 
     // setup an Avatar for the script to use
     ScriptableAvatar scriptedAvatar(&_scriptEngine);
-    scriptedAvatar.setForceFaceshiftConnected(true);
+    scriptedAvatar.setForceFaceTrackerConnected(true);
 
     // call model URL setters with empty URLs so our avatar, if user, will have the default models
     scriptedAvatar.setFaceModelURL(QUrl());
diff --git a/interface/src/avatar/Head.cpp b/interface/src/avatar/Head.cpp
index afc1346ad8..52f1b3ee86 100644
--- a/interface/src/avatar/Head.cpp
+++ b/interface/src/avatar/Head.cpp
@@ -80,15 +80,10 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
         // Only use face trackers when not playing back a recording.
         if (!myAvatar->isPlaying()) {
             FaceTracker* faceTracker = Application::getInstance()->getActiveFaceTracker();
-            auto dde = DependencyManager::get<DdeFaceTracker>();
-            auto faceshift = DependencyManager::get<Faceshift>();
-
-            if ((_isFaceshiftConnected = (faceshift == faceTracker))) {
+            _isFaceTrackerConnected = faceTracker != NULL;
+            if (_isFaceTrackerConnected) {
                 _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
-            } else if (dde->isActive()) {
-                faceTracker = dde.data();
-                _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
-            }
+            }
         }
         // Twist the upper body to follow the rotation of the head, but only do this with my avatar,
         // since everyone else will see the full joint rotations for other people.
@@ -109,7 +104,7 @@ void Head::simulate(float deltaTime, bool isMine, bool billboard) {
         _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
     }
 
-    if (!(_isFaceshiftConnected || billboard)) {
+    if (!(_isFaceTrackerConnected || billboard)) {
         // Update eye saccades
         const float AVERAGE_MICROSACCADE_INTERVAL = 0.50f;
         const float AVERAGE_SACCADE_INTERVAL = 4.0f;
diff --git a/libraries/avatars/src/AvatarData.cpp b/libraries/avatars/src/AvatarData.cpp
index 4c71c5e9ed..a3d330f84b 100644
--- a/libraries/avatars/src/AvatarData.cpp
+++ b/libraries/avatars/src/AvatarData.cpp
@@ -44,7 +44,7 @@ AvatarData::AvatarData() :
     _handState(0),
     _keyState(NO_KEY_DOWN),
     _isChatCirclingEnabled(false),
-    _forceFaceshiftConnected(false),
+    _forceFaceTrackerConnected(false),
     _hasNewJointRotations(true),
     _headData(NULL),
     _handData(NULL),
@@ -136,8 +136,8 @@ QByteArray AvatarData::toByteArray() {
     if (!_headData) {
         _headData = new HeadData(this);
    }
-    if (_forceFaceshiftConnected) {
-        _headData->_isFaceshiftConnected = true;
+    if (_forceFaceTrackerConnected) {
+        _headData->_isFaceTrackerConnected = true;
     }
 
     QByteArray avatarDataByteArray;
@@ -191,7 +191,7 @@ QByteArray AvatarData::toByteArray() {
         setAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT);
     }
     // faceshift state
-    if (_headData->_isFaceshiftConnected) {
+    if (_headData->_isFaceTrackerConnected) {
         setAtBit(bitItems, IS_FACESHIFT_CONNECTED);
     }
     if (_isChatCirclingEnabled) {
@@ -208,7 +208,7 @@ QByteArray AvatarData::toByteArray() {
     }
 
     // If it is connected, pack up the data
-    if (_headData->_isFaceshiftConnected) {
+    if (_headData->_isFaceTrackerConnected) {
         memcpy(destinationBuffer, &_headData->_leftEyeBlink, sizeof(float));
         destinationBuffer += sizeof(float);
@@ -417,7 +417,7 @@ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {
     _handState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT) + (oneAtBit(bitItems, HAND_STATE_FINGER_POINTING_BIT) ?
         IS_FINGER_POINTING_FLAG : 0);
-    _headData->_isFaceshiftConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
+    _headData->_isFaceTrackerConnected = oneAtBit(bitItems, IS_FACESHIFT_CONNECTED);
     _isChatCirclingEnabled = oneAtBit(bitItems, IS_CHAT_CIRCLING_ENABLED);
 
     bool hasReferential = oneAtBit(bitItems, HAS_REFERENTIAL);
@@ -436,7 +436,7 @@ int AvatarData::parseDataAtOffset(const QByteArray& packet, int offset) {
     }
 
-    if (_headData->_isFaceshiftConnected) {
+    if (_headData->_isFaceTrackerConnected) {
         float leftEyeBlink, rightEyeBlink, averageLoudness, browAudioLift;
         minPossibleSize += sizeof(leftEyeBlink) + sizeof(rightEyeBlink) + sizeof(averageLoudness) + sizeof(browAudioLift);
         minPossibleSize++; // one byte for blendDataSize
diff --git a/libraries/avatars/src/AvatarData.h b/libraries/avatars/src/AvatarData.h
index 8848a261df..a8a330485c 100644
--- a/libraries/avatars/src/AvatarData.h
+++ b/libraries/avatars/src/AvatarData.h
@@ -241,7 +241,7 @@ public:
     Q_INVOKABLE void setBlendshape(QString name, float val) { _headData->setBlendshape(name, val); }
 
-    void setForceFaceshiftConnected(bool connected) { _forceFaceshiftConnected = connected; }
+    void setForceFaceTrackerConnected(bool connected) { _forceFaceTrackerConnected = connected; }
 
     // key state
     void setKeyState(KeyState s) { _keyState = s; }
@@ -357,7 +357,7 @@ protected:
     KeyState _keyState;
 
     bool _isChatCirclingEnabled;
-    bool _forceFaceshiftConnected;
+    bool _forceFaceTrackerConnected;
     bool _hasNewJointRotations; // set in AvatarData, cleared in Avatar
 
     HeadData* _headData;
diff --git a/libraries/avatars/src/HeadData.cpp b/libraries/avatars/src/HeadData.cpp
index 8dec8368c9..7789385547 100644
--- a/libraries/avatars/src/HeadData.cpp
+++ b/libraries/avatars/src/HeadData.cpp
@@ -31,7 +31,7 @@ HeadData::HeadData(AvatarData* owningAvatar) :
     _torsoTwist(0.0f),
     _lookAtPosition(0.0f, 0.0f, 0.0f),
     _audioLoudness(0.0f),
-    _isFaceshiftConnected(false),
+    _isFaceTrackerConnected(false),
     _leftEyeBlink(0.0f),
     _rightEyeBlink(0.0f),
     _averageLoudness(0.0f),
diff --git a/libraries/avatars/src/HeadData.h b/libraries/avatars/src/HeadData.h
index cef5d5fbca..b180541914 100644
--- a/libraries/avatars/src/HeadData.h
+++ b/libraries/avatars/src/HeadData.h
@@ -92,7 +92,7 @@ protected:
     glm::vec3 _lookAtPosition;
     float _audioLoudness;
-    bool _isFaceshiftConnected;
+    bool _isFaceTrackerConnected;
     float _leftEyeBlink;
     float _rightEyeBlink;
     float _averageLoudness;
diff --git a/libraries/avatars/src/Player.cpp b/libraries/avatars/src/Player.cpp
index 2fabc39bac..2b92acb189 100644
--- a/libraries/avatars/src/Player.cpp
+++ b/libraries/avatars/src/Player.cpp
@@ -110,7 +110,7 @@ void Player::startPlaying() {
         }
 
         // Fake faceshift connection
-        _avatar->setForceFaceshiftConnected(true);
+        _avatar->setForceFaceTrackerConnected(true);
 
         qDebug() << "Recorder::startPlaying()";
         setupAudioThread();
@@ -136,8 +136,8 @@ void Player::stopPlaying() {
     cleanupAudioThread();
     _avatar->clearJointsData();
 
-    // Turn off fake faceshift connection
-    _avatar->setForceFaceshiftConnected(false);
+    // Turn off fake face tracker connection
+    _avatar->setForceFaceTrackerConnected(false);
 
     if (_useAttachments) {
         _avatar->setAttachmentData(_currentContext.attachments);
@@ -255,8 +255,8 @@ void Player::play() {
     HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
     if (head) {
-        // Make sure fake faceshift connection doesn't get turned off
-        _avatar->setForceFaceshiftConnected(true);
+        // Make sure fake face tracker connection doesn't get turned off
+        _avatar->setForceFaceTrackerConnected(true);
 
         QVector<float> blendCoef(currentFrame.getBlendshapeCoefficients().size());
         for (int i = 0; i < currentFrame.getBlendshapeCoefficients().size(); ++i) {