remove old/unused eyetracker code

Seth Alves 2019-09-07 18:12:19 -07:00
parent 943348ba51
commit 5c570d28a0
5 changed files with 66 additions and 36 deletions

View file

@@ -437,13 +437,6 @@ public slots:
     void sendWrongProtocolVersionsSignature(bool checked) { ::sendWrongProtocolVersionsSignature(checked); }
 #endif
-#ifdef HAVE_IVIEWHMD
-    void setActiveEyeTracker();
-    void calibrateEyeTracker1Point();
-    void calibrateEyeTracker3Points();
-    void calibrateEyeTracker5Points();
-#endif
     static void showHelp();
     void cycleCamera();

View file

@@ -17,7 +17,6 @@
 #include <DependencyManager.h>
 #include <GeometryUtil.h>
 #include <trackers/FaceTracker.h>
-#include <trackers/EyeTracker.h>
 #include <Rig.h>
 #include "Logging.h"
@@ -58,7 +57,7 @@ void Head::simulate(float deltaTime) {
         _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
     }
-    if (!_isEyeTrackerConnected) {
+    if (getHasProceduralEyeMovement()) {
         // Update eye saccades
         const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
         const float AVERAGE_SACCADE_INTERVAL = 6.0f;
@@ -82,6 +81,7 @@ void Head::simulate(float deltaTime) {
     const float FULLY_OPEN = 0.0f;
     const float FULLY_CLOSED = 1.0f;
     if (getHasProceduralBlinkFaceMovement()) {
+        // handle automatic blinks
         // Detect transition from talking to not; force blink after that and a delay
         bool forceBlink = false;
         const float TALKING_LOUDNESS = 150.0f;
@@ -129,7 +129,7 @@ void Head::simulate(float deltaTime) {
         _leftEyeBlink = FULLY_OPEN;
     }
     // use data to update fake Faceshift blendshape coefficients
     if (getHasAudioEnabledFaceMovement()) {
         // Update audio attack data for facial animation (eyebrows and mouth)
         float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
@@ -152,7 +152,8 @@ void Head::simulate(float deltaTime) {
         _mouthTime = 0.0f;
     }
-    FaceTracker::updateFakeCoefficients(_leftEyeBlink,
+    FaceTracker::updateFakeCoefficients(
+        _leftEyeBlink,
         _rightEyeBlink,
         _browAudioLift,
         _audioJawOpen,
@@ -162,6+163,8 @@ void Head::simulate(float deltaTime) {
         _transientBlendshapeCoefficients);
     if (getHasProceduralEyeFaceMovement()) {
+        // This controls two things, the eye brow and the upper eye lid, it is driven by the vertical up/down angle of the
+        // eyes relative to the head. This is to try to help prevent sleepy eyes/crazy eyes.
         applyEyelidOffset(getOrientation());
     }
@@ -292,7 +295,7 @@ glm::quat Head::getFinalOrientationInLocalFrame() const {
 }
 // Everyone else's head keeps track of a lookAtPosition that everybody sees the same, and refers to where that head
-// is looking in model space -- e.g., at someone's eyeball, or between their eyes, or mouth, etc. Everyon's Interface
+// is looking in model space -- e.g., at someone's eyeball, or between their eyes, or mouth, etc. Everyone's Interface
 // will have the same value for the lookAtPosition of any given head.
 //
 // Everyone else's head also keeps track of a correctedLookAtPosition that may be different for the same head within
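
The Head::simulate() changes above gate the fake saccade logic on getHasProceduralEyeMovement() instead of the removed _isEyeTrackerConnected flag, so procedural eye motion only runs when nothing else is supplying gaze data. Below is a minimal, self-contained sketch of that pattern; only the two interval constants come from the hunk above, and the member names, magnitudes, and easing rate are illustrative assumptions, not the engine's actual implementation.

// Sketch only -- illustrative stand-in for the procedural saccade path in Head::simulate().
#include <cstdlib>

struct Saccade { float x { 0.0f }; float y { 0.0f }; };

struct ProceduralEyeState {
    bool hasProceduralEyeMovement { true };   // mirrors HeadData::_hasProceduralEyeMovement
    Saccade saccade;                          // current offset applied to the gaze direction
    Saccade saccadeTarget;                    // where the next jump is headed

    static float randFloat() { return (float)rand() / (float)RAND_MAX; }

    void simulate(float deltaTime) {
        if (!hasProceduralEyeMovement) {
            return;   // something else (e.g. HMD eye tracking) drives the eyes; skip fake saccades
        }
        // Interval constants taken from the Head::simulate() hunk above.
        const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;   // seconds
        const float AVERAGE_SACCADE_INTERVAL = 6.0f;        // seconds
        const float MICROSACCADE_MAGNITUDE = 0.002f;        // assumed magnitudes
        const float SACCADE_MAGNITUDE = 0.04f;

        // Firing with probability deltaTime / interval per frame averages out to roughly
        // one microsaccade per second and one larger saccade every six seconds.
        if (randFloat() < deltaTime / AVERAGE_MICROSACCADE_INTERVAL) {
            saccadeTarget = { MICROSACCADE_MAGNITUDE * (randFloat() - 0.5f),
                              MICROSACCADE_MAGNITUDE * (randFloat() - 0.5f) };
        } else if (randFloat() < deltaTime / AVERAGE_SACCADE_INTERVAL) {
            saccadeTarget = { SACCADE_MAGNITUDE * (randFloat() - 0.5f),
                              SACCADE_MAGNITUDE * (randFloat() - 0.5f) };
        }
        // Ease the eyes toward the target rather than snapping.
        const float EASE_RATE = 0.5f;   // assumed smoothing factor
        saccade.x += (saccadeTarget.x - saccade.x) * EASE_RATE;
        saccade.y += (saccadeTarget.y - saccade.y) * EASE_RATE;
    }
};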

View file

@@ -245,9 +245,10 @@ QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail, bool dro
 }
 QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime,
-    const QVector<JointData>& lastSentJointData,
-    AvatarDataPacket::SendStatus& sendStatus, bool dropFaceTracking, bool distanceAdjust,
-    glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, int maxDataSize, AvatarDataRate* outboundDataRateOut) const {
+    const QVector<JointData>& lastSentJointData, AvatarDataPacket::SendStatus& sendStatus,
+    bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
+    QVector<JointData>* sentJointDataOut,
+    int maxDataSize, AvatarDataRate* outboundDataRateOut) const {
     bool cullSmallChanges = (dataDetail == CullSmallData);
     bool sendAll = (dataDetail == SendAllData);
@@ -532,7 +533,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);
     }
     // eye tracker state
-    if (_headData->_isEyeTrackerConnected) {
+    if (!_headData->_hasProceduralEyeMovement) {
         setAtBit16(flags, IS_EYE_TRACKER_CONNECTED);
     }
     // referential state
@@ -1150,7 +1151,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
         + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
     auto newFaceTrackerConnected = oneAtBit16(bitItems, IS_FACE_TRACKER_CONNECTED);
-    auto newEyeTrackerConnected = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);
+    auto newHasntProceduralEyeMovement = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);
     auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
     auto newHasProceduralEyeFaceMovement = oneAtBit16(bitItems, PROCEDURAL_EYE_FACE_MOVEMENT);
@@ -1161,7 +1162,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
     bool keyStateChanged = (_keyState != newKeyState);
     bool handStateChanged = (_handState != newHandState);
     bool faceStateChanged = (_headData->_isFaceTrackerConnected != newFaceTrackerConnected);
-    bool eyeStateChanged = (_headData->_isEyeTrackerConnected != newEyeTrackerConnected);
+    bool eyeStateChanged = (_headData->_hasProceduralEyeMovement == newHasntProceduralEyeMovement);
     bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
     bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
     bool proceduralBlinkFaceMovementChanged = (_headData->getHasProceduralBlinkFaceMovement() != newHasProceduralBlinkFaceMovement);
@@ -1174,7 +1175,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
     _keyState = newKeyState;
     _handState = newHandState;
     _headData->_isFaceTrackerConnected = newFaceTrackerConnected;
-    _headData->_isEyeTrackerConnected = newEyeTrackerConnected;
+    _headData->setHasProceduralEyeMovement(!newHasntProceduralEyeMovement);
     _headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
     _headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
     _headData->setHasProceduralBlinkFaceMovement(newHasProceduralBlinkFaceMovement);
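
The AvatarData changes above keep the legacy IS_EYE_TRACKER_CONNECTED wire bit but flip its meaning: the sender sets it when procedural eye movement is disabled, and the receiver negates it again before calling setHasProceduralEyeMovement(). A compact sketch of that round trip, using stand-in bit helpers rather than the real setAtBit16()/oneAtBit16() packet utilities (the bit index is illustrative only):

// Sketch only -- stand-in for the flag packing/parsing shown in the diff above.
#include <cassert>
#include <cstdint>

constexpr int IS_EYE_TRACKER_CONNECTED = 5;   // illustrative bit index

inline void setBit(uint16_t& flags, int bit) { flags |= (1u << bit); }
inline bool getBit(uint16_t flags, int bit) { return (flags & (1u << bit)) != 0; }

// Sender side (toByteArray): the old "eye tracker connected" bit now means
// "procedural eye movement is disabled".
uint16_t packEyeState(bool hasProceduralEyeMovement) {
    uint16_t flags = 0;
    if (!hasProceduralEyeMovement) {
        setBit(flags, IS_EYE_TRACKER_CONNECTED);
    }
    return flags;
}

// Receiver side (parseDataFromBuffer): negate the bit again on the way in.
bool unpackEyeState(uint16_t flags) {
    bool hasntProceduralEyeMovement = getBit(flags, IS_EYE_TRACKER_CONNECTED);
    return !hasntProceduralEyeMovement;   // value handed to setHasProceduralEyeMovement()
}

int main() {
    // The two negations cancel, so the state survives the trip unchanged
    // while the bit keeps its old position and polarity on the wire.
    assert(unpackEyeState(packEyeState(true)) == true);
    assert(unpackEyeState(packEyeState(false)) == false);
    return 0;
}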

View file

@@ -196,3 +196,40 @@ void HeadData::fromJson(const QJsonObject& json) {
         setHeadOrientation(quatFromJsonValue(json[JSON_AVATAR_HEAD_ROTATION]));
     }
 }
+
+bool HeadData::getHasProceduralEyeFaceMovement() const {
+    return _hasProceduralEyeFaceMovement;
+}
+
+void HeadData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
+    _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
+}
+
+bool HeadData::getHasProceduralBlinkFaceMovement() const {
+    // return _hasProceduralBlinkFaceMovement;
+    return _hasProceduralBlinkFaceMovement && !_isFaceTrackerConnected;
+}
+
+void HeadData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
+    _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
+}
+
+bool HeadData::getHasAudioEnabledFaceMovement() const {
+    return _hasAudioEnabledFaceMovement;
+}
+
+void HeadData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
+    _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
+}
+
+bool HeadData::getHasProceduralEyeMovement() const {
+    return _hasProceduralEyeMovement;
+}
+
+void HeadData::setHasProceduralEyeMovement(bool hasProceduralEyeMovement) {
+    _hasProceduralEyeMovement = hasProceduralEyeMovement;
+}
+
+void HeadData::setFaceTrackerConnected(bool value) {
+    _isFaceTrackerConnected = value;
+}
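
Note the new out-of-line getHasProceduralBlinkFaceMovement() above: it folds _isFaceTrackerConnected into the result, so a connected face tracker suppresses procedural blinking even while the underlying flag stays true. A small sketch of that behavior; HeadDataLike is a stand-in class, not the real HeadData:

// Sketch only -- reproduces the accessor semantics from the hunk above.
#include <cassert>

class HeadDataLike {
public:
    bool getHasProceduralBlinkFaceMovement() const {
        // Mirrors HeadData.cpp above: a connected face tracker overrides the flag.
        return _hasProceduralBlinkFaceMovement && !_isFaceTrackerConnected;
    }
    void setHasProceduralBlinkFaceMovement(bool value) { _hasProceduralBlinkFaceMovement = value; }
    void setFaceTrackerConnected(bool value) { _isFaceTrackerConnected = value; }
private:
    bool _hasProceduralBlinkFaceMovement { true };
    bool _isFaceTrackerConnected { false };
};

int main() {
    HeadDataLike head;
    assert(head.getHasProceduralBlinkFaceMovement());    // default: procedural blinks on

    head.setFaceTrackerConnected(true);                   // real blink data is available
    assert(!head.getHasProceduralBlinkFaceMovement());    // procedural blinks are suppressed

    head.setFaceTrackerConnected(false);
    assert(head.getHasProceduralBlinkFaceMovement());     // and come back when it disconnects
    return 0;
}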

View file

@@ -72,23 +72,17 @@ public:
     }
     bool lookAtPositionChangedSince(quint64 time) { return _lookAtPositionChanged >= time; }
-    bool getHasProceduralEyeFaceMovement() const { return _hasProceduralEyeFaceMovement; }
-    void setHasProceduralEyeFaceMovement(const bool hasProceduralEyeFaceMovement) {
-        _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
-    }
-    bool getHasProceduralBlinkFaceMovement() const { return _hasProceduralBlinkFaceMovement; }
-    void setHasProceduralBlinkFaceMovement(const bool hasProceduralBlinkFaceMovement) {
-        _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
-    }
-    bool getHasAudioEnabledFaceMovement() const { return _hasAudioEnabledFaceMovement; }
-    void setHasAudioEnabledFaceMovement(const bool hasAudioEnabledFaceMovement) {
-        _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
-    }
+    bool getHasProceduralEyeFaceMovement() const;
+    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
+    bool getHasProceduralBlinkFaceMovement() const;
+    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
+    bool getHasAudioEnabledFaceMovement() const;
+    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
+    bool getHasProceduralEyeMovement() const;
+    void setHasProceduralEyeMovement(bool hasProceduralEyeMovement);
+    void setFaceTrackerConnected(bool value);
+    bool getFaceTrackerConnected() const { return _isFaceTrackerConnected; }
     friend class AvatarData;
@@ -107,8 +101,10 @@ protected:
     bool _hasAudioEnabledFaceMovement { true };
     bool _hasProceduralBlinkFaceMovement { true };
     bool _hasProceduralEyeFaceMovement { true };
+    bool _hasProceduralEyeMovement { true };
     bool _isFaceTrackerConnected { false };
-    bool _isEyeTrackerConnected { false };
     float _leftEyeBlink { 0.0f };
     float _rightEyeBlink { 0.0f };
     float _averageLoudness { 0.0f };