add MyAvatar.hasAudioEnabledFaceMovement API property which is enabled by default
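
MyAvatar is exposed to Interface's JavaScript scripting API, so the Q_PROPERTY added below surfaces as a property that scripts can read and write. A minimal usage sketch (the property name comes from this diff; the surrounding calls are standard Interface script API):

    // hasAudioEnabledFaceMovement defaults to true, so audio-driven mouth
    // movement stays on unless a script opts out.
    print("audio face movement: " + MyAvatar.hasAudioEnabledFaceMovement);

    // Switch the audio-driven movement off for the lifetime of this script...
    MyAvatar.hasAudioEnabledFaceMovement = false;

    // ...and restore the default when the script shuts down.
    Script.scriptEnding.connect(function () {
        MyAvatar.hasAudioEnabledFaceMovement = true;
    });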

Author: Thijs Wenker, 2018-04-24 19:24:23 +02:00
parent 3764ee4a06
commit b0187ecfd3
3 changed files with 29 additions and 21 deletions

interface/src/avatar/MyAvatar.h

@@ -134,6 +134,7 @@ class MyAvatar : public Avatar {
     Q_PROPERTY(AudioListenerMode audioListenerModeCamera READ getAudioListenerModeCamera)
     Q_PROPERTY(AudioListenerMode audioListenerModeCustom READ getAudioListenerModeCustom)
     Q_PROPERTY(bool hasScriptedBlendshapes READ getHasScriptedBlendshapes WRITE setHasScriptedBlendshapes)
+    Q_PROPERTY(bool hasAudioEnabledFaceMovement READ getHasAudioEnabledFaceMovement WRITE setHasAudioEnabledFaceMovement)
     //TODO: make gravity feature work Q_PROPERTY(glm::vec3 gravity READ getGravity WRITE setGravity)
     Q_PROPERTY(glm::vec3 leftHandPosition READ getLeftHandPosition)
@@ -684,6 +685,8 @@ private:
     bool getShouldRenderLocally() const { return _shouldRender; }
     void setHasScriptedBlendshapes(bool hasScriptedBlendshapes) { _hasScriptedBlendShapes = hasScriptedBlendshapes; }
     bool getHasScriptedBlendshapes() const override { return _hasScriptedBlendShapes; }
+    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) { _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement; }
+    bool getHasAudioEnabledFaceMovement() const override { return _hasAudioEnabledFaceMovement; }
     bool isMyAvatar() const override { return true; }
     virtual int parseDataFromBuffer(const QByteArray& buffer) override;
     virtual glm::vec3 getSkeletonPosition() const override;
@@ -793,6 +796,7 @@ private:
     float _hmdRollControlDeadZone { ROLL_CONTROL_DEAD_ZONE_DEFAULT };
     float _hmdRollControlRate { ROLL_CONTROL_RATE_DEFAULT };
     bool _hasScriptedBlendShapes { false };
+    bool _hasAudioEnabledFaceMovement { true };
     // working copy -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
     glm::mat4 _sensorToWorldMatrix { glm::mat4() };

interface/src/avatar/MyHead.cpp

@@ -47,14 +47,18 @@ void MyHead::simulate(float deltaTime) {
     // Only use face trackers when not playing back a recording.
     if (!player->isPlaying()) {
         auto faceTracker = qApp->getActiveFaceTracker();
-        bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
+        const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
         _isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
         if (_isFaceTrackerConnected) {
             if (hasActualFaceTrackerConnected) {
                 _transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
             } else {
                 _transientBlendshapeCoefficients.fill(0, _blendshapeCoefficients.size());
             }
-            if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
-                if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
+            if (_owningAvatar->getHasAudioEnabledFaceMovement() || (faceTracker && (typeid(*faceTracker) == typeid(DdeFaceTracker))
+                && Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth))) {
                 calculateMouthShapes(deltaTime);
                 const int JAW_OPEN_BLENDSHAPE = 21;
@@ -70,7 +74,6 @@ void MyHead::simulate(float deltaTime) {
             }
             applyEyelidOffset(getFinalOrientationInWorldFrame());
-            }
         }
         auto eyeTracker = DependencyManager::get<EyeTracker>();
         _isEyeTrackerConnected = eyeTracker->isTracking();
     }
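
As rewritten, the condition above no longer requires a DDE face tracker for the audio-driven mouth path: calculateMouthShapes() runs when the avatar has hasAudioEnabledFaceMovement set (the new default), or when a DDE tracker is connected and the UseAudioForMouth menu option is checked. A script that animates the face itself will usually want to opt out so the procedural jaw movement does not fight its own values; a sketch, assuming the MyAvatar.setBlendshape() call and the standard "JawOpen" blendshape name:

    // Take over the face from script and silence the audio-driven jaw.
    MyAvatar.hasScriptedBlendshapes = true;
    MyAvatar.hasAudioEnabledFaceMovement = false;

    Script.update.connect(function (deltaTime) {
        // Illustrative only: open and close the jaw on a slow sine wave.
        var t = Date.now() / 1000;
        MyAvatar.setBlendshape("JawOpen", 0.5 + 0.5 * Math.sin(t));
    });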

libraries/avatars/src/AvatarData.h

@@ -503,6 +503,7 @@ public:
     float getDomainLimitedScale() const;
     virtual bool getHasScriptedBlendshapes() const { return false; }
+    virtual bool getHasAudioEnabledFaceMovement() const { return false; }

     /**jsdoc
      * returns the minimum scale allowed for this avatar in the current domain.