Add MyAvatar.hasAudioEnabledFaceMovement API property, which is enabled by default

This commit is contained in:
Thijs Wenker 2018-04-24 19:24:23 +02:00
parent 3764ee4a06
commit b0187ecfd3
3 changed files with 29 additions and 21 deletions

View file

@@ -134,6 +134,7 @@ class MyAvatar : public Avatar {
Q_PROPERTY(AudioListenerMode audioListenerModeCamera READ getAudioListenerModeCamera)
Q_PROPERTY(AudioListenerMode audioListenerModeCustom READ getAudioListenerModeCustom)
Q_PROPERTY(bool hasScriptedBlendshapes READ getHasScriptedBlendshapes WRITE setHasScriptedBlendshapes)
Q_PROPERTY(bool hasAudioEnabledFaceMovement READ getHasAudioEnabledFaceMovement WRITE setHasAudioEnabledFaceMovement)
//TODO: make gravity feature work Q_PROPERTY(glm::vec3 gravity READ getGravity WRITE setGravity)
Q_PROPERTY(glm::vec3 leftHandPosition READ getLeftHandPosition)
@@ -684,6 +685,8 @@ private:
bool getShouldRenderLocally() const { return _shouldRender; }
void setHasScriptedBlendshapes(bool hasScriptedBlendshapes) { _hasScriptedBlendShapes = hasScriptedBlendshapes; }
bool getHasScriptedBlendshapes() const override { return _hasScriptedBlendShapes; }
void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) { _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement; }
bool getHasAudioEnabledFaceMovement() const override { return _hasAudioEnabledFaceMovement; }
bool isMyAvatar() const override { return true; }
virtual int parseDataFromBuffer(const QByteArray& buffer) override;
virtual glm::vec3 getSkeletonPosition() const override;
@@ -793,6 +796,7 @@ private:
float _hmdRollControlDeadZone { ROLL_CONTROL_DEAD_ZONE_DEFAULT };
float _hmdRollControlRate { ROLL_CONTROL_RATE_DEFAULT };
bool _hasScriptedBlendShapes { false };
bool _hasAudioEnabledFaceMovement { true };
// working copy -- see AvatarData for thread-safe _sensorToWorldMatrixCache, used for outward facing access
glm::mat4 _sensorToWorldMatrix { glm::mat4() };

View file

@@ -47,29 +47,32 @@ void MyHead::simulate(float deltaTime) {
// Only use face trackers when not playing back a recording.
if (!player->isPlaying()) {
auto faceTracker = qApp->getActiveFaceTracker();
bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
_isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
if (hasActualFaceTrackerConnected) {
_transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
if (typeid(*faceTracker) == typeid(DdeFaceTracker)) {
if (Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth)) {
calculateMouthShapes(deltaTime);
const int JAW_OPEN_BLENDSHAPE = 21;
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
_transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
_transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
_transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
}
applyEyelidOffset(getFinalOrientationInWorldFrame());
if (_isFaceTrackerConnected) {
if (hasActualFaceTrackerConnected) {
_transientBlendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
} else {
_transientBlendshapeCoefficients.fill(0, _blendshapeCoefficients.size());
}
if (_owningAvatar->getHasAudioEnabledFaceMovement() || (faceTracker && (typeid(*faceTracker) == typeid(DdeFaceTracker))
&& Menu::getInstance()->isOptionChecked(MenuOption::UseAudioForMouth))) {
calculateMouthShapes(deltaTime);
const int JAW_OPEN_BLENDSHAPE = 21;
const int MMMM_BLENDSHAPE = 34;
const int FUNNEL_BLENDSHAPE = 40;
const int SMILE_LEFT_BLENDSHAPE = 28;
const int SMILE_RIGHT_BLENDSHAPE = 29;
_transientBlendshapeCoefficients[JAW_OPEN_BLENDSHAPE] += _audioJawOpen;
_transientBlendshapeCoefficients[SMILE_LEFT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[SMILE_RIGHT_BLENDSHAPE] += _mouth4;
_transientBlendshapeCoefficients[MMMM_BLENDSHAPE] += _mouth2;
_transientBlendshapeCoefficients[FUNNEL_BLENDSHAPE] += _mouth3;
}
applyEyelidOffset(getFinalOrientationInWorldFrame());
}
auto eyeTracker = DependencyManager::get<EyeTracker>();
_isEyeTrackerConnected = eyeTracker->isTracking();

View file

@@ -503,6 +503,7 @@ public:
float getDomainLimitedScale() const;
virtual bool getHasScriptedBlendshapes() const { return false; }
virtual bool getHasAudioEnabledFaceMovement() const { return false; }
/**jsdoc
* returns the minimum scale allowed for this avatar in the current domain.