Blendshape script API work
* Moved hasScriptedBlendshapes, hasProceduralBlinkFaceMovement, hasProceduralEyeFaceMovement, and hasAudioEnabledFaceMovement to AvatarData so they are accessible via agent scripts.
* Marked setForceFaceTrackerConnected as deprecated.
* Updated JSDoc comments.
Parent: a6842b5cba
Commit: 8411e6b033
9 changed files with 98 additions and 81 deletions
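In script terms the change is a rename plus a deprecation: the old face-tracker toggle becomes the hasScriptedBlendshapes property, which is now also reachable from assignment-client (agent) scripts. A minimal before/after sketch, based on the JSDoc example updated in this diff (from an agent script, use Avatar in place of MyAvatar):

    // Before this commit (setForceFaceTrackerConnected still works, but is now deprecated):
    MyAvatar.setForceFaceTrackerConnected(true);
    MyAvatar.setBlendshape("JawOpen", 1.0);

    // After this commit:
    MyAvatar.hasScriptedBlendshapes = true;
    MyAvatar.setBlendshape("JawOpen", 1.0);
    // ...and when the scripted animation is done:
    MyAvatar.hasScriptedBlendshapes = false;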
Agent.cpp
@@ -376,7 +376,6 @@ void Agent::executeScript() {
     // setup an Avatar for the script to use
     auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
     scriptedAvatar->setID(getSessionUUID());
-    scriptedAvatar->setForceFaceTrackerConnected(true);
 
     // call model URL setters with empty URLs so our avatar, if user, will have the default models
     scriptedAvatar->setSkeletonModelURL(QUrl());
MyAvatar.cpp
@@ -3405,31 +3405,6 @@ bool MyAvatar::shouldRenderHead(const RenderArgs* renderArgs) const {
     return !defaultMode || (!firstPerson && !insideHead) || (overrideAnim && !insideHead);
 }
 
-void MyAvatar::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
-    if (hasScriptedBlendshapes == _hasScriptedBlendShapes) {
-        return;
-    }
-    if (!hasScriptedBlendshapes) {
-        // send a forced avatarData update to make sure the script can send neutral blendshapes on unload
-        // without having to wait for the update loop, make sure _hasScriptedBlendShapes is still true
-        // before sending the update, or else it won't send the neutral blendshapes to the receiving clients
-        sendAvatarDataPacket(true);
-    }
-    _hasScriptedBlendShapes = hasScriptedBlendshapes;
-}
-
-void MyAvatar::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
-    _headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
-}
-
-void MyAvatar::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
-    _headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
-}
-
-void MyAvatar::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
-    _headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
-}
-
 void MyAvatar::setRotationRecenterFilterLength(float length) {
     const float MINIMUM_ROTATION_RECENTER_FILTER_LENGTH = 0.01f;
     _rotationRecenterFilterLength = std::max(MINIMUM_ROTATION_RECENTER_FILTER_LENGTH, length);
MyAvatar.h
@@ -184,12 +184,6 @@ class MyAvatar : public Avatar {
  * property value is <code>audioListenerModeCustom</code>.
  * @property {Quat} customListenOrientation=Quat.IDENTITY - The listening orientation used when the
  *     <code>audioListenerMode</code> property value is <code>audioListenerModeCustom</code>.
- * @property {boolean} hasScriptedBlendshapes=false - <code>true</code> to transmit blendshapes over the network.
- *     <p><strong>Note:</strong> Currently doesn't work. Use {@link MyAvatar.setForceFaceTrackerConnected} instead.</p>
- * @property {boolean} hasProceduralBlinkFaceMovement=true - <code>true</code> if procedural blinking is turned on.
- * @property {boolean} hasProceduralEyeFaceMovement=true - <code>true</code> if procedural eye movement is turned on.
- * @property {boolean} hasAudioEnabledFaceMovement=true - <code>true</code> to move the mouth blendshapes with voice audio
- *     when <code>MyAvatar.hasScriptedBlendshapes</code> is enabled.
  * @property {number} rotationRecenterFilterLength - Configures how quickly the avatar root rotates to recenter its facing
  *     direction to match that of the user's torso based on head and hands orientation. A smaller value makes the
  *     recentering happen more quickly. The minimum value is <code>0.01</code>.
@@ -312,7 +306,10 @@ class MyAvatar : public Avatar {
  * @borrows Avatar.setAttachmentsVariant as setAttachmentsVariant
  * @borrows Avatar.updateAvatarEntity as updateAvatarEntity
  * @borrows Avatar.clearAvatarEntity as clearAvatarEntity
- * @borrows Avatar.setForceFaceTrackerConnected as setForceFaceTrackerConnected
+ * @borrows Avatar.hasScriptedBlendshapes as hasScriptedBlendshapes
+ * @borrows Avatar.hasProceduralBlinkFaceMovement as hasProceduralBlinkFaceMovement
+ * @borrows Avatar.hasEyeFaceMovement as hasEyeFaceMovement
+ * @borrows Avatar.hasAudioEnabledFaceMovement as hasAudioEnabledFaceMovement
  * @borrows Avatar.setSkeletonModelURL as setSkeletonModelURL
  * @borrows Avatar.getAttachmentData as getAttachmentData
  * @borrows Avatar.setAttachmentData as setAttachmentData
@@ -359,10 +356,6 @@ class MyAvatar : public Avatar {
     Q_PROPERTY(AudioListenerMode audioListenerModeCustom READ getAudioListenerModeCustom)
     Q_PROPERTY(glm::vec3 customListenPosition READ getCustomListenPosition WRITE setCustomListenPosition)
     Q_PROPERTY(glm::quat customListenOrientation READ getCustomListenOrientation WRITE setCustomListenOrientation)
-    Q_PROPERTY(bool hasScriptedBlendshapes READ getHasScriptedBlendshapes WRITE setHasScriptedBlendshapes)
-    Q_PROPERTY(bool hasProceduralBlinkFaceMovement READ getHasProceduralBlinkFaceMovement WRITE setHasProceduralBlinkFaceMovement)
-    Q_PROPERTY(bool hasProceduralEyeFaceMovement READ getHasProceduralEyeFaceMovement WRITE setHasProceduralEyeFaceMovement)
-    Q_PROPERTY(bool hasAudioEnabledFaceMovement READ getHasAudioEnabledFaceMovement WRITE setHasAudioEnabledFaceMovement)
     Q_PROPERTY(float rotationRecenterFilterLength READ getRotationRecenterFilterLength WRITE setRotationRecenterFilterLength)
     Q_PROPERTY(float rotationThreshold READ getRotationThreshold WRITE setRotationThreshold)
     Q_PROPERTY(bool enableStepResetRotation READ getEnableStepResetRotation WRITE setEnableStepResetRotation)
@@ -2555,14 +2548,6 @@ private:
     virtual bool shouldRenderHead(const RenderArgs* renderArgs) const override;
     void setShouldRenderLocally(bool shouldRender) { _shouldRender = shouldRender; setEnableMeshVisible(shouldRender); }
     bool getShouldRenderLocally() const { return _shouldRender; }
-    void setHasScriptedBlendshapes(bool hasScriptedBlendshapes);
-    bool getHasScriptedBlendshapes() const override { return _hasScriptedBlendShapes; }
-    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
-    bool getHasProceduralBlinkFaceMovement() const override { return _headData->getHasProceduralBlinkFaceMovement(); }
-    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
-    bool getHasProceduralEyeFaceMovement() const override { return _headData->getHasProceduralEyeFaceMovement(); }
-    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
-    bool getHasAudioEnabledFaceMovement() const override { return _headData->getHasAudioEnabledFaceMovement(); }
     void setRotationRecenterFilterLength(float length);
     float getRotationRecenterFilterLength() const { return _rotationRecenterFilterLength; }
     void setRotationThreshold(float angleRadians);
MyHead.cpp
@@ -64,7 +64,9 @@ void MyHead::simulate(float deltaTime) {
     bool eyeLidsTracked =
         userInputMapper->getActionStateValid(controller::Action::LEFT_EYE_BLINK) &&
         userInputMapper->getActionStateValid(controller::Action::RIGHT_EYE_BLINK);
-    setFaceTrackerConnected(eyeLidsTracked);
+
+    setHasScriptedBlendshapes(eyeLidsTracked);
+
     if (eyeLidsTracked) {
         float leftEyeBlink = userInputMapper->getActionState(controller::Action::LEFT_EYE_BLINK);
         float rightEyeBlink = userInputMapper->getActionState(controller::Action::RIGHT_EYE_BLINK);
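With the change above, the Interface now enables scripted blendshapes automatically whenever the controller system reports valid eyelid-blink actions; the new JSDoc for hasScriptedBlendshapes further down notes the same behavior. A quick way to observe it from a script (a sketch; print is the scripting API's standard logging call):

    // With an eyelid-tracking-capable input plugin active, this reports true
    // even though the script never set the property itself.
    print("hasScriptedBlendshapes: " + MyAvatar.hasScriptedBlendshapes);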
AvatarData.cpp
@@ -110,7 +110,7 @@ AvatarData::AvatarData() :
     _targetScale(1.0f),
     _handState(0),
     _keyState(NO_KEY_DOWN),
-    _forceFaceTrackerConnected(false),
+    _hasScriptedBlendshapes(false),
     _headData(NULL),
     _errorLogExpiry(0),
     _owningAvatarMixer(),
@@ -154,6 +154,32 @@ float AvatarData::getDomainLimitedScale() const {
     }
 }
 
+
+void AvatarData::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
+    if (hasScriptedBlendshapes == _hasScriptedBlendshapes) {
+        return;
+    }
+    if (!hasScriptedBlendshapes) {
+        // send a forced avatarData update to make sure the script can send neutral blendshapes on unload
+        // without having to wait for the update loop, make sure _hasScriptedBlendshapes is still true
+        // before sending the update, or else it won't send the neutral blendshapes to the receiving clients
+        sendAvatarDataPacket(true);
+    }
+    _hasScriptedBlendshapes = hasScriptedBlendshapes;
+}
+
+void AvatarData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
+    _headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
+}
+
+void AvatarData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
+    _headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
+}
+
+void AvatarData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
+    _headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
+}
+
 void AvatarData::setDomainMinimumHeight(float domainMinimumHeight) {
     _domainMinimumHeight = glm::clamp(domainMinimumHeight, MIN_AVATAR_HEIGHT, MAX_AVATAR_HEIGHT);
 }
@@ -206,8 +232,8 @@ void AvatarData::lazyInitHeadData() const {
     if (!_headData) {
         _headData = new HeadData(const_cast<AvatarData*>(this));
     }
-    if (_forceFaceTrackerConnected) {
-        _headData->_isFaceTrackerConnected = true;
+    if (_hasScriptedBlendshapes) {
+        _headData->_hasScriptedBlendshapes = true;
     }
 }
 
@@ -338,7 +364,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         tranlationChangedSince(lastSentTime) ||
         parentInfoChangedSince(lastSentTime));
     hasHandControllers = _controllerLeftHandMatrixCache.isValid() || _controllerRightHandMatrixCache.isValid();
-    hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
+    hasFaceTrackerInfo = !dropFaceTracking && getHasScriptedBlendshapes() &&
         (sendAll || faceTrackerInfoChangedSince(lastSentTime));
     hasJointData = !sendMinimum;
     hasJointDefaultPoseFlags = hasJointData;
@@ -529,8 +555,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         setAtBit16(flags, HAND_STATE_FINGER_POINTING_BIT);
     }
     // face tracker state
-    if (_headData->_isFaceTrackerConnected) {
-        setAtBit16(flags, IS_FACE_TRACKER_CONNECTED);
+    if (_headData->_hasScriptedBlendshapes) {
+        setAtBit16(flags, HAS_SCRIPTED_BLENDSHAPES);
     }
     // eye tracker state
     if (!_headData->_hasProceduralEyeMovement) {
@@ -1150,7 +1176,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
         auto newHandState = getSemiNibbleAt(bitItems, HAND_STATE_START_BIT)
             + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);
 
-        auto newFaceTrackerConnected = oneAtBit16(bitItems, IS_FACE_TRACKER_CONNECTED);
+        auto newHasScriptedBlendshapes = oneAtBit16(bitItems, HAS_SCRIPTED_BLENDSHAPES);
         auto newHasntProceduralEyeMovement = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);
 
         auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
@@ -1161,7 +1187,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
 
         bool keyStateChanged = (_keyState != newKeyState);
        bool handStateChanged = (_handState != newHandState);
-        bool faceStateChanged = (_headData->_isFaceTrackerConnected != newFaceTrackerConnected);
+        bool faceStateChanged = (_headData->_hasScriptedBlendshapes != newHasScriptedBlendshapes);
         bool eyeStateChanged = (_headData->_hasProceduralEyeMovement == newHasntProceduralEyeMovement);
         bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
         bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
@@ -1174,7 +1200,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
 
         _keyState = newKeyState;
         _handState = newHandState;
-        _headData->_isFaceTrackerConnected = newFaceTrackerConnected;
+        _headData->_hasScriptedBlendshapes = newHasScriptedBlendshapes;
         _headData->setHasProceduralEyeMovement(!newHasntProceduralEyeMovement);
         _headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
         _headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
AvatarData.h
@@ -104,12 +104,12 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
 // Procedural Collide with other avatars is enabled 12th bit
 // Procedural Has Hero Priority is enabled 13th bit
 
-const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits
-const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits
-const int IS_FACE_TRACKER_CONNECTED = 4; // 5th bit
+const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits (UNUSED)
+const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits (UNUSED)
+const int HAS_SCRIPTED_BLENDSHAPES = 4; // 5th bit
 const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
 const int HAS_REFERENTIAL = 6; // 7th bit
-const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit
+const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit (UNUSED)
 const int AUDIO_ENABLED_FACE_MOVEMENT = 8; // 9th bit
 const int PROCEDURAL_EYE_FACE_MOVEMENT = 9; // 10th bit
 const int PROCEDURAL_BLINK_FACE_MOVEMENT = 10; // 11th bit
@@ -325,7 +325,7 @@ namespace AvatarDataPacket {
 
     // variable length structure follows
 
-    // only present if IS_FACE_TRACKER_CONNECTED flag is set in AvatarInfo.flags
+    // only present if HAS_SCRIPTED_BLENDSHAPES flag is set in AvatarInfo.flags
     PACKED_BEGIN struct FaceTrackerInfo {
         float leftEyeBlink;
         float rightEyeBlink;
@@ -534,6 +534,19 @@ class AvatarData : public QObject, public SpatiallyNestable {
  *     size in the virtual world. <em>Read-only.</em>
  * @property {boolean} hasPriority - <code>true</code> if the avatar is in a "hero" zone, <code>false</code> if it isn't.
  *     <em>Read-only.</em>
+ * @property {boolean} hasScriptedBlendshapes=false - Set this to <code>true</code> before using the {@link MyAvatar.setBlendshape} method,
+ *     and set it back to <code>false</code> when you no longer want scripted control over the blendshapes.<br /> NOTE: this property will
+ *     automatically become <code>true</code> if the Controller system has valid facial blendshape actions.
+ * @property {boolean} hasProceduralBlinkFaceMovement=true - By default, avatars blink automatically by animating facial
+ *     blendshapes. Set this property to <code>false</code> to disable this automatic blinking. This can be useful if you
+ *     wish to fully control the blink facial blendshapes via the {@link MyAvatar.setBlendshape} method.
+ * @property {boolean} hasProceduralEyeFaceMovement=true - By default, the avatar eye facial blendshapes are adjusted
+ *     automatically as the eyes move, so that the iris is never obscured by the upper or lower lids. Set this
+ *     property to <code>false</code> to disable this automatic movement. This can be useful if you wish to fully control
+ *     the eye blendshapes via the {@link MyAvatar.setBlendshape} method.
+ * @property {boolean} hasAudioEnabledFaceMovement=true - By default, the avatar mouth blendshapes animate based on
+ *     the microphone audio. Set this property to <code>false</code> to disable that animation. This can be useful if you
+ *     wish to fully control the mouth facial blendshapes via the {@link MyAvatar.setBlendshape} method.
  */
 Q_PROPERTY(glm::vec3 position READ getWorldPosition WRITE setPositionViaScript)
 Q_PROPERTY(float scale READ getDomainLimitedScale WRITE setTargetScale)
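Read together, the new property docs suggest a usage pattern along these lines. This is a hedged sketch, not part of the commit; the blendshape names come from the FACESHIFT_BLENDSHAPES table at the end of this diff:

    // Take manual control of blinking: stop the procedural animation,
    // start transmitting scripted blendshapes, then drive the eyelids.
    MyAvatar.hasProceduralBlinkFaceMovement = false;
    MyAvatar.hasScriptedBlendshapes = true;

    // Close both eyelids halfway.
    MyAvatar.setBlendshape("EyeBlink_L", 0.5);
    MyAvatar.setBlendshape("EyeBlink_R", 0.5);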
@@ -575,6 +588,11 @@ class AvatarData : public QObject, public SpatiallyNestable {
 
     Q_PROPERTY(bool hasPriority READ getHasPriority)
 
+    Q_PROPERTY(bool hasScriptedBlendshapes READ getHasScriptedBlendshapes WRITE setHasScriptedBlendshapes)
+    Q_PROPERTY(bool hasProceduralBlinkFaceMovement READ getHasProceduralBlinkFaceMovement WRITE setHasProceduralBlinkFaceMovement)
+    Q_PROPERTY(bool hasProceduralEyeFaceMovement READ getHasProceduralEyeFaceMovement WRITE setHasProceduralEyeFaceMovement)
+    Q_PROPERTY(bool hasAudioEnabledFaceMovement READ getHasAudioEnabledFaceMovement WRITE setHasAudioEnabledFaceMovement)
+
 public:
     virtual QString getName() const override { return QString("Avatar:") + _displayName; }
 
@@ -684,10 +702,14 @@ public:
 
     float getDomainLimitedScale() const;
 
-    virtual bool getHasScriptedBlendshapes() const { return false; }
-    virtual bool getHasProceduralBlinkFaceMovement() const { return true; }
-    virtual bool getHasProceduralEyeFaceMovement() const { return true; }
-    virtual bool getHasAudioEnabledFaceMovement() const { return false; }
+    void setHasScriptedBlendshapes(bool hasScriptedBlendshapes);
+    bool getHasScriptedBlendshapes() const { return _hasScriptedBlendshapes; }
+    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
+    bool getHasProceduralBlinkFaceMovement() const { return _headData->getHasProceduralBlinkFaceMovement(); }
+    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
+    bool getHasProceduralEyeFaceMovement() const { return _headData->getHasProceduralEyeFaceMovement(); }
+    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
+    bool getHasAudioEnabledFaceMovement() const { return _headData->getHasAudioEnabledFaceMovement(); }
 
     /**jsdoc
      * Gets the minimum scale allowed for this avatar in the current domain.
@@ -1111,13 +1133,14 @@ public:
 
     /**jsdoc
      * Sets the value of a blendshape to animate your avatar's face. To enable other users to see the resulting animation of
-     * your avatar's face, use {@link Avatar.setForceFaceTrackerConnected} or {@link MyAvatar.setForceFaceTrackerConnected}.
+     * your avatar's face, set {@link Avatar.hasScriptedBlendshapes} to true while using this API and back to false when your
+     * animation is complete.
      * @function Avatar.setBlendshape
      * @param {string} name - The name of the blendshape, per the
      *     {@link https://docs.highfidelity.com/create/avatars/avatar-standards.html#blendshapes Avatar Standards}.
      * @param {number} value - A value between <code>0.0</code> and <code>1.0</code>.
      * @example <caption>Open your avatar's mouth wide.</caption>
-     * MyAvatar.setForceFaceTrackerConnected(true);
+     * MyAvatar.hasScriptedBlendshapes = true;
      * MyAvatar.setBlendshape("JawOpen", 1.0);
      *
      * // Note: If using from the Avatar API, replace "MyAvatar" with "Avatar".
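The new AvatarData::setHasScriptedBlendshapes above sends one final forced avatar-data packet when the property flips back to false, so a script can restore a neutral face on its way out. A fuller lifecycle might look like this (a sketch, not from the commit; Script.setTimeout is the scripting API's standard timer):

    MyAvatar.hasScriptedBlendshapes = true;
    MyAvatar.setBlendshape("JawOpen", 1.0);

    Script.setTimeout(function () {
        // Return the jaw to neutral, then release scripted control. Clearing
        // the property triggers the forced update that delivers the neutral
        // pose to other clients.
        MyAvatar.setBlendshape("JawOpen", 0.0);
        MyAvatar.hasScriptedBlendshapes = false;
    }, 5000);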
@@ -1163,15 +1186,16 @@ public:
      */
     Q_INVOKABLE virtual void clearAvatarEntity(const QUuid& entityID, bool requiresRemovalFromTree = true);
 
-
     /**jsdoc
+     * <p class="important">Deprecated: This method is deprecated and will be removed.
+     *     Use the <code>Avatar.hasScriptedBlendshapes</code> property instead.</p>
      * Enables blendshapes set using {@link Avatar.setBlendshape} or {@link MyAvatar.setBlendshape} to be transmitted to other
      * users so that they can see the animation of your avatar's face.
      * @function Avatar.setForceFaceTrackerConnected
      * @param {boolean} connected - <code>true</code> to enable blendshape changes to be transmitted to other users,
      *     <code>false</code> to disable.
      */
-    Q_INVOKABLE void setForceFaceTrackerConnected(bool connected) { _forceFaceTrackerConnected = connected; }
+    Q_INVOKABLE void setForceFaceTrackerConnected(bool connected) { setHasScriptedBlendshapes(connected); }
 
     // key state
     void setKeyState(KeyState s) { _keyState = s; }
@@ -1660,7 +1684,6 @@ protected:
     bool faceTrackerInfoChangedSince(quint64 time) const { return true; } // FIXME
 
     bool hasParent() const { return !getParentID().isNull(); }
-    bool hasFaceTracker() const { return _headData ? _headData->_isFaceTrackerConnected : false; }
 
     QByteArray packSkeletonData() const;
     QByteArray packSkeletonModelURL() const;
@@ -1693,7 +1716,7 @@ protected:
     // key state
     KeyState _keyState;
 
-    bool _forceFaceTrackerConnected;
+    bool _hasScriptedBlendshapes;
     bool _hasNewJointData { true }; // set in AvatarData, cleared in Avatar
 
     mutable HeadData* _headData { nullptr };
HeadData.cpp
@@ -206,8 +206,7 @@ void HeadData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement
 }
 
 bool HeadData::getHasProceduralBlinkFaceMovement() const {
-    // return _hasProceduralBlinkFaceMovement;
-    return _hasProceduralBlinkFaceMovement && !_isFaceTrackerConnected;
+    return _hasProceduralBlinkFaceMovement;
 }
 
 void HeadData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
@@ -230,6 +229,6 @@ void HeadData::setHasProceduralEyeMovement(bool hasProceduralEyeMovement) {
     _hasProceduralEyeMovement = hasProceduralEyeMovement;
 }
 
-void HeadData::setFaceTrackerConnected(bool value) {
-    _isFaceTrackerConnected = value;
+void HeadData::setHasScriptedBlendshapes(bool value) {
+    _hasScriptedBlendshapes = value;
 }
HeadData.h
@@ -81,8 +81,8 @@ public:
     bool getHasProceduralEyeMovement() const;
     void setHasProceduralEyeMovement(bool hasProceduralEyeMovement);
 
-    void setFaceTrackerConnected(bool value);
-    bool getFaceTrackerConnected() const { return _isFaceTrackerConnected; }
+    void setHasScriptedBlendshapes(bool value);
+    bool getHasScriptedBlendshapes() const { return _hasScriptedBlendshapes; }
 
     friend class AvatarData;
 
@@ -103,7 +103,7 @@ protected:
     bool _hasProceduralEyeFaceMovement { true };
     bool _hasProceduralEyeMovement { true };
 
-    bool _isFaceTrackerConnected { false };
+    bool _hasScriptedBlendshapes { false };
 
     float _leftEyeBlink { 0.0f };
     float _rightEyeBlink { 0.0f };
FaceshiftConstants.cpp
@@ -34,7 +34,7 @@ const char* FACESHIFT_BLENDSHAPES[] = {
     "JawFwd",
     "JawLeft",
     "JawOpen",
-    "JawChew",
+    "JawChew", // legacy not in ARKit
     "JawRight",
     "MouthLeft",
     "MouthRight",
@@ -48,21 +48,29 @@ const char* FACESHIFT_BLENDSHAPES[] = {
     "LipsStretch_R",
     "LipsUpperClose",
     "LipsLowerClose",
-    "LipsUpperUp",
-    "LipsLowerDown",
+    "LipsUpperUp", // legacy, split in ARKit
+    "LipsLowerDown", // legacy, split in ARKit
     "LipsUpperOpen",
     "LipsLowerOpen",
     "LipsFunnel",
     "LipsPucker",
     "ChinLowerRaise",
     "ChinUpperRaise",
-    "Sneer",
+    "Sneer", // legacy, split in ARKit
     "Puff",
     "CheekSquint_L",
     "CheekSquint_R",
     ""
 };
 
+// new in ARKit
+// LipsTogether
+// MouthPressLeft
+// MouthPressRight
+// MouthShrugLower
+// MouthShrugUpper
+// TongueOut
+
 const int NUM_FACESHIFT_BLENDSHAPES = sizeof(FACESHIFT_BLENDSHAPES) / sizeof(char*);
 
 const int EYE_BLINK_L_INDEX = 0;