Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-04-07 10:02:24 +02:00)
ARKit Blendshape hookup work
This commit is contained in: commit 389f5a1d33 (parent 6efd74a339)
17 changed files with 507 additions and 204 deletions
@@ -166,10 +166,70 @@
    { "from": "Standard.LeftEye", "to": "Actions.LeftEye" },
    { "from": "Standard.RightEye", "to": "Actions.RightEye" },

    // AJT: blendshapes
    { "from": "Standard.LeftEyeBlink", "to": "Actions.LeftEyeBlink" },
    { "from": "Standard.RightEyeBlink", "to": "Actions.RightEyeBlink" },

    { "from": "Standard.EyeBlink_L", "to": "Actions.EyeBlink_L" },
    { "from": "Standard.EyeBlink_R", "to": "Actions.EyeBlink_R" },
    { "from": "Standard.EyeSquint_L", "to": "Actions.EyeSquint_L" },
    { "from": "Standard.EyeSquint_R", "to": "Actions.EyeSquint_R" },
    { "from": "Standard.EyeDown_L", "to": "Actions.EyeDown_L" },
    { "from": "Standard.EyeDown_R", "to": "Actions.EyeDown_R" },
    { "from": "Standard.EyeIn_L", "to": "Actions.EyeIn_L" },
    { "from": "Standard.EyeIn_R", "to": "Actions.EyeIn_R" },
    { "from": "Standard.EyeOpen_L", "to": "Actions.EyeOpen_L" },
    { "from": "Standard.EyeOpen_R", "to": "Actions.EyeOpen_R" },
    { "from": "Standard.EyeOut_L", "to": "Actions.EyeOut_L" },
    { "from": "Standard.EyeOut_R", "to": "Actions.EyeOut_R" },
    { "from": "Standard.EyeUp_L", "to": "Actions.EyeUp_L" },
    { "from": "Standard.EyeUp_R", "to": "Actions.EyeUp_R" },
    { "from": "Standard.BrowsD_L", "to": "Actions.BrowsD_L" },
    { "from": "Standard.BrowsD_R", "to": "Actions.BrowsD_R" },
    { "from": "Standard.BrowsU_C", "to": "Actions.BrowsU_C" },
    { "from": "Standard.BrowsU_L", "to": "Actions.BrowsU_L" },
    { "from": "Standard.BrowsU_R", "to": "Actions.BrowsU_R" },
    { "from": "Standard.JawFwd", "to": "Actions.JawFwd" },
    { "from": "Standard.JawLeft", "to": "Actions.JawLeft" },
    { "from": "Standard.JawOpen", "to": "Actions.JawOpen" },
    { "from": "Standard.JawRight", "to": "Actions.JawRight" },
    { "from": "Standard.MouthLeft", "to": "Actions.MouthLeft" },
    { "from": "Standard.MouthRight", "to": "Actions.MouthRight" },
    { "from": "Standard.MouthFrown_L", "to": "Actions.MouthFrown_L" },
    { "from": "Standard.MouthFrown_R", "to": "Actions.MouthFrown_R" },
    { "from": "Standard.MouthSmile_L", "to": "Actions.MouthSmile_L" },
    { "from": "Standard.MouthSmile_R", "to": "Actions.MouthSmile_R" },
    { "from": "Standard.MouthDimple_L", "to": "Actions.MouthDimple_L" },
    { "from": "Standard.MouthDimple_R", "to": "Actions.MouthDimple_R" },
    { "from": "Standard.LipsStretch_L", "to": "Actions.LipsStretch_L" },
    { "from": "Standard.LipsStretch_R", "to": "Actions.LipsStretch_R" },
    { "from": "Standard.LipsUpperClose", "to": "Actions.LipsUpperClose" },
    { "from": "Standard.LipsLowerClose", "to": "Actions.LipsLowerClose" },
    { "from": "Standard.LipsUpperOpen", "to": "Actions.LipsUpperOpen" },
    { "from": "Standard.LipsLowerOpen", "to": "Actions.LipsLowerOpen" },
    { "from": "Standard.LipsFunnel", "to": "Actions.LipsFunnel" },
    { "from": "Standard.LipsPucker", "to": "Actions.LipsPucker" },
    { "from": "Standard.Puff", "to": "Actions.Puff" },
    { "from": "Standard.CheekSquint_L", "to": "Actions.CheekSquint_L" },
    { "from": "Standard.CheekSquint_R", "to": "Actions.CheekSquint_R" },
    { "from": "Standard.LipsTogether", "to": "Actions.LipsTogether" },
    { "from": "Standard.MouthUpperUp_L", "to": "Actions.MouthUpperUp_L" },
    { "from": "Standard.MouthUpperUp_R", "to": "Actions.MouthUpperUp_R" },
    { "from": "Standard.MouthLowerDown_L", "to": "Actions.MouthLowerDown_L" },
    { "from": "Standard.MouthLowerDown_R", "to": "Actions.MouthLowerDown_R" },
    { "from": "Standard.MouthPress_L", "to": "Actions.MouthPress_L" },
    { "from": "Standard.MouthPress_R", "to": "Actions.MouthPress_R" },
    { "from": "Standard.MouthShrugLower", "to": "Actions.MouthShrugLower" },
    { "from": "Standard.MouthShrugUpper", "to": "Actions.MouthShrugUpper" },
    { "from": "Standard.NoseSneer_L", "to": "Actions.NoseSneer_L" },
    { "from": "Standard.NoseSneer_R", "to": "Actions.NoseSneer_R" },
    { "from": "Standard.TongueOut", "to": "Actions.TongueOut" },
    { "from": "Standard.UserBlendshape0", "to": "Actions.UserBlendshape0" },
    { "from": "Standard.UserBlendshape1", "to": "Actions.UserBlendshape1" },
    { "from": "Standard.UserBlendshape2", "to": "Actions.UserBlendshape2" },
    { "from": "Standard.UserBlendshape3", "to": "Actions.UserBlendshape3" },
    { "from": "Standard.UserBlendshape4", "to": "Actions.UserBlendshape4" },
    { "from": "Standard.UserBlendshape5", "to": "Actions.UserBlendshape5" },
    { "from": "Standard.UserBlendshape6", "to": "Actions.UserBlendshape6" },
    { "from": "Standard.UserBlendshape7", "to": "Actions.UserBlendshape7" },
    { "from": "Standard.UserBlendshape8", "to": "Actions.UserBlendshape8" },
    { "from": "Standard.UserBlendshape9", "to": "Actions.UserBlendshape9" },

    { "from": "Standard.TrackedObject00", "to" : "Actions.TrackedObject00" },
    { "from": "Standard.TrackedObject01", "to" : "Actions.TrackedObject01" },

@@ -61,9 +61,70 @@
    { "from": "Standard.LeftEye", "to": "Actions.LeftEye" },
    { "from": "Standard.RightEye", "to": "Actions.RightEye" },

    // AJT: blendshapes
    { "from": "Standard.LeftEyeBlink", "to": "Actions.LeftEyeBlink" },
    { "from": "Standard.RightEyeBlink", "to": "Actions.RightEyeBlink" },
    { "from": "Standard.EyeBlink_L", "to": "Actions.EyeBlink_L" },
    { "from": "Standard.EyeBlink_R", "to": "Actions.EyeBlink_R" },
    { "from": "Standard.EyeSquint_L", "to": "Actions.EyeSquint_L" },
    { "from": "Standard.EyeSquint_R", "to": "Actions.EyeSquint_R" },
    { "from": "Standard.EyeDown_L", "to": "Actions.EyeDown_L" },
    { "from": "Standard.EyeDown_R", "to": "Actions.EyeDown_R" },
    { "from": "Standard.EyeIn_L", "to": "Actions.EyeIn_L" },
    { "from": "Standard.EyeIn_R", "to": "Actions.EyeIn_R" },
    { "from": "Standard.EyeOpen_L", "to": "Actions.EyeOpen_L" },
    { "from": "Standard.EyeOpen_R", "to": "Actions.EyeOpen_R" },
    { "from": "Standard.EyeOut_L", "to": "Actions.EyeOut_L" },
    { "from": "Standard.EyeOut_R", "to": "Actions.EyeOut_R" },
    { "from": "Standard.EyeUp_L", "to": "Actions.EyeUp_L" },
    { "from": "Standard.EyeUp_R", "to": "Actions.EyeUp_R" },
    { "from": "Standard.BrowsD_L", "to": "Actions.BrowsD_L" },
    { "from": "Standard.BrowsD_R", "to": "Actions.BrowsD_R" },
    { "from": "Standard.BrowsU_C", "to": "Actions.BrowsU_C" },
    { "from": "Standard.BrowsU_L", "to": "Actions.BrowsU_L" },
    { "from": "Standard.BrowsU_R", "to": "Actions.BrowsU_R" },
    { "from": "Standard.JawFwd", "to": "Actions.JawFwd" },
    { "from": "Standard.JawLeft", "to": "Actions.JawLeft" },
    { "from": "Standard.JawOpen", "to": "Actions.JawOpen" },
    { "from": "Standard.JawRight", "to": "Actions.JawRight" },
    { "from": "Standard.MouthLeft", "to": "Actions.MouthLeft" },
    { "from": "Standard.MouthRight", "to": "Actions.MouthRight" },
    { "from": "Standard.MouthFrown_L", "to": "Actions.MouthFrown_L" },
    { "from": "Standard.MouthFrown_R", "to": "Actions.MouthFrown_R" },
    { "from": "Standard.MouthSmile_L", "to": "Actions.MouthSmile_L" },
    { "from": "Standard.MouthSmile_R", "to": "Actions.MouthSmile_R" },
    { "from": "Standard.MouthDimple_L", "to": "Actions.MouthDimple_L" },
    { "from": "Standard.MouthDimple_R", "to": "Actions.MouthDimple_R" },
    { "from": "Standard.LipsStretch_L", "to": "Actions.LipsStretch_L" },
    { "from": "Standard.LipsStretch_R", "to": "Actions.LipsStretch_R" },
    { "from": "Standard.LipsUpperClose", "to": "Actions.LipsUpperClose" },
    { "from": "Standard.LipsLowerClose", "to": "Actions.LipsLowerClose" },
    { "from": "Standard.LipsUpperOpen", "to": "Actions.LipsUpperOpen" },
    { "from": "Standard.LipsLowerOpen", "to": "Actions.LipsLowerOpen" },
    { "from": "Standard.LipsFunnel", "to": "Actions.LipsFunnel" },
    { "from": "Standard.LipsPucker", "to": "Actions.LipsPucker" },
    { "from": "Standard.Puff", "to": "Actions.Puff" },
    { "from": "Standard.CheekSquint_L", "to": "Actions.CheekSquint_L" },
    { "from": "Standard.CheekSquint_R", "to": "Actions.CheekSquint_R" },
    { "from": "Standard.LipsTogether", "to": "Actions.LipsTogether" },
    { "from": "Standard.MouthUpperUp_L", "to": "Actions.MouthUpperUp_L" },
    { "from": "Standard.MouthUpperUp_R", "to": "Actions.MouthUpperUp_R" },
    { "from": "Standard.MouthLowerDown_L", "to": "Actions.MouthLowerDown_L" },
    { "from": "Standard.MouthLowerDown_R", "to": "Actions.MouthLowerDown_R" },
    { "from": "Standard.MouthPress_L", "to": "Actions.MouthPress_L" },
    { "from": "Standard.MouthPress_R", "to": "Actions.MouthPress_R" },
    { "from": "Standard.MouthShrugLower", "to": "Actions.MouthShrugLower" },
    { "from": "Standard.MouthShrugUpper", "to": "Actions.MouthShrugUpper" },
    { "from": "Standard.NoseSneer_L", "to": "Actions.NoseSneer_L" },
    { "from": "Standard.NoseSneer_R", "to": "Actions.NoseSneer_R" },
    { "from": "Standard.TongueOut", "to": "Actions.TongueOut" },
    { "from": "Standard.UserBlendshape0", "to": "Actions.UserBlendshape0" },
    { "from": "Standard.UserBlendshape1", "to": "Actions.UserBlendshape1" },
    { "from": "Standard.UserBlendshape2", "to": "Actions.UserBlendshape2" },
    { "from": "Standard.UserBlendshape3", "to": "Actions.UserBlendshape3" },
    { "from": "Standard.UserBlendshape4", "to": "Actions.UserBlendshape4" },
    { "from": "Standard.UserBlendshape5", "to": "Actions.UserBlendshape5" },
    { "from": "Standard.UserBlendshape6", "to": "Actions.UserBlendshape6" },
    { "from": "Standard.UserBlendshape7", "to": "Actions.UserBlendshape7" },
    { "from": "Standard.UserBlendshape8", "to": "Actions.UserBlendshape8" },
    { "from": "Standard.UserBlendshape9", "to": "Actions.UserBlendshape9" },

    { "from": "Standard.TrackedObject00", "to" : "Actions.TrackedObject00" },
    { "from": "Standard.TrackedObject01", "to" : "Actions.TrackedObject01" },
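These routes simply forward every Standard blendshape channel to the action of the same name. As a rough sketch (not part of this commit; the real consumer is MyHead::simulate() further down in this diff), engine code can then poll one of the routed actions through the UserInputMapper, reading it only while an input plugin is actually supplying data:

    // Illustrative consumer of a routed blendshape action. getActionStateValid()
    // and getActionState() are the same calls this commit uses in MyHead.cpp;
    // the surrounding snippet and variable names are assumptions for the sketch.
    auto userInputMapper = DependencyManager::get<UserInputMapper>();
    if (userInputMapper->getActionStateValid(controller::Action::EYEBLINK_L)) {
        // Coefficient follows the convention used in Head.cpp: 0.0f = fully open, 1.0f = fully closed.
        float leftBlink = userInputMapper->getActionState(controller::Action::EYEBLINK_L);
        // ... copy leftBlink into the avatar's EyeBlink_L blendshape coefficient ...
    }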
@@ -99,9 +99,8 @@
    { "from": "Vive.LeftEye", "to" : "Standard.LeftEye" },
    { "from": "Vive.RightEye", "to" : "Standard.RightEye" },

    // AJT: blendshapes (only keep blink)
    { "from": "Vive.LeftEyeBlink", "to" : "Standard.LeftEyeBlink" },
    { "from": "Vive.RightEyeBlink", "to" : "Standard.RightEyeBlink" },
    { "from": "Vive.EyeBlink_L", "to" : "Standard.EyeBlink_L" },
    { "from": "Vive.EyeBlink_R", "to" : "Standard.EyeBlink_R" },

    {
        "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot",
@@ -780,18 +780,6 @@ void MyAvatar::update(float deltaTime) {
    emit energyChanged(currentEnergy);

    updateEyeContactTarget(deltaTime);

    // if we're getting eye rotations from a tracker, disable observer-side procedural eye motions
    auto userInputMapper = DependencyManager::get<UserInputMapper>();
    bool eyesTracked =
        userInputMapper->getPoseState(controller::Action::LEFT_EYE).valid &&
        userInputMapper->getPoseState(controller::Action::RIGHT_EYE).valid;

    int leftEyeJointIndex = getJointIndex("LeftEye");
    int rightEyeJointIndex = getJointIndex("RightEye");
    bool eyesAreOverridden = getIsJointOverridden(leftEyeJointIndex) || getIsJointOverridden(rightEyeJointIndex);

    _headData->setHasProceduralEyeMovement(!(eyesTracked || eyesAreOverridden));
}

void MyAvatar::updateEyeContactTarget(float deltaTime) {
@@ -21,6 +21,73 @@
using namespace std;

static controller::Action blendshapeActions[] = {
    controller::Action::EYEBLINK_L,
    controller::Action::EYEBLINK_R,
    controller::Action::EYESQUINT_L,
    controller::Action::EYESQUINT_R,
    controller::Action::EYEDOWN_L,
    controller::Action::EYEDOWN_R,
    controller::Action::EYEIN_L,
    controller::Action::EYEIN_R,
    controller::Action::EYEOPEN_L,
    controller::Action::EYEOPEN_R,
    controller::Action::EYEOUT_L,
    controller::Action::EYEOUT_R,
    controller::Action::EYEUP_L,
    controller::Action::EYEUP_R,
    controller::Action::BROWSD_L,
    controller::Action::BROWSD_R,
    controller::Action::BROWSU_C,
    controller::Action::BROWSU_L,
    controller::Action::BROWSU_R,
    controller::Action::JAWFWD,
    controller::Action::JAWLEFT,
    controller::Action::JAWOPEN,
    controller::Action::JAWRIGHT,
    controller::Action::MOUTHLEFT,
    controller::Action::MOUTHRIGHT,
    controller::Action::MOUTHFROWN_L,
    controller::Action::MOUTHFROWN_R,
    controller::Action::MOUTHSMILE_L,
    controller::Action::MOUTHSMILE_R,
    controller::Action::MOUTHDIMPLE_L,
    controller::Action::MOUTHDIMPLE_R,
    controller::Action::LIPSSTRETCH_L,
    controller::Action::LIPSSTRETCH_R,
    controller::Action::LIPSUPPERCLOSE,
    controller::Action::LIPSLOWERCLOSE,
    controller::Action::LIPSUPPEROPEN,
    controller::Action::LIPSLOWEROPEN,
    controller::Action::LIPSFUNNEL,
    controller::Action::LIPSPUCKER,
    controller::Action::PUFF,
    controller::Action::CHEEKSQUINT_L,
    controller::Action::CHEEKSQUINT_R,
    controller::Action::LIPSTOGETHER,
    controller::Action::MOUTHUPPERUP_L,
    controller::Action::MOUTHUPPERUP_R,
    controller::Action::MOUTHLOWERDOWN_L,
    controller::Action::MOUTHLOWERDOWN_R,
    controller::Action::MOUTHPRESS_L,
    controller::Action::MOUTHPRESS_R,
    controller::Action::MOUTHSHRUGLOWER,
    controller::Action::MOUTHSHRUGUPPER,
    controller::Action::NOSESNEER_L,
    controller::Action::NOSESNEER_R,
    controller::Action::TONGUEOUT,
    controller::Action::USERBLENDSHAPE0,
    controller::Action::USERBLENDSHAPE1,
    controller::Action::USERBLENDSHAPE2,
    controller::Action::USERBLENDSHAPE3,
    controller::Action::USERBLENDSHAPE4,
    controller::Action::USERBLENDSHAPE5,
    controller::Action::USERBLENDSHAPE6,
    controller::Action::USERBLENDSHAPE7,
    controller::Action::USERBLENDSHAPE8,
    controller::Action::USERBLENDSHAPE9
};

MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
}

@@ -44,40 +111,57 @@ void MyHead::simulate(float deltaTime) {
    auto player = DependencyManager::get<recording::Deck>();
    // Only use face trackers when not playing back a recording.
    if (!player->isPlaying()) {
        // TODO -- finish removing face-tracker specific code. To do this, add input channels for
        // each blendshape-coefficient and update the various json files to relay them in a useful way.
        // After that, input plugins can be used to drive the avatar's face, and the various "DDE" files
        // can be ported into the plugin and removed.
        //
        // auto faceTracker = qApp->getActiveFaceTracker();
        // const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
        // _isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
        // if (_isFaceTrackerConnected) {
        //     if (hasActualFaceTrackerConnected) {
        //         _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
        //     }
        // }

        // AJT: blendshapes

        auto userInputMapper = DependencyManager::get<UserInputMapper>();

        // if input system has control over blink blendshapes
        bool eyeLidsTracked =
            userInputMapper->getActionStateValid(controller::Action::LEFT_EYE_BLINK) &&
            userInputMapper->getActionStateValid(controller::Action::RIGHT_EYE_BLINK);
            userInputMapper->getActionStateValid(controller::Action::EYEBLINK_L) ||
            userInputMapper->getActionStateValid(controller::Action::EYEBLINK_R);

        setHasScriptedBlendshapes(eyeLidsTracked);
        // if input system has control over the brows.
        bool browsTracked =
            userInputMapper->getActionStateValid(controller::Action::BROWSD_L) ||
            userInputMapper->getActionStateValid(controller::Action::BROWSD_R) ||
            userInputMapper->getActionStateValid(controller::Action::BROWSU_L) ||
            userInputMapper->getActionStateValid(controller::Action::BROWSU_R) ||
            userInputMapper->getActionStateValid(controller::Action::BROWSU_C);

        if (eyeLidsTracked) {
            float leftEyeBlink = userInputMapper->getActionState(controller::Action::LEFT_EYE_BLINK);
            float rightEyeBlink = userInputMapper->getActionState(controller::Action::RIGHT_EYE_BLINK);
            _blendshapeCoefficients.resize(std::max(_blendshapeCoefficients.size(), 2));
            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_L] = leftEyeBlink;
            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_R] = rightEyeBlink;
        } else {
            const float FULLY_OPEN = 0.0f;
            _blendshapeCoefficients.resize(std::max(_blendshapeCoefficients.size(), 2));
            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_L] = FULLY_OPEN;
            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_R] = FULLY_OPEN;
        // if input system has control of mouth
        bool mouthTracked =
            userInputMapper->getActionStateValid(controller::Action::JAWOPEN) ||
            userInputMapper->getActionStateValid(controller::Action::LIPSUPPERCLOSE) ||
            userInputMapper->getActionStateValid(controller::Action::LIPSLOWERCLOSE) ||
            userInputMapper->getActionStateValid(controller::Action::LIPSFUNNEL) ||
            userInputMapper->getActionStateValid(controller::Action::MOUTHSMILE_L) ||
            userInputMapper->getActionStateValid(controller::Action::MOUTHSMILE_R);

        bool eyesTracked =
            userInputMapper->getPoseState(controller::Action::LEFT_EYE).valid &&
            userInputMapper->getPoseState(controller::Action::RIGHT_EYE).valid;

        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
        int leftEyeJointIndex = myAvatar->getJointIndex("LeftEye");
        int rightEyeJointIndex = myAvatar->getJointIndex("RightEye");
        bool eyeJointsOverridden = myAvatar->getIsJointOverridden(leftEyeJointIndex) || myAvatar->getIsJointOverridden(rightEyeJointIndex);

        bool anyInputTracked = false;
        for (int i = 0; i < (int)Blendshapes::BlendshapeCount; i++) {
            anyInputTracked = anyInputTracked || userInputMapper->getActionStateValid(blendshapeActions[i]);
        }

        setHasInputDrivenBlendshapes(anyInputTracked);

        // suppress any procedural blendshape animation if they overlap with driven input.
        setSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, eyeLidsTracked);
        setSuppressProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation, eyeLidsTracked || browsTracked);
        setSuppressProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation, mouthTracked);
        setSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation, eyesTracked || eyeJointsOverridden);

        if (anyInputTracked) {
            for (int i = 0; i < (int)Blendshapes::BlendshapeCount; i++) {
                _blendshapeCoefficients[i] = userInputMapper->getActionState(blendshapeActions[i]);
            }
        }
    }
    Parent::simulate(deltaTime);
@@ -114,6 +114,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    Head* head = _owningAvatar->getHead();

    // AJT: blendshapes TODO: RE-enable this and avoid duplication with
    // SkeletonModel::updateRig()
    /*
    bool eyePosesValid = !head->getHasProceduralEyeMovement();
    glm::vec3 lookAt;
    if (eyePosesValid) {

@@ -121,6 +124,8 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    } else {
        lookAt = avoidCrossedEyes(head->getLookAtPosition());
    }
    */
    glm::vec3 lookAt = avoidCrossedEyes(head->getLookAtPosition());

    MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
    assert(myAvatar);
@@ -72,7 +72,8 @@ void Head::simulate(float deltaTime) {
        _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
    }

    if (getHasProceduralEyeMovement()) {
    if (getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) &&
        !getSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation)) {
        // Update eye saccades
        const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
        const float AVERAGE_SACCADE_INTERVAL = 6.0f;

@@ -95,7 +96,8 @@ void Head::simulate(float deltaTime) {
    const float BLINK_START_VARIABILITY = 0.25f;
    const float FULLY_OPEN = 0.0f;
    const float FULLY_CLOSED = 1.0f;
    if (getHasProceduralBlinkFaceMovement()) {
    if (getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) &&
        !getSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation)) {
        // handle automatic blinks
        // Detect transition from talking to not; force blink after that and a delay
        bool forceBlink = false;

@@ -146,7 +148,8 @@ void Head::simulate(float deltaTime) {
    }

    // use data to update fake Faceshift blendshape coefficients
    if (getHasAudioEnabledFaceMovement()) {
    if (getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation) &&
        !getSuppressProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation)) {
        // Update audio attack data for facial animation (eyebrows and mouth)
        float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
        _audioAttack = audioAttackAveragingRate * _audioAttack +

@@ -178,7 +181,8 @@ void Head::simulate(float deltaTime) {
        _mouth4,
        _transientBlendshapeCoefficients);

    if (getHasProceduralEyeFaceMovement()) {
    if (getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation) &&
        !getSuppressProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation)) {
        // This controls two things, the eye brow and the upper eye lid, it is driven by the vertical up/down angle of the
        // eyes relative to the head. This is to try to help prevent sleepy eyes/crazy eyes.
        applyEyelidOffset(getOrientation());
@@ -111,6 +111,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    Head* head = _owningAvatar->getHead();

    // AJT: blendshapes TODO: RE-enable this. but move into rig?
    /*
    bool eyePosesValid = !head->getHasProceduralEyeMovement();
    glm::vec3 lookAt;
    if (eyePosesValid) {

@@ -118,6 +120,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
    } else {
        lookAt = avoidCrossedEyes(head->getCorrectedLookAtPosition());
    }
    */
    glm::vec3 lookAt = avoidCrossedEyes(head->getCorrectedLookAtPosition());

    // no need to call Model::updateRig() because otherAvatars get their joint state
    // copied directly from AvatarData::_jointData (there are no Rig animations to blend)
@@ -110,7 +110,6 @@ AvatarData::AvatarData() :
    _targetScale(1.0f),
    _handState(0),
    _keyState(NO_KEY_DOWN),
    _hasScriptedBlendshapes(false),
    _headData(NULL),
    _errorLogExpiry(0),
    _owningAvatarMixer(),

@@ -156,7 +155,7 @@ float AvatarData::getDomainLimitedScale() const {

void AvatarData::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
    if (hasScriptedBlendshapes == _hasScriptedBlendshapes) {
    if (hasScriptedBlendshapes == _headData->getHasScriptedBlendshapes()) {
        return;
    }
    if (!hasScriptedBlendshapes) {
@@ -165,19 +164,35 @@ void AvatarData::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
        // before sending the update, or else it won't send the neutral blendshapes to the receiving clients
        sendAvatarDataPacket(true);
    }
    _hasScriptedBlendshapes = hasScriptedBlendshapes;
    _headData->setHasScriptedBlendshapes(hasScriptedBlendshapes);
}

void AvatarData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
    _headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
bool AvatarData::getHasScriptedBlendshapes() const {
    return _headData->getHasScriptedBlendshapes();
}

void AvatarData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
    _headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
void AvatarData::setHasProceduralBlinkFaceMovement(bool value) {
    _headData->setProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, value);
}

void AvatarData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
    _headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
bool AvatarData::getHasProceduralBlinkFaceMovement() const {
    return _headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation);
}

void AvatarData::setHasProceduralEyeFaceMovement(bool value) {
    _headData->setProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation, value);
}

bool AvatarData::getHasProceduralEyeFaceMovement() const {
    return _headData->getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation);
}

void AvatarData::setHasAudioEnabledFaceMovement(bool value) {
    _headData->setProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation, value);
}

bool AvatarData::getHasAudioEnabledFaceMovement() const {
    return _headData->getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation);
}

void AvatarData::setDomainMinimumHeight(float domainMinimumHeight) {
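With this change the script-facing AvatarData setters no longer toggle dedicated booleans on HeadData; each one now just writes the matching ProceduralAnimationType flag. A tiny illustrative sketch of that equivalence (avatarData and headData are hypothetical pointers, not code from this commit):

    // Disabling procedural blinking through the AvatarData API...
    avatarData->setHasProceduralBlinkFaceMovement(false);
    // ...now has the same effect as clearing the corresponding HeadData flag directly:
    headData->setProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, false);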
@@ -232,9 +247,6 @@ void AvatarData::lazyInitHeadData() const {
    if (!_headData) {
        _headData = new HeadData(const_cast<AvatarData*>(this));
    }
    if (_hasScriptedBlendshapes) {
        _headData->_hasScriptedBlendshapes = true;
    }
}
@@ -555,27 +567,31 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        setAtBit16(flags, HAND_STATE_FINGER_POINTING_BIT);
    }
    // face tracker state
    if (_headData->_hasScriptedBlendshapes) {
    if (_headData->_hasScriptedBlendshapes || _headData->_hasInputDrivenBlendshapes) {
        setAtBit16(flags, HAS_SCRIPTED_BLENDSHAPES);
    }
    // eye tracker state
    if (!_headData->_hasProceduralEyeMovement) {
        setAtBit16(flags, IS_EYE_TRACKER_CONNECTED);
    if (_headData->getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) &&
        !_headData->getSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation)) {
        setAtBit16(flags, HAS_PROCEDURAL_EYE_MOVEMENT);
    }
    // referential state
    if (!parentID.isNull()) {
        setAtBit16(flags, HAS_REFERENTIAL);
    }
    // audio face movement
    if (_headData->getHasAudioEnabledFaceMovement()) {
    if (_headData->getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation) &&
        !_headData->getSuppressProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation)) {
        setAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);
    }
    // procedural eye face movement
    if (_headData->getHasProceduralEyeFaceMovement()) {
    if (_headData->getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation) &&
        !_headData->getSuppressProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation)) {
        setAtBit16(flags, PROCEDURAL_EYE_FACE_MOVEMENT);
    }
    // procedural blink face movement
    if (_headData->getHasProceduralBlinkFaceMovement()) {
    if (_headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) &&
        !_headData->getSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation)) {
        setAtBit16(flags, PROCEDURAL_BLINK_FACE_MOVEMENT);
    }
    // avatar collisions enabled
@@ -1177,21 +1193,22 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
        + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);

    auto newHasScriptedBlendshapes = oneAtBit16(bitItems, HAS_SCRIPTED_BLENDSHAPES);
    auto newHasntProceduralEyeMovement = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);

    auto newHasProceduralEyeMovement = oneAtBit16(bitItems, HAS_PROCEDURAL_EYE_MOVEMENT);
    auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
    auto newHasProceduralEyeFaceMovement = oneAtBit16(bitItems, PROCEDURAL_EYE_FACE_MOVEMENT);
    auto newHasProceduralBlinkFaceMovement = oneAtBit16(bitItems, PROCEDURAL_BLINK_FACE_MOVEMENT);

    auto newCollideWithOtherAvatars = oneAtBit16(bitItems, COLLIDE_WITH_OTHER_AVATARS);
    auto newHasPriority = oneAtBit16(bitItems, HAS_HERO_PRIORITY);

    bool keyStateChanged = (_keyState != newKeyState);
    bool handStateChanged = (_handState != newHandState);
    bool faceStateChanged = (_headData->_hasScriptedBlendshapes != newHasScriptedBlendshapes);
    bool eyeStateChanged = (_headData->_hasProceduralEyeMovement == newHasntProceduralEyeMovement);
    bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
    bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
    bool proceduralBlinkFaceMovementChanged = (_headData->getHasProceduralBlinkFaceMovement() != newHasProceduralBlinkFaceMovement);
    bool faceStateChanged = (_headData->getHasScriptedBlendshapes() != newHasScriptedBlendshapes);

    bool eyeStateChanged = (_headData->getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) != newHasProceduralEyeMovement);
    bool audioEnableFaceMovementChanged = (_headData->getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation) != newHasAudioEnabledFaceMovement);
    bool proceduralEyeFaceMovementChanged = (_headData->getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation) != newHasProceduralEyeFaceMovement);
    bool proceduralBlinkFaceMovementChanged = (_headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) != newHasProceduralBlinkFaceMovement);
    bool collideWithOtherAvatarsChanged = (_collideWithOtherAvatars != newCollideWithOtherAvatars);
    bool hasPriorityChanged = (getHasPriority() != newHasPriority);
    bool somethingChanged = keyStateChanged || handStateChanged || faceStateChanged || eyeStateChanged || audioEnableFaceMovementChanged ||

@@ -1200,11 +1217,11 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    _keyState = newKeyState;
    _handState = newHandState;
    _headData->_hasScriptedBlendshapes = newHasScriptedBlendshapes;
    _headData->setHasProceduralEyeMovement(!newHasntProceduralEyeMovement);
    _headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
    _headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
    _headData->setHasProceduralBlinkFaceMovement(newHasProceduralBlinkFaceMovement);
    _headData->setHasScriptedBlendshapes(newHasScriptedBlendshapes);
    _headData->setProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation, newHasProceduralEyeMovement);
    _headData->setProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation, newHasAudioEnabledFaceMovement);
    _headData->setProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation, newHasProceduralEyeFaceMovement);
    _headData->setProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, newHasProceduralBlinkFaceMovement);
    _collideWithOtherAvatars = newCollideWithOtherAvatars;
    setHasPriorityWithoutTimestampReset(newHasPriority);

@@ -1289,7 +1306,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
    sourceBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);

    PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize);
    _headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy!
    _headData->_blendshapeCoefficients.resize(std::min(numCoefficients, (int)Blendshapes::BlendshapeCount)); // make sure there's room for the copy!
    //only copy the blendshapes to headData, not the procedural face info
    memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize);
    sourceBuffer += coefficientsSize;
@@ -107,7 +107,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits (UNUSED)
const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits (UNUSED)
const int HAS_SCRIPTED_BLENDSHAPES = 4; // 5th bit
const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
const int HAS_PROCEDURAL_EYE_MOVEMENT = 5; // 6th bit
const int HAS_REFERENTIAL = 6; // 7th bit
const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit (UNUSED)
const int AUDIO_ENABLED_FACE_MOVEMENT = 8; // 9th bit
@@ -703,13 +703,13 @@ public:
    float getDomainLimitedScale() const;

    void setHasScriptedBlendshapes(bool hasScriptedBlendshapes);
    bool getHasScriptedBlendshapes() const { return _hasScriptedBlendshapes; }
    bool getHasScriptedBlendshapes() const;
    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
    bool getHasProceduralBlinkFaceMovement() const { return _headData->getHasProceduralBlinkFaceMovement(); }
    bool getHasProceduralBlinkFaceMovement() const;
    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
    bool getHasProceduralEyeFaceMovement() const { return _headData->getHasProceduralEyeFaceMovement(); }
    bool getHasProceduralEyeFaceMovement() const;
    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
    bool getHasAudioEnabledFaceMovement() const { return _headData->getHasAudioEnabledFaceMovement(); }
    bool getHasAudioEnabledFaceMovement() const;

    /**jsdoc
     * Gets the minimum scale allowed for this avatar in the current domain.

@@ -1716,7 +1716,6 @@ protected:
    // key state
    KeyState _keyState;

    bool _hasScriptedBlendshapes;
    bool _hasNewJointData { true }; // set in AvatarData, cleared in Avatar

    mutable HeadData* _headData { nullptr };
@@ -27,11 +27,10 @@ HeadData::HeadData(AvatarData* owningAvatar) :
    _basePitch(0.0f),
    _baseRoll(0.0f),
    _lookAtPosition(0.0f, 0.0f, 0.0f),
    _blendshapeCoefficients(QVector<float>(0, 0.0f)),
    _transientBlendshapeCoefficients(QVector<float>(0, 0.0f)),
    _summedBlendshapeCoefficients(QVector<float>(0, 0.0f)),
    _owningAvatar(owningAvatar)
{
    _userProceduralAnimationFlags.assign((size_t)ProceduralAnimaitonTypeCount, true);
    _suppressProceduralAnimationFlags.assign((size_t)ProceduralAnimaitonTypeCount, false);
    computeBlendshapesLookupMap();
}

@@ -102,7 +101,7 @@ const QVector<float>& HeadData::getSummedBlendshapeCoefficients() {

void HeadData::setBlendshape(QString name, float val) {

    //Check to see if the named blendshape exists, and then set its value if it does
    // Check to see if the named blendshape exists, and then set its value if it does
    auto it = _blendshapeLookupMap.find(name);
    if (it != _blendshapeLookupMap.end()) {
        if (_blendshapeCoefficients.size() <= it.value()) {
@@ -112,6 +111,19 @@ void HeadData::setBlendshape(QString name, float val) {
            _transientBlendshapeCoefficients.resize(it.value() + 1);
        }
        _blendshapeCoefficients[it.value()] = val;
    } else {
        // check to see if this is a legacy blendshape that is present in
        // ARKit blendshapes but is split, i.e. has left and right halves.
        if (name == "LipsUpperUp") {
            _blendshapeCoefficients[(int)Blendshapes::MouthUpperUp_L] = val;
            _blendshapeCoefficients[(int)Blendshapes::MouthUpperUp_R] = val;
        } else if (name == "LipsLowerDown") {
            _blendshapeCoefficients[(int)Blendshapes::MouthLowerDown_L] = val;
            _blendshapeCoefficients[(int)Blendshapes::MouthLowerDown_R] = val;
        } else if (name == "Sneer") {
            _blendshapeCoefficients[(int)Blendshapes::NoseSneer_L] = val;
            _blendshapeCoefficients[(int)Blendshapes::NoseSneer_R] = val;
        }
    }
}
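This fallback keeps scripts that still use the old combined names working by fanning a single legacy value out to both ARKit halves. A hypothetical caller, just to show the effect (headData is an illustrative HeadData pointer):

    // "Sneer" is not itself an ARKit blendshape; under the new mapping this
    // single call writes both NoseSneer_L and NoseSneer_R.
    headData->setBlendshape("Sneer", 0.5f);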
@@ -197,38 +209,34 @@ void HeadData::fromJson(const QJsonObject& json) {
    }
}

bool HeadData::getHasProceduralEyeFaceMovement() const {
    return _hasProceduralEyeFaceMovement;
bool HeadData::getProceduralAnimationFlag(ProceduralAnimationType type) const {
    return _userProceduralAnimationFlags[(int)type];
}

void HeadData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
    _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
void HeadData::setProceduralAnimationFlag(ProceduralAnimationType type, bool value) {
    _userProceduralAnimationFlags[(int)type] = value;
}

bool HeadData::getHasProceduralBlinkFaceMovement() const {
    return _hasProceduralBlinkFaceMovement;
bool HeadData::getSuppressProceduralAnimationFlag(ProceduralAnimationType type) const {
    return _suppressProceduralAnimationFlags[(int)type];
}

void HeadData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
    _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
void HeadData::setSuppressProceduralAnimationFlag(ProceduralAnimationType type, bool value) {
    _suppressProceduralAnimationFlags[(int)type] = value;
}

bool HeadData::getHasAudioEnabledFaceMovement() const {
    return _hasAudioEnabledFaceMovement;
}

void HeadData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
    _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
}

bool HeadData::getHasProceduralEyeMovement() const {
    return _hasProceduralEyeMovement;
}

void HeadData::setHasProceduralEyeMovement(bool hasProceduralEyeMovement) {
    _hasProceduralEyeMovement = hasProceduralEyeMovement;
bool HeadData::getHasScriptedBlendshapes() const {
    return _hasScriptedBlendshapes;
}

void HeadData::setHasScriptedBlendshapes(bool value) {
    _hasScriptedBlendshapes = value;
}

bool HeadData::getHasInputDrivenBlendshapes() const {
    return _hasInputDrivenBlendshapes;
}

void HeadData::setHasInputDrivenBlendshapes(bool value) {
    _hasInputDrivenBlendshapes = value;
}
@@ -72,17 +72,29 @@ public:
    }
    bool lookAtPositionChangedSince(quint64 time) { return _lookAtPositionChanged >= time; }

    bool getHasProceduralEyeFaceMovement() const;
    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
    bool getHasProceduralBlinkFaceMovement() const;
    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
    bool getHasAudioEnabledFaceMovement() const;
    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
    bool getHasProceduralEyeMovement() const;
    void setHasProceduralEyeMovement(bool hasProceduralEyeMovement);
    enum ProceduralAnimationType {
        AudioProceduralBlendshapeAnimation = 0,
        BlinkProceduralBlendshapeAnimation,
        LidAdjustmentProceduralBlendshapeAnimation,
        SaccadeProceduralEyeJointAnimation,
        ProceduralAnimaitonTypeCount,
    };

    // called by scripts to enable or disable procedural blendshape or eye joint animations.
    bool getProceduralAnimationFlag(ProceduralAnimationType type) const;
    void setProceduralAnimationFlag(ProceduralAnimationType type, bool value);

    // called by C++ to suppress, i.e. temporarily disable, a procedural animation.
    bool getSuppressProceduralAnimationFlag(ProceduralAnimationType flag) const;
    void setSuppressProceduralAnimationFlag(ProceduralAnimationType flag, bool value);

    // called by scripts to enable/disable manual adjustment of blendshapes
    void setHasScriptedBlendshapes(bool value);
    bool getHasScriptedBlendshapes() const { return _hasScriptedBlendshapes; }
    bool getHasScriptedBlendshapes() const;

    // called by C++ code to denote the presence of manually driven blendshapes.
    void setHasInputDrivenBlendshapes(bool value);
    bool getHasInputDrivenBlendshapes() const;

    friend class AvatarData;
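The split between the script-facing flag and the C++ suppression flag is the heart of the new model: a procedural animation only runs when scripts have left it enabled and nothing is temporarily suppressing it. A minimal sketch of that gating, the same pattern Head::simulate() and AvatarData::toByteArray() use in this commit (headData here is an illustrative HeadData pointer):

    // Illustrative only: run the procedural blink only if the user/script flag
    // is set AND no input system is currently suppressing it.
    bool blinkProcedurally =
        headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) &&
        !headData->getSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation);
    if (blinkProcedurally) {
        // ... drive EyeBlink_L / EyeBlink_R from the timer-based blink code ...
    }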
@@ -98,21 +110,20 @@ protected:
    glm::vec3 _lookAtPosition;
    quint64 _lookAtPositionChanged { 0 };

    bool _hasAudioEnabledFaceMovement { true };
    bool _hasProceduralBlinkFaceMovement { true };
    bool _hasProceduralEyeFaceMovement { true };
    bool _hasProceduralEyeMovement { true };
    std::vector<bool> _userProceduralAnimationFlags;
    std::vector<bool> _suppressProceduralAnimationFlags;

    bool _hasScriptedBlendshapes { false };
    bool _hasInputDrivenBlendshapes { false };

    float _leftEyeBlink { 0.0f };
    float _rightEyeBlink { 0.0f };
    float _averageLoudness { 0.0f };
    float _browAudioLift { 0.0f };

    QVector<float> _blendshapeCoefficients;
    QVector<float> _transientBlendshapeCoefficients;
    QVector<float> _summedBlendshapeCoefficients;
    QVector<float> _blendshapeCoefficients { (int)Blendshapes::BlendshapeCount, 0.0f };
    QVector<float> _transientBlendshapeCoefficients { (int)Blendshapes::BlendshapeCount, 0.0f };
    QVector<float> _summedBlendshapeCoefficients { (int)Blendshapes::BlendshapeCount, 0.0f };
    QMap<QString, int> _blendshapeLookupMap;
    AvatarData* _owningAvatar;
@@ -185,70 +185,70 @@ enum class Action {
    RIGHT_EYE,

    // AJT: blendshapes
    EyeBlink_L,
    EyeBlink_R,
    EyeSquint_L,
    EyeSquint_R,
    EyeDown_L,
    EyeDown_R,
    EyeIn_L,
    EyeIn_R,
    EyeOpen_L,
    EyeOpen_R,
    EyeOut_L,
    EyeOut_R,
    EyeUp_L,
    EyeUp_R,
    BrowsD_L,
    BrowsD_R,
    BrowsU_C,
    BrowsU_L,
    BrowsU_R,
    JawFwd,
    JawLeft,
    JawOpen,
    JawRight,
    MouthLeft,
    MouthRight,
    MouthFrown_L,
    MouthFrown_R,
    MouthSmile_L,
    MouthSmile_R,
    MouthDimple_L,
    MouthDimple_R,
    LipsStretch_L,
    LipsStretch_R,
    LipsUpperClose,
    LipsLowerClose,
    LipsUpperOpen,
    LipsLowerOpen,
    LipsFunnel,
    LipsPucker,
    Puff,
    CheekSquint_L,
    CheekSquint_R,
    LipsTogether,
    MouthUpperUp_L,
    MouthUpperUp_R,
    MouthLowerDown_L,
    MouthLowerDown_R,
    MouthPress_L,
    MouthPress_R,
    MouthShrugLower,
    MouthShrugUpper,
    NoseSneer_L,
    NoseSneer_R,
    TongueOut,
    UserBlendshape0,
    UserBlendshape1,
    UserBlendshape2,
    UserBlendshape3,
    UserBlendshape4,
    UserBlendshape5,
    UserBlendshape6,
    UserBlendshape7,
    UserBlendshape8,
    UserBlendshape9,
    EYEBLINK_L,
    EYEBLINK_R,
    EYESQUINT_L,
    EYESQUINT_R,
    EYEDOWN_L,
    EYEDOWN_R,
    EYEIN_L,
    EYEIN_R,
    EYEOPEN_L,
    EYEOPEN_R,
    EYEOUT_L,
    EYEOUT_R,
    EYEUP_L,
    EYEUP_R,
    BROWSD_L,
    BROWSD_R,
    BROWSU_C,
    BROWSU_L,
    BROWSU_R,
    JAWFWD,
    JAWLEFT,
    JAWOPEN,
    JAWRIGHT,
    MOUTHLEFT,
    MOUTHRIGHT,
    MOUTHFROWN_L,
    MOUTHFROWN_R,
    MOUTHSMILE_L,
    MOUTHSMILE_R,
    MOUTHDIMPLE_L,
    MOUTHDIMPLE_R,
    LIPSSTRETCH_L,
    LIPSSTRETCH_R,
    LIPSUPPERCLOSE,
    LIPSLOWERCLOSE,
    LIPSUPPEROPEN,
    LIPSLOWEROPEN,
    LIPSFUNNEL,
    LIPSPUCKER,
    PUFF,
    CHEEKSQUINT_L,
    CHEEKSQUINT_R,
    LIPSTOGETHER,
    MOUTHUPPERUP_L,
    MOUTHUPPERUP_R,
    MOUTHLOWERDOWN_L,
    MOUTHLOWERDOWN_R,
    MOUTHPRESS_L,
    MOUTHPRESS_R,
    MOUTHSHRUGLOWER,
    MOUTHSHRUGUPPER,
    NOSESNEER_L,
    NOSESNEER_R,
    TONGUEOUT,
    USERBLENDSHAPE0,
    USERBLENDSHAPE1,
    USERBLENDSHAPE2,
    USERBLENDSHAPE3,
    USERBLENDSHAPE4,
    USERBLENDSHAPE5,
    USERBLENDSHAPE6,
    USERBLENDSHAPE7,
    USERBLENDSHAPE8,
    USERBLENDSHAPE9,

    NUM_ACTIONS
};
@@ -92,8 +92,70 @@ namespace controller {
        RIGHT_GRIP,

        // AJT: blendshapes
        LEFT_EYE_BLINK,
        RIGHT_EYE_BLINK,
        EYEBLINK_L,
        EYEBLINK_R,
        EYESQUINT_L,
        EYESQUINT_R,
        EYEDOWN_L,
        EYEDOWN_R,
        EYEIN_L,
        EYEIN_R,
        EYEOPEN_L,
        EYEOPEN_R,
        EYEOUT_L,
        EYEOUT_R,
        EYEUP_L,
        EYEUP_R,
        BROWSD_L,
        BROWSD_R,
        BROWSU_C,
        BROWSU_L,
        BROWSU_R,
        JAWFWD,
        JAWLEFT,
        JAWOPEN,
        JAWRIGHT,
        MOUTHLEFT,
        MOUTHRIGHT,
        MOUTHFROWN_L,
        MOUTHFROWN_R,
        MOUTHSMILE_L,
        MOUTHSMILE_R,
        MOUTHDIMPLE_L,
        MOUTHDIMPLE_R,
        LIPSSTRETCH_L,
        LIPSSTRETCH_R,
        LIPSUPPERCLOSE,
        LIPSLOWERCLOSE,
        LIPSUPPEROPEN,
        LIPSLOWEROPEN,
        LIPSFUNNEL,
        LIPSPUCKER,
        PUFF,
        CHEEKSQUINT_L,
        CHEEKSQUINT_R,
        LIPSTOGETHER,
        MOUTHUPPERUP_L,
        MOUTHUPPERUP_R,
        MOUTHLOWERDOWN_L,
        MOUTHLOWERDOWN_R,
        MOUTHPRESS_L,
        MOUTHPRESS_R,
        MOUTHSHRUGLOWER,
        MOUTHSHRUGUPPER,
        NOSESNEER_L,
        NOSESNEER_R,
        TONGUEOUT,
        USERBLENDSHAPE0,
        USERBLENDSHAPE1,
        USERBLENDSHAPE2,
        USERBLENDSHAPE3,
        USERBLENDSHAPE4,
        USERBLENDSHAPE5,
        USERBLENDSHAPE6,
        USERBLENDSHAPE7,
        USERBLENDSHAPE8,
        USERBLENDSHAPE9,

        NUM_STANDARD_AXES,
        LZ = LT,
@@ -38,10 +38,10 @@ PacketVersion versionForPacketType(PacketType packetType) {
            return static_cast<PacketVersion>(EntityQueryPacketVersion::ConicalFrustums);
        case PacketType::AvatarIdentity:
        case PacketType::AvatarData:
            return static_cast<PacketVersion>(AvatarMixerPacketVersion::SendVerificationFailed);
            return static_cast<PacketVersion>(AvatarMixerPacketVersion::ARKitBlendshapes);
        case PacketType::BulkAvatarData:
        case PacketType::KillAvatar:
            return static_cast<PacketVersion>(AvatarMixerPacketVersion::SendVerificationFailed);
            return static_cast<PacketVersion>(AvatarMixerPacketVersion::ARKitBlendshapes);
        case PacketType::MessagesData:
            return static_cast<PacketVersion>(MessageDataVersion::TextOrBinaryData);
        // ICE packets

@@ -339,7 +339,8 @@ enum class AvatarMixerPacketVersion : PacketVersion {
    SendMaxTranslationDimension,
    FBXJointOrderChange,
    HandControllerSection,
    SendVerificationFailed
    SendVerificationFailed,
    ARKitBlendshapes
};

enum class DomainConnectRequestVersion : PacketVersion {
@@ -91,7 +91,7 @@ enum class LegacyBlendshpaes : int {
    ChinUpperRaise, // not in ARKit
    Sneer, // split in ARKit
    LegacyBlendshapeCount
}
};

// NEW in ARKit
// * LipsTogether