ARKit Blendshape hookup work
This commit is contained in:
parent 6efd74a339
commit 389f5a1d33
17 changed files with 507 additions and 204 deletions
@@ -166,10 +166,70 @@
  { "from": "Standard.LeftEye", "to": "Actions.LeftEye" },
  { "from": "Standard.RightEye", "to": "Actions.RightEye" },

- { "from": "Standard.LeftEyeBlink", "to": "Actions.LeftEyeBlink" },
- { "from": "Standard.RightEyeBlink", "to": "Actions.RightEyeBlink" },
+ // AJT: blendshapes
+ { "from": "Standard.EyeBlink_L", "to": "Actions.EyeBlink_L" },
+ { "from": "Standard.EyeBlink_R", "to": "Actions.EyeBlink_R" },
+ { "from": "Standard.EyeSquint_L", "to": "Actions.EyeSquint_L" },
+ { "from": "Standard.EyeSquint_R", "to": "Actions.EyeSquint_R" },
+ { "from": "Standard.EyeDown_L", "to": "Actions.EyeDown_L" },
+ { "from": "Standard.EyeDown_R", "to": "Actions.EyeDown_R" },
+ { "from": "Standard.EyeIn_L", "to": "Actions.EyeIn_L" },
+ { "from": "Standard.EyeIn_R", "to": "Actions.EyeIn_R" },
+ { "from": "Standard.EyeOpen_L", "to": "Actions.EyeOpen_L" },
+ { "from": "Standard.EyeOpen_R", "to": "Actions.EyeOpen_R" },
+ { "from": "Standard.EyeOut_L", "to": "Actions.EyeOut_L" },
+ { "from": "Standard.EyeOut_R", "to": "Actions.EyeOut_R" },
+ { "from": "Standard.EyeUp_L", "to": "Actions.EyeUp_L" },
+ { "from": "Standard.EyeUp_R", "to": "Actions.EyeUp_R" },
+ { "from": "Standard.BrowsD_L", "to": "Actions.BrowsD_L" },
+ { "from": "Standard.BrowsD_R", "to": "Actions.BrowsD_R" },
+ { "from": "Standard.BrowsU_C", "to": "Actions.BrowsU_C" },
+ { "from": "Standard.BrowsU_L", "to": "Actions.BrowsU_L" },
+ { "from": "Standard.BrowsU_R", "to": "Actions.BrowsU_R" },
+ { "from": "Standard.JawFwd", "to": "Actions.JawFwd" },
+ { "from": "Standard.JawLeft", "to": "Actions.JawLeft" },
+ { "from": "Standard.JawOpen", "to": "Actions.JawOpen" },
+ { "from": "Standard.JawRight", "to": "Actions.JawRight" },
+ { "from": "Standard.MouthLeft", "to": "Actions.MouthLeft" },
+ { "from": "Standard.MouthRight", "to": "Actions.MouthRight" },
+ { "from": "Standard.MouthFrown_L", "to": "Actions.MouthFrown_L" },
+ { "from": "Standard.MouthFrown_R", "to": "Actions.MouthFrown_R" },
+ { "from": "Standard.MouthSmile_L", "to": "Actions.MouthSmile_L" },
+ { "from": "Standard.MouthSmile_R", "to": "Actions.MouthSmile_R" },
+ { "from": "Standard.MouthDimple_L", "to": "Actions.MouthDimple_L" },
+ { "from": "Standard.MouthDimple_R", "to": "Actions.MouthDimple_R" },
+ { "from": "Standard.LipsStretch_L", "to": "Actions.LipsStretch_L" },
+ { "from": "Standard.LipsStretch_R", "to": "Actions.LipsStretch_R" },
+ { "from": "Standard.LipsUpperClose", "to": "Actions.LipsUpperClose" },
+ { "from": "Standard.LipsLowerClose", "to": "Actions.LipsLowerClose" },
+ { "from": "Standard.LipsUpperOpen", "to": "Actions.LipsUpperOpen" },
+ { "from": "Standard.LipsLowerOpen", "to": "Actions.LipsLowerOpen" },
+ { "from": "Standard.LipsFunnel", "to": "Actions.LipsFunnel" },
+ { "from": "Standard.LipsPucker", "to": "Actions.LipsPucker" },
+ { "from": "Standard.Puff", "to": "Actions.Puff" },
+ { "from": "Standard.CheekSquint_L", "to": "Actions.CheekSquint_L" },
+ { "from": "Standard.CheekSquint_R", "to": "Actions.CheekSquint_R" },
+ { "from": "Standard.LipsTogether", "to": "Actions.LipsTogether" },
+ { "from": "Standard.MouthUpperUp_L", "to": "Actions.MouthUpperUp_L" },
+ { "from": "Standard.MouthUpperUp_R", "to": "Actions.MouthUpperUp_R" },
+ { "from": "Standard.MouthLowerDown_L", "to": "Actions.MouthLowerDown_L" },
+ { "from": "Standard.MouthLowerDown_R", "to": "Actions.MouthLowerDown_R" },
+ { "from": "Standard.MouthPress_L", "to": "Actions.MouthPress_L" },
+ { "from": "Standard.MouthPress_R", "to": "Actions.MouthPress_R" },
+ { "from": "Standard.MouthShrugLower", "to": "Actions.MouthShrugLower" },
+ { "from": "Standard.MouthShrugUpper", "to": "Actions.MouthShrugUpper" },
+ { "from": "Standard.NoseSneer_L", "to": "Actions.NoseSneer_L" },
+ { "from": "Standard.NoseSneer_R", "to": "Actions.NoseSneer_R" },
+ { "from": "Standard.TongueOut", "to": "Actions.TongueOut" },
+ { "from": "Standard.UserBlendshape0", "to": "Actions.UserBlendshape0" },
+ { "from": "Standard.UserBlendshape1", "to": "Actions.UserBlendshape1" },
+ { "from": "Standard.UserBlendshape2", "to": "Actions.UserBlendshape2" },
+ { "from": "Standard.UserBlendshape3", "to": "Actions.UserBlendshape3" },
+ { "from": "Standard.UserBlendshape4", "to": "Actions.UserBlendshape4" },
+ { "from": "Standard.UserBlendshape5", "to": "Actions.UserBlendshape5" },
+ { "from": "Standard.UserBlendshape6", "to": "Actions.UserBlendshape6" },
+ { "from": "Standard.UserBlendshape7", "to": "Actions.UserBlendshape7" },
+ { "from": "Standard.UserBlendshape8", "to": "Actions.UserBlendshape8" },
+ { "from": "Standard.UserBlendshape9", "to": "Actions.UserBlendshape9" },

  { "from": "Standard.TrackedObject00", "to" : "Actions.TrackedObject00" },
  { "from": "Standard.TrackedObject01", "to" : "Actions.TrackedObject01" },
@@ -61,9 +61,70 @@
  { "from": "Standard.LeftEye", "to": "Actions.LeftEye" },
  { "from": "Standard.RightEye", "to": "Actions.RightEye" },

- { "from": "Standard.LeftEyeBlink", "to": "Actions.LeftEyeBlink" },
- { "from": "Standard.RightEyeBlink", "to": "Actions.RightEyeBlink" },
+ // AJT: blendshapes
+ { "from": "Standard.EyeBlink_L", "to": "Actions.EyeBlink_L" },
+ { "from": "Standard.EyeBlink_R", "to": "Actions.EyeBlink_R" },
+ { "from": "Standard.EyeSquint_L", "to": "Actions.EyeSquint_L" },
+ { "from": "Standard.EyeSquint_R", "to": "Actions.EyeSquint_R" },
+ { "from": "Standard.EyeDown_L", "to": "Actions.EyeDown_L" },
+ { "from": "Standard.EyeDown_R", "to": "Actions.EyeDown_R" },
+ { "from": "Standard.EyeIn_L", "to": "Actions.EyeIn_L" },
+ { "from": "Standard.EyeIn_R", "to": "Actions.EyeIn_R" },
+ { "from": "Standard.EyeOpen_L", "to": "Actions.EyeOpen_L" },
+ { "from": "Standard.EyeOpen_R", "to": "Actions.EyeOpen_R" },
+ { "from": "Standard.EyeOut_L", "to": "Actions.EyeOut_L" },
+ { "from": "Standard.EyeOut_R", "to": "Actions.EyeOut_R" },
+ { "from": "Standard.EyeUp_L", "to": "Actions.EyeUp_L" },
+ { "from": "Standard.EyeUp_R", "to": "Actions.EyeUp_R" },
+ { "from": "Standard.BrowsD_L", "to": "Actions.BrowsD_L" },
+ { "from": "Standard.BrowsD_R", "to": "Actions.BrowsD_R" },
+ { "from": "Standard.BrowsU_C", "to": "Actions.BrowsU_C" },
+ { "from": "Standard.BrowsU_L", "to": "Actions.BrowsU_L" },
+ { "from": "Standard.BrowsU_R", "to": "Actions.BrowsU_R" },
+ { "from": "Standard.JawFwd", "to": "Actions.JawFwd" },
+ { "from": "Standard.JawLeft", "to": "Actions.JawLeft" },
+ { "from": "Standard.JawOpen", "to": "Actions.JawOpen" },
+ { "from": "Standard.JawRight", "to": "Actions.JawRight" },
+ { "from": "Standard.MouthLeft", "to": "Actions.MouthLeft" },
+ { "from": "Standard.MouthRight", "to": "Actions.MouthRight" },
+ { "from": "Standard.MouthFrown_L", "to": "Actions.MouthFrown_L" },
+ { "from": "Standard.MouthFrown_R", "to": "Actions.MouthFrown_R" },
+ { "from": "Standard.MouthSmile_L", "to": "Actions.MouthSmile_L" },
+ { "from": "Standard.MouthSmile_R", "to": "Actions.MouthSmile_R" },
+ { "from": "Standard.MouthDimple_L", "to": "Actions.MouthDimple_L" },
+ { "from": "Standard.MouthDimple_R", "to": "Actions.MouthDimple_R" },
+ { "from": "Standard.LipsStretch_L", "to": "Actions.LipsStretch_L" },
+ { "from": "Standard.LipsStretch_R", "to": "Actions.LipsStretch_R" },
+ { "from": "Standard.LipsUpperClose", "to": "Actions.LipsUpperClose" },
+ { "from": "Standard.LipsLowerClose", "to": "Actions.LipsLowerClose" },
+ { "from": "Standard.LipsUpperOpen", "to": "Actions.LipsUpperOpen" },
+ { "from": "Standard.LipsLowerOpen", "to": "Actions.LipsLowerOpen" },
+ { "from": "Standard.LipsFunnel", "to": "Actions.LipsFunnel" },
+ { "from": "Standard.LipsPucker", "to": "Actions.LipsPucker" },
+ { "from": "Standard.Puff", "to": "Actions.Puff" },
+ { "from": "Standard.CheekSquint_L", "to": "Actions.CheekSquint_L" },
+ { "from": "Standard.CheekSquint_R", "to": "Actions.CheekSquint_R" },
+ { "from": "Standard.LipsTogether", "to": "Actions.LipsTogether" },
+ { "from": "Standard.MouthUpperUp_L", "to": "Actions.MouthUpperUp_L" },
+ { "from": "Standard.MouthUpperUp_R", "to": "Actions.MouthUpperUp_R" },
+ { "from": "Standard.MouthLowerDown_L", "to": "Actions.MouthLowerDown_L" },
+ { "from": "Standard.MouthLowerDown_R", "to": "Actions.MouthLowerDown_R" },
+ { "from": "Standard.MouthPress_L", "to": "Actions.MouthPress_L" },
+ { "from": "Standard.MouthPress_R", "to": "Actions.MouthPress_R" },
+ { "from": "Standard.MouthShrugLower", "to": "Actions.MouthShrugLower" },
+ { "from": "Standard.MouthShrugUpper", "to": "Actions.MouthShrugUpper" },
+ { "from": "Standard.NoseSneer_L", "to": "Actions.NoseSneer_L" },
+ { "from": "Standard.NoseSneer_R", "to": "Actions.NoseSneer_R" },
+ { "from": "Standard.TongueOut", "to": "Actions.TongueOut" },
+ { "from": "Standard.UserBlendshape0", "to": "Actions.UserBlendshape0" },
+ { "from": "Standard.UserBlendshape1", "to": "Actions.UserBlendshape1" },
+ { "from": "Standard.UserBlendshape2", "to": "Actions.UserBlendshape2" },
+ { "from": "Standard.UserBlendshape3", "to": "Actions.UserBlendshape3" },
+ { "from": "Standard.UserBlendshape4", "to": "Actions.UserBlendshape4" },
+ { "from": "Standard.UserBlendshape5", "to": "Actions.UserBlendshape5" },
+ { "from": "Standard.UserBlendshape6", "to": "Actions.UserBlendshape6" },
+ { "from": "Standard.UserBlendshape7", "to": "Actions.UserBlendshape7" },
+ { "from": "Standard.UserBlendshape8", "to": "Actions.UserBlendshape8" },
+ { "from": "Standard.UserBlendshape9", "to": "Actions.UserBlendshape9" },

  { "from": "Standard.TrackedObject00", "to" : "Actions.TrackedObject00" },
  { "from": "Standard.TrackedObject01", "to" : "Actions.TrackedObject01" },
@@ -99,9 +99,8 @@
  { "from": "Vive.LeftEye", "to" : "Standard.LeftEye" },
  { "from": "Vive.RightEye", "to" : "Standard.RightEye" },

- { "from": "Vive.LeftEyeBlink", "to" : "Standard.LeftEyeBlink" },
- { "from": "Vive.RightEyeBlink", "to" : "Standard.RightEyeBlink" },
+ // AJT: blendshapes (only keep blink)
+ { "from": "Vive.EyeBlink_L", "to" : "Standard.EyeBlink_L" },
+ { "from": "Vive.EyeBlink_R", "to" : "Standard.EyeBlink_R" },

  {
      "from": "Vive.LeftFoot", "to" : "Standard.LeftFoot",
@@ -780,18 +780,6 @@ void MyAvatar::update(float deltaTime) {
  emit energyChanged(currentEnergy);

  updateEyeContactTarget(deltaTime);
-
- // if we're getting eye rotations from a tracker, disable observer-side procedural eye motions
- auto userInputMapper = DependencyManager::get<UserInputMapper>();
- bool eyesTracked =
-     userInputMapper->getPoseState(controller::Action::LEFT_EYE).valid &&
-     userInputMapper->getPoseState(controller::Action::RIGHT_EYE).valid;
-
- int leftEyeJointIndex = getJointIndex("LeftEye");
- int rightEyeJointIndex = getJointIndex("RightEye");
- bool eyesAreOverridden = getIsJointOverridden(leftEyeJointIndex) || getIsJointOverridden(rightEyeJointIndex);
-
- _headData->setHasProceduralEyeMovement(!(eyesTracked || eyesAreOverridden));
 }

 void MyAvatar::updateEyeContactTarget(float deltaTime) {
@@ -21,6 +21,73 @@
 using namespace std;

+static controller::Action blendshapeActions[] = {
+    controller::Action::EYEBLINK_L,
+    controller::Action::EYEBLINK_R,
+    controller::Action::EYESQUINT_L,
+    controller::Action::EYESQUINT_R,
+    controller::Action::EYEDOWN_L,
+    controller::Action::EYEDOWN_R,
+    controller::Action::EYEIN_L,
+    controller::Action::EYEIN_R,
+    controller::Action::EYEOPEN_L,
+    controller::Action::EYEOPEN_R,
+    controller::Action::EYEOUT_L,
+    controller::Action::EYEOUT_R,
+    controller::Action::EYEUP_L,
+    controller::Action::EYEUP_R,
+    controller::Action::BROWSD_L,
+    controller::Action::BROWSD_R,
+    controller::Action::BROWSU_C,
+    controller::Action::BROWSU_L,
+    controller::Action::BROWSU_R,
+    controller::Action::JAWFWD,
+    controller::Action::JAWLEFT,
+    controller::Action::JAWOPEN,
+    controller::Action::JAWRIGHT,
+    controller::Action::MOUTHLEFT,
+    controller::Action::MOUTHRIGHT,
+    controller::Action::MOUTHFROWN_L,
+    controller::Action::MOUTHFROWN_R,
+    controller::Action::MOUTHSMILE_L,
+    controller::Action::MOUTHSMILE_R,
+    controller::Action::MOUTHDIMPLE_L,
+    controller::Action::MOUTHDIMPLE_R,
+    controller::Action::LIPSSTRETCH_L,
+    controller::Action::LIPSSTRETCH_R,
+    controller::Action::LIPSUPPERCLOSE,
+    controller::Action::LIPSLOWERCLOSE,
+    controller::Action::LIPSUPPEROPEN,
+    controller::Action::LIPSLOWEROPEN,
+    controller::Action::LIPSFUNNEL,
+    controller::Action::LIPSPUCKER,
+    controller::Action::PUFF,
+    controller::Action::CHEEKSQUINT_L,
+    controller::Action::CHEEKSQUINT_R,
+    controller::Action::LIPSTOGETHER,
+    controller::Action::MOUTHUPPERUP_L,
+    controller::Action::MOUTHUPPERUP_R,
+    controller::Action::MOUTHLOWERDOWN_L,
+    controller::Action::MOUTHLOWERDOWN_R,
+    controller::Action::MOUTHPRESS_L,
+    controller::Action::MOUTHPRESS_R,
+    controller::Action::MOUTHSHRUGLOWER,
+    controller::Action::MOUTHSHRUGUPPER,
+    controller::Action::NOSESNEER_L,
+    controller::Action::NOSESNEER_R,
+    controller::Action::TONGUEOUT,
+    controller::Action::USERBLENDSHAPE0,
+    controller::Action::USERBLENDSHAPE1,
+    controller::Action::USERBLENDSHAPE2,
+    controller::Action::USERBLENDSHAPE3,
+    controller::Action::USERBLENDSHAPE4,
+    controller::Action::USERBLENDSHAPE5,
+    controller::Action::USERBLENDSHAPE6,
+    controller::Action::USERBLENDSHAPE7,
+    controller::Action::USERBLENDSHAPE8,
+    controller::Action::USERBLENDSHAPE9
+};
+
 MyHead::MyHead(MyAvatar* owningAvatar) : Head(owningAvatar) {
 }
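Note: the per-frame loops in MyHead::simulate() below index this table up to (int)Blendshapes::BlendshapeCount, so the table is assumed to carry exactly one controller action per entry of the Blendshapes enum. A compile-time check along these lines (a sketch only, not part of this commit) would catch the two lists drifting apart:

    // Sketch, not in the commit: keep blendshapeActions in lock-step with the Blendshapes enum.
    static_assert(sizeof(blendshapeActions) / sizeof(blendshapeActions[0]) == (size_t)Blendshapes::BlendshapeCount,
                  "blendshapeActions must have one controller::Action per Blendshapes entry");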
@@ -44,40 +111,57 @@ void MyHead::simulate(float deltaTime) {
     auto player = DependencyManager::get<recording::Deck>();
     // Only use face trackers when not playing back a recording.
     if (!player->isPlaying()) {
-        // TODO -- finish removing face-tracker specific code. To do this, add input channels for
-        // each blendshape-coefficient and update the various json files to relay them in a useful way.
-        // After that, input plugins can be used to drive the avatar's face, and the various "DDE" files
-        // can be ported into the plugin and removed.
-        //
-        // auto faceTracker = qApp->getActiveFaceTracker();
-        // const bool hasActualFaceTrackerConnected = faceTracker && !faceTracker->isMuted();
-        // _isFaceTrackerConnected = hasActualFaceTrackerConnected || _owningAvatar->getHasScriptedBlendshapes();
-        // if (_isFaceTrackerConnected) {
-        //     if (hasActualFaceTrackerConnected) {
-        //         _blendshapeCoefficients = faceTracker->getBlendshapeCoefficients();
-        //     }
-        // }
-
+        // AJT: blendshapes
         auto userInputMapper = DependencyManager::get<UserInputMapper>();
+
+        // if input system has control over blink blendshapes
         bool eyeLidsTracked =
-            userInputMapper->getActionStateValid(controller::Action::LEFT_EYE_BLINK) &&
-            userInputMapper->getActionStateValid(controller::Action::RIGHT_EYE_BLINK);
+            userInputMapper->getActionStateValid(controller::Action::EYEBLINK_L) ||
+            userInputMapper->getActionStateValid(controller::Action::EYEBLINK_R);

-        setHasScriptedBlendshapes(eyeLidsTracked);
-        if (eyeLidsTracked) {
-            float leftEyeBlink = userInputMapper->getActionState(controller::Action::LEFT_EYE_BLINK);
-            float rightEyeBlink = userInputMapper->getActionState(controller::Action::RIGHT_EYE_BLINK);
-            _blendshapeCoefficients.resize(std::max(_blendshapeCoefficients.size(), 2));
-            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_L] = leftEyeBlink;
-            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_R] = rightEyeBlink;
-        } else {
-            const float FULLY_OPEN = 0.0f;
-            _blendshapeCoefficients.resize(std::max(_blendshapeCoefficients.size(), 2));
-            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_L] = FULLY_OPEN;
-            _blendshapeCoefficients[(int)Blendshapes::EyeBlink_R] = FULLY_OPEN;
+        // if input system has control over the brows.
+        bool browsTracked =
+            userInputMapper->getActionStateValid(controller::Action::BROWSD_L) ||
+            userInputMapper->getActionStateValid(controller::Action::BROWSD_R) ||
+            userInputMapper->getActionStateValid(controller::Action::BROWSU_L) ||
+            userInputMapper->getActionStateValid(controller::Action::BROWSU_R) ||
+            userInputMapper->getActionStateValid(controller::Action::BROWSU_C);
+
+        // if input system has control of mouth
+        bool mouthTracked =
+            userInputMapper->getActionStateValid(controller::Action::JAWOPEN) ||
+            userInputMapper->getActionStateValid(controller::Action::LIPSUPPERCLOSE) ||
+            userInputMapper->getActionStateValid(controller::Action::LIPSLOWERCLOSE) ||
+            userInputMapper->getActionStateValid(controller::Action::LIPSFUNNEL) ||
+            userInputMapper->getActionStateValid(controller::Action::MOUTHSMILE_L) ||
+            userInputMapper->getActionStateValid(controller::Action::MOUTHSMILE_R);
+
+        bool eyesTracked =
+            userInputMapper->getPoseState(controller::Action::LEFT_EYE).valid &&
+            userInputMapper->getPoseState(controller::Action::RIGHT_EYE).valid;
+
+        MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
+        int leftEyeJointIndex = myAvatar->getJointIndex("LeftEye");
+        int rightEyeJointIndex = myAvatar->getJointIndex("RightEye");
+        bool eyeJointsOverridden = myAvatar->getIsJointOverridden(leftEyeJointIndex) || myAvatar->getIsJointOverridden(rightEyeJointIndex);
+
+        bool anyInputTracked = false;
+        for (int i = 0; i < (int)Blendshapes::BlendshapeCount; i++) {
+            anyInputTracked = anyInputTracked || userInputMapper->getActionStateValid(blendshapeActions[i]);
+        }
+
+        setHasInputDrivenBlendshapes(anyInputTracked);
+
+        // suppress any procedural blendshape animation if they overlap with driven input.
+        setSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, eyeLidsTracked);
+        setSuppressProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation, eyeLidsTracked || browsTracked);
+        setSuppressProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation, mouthTracked);
+        setSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation, eyesTracked || eyeJointsOverridden);
+
+        if (anyInputTracked) {
+            for (int i = 0; i < (int)Blendshapes::BlendshapeCount; i++) {
+                _blendshapeCoefficients[i] = userInputMapper->getActionState(blendshapeActions[i]);
+            }
         }
     }
     Parent::simulate(deltaTime);
@@ -114,6 +114,9 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {

     Head* head = _owningAvatar->getHead();

+    // AJT: blendshapes TODO: RE-enable this and avoid duplication with
+    // SkeletonModel::updateRig()
+    /*
     bool eyePosesValid = !head->getHasProceduralEyeMovement();
     glm::vec3 lookAt;
     if (eyePosesValid) {
@@ -121,6 +124,8 @@ void MySkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     } else {
         lookAt = avoidCrossedEyes(head->getLookAtPosition());
     }
+    */
+    glm::vec3 lookAt = avoidCrossedEyes(head->getLookAtPosition());

     MyAvatar* myAvatar = static_cast<MyAvatar*>(_owningAvatar);
     assert(myAvatar);
@@ -72,7 +72,8 @@ void Head::simulate(float deltaTime) {
         _longTermAverageLoudness = glm::mix(_longTermAverageLoudness, _averageLoudness, glm::min(deltaTime / AUDIO_LONG_TERM_AVERAGING_SECS, 1.0f));
     }

-    if (getHasProceduralEyeMovement()) {
+    if (getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) &&
+        !getSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation)) {
         // Update eye saccades
         const float AVERAGE_MICROSACCADE_INTERVAL = 1.0f;
         const float AVERAGE_SACCADE_INTERVAL = 6.0f;
@@ -95,7 +96,8 @@ void Head::simulate(float deltaTime) {
         const float BLINK_START_VARIABILITY = 0.25f;
         const float FULLY_OPEN = 0.0f;
         const float FULLY_CLOSED = 1.0f;
-        if (getHasProceduralBlinkFaceMovement()) {
+        if (getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) &&
+            !getSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation)) {
             // handle automatic blinks
             // Detect transition from talking to not; force blink after that and a delay
             bool forceBlink = false;
@@ -146,7 +148,8 @@ void Head::simulate(float deltaTime) {
     }

     // use data to update fake Faceshift blendshape coefficients
-    if (getHasAudioEnabledFaceMovement()) {
+    if (getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation) &&
+        !getSuppressProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation)) {
         // Update audio attack data for facial animation (eyebrows and mouth)
         float audioAttackAveragingRate = (10.0f - deltaTime * NORMAL_HZ) / 10.0f; // --> 0.9 at 60 Hz
         _audioAttack = audioAttackAveragingRate * _audioAttack +
@@ -178,7 +181,8 @@ void Head::simulate(float deltaTime) {
             _mouth4,
             _transientBlendshapeCoefficients);

-    if (getHasProceduralEyeFaceMovement()) {
+    if (getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation) &&
+        !getSuppressProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation)) {
         // This controls two things, the eye brow and the upper eye lid, it is driven by the vertical up/down angle of the
         // eyes relative to the head. This is to try to help prevent sleepy eyes/crazy eyes.
         applyEyelidOffset(getOrientation());
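Each of these sites applies the same gate: a procedural animation runs only when its user/script flag is still enabled and C++ has not suppressed it for this frame. A helper equivalent to the inline checks (hypothetical, not declared anywhere in this commit; shown only to name the pattern) would read:

    // Sketch only: effective state = enabled by script/user AND not suppressed by C++ input handling.
    bool Head::proceduralAnimationActive(HeadData::ProceduralAnimationType type) const {
        return getProceduralAnimationFlag(type) && !getSuppressProceduralAnimationFlag(type);
    }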
@@ -111,6 +111,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {

     Head* head = _owningAvatar->getHead();

+    // AJT: blendshapes TODO: RE-enable this. but move into rig?
+    /*
     bool eyePosesValid = !head->getHasProceduralEyeMovement();
     glm::vec3 lookAt;
     if (eyePosesValid) {
@@ -118,6 +120,8 @@ void SkeletonModel::updateRig(float deltaTime, glm::mat4 parentTransform) {
     } else {
         lookAt = avoidCrossedEyes(head->getCorrectedLookAtPosition());
     }
+    */
+    glm::vec3 lookAt = avoidCrossedEyes(head->getCorrectedLookAtPosition());

     // no need to call Model::updateRig() because otherAvatars get their joint state
     // copied directly from AvtarData::_jointData (there are no Rig animations to blend)
@@ -110,7 +110,6 @@ AvatarData::AvatarData() :
     _targetScale(1.0f),
     _handState(0),
     _keyState(NO_KEY_DOWN),
-    _hasScriptedBlendshapes(false),
     _headData(NULL),
     _errorLogExpiry(0),
     _owningAvatarMixer(),
@@ -156,7 +155,7 @@ float AvatarData::getDomainLimitedScale() const {

 void AvatarData::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
-    if (hasScriptedBlendshapes == _hasScriptedBlendshapes) {
+    if (hasScriptedBlendshapes == _headData->getHasScriptedBlendshapes()) {
         return;
     }
     if (!hasScriptedBlendshapes) {
@@ -165,19 +164,35 @@ void AvatarData::setHasScriptedBlendshapes(bool hasScriptedBlendshapes) {
         // before sending the update, or else it won't send the neutal blendshapes to the receiving clients
         sendAvatarDataPacket(true);
     }
-    _hasScriptedBlendshapes = hasScriptedBlendshapes;
+    _headData->setHasScriptedBlendshapes(hasScriptedBlendshapes);
 }

-void AvatarData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
-    _headData->setHasProceduralBlinkFaceMovement(hasProceduralBlinkFaceMovement);
+bool AvatarData::getHasScriptedBlendshapes() const {
+    return _headData->getHasScriptedBlendshapes();
 }

-void AvatarData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
-    _headData->setHasProceduralEyeFaceMovement(hasProceduralEyeFaceMovement);
+void AvatarData::setHasProceduralBlinkFaceMovement(bool value) {
+    _headData->setProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, value);
 }

-void AvatarData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
-    _headData->setHasAudioEnabledFaceMovement(hasAudioEnabledFaceMovement);
+bool AvatarData::getHasProceduralBlinkFaceMovement() const {
+    return _headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation);
+}
+
+void AvatarData::setHasProceduralEyeFaceMovement(bool value) {
+    _headData->setProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation, value);
+}
+
+bool AvatarData::getHasProceduralEyeFaceMovement() const {
+    return _headData->getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation);
+}
+
+void AvatarData::setHasAudioEnabledFaceMovement(bool value) {
+    _headData->setProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation, value);
+}
+
+bool AvatarData::getHasAudioEnabledFaceMovement() const {
+    return _headData->getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation);
 }

 void AvatarData::setDomainMinimumHeight(float domainMinimumHeight) {
@@ -232,9 +247,6 @@ void AvatarData::lazyInitHeadData() const {
     if (!_headData) {
         _headData = new HeadData(const_cast<AvatarData*>(this));
     }
-    if (_hasScriptedBlendshapes) {
-        _headData->_hasScriptedBlendshapes = true;
-    }
 }
@@ -555,27 +567,31 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
         setAtBit16(flags, HAND_STATE_FINGER_POINTING_BIT);
     }
     // face tracker state
-    if (_headData->_hasScriptedBlendshapes) {
+    if (_headData->_hasScriptedBlendshapes || _headData->_hasInputDrivenBlendshapes) {
         setAtBit16(flags, HAS_SCRIPTED_BLENDSHAPES);
     }
     // eye tracker state
-    if (!_headData->_hasProceduralEyeMovement) {
-        setAtBit16(flags, IS_EYE_TRACKER_CONNECTED);
+    if (_headData->getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) &&
+        !_headData->getSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation)) {
+        setAtBit16(flags, HAS_PROCEDURAL_EYE_MOVEMENT);
     }
     // referential state
     if (!parentID.isNull()) {
         setAtBit16(flags, HAS_REFERENTIAL);
     }
     // audio face movement
-    if (_headData->getHasAudioEnabledFaceMovement()) {
+    if (_headData->getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation) &&
+        !_headData->getSuppressProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation)) {
         setAtBit16(flags, AUDIO_ENABLED_FACE_MOVEMENT);
     }
     // procedural eye face movement
-    if (_headData->getHasProceduralEyeFaceMovement()) {
+    if (_headData->getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation) &&
+        !_headData->getSuppressProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation)) {
         setAtBit16(flags, PROCEDURAL_EYE_FACE_MOVEMENT);
     }
     // procedural blink face movement
-    if (_headData->getHasProceduralBlinkFaceMovement()) {
+    if (_headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) &&
+        !_headData->getSuppressProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation)) {
         setAtBit16(flags, PROCEDURAL_BLINK_FACE_MOVEMENT);
     }
     // avatar collisions enabled
@@ -1177,21 +1193,22 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
         + (oneAtBit16(bitItems, HAND_STATE_FINGER_POINTING_BIT) ? IS_FINGER_POINTING_FLAG : 0);

     auto newHasScriptedBlendshapes = oneAtBit16(bitItems, HAS_SCRIPTED_BLENDSHAPES);
-    auto newHasntProceduralEyeMovement = oneAtBit16(bitItems, IS_EYE_TRACKER_CONNECTED);
+    auto newHasProceduralEyeMovement = oneAtBit16(bitItems, HAS_PROCEDURAL_EYE_MOVEMENT);
     auto newHasAudioEnabledFaceMovement = oneAtBit16(bitItems, AUDIO_ENABLED_FACE_MOVEMENT);
     auto newHasProceduralEyeFaceMovement = oneAtBit16(bitItems, PROCEDURAL_EYE_FACE_MOVEMENT);
     auto newHasProceduralBlinkFaceMovement = oneAtBit16(bitItems, PROCEDURAL_BLINK_FACE_MOVEMENT);
     auto newCollideWithOtherAvatars = oneAtBit16(bitItems, COLLIDE_WITH_OTHER_AVATARS);
     auto newHasPriority = oneAtBit16(bitItems, HAS_HERO_PRIORITY);

     bool keyStateChanged = (_keyState != newKeyState);
     bool handStateChanged = (_handState != newHandState);
-    bool faceStateChanged = (_headData->_hasScriptedBlendshapes != newHasScriptedBlendshapes);
-    bool eyeStateChanged = (_headData->_hasProceduralEyeMovement == newHasntProceduralEyeMovement);
-    bool audioEnableFaceMovementChanged = (_headData->getHasAudioEnabledFaceMovement() != newHasAudioEnabledFaceMovement);
-    bool proceduralEyeFaceMovementChanged = (_headData->getHasProceduralEyeFaceMovement() != newHasProceduralEyeFaceMovement);
-    bool proceduralBlinkFaceMovementChanged = (_headData->getHasProceduralBlinkFaceMovement() != newHasProceduralBlinkFaceMovement);
+    bool faceStateChanged = (_headData->getHasScriptedBlendshapes() != newHasScriptedBlendshapes);
+    bool eyeStateChanged = (_headData->getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) != newHasProceduralEyeMovement);
+    bool audioEnableFaceMovementChanged = (_headData->getProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation) != newHasAudioEnabledFaceMovement);
+    bool proceduralEyeFaceMovementChanged = (_headData->getProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation) != newHasProceduralEyeFaceMovement);
+    bool proceduralBlinkFaceMovementChanged = (_headData->getProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation) != newHasProceduralBlinkFaceMovement);
     bool collideWithOtherAvatarsChanged = (_collideWithOtherAvatars != newCollideWithOtherAvatars);
     bool hasPriorityChanged = (getHasPriority() != newHasPriority);
     bool somethingChanged = keyStateChanged || handStateChanged || faceStateChanged || eyeStateChanged || audioEnableFaceMovementChanged ||
@@ -1200,11 +1217,11 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {

     _keyState = newKeyState;
     _handState = newHandState;
-    _headData->_hasScriptedBlendshapes = newHasScriptedBlendshapes;
-    _headData->setHasProceduralEyeMovement(!newHasntProceduralEyeMovement);
-    _headData->setHasAudioEnabledFaceMovement(newHasAudioEnabledFaceMovement);
-    _headData->setHasProceduralEyeFaceMovement(newHasProceduralEyeFaceMovement);
-    _headData->setHasProceduralBlinkFaceMovement(newHasProceduralBlinkFaceMovement);
+    _headData->setHasScriptedBlendshapes(newHasScriptedBlendshapes);
+    _headData->setProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation, newHasProceduralEyeMovement);
+    _headData->setProceduralAnimationFlag(HeadData::AudioProceduralBlendshapeAnimation, newHasAudioEnabledFaceMovement);
+    _headData->setProceduralAnimationFlag(HeadData::LidAdjustmentProceduralBlendshapeAnimation, newHasProceduralEyeFaceMovement);
+    _headData->setProceduralAnimationFlag(HeadData::BlinkProceduralBlendshapeAnimation, newHasProceduralBlinkFaceMovement);
     _collideWithOtherAvatars = newCollideWithOtherAvatars;
     setHasPriorityWithoutTimestampReset(newHasPriority);
@@ -1289,7 +1306,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
     sourceBuffer += sizeof(AvatarDataPacket::FaceTrackerInfo);

     PACKET_READ_CHECK(FaceTrackerCoefficients, coefficientsSize);
-    _headData->_blendshapeCoefficients.resize(numCoefficients); // make sure there's room for the copy!
+    _headData->_blendshapeCoefficients.resize(std::min(numCoefficients, (int)Blendshapes::BlendshapeCount)); // make sure there's room for the copy!
     //only copy the blendshapes to headData, not the procedural face info
     memcpy(_headData->_blendshapeCoefficients.data(), sourceBuffer, coefficientsSize);
     sourceBuffer += coefficientsSize;
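The wire semantics of bit 5 flip here: the old IS_EYE_TRACKER_CONNECTED bit was set when procedural eye movement was off, while HAS_PROCEDURAL_EYE_MOVEMENT is set when it is effectively on. A condensed sketch of the round trip, using only the helpers already shown in this file (illustrative, not additional code from the commit):

    // sender (toByteArray): the bit mirrors the effective procedural-eye state
    bool proceduralEyes = _headData->getProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation) &&
                          !_headData->getSuppressProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation);
    if (proceduralEyes) {
        setAtBit16(flags, HAS_PROCEDURAL_EYE_MOVEMENT);
    }
    // receiver (parseDataFromBuffer): no inversion is needed any more
    _headData->setProceduralAnimationFlag(HeadData::SaccadeProceduralEyeJointAnimation,
                                          oneAtBit16(bitItems, HAS_PROCEDURAL_EYE_MOVEMENT));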
@@ -107,7 +107,7 @@ const quint32 AVATAR_MOTION_SCRIPTABLE_BITS =
 const int KEY_STATE_START_BIT = 0; // 1st and 2nd bits (UNUSED)
 const int HAND_STATE_START_BIT = 2; // 3rd and 4th bits (UNUSED)
 const int HAS_SCRIPTED_BLENDSHAPES = 4; // 5th bit
-const int IS_EYE_TRACKER_CONNECTED = 5; // 6th bit (was CHAT_CIRCLING)
+const int HAS_PROCEDURAL_EYE_MOVEMENT = 5; // 6th bit
 const int HAS_REFERENTIAL = 6; // 7th bit
 const int HAND_STATE_FINGER_POINTING_BIT = 7; // 8th bit (UNUSED)
 const int AUDIO_ENABLED_FACE_MOVEMENT = 8; // 9th bit
@@ -703,13 +703,13 @@ public:
     float getDomainLimitedScale() const;

     void setHasScriptedBlendshapes(bool hasScriptedBlendshapes);
-    bool getHasScriptedBlendshapes() const { return _hasScriptedBlendshapes; }
+    bool getHasScriptedBlendshapes() const;
     void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
-    bool getHasProceduralBlinkFaceMovement() const { return _headData->getHasProceduralBlinkFaceMovement(); }
+    bool getHasProceduralBlinkFaceMovement() const;
     void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
-    bool getHasProceduralEyeFaceMovement() const { return _headData->getHasProceduralEyeFaceMovement(); }
+    bool getHasProceduralEyeFaceMovement() const;
     void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
-    bool getHasAudioEnabledFaceMovement() const { return _headData->getHasAudioEnabledFaceMovement(); }
+    bool getHasAudioEnabledFaceMovement() const;

     /**jsdoc
      * Gets the minimum scale allowed for this avatar in the current domain.
@@ -1716,7 +1716,6 @@ protected:
     // key state
     KeyState _keyState;

-    bool _hasScriptedBlendshapes;
     bool _hasNewJointData { true }; // set in AvatarData, cleared in Avatar

     mutable HeadData* _headData { nullptr };
@@ -27,11 +27,10 @@ HeadData::HeadData(AvatarData* owningAvatar) :
     _basePitch(0.0f),
     _baseRoll(0.0f),
     _lookAtPosition(0.0f, 0.0f, 0.0f),
-    _blendshapeCoefficients(QVector<float>(0, 0.0f)),
-    _transientBlendshapeCoefficients(QVector<float>(0, 0.0f)),
-    _summedBlendshapeCoefficients(QVector<float>(0, 0.0f)),
     _owningAvatar(owningAvatar)
 {
+    _userProceduralAnimationFlags.assign((size_t)ProceduralAnimaitonTypeCount, true);
+    _suppressProceduralAnimationFlags.assign((size_t)ProceduralAnimaitonTypeCount, false);
     computeBlendshapesLookupMap();
 }
@@ -112,6 +111,19 @@ void HeadData::setBlendshape(QString name, float val) {
             _transientBlendshapeCoefficients.resize(it.value() + 1);
         }
         _blendshapeCoefficients[it.value()] = val;
+    } else {
+        // check to see if this is a legacy blendshape that is present in
+        // ARKit blendshapes but is split. i.e. has left and right halfs.
+        if (name == "LipsUpperUp") {
+            _blendshapeCoefficients[(int)Blendshapes::MouthUpperUp_L] = val;
+            _blendshapeCoefficients[(int)Blendshapes::MouthUpperUp_R] = val;
+        } else if (name == "LipsLowerDown") {
+            _blendshapeCoefficients[(int)Blendshapes::MouthLowerDown_L] = val;
+            _blendshapeCoefficients[(int)Blendshapes::MouthLowerDown_R] = val;
+        } else if (name == "Sneer") {
+            _blendshapeCoefficients[(int)Blendshapes::NoseSneer_L] = val;
+            _blendshapeCoefficients[(int)Blendshapes::NoseSneer_R] = val;
+        }
     }
 }
@@ -197,38 +209,34 @@ void HeadData::fromJson(const QJsonObject& json) {
     }
 }

-bool HeadData::getHasProceduralEyeFaceMovement() const {
-    return _hasProceduralEyeFaceMovement;
+bool HeadData::getProceduralAnimationFlag(ProceduralAnimationType type) const {
+    return _userProceduralAnimationFlags[(int)type];
 }

-void HeadData::setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement) {
-    _hasProceduralEyeFaceMovement = hasProceduralEyeFaceMovement;
+void HeadData::setProceduralAnimationFlag(ProceduralAnimationType type, bool value) {
+    _userProceduralAnimationFlags[(int)type] = value;
 }

-bool HeadData::getHasProceduralBlinkFaceMovement() const {
-    return _hasProceduralBlinkFaceMovement;
+bool HeadData::getSuppressProceduralAnimationFlag(ProceduralAnimationType type) const {
+    return _suppressProceduralAnimationFlags[(int)type];
 }

-void HeadData::setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement) {
-    _hasProceduralBlinkFaceMovement = hasProceduralBlinkFaceMovement;
+void HeadData::setSuppressProceduralAnimationFlag(ProceduralAnimationType type, bool value) {
+    _suppressProceduralAnimationFlags[(int)type] = value;
 }

-bool HeadData::getHasAudioEnabledFaceMovement() const {
-    return _hasAudioEnabledFaceMovement;
-}
-
-void HeadData::setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement) {
-    _hasAudioEnabledFaceMovement = hasAudioEnabledFaceMovement;
-}
-
-bool HeadData::getHasProceduralEyeMovement() const {
-    return _hasProceduralEyeMovement;
-}
-
-void HeadData::setHasProceduralEyeMovement(bool hasProceduralEyeMovement) {
-    _hasProceduralEyeMovement = hasProceduralEyeMovement;
+bool HeadData::getHasScriptedBlendshapes() const {
+    return _hasScriptedBlendshapes;
 }

 void HeadData::setHasScriptedBlendshapes(bool value) {
     _hasScriptedBlendshapes = value;
 }
+
+bool HeadData::getHasInputDrivenBlendshapes() const {
+    return _hasInputDrivenBlendshapes;
+}
+
+void HeadData::setHasInputDrivenBlendshapes(bool value) {
+    _hasInputDrivenBlendshapes = value;
+}
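As a usage note (hypothetical values, assuming the legacy names are not present in the ARKit-based lookup map, which is what the new else-branch implies), a script that still writes a split legacy name now lands on both ARKit halves:

    // Illustrative only: legacy names that ARKit splits into left/right halves
    // update both coefficients at once.
    head->setBlendshape("Sneer", 0.6f);        // sets NoseSneer_L and NoseSneer_R
    head->setBlendshape("LipsUpperUp", 0.3f);  // sets MouthUpperUp_L and MouthUpperUp_R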
@@ -72,17 +72,29 @@
     }
     bool lookAtPositionChangedSince(quint64 time) { return _lookAtPositionChanged >= time; }

-    bool getHasProceduralEyeFaceMovement() const;
-    void setHasProceduralEyeFaceMovement(bool hasProceduralEyeFaceMovement);
-    bool getHasProceduralBlinkFaceMovement() const;
-    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
-    bool getHasAudioEnabledFaceMovement() const;
-    void setHasAudioEnabledFaceMovement(bool hasAudioEnabledFaceMovement);
-    bool getHasProceduralEyeMovement() const;
-    void setHasProceduralEyeMovement(bool hasProceduralEyeMovement);
+    enum ProceduralAnimationType {
+        AudioProceduralBlendshapeAnimation = 0,
+        BlinkProceduralBlendshapeAnimation,
+        LidAdjustmentProceduralBlendshapeAnimation,
+        SaccadeProceduralEyeJointAnimation,
+        ProceduralAnimaitonTypeCount,
+    };
+
+    // called by scripts to enable or disable procedural blendshape or eye joint animations.
+    bool getProceduralAnimationFlag(ProceduralAnimationType type) const;
+    void setProceduralAnimationFlag(ProceduralAnimationType type, bool value);
+
+    // called by c++ to suppress, i.e. temporarily disable a procedural animation.
+    bool getSuppressProceduralAnimationFlag(ProceduralAnimationType flag) const;
+    void setSuppressProceduralAnimationFlag(ProceduralAnimationType flag, bool value);
+
+    // called by scripts to enable/disable manual adjustment of blendshapes
     void setHasScriptedBlendshapes(bool value);
-    bool getHasScriptedBlendshapes() const { return _hasScriptedBlendshapes; }
+    bool getHasScriptedBlendshapes() const;
+
+    // called by C++ code to denote the presence of manually driven blendshapes.
+    void setHasInputDrivenBlendshapes(bool value);
+    bool getHasInputDrivenBlendshapes() const;

     friend class AvatarData;
@@ -98,21 +110,20 @@ protected:
     glm::vec3 _lookAtPosition;
     quint64 _lookAtPositionChanged { 0 };

-    bool _hasAudioEnabledFaceMovement { true };
-    bool _hasProceduralBlinkFaceMovement { true };
-    bool _hasProceduralEyeFaceMovement { true };
-    bool _hasProceduralEyeMovement { true };
+    std::vector<bool> _userProceduralAnimationFlags;
+    std::vector<bool> _suppressProceduralAnimationFlags;

     bool _hasScriptedBlendshapes { false };
+    bool _hasInputDrivenBlendshapes { false };

     float _leftEyeBlink { 0.0f };
     float _rightEyeBlink { 0.0f };
     float _averageLoudness { 0.0f };
     float _browAudioLift { 0.0f };

-    QVector<float> _blendshapeCoefficients;
-    QVector<float> _transientBlendshapeCoefficients;
-    QVector<float> _summedBlendshapeCoefficients;
+    QVector<float> _blendshapeCoefficients { (int)Blendshapes::BlendshapeCount, 0.0f };
+    QVector<float> _transientBlendshapeCoefficients { (int)Blendshapes::BlendshapeCount, 0.0f };
+    QVector<float> _summedBlendshapeCoefficients { (int)Blendshapes::BlendshapeCount, 0.0f };
     QMap<QString, int> _blendshapeLookupMap;
     AvatarData* _owningAvatar;
@@ -185,70 +185,70 @@ enum class Action {
     RIGHT_EYE,

     // AJT: blendshapes
-    EyeBlink_L,
-    EyeBlink_R,
-    EyeSquint_L,
-    EyeSquint_R,
-    EyeDown_L,
-    EyeDown_R,
-    EyeIn_L,
-    EyeIn_R,
-    EyeOpen_L,
-    EyeOpen_R,
-    EyeOut_L,
-    EyeOut_R,
-    EyeUp_L,
-    EyeUp_R,
-    BrowsD_L,
-    BrowsD_R,
-    BrowsU_C,
-    BrowsU_L,
-    BrowsU_R,
-    JawFwd,
-    JawLeft,
-    JawOpen,
-    JawRight,
-    MouthLeft,
-    MouthRight,
-    MouthFrown_L,
-    MouthFrown_R,
-    MouthSmile_L,
-    MouthSmile_R,
-    MouthDimple_L,
-    MouthDimple_R,
-    LipsStretch_L,
-    LipsStretch_R,
-    LipsUpperClose,
-    LipsLowerClose,
-    LipsUpperOpen,
-    LipsLowerOpen,
-    LipsFunnel,
-    LipsPucker,
-    Puff,
-    CheekSquint_L,
-    CheekSquint_R,
-    LipsTogether,
-    MouthUpperUp_L,
-    MouthUpperUp_R,
-    MouthLowerDown_L,
-    MouthLowerDown_R,
-    MouthPress_L,
-    MouthPress_R,
-    MouthShrugLower,
-    MouthShrugUpper,
-    NoseSneer_L,
-    NoseSneer_R,
-    TongueOut,
-    UserBlendshape0,
-    UserBlendshape1,
-    UserBlendshape2,
-    UserBlendshape3,
-    UserBlendshape4,
-    UserBlendshape5,
-    UserBlendshape6,
-    UserBlendshape7,
-    UserBlendshape8,
-    UserBlendshape9,
+    EYEBLINK_L,
+    EYEBLINK_R,
+    EYESQUINT_L,
+    EYESQUINT_R,
+    EYEDOWN_L,
+    EYEDOWN_R,
+    EYEIN_L,
+    EYEIN_R,
+    EYEOPEN_L,
+    EYEOPEN_R,
+    EYEOUT_L,
+    EYEOUT_R,
+    EYEUP_L,
+    EYEUP_R,
+    BROWSD_L,
+    BROWSD_R,
+    BROWSU_C,
+    BROWSU_L,
+    BROWSU_R,
+    JAWFWD,
+    JAWLEFT,
+    JAWOPEN,
+    JAWRIGHT,
+    MOUTHLEFT,
+    MOUTHRIGHT,
+    MOUTHFROWN_L,
+    MOUTHFROWN_R,
+    MOUTHSMILE_L,
+    MOUTHSMILE_R,
+    MOUTHDIMPLE_L,
+    MOUTHDIMPLE_R,
+    LIPSSTRETCH_L,
+    LIPSSTRETCH_R,
+    LIPSUPPERCLOSE,
+    LIPSLOWERCLOSE,
+    LIPSUPPEROPEN,
+    LIPSLOWEROPEN,
+    LIPSFUNNEL,
+    LIPSPUCKER,
+    PUFF,
+    CHEEKSQUINT_L,
+    CHEEKSQUINT_R,
+    LIPSTOGETHER,
+    MOUTHUPPERUP_L,
+    MOUTHUPPERUP_R,
+    MOUTHLOWERDOWN_L,
+    MOUTHLOWERDOWN_R,
+    MOUTHPRESS_L,
+    MOUTHPRESS_R,
+    MOUTHSHRUGLOWER,
+    MOUTHSHRUGUPPER,
+    NOSESNEER_L,
+    NOSESNEER_R,
+    TONGUEOUT,
+    USERBLENDSHAPE0,
+    USERBLENDSHAPE1,
+    USERBLENDSHAPE2,
+    USERBLENDSHAPE3,
+    USERBLENDSHAPE4,
+    USERBLENDSHAPE5,
+    USERBLENDSHAPE6,
+    USERBLENDSHAPE7,
+    USERBLENDSHAPE8,
+    USERBLENDSHAPE9,

     NUM_ACTIONS
 };
@@ -92,8 +92,70 @@ namespace controller {
     RIGHT_GRIP,

     // AJT: blendshapes
-    LEFT_EYE_BLINK,
-    RIGHT_EYE_BLINK,
+    EYEBLINK_L,
+    EYEBLINK_R,
+    EYESQUINT_L,
+    EYESQUINT_R,
+    EYEDOWN_L,
+    EYEDOWN_R,
+    EYEIN_L,
+    EYEIN_R,
+    EYEOPEN_L,
+    EYEOPEN_R,
+    EYEOUT_L,
+    EYEOUT_R,
+    EYEUP_L,
+    EYEUP_R,
+    BROWSD_L,
+    BROWSD_R,
+    BROWSU_C,
+    BROWSU_L,
+    BROWSU_R,
+    JAWFWD,
+    JAWLEFT,
+    JAWOPEN,
+    JAWRIGHT,
+    MOUTHLEFT,
+    MOUTHRIGHT,
+    MOUTHFROWN_L,
+    MOUTHFROWN_R,
+    MOUTHSMILE_L,
+    MOUTHSMILE_R,
+    MOUTHDIMPLE_L,
+    MOUTHDIMPLE_R,
+    LIPSSTRETCH_L,
+    LIPSSTRETCH_R,
+    LIPSUPPERCLOSE,
+    LIPSLOWERCLOSE,
+    LIPSUPPEROPEN,
+    LIPSLOWEROPEN,
+    LIPSFUNNEL,
+    LIPSPUCKER,
+    PUFF,
+    CHEEKSQUINT_L,
+    CHEEKSQUINT_R,
+    LIPSTOGETHER,
+    MOUTHUPPERUP_L,
+    MOUTHUPPERUP_R,
+    MOUTHLOWERDOWN_L,
+    MOUTHLOWERDOWN_R,
+    MOUTHPRESS_L,
+    MOUTHPRESS_R,
+    MOUTHSHRUGLOWER,
+    MOUTHSHRUGUPPER,
+    NOSESNEER_L,
+    NOSESNEER_R,
+    TONGUEOUT,
+    USERBLENDSHAPE0,
+    USERBLENDSHAPE1,
+    USERBLENDSHAPE2,
+    USERBLENDSHAPE3,
+    USERBLENDSHAPE4,
+    USERBLENDSHAPE5,
+    USERBLENDSHAPE6,
+    USERBLENDSHAPE7,
+    USERBLENDSHAPE8,
+    USERBLENDSHAPE9,

     NUM_STANDARD_AXES,
     LZ = LT,
@@ -38,10 +38,10 @@ PacketVersion versionForPacketType(PacketType packetType) {
         return static_cast<PacketVersion>(EntityQueryPacketVersion::ConicalFrustums);
     case PacketType::AvatarIdentity:
     case PacketType::AvatarData:
-        return static_cast<PacketVersion>(AvatarMixerPacketVersion::SendVerificationFailed);
+        return static_cast<PacketVersion>(AvatarMixerPacketVersion::ARKitBlendshapes);
     case PacketType::BulkAvatarData:
     case PacketType::KillAvatar:
-        return static_cast<PacketVersion>(AvatarMixerPacketVersion::SendVerificationFailed);
+        return static_cast<PacketVersion>(AvatarMixerPacketVersion::ARKitBlendshapes);
     case PacketType::MessagesData:
         return static_cast<PacketVersion>(MessageDataVersion::TextOrBinaryData);
     // ICE packets
@@ -339,7 +339,8 @@ enum class AvatarMixerPacketVersion : PacketVersion {
     SendMaxTranslationDimension,
     FBXJointOrderChange,
     HandControllerSection,
-    SendVerificationFailed
+    SendVerificationFailed,
+    ARKitBlendshapes
 };

 enum class DomainConnectRequestVersion : PacketVersion {
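Bumping AvatarMixerPacketVersion lets peers tell whether an avatar packet was produced with the new ARKit blendshape layout. A version gate in the usual style (an assumed pattern for illustration, not code added by this commit) would look like:

    // Assumed helper, illustration only: true when the sender is new enough
    // to use the ARKit blendshape set.
    bool supportsARKitBlendshapes(PacketVersion version) {
        return version >= static_cast<PacketVersion>(AvatarMixerPacketVersion::ARKitBlendshapes);
    }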
@@ -91,7 +91,7 @@ enum class LegacyBlendshpaes : int {
     ChinUpperRaise, // not in ARKit
     Sneer, // split in ARKit
     LegacyBlendshapeCount
-}
+};

 // NEW in ARKit
 // * LipsTogether