Working on brow/mouth mapping.

Andrzej Kapolka 2013-09-04 11:40:53 -07:00
parent f84d6f4f37
commit 679926ce42
4 changed files with 62 additions and 34 deletions
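
For context, the change routes two more Faceshift blendshape coefficients ("BrowsU_C" and "JawOpen") into the avatar's head: when the tracker is active, its brow and jaw values replace the audio-driven brow lift and loudness estimates that drive the facial animation. A minimal sketch of that mapping follows; the stripped-down structs and field names are stand-ins for illustration, not the project's actual classes.

// Sketch only: simplified stand-ins for the real Faceshift and Head classes.
struct FaceshiftSketch {
    bool active = false;
    float browHeight = 0.0f; // cached coefficient of the "BrowsU_C" blendshape
    float mouthSize = 0.0f;  // cached coefficient of the "JawOpen" blendshape
    bool isActive() const { return active; }
    float getBrowHeight() const { return browHeight; }
    float getMouthSize() const { return mouthSize; }
};

struct HeadSketch {
    float averageLoudness = 0.0f; // normally an audio-derived trailing average
    float browAudioLift = 0.0f;   // normally accumulated from the audio "attack"

    void simulate(const FaceshiftSketch& faceshift) {
        if (faceshift.isActive()) {
            // Tracker data wins: reuse the existing audio-driven fields so the
            // downstream rendering code stays unchanged.
            averageLoudness = faceshift.getMouthSize();
            browAudioLift = faceshift.getBrowHeight();
        } else {
            // otherwise fall back to the audio-driven path (see Head::simulate below)
        }
    }
};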

@@ -2276,7 +2276,7 @@ void Application::displaySide(Camera& whichCamera) {
}
// Render my own Avatar
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
if (_myCamera.getMode() == CAMERA_MODE_MIRROR && !_faceshift.isActive()) {
_myAvatar.getHead().setLookAtPosition(_myCamera.getPosition());
}
_myAvatar.render(Menu::getInstance()->isOptionChecked(MenuOption::Mirror),

@@ -163,41 +163,45 @@ void Head::simulate(float deltaTime, bool isMine, float gyroCameraSensitivity) {
_saccade += (_saccadeTarget - _saccade) * 0.50f;
// Update audio trailing average for rendering facial animations
const float AUDIO_AVERAGING_SECS = 0.05;
_averageLoudness = (1.f - deltaTime / AUDIO_AVERAGING_SECS) * _averageLoudness +
(deltaTime / AUDIO_AVERAGING_SECS) * _audioLoudness;
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;
const float BLINK_AFTER_TALKING = 0.25f;
if (_averageLoudness > TALKING_LOUDNESS) {
_timeWithoutTalking = 0.0f;
} else if (_timeWithoutTalking < BLINK_AFTER_TALKING && (_timeWithoutTalking += deltaTime) >= BLINK_AFTER_TALKING) {
forceBlink = true;
}
// Update audio attack data for facial animation (eyebrows and mouth)
_audioAttack = 0.9 * _audioAttack + 0.1 * fabs(_audioLoudness - _lastLoudness);
_lastLoudness = _audioLoudness;
const float BROW_LIFT_THRESHOLD = 100;
if (_audioAttack > BROW_LIFT_THRESHOLD)
_browAudioLift += sqrt(_audioAttack) * 0.00005;
float clamp = 0.01;
if (_browAudioLift > clamp) { _browAudioLift = clamp; }
_browAudioLift *= 0.7f;
// update eyelid blinking
Faceshift* faceshift = Application::getInstance()->getFaceshift();
if (isMine && faceshift->isActive()) {
_leftEyeBlink = faceshift->getLeftBlink();
_rightEyeBlink = faceshift->getRightBlink();
// set these values based on how they'll be used. if we use faceshift in the long term, we'll want a complete
// mapping between their blendshape coefficients and our avatar features
_averageLoudness = faceshift->getMouthSize();
_browAudioLift = faceshift->getBrowHeight();
} else {
const float AUDIO_AVERAGING_SECS = 0.05;
_averageLoudness = (1.f - deltaTime / AUDIO_AVERAGING_SECS) * _averageLoudness +
(deltaTime / AUDIO_AVERAGING_SECS) * _audioLoudness;
// Detect transition from talking to not; force blink after that and a delay
bool forceBlink = false;
const float TALKING_LOUDNESS = 100.0f;
const float BLINK_AFTER_TALKING = 0.25f;
if (_averageLoudness > TALKING_LOUDNESS) {
_timeWithoutTalking = 0.0f;
} else if (_timeWithoutTalking < BLINK_AFTER_TALKING && (_timeWithoutTalking += deltaTime) >= BLINK_AFTER_TALKING) {
forceBlink = true;
}
// Update audio attack data for facial animation (eyebrows and mouth)
_audioAttack = 0.9 * _audioAttack + 0.1 * fabs(_audioLoudness - _lastLoudness);
_lastLoudness = _audioLoudness;
const float BROW_LIFT_THRESHOLD = 100;
if (_audioAttack > BROW_LIFT_THRESHOLD)
_browAudioLift += sqrt(_audioAttack) * 0.00005;
float clamp = 0.01;
if (_browAudioLift > clamp) { _browAudioLift = clamp; }
_browAudioLift *= 0.7f;
const float BLINK_SPEED = 10.0f;
const float FULLY_OPEN = 0.0f;
const float FULLY_CLOSED = 1.0f;
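
The audio-driven fallback kept in the else branch above is essentially two exponential moving averages: the loudness is low-passed with a time constant of AUDIO_AVERAGING_SECS (0.05 s), its rate of change (the "attack") is smoothed with fixed 0.9/0.1 weights, and sharp attacks nudge the brow upward before it is clamped and decayed. Below is a self-contained sketch of that arithmetic, using illustrative names rather than the engine's:

#include <algorithm>
#include <cmath>

// Constants copied from the diff above.
const float AUDIO_AVERAGING_SECS = 0.05f;
const float BROW_LIFT_THRESHOLD = 100.0f;

struct AudioBrowState {
    float averageLoudness = 0.0f;
    float audioAttack = 0.0f;
    float lastLoudness = 0.0f;
    float browAudioLift = 0.0f;
};

// One simulation step of the audio-driven path: low-pass the loudness,
// track its rate of change, and lift the brow on sharp attacks.
void stepAudioBrow(AudioBrowState& s, float deltaTime, float audioLoudness) {
    float alpha = deltaTime / AUDIO_AVERAGING_SECS;   // ~50 ms time constant
    s.averageLoudness = (1.0f - alpha) * s.averageLoudness + alpha * audioLoudness;
    s.audioAttack = 0.9f * s.audioAttack + 0.1f * std::fabs(audioLoudness - s.lastLoudness);
    s.lastLoudness = audioLoudness;
    if (s.audioAttack > BROW_LIFT_THRESHOLD) {
        s.browAudioLift += std::sqrt(s.audioAttack) * 0.00005f;
    }
    s.browAudioLift = std::min(s.browAudioLift, 0.01f); // clamp as in the diff
    s.browAudioLift *= 0.7f;                            // decay toward rest
}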

@@ -13,9 +13,9 @@
using namespace fs;
using namespace std;
Faceshift::Faceshift() : _enabled(false), _eyeGazeLeftPitch(0.0f), _eyeGazeLeftYaw(0.0f),
_eyeGazeRightPitch(0.0f), _eyeGazeRightYaw(0.0f), _leftBlink(0.0f), _rightBlink(0.0f),
_leftBlinkIndex(-1), _rightBlinkIndex(-1) {
Faceshift::Faceshift() : _enabled(false), _eyeGazeLeftPitch(0.0f), _eyeGazeLeftYaw(0.0f), _eyeGazeRightPitch(0.0f),
_eyeGazeRightYaw(0.0f), _leftBlink(0.0f), _rightBlink(0.0f), _leftBlinkIndex(-1), _rightBlinkIndex(-1),
_browHeight(0.0f), _browUpCenterIndex(-1), _mouthSize(0.0f), _jawOpenIndex(-1) {
connect(&_socket, SIGNAL(connected()), SLOT(noteConnected()));
connect(&_socket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
connect(&_socket, SIGNAL(readyRead()), SLOT(readFromSocket()));
@@ -82,6 +82,12 @@ void Faceshift::readFromSocket() {
if (_rightBlinkIndex != -1) {
_rightBlink = data.m_coeffs[_rightBlinkIndex];
}
if (_browUpCenterIndex != -1) {
_browHeight = data.m_coeffs[_browUpCenterIndex];
}
if (_jawOpenIndex != -1) {
_mouthSize = data.m_coeffs[_jawOpenIndex];
}
}
break;
}
@@ -93,6 +99,12 @@ void Faceshift::readFromSocket() {
} else if (names[i] == "EyeBlink_R") {
_rightBlinkIndex = i;
} else if (names[i] == "BrowsU_C") {
_browUpCenterIndex = i;
} else if (names[i] == "JawOpen") {
_jawOpenIndex = i;
}
}
break;

@@ -38,6 +38,10 @@ public:
float getLeftBlink() const { return _leftBlink; }
float getRightBlink() const { return _rightBlink; }
float getBrowHeight() const { return _browHeight; }
float getMouthSize() const { return _mouthSize; }
public slots:
void setEnabled(bool enabled);
@@ -71,6 +75,14 @@ private:
int _leftBlinkIndex;
int _rightBlinkIndex;
float _browHeight;
int _browUpCenterIndex;
float _mouthSize;
int _jawOpenIndex;
};
#endif /* defined(__interface__Faceshift__) */