Faceshift drives body rotation, remove gyro-driven rotation

This commit is contained in:
Freddy 2013-09-26 23:52:44 -07:00
parent 462d0b90c1
commit 6b0f9314ba
11 changed files with 38 additions and 62 deletions

View file

@ -1750,6 +1750,11 @@ void Application::update(float deltaTime) {
// Update faceshift
_faceshift.update();
// Copy the angular velocity to the head, if it was measured by faceshift
if (_faceshift.isActive()) {
_myAvatar.getHead().setAngularVelocity(_faceshift.getHeadAngularVelocity());
}
// if we have faceshift, use that to compute the lookat direction
glm::vec3 lookAtRayOrigin = mouseRayOrigin, lookAtRayDirection = mouseRayDirection;
@ -1932,7 +1937,7 @@ void Application::update(float deltaTime) {
if (!avatar->isInitialized()) {
avatar->init();
}
avatar->simulate(deltaTime, NULL, 0.f);
avatar->simulate(deltaTime, NULL);
avatar->setMouseRay(mouseRayOrigin, mouseRayDirection);
}
node->unlock();
@ -1947,9 +1952,9 @@ void Application::update(float deltaTime) {
}
if (Menu::getInstance()->isOptionChecked(MenuOption::TransmitterDrive) && _myTransmitter.isConnected()) {
_myAvatar.simulate(deltaTime, &_myTransmitter, Menu::getInstance()->getGyroCameraSensitivity());
_myAvatar.simulate(deltaTime, &_myTransmitter);
} else {
_myAvatar.simulate(deltaTime, NULL, Menu::getInstance()->getGyroCameraSensitivity());
_myAvatar.simulate(deltaTime, NULL);
}
// no transmitter drive implies transmitter pick

View file

@ -485,7 +485,6 @@ void Menu::loadSettings(QSettings* settings) {
settings = Application::getInstance()->getSettings();
}
_gyroCameraSensitivity = loadSetting(settings, "gyroCameraSensitivity", 0.5f);
_audioJitterBufferSamples = loadSetting(settings, "audioJitterBufferSamples", 0);
_fieldOfView = loadSetting(settings, "fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES);
_maxVoxels = loadSetting(settings, "maxVoxels", DEFAULT_MAX_VOXELS_PER_SYSTEM);
@ -509,7 +508,6 @@ void Menu::saveSettings(QSettings* settings) {
settings = Application::getInstance()->getSettings();
}
settings->setValue("gyroCameraSensitivity", _gyroCameraSensitivity);
settings->setValue("audioJitterBufferSamples", _audioJitterBufferSamples);
settings->setValue("fieldOfView", _fieldOfView);
settings->setValue("maxVoxels", _maxVoxels);
@ -778,10 +776,6 @@ void Menu::editPreferences() {
fieldOfView->setValue(_fieldOfView);
form->addRow("Vertical Field of View (Degrees):", fieldOfView);
QDoubleSpinBox* gyroCameraSensitivity = new QDoubleSpinBox();
gyroCameraSensitivity->setValue(_gyroCameraSensitivity);
form->addRow("Gyro Camera Sensitivity (0 - 1):", gyroCameraSensitivity);
QDoubleSpinBox* leanScale = new QDoubleSpinBox();
leanScale->setValue(applicationInstance->getAvatar()->getLeanScale());
form->addRow("Lean Scale:", leanScale);
@ -823,7 +817,6 @@ void Menu::editPreferences() {
applicationInstance->getAvatar()->getHead().setPupilDilation(pupilDilation->value() / (float)pupilDilation->maximum());
_gyroCameraSensitivity = gyroCameraSensitivity->value();
_maxVoxels = maxVoxels->value();
applicationInstance->getVoxels()->setMaxVoxels(_maxVoxels);

View file

@ -49,7 +49,6 @@ public:
float getAudioJitterBufferSamples() const { return _audioJitterBufferSamples; }
float getFieldOfView() const { return _fieldOfView; }
float getGyroCameraSensitivity() const { return _gyroCameraSensitivity; }
BandwidthDialog* getBandwidthDialog() const { return _bandwidthDialog; }
FrustumDrawMode getFrustumDrawMode() const { return _frustumDrawMode; }
ViewFrustumOffset getViewFrustumOffset() const { return _viewFrustumOffset; }
@ -114,7 +113,6 @@ private:
BandwidthDialog* _bandwidthDialog;
float _fieldOfView; /// in Degrees, doesn't apply to HMD like Oculus
FrustumDrawMode _frustumDrawMode;
float _gyroCameraSensitivity;
ViewFrustumOffset _viewFrustumOffset;
QActionGroup* _voxelModeActionsGroup;
VoxelStatsDialog* _voxelStatsDialog;

View file

@ -289,7 +289,7 @@ void Avatar::follow(Avatar* leadingAvatar) {
}
}
void Avatar::simulate(float deltaTime, Transmitter* transmitter, float gyroCameraSensitivity) {
void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
glm::quat orientation = getOrientation();
glm::vec3 front = orientation * IDENTITY_FRONT;
@ -403,7 +403,7 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter, float gyroCamer
_head.setBodyRotation(glm::vec3(_bodyPitch, _bodyYaw, _bodyRoll));
_head.setPosition(_bodyBall[ BODY_BALL_HEAD_BASE ].position);
_head.setSkinColor(glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]));
_head.simulate(deltaTime, false, gyroCameraSensitivity);
_head.simulate(deltaTime, false);
_hand.simulate(deltaTime, false);
// use speed and angular velocity to determine walking vs. standing

View file

@ -135,7 +135,7 @@ public:
~Avatar();
void init();
void simulate(float deltaTime, Transmitter* transmitter, float gyroCameraSensitivity);
void simulate(float deltaTime, Transmitter* transmitter);
void follow(Avatar* leadingAvatar);
void render(bool lookingInMirror, bool renderAvatarBalls);

View file

@ -71,6 +71,7 @@ Head::Head(Avatar* owningAvatar) :
_audioAttack(0.0f),
_returnSpringScale(1.0f),
_bodyRotation(0.0f, 0.0f, 0.0f),
_angularVelocity(0,0,0),
_renderLookatVectors(false),
_mohawkInitialized(false),
_saccade(0.0f, 0.0f, 0.0f),
@ -132,7 +133,7 @@ void Head::resetHairPhysics() {
}
void Head::simulate(float deltaTime, bool isMine, float gyroCameraSensitivity) {
void Head::simulate(float deltaTime, bool isMine) {
// Update audio trailing average for rendering facial animations
Faceshift* faceshift = Application::getInstance()->getFaceshift();
@ -230,44 +231,6 @@ void Head::simulate(float deltaTime, bool isMine, float gyroCameraSensitivity) {
// based on the nature of the lookat position, determine if the eyes can look / are looking at it.
if (USING_PHYSICAL_MOHAWK) {
updateHairPhysics(deltaTime);
}
// Update camera pitch and yaw independently from motion of head (for gyro-based interface)
if (isMine && _cameraFollowsHead && (gyroCameraSensitivity > 0.f)) {
// If we are using gyros and using gyroLook, have the camera follow head but with a null region
// to create stable rendering view with small head movements.
const float CAMERA_FOLLOW_HEAD_RATE_START = 0.1f;
const float CAMERA_FOLLOW_HEAD_RATE_MAX = 1.0f;
const float CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE = 1.05f;
const float CAMERA_STOP_TOLERANCE_DEGREES = 0.5f;
const float PITCH_START_RANGE = 20.f;
const float YAW_START_RANGE = 10.f;
float pitchStartTolerance = PITCH_START_RANGE
* (1.f - gyroCameraSensitivity)
+ (2.f * CAMERA_STOP_TOLERANCE_DEGREES);
float yawStartTolerance = YAW_START_RANGE
* (1.f - gyroCameraSensitivity)
+ (2.f * CAMERA_STOP_TOLERANCE_DEGREES);
float cameraHeadAngleDifference = glm::length(glm::vec2(_pitch - _cameraPitch, _yaw - _cameraYaw));
if (_isCameraMoving) {
_cameraFollowHeadRate = glm::clamp(_cameraFollowHeadRate * CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE,
0.f,
CAMERA_FOLLOW_HEAD_RATE_MAX);
_cameraPitch += (_pitch - _cameraPitch) * _cameraFollowHeadRate;
_cameraYaw += (_yaw - _cameraYaw) * _cameraFollowHeadRate;
if (cameraHeadAngleDifference < CAMERA_STOP_TOLERANCE_DEGREES) {
_isCameraMoving = false;
}
} else {
if ((fabs(_pitch - _cameraPitch) > pitchStartTolerance) ||
(fabs(_yaw - _cameraYaw) > yawStartTolerance)) {
_isCameraMoving = true;
_cameraFollowHeadRate = CAMERA_FOLLOW_HEAD_RATE_START;
}
}
}
}

View file

@ -44,7 +44,7 @@ public:
void init();
void reset();
void simulate(float deltaTime, bool isMine, float gyroCameraSensitivity);
void simulate(float deltaTime, bool isMine);
void render(float alpha, bool isMine);
void renderMohawk();
@ -64,6 +64,8 @@ public:
glm::quat getOrientation() const;
glm::quat getCameraOrientation () const;
const glm::vec3& getAngularVelocity() const { return _angularVelocity; }
void setAngularVelocity(glm::vec3 angularVelocity) { _angularVelocity = angularVelocity; }
float getScale() const { return _scale; }
glm::vec3 getPosition() const { return _position; }
@ -116,6 +118,7 @@ private:
float _audioAttack;
float _returnSpringScale; //strength of return springs
glm::vec3 _bodyRotation;
glm::vec3 _angularVelocity;
bool _renderLookatVectors;
BendyLine _hairTuft[NUM_HAIR_TUFTS];
bool _mohawkInitialized;

View file

@ -65,7 +65,7 @@ void MyAvatar::reset() {
_hand.reset();
}
void MyAvatar::simulate(float deltaTime, Transmitter* transmitter, float gyroCameraSensitivity) {
void MyAvatar::simulate(float deltaTime, Transmitter* transmitter) {
glm::quat orientation = getOrientation();
glm::vec3 front = orientation * IDENTITY_FRONT;
@ -303,7 +303,7 @@ void MyAvatar::simulate(float deltaTime, Transmitter* transmitter, float gyroCam
_head.setPosition(_bodyBall[ BODY_BALL_HEAD_BASE ].position);
_head.setScale(_scale);
_head.setSkinColor(glm::vec3(SKIN_COLOR[0], SKIN_COLOR[1], SKIN_COLOR[2]));
_head.simulate(deltaTime, true, gyroCameraSensitivity);
_head.simulate(deltaTime, true);
_hand.simulate(deltaTime, true);
const float WALKING_SPEED_THRESHOLD = 0.2f;
@ -337,7 +337,13 @@ void MyAvatar::updateFromGyrosAndOrWebcam(bool gyroLook,
if (faceshift->isActive()) {
estimatedPosition = faceshift->getHeadTranslation();
estimatedRotation = safeEulerAngles(faceshift->getHeadRotation());
// Rotate the body if the head is turned quickly
glm::vec3 headAngularVelocity = faceshift->getHeadAngularVelocity();
const float FACESHIFT_YAW_VIEW_SENSITIVITY = 20.f;
const float FACESHIFT_MIN_YAW_VELOCITY = 1.0f;
if (fabs(headAngularVelocity.y) > FACESHIFT_MIN_YAW_VELOCITY) {
_bodyYawDelta += headAngularVelocity.y * FACESHIFT_YAW_VIEW_SENSITIVITY;
}
} else if (gyros->isActive()) {
estimatedRotation = gyros->getEstimatedRotation();

View file

@ -16,7 +16,7 @@ public:
MyAvatar(Node* owningNode = NULL);
void reset();
void simulate(float deltaTime, Transmitter* transmitter, float gyroCameraSensitivity);
void simulate(float deltaTime, Transmitter* transmitter);
void updateFromGyrosAndOrWebcam(bool gyroLook, float pitchFromTouch);
void render(bool lookingInMirror, bool renderAvatarBalls);
void renderScreenTint(ScreenTintLayer layer, Camera& whichCamera);

View file

@ -144,12 +144,18 @@ void Faceshift::receive(const QByteArray& buffer) {
if ((_tracking = data.m_trackingSuccessful)) {
glm::quat newRotation = glm::quat(data.m_headRotation.w, -data.m_headRotation.x,
data.m_headRotation.y, -data.m_headRotation.z);
// Compute angular velocity of the head
glm::quat r = newRotation * glm::inverse(_headRotation);
//printf("angle = %.3f\n", 2 * acos(r.w));
float theta = 2 * acos(r.w);
if (theta > EPSILON) {
float rMag = glm::length(glm::vec3(r.x, r.y, r.z));
float AVERAGE_FACESHIFT_FRAME_TIME = 0.033f;
_headAngularVelocity = theta / AVERAGE_FACESHIFT_FRAME_TIME * glm::vec3(r.x, r.y, r.z) / rMag;
} else {
_headAngularVelocity = glm::vec3(0,0,0);
}
_headRotation = newRotation;
//_headRotation = glm::quat(data.m_headRotation.w, -data.m_headRotation.x,
// data.m_headRotation.y, -data.m_headRotation.z);
const float TRANSLATION_SCALE = 0.02f;
_headTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
-data.m_headTranslation.z) * TRANSLATION_SCALE;

View file

@ -30,6 +30,7 @@ public:
bool isActive() const;
const glm::quat& getHeadRotation() const { return _headRotation; }
const glm::vec3& getHeadAngularVelocity() const { return _headAngularVelocity; }
const glm::vec3& getHeadTranslation() const { return _headTranslation; }
float getEyeGazeLeftPitch() const { return _eyeGazeLeftPitch; }
@ -88,6 +89,7 @@ private:
uint64_t _lastTrackingStateReceived;
glm::quat _headRotation;
glm::vec3 _headAngularVelocity;
glm::vec3 _headTranslation;
float _eyeGazeLeftPitch;