Merge branch 'master' of github.com:worklist/hifi

Commit: 23e1fab9be
5 changed files with 63 additions and 29 deletions
@@ -362,9 +362,7 @@ void Application::paintGL() {
     glEnable(GL_LINE_SMOOTH);
     glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
 
-    float headCameraScale = (_serialHeadSensor.isActive() || _webcam.isActive()) ? _headCameraPitchYawScale : 1.0f;
-
     if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
         _myCamera.setTightness (100.0f);
         _myCamera.setTargetPosition(_myAvatar.getUprightHeadPosition());
@@ -380,11 +378,11 @@ void Application::paintGL() {
     } else if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
         _myCamera.setTightness(0.0f);  // In first person, camera follows head exactly without delay
         _myCamera.setTargetPosition(_myAvatar.getUprightHeadPosition());
-        _myCamera.setTargetRotation(_myAvatar.getHead().getCameraOrientation(headCameraScale));
+        _myCamera.setTargetRotation(_myAvatar.getHead().getCameraOrientation());
 
     } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
         _myCamera.setTargetPosition(_myAvatar.getUprightHeadPosition());
-        _myCamera.setTargetRotation(_myAvatar.getHead().getCameraOrientation(headCameraScale));
+        _myCamera.setTargetRotation(_myAvatar.getHead().getCameraOrientation());
     }
 
     // Update camera position
@@ -1458,8 +1456,8 @@ void Application::initMenu() {
     (_echoAudioMode = optionsMenu->addAction("Echo Audio"))->setCheckable(true);
 
     optionsMenu->addAction("Noise", this, SLOT(setNoise(bool)), Qt::Key_N)->setCheckable(true);
-    (_gyroLook = optionsMenu->addAction("Gyro Look"))->setCheckable(true);
-    _gyroLook->setChecked(false);
+    (_gyroLook = optionsMenu->addAction("Smooth Gyro Look"))->setCheckable(true);
+    _gyroLook->setChecked(true);
     (_mouseLook = optionsMenu->addAction("Mouse Look"))->setCheckable(true);
     _mouseLook->setChecked(true);
     (_touchLook = optionsMenu->addAction("Touch Look"))->setCheckable(true);
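For context, the "Smooth Gyro Look" item above is an ordinary checkable QAction whose state is polled later (see _gyroLook->isChecked() in updateAvatar below). A minimal standalone sketch of that pattern, not part of this commit, with illustrative names and assuming a Qt Widgets build:

    #include <QApplication>
    #include <QMainWindow>
    #include <QMenuBar>
    #include <QMenu>
    #include <QAction>

    int main(int argc, char** argv) {
        QApplication app(argc, argv);
        QMainWindow window;
        QMenu* optionsMenu = window.menuBar()->addMenu("Options");

        // Create a checkable action, default it to checked, and keep the pointer
        // so other code can query its state each frame (as updateAvatar does).
        QAction* gyroLook = optionsMenu->addAction("Smooth Gyro Look");
        gyroLook->setCheckable(true);
        gyroLook->setChecked(true);

        bool followHead = gyroLook->isChecked();   // read the menu state
        (void) followHead;

        window.show();
        return app.exec();
    }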
@@ -1913,18 +1911,13 @@ void Application::update(float deltaTime) {
 void Application::updateAvatar(float deltaTime) {
 
     // Update my avatar's head position from gyros and/or webcam
-    _myAvatar.updateHeadFromGyrosAndOrWebcam();
+    _myAvatar.updateHeadFromGyrosAndOrWebcam(_gyroLook->isChecked(),
+                                             glm::vec3(_headCameraPitchYawScale,
+                                                       _headCameraPitchYawScale,
+                                                       _headCameraPitchYawScale));
 
     if (_serialHeadSensor.isActive()) {
-
-        // Update avatar head translation
-        if (_gyroLook->isChecked()) {
-            glm::vec3 headPosition = _serialHeadSensor.getEstimatedPosition();
-            const float HEAD_OFFSET_SCALING = 3.f;
-            headPosition *= HEAD_OFFSET_SCALING;
-            _myCamera.setEyeOffsetPosition(headPosition);
-        }
 
         // Grab latest readings from the gyros
         float measuredPitchRate = _serialHeadSensor.getLastPitchRate();
         float measuredYawRate = _serialHeadSensor.getLastYawRate();
@@ -285,10 +285,7 @@ void Avatar::reset() {
 }
 
 // Update avatar head rotation with sensor data
-void Avatar::updateHeadFromGyrosAndOrWebcam() {
-    const float AMPLIFY_PITCH = 2.f;
-    const float AMPLIFY_YAW = 2.f;
-    const float AMPLIFY_ROLL = 2.f;
+void Avatar::updateHeadFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngle) {
 
     SerialInterface* gyros = Application::getInstance()->getSerialHeadSensor();
     Webcam* webcam = Application::getInstance()->getWebcam();
@@ -306,9 +303,10 @@ void Avatar::updateHeadFromGyrosAndOrWebcam() {
     } else {
         return;
     }
-    _head.setPitch(estimatedRotation.x * AMPLIFY_PITCH);
-    _head.setYaw(estimatedRotation.y * AMPLIFY_YAW);
-    _head.setRoll(estimatedRotation.z * AMPLIFY_ROLL);
+    _head.setPitch(estimatedRotation.x * amplifyAngle.x);
+    _head.setYaw(estimatedRotation.y * amplifyAngle.y);
+    _head.setRoll(estimatedRotation.z * amplifyAngle.z);
+    _head.setCameraFollowsHead(gyroLook);
 
     // Update torso lean distance based on accelerometer data
     const float TORSO_LENGTH = 0.5f;
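The amplifyAngle vector passed in from Application is applied component-wise to the rotation estimated from the gyros or webcam. A minimal standalone sketch of that math, not part of this commit, assuming GLM is available (values are illustrative):

    #include <glm/glm.hpp>
    #include <cstdio>

    int main() {
        // Hypothetical rotation estimate from gyros/webcam, in degrees.
        glm::vec3 estimatedRotation(5.0f, -3.0f, 1.0f);

        // Application passes a uniform vector built from _headCameraPitchYawScale,
        // e.g. (2, 2, 2), so every axis is amplified by the same factor.
        glm::vec3 amplifyAngle(2.0f, 2.0f, 2.0f);

        // Component-wise product, matching the setPitch/setYaw/setRoll lines above.
        glm::vec3 headAngles = estimatedRotation * amplifyAngle;
        std::printf("pitch %.1f  yaw %.1f  roll %.1f\n", headAngles.x, headAngles.y, headAngles.z);
        return 0;
    }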
@@ -87,7 +87,7 @@ public:
     void reset();
     void simulate(float deltaTime, Transmitter* transmitter);
     void updateThrust(float deltaTime, Transmitter * transmitter);
-    void updateHeadFromGyrosAndOrWebcam();
+    void updateHeadFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngles);
     void updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight);
     void updateFromTouch(float touchAvgDistX, float touchAvgDistY);
     void addBodyYaw(float y) {_bodyYaw += y;};
@@ -77,7 +77,12 @@ Head::Head(Avatar* owningAvatar) :
     _rightEyeBlink(0.0f),
     _leftEyeBlinkVelocity(0.0f),
     _rightEyeBlinkVelocity(0.0f),
-    _timeWithoutTalking(0.0f)
+    _timeWithoutTalking(0.0f),
+    _cameraPitch(_pitch),
+    _cameraYaw(_yaw),
+    _isCameraMoving(false),
+    _cameraFollowsHead(false),
+    _cameraFollowHeadRate(0.0f)
 {
     if (USING_PHYSICAL_MOHAWK) {
         resetHairPhysics();
@@ -213,6 +218,37 @@ void Head::simulate(float deltaTime, bool isMine) {
         updateHairPhysics(deltaTime);
     }
 
+    // Update camera pitch and yaw independently from motion of head (for gyro-based interface)
+    if (isMine && _cameraFollowsHead) {
+        // If we are using gyros and using gyroLook, have the camera follow head but with a null region
+        // to create stable rendering view with small head movements.
+        const float CAMERA_FOLLOW_HEAD_RATE_START = 0.05f;
+        const float CAMERA_FOLLOW_HEAD_RATE_MAX = 0.25f;
+        const float CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE = 1.5f;
+        const float CAMERA_STOP_TOLERANCE_DEGREES = 0.25f;
+        const float CAMERA_START_TOLERANCE_DEGREES = 15.0f;
+        float cameraHeadAngleDifference = glm::length(glm::vec2(_pitch - _cameraPitch, _yaw - _cameraYaw));
+        if (_isCameraMoving) {
+            _cameraFollowHeadRate = glm::clamp(_cameraFollowHeadRate * CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE,
+                                               0.f,
+                                               CAMERA_FOLLOW_HEAD_RATE_MAX);
+
+            _cameraPitch += (_pitch - _cameraPitch) * _cameraFollowHeadRate;
+            _cameraYaw += (_yaw - _cameraYaw) * _cameraFollowHeadRate;
+            if (cameraHeadAngleDifference < CAMERA_STOP_TOLERANCE_DEGREES) {
+                _isCameraMoving = false;
+            }
+        } else {
+            if (cameraHeadAngleDifference > CAMERA_START_TOLERANCE_DEGREES) {
+                _isCameraMoving = true;
+                _cameraFollowHeadRate = CAMERA_FOLLOW_HEAD_RATE_START;
+            }
+        }
+    } else {
+        // Camera always locked to head
+        _cameraPitch = _pitch;
+        _cameraYaw = _yaw;
+    }
 }
 
 void Head::calculateGeometry() {
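The block added above is the heart of the change: the rendered camera only starts chasing the head once the pitch/yaw difference leaves a 15-degree null region, ramps its follow rate up each frame, and locks again once within 0.25 degrees. A standalone sketch of the same hysteresis, not part of this commit, with illustrative names (CameraFollowState, followHead) and the constants taken from the hunk above:

    #include <glm/glm.hpp>

    struct CameraFollowState {
        float cameraPitch = 0.0f;
        float cameraYaw = 0.0f;
        bool isMoving = false;
        float followRate = 0.0f;
    };

    void followHead(CameraFollowState& s, float headPitch, float headYaw) {
        const float CAMERA_FOLLOW_HEAD_RATE_START = 0.05f;
        const float CAMERA_FOLLOW_HEAD_RATE_MAX = 0.25f;
        const float CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE = 1.5f;
        const float CAMERA_STOP_TOLERANCE_DEGREES = 0.25f;
        const float CAMERA_START_TOLERANCE_DEGREES = 15.0f;

        float difference = glm::length(glm::vec2(headPitch - s.cameraPitch, headYaw - s.cameraYaw));
        if (s.isMoving) {
            // Ramp the follow rate up each frame, then ease the camera toward the head
            // until it is back within the stop tolerance.
            s.followRate = glm::clamp(s.followRate * CAMERA_FOLLOW_HEAD_RATE_RAMP_RATE,
                                      0.0f, CAMERA_FOLLOW_HEAD_RATE_MAX);
            s.cameraPitch += (headPitch - s.cameraPitch) * s.followRate;
            s.cameraYaw += (headYaw - s.cameraYaw) * s.followRate;
            if (difference < CAMERA_STOP_TOLERANCE_DEGREES) {
                s.isMoving = false;
            }
        } else if (difference > CAMERA_START_TOLERANCE_DEGREES) {
            // Small head motions inside the null region leave the camera untouched.
            s.isMoving = true;
            s.followRate = CAMERA_FOLLOW_HEAD_RATE_START;
        }
    }

Calling followHead once per frame with the current head pitch and yaw reproduces the behavior Head::simulate applies when _cameraFollowsHead is set; when it is not set, the camera angles simply track the head exactly.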
@@ -358,10 +394,10 @@ glm::quat Head::getOrientation() const {
         glm::vec3(_pitch, -_yaw, -_roll) : glm::vec3(_pitch, _yaw, _roll)));
 }
 
-glm::quat Head::getCameraOrientation (float pitchYawScale) const {
+glm::quat Head::getCameraOrientation () const {
     Avatar* owningAvatar = static_cast<Avatar*>(_owningAvatar);
-    return owningAvatar->getWorldAlignedOrientation() * glm::quat(glm::radians(glm::vec3(
-        _pitch * pitchYawScale, _yaw * pitchYawScale, 0.0f)));
+    return owningAvatar->getWorldAlignedOrientation()
+        * glm::quat(glm::radians(glm::vec3(_cameraPitch, _cameraYaw, 0.0f)));
 }
 
 void Head::renderHeadSphere() {
@@ -48,8 +48,10 @@ public:
     void setReturnToCenter (bool returnHeadToCenter) { _returnHeadToCenter = returnHeadToCenter; }
     void setRenderLookatVectors(bool onOff ) { _renderLookatVectors = onOff; }
 
+    void setCameraFollowsHead(bool cameraFollowsHead) { _cameraFollowsHead = cameraFollowsHead; }
+
     glm::quat getOrientation() const;
-    glm::quat getCameraOrientation (float pitchYawScale) const;
+    glm::quat getCameraOrientation () const;
 
     glm::vec3 getPosition() const { return _position; }
     glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
@@ -112,6 +114,11 @@ private:
     float _leftEyeBlinkVelocity;
     float _rightEyeBlinkVelocity;
     float _timeWithoutTalking;
+    float _cameraPitch;            // Used to position the camera differently from the head
+    float _cameraYaw;
+    bool _isCameraMoving;
+    bool _cameraFollowsHead;
+    float _cameraFollowHeadRate;
 
     static ProgramObject* _irisProgram;
     static GLuint _irisTextureID;