Merge pull request #5252 from ctrlaltdavid/20601

DO NOT MERGE - QA for #20601 - Fix and improve eye gaze

Commit 574758eae9
7 changed files with 82 additions and 57 deletions
@@ -2304,38 +2304,42 @@ void Application::updateMyAvatarLookAtPosition() {
     bool isLookingAtSomeone = false;
     glm::vec3 lookAtSpot;
     if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
-        //  When I am in mirror mode, just look right at the camera (myself)
+        //  When I am in mirror mode, just look right at the camera (myself); don't switch gaze points because when physically
+        //  looking in a mirror one's eyes appear steady.
         if (!OculusManager::isConnected()) {
            lookAtSpot = _myCamera.getPosition();
         } else {
-            if (_myAvatar->isLookingAtLeftEye()) {
-                lookAtSpot = OculusManager::getLeftEyePosition();
-            } else {
-                lookAtSpot = OculusManager::getRightEyePosition();
-            }
+            lookAtSpot = _myCamera.getPosition() + OculusManager::getMidEyePosition();
         }

     } else {
         AvatarSharedPointer lookingAt = _myAvatar->getLookAtTargetAvatar().lock();
         if (lookingAt && _myAvatar != lookingAt.get()) {
-            isLookingAtSomeone = true;
             //  If I am looking at someone else, look directly at one of their eyes
-            if (tracker && !tracker->isMuted()) {
-                //  If a face tracker is active, look at the eye for the side my gaze is biased toward
-                if (tracker->getEstimatedEyeYaw() > _myAvatar->getHead()->getFinalYaw()) {
-                    // Look at their right eye
-                    lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getRightEyePosition();
-                } else {
-                    // Look at their left eye
-                    lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getLeftEyePosition();
+            isLookingAtSomeone = true;
+            Head* lookingAtHead = static_cast<Avatar*>(lookingAt.get())->getHead();
+
+            const float MAXIMUM_FACE_ANGLE = 65.0f * RADIANS_PER_DEGREE;
+            glm::vec3 lookingAtFaceOrientation = lookingAtHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT;
+            glm::vec3 fromLookingAtToMe = glm::normalize(_myAvatar->getHead()->getEyePosition()
+                - lookingAtHead->getEyePosition());
+            float faceAngle = glm::angle(lookingAtFaceOrientation, fromLookingAtToMe);
+
+            if (faceAngle < MAXIMUM_FACE_ANGLE) {
+                // Randomly look back and forth between look targets
+                switch (_myAvatar->getEyeContactTarget()) {
+                    case LEFT_EYE:
+                        lookAtSpot = lookingAtHead->getLeftEyePosition();
+                        break;
+                    case RIGHT_EYE:
+                        lookAtSpot = lookingAtHead->getRightEyePosition();
+                        break;
+                    case MOUTH:
+                        lookAtSpot = lookingAtHead->getMouthPosition();
+                        break;
                 }
             } else {
-                //  Need to add randomly looking back and forth between left and right eye for case with no tracker
-                if (_myAvatar->isLookingAtLeftEye()) {
-                    lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getLeftEyePosition();
-                } else {
-                    lookAtSpot = static_cast<Avatar*>(lookingAt.get())->getHead()->getRightEyePosition();
-                }
+                // Just look at their head (mid point between eyes)
+                lookAtSpot = lookingAtHead->getEyePosition();
             }
         } else {
             //  I am not looking at anyone else, so just look forward
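The key new behaviour in the hunk above is the face-angle gate: the eye-contact targets (left eye, right eye, mouth) are only cycled while the other avatar is roughly facing me; otherwise my gaze falls back to the midpoint between their eyes. A self-contained sketch of that test follows; the helper name and signature are illustrative, not part of the codebase, and acos(dot) stands in for glm::angle, which is equivalent for unit vectors.

#include <cmath>
#include <glm/glm.hpp>

// Returns true when the other avatar's facing direction is within maxFaceAngle
// radians of the direction from them to me, i.e. they are more or less looking
// my way, mirroring the MAXIMUM_FACE_ANGLE test in the hunk above.
bool isFacingMe(const glm::vec3& theirEyePosition, const glm::vec3& theirFront,
                const glm::vec3& myEyePosition, float maxFaceAngle) {
    glm::vec3 fromThemToMe = glm::normalize(myEyePosition - theirEyePosition);
    float cosine = glm::clamp(glm::dot(glm::normalize(theirFront), fromThemToMe), -1.0f, 1.0f);
    return std::acos(cosine) < maxFaceAngle;   // 65 degrees in the diff
}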
@@ -2343,14 +2347,13 @@ void Application::updateMyAvatarLookAtPosition() {
                 (_myAvatar->getHead()->getFinalOrientationInWorldFrame() * glm::vec3(0.0f, 0.0f, -TREE_SCALE));
         }
     }
-    //
-    //  Deflect the eyes a bit to match the detected Gaze from 3D camera if active
-    //
-    if (tracker && !tracker->isMuted()) {
+    //  Deflect the eyes a bit to match the detected gaze from Faceshift if active.
+    //  DDE doesn't track eyes.
+    if (tracker && typeid(*tracker) == typeid(Faceshift) && !tracker->isMuted()) {
         float eyePitch = tracker->getEstimatedEyePitch();
         float eyeYaw = tracker->getEstimatedEyeYaw();
         const float GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT = 0.1f;
         // deflect using Faceshift gaze data
         glm::vec3 origin = _myAvatar->getHead()->getEyePosition();
         float pitchSign = (_myCamera.getMode() == CAMERA_MODE_MIRROR) ? -1.0f : 1.0f;
         float deflection = DependencyManager::get<Faceshift>()->getEyeDeflection();
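The deflection code that this hunk changes the guard for continues past the end of the hunk; the lines that actually apply eyePitch and eyeYaw to lookAtSpot are not shown here. The sketch below is only a rough restatement of the idea under that assumption (a small pitch/yaw rotation of the look-at point about the eye origin, scaled by a deflection factor that a caller would shrink while eye contact is held); it is not the exact code in Application.cpp.

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Illustrative only: nudge a look-at point by tracked eye pitch/yaw (degrees),
// rotating it about 'origin'. 'deflection' scales the effect; a caller would
// reduce it during eye contact, as GAZE_DEFLECTION_REDUCTION_DURING_EYE_CONTACT suggests.
glm::vec3 deflectLookAt(const glm::vec3& lookAtSpot, const glm::vec3& origin,
                        float eyePitchDegrees, float eyeYawDegrees, float deflection) {
    glm::quat gazeRotation(glm::radians(
        glm::vec3(eyePitchDegrees * deflection, eyeYawDegrees * deflection, 0.0f)));
    return origin + gazeRotation * (lookAtSpot - origin);
}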
@@ -296,7 +296,7 @@ void Head::relaxLean(float deltaTime) {

 void Head::render(RenderArgs* renderArgs, float alpha, ViewFrustum* renderFrustum, bool postLighting) {
     if (_renderLookatVectors) {
-        renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
+        renderLookatVectors(renderArgs, _leftEyePosition, _rightEyePosition, getCorrectedLookAtPosition());
     }
 }

@@ -324,7 +324,7 @@ glm::vec3 Head::getCorrectedLookAtPosition() {
 }

 void Head::setCorrectedLookAtPosition(glm::vec3 correctedLookAtPosition) {
-    _isLookingAtMe = true;
+    _isLookingAtMe = true;
     _correctedLookAtPosition = correctedLookAtPosition;
 }

@@ -22,11 +22,6 @@
 #include "InterfaceConfig.h"
 #include "world.h"

-enum eyeContactTargets {
-    LEFT_EYE,
-    RIGHT_EYE,
-    MOUTH
-};

 const float EYE_EAR_GAP = 0.08f;

@@ -77,6 +72,7 @@ public:
     const glm::vec3& getLeftEyePosition() const { return _leftEyePosition; }
     glm::vec3 getRightEarPosition() const { return _rightEyePosition + (getRightDirection() * EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
     glm::vec3 getLeftEarPosition() const { return _leftEyePosition + (getRightDirection() * -EYE_EAR_GAP) + (getFrontDirection() * -EYE_EAR_GAP); }
+    glm::vec3 getMouthPosition() const { return _eyePosition - getUpDirection() * glm::length(_rightEyePosition - _leftEyePosition); }

     FaceModel& getFaceModel() { return _faceModel; }
     const FaceModel& getFaceModel() const { return _faceModel; }
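The new getMouthPosition() places the mouth target one eye-separation below the midpoint between the eyes along the head's up axis; with eyes roughly 6.5 cm apart that puts the target about 6.5 cm below the eye line, which is close enough for gaze purposes. A standalone restatement of the formula, with illustrative names:

#include <glm/glm.hpp>

// Approximate mouth position used as a gaze target: drop from the mid-eye point
// by one eye-separation along the head's up axis, as in Head::getMouthPosition().
glm::vec3 approximateMouthPosition(const glm::vec3& midEyePosition,
                                   const glm::vec3& leftEyePosition,
                                   const glm::vec3& rightEyePosition,
                                   const glm::vec3& headUp) {
    float eyeSeparation = glm::length(rightEyePosition - leftEyePosition);
    return midEyePosition - headUp * eyeSeparation;
}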
@@ -148,7 +144,7 @@ private:
     FaceModel _faceModel;

     glm::vec3 _correctedLookAtPosition;

     int _leftEyeLookAtID;
     int _rightEyeLookAtID;

@@ -34,6 +34,9 @@
 #include <TextRenderer.h>
 #include <UserActivityLogger.h>

+#include "devices/Faceshift.h"
+#include "devices/OculusManager.h"
+
 #include "Application.h"
 #include "AvatarManager.h"
 #include "Environment.h"
@@ -42,7 +45,6 @@
 #include "MyAvatar.h"
 #include "Physics.h"
 #include "Recorder.h"
-#include "devices/Faceshift.h"
 #include "Util.h"
 #include "InterfaceLogging.h"

@@ -97,7 +99,7 @@ MyAvatar::MyAvatar() :
     _shouldRender(true),
     _billboardValid(false),
     _feetTouchFloor(true),
-    _isLookingAtLeftEye(true),
+    _eyeContactTarget(LEFT_EYE),
     _realWorldFieldOfView("realWorldFieldOfView",
                           DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
     _firstPersonSkeletonModel(this),
@@ -884,7 +886,6 @@ void MyAvatar::updateLookAtTargetAvatar() {
     const float KEEP_LOOKING_AT_CURRENT_ANGLE_FACTOR = 1.3f;
     const float GREATEST_LOOKING_AT_DISTANCE = 10.0f;

-    int howManyLookingAtMe = 0;
     foreach (const AvatarSharedPointer& avatarPointer, DependencyManager::get<AvatarManager>()->getAvatarHash()) {
         Avatar* avatar = static_cast<Avatar*>(avatarPointer.get());
         bool isCurrentTarget = avatar->getIsLookAtTarget();
@@ -897,17 +898,22 @@ void MyAvatar::updateLookAtTargetAvatar() {
             _targetAvatarPosition = avatarPointer->getPosition();
             smallestAngleTo = angleTo;
         }
         // Check if this avatar is looking at me, and fix their gaze on my camera if so
         if (Application::getInstance()->isLookingAtMyAvatar(avatar)) {
-            howManyLookingAtMe++;
-            //  Have that avatar look directly at my camera
-            //  Philip TODO: correct to look at left/right eye
-            if (qApp->isHMDMode()) {
-                avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition());
-                // FIXME what is the point of this?
-                // avatar->getHead()->setCorrectedLookAtPosition(OculusManager::getLeftEyePosition());
+            // Alter their gaze to look directly at my camera; this looks more natural than looking at my avatar's face.
+            // Offset their gaze according to whether they're looking at one of my eyes or my mouth.
+            glm::vec3 gazeOffset = avatar->getHead()->getLookAtPosition() - getHead()->getEyePosition();
+            const float HUMAN_EYE_SEPARATION = 0.065f;
+            float myEyeSeparation = glm::length(getHead()->getLeftEyePosition() - getHead()->getRightEyePosition());
+            gazeOffset = gazeOffset * HUMAN_EYE_SEPARATION / myEyeSeparation;
+
+            if (Application::getInstance()->isHMDMode()) {
+                //avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getCamera()->getPosition()
+                //    + OculusManager::getMidEyePosition() + gazeOffset);
+                avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+                    + OculusManager::getMidEyePosition() + gazeOffset);
             } else {
-                avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition());
+                avatar->getHead()->setCorrectedLookAtPosition(Application::getInstance()->getViewFrustum()->getPosition()
+                    + gazeOffset);
             }
         } else {
             avatar->getHead()->clearCorrectedLookAtPosition();
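The gazeOffset computation above rescales the other avatar's look-at offset from my avatar's (possibly non-human) eye spacing to a nominal 65 mm human separation, so that when their gaze is redirected onto my camera it still lands on the correct eye or on my mouth rather than being exaggerated or squashed by an oversized or undersized avatar. A minimal restatement with an illustrative free function, not part of MyAvatar:

#include <glm/glm.hpp>

// Rescale a gaze offset measured against my avatar's eyes to human proportions,
// then retarget it onto the camera, as in the hunk above. Desktop case only;
// an HMD build would also add the mid-eye offset.
glm::vec3 correctedGazeTarget(const glm::vec3& theirLookAtPosition,
                              const glm::vec3& myEyePosition,
                              const glm::vec3& myLeftEyePosition,
                              const glm::vec3& myRightEyePosition,
                              const glm::vec3& cameraPosition) {
    const float HUMAN_EYE_SEPARATION = 0.065f;   // metres, same constant as the diff
    glm::vec3 gazeOffset = theirLookAtPosition - myEyePosition;
    float myEyeSeparation = glm::length(myLeftEyePosition - myRightEyePosition);
    gazeOffset *= HUMAN_EYE_SEPARATION / myEyeSeparation;
    return cameraPosition + gazeOffset;
}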
@@ -924,12 +930,24 @@ void MyAvatar::clearLookAtTargetAvatar() {
     _lookAtTargetAvatar.reset();
 }

-bool MyAvatar::isLookingAtLeftEye() {
-    float const CHANCE_OF_CHANGING_EYE = 0.01f;
-    if (randFloat() < CHANCE_OF_CHANGING_EYE) {
-        _isLookingAtLeftEye = !_isLookingAtLeftEye;
+eyeContactTarget MyAvatar::getEyeContactTarget() {
+    float const CHANCE_OF_CHANGING_TARGET = 0.01f;
+    if (randFloat() < CHANCE_OF_CHANGING_TARGET) {
+        float const FIFTY_FIFTY_CHANCE = 0.5f;
+        switch (_eyeContactTarget) {
+            case LEFT_EYE:
+                _eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? MOUTH : RIGHT_EYE;
+                break;
+            case RIGHT_EYE:
+                _eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? LEFT_EYE : MOUTH;
+                break;
+            case MOUTH:
+                _eyeContactTarget = (randFloat() < FIFTY_FIFTY_CHANCE) ? RIGHT_EYE : LEFT_EYE;
+                break;
+        }
     }
-    return _isLookingAtLeftEye;
+
+    return _eyeContactTarget;
 }

 glm::vec3 MyAvatar::getDefaultEyePosition() const {
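With CHANCE_OF_CHANGING_TARGET at 0.01, each call to getEyeContactTarget() is an independent 1% chance to hop to one of the two other targets, so the number of calls spent on a target is geometrically distributed with a mean of about 100; if the function is polled once per rendered frame at 60 fps, that is roughly 1.7 seconds per gaze point. The standalone sketch below simulates the same chain to confirm that figure; it only mirrors the switching logic in the hunk and is not part of the commit.

#include <cstdio>
#include <random>

// Standalone estimate of the mean dwell time of the eye-contact target chain.
enum EyeContactTarget { LEFT_EYE, RIGHT_EYE, MOUTH };

int main() {
    std::mt19937 rng(12345);
    std::uniform_real_distribution<float> randFloat(0.0f, 1.0f);

    const float CHANCE_OF_CHANGING_TARGET = 0.01f;   // same constant as the diff
    EyeContactTarget target = LEFT_EYE;

    long long switches = 0;
    const long long calls = 10000000;                // simulated calls ("frames")
    for (long long i = 0; i < calls; ++i) {
        if (randFloat(rng) < CHANCE_OF_CHANGING_TARGET) {
            // Hop to one of the two other targets with equal probability.
            target = (target == LEFT_EYE)  ? (randFloat(rng) < 0.5f ? MOUTH : RIGHT_EYE)
                   : (target == RIGHT_EYE) ? (randFloat(rng) < 0.5f ? LEFT_EYE : MOUTH)
                                           : (randFloat(rng) < 0.5f ? RIGHT_EYE : LEFT_EYE);
            ++switches;
        }
    }
    // Expect a value near 100 calls per dwell, i.e. ~1.7 s at 60 fps.
    std::printf("mean dwell: %.1f calls\n", static_cast<double>(calls) / switches);
    return 0;
}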
@@ -19,6 +19,12 @@

 class ModelItemID;

+enum eyeContactTarget {
+    LEFT_EYE,
+    RIGHT_EYE,
+    MOUTH
+};
+
 class MyAvatar : public Avatar {
     Q_OBJECT
     Q_PROPERTY(bool shouldRenderLocally READ getShouldRenderLocally WRITE setShouldRenderLocally)
@@ -94,7 +100,7 @@ public:

     bool isMyAvatar() const { return true; }

-    bool isLookingAtLeftEye();
+    eyeContactTarget getEyeContactTarget();

     virtual int parseDataAtOffset(const QByteArray& packet, int offset);

@@ -252,7 +258,7 @@ private:
     QList<AnimationHandlePointer> _animationHandles;

     bool _feetTouchFloor;
-    bool _isLookingAtLeftEye;
+    eyeContactTarget _eyeContactTarget;

     RecorderPointer _recorder;

@@ -283,6 +283,7 @@ static ovrVector3f _eyeOffsets[ovrEye_Count];

 glm::vec3 OculusManager::getLeftEyePosition() { return _eyePositions[ovrEye_Left]; }
 glm::vec3 OculusManager::getRightEyePosition() { return _eyePositions[ovrEye_Right]; }
+glm::vec3 OculusManager::getMidEyePosition() { return (_eyePositions[ovrEye_Left] + _eyePositions[ovrEye_Right]) / 2.0f; }

 void OculusManager::connect(QOpenGLContext* shareContext) {
     qCDebug(interfaceapp) << "Oculus SDK" << OVR_VERSION_STRING;
@@ -692,13 +693,13 @@ void OculusManager::display(QGLWidget * glCanvas, RenderArgs* renderArgs, const
             _eyeRenderPoses[eye] = eyePoses[eye];
             // Set the camera rotation for this eye
-            vec3 eyePosition = toGlm(_eyeRenderPoses[eye].Position);
-            eyePosition = whichCamera.getRotation() * eyePosition;
+            _eyePositions[eye] = toGlm(_eyeRenderPoses[eye].Position);
+            _eyePositions[eye] = whichCamera.getRotation() * _eyePositions[eye];
             quat eyeRotation = toGlm(_eyeRenderPoses[eye].Orientation);

             // Update our camera to what the application camera is doing
             _camera->setRotation(whichCamera.getRotation() * eyeRotation);
-            _camera->setPosition(whichCamera.getPosition() + eyePosition);
+            _camera->setPosition(whichCamera.getPosition() + _eyePositions[eye]);
             configureCamera(*_camera);
             _camera->update(1.0f / Application::getInstance()->getFps());
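getMidEyePosition(), added earlier in this file, just averages the two per-eye positions that display() now caches each frame (rotated by the application camera's rotation); updateMyAvatarLookAtPosition() in Application.cpp adds that midpoint to the camera position when an HMD is connected. A minimal sketch of the relationship, using hypothetical stand-ins rather than the real OculusManager statics:

#include <glm/glm.hpp>

// Hypothetical stand-in for the per-eye positions cached by OculusManager::display().
static glm::vec3 eyePositions[2];

// Same averaging as the new OculusManager::getMidEyePosition().
glm::vec3 midEyePosition() {
    return (eyePositions[0] + eyePositions[1]) / 2.0f;
}

// Mirror-mode gaze target when an HMD is connected: camera position plus the
// mid-eye offset, as in Application::updateMyAvatarLookAtPosition().
glm::vec3 mirrorLookAtSpot(const glm::vec3& cameraPosition) {
    return cameraPosition + midEyePosition();
}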
@@ -47,6 +47,7 @@ public:

     static glm::vec3 getLeftEyePosition();
     static glm::vec3 getRightEyePosition();
+    static glm::vec3 getMidEyePosition();

     static int getHMDScreen();
