Avatar head roll/lean based on webcam data.

commit b0dd5223e1 (parent b14a0c1554)
8 changed files with 83 additions and 25 deletions
Application.cpp

@@ -304,7 +304,7 @@ void Application::paintGL() {
     glEnable(GL_LINE_SMOOTH);
     glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
 
-    float headCameraScale = _serialHeadSensor.active ? _headCameraPitchYawScale : 1.0f;
+    float headCameraScale = _serialHeadSensor.isActive() ? _headCameraPitchYawScale : 1.0f;
 
     if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
         _myCamera.setTightness (100.0f);
@@ -775,7 +775,7 @@ void Application::timer() {
     gettimeofday(&_timerStart, NULL);
 
     // if we haven't detected gyros, check for them now
-    if (!_serialHeadSensor.active) {
+    if (!_serialHeadSensor.isActive()) {
        _serialHeadSensor.pair();
    }
@@ -1576,7 +1576,7 @@ void Application::update(float deltaTime) {
    }
 
    // Read serial port interface devices
-    if (_serialHeadSensor.active) {
+    if (_serialHeadSensor.isActive()) {
        _serialHeadSensor.readData(deltaTime);
    }
@@ -1654,8 +1654,10 @@ void Application::update(float deltaTime) {
 
 void Application::updateAvatar(float deltaTime) {
 
-    if (_serialHeadSensor.active) {
+    // Update my avatar's head position from gyros and/or webcam
+    _myAvatar.updateHeadFromGyrosAndOrWebcam();
+
+    if (_serialHeadSensor.isActive()) {
 
        // Update avatar head translation
        if (_gyroLook->isChecked()) {
@@ -1664,10 +1666,7 @@ void Application::updateAvatar(float deltaTime) {
            headPosition *= HEAD_OFFSET_SCALING;
            _myCamera.setEyeOffsetPosition(headPosition);
        }
 
-        // Update my avatar's head position from gyros
-        _myAvatar.updateHeadFromGyros(deltaTime, &_serialHeadSensor);
-
        // Grab latest readings from the gyros
        float measuredPitchRate = _serialHeadSensor.getLastPitchRate();
        float measuredYawRate = _serialHeadSensor.getLastYawRate();
@@ -2522,7 +2521,7 @@ void Application::resetSensors() {
    _headMouseX = _mouseX = _glWidget->width() / 2;
    _headMouseY = _mouseY = _glWidget->height() / 2;
 
-    if (_serialHeadSensor.active) {
+    if (_serialHeadSensor.isActive()) {
        _serialHeadSensor.resetAverages();
    }
    _webcam.reset();
Application.h

@@ -76,6 +76,7 @@ public:
    VoxelSystem* getVoxels() { return &_voxels; }
    QSettings* getSettings() { return _settings; }
    Environment* getEnvironment() { return &_environment; }
+    SerialInterface* getSerialHeadSensor() { return &_serialHeadSensor; }
    Webcam* getWebcam() { return &_webcam; }
    bool shouldEchoAudio() { return _echoAudioMode->isChecked(); }
Avatar.cpp

@@ -280,22 +280,33 @@ void Avatar::reset() {
 }
 
 // Update avatar head rotation with sensor data
-void Avatar::updateHeadFromGyros(float deltaTime, SerialInterface* serialInterface) {
-    const float AMPLIFY_PITCH = 2.f;
-    const float AMPLIFY_YAW = 2.f;
-    const float AMPLIFY_ROLL = 2.f;
+void Avatar::updateHeadFromGyrosAndOrWebcam() {
+    const float AMPLIFY_PITCH = 1.f;
+    const float AMPLIFY_YAW = 1.f;
+    const float AMPLIFY_ROLL = 1.f;
 
-    glm::vec3 estimatedRotation = serialInterface->getEstimatedRotation();
+    SerialInterface* gyros = Application::getInstance()->getSerialHeadSensor();
+    Webcam* webcam = Application::getInstance()->getWebcam();
+    glm::vec3 estimatedPosition, estimatedRotation;
+    if (gyros->isActive()) {
+        estimatedPosition = gyros->getEstimatedPosition();
+        estimatedRotation = gyros->getEstimatedRotation();
+
+    } else if (webcam->isActive()) {
+        estimatedPosition = webcam->getEstimatedPosition();
+        estimatedRotation = webcam->getEstimatedRotation();
+    }
    _head.setPitch(estimatedRotation.x * AMPLIFY_PITCH);
    _head.setYaw(estimatedRotation.y * AMPLIFY_YAW);
    _head.setRoll(estimatedRotation.z * AMPLIFY_ROLL);
 
    // Update torso lean distance based on accelerometer data
-    glm::vec3 estimatedPosition = serialInterface->getEstimatedPosition() * _leanScale;
    const float TORSO_LENGTH = 0.5f;
    const float MAX_LEAN = 45.0f;
-    _head.setLeanSideways(glm::clamp(glm::degrees(atanf(-estimatedPosition.x / TORSO_LENGTH)), -MAX_LEAN, MAX_LEAN));
-    _head.setLeanForward(glm::clamp(glm::degrees(atanf(estimatedPosition.z / TORSO_LENGTH)), -MAX_LEAN, MAX_LEAN));
+    _head.setLeanSideways(glm::clamp(glm::degrees(atanf(-estimatedPosition.x * _leanScale / TORSO_LENGTH)),
+                                     -MAX_LEAN, MAX_LEAN));
+    _head.setLeanForward(glm::clamp(glm::degrees(atanf(estimatedPosition.z * _leanScale / TORSO_LENGTH)),
+                                    -MAX_LEAN, MAX_LEAN));
 }
 
 float Avatar::getAbsoluteHeadYaw() const {
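The lean mapping above converts the estimated head offset (from the gyros' accelerometer or, now, the webcam) into torso lean angles: atanf of the scaled lateral offset over a 0.5 m torso length, converted to degrees and clamped to +/-45. A minimal standalone sketch of that mapping, not part of the commit, with hypothetical sample values (a 0.1 m sideways offset and a lean scale of 1.0) giving roughly 11 degrees of lean:

#include <cmath>
#include <cstdio>

int main() {
    const float TORSO_LENGTH = 0.5f;                      // same constant as in Avatar.cpp
    const float MAX_LEAN = 45.0f;                         // degrees
    const float DEGREES_PER_RADIAN = 180.0f / 3.14159265f;

    float leanScale = 1.0f;                               // assumed value of _leanScale
    float estimatedX = -0.1f;                             // assumed sideways head offset, meters

    // same formula as _head.setLeanSideways(...) above, written without glm
    float leanSideways = atanf(-estimatedX * leanScale / TORSO_LENGTH) * DEGREES_PER_RADIAN;
    if (leanSideways > MAX_LEAN)  leanSideways = MAX_LEAN;
    if (leanSideways < -MAX_LEAN) leanSideways = -MAX_LEAN;

    printf("sideways lean: %.1f degrees\n", leanSideways); // ~11.3
    return 0;
}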
Avatar.h

@@ -86,7 +86,7 @@ public:
    void reset();
    void simulate(float deltaTime, Transmitter* transmitter);
    void updateThrust(float deltaTime, Transmitter * transmitter);
-    void updateHeadFromGyros(float frametime, SerialInterface * serialInterface);
+    void updateHeadFromGyrosAndOrWebcam();
    void updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight);
    void addBodyYaw(float y) {_bodyYaw += y;};
    void render(bool lookingInMirror, bool renderAvatarBalls);
SerialInterface.cpp

@@ -103,7 +103,7 @@ void SerialInterface::initializePort(char* portname) {
    printLog("Connected.\n");
    resetSerial();
 
-    active = true;
+    _active = true;
 #endif
 }
@@ -359,7 +359,7 @@ void SerialInterface::resetAverages() {
 void SerialInterface::resetSerial() {
 #ifdef __APPLE__
    resetAverages();
-    active = false;
+    _active = false;
    gettimeofday(&lastGoodRead, NULL);
 #endif
 }
SerialInterface.h

@@ -24,7 +24,7 @@ extern const bool USING_INVENSENSE_MPU9150;
 
 class SerialInterface {
 public:
-    SerialInterface() : active(false),
+    SerialInterface() : _active(false),
                        _gravity(0, 0, 0),
                        _averageRotationRates(0, 0, 0),
                        _averageAcceleration(0, 0, 0),
|
@ -58,12 +58,13 @@ public:
|
||||||
|
|
||||||
void renderLevels(int width, int height);
|
void renderLevels(int width, int height);
|
||||||
void resetAverages();
|
void resetAverages();
|
||||||
bool active;
|
bool isActive() const { return _active; }
|
||||||
|
|
||||||
private:
|
private:
|
||||||
void initializePort(char* portname);
|
void initializePort(char* portname);
|
||||||
void resetSerial();
|
void resetSerial();
|
||||||
|
|
||||||
|
bool _active;
|
||||||
int _serialDescriptor;
|
int _serialDescriptor;
|
||||||
int totalSamples;
|
int totalSamples;
|
||||||
timeval lastGoodRead;
|
timeval lastGoodRead;
|
||||||
|
|
|
Webcam.cpp

@@ -25,7 +25,7 @@ using namespace std;
 int matMetaType = qRegisterMetaType<Mat>("cv::Mat");
 int rotatedRectMetaType = qRegisterMetaType<RotatedRect>("cv::RotatedRect");
 
-Webcam::Webcam() : _enabled(false), _frameTextureID(0) {
+Webcam::Webcam() : _enabled(false), _active(false), _frameTextureID(0) {
    // the grabber simply runs as fast as possible
    _grabber = new FrameGrabber();
    _grabber->moveToThread(&_grabberThread);
@@ -46,10 +46,13 @@ void Webcam::setEnabled(bool enabled) {
 
    } else {
        _grabberThread.quit();
+        _active = false;
    }
 }
 
 void Webcam::reset() {
+    _initialFaceRect = RotatedRect();
+
    if (_enabled) {
        // send a message to the grabber
        QMetaObject::invokeMethod(_grabber, "reset");
@@ -137,6 +140,38 @@ void Webcam::setFrame(const Mat& frame, const RotatedRect& faceRect) {
    }
    _lastFrameTimestamp = now;
 
+    // roll is just the angle of the face rect (correcting for 180 degree rotations)
+    float roll = faceRect.angle;
+    if (roll < -90.0f) {
+        roll += 180.0f;
+
+    } else if (roll > 90.0f) {
+        roll -= 180.0f;
+    }
+    const float ROTATION_SMOOTHING = 0.5f;
+    _estimatedRotation.z = glm::mix(_estimatedRotation.z, roll, ROTATION_SMOOTHING);
+
+    // determine position based on translation and scaling of the face rect
+    if (_initialFaceRect.size.area() == 0) {
+        _initialFaceRect = faceRect;
+        _estimatedPosition = glm::vec3();
+
+    } else {
+        float proportion = sqrtf(_initialFaceRect.size.area() / (float)faceRect.size.area());
+        const float DISTANCE_TO_CAMERA = 0.333f;
+        const float POSITION_SCALE = 0.5f;
+        float z = DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
+        glm::vec3 position = glm::vec3(
+            (faceRect.center.x - _initialFaceRect.center.x) * proportion * POSITION_SCALE / _frameWidth,
+            (faceRect.center.y - _initialFaceRect.center.y) * proportion * POSITION_SCALE / _frameWidth,
+            z);
+        const float POSITION_SMOOTHING = 0.5f;
+        _estimatedPosition = glm::mix(_estimatedPosition, position, POSITION_SMOOTHING);
+    }
+
+    // note that we have data
+    _active = true;
+
    // let the grabber know we're ready for the next frame
    QTimer::singleShot(qMax((int)remaining / 1000, 0), _grabber, SLOT(grabFrame()));
 }
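The block added above derives two webcam estimates: roll as the face rect's angle folded into [-90, 90] degrees, so 180-degree flips of the detected rect do not register as head roll, and a depth offset from how much the face rect has shrunk or grown relative to its initial size, assuming the face started a nominal 0.333 m from the camera. Both are blended with the previous value via glm::mix(previous, current, 0.5f), a simple exponential smoothing. A standalone sketch of the same arithmetic, not from the commit, with hypothetical sample inputs:

#include <cmath>
#include <cstdio>

// Fold a RotatedRect angle so that 180-degree flips do not show up as roll.
float foldRoll(float angle) {
    if (angle < -90.0f) {
        angle += 180.0f;
    } else if (angle > 90.0f) {
        angle -= 180.0f;
    }
    return angle;
}

// Depth offset from the ratio of the initial face-rect area to the current one.
float estimateZ(float initialArea, float currentArea) {
    const float DISTANCE_TO_CAMERA = 0.333f;              // nominal face distance, meters
    float proportion = sqrtf(initialArea / currentArea);  // > 1 when the face looks smaller
    return DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
}

int main() {
    printf("roll for a -170 degree rect angle: %.0f degrees\n", foldRoll(-170.0f)); // 10
    printf("z when the face area halves: %.3f m\n", estimateZ(10000.0f, 5000.0f));  // ~0.138
    // the commit then smooths both results with glm::mix(previous, current, 0.5f)
    return 0;
}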
@@ -166,7 +201,7 @@ void FrameGrabber::grabFrame() {
    cvSetCaptureProperty(_capture, CV_CAP_PROP_FRAME_HEIGHT, IDEAL_FRAME_HEIGHT);
 
 #ifdef __APPLE__
-    configureCamera(0x5ac, 0x8510, false, 0.99, 0.5, 0.5, 0.5, true, 0.5);
+    configureCamera(0x5ac, 0x8510, false, 0.975, 0.5, 1.0, 0.5, true, 0.5);
 #else
    cvSetCaptureProperty(_capture, CV_CAP_PROP_EXPOSURE, 0.5);
    cvSetCaptureProperty(_capture, CV_CAP_PROP_CONTRAST, 0.5);
Webcam.h

@@ -13,6 +13,8 @@
 #include <QObject>
 #include <QThread>
 
+#include <glm/glm.hpp>
+
 #include <opencv2/opencv.hpp>
 
 #include "InterfaceConfig.h"
@@ -31,6 +33,10 @@ public:
    Webcam();
    ~Webcam();
 
+    const bool isActive() const { return _active; }
+    const glm::vec3& getEstimatedPosition() const { return _estimatedPosition; }
+    const glm::vec3& getEstimatedRotation() const { return _estimatedRotation; }
+
    void reset();
    void renderPreview(int screenWidth, int screenHeight);
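For reference, a minimal usage sketch (assumed, not part of the commit) of the new Webcam accessors, mirroring how Avatar::updateHeadFromGyrosAndOrWebcam consumes them; in this commit only the roll component of the rotation and the position offset are estimated from the face rect:

// assumed usage, following the Avatar.cpp change above
Webcam* webcam = Application::getInstance()->getWebcam();
if (webcam->isActive()) {
    glm::vec3 rotation = webcam->getEstimatedRotation(); // only .z (roll, degrees) is set here
    glm::vec3 position = webcam->getEstimatedPosition(); // x/y from rect translation, z from its scale change
    // ... drive the head rotation and torso lean from these estimates ...
}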
@@ -45,15 +51,20 @@ private:
    FrameGrabber* _grabber;
 
    bool _enabled;
+    bool _active;
    int _frameWidth;
    int _frameHeight;
    GLuint _frameTextureID;
    cv::RotatedRect _faceRect;
+    cv::RotatedRect _initialFaceRect;
+
    long long _startTimestamp;
    int _frameCount;
 
    long long _lastFrameTimestamp;
 
+    glm::vec3 _estimatedPosition;
+    glm::vec3 _estimatedRotation;
 };
 
 class FrameGrabber : public QObject {