Avatar head roll/lean based on webcam data.

Andrzej Kapolka 2013-06-20 14:20:03 -07:00
parent b14a0c1554
commit b0dd5223e1
8 changed files with 83 additions and 25 deletions

Application.cpp

@@ -304,7 +304,7 @@ void Application::paintGL() {
     glEnable(GL_LINE_SMOOTH);
     glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
-    float headCameraScale = _serialHeadSensor.active ? _headCameraPitchYawScale : 1.0f;
+    float headCameraScale = _serialHeadSensor.isActive() ? _headCameraPitchYawScale : 1.0f;
     if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
         _myCamera.setTightness (100.0f);
@@ -775,7 +775,7 @@ void Application::timer() {
     gettimeofday(&_timerStart, NULL);
     // if we haven't detected gyros, check for them now
-    if (!_serialHeadSensor.active) {
+    if (!_serialHeadSensor.isActive()) {
        _serialHeadSensor.pair();
     }
@@ -1576,7 +1576,7 @@ void Application::update(float deltaTime) {
     }
     // Read serial port interface devices
-    if (_serialHeadSensor.active) {
+    if (_serialHeadSensor.isActive()) {
        _serialHeadSensor.readData(deltaTime);
     }
@@ -1654,8 +1654,10 @@ void Application::updateAvatar(float deltaTime) {
 void Application::updateAvatar(float deltaTime) {
-    if (_serialHeadSensor.active) {
+    // Update my avatar's head position from gyros and/or webcam
+    _myAvatar.updateHeadFromGyrosAndOrWebcam();
+    if (_serialHeadSensor.isActive()) {
         // Update avatar head translation
         if (_gyroLook->isChecked()) {
@@ -1664,10 +1666,7 @@ void Application::updateAvatar(float deltaTime) {
             headPosition *= HEAD_OFFSET_SCALING;
             _myCamera.setEyeOffsetPosition(headPosition);
         }
-        // Update my avatar's head position from gyros
-        _myAvatar.updateHeadFromGyros(deltaTime, &_serialHeadSensor);
         // Grab latest readings from the gyros
         float measuredPitchRate = _serialHeadSensor.getLastPitchRate();
         float measuredYawRate = _serialHeadSensor.getLastYawRate();
@@ -2522,7 +2521,7 @@ void Application::resetSensors() {
     _headMouseX = _mouseX = _glWidget->width() / 2;
     _headMouseY = _mouseY = _glWidget->height() / 2;
-    if (_serialHeadSensor.active) {
+    if (_serialHeadSensor.isActive()) {
        _serialHeadSensor.resetAverages();
     }
     _webcam.reset();

Application.h

@@ -76,6 +76,7 @@ public:
     VoxelSystem* getVoxels() { return &_voxels; }
     QSettings* getSettings() { return _settings; }
     Environment* getEnvironment() { return &_environment; }
+    SerialInterface* getSerialHeadSensor() { return &_serialHeadSensor; }
     Webcam* getWebcam() { return &_webcam; }
     bool shouldEchoAudio() { return _echoAudioMode->isChecked(); }

Avatar.cpp

@@ -280,22 +280,33 @@ void Avatar::reset() {
 }
 // Update avatar head rotation with sensor data
-void Avatar::updateHeadFromGyros(float deltaTime, SerialInterface* serialInterface) {
-    const float AMPLIFY_PITCH = 2.f;
-    const float AMPLIFY_YAW = 2.f;
-    const float AMPLIFY_ROLL = 2.f;
-    glm::vec3 estimatedRotation = serialInterface->getEstimatedRotation();
+void Avatar::updateHeadFromGyrosAndOrWebcam() {
+    const float AMPLIFY_PITCH = 1.f;
+    const float AMPLIFY_YAW = 1.f;
+    const float AMPLIFY_ROLL = 1.f;
+    SerialInterface* gyros = Application::getInstance()->getSerialHeadSensor();
+    Webcam* webcam = Application::getInstance()->getWebcam();
+    glm::vec3 estimatedPosition, estimatedRotation;
+    if (gyros->isActive()) {
+        estimatedPosition = gyros->getEstimatedPosition();
+        estimatedRotation = gyros->getEstimatedRotation();
+    } else if (webcam->isActive()) {
+        estimatedPosition = webcam->getEstimatedPosition();
+        estimatedRotation = webcam->getEstimatedRotation();
+    }
     _head.setPitch(estimatedRotation.x * AMPLIFY_PITCH);
     _head.setYaw(estimatedRotation.y * AMPLIFY_YAW);
     _head.setRoll(estimatedRotation.z * AMPLIFY_ROLL);
     // Update torso lean distance based on accelerometer data
-    glm::vec3 estimatedPosition = serialInterface->getEstimatedPosition() * _leanScale;
     const float TORSO_LENGTH = 0.5f;
     const float MAX_LEAN = 45.0f;
-    _head.setLeanSideways(glm::clamp(glm::degrees(atanf(-estimatedPosition.x / TORSO_LENGTH)), -MAX_LEAN, MAX_LEAN));
-    _head.setLeanForward(glm::clamp(glm::degrees(atanf(estimatedPosition.z / TORSO_LENGTH)), -MAX_LEAN, MAX_LEAN));
+    _head.setLeanSideways(glm::clamp(glm::degrees(atanf(-estimatedPosition.x * _leanScale / TORSO_LENGTH)),
+        -MAX_LEAN, MAX_LEAN));
+    _head.setLeanForward(glm::clamp(glm::degrees(atanf(estimatedPosition.z * _leanScale / TORSO_LENGTH)),
+        -MAX_LEAN, MAX_LEAN));
 }
 float Avatar::getAbsoluteHeadYaw() const {
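
For reference (not part of the commit): the lean calculation above treats the sensed horizontal head offset as the top of a lever of torso length and converts it to an angle. Below is a minimal standalone sketch of that math, assuming glm is available; the Lean struct and the helper name leanFromEstimatedPosition are made up for illustration and do not exist in the codebase.

// Illustrative sketch only -- mirrors the lean math in Avatar::updateHeadFromGyrosAndOrWebcam.
#include <cmath>
#include <glm/glm.hpp>

struct Lean {
    float sideways; // degrees
    float forward;  // degrees
};

// estimatedPosition comes from the gyros or the webcam, in metres relative to the rest pose.
Lean leanFromEstimatedPosition(const glm::vec3& estimatedPosition, float leanScale) {
    const float TORSO_LENGTH = 0.5f; // same constant as in the diff above
    const float MAX_LEAN = 45.0f;    // degrees
    Lean lean;
    // Treat the offset as the displacement of the top of a torso-length lever,
    // convert it to an angle, and clamp so noisy readings cannot over-rotate the body.
    lean.sideways = glm::clamp(glm::degrees(atanf(-estimatedPosition.x * leanScale / TORSO_LENGTH)),
                               -MAX_LEAN, MAX_LEAN);
    lean.forward = glm::clamp(glm::degrees(atanf(estimatedPosition.z * leanScale / TORSO_LENGTH)),
                              -MAX_LEAN, MAX_LEAN);
    return lean;
}

With leanScale at 1.0, a 0.1 m sideways offset maps to atan(0.1 / 0.5), roughly 11.3 degrees of lean, and the clamp keeps extreme readings from folding the torso past 45 degrees.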

Avatar.h

@@ -86,7 +86,7 @@ public:
     void reset();
     void simulate(float deltaTime, Transmitter* transmitter);
     void updateThrust(float deltaTime, Transmitter * transmitter);
-    void updateHeadFromGyros(float frametime, SerialInterface * serialInterface);
+    void updateHeadFromGyrosAndOrWebcam();
     void updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight);
     void addBodyYaw(float y) {_bodyYaw += y;};
     void render(bool lookingInMirror, bool renderAvatarBalls);

SerialInterface.cpp

@@ -103,7 +103,7 @@ void SerialInterface::initializePort(char* portname) {
     printLog("Connected.\n");
     resetSerial();
-    active = true;
+    _active = true;
 #endif
 }
@@ -359,7 +359,7 @@ void SerialInterface::resetAverages() {
 void SerialInterface::resetSerial() {
 #ifdef __APPLE__
     resetAverages();
-    active = false;
+    _active = false;
     gettimeofday(&lastGoodRead, NULL);
 #endif
 }

SerialInterface.h

@@ -24,7 +24,7 @@ extern const bool USING_INVENSENSE_MPU9150;
 class SerialInterface {
 public:
-    SerialInterface() : active(false),
+    SerialInterface() : _active(false),
         _gravity(0, 0, 0),
         _averageRotationRates(0, 0, 0),
         _averageAcceleration(0, 0, 0),
@@ -58,12 +58,13 @@ public:
     void renderLevels(int width, int height);
     void resetAverages();
-    bool active;
+    bool isActive() const { return _active; }
 private:
     void initializePort(char* portname);
     void resetSerial();
+    bool _active;
     int _serialDescriptor;
     int totalSamples;
     timeval lastGoodRead;

Webcam.cpp

@@ -25,7 +25,7 @@ using namespace std;
 int matMetaType = qRegisterMetaType<Mat>("cv::Mat");
 int rotatedRectMetaType = qRegisterMetaType<RotatedRect>("cv::RotatedRect");
-Webcam::Webcam() : _enabled(false), _frameTextureID(0) {
+Webcam::Webcam() : _enabled(false), _active(false), _frameTextureID(0) {
     // the grabber simply runs as fast as possible
     _grabber = new FrameGrabber();
     _grabber->moveToThread(&_grabberThread);
@@ -46,10 +46,13 @@ void Webcam::setEnabled(bool enabled) {
     } else {
         _grabberThread.quit();
+        _active = false;
     }
 }
 void Webcam::reset() {
+    _initialFaceRect = RotatedRect();
     if (_enabled) {
         // send a message to the grabber
         QMetaObject::invokeMethod(_grabber, "reset");
@@ -137,6 +140,38 @@ void Webcam::setFrame(const Mat& frame, const RotatedRect& faceRect) {
     }
     _lastFrameTimestamp = now;
+    // roll is just the angle of the face rect (correcting for 180 degree rotations)
+    float roll = faceRect.angle;
+    if (roll < -90.0f) {
+        roll += 180.0f;
+    } else if (roll > 90.0f) {
+        roll -= 180.0f;
+    }
+    const float ROTATION_SMOOTHING = 0.5f;
+    _estimatedRotation.z = glm::mix(_estimatedRotation.z, roll, ROTATION_SMOOTHING);
+    // determine position based on translation and scaling of the face rect
+    if (_initialFaceRect.size.area() == 0) {
+        _initialFaceRect = faceRect;
+        _estimatedPosition = glm::vec3();
+    } else {
+        float proportion = sqrtf(_initialFaceRect.size.area() / (float)faceRect.size.area());
+        const float DISTANCE_TO_CAMERA = 0.333f;
+        const float POSITION_SCALE = 0.5f;
+        float z = DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
+        glm::vec3 position = glm::vec3(
+            (faceRect.center.x - _initialFaceRect.center.x) * proportion * POSITION_SCALE / _frameWidth,
+            (faceRect.center.y - _initialFaceRect.center.y) * proportion * POSITION_SCALE / _frameWidth,
+            z);
+        const float POSITION_SMOOTHING = 0.5f;
+        _estimatedPosition = glm::mix(_estimatedPosition, position, POSITION_SMOOTHING);
+    }
+    // note that we have data
+    _active = true;
     // let the grabber know we're ready for the next frame
     QTimer::singleShot(qMax((int)remaining / 1000, 0), _grabber, SLOT(grabFrame()));
 }
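
For reference (not part of the commit): the face-rect math above reduces to two small formulas. Roll is the OpenCV rectangle angle wrapped into [-90, 90] degrees so a flipped detection does not read as a half-turn of the head, and depth is inferred from how much the face rect has shrunk or grown relative to its first detection. A standalone sketch under those assumptions, with made-up helper names and plain floats standing in for cv::RotatedRect data:

// Illustrative sketch only -- mirrors the roll and depth math in Webcam::setFrame.
#include <cmath>

// Wrap the detected rectangle angle into [-90, 90] degrees, matching the roll correction above.
float rollFromFaceRectAngle(float angleDegrees) {
    float roll = angleDegrees;
    if (roll < -90.0f) {
        roll += 180.0f;
    } else if (roll > 90.0f) {
        roll -= 180.0f;
    }
    return roll;
}

// Infer a depth offset from the apparent face size: proportion = sqrt(initialArea / currentArea),
// so a smaller face rect gives a proportion above 1 and a positive (away-from-camera) offset.
float depthFromFaceRectAreas(float initialArea, float currentArea) {
    const float DISTANCE_TO_CAMERA = 0.333f; // nominal head-to-camera distance used above, metres
    float proportion = sqrtf(initialArea / currentArea);
    return DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
}

For example, a face rect reported at -170 degrees wraps to +10 degrees of roll, and a face whose area drops to a quarter of its initial value gives proportion 2 and a depth offset of about +0.33 m. The glm::mix calls then blend each new estimate 50/50 with the previous one, a simple exponential smoothing that damps per-frame detection jitter.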
@@ -166,7 +201,7 @@ void FrameGrabber::grabFrame() {
         cvSetCaptureProperty(_capture, CV_CAP_PROP_FRAME_HEIGHT, IDEAL_FRAME_HEIGHT);
 #ifdef __APPLE__
-        configureCamera(0x5ac, 0x8510, false, 0.99, 0.5, 0.5, 0.5, true, 0.5);
+        configureCamera(0x5ac, 0x8510, false, 0.975, 0.5, 1.0, 0.5, true, 0.5);
 #else
         cvSetCaptureProperty(_capture, CV_CAP_PROP_EXPOSURE, 0.5);
         cvSetCaptureProperty(_capture, CV_CAP_PROP_CONTRAST, 0.5);

Webcam.h

@@ -13,6 +13,8 @@
 #include <QObject>
 #include <QThread>
+#include <glm/glm.hpp>
 #include <opencv2/opencv.hpp>
 #include "InterfaceConfig.h"
@@ -31,6 +33,10 @@ public:
     Webcam();
     ~Webcam();
+    const bool isActive() const { return _active; }
+    const glm::vec3& getEstimatedPosition() const { return _estimatedPosition; }
+    const glm::vec3& getEstimatedRotation() const { return _estimatedRotation; }
     void reset();
     void renderPreview(int screenWidth, int screenHeight);
@@ -45,15 +51,20 @@ private:
     FrameGrabber* _grabber;
     bool _enabled;
+    bool _active;
     int _frameWidth;
     int _frameHeight;
     GLuint _frameTextureID;
     cv::RotatedRect _faceRect;
+    cv::RotatedRect _initialFaceRect;
     long long _startTimestamp;
     int _frameCount;
     long long _lastFrameTimestamp;
+    glm::vec3 _estimatedPosition;
+    glm::vec3 _estimatedRotation;
 };
 class FrameGrabber : public QObject {