Merge branch 'master' of https://github.com/worklist/hifi into pointy

Conflicts:
	interface/src/Application.h
This commit is contained in:
Andrzej Kapolka 2013-07-12 15:34:55 -07:00
commit 763e6a2fa1
19 changed files with 522 additions and 159 deletions

42
interface/src/Application.cpp Executable file → Normal file
View file

@ -73,6 +73,8 @@ using namespace std;
static char STAR_FILE[] = "https://s3-us-west-1.amazonaws.com/highfidelity/stars.txt"; static char STAR_FILE[] = "https://s3-us-west-1.amazonaws.com/highfidelity/stars.txt";
static char STAR_CACHE_FILE[] = "cachedStars.txt"; static char STAR_CACHE_FILE[] = "cachedStars.txt";
static const bool TESTING_PARTICLE_SYSTEM = false;
static const int BANDWIDTH_METER_CLICK_MAX_DRAG_LENGTH = 6; // farther dragged clicks are ignored static const int BANDWIDTH_METER_CLICK_MAX_DRAG_LENGTH = 6; // farther dragged clicks are ignored
const glm::vec3 START_LOCATION(4.f, 0.f, 5.f); // Where one's own node begins in the world const glm::vec3 START_LOCATION(4.f, 0.f, 5.f); // Where one's own node begins in the world
@ -183,6 +185,8 @@ Application::Application(int& argc, char** argv, timeval &startup_time) :
_touchAvgX(0.0f), _touchAvgX(0.0f),
_touchAvgY(0.0f), _touchAvgY(0.0f),
_isTouchPressed(false), _isTouchPressed(false),
_yawFromTouch(0.0f),
_pitchFromTouch(0.0f),
_mousePressed(false), _mousePressed(false),
_mouseVoxelScale(1.0f / 1024.0f), _mouseVoxelScale(1.0f / 1024.0f),
_justEditedVoxel(false), _justEditedVoxel(false),
@ -969,6 +973,9 @@ void Application::idle() {
gettimeofday(&check, NULL); gettimeofday(&check, NULL);
// Only run simulation code if more than IDLE_SIMULATE_MSECS have passed since last time we ran // Only run simulation code if more than IDLE_SIMULATE_MSECS have passed since last time we ran
sendPostedEvents(NULL, QEvent::TouchBegin);
sendPostedEvents(NULL, QEvent::TouchUpdate);
sendPostedEvents(NULL, QEvent::TouchEnd);
double timeSinceLastUpdate = diffclock(&_lastTimeUpdated, &check); double timeSinceLastUpdate = diffclock(&_lastTimeUpdated, &check);
if (timeSinceLastUpdate > IDLE_SIMULATE_MSECS) { if (timeSinceLastUpdate > IDLE_SIMULATE_MSECS) {
@ -978,9 +985,6 @@ void Application::idle() {
// This is necessary because if the idle() call takes longer than the // This is necessary because if the idle() call takes longer than the
// interval between idle() calls, the event loop never gets to run, // interval between idle() calls, the event loop never gets to run,
// and touch events get delayed. // and touch events get delayed.
sendPostedEvents(NULL, QEvent::TouchBegin);
sendPostedEvents(NULL, QEvent::TouchUpdate);
sendPostedEvents(NULL, QEvent::TouchEnd);
const float BIGGEST_DELTA_TIME_SECS = 0.25f; const float BIGGEST_DELTA_TIME_SECS = 0.25f;
update(glm::clamp((float)timeSinceLastUpdate / 1000.f, 0.f, BIGGEST_DELTA_TIME_SECS)); update(glm::clamp((float)timeSinceLastUpdate / 1000.f, 0.f, BIGGEST_DELTA_TIME_SECS));
@ -1902,12 +1906,9 @@ void Application::update(float deltaTime) {
if (_isTouchPressed) { if (_isTouchPressed) {
float TOUCH_YAW_SCALE = -50.0f; float TOUCH_YAW_SCALE = -50.0f;
float TOUCH_PITCH_SCALE = -50.0f; float TOUCH_PITCH_SCALE = -50.0f;
_myAvatar.getHead().addYaw((_touchAvgX - _lastTouchAvgX) _yawFromTouch += ((_touchAvgX - _lastTouchAvgX) * TOUCH_YAW_SCALE * deltaTime);
* TOUCH_YAW_SCALE _pitchFromTouch += ((_touchAvgY - _lastTouchAvgY) * TOUCH_PITCH_SCALE * deltaTime);
* deltaTime);
_myAvatar.getHead().addPitch((_touchAvgY - _lastTouchAvgY)
* TOUCH_PITCH_SCALE
* deltaTime);
_lastTouchAvgX = _touchAvgX; _lastTouchAvgX = _touchAvgX;
_lastTouchAvgY = _touchAvgY; _lastTouchAvgY = _touchAvgY;
} }
@ -2007,15 +2008,28 @@ void Application::update(float deltaTime) {
_audio.setLastVelocity(_myAvatar.getVelocity()); _audio.setLastVelocity(_myAvatar.getVelocity());
_audio.eventuallyAnalyzePing(); _audio.eventuallyAnalyzePing();
#endif #endif
if (TESTING_PARTICLE_SYSTEM) {
_particleSystem.simulate(deltaTime);
}
} }
void Application::updateAvatar(float deltaTime) { void Application::updateAvatar(float deltaTime) {
// When head is rotated via touch/mouse look, slowly turn body to follow
const float BODY_FOLLOW_HEAD_RATE = 0.5f;
// update body yaw by body yaw delta
_myAvatar.setOrientation(_myAvatar.getOrientation()
* glm::quat(glm::vec3(0, _yawFromTouch * deltaTime * BODY_FOLLOW_HEAD_RATE, 0) * deltaTime));
_yawFromTouch -= _yawFromTouch * deltaTime * BODY_FOLLOW_HEAD_RATE;
// Update my avatar's state from gyros and/or webcam // Update my avatar's state from gyros and/or webcam
_myAvatar.updateFromGyrosAndOrWebcam(_gyroLook->isChecked(), _myAvatar.updateFromGyrosAndOrWebcam(_gyroLook->isChecked(),
glm::vec3(_headCameraPitchYawScale, glm::vec3(_headCameraPitchYawScale,
_headCameraPitchYawScale, _headCameraPitchYawScale,
_headCameraPitchYawScale)); _headCameraPitchYawScale),
_yawFromTouch,
_pitchFromTouch);
if (_serialHeadSensor.isActive()) { if (_serialHeadSensor.isActive()) {
@ -2055,8 +2069,8 @@ void Application::updateAvatar(float deltaTime) {
float yaw, pitch, roll; float yaw, pitch, roll;
OculusManager::getEulerAngles(yaw, pitch, roll); OculusManager::getEulerAngles(yaw, pitch, roll);
_myAvatar.getHead().setYaw(yaw); _myAvatar.getHead().setYaw(yaw + _yawFromTouch);
_myAvatar.getHead().setPitch(pitch); _myAvatar.getHead().setPitch(pitch + _pitchFromTouch);
_myAvatar.getHead().setRoll(roll); _myAvatar.getHead().setRoll(roll);
} }
@ -2441,6 +2455,10 @@ void Application::displaySide(Camera& whichCamera) {
_myAvatar.render(_lookingInMirror->isChecked(), _renderAvatarBalls->isChecked()); _myAvatar.render(_lookingInMirror->isChecked(), _renderAvatarBalls->isChecked());
_myAvatar.setDisplayingLookatVectors(_renderLookatOn->isChecked()); _myAvatar.setDisplayingLookatVectors(_renderLookatOn->isChecked());
} }
if (TESTING_PARTICLE_SYSTEM) {
_particleSystem.render();
}
// Render the world box // Render the world box
if (!_lookingInMirror->isChecked() && _renderStatsOn->isChecked()) { render_world_box(); } if (!_lookingInMirror->isChecked() && _renderStatsOn->isChecked()) { render_world_box(); }

View file

@ -31,6 +31,7 @@
#include "BandwidthMeter.h" #include "BandwidthMeter.h"
#include "Camera.h" #include "Camera.h"
#include "Environment.h" #include "Environment.h"
#include "ParticleSystem.h"
#include "SerialInterface.h" #include "SerialInterface.h"
#include "Stars.h" #include "Stars.h"
#include "Swatch.h" #include "Swatch.h"
@ -332,6 +333,8 @@ private:
float _touchDragStartedAvgX; float _touchDragStartedAvgX;
float _touchDragStartedAvgY; float _touchDragStartedAvgY;
bool _isTouchPressed; // true if multitouch has been pressed (clear when finished) bool _isTouchPressed; // true if multitouch has been pressed (clear when finished)
float _yawFromTouch;
float _pitchFromTouch;
VoxelDetail _mouseVoxelDragging; VoxelDetail _mouseVoxelDragging;
glm::vec3 _voxelThrust; glm::vec3 _voxelThrust;
@ -364,6 +367,8 @@ private:
GeometryCache _geometryCache; GeometryCache _geometryCache;
ParticleSystem _particleSystem;
#ifndef _WIN32 #ifndef _WIN32
Audio _audio; Audio _audio;
#endif #endif

View file

@ -448,7 +448,7 @@ void Audio::addReceivedAudioToBuffer(unsigned char* receivedData, int receivedBy
//printf("Got audio packet %d\n", _packetsReceivedThisPlayback); //printf("Got audio packet %d\n", _packetsReceivedThisPlayback);
_ringBuffer.parseData((unsigned char*) receivedData, PACKET_LENGTH_BYTES + sizeof(PACKET_TYPE)); _ringBuffer.parseData((unsigned char*) receivedData, receivedBytes);
Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO) Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO)
.updateValue(PACKET_LENGTH_BYTES + sizeof(PACKET_TYPE)); .updateValue(PACKET_LENGTH_BYTES + sizeof(PACKET_TYPE));

View file

@ -0,0 +1,183 @@
//
// ParticleSystem.cpp
// hifi
//
// Created by Jeffrey on July 10, 2013
//
#include <glm/glm.hpp>
#include "InterfaceConfig.h"
#include <SharedUtil.h>
#include "ParticleSystem.h"
ParticleSystem::ParticleSystem() {
    // Set up the hard-coded test system: every particle starts at the home
    // position, with colors swept through one full sine cycle across the pool.
    _numberOfParticles = 1500;
    assert(_numberOfParticles <= MAX_PARTICLES);

    _bounce             = 0.9f;
    _timer              = 0.0f;
    _gravity            = 0.0f; // fix: was left uninitialized; overwritten each frame by runSpecialEffectsTest()
    _airFriction        = 6.0f;
    _jitter             = 0.1f;
    _homeAttraction     = 0.0f;
    _tornadoForce       = 0.0f;
    _neighborAttraction = 0.02f;
    _neighborRepulsion  = 0.9f;
    _tornadoAxis        = glm::normalize(glm::vec3(0.1f, 1.0f, 0.1f));
    _home               = glm::vec3(5.0f, 1.0f, 5.0f);

    // Big static sphere used to exercise particle/sphere collision.
    _TEST_bigSphereRadius = 0.5f;
    _TEST_bigSpherePosition = glm::vec3(5.0f, _TEST_bigSphereRadius, 5.0f);

    for (unsigned int p = 0; p < _numberOfParticles; p++) {
        _particle[p].position = _home;
        _particle[p].velocity = glm::vec3(0.0f, 0.0f, 0.0f);

        // Map the particle index onto [0, 2*pi) so the color gradient wraps smoothly.
        float radian = ((float)p / (float)_numberOfParticles) * PI_TIMES_TWO;
        float wave = sinf(radian);
        float red   = 0.5f + 0.5f * wave;
        float green = 0.3f + 0.3f * wave;
        float blue  = 0.2f - 0.2f * wave;
        _particle[p].color = glm::vec3(red, green, blue);
        _particle[p].age = 0.0f;
        _particle[p].radius = 0.01f;
    }
}
void ParticleSystem::simulate(float deltaTime) {
    // Animate the global effect parameters first, then advance every particle
    // by the same time step.
    runSpecialEffectsTest(deltaTime);

    for (unsigned int index = 0; index < _numberOfParticles; ++index) {
        updateParticle(index, deltaTime);
    }
}
void ParticleSystem::runSpecialEffectsTest(float deltaTime) {
    // Continuously morph the global effect parameters with slow sinusoids of
    // the master timer, so the test scene cycles through varied behaviors.
    _timer += deltaTime;

    _gravity            = 0.01f + 0.01f * sinf(_timer * 0.52f);
    _airFriction        = 3.0f  + 2.0f  * sinf(_timer * 0.32f);
    _jitter             = 0.05f + 0.05f * sinf(_timer * 0.42f);
    _homeAttraction     = 0.01f + 0.01f * cosf(_timer * 0.6f);
    _tornadoForce       = 0.0f  + 0.03f * sinf(_timer * 0.7f);
    _neighborAttraction = 0.1f  + 0.1f  * cosf(_timer * 0.8f);
    _neighborRepulsion  = 0.4f  + 0.3f  * sinf(_timer * 0.4f);

    // Slowly precess the tornado axis around vertical.
    _tornadoAxis = glm::vec3(0.0f + 0.5f * sinf(_timer * 0.55f),
                             1.0f,
                             0.0f + 0.5f * cosf(_timer * 0.75f));
}
void ParticleSystem::updateParticle(int p, float deltaTime) {
    // Integrate one particle for deltaTime seconds: accumulate forces into
    // velocity, step the position, then resolve ground and sphere collisions.
    _particle[p].age += deltaTime;

    // apply random jitter: uniform in [-_jitter/2, +_jitter/2) per axis
    _particle[p].velocity +=
    glm::vec3
    (
        -_jitter * ONE_HALF + _jitter * randFloat(),
        -_jitter * ONE_HALF + _jitter * randFloat(),
        -_jitter * ONE_HALF + _jitter * randFloat()
    ) * deltaTime;

    // apply attraction to home position
    glm::vec3 vectorToHome = _home - _particle[p].position;
    _particle[p].velocity += vectorToHome * _homeAttraction * deltaTime;

    // apply neighbor attraction; neighbors form a ring (last wraps to first)
    int neighbor = p + 1;
    if (neighbor == _numberOfParticles) {
        neighbor = 0;
    }
    glm::vec3 vectorToNeighbor = _particle[p].position - _particle[neighbor].position;
    _particle[p].velocity -= vectorToNeighbor * _neighborAttraction * deltaTime;

    // push the neighbor away, falling off with squared distance
    float distanceToNeighbor = glm::length(vectorToNeighbor);
    if (distanceToNeighbor > 0.0f) {
        _particle[neighbor].velocity += (vectorToNeighbor / (1.0f + distanceToNeighbor * distanceToNeighbor)) * _neighborRepulsion * deltaTime;
    }

    // apply tornado force: swirl around _tornadoAxis through the home position
    glm::vec3 tornadoDirection = glm::cross(vectorToHome, _tornadoAxis);
    _particle[p].velocity += tornadoDirection * _tornadoForce * deltaTime;

    // apply air friction (fix: float literal avoids a silent float->double promotion)
    float drag = 1.0f - _airFriction * deltaTime;
    if (drag < 0.0f) {
        _particle[p].velocity = glm::vec3(0.0f, 0.0f, 0.0f);
    } else {
        _particle[p].velocity *= drag;
    }

    // apply gravity
    _particle[p].velocity.y -= _gravity * deltaTime;

    // update position by velocity
    // NOTE(review): velocity is applied per frame rather than scaled by
    // deltaTime — confirm whether frame-rate-dependent motion is intended.
    _particle[p].position += _particle[p].velocity;

    // collision with ground: clamp to radius and bounce with restitution _bounce
    if (_particle[p].position.y < _particle[p].radius) {
        _particle[p].position.y = _particle[p].radius;
        if (_particle[p].velocity.y < 0.0f) {
            _particle[p].velocity.y *= -_bounce;
        }
    }

    // collision with the big test sphere: project the particle back onto the
    // surface (position only — velocity is deliberately left unchanged here)
    glm::vec3 vectorToSphereCenter = _TEST_bigSpherePosition - _particle[p].position;
    float distanceToSphereCenter = glm::length(vectorToSphereCenter);
    float combinedRadius = _TEST_bigSphereRadius + _particle[p].radius;
    if (distanceToSphereCenter < combinedRadius) {
        if (distanceToSphereCenter > 0.0f) {
            glm::vec3 directionToSphereCenter = vectorToSphereCenter / distanceToSphereCenter;
            _particle[p].position = _TEST_bigSpherePosition - directionToSphereCenter * combinedRadius;
        }
    }
}
void ParticleSystem::render() {
for (unsigned int p = 0; p < _numberOfParticles; p++) {
glColor3f(_particle[p].color.x, _particle[p].color.y, _particle[p].color.z);
glPushMatrix();
glTranslatef(_particle[p].position.x, _particle[p].position.y, _particle[p].position.z);
glutSolidSphere(_particle[p].radius, 6, 6);
glPopMatrix();
// render velocity lines
glColor4f( _particle[p].color.x, _particle[p].color.y, _particle[p].color.z, 0.5f);
glm::vec3 end = _particle[p].position - _particle[p].velocity * 2.0f;
glBegin(GL_LINES);
glVertex3f(_particle[p].position.x, _particle[p].position.y, _particle[p].position.z);
glVertex3f(end.x, end.y, end.z);
glEnd();
}
}

View file

@ -0,0 +1,59 @@
//
// ParticleSystem.h
// hifi
//
// Created by Jeffrey on July 10, 2013
//
//
#ifndef hifi_ParticleSystem_h
#define hifi_ParticleSystem_h
// NOTE(review): this header uses glm::vec3 but does not include <glm/glm.hpp>;
// includers must provide it first — consider adding the include here.
const int MAX_PARTICLES = 5000;
const int MAX_EMITTERS = 10;
// A simple test particle system: a fixed-size pool of particles driven by
// jitter, home attraction, neighbor forces, a tornado swirl, gravity, and
// collisions with the ground plane and a test sphere (see ParticleSystem.cpp).
class ParticleSystem {
public:
    ParticleSystem();
    // Advance all particles by deltaTime seconds (also animates the test effects).
    void simulate(float deltaTime);
    // Draw every particle as a small sphere plus a velocity line (immediate-mode GL).
    void render();
private:
    // State for a single particle.
    struct Particle {
        glm::vec3 position;
        glm::vec3 velocity;
        glm::vec3 color;
        float age;        // seconds since creation
        float radius;
    };
    // Emitter description — declared but not yet used by the implementation.
    struct Emitter {
        glm::vec3 position;
        glm::vec3 direction;
    };
    float _bounce;                      // ground-collision restitution factor
    float _gravity;
    float _timer;                       // master clock driving the test effects
    Emitter _emitter[MAX_EMITTERS];
    Particle _particle[MAX_PARTICLES];
    int _numberOfParticles;             // active count; must be <= MAX_PARTICLES
    glm::vec3 _home;                    // attraction point particles return to
    glm::vec3 _tornadoAxis;
    float _airFriction;
    float _jitter;
    float _homeAttraction;
    float _tornadoForce;
    float _neighborAttraction;
    float _neighborRepulsion;
    float _TEST_bigSphereRadius;        // test collision sphere
    glm::vec3 _TEST_bigSpherePosition;
    // private methods
    void updateParticle(int index, float deltaTime);
    void runSpecialEffectsTest(float deltaTime);
};
#endif

40
interface/src/Physics.cpp Normal file
View file

@ -0,0 +1,40 @@
//
// Physics.cpp
// hifi
//
// Created by Philip on July 11, 2013
//
// Routines to help with doing virtual world physics
//
#include <glm/glm.hpp>
#include <SharedUtil.h>
#include "Util.h"
#include "world.h"
#include "Physics.h"
//
// Applies static friction: maxVelocity is the largest velocity for which
// there is friction, and strength is the amount of friction force applied to reduce
// velocity.
//
void applyStaticFriction(float deltaTime, glm::vec3& velocity, float maxVelocity, float strength) {
    // Below maxVelocity, attenuate velocity; the friction effect grows
    // stronger as speed approaches zero, and the factor is clamped to [0, 1]
    // so a large time step can never reverse the motion.
    const float speed = glm::length(velocity);
    if (speed < maxVelocity) {
        const float attenuation = glm::clamp((1.0f - deltaTime * strength * (1.f - speed / maxVelocity)), 0.0f, 1.0f);
        velocity *= attenuation;
    }
}
//
// Applies velocity damping, with a strength value for linear and squared velocity damping
//
void applyDamping(float deltaTime, glm::vec3& velocity, float linearStrength, float squaredStrength) {
    // Damping factor has a linear term and, when requested, a term
    // proportional to current speed; clamped to [0, 1] so damping can only
    // slow the motion, never reverse it.
    float factor;
    if (squaredStrength == 0.f) {
        factor = 1.f - deltaTime * linearStrength;
    } else {
        factor = 1.f - deltaTime * (linearStrength + glm::length(velocity) * squaredStrength);
    }
    velocity *= glm::clamp(factor, 0.f, 1.f);
}

15
interface/src/Physics.h Normal file
View file

@ -0,0 +1,15 @@
//
// Physics.h
// hifi
//
// Created by Philip on 4/25/13.
//
// Routines to help with doing virtual world physics (see Physics.cpp).
//
#ifndef hifi_Physics_h
#define hifi_Physics_h
// NOTE(review): these declarations use glm::vec3 but this header does not
// include <glm/glm.hpp>; includers must provide it first — consider adding the include.
// Applies static friction: below maxVelocity the velocity is attenuated, with
// the effect growing stronger as speed approaches zero; strength scales the force.
void applyStaticFriction(float deltaTime, glm::vec3& velocity, float maxVelocity, float strength);
// Applies damping with a linear term (linearStrength) and, when squaredStrength
// is nonzero, an additional term proportional to current speed.
void applyDamping(float deltaTime, glm::vec3& velocity, float linearStrength, float squaredStrength);
#endif

View file

@ -30,7 +30,7 @@ int jointVectorMetaType = qRegisterMetaType<JointVector>("JointVector");
int matMetaType = qRegisterMetaType<Mat>("cv::Mat"); int matMetaType = qRegisterMetaType<Mat>("cv::Mat");
int rotatedRectMetaType = qRegisterMetaType<RotatedRect>("cv::RotatedRect"); int rotatedRectMetaType = qRegisterMetaType<RotatedRect>("cv::RotatedRect");
Webcam::Webcam() : _enabled(false), _active(false), _frameTextureID(0), _depthTextureID(0) { Webcam::Webcam() : _enabled(false), _active(false), _colorTextureID(0), _depthTextureID(0) {
// the grabber simply runs as fast as possible // the grabber simply runs as fast as possible
_grabber = new FrameGrabber(); _grabber = new FrameGrabber();
_grabber->moveToThread(&_grabberThread); _grabber->moveToThread(&_grabberThread);
@ -65,13 +65,13 @@ void Webcam::reset() {
} }
void Webcam::renderPreview(int screenWidth, int screenHeight) { void Webcam::renderPreview(int screenWidth, int screenHeight) {
if (_enabled && _frameTextureID != 0) { if (_enabled && _colorTextureID != 0) {
glBindTexture(GL_TEXTURE_2D, _frameTextureID); glBindTexture(GL_TEXTURE_2D, _colorTextureID);
glEnable(GL_TEXTURE_2D); glEnable(GL_TEXTURE_2D);
glColor3f(1.0f, 1.0f, 1.0f); glColor3f(1.0f, 1.0f, 1.0f);
glBegin(GL_QUADS); glBegin(GL_QUADS);
const int PREVIEW_HEIGHT = 200; const int PREVIEW_HEIGHT = 200;
int previewWidth = _frameWidth * PREVIEW_HEIGHT / _frameHeight; int previewWidth = _textureSize.width * PREVIEW_HEIGHT / _textureSize.height;
int top = screenHeight - 600; int top = screenHeight - 600;
int left = screenWidth - previewWidth - 10; int left = screenWidth - previewWidth - 10;
@ -88,16 +88,14 @@ void Webcam::renderPreview(int screenWidth, int screenHeight) {
if (_depthTextureID != 0) { if (_depthTextureID != 0) {
glBindTexture(GL_TEXTURE_2D, _depthTextureID); glBindTexture(GL_TEXTURE_2D, _depthTextureID);
glBegin(GL_QUADS); glBegin(GL_QUADS);
int depthPreviewWidth = _depthWidth * PREVIEW_HEIGHT / _depthHeight;
int depthLeft = screenWidth - depthPreviewWidth - 10;
glTexCoord2f(0, 0); glTexCoord2f(0, 0);
glVertex2f(depthLeft, top - PREVIEW_HEIGHT); glVertex2f(left, top - PREVIEW_HEIGHT);
glTexCoord2f(1, 0); glTexCoord2f(1, 0);
glVertex2f(depthLeft + depthPreviewWidth, top - PREVIEW_HEIGHT); glVertex2f(left + previewWidth, top - PREVIEW_HEIGHT);
glTexCoord2f(1, 1); glTexCoord2f(1, 1);
glVertex2f(depthLeft + depthPreviewWidth, top); glVertex2f(left + previewWidth, top);
glTexCoord2f(0, 1); glTexCoord2f(0, 1);
glVertex2f(depthLeft, top); glVertex2f(left, top);
glEnd(); glEnd();
glBindTexture(GL_TEXTURE_2D, 0); glBindTexture(GL_TEXTURE_2D, 0);
@ -107,10 +105,10 @@ void Webcam::renderPreview(int screenWidth, int screenHeight) {
glColor3f(1.0f, 0.0f, 0.0f); glColor3f(1.0f, 0.0f, 0.0f);
glPointSize(4.0f); glPointSize(4.0f);
glBegin(GL_POINTS); glBegin(GL_POINTS);
float projectedScale = PREVIEW_HEIGHT / (float)_depthHeight; float projectedScale = PREVIEW_HEIGHT / _textureSize.height;
foreach (const Joint& joint, _joints) { foreach (const Joint& joint, _joints) {
if (joint.isValid) { if (joint.isValid) {
glVertex2f(depthLeft + joint.projected.x * projectedScale, glVertex2f(left + joint.projected.x * projectedScale,
top - PREVIEW_HEIGHT + joint.projected.y * projectedScale); top - PREVIEW_HEIGHT + joint.projected.y * projectedScale);
} }
} }
@ -126,8 +124,8 @@ void Webcam::renderPreview(int screenWidth, int screenHeight) {
glBegin(GL_LINE_LOOP); glBegin(GL_LINE_LOOP);
Point2f facePoints[4]; Point2f facePoints[4];
_faceRect.points(facePoints); _faceRect.points(facePoints);
float xScale = previewWidth / (float)_frameWidth; float xScale = previewWidth / _textureSize.width;
float yScale = PREVIEW_HEIGHT / (float)_frameHeight; float yScale = PREVIEW_HEIGHT / _textureSize.height;
glVertex2f(left + facePoints[0].x * xScale, top + facePoints[0].y * yScale); glVertex2f(left + facePoints[0].x * xScale, top + facePoints[0].y * yScale);
glVertex2f(left + facePoints[1].x * xScale, top + facePoints[1].y * yScale); glVertex2f(left + facePoints[1].x * xScale, top + facePoints[1].y * yScale);
glVertex2f(left + facePoints[2].x * xScale, top + facePoints[2].y * yScale); glVertex2f(left + facePoints[2].x * xScale, top + facePoints[2].y * yScale);
@ -148,21 +146,22 @@ Webcam::~Webcam() {
delete _grabber; delete _grabber;
} }
void Webcam::setFrame(const Mat& frame, int format, const Mat& depth, const Mat& depthPreview, void Webcam::setFrame(const Mat& color, int format, const Mat& depth, const Mat& depthPreview,
const RotatedRect& faceRect, const JointVector& joints) { const RotatedRect& faceRect, const JointVector& joints) {
IplImage image = frame; IplImage colorImage = color;
glPixelStorei(GL_UNPACK_ROW_LENGTH, image.widthStep / 3); glPixelStorei(GL_UNPACK_ROW_LENGTH, colorImage.widthStep / 3);
if (_frameTextureID == 0) { if (_colorTextureID == 0) {
glGenTextures(1, &_frameTextureID); glGenTextures(1, &_colorTextureID);
glBindTexture(GL_TEXTURE_2D, _frameTextureID); glBindTexture(GL_TEXTURE_2D, _colorTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _frameWidth = image.width, _frameHeight = image.height, 0, format, glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, _textureSize.width = colorImage.width, _textureSize.height = colorImage.height,
GL_UNSIGNED_BYTE, image.imageData); 0, format, GL_UNSIGNED_BYTE, colorImage.imageData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
printLog("Capturing video at %dx%d.\n", _frameWidth, _frameHeight); printLog("Capturing video at %gx%g.\n", _textureSize.width, _textureSize.height);
} else { } else {
glBindTexture(GL_TEXTURE_2D, _frameTextureID); glBindTexture(GL_TEXTURE_2D, _colorTextureID);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _frameWidth, _frameHeight, format, GL_UNSIGNED_BYTE, image.imageData); glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _textureSize.width, _textureSize.height, format,
GL_UNSIGNED_BYTE, colorImage.imageData);
} }
if (!depthPreview.empty()) { if (!depthPreview.empty()) {
@ -171,14 +170,13 @@ void Webcam::setFrame(const Mat& frame, int format, const Mat& depth, const Mat&
if (_depthTextureID == 0) { if (_depthTextureID == 0) {
glGenTextures(1, &_depthTextureID); glGenTextures(1, &_depthTextureID);
glBindTexture(GL_TEXTURE_2D, _depthTextureID); glBindTexture(GL_TEXTURE_2D, _depthTextureID);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _depthWidth = depthImage.width, _depthHeight = depthImage.height, 0, glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, depthImage.width, depthImage.height, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, depthImage.imageData); GL_LUMINANCE, GL_UNSIGNED_BYTE, depthImage.imageData);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR); glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
printLog("Capturing depth at %dx%d.\n", _depthWidth, _depthHeight);
} else { } else {
glBindTexture(GL_TEXTURE_2D, _depthTextureID); glBindTexture(GL_TEXTURE_2D, _depthTextureID);
glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _depthWidth, _depthHeight, GL_LUMINANCE, glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, _textureSize.width, _textureSize.height, GL_LUMINANCE,
GL_UNSIGNED_BYTE, depthImage.imageData); GL_UNSIGNED_BYTE, depthImage.imageData);
} }
} }
@ -201,6 +199,27 @@ void Webcam::setFrame(const Mat& frame, int format, const Mat& depth, const Mat&
} }
_lastFrameTimestamp = now; _lastFrameTimestamp = now;
// correct for 180 degree rotations
if (_faceRect.angle < -90.0f) {
_faceRect.angle += 180.0f;
} else if (_faceRect.angle > 90.0f) {
_faceRect.angle -= 180.0f;
}
// compute the smoothed face rect
if (_estimatedFaceRect.size.area() == 0) {
_estimatedFaceRect = _faceRect;
} else {
const float FACE_RECT_SMOOTHING = 0.9f;
_estimatedFaceRect.center.x = glm::mix(_faceRect.center.x, _estimatedFaceRect.center.x, FACE_RECT_SMOOTHING);
_estimatedFaceRect.center.y = glm::mix(_faceRect.center.y, _estimatedFaceRect.center.y, FACE_RECT_SMOOTHING);
_estimatedFaceRect.size.width = glm::mix(_faceRect.size.width, _estimatedFaceRect.size.width, FACE_RECT_SMOOTHING);
_estimatedFaceRect.size.height = glm::mix(_faceRect.size.height, _estimatedFaceRect.size.height, FACE_RECT_SMOOTHING);
_estimatedFaceRect.angle = glm::mix(_faceRect.angle, _estimatedFaceRect.angle, FACE_RECT_SMOOTHING);
}
// see if we have joint data // see if we have joint data
if (!_joints.isEmpty()) { if (!_joints.isEmpty()) {
_estimatedJoints.resize(NUM_AVATAR_JOINTS); _estimatedJoints.resize(NUM_AVATAR_JOINTS);
@ -227,30 +246,23 @@ void Webcam::setFrame(const Mat& frame, int format, const Mat& depth, const Mat&
_estimatedPosition = _estimatedJoints[AVATAR_JOINT_HEAD_BASE].position; _estimatedPosition = _estimatedJoints[AVATAR_JOINT_HEAD_BASE].position;
} else { } else {
// roll is just the angle of the face rect (correcting for 180 degree rotations) // roll is just the angle of the face rect
float roll = faceRect.angle;
if (roll < -90.0f) {
roll += 180.0f;
} else if (roll > 90.0f) {
roll -= 180.0f;
}
const float ROTATION_SMOOTHING = 0.95f; const float ROTATION_SMOOTHING = 0.95f;
_estimatedRotation.z = glm::mix(roll, _estimatedRotation.z, ROTATION_SMOOTHING); _estimatedRotation.z = glm::mix(_faceRect.angle, _estimatedRotation.z, ROTATION_SMOOTHING);
// determine position based on translation and scaling of the face rect // determine position based on translation and scaling of the face rect
if (_initialFaceRect.size.area() == 0) { if (_initialFaceRect.size.area() == 0) {
_initialFaceRect = faceRect; _initialFaceRect = _faceRect;
_estimatedPosition = glm::vec3(); _estimatedPosition = glm::vec3();
} else { } else {
float proportion = sqrtf(_initialFaceRect.size.area() / (float)faceRect.size.area()); float proportion = sqrtf(_initialFaceRect.size.area() / (float)_faceRect.size.area());
const float DISTANCE_TO_CAMERA = 0.333f; const float DISTANCE_TO_CAMERA = 0.333f;
const float POSITION_SCALE = 0.5f; const float POSITION_SCALE = 0.5f;
float z = DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA; float z = DISTANCE_TO_CAMERA * proportion - DISTANCE_TO_CAMERA;
glm::vec3 position = glm::vec3( glm::vec3 position = glm::vec3(
(faceRect.center.x - _initialFaceRect.center.x) * proportion * POSITION_SCALE / _frameWidth, (_faceRect.center.x - _initialFaceRect.center.x) * proportion * POSITION_SCALE / _textureSize.width,
(faceRect.center.y - _initialFaceRect.center.y) * proportion * POSITION_SCALE / _frameWidth, (_faceRect.center.y - _initialFaceRect.center.y) * proportion * POSITION_SCALE / _textureSize.width,
z); z);
const float POSITION_SMOOTHING = 0.95f; const float POSITION_SMOOTHING = 0.95f;
_estimatedPosition = glm::mix(position, _estimatedPosition, POSITION_SMOOTHING); _estimatedPosition = glm::mix(position, _estimatedPosition, POSITION_SMOOTHING);
@ -382,13 +394,13 @@ void FrameGrabber::grabFrame() {
return; return;
} }
int format = GL_BGR; int format = GL_BGR;
Mat frame, depth; Mat color, depth;
JointVector joints; JointVector joints;
#ifdef HAVE_OPENNI #ifdef HAVE_OPENNI
if (_depthGenerator.IsValid()) { if (_depthGenerator.IsValid()) {
_xnContext.WaitAnyUpdateAll(); _xnContext.WaitAnyUpdateAll();
frame = Mat(_imageMetaData.YRes(), _imageMetaData.XRes(), CV_8UC3, (void*)_imageGenerator.GetImageMap()); color = Mat(_imageMetaData.YRes(), _imageMetaData.XRes(), CV_8UC3, (void*)_imageGenerator.GetImageMap());
format = GL_RGB; format = GL_RGB;
depth = Mat(_depthMetaData.YRes(), _depthMetaData.XRes(), CV_16UC1, (void*)_depthGenerator.GetDepthMap()); depth = Mat(_depthMetaData.YRes(), _depthMetaData.XRes(), CV_16UC1, (void*)_depthGenerator.GetDepthMap());
@ -430,7 +442,7 @@ void FrameGrabber::grabFrame() {
} }
#endif #endif
if (frame.empty()) { if (color.empty()) {
IplImage* image = cvQueryFrame(_capture); IplImage* image = cvQueryFrame(_capture);
if (image == 0) { if (image == 0) {
// try again later // try again later
@ -443,7 +455,7 @@ void FrameGrabber::grabFrame() {
printLog("Invalid webcam image format.\n"); printLog("Invalid webcam image format.\n");
return; return;
} }
frame = image; color = image;
} }
// if we don't have a search window (yet), try using the face cascade // if we don't have a search window (yet), try using the face cascade
@ -452,10 +464,10 @@ void FrameGrabber::grabFrame() {
const float* range = ranges; const float* range = ranges;
if (_searchWindow.area() == 0) { if (_searchWindow.area() == 0) {
vector<cv::Rect> faces; vector<cv::Rect> faces;
_faceCascade.detectMultiScale(frame, faces, 1.1, 6); _faceCascade.detectMultiScale(color, faces, 1.1, 6);
if (!faces.empty()) { if (!faces.empty()) {
_searchWindow = faces.front(); _searchWindow = faces.front();
updateHSVFrame(frame, format); updateHSVFrame(color, format);
Mat faceHsv(_hsvFrame, _searchWindow); Mat faceHsv(_hsvFrame, _searchWindow);
Mat faceMask(_mask, _searchWindow); Mat faceMask(_mask, _searchWindow);
@ -468,7 +480,7 @@ void FrameGrabber::grabFrame() {
} }
RotatedRect faceRect; RotatedRect faceRect;
if (_searchWindow.area() > 0) { if (_searchWindow.area() > 0) {
updateHSVFrame(frame, format); updateHSVFrame(color, format);
calcBackProject(&_hsvFrame, 1, &channels, _histogram, _backProject, &range); calcBackProject(&_hsvFrame, 1, &channels, _histogram, _backProject, &range);
bitwise_and(_backProject, _mask, _backProject); bitwise_and(_backProject, _mask, _backProject);
@ -477,7 +489,7 @@ void FrameGrabber::grabFrame() {
_searchWindow = faceRect.boundingRect(); _searchWindow = faceRect.boundingRect();
} }
QMetaObject::invokeMethod(Application::getInstance()->getWebcam(), "setFrame", QMetaObject::invokeMethod(Application::getInstance()->getWebcam(), "setFrame",
Q_ARG(cv::Mat, frame), Q_ARG(int, format), Q_ARG(cv::Mat, depth), Q_ARG(cv::Mat, _grayDepthFrame), Q_ARG(cv::Mat, color), Q_ARG(int, format), Q_ARG(cv::Mat, depth), Q_ARG(cv::Mat, _grayDepthFrame),
Q_ARG(cv::RotatedRect, faceRect), Q_ARG(JointVector, joints)); Q_ARG(cv::RotatedRect, faceRect), Q_ARG(JointVector, joints));
} }

View file

@ -42,7 +42,13 @@ public:
Webcam(); Webcam();
~Webcam(); ~Webcam();
const bool isActive() const { return _active; } bool isActive() const { return _active; }
GLuint getColorTextureID() const { return _colorTextureID; }
GLuint getDepthTextureID() const { return _depthTextureID; }
const cv::Size2f& getTextureSize() const { return _textureSize; }
const cv::RotatedRect& getEstimatedFaceRect() const { return _estimatedFaceRect; }
const glm::vec3& getEstimatedPosition() const { return _estimatedPosition; } const glm::vec3& getEstimatedPosition() const { return _estimatedPosition; }
const glm::vec3& getEstimatedRotation() const { return _estimatedRotation; } const glm::vec3& getEstimatedRotation() const { return _estimatedRotation; }
const JointVector& getEstimatedJoints() const { return _estimatedJoints; } const JointVector& getEstimatedJoints() const { return _estimatedJoints; }
@ -53,7 +59,7 @@ public:
public slots: public slots:
void setEnabled(bool enabled); void setEnabled(bool enabled);
void setFrame(const cv::Mat& video, int format, const cv::Mat& depth, void setFrame(const cv::Mat& color, int format, const cv::Mat& depth,
const cv::Mat& depthPreview, const cv::RotatedRect& faceRect, const JointVector& joints); const cv::Mat& depthPreview, const cv::RotatedRect& faceRect, const JointVector& joints);
private: private:
@ -63,12 +69,9 @@ private:
bool _enabled; bool _enabled;
bool _active; bool _active;
int _frameWidth; GLuint _colorTextureID;
int _frameHeight;
int _depthWidth;
int _depthHeight;
GLuint _frameTextureID;
GLuint _depthTextureID; GLuint _depthTextureID;
cv::Size2f _textureSize;
cv::RotatedRect _faceRect; cv::RotatedRect _faceRect;
cv::RotatedRect _initialFaceRect; cv::RotatedRect _initialFaceRect;
JointVector _joints; JointVector _joints;
@ -78,6 +81,7 @@ private:
uint64_t _lastFrameTimestamp; uint64_t _lastFrameTimestamp;
cv::RotatedRect _estimatedFaceRect;
glm::vec3 _estimatedPosition; glm::vec3 _estimatedPosition;
glm::vec3 _estimatedRotation; glm::vec3 _estimatedRotation;
JointVector _estimatedJoints; JointVector _estimatedJoints;

View file

@ -16,12 +16,14 @@
#include "Hand.h" #include "Hand.h"
#include "Head.h" #include "Head.h"
#include "Log.h" #include "Log.h"
#include "Physics.h"
#include "ui/TextRenderer.h" #include "ui/TextRenderer.h"
#include <NodeList.h> #include <NodeList.h>
#include <NodeTypes.h> #include <NodeTypes.h>
#include <PacketHeaders.h> #include <PacketHeaders.h>
#include <OculusManager.h> #include <OculusManager.h>
using namespace std; using namespace std;
const bool BALLS_ON = false; const bool BALLS_ON = false;
@ -94,6 +96,8 @@ Avatar::Avatar(Node* owningNode) :
_elapsedTimeMoving(0.0f), _elapsedTimeMoving(0.0f),
_elapsedTimeStopped(0.0f), _elapsedTimeStopped(0.0f),
_elapsedTimeSinceCollision(0.0f), _elapsedTimeSinceCollision(0.0f),
_speedBrakes(false),
_isThrustOn(false),
_voxels(this) _voxels(this)
{ {
// give the pointer to our head to inherited _headData variable from AvatarData // give the pointer to our head to inherited _headData variable from AvatarData
@ -285,7 +289,10 @@ void Avatar::reset() {
} }
// Update avatar head rotation with sensor data // Update avatar head rotation with sensor data
void Avatar::updateFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngle) { void Avatar::updateFromGyrosAndOrWebcam(bool gyroLook,
const glm::vec3& amplifyAngle,
float yawFromTouch,
float pitchFromTouch) {
SerialInterface* gyros = Application::getInstance()->getSerialHeadSensor(); SerialInterface* gyros = Application::getInstance()->getSerialHeadSensor();
Webcam* webcam = Application::getInstance()->getWebcam(); Webcam* webcam = Application::getInstance()->getWebcam();
glm::vec3 estimatedPosition, estimatedRotation; glm::vec3 estimatedPosition, estimatedRotation;
@ -296,6 +303,8 @@ void Avatar::updateFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyA
estimatedRotation = webcam->getEstimatedRotation(); estimatedRotation = webcam->getEstimatedRotation();
} else { } else {
_head.setPitch(pitchFromTouch);
_head.setYaw(yawFromTouch);
return; return;
} }
if (webcam->isActive()) { if (webcam->isActive()) {
@ -316,8 +325,8 @@ void Avatar::updateFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyA
} }
} }
} }
_head.setPitch(estimatedRotation.x * amplifyAngle.x); _head.setPitch(estimatedRotation.x * amplifyAngle.x + pitchFromTouch);
_head.setYaw(estimatedRotation.y * amplifyAngle.y); _head.setYaw(estimatedRotation.y * amplifyAngle.y + yawFromTouch);
_head.setRoll(estimatedRotation.z * amplifyAngle.z); _head.setRoll(estimatedRotation.z * amplifyAngle.z);
_head.setCameraFollowsHead(gyroLook); _head.setCameraFollowsHead(gyroLook);
@ -356,16 +365,16 @@ void Avatar::updateThrust(float deltaTime, Transmitter * transmitter) {
// //
// Gather thrust information from keyboard and sensors to apply to avatar motion // Gather thrust information from keyboard and sensors to apply to avatar motion
// //
glm::quat orientation = getOrientation(); glm::quat orientation = getHead().getOrientation();
glm::vec3 front = orientation * IDENTITY_FRONT; glm::vec3 front = orientation * IDENTITY_FRONT;
glm::vec3 right = orientation * IDENTITY_RIGHT; glm::vec3 right = orientation * IDENTITY_RIGHT;
glm::vec3 up = orientation * IDENTITY_UP; glm::vec3 up = orientation * IDENTITY_UP;
const float THRUST_MAG_UP = 800.0f; const float THRUST_MAG_UP = 800.0f;
const float THRUST_MAG_DOWN = 200.f; const float THRUST_MAG_DOWN = 300.f;
const float THRUST_MAG_FWD = 300.f; const float THRUST_MAG_FWD = 500.f;
const float THRUST_MAG_BACK = 150.f; const float THRUST_MAG_BACK = 300.f;
const float THRUST_MAG_LATERAL = 200.f; const float THRUST_MAG_LATERAL = 250.f;
const float THRUST_JUMP = 120.f; const float THRUST_JUMP = 120.f;
// Add Thrusts from keyboard // Add Thrusts from keyboard
@ -413,14 +422,27 @@ void Avatar::updateThrust(float deltaTime, Transmitter * transmitter) {
up; up;
} }
} }
}
// Update speed brake status
const float MIN_SPEED_BRAKE_VELOCITY = 0.4f;
if ((glm::length(_thrust) == 0.0f) && _isThrustOn && (glm::length(_velocity) > MIN_SPEED_BRAKE_VELOCITY)) {
_speedBrakes = true;
}
if (_speedBrakes && (glm::length(_velocity) < MIN_SPEED_BRAKE_VELOCITY)) {
_speedBrakes = false;
}
_isThrustOn = (glm::length(_thrust) > EPSILON);
}
void Avatar::simulate(float deltaTime, Transmitter* transmitter) { void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
glm::quat orientation = getOrientation(); glm::quat orientation = getOrientation();
glm::vec3 front = orientation * IDENTITY_FRONT; glm::vec3 front = orientation * IDENTITY_FRONT;
glm::vec3 right = orientation * IDENTITY_RIGHT; glm::vec3 right = orientation * IDENTITY_RIGHT;
// Update movement timers // Update movement timers
if (isMyAvatar()) { if (isMyAvatar()) {
_elapsedTimeSinceCollision += deltaTime; _elapsedTimeSinceCollision += deltaTime;
@ -443,9 +465,6 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
glm::vec3 oldVelocity = getVelocity(); glm::vec3 oldVelocity = getVelocity();
if (isMyAvatar()) { if (isMyAvatar()) {
// update position by velocity
_position += _velocity * deltaTime;
// calculate speed // calculate speed
_speed = glm::length(_velocity); _speed = glm::length(_velocity);
} }
@ -480,7 +499,7 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
enableHandMovement &= (it->jointID != AVATAR_JOINT_RIGHT_WRIST); enableHandMovement &= (it->jointID != AVATAR_JOINT_RIGHT_WRIST);
} }
// update avatar skeleton // update avatar skeleton
_skeleton.update(deltaTime, getOrientation(), _position); _skeleton.update(deltaTime, getOrientation(), _position);
//determine the lengths of the body springs now that we have updated the skeleton at least once //determine the lengths of the body springs now that we have updated the skeleton at least once
@ -501,51 +520,49 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
_ballSpringsInitialized = true; _ballSpringsInitialized = true;
} }
// if this is not my avatar, then hand position comes from transmitted data // if this is not my avatar, then hand position comes from transmitted data
if (!isMyAvatar()) { if (!isMyAvatar()) {
_skeleton.joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].position = _handPosition; _skeleton.joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].position = _handPosition;
} }
//detect and respond to collisions with other avatars...
if (isMyAvatar()) {
updateAvatarCollisions(deltaTime);
}
//update the movement of the hand and process handshaking with other avatars... //update the movement of the hand and process handshaking with other avatars...
updateHandMovementAndTouching(deltaTime, enableHandMovement); updateHandMovementAndTouching(deltaTime, enableHandMovement);
_avatarTouch.simulate(deltaTime); _avatarTouch.simulate(deltaTime);
// apply gravity and collision with the ground/floor
if (isMyAvatar() && USING_AVATAR_GRAVITY) {
_velocity += _gravity * (GRAVITY_EARTH * deltaTime);
}
if (isMyAvatar()) { if (isMyAvatar()) {
// apply gravity
if (USING_AVATAR_GRAVITY) {
// For gravity, always move the avatar by the amount driven by gravity, so that the collision
// routines will detect it and collide every frame when pulled by gravity to a surface
//
_velocity += _gravity * (GRAVITY_EARTH * deltaTime);
_position += _gravity * (GRAVITY_EARTH * deltaTime) * deltaTime;
}
updateCollisionWithEnvironment(); updateCollisionWithEnvironment();
updateCollisionWithVoxels();
updateAvatarCollisions(deltaTime);
} }
// update body balls // update body balls
updateBodyBalls(deltaTime); updateBodyBalls(deltaTime);
// test for avatar collision response with the big sphere // test for avatar collision response with the big sphere
if (usingBigSphereCollisionTest) { if (usingBigSphereCollisionTest) {
updateCollisionWithSphere(_TEST_bigSpherePosition, _TEST_bigSphereRadius, deltaTime); updateCollisionWithSphere(_TEST_bigSpherePosition, _TEST_bigSphereRadius, deltaTime);
} }
// collision response with voxels
if (isMyAvatar()) {
updateCollisionWithVoxels();
}
if (isMyAvatar()) { if (isMyAvatar()) {
// add thrust to velocity // add thrust to velocity
_velocity += _thrust * deltaTime; _velocity += _thrust * deltaTime;
// update body yaw by body yaw delta // update body yaw by body yaw delta
orientation = orientation * glm::quat(glm::radians( orientation = orientation * glm::quat(glm::radians(
glm::vec3(_bodyPitchDelta, _bodyYawDelta, _bodyRollDelta) * deltaTime)); glm::vec3(_bodyPitchDelta, _bodyYawDelta, _bodyRollDelta) * deltaTime));
// decay body rotation momentum // decay body rotation momentum
float bodySpinMomentum = 1.0 - BODY_SPIN_FRICTION * deltaTime; float bodySpinMomentum = 1.0 - BODY_SPIN_FRICTION * deltaTime;
if (bodySpinMomentum < 0.0f) { bodySpinMomentum = 0.0f; } if (bodySpinMomentum < 0.0f) { bodySpinMomentum = 0.0f; }
@ -553,22 +570,19 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
_bodyYawDelta *= bodySpinMomentum; _bodyYawDelta *= bodySpinMomentum;
_bodyRollDelta *= bodySpinMomentum; _bodyRollDelta *= bodySpinMomentum;
// Decay velocity. If velocity is really low, increase decay to simulate static friction const float MAX_STATIC_FRICTION_VELOCITY = 0.5f;
const float VELOCITY_DECAY_UNDER_THRUST = 0.2; const float STATIC_FRICTION_STRENGTH = 20.f;
const float VELOCITY_FAST_DECAY = 0.6; applyStaticFriction(deltaTime, _velocity, MAX_STATIC_FRICTION_VELOCITY, STATIC_FRICTION_STRENGTH);
const float VELOCITY_SLOW_DECAY = 3.0;
const float VELOCITY_FAST_THRESHOLD = 2.0f; const float LINEAR_DAMPING_STRENGTH = 1.0f;
float decayConstant, decay; const float SPEED_BRAKE_POWER = 10.0f;
if (glm::length(_thrust) > 0.f) { const float SQUARED_DAMPING_STRENGTH = 0.2f;
decayConstant = VELOCITY_DECAY_UNDER_THRUST; if (_speedBrakes) {
} else if (glm::length(_velocity) > VELOCITY_FAST_THRESHOLD) { applyDamping(deltaTime, _velocity, LINEAR_DAMPING_STRENGTH * SPEED_BRAKE_POWER, SQUARED_DAMPING_STRENGTH * SPEED_BRAKE_POWER);
decayConstant = VELOCITY_FAST_DECAY;
} else { } else {
decayConstant = VELOCITY_SLOW_DECAY; applyDamping(deltaTime, _velocity, LINEAR_DAMPING_STRENGTH, SQUARED_DAMPING_STRENGTH);
} }
decay = glm::clamp(1.0f - decayConstant * deltaTime, 0.0f, 1.0f);
_velocity *= decay;
//pitch and roll the body as a function of forward speed and turning delta //pitch and roll the body as a function of forward speed and turning delta
const float BODY_PITCH_WHILE_WALKING = -20.0; const float BODY_PITCH_WHILE_WALKING = -20.0;
const float BODY_ROLL_WHILE_TURNING = 0.2; const float BODY_ROLL_WHILE_TURNING = 0.2;
@ -659,6 +673,9 @@ void Avatar::simulate(float deltaTime, Transmitter* transmitter) {
_mode = AVATAR_MODE_INTERACTING; _mode = AVATAR_MODE_INTERACTING;
} }
// update position by velocity, and subtract the change added earlier for gravity
_position += _velocity * deltaTime;
// Zero thrust out now that we've added it to velocity in this frame // Zero thrust out now that we've added it to velocity in this frame
_thrust = glm::vec3(0, 0, 0); _thrust = glm::vec3(0, 0, 0);

View file

@ -87,7 +87,10 @@ public:
void reset(); void reset();
void simulate(float deltaTime, Transmitter* transmitter); void simulate(float deltaTime, Transmitter* transmitter);
void updateThrust(float deltaTime, Transmitter * transmitter); void updateThrust(float deltaTime, Transmitter * transmitter);
void updateFromGyrosAndOrWebcam(bool gyroLook, const glm::vec3& amplifyAngles); void updateFromGyrosAndOrWebcam(bool gyroLook,
const glm::vec3& amplifyAngle,
float yawFromTouch,
float pitchFromTouch);
void addBodyYaw(float y) {_bodyYaw += y;}; void addBodyYaw(float y) {_bodyYaw += y;};
void render(bool lookingInMirror, bool renderAvatarBalls); void render(bool lookingInMirror, bool renderAvatarBalls);
@ -210,6 +213,8 @@ private:
float _elapsedTimeMoving; // Timers to drive camera transitions when moving float _elapsedTimeMoving; // Timers to drive camera transitions when moving
float _elapsedTimeStopped; float _elapsedTimeStopped;
float _elapsedTimeSinceCollision; float _elapsedTimeSinceCollision;
bool _speedBrakes;
bool _isThrustOn;
AvatarVoxelSystem _voxels; AvatarVoxelSystem _voxels;

View file

@ -54,7 +54,7 @@ void Hand::calculateGeometry() {
_position = head.getPosition() + head.getOrientation() * offset; _position = head.getPosition() + head.getOrientation() * offset;
_orientation = head.getOrientation(); _orientation = head.getOrientation();
int numLeapBalls = _fingerTips.size() + _fingerRoots.size(); int numLeapBalls = _fingerTips.size();
_leapBalls.resize(numLeapBalls); _leapBalls.resize(numLeapBalls);
for (int i = 0; i < _fingerTips.size(); ++i) { for (int i = 0; i < _fingerTips.size(); ++i) {

View file

@ -82,7 +82,8 @@ Head::Head(Avatar* owningAvatar) :
_cameraYaw(_yaw), _cameraYaw(_yaw),
_isCameraMoving(false), _isCameraMoving(false),
_cameraFollowsHead(false), _cameraFollowsHead(false),
_cameraFollowHeadRate(0.0f) _cameraFollowHeadRate(0.0f),
_face(this)
{ {
if (USING_PHYSICAL_MOHAWK) { if (USING_PHYSICAL_MOHAWK) {
resetHairPhysics(); resetHairPhysics();
@ -289,17 +290,19 @@ void Head::render(float alpha) {
_renderAlpha = alpha; _renderAlpha = alpha;
calculateGeometry(); if (!_face.render(alpha)) {
calculateGeometry();
glEnable(GL_DEPTH_TEST); glEnable(GL_DEPTH_TEST);
glEnable(GL_RESCALE_NORMAL); glEnable(GL_RESCALE_NORMAL);
renderMohawk(); renderMohawk();
renderHeadSphere(); renderHeadSphere();
renderEyeBalls(); renderEyeBalls();
renderEars(); renderEars();
renderMouth(); renderMouth();
renderEyeBrows(); renderEyeBrows();
}
if (_renderLookatVectors) { if (_renderLookatVectors) {
renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition); renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition);

View file

@ -10,11 +10,17 @@
#include <glm/glm.hpp> #include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp> #include <glm/gtc/quaternion.hpp>
#include <SharedUtil.h>
#include <AvatarData.h> #include <AvatarData.h>
#include "world.h"
#include <VoxelConstants.h>
#include "Face.h"
#include "InterfaceConfig.h" #include "InterfaceConfig.h"
#include "SerialInterface.h" #include "SerialInterface.h"
#include <SharedUtil.h> #include "world.h"
enum eyeContactTargets enum eyeContactTargets
{ {
@ -53,12 +59,15 @@ public:
glm::quat getOrientation() const; glm::quat getOrientation() const;
glm::quat getCameraOrientation () const; glm::quat getCameraOrientation () const;
float getScale() const { return _scale; }
glm::vec3 getPosition() const { return _position; } glm::vec3 getPosition() const { return _position; }
const glm::vec3& getEyeLevelPosition() const { return _eyeLevelPosition; } const glm::vec3& getEyeLevelPosition() const { return _eyeLevelPosition; }
glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; } glm::vec3 getRightDirection() const { return getOrientation() * IDENTITY_RIGHT; }
glm::vec3 getUpDirection () const { return getOrientation() * IDENTITY_UP; } glm::vec3 getUpDirection () const { return getOrientation() * IDENTITY_UP; }
glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; } glm::vec3 getFrontDirection() const { return getOrientation() * IDENTITY_FRONT; }
Face& getFace() { return _face; }
const bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected) const bool getReturnToCenter() const { return _returnHeadToCenter; } // Do you want head to try to return to center (depends on interface detected)
float getAverageLoudness() {return _averageLoudness;}; float getAverageLoudness() {return _averageLoudness;};
glm::vec3 calculateAverageEyePosition() { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * ONE_HALF; } glm::vec3 calculateAverageEyePosition() { return _leftEyePosition + (_rightEyePosition - _leftEyePosition ) * ONE_HALF; }
@ -120,6 +129,7 @@ private:
bool _isCameraMoving; bool _isCameraMoving;
bool _cameraFollowsHead; bool _cameraFollowsHead;
float _cameraFollowHeadRate; float _cameraFollowHeadRate;
Face _face;
static ProgramObject* _irisProgram; static ProgramObject* _irisProgram;
static GLuint _irisTextureID; static GLuint _irisTextureID;

View file

@ -137,6 +137,11 @@ int AvatarData::getBroadcastData(unsigned char* destinationBuffer) {
if (numFingerVectors > 255) if (numFingerVectors > 255)
numFingerVectors = 0; // safety. We shouldn't ever get over 255, so consider that invalid. numFingerVectors = 0; // safety. We shouldn't ever get over 255, so consider that invalid.
/////////////////////////////////
// Temporarily disable Leap finger sending, as it's causing a crash whenever someone's got a Leap connected
numFingerVectors = 0;
/////////////////////////////////
*destinationBuffer++ = (unsigned char)numFingerVectors; *destinationBuffer++ = (unsigned char)numFingerVectors;
if (numFingerVectors > 0) { if (numFingerVectors > 0) {
@ -255,8 +260,8 @@ int AvatarData::parseData(unsigned char* sourceBuffer, int numBytes) {
// leap hand data // leap hand data
if (sourceBuffer - startPosition < numBytes) // safety check if (sourceBuffer - startPosition < numBytes) // safety check
{ {
std::vector<glm::vec3> fingerTips = _handData->getFingerTips(); std::vector<glm::vec3> fingerTips;
std::vector<glm::vec3> fingerRoots = _handData->getFingerRoots(); std::vector<glm::vec3> fingerRoots;
unsigned int numFingerVectors = *sourceBuffer++; unsigned int numFingerVectors = *sourceBuffer++;
unsigned int numFingerTips = numFingerVectors / 2; unsigned int numFingerTips = numFingerVectors / 2;
unsigned int numFingerRoots = numFingerVectors - numFingerTips; unsigned int numFingerRoots = numFingerVectors - numFingerTips;
@ -267,6 +272,11 @@ int AvatarData::parseData(unsigned char* sourceBuffer, int numBytes) {
sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerTips[i].y), 4); sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerTips[i].y), 4);
sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerTips[i].z), 4); sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerTips[i].z), 4);
} }
for (size_t i = 0; i < numFingerRoots; ++i) {
sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerRoots[i].x), 4);
sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerRoots[i].y), 4);
sourceBuffer += unpackFloatScalarFromSignedTwoByteFixed((int16_t*) sourceBuffer, &(fingerRoots[i].z), 4);
}
_handData->setFingerTips(fingerTips); _handData->setFingerTips(fingerTips);
_handData->setFingerRoots(fingerRoots); _handData->setFingerRoots(fingerRoots);
} }

View file

@ -20,6 +20,8 @@ const glm::vec3 IDENTITY_RIGHT = glm::vec3( 1.0f, 0.0f, 0.0f);
const glm::vec3 IDENTITY_UP = glm::vec3( 0.0f, 1.0f, 0.0f); const glm::vec3 IDENTITY_UP = glm::vec3( 0.0f, 1.0f, 0.0f);
const glm::vec3 IDENTITY_FRONT = glm::vec3( 0.0f, 0.0f,-1.0f); const glm::vec3 IDENTITY_FRONT = glm::vec3( 0.0f, 0.0f,-1.0f);
const bool LOW_RES_MONO = false; // while in "low res mode" do voxels switch to monochrome
const int TREE_SCALE = 128; const int TREE_SCALE = 128;
const int NUMBER_OF_CHILDREN = 8; const int NUMBER_OF_CHILDREN = 8;

View file

@ -1346,33 +1346,13 @@ int VoxelTree::encodeTreeBitstreamRecursion(VoxelNode* node, unsigned char* outp
// There are two types of nodes for which we want to send colors: // There are two types of nodes for which we want to send colors:
// 1) Leaves - obviously // 1) Leaves - obviously
// 2) Non-leaves who's children would be visible and beyond our LOD. // 2) Non-leaves who's children would be visible but are beyond our LOD.
// NOTE: This code works, but it's pretty expensive, because we're calculating distances for all the grand
// children, which we'll end up doing again later in the next level of recursion. We need to optimize this
// in the future.
bool isLeafOrLOD = childNode->isLeaf(); bool isLeafOrLOD = childNode->isLeaf();
if (params.viewFrustum && childNode->isColored() && !childNode->isLeaf()) { if (params.viewFrustum && childNode->isColored() && !childNode->isLeaf()) {
int grandChildrenInView = 0; int childLevel = childNode->getLevel();
int grandChildrenInLOD = 0; float childBoundary = boundaryDistanceForRenderLevel(childLevel + params.boundaryLevelAdjust);
float grandChildBoundaryDistance = boundaryDistanceForRenderLevel(childNode->getLevel() + float grandChildBoundary = boundaryDistanceForRenderLevel(childLevel + 1 + params.boundaryLevelAdjust);
1 + params.boundaryLevelAdjust); isLeafOrLOD = ((distance <= childBoundary) && !(distance <= grandChildBoundary));
for (int grandChildIndex = 0; grandChildIndex < NUMBER_OF_CHILDREN; grandChildIndex++) {
VoxelNode* grandChild = childNode->getChildAtIndex(grandChildIndex);
if (grandChild && grandChild->isColored() && grandChild->isInView(*params.viewFrustum)) {
grandChildrenInView++;
float grandChildDistance = grandChild->distanceToCamera(*params.viewFrustum);
if (grandChildDistance < grandChildBoundaryDistance) {
grandChildrenInLOD++;
}
}
}
// if any of our grandchildren ARE in view, then we don't want to include our color. If none are, then
// we do want to include our color
if (grandChildrenInView > 0 && grandChildrenInLOD == 0) {
isLeafOrLOD = true;
}
} }
// track children with actual color, only if the child wasn't previously in view! // track children with actual color, only if the child wasn't previously in view!

View file

@ -31,7 +31,7 @@ VoxelNodeData::VoxelNodeData(Node* owningNode) :
void VoxelNodeData::resetVoxelPacket() { void VoxelNodeData::resetVoxelPacket() {
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use // If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
// the clients requested color state. // the clients requested color state.
_currentPacketIsColor = (getWantLowResMoving() && _viewFrustumChanging) ? false : getWantColor(); _currentPacketIsColor = (LOW_RES_MONO && getWantLowResMoving() && _viewFrustumChanging) ? false : getWantColor();
PACKET_TYPE voxelPacketType = _currentPacketIsColor ? PACKET_TYPE_VOXEL_DATA : PACKET_TYPE_VOXEL_DATA_MONOCHROME; PACKET_TYPE voxelPacketType = _currentPacketIsColor ? PACKET_TYPE_VOXEL_DATA : PACKET_TYPE_VOXEL_DATA_MONOCHROME;
int numBytesPacketHeader = populateTypeAndVersion(_voxelPacket, voxelPacketType); int numBytesPacketHeader = populateTypeAndVersion(_voxelPacket, voxelPacketType);
_voxelPacketAt = _voxelPacket + numBytesPacketHeader; _voxelPacketAt = _voxelPacket + numBytesPacketHeader;

View file

@ -133,7 +133,7 @@ void deepestLevelVoxelDistributor(NodeList* nodeList,
// If we're starting a fresh packet, then... // If we're starting a fresh packet, then...
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use // If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
// the clients requested color state. // the clients requested color state.
bool wantColor = ((nodeData->getWantLowResMoving() && viewFrustumChanged) ? false : nodeData->getWantColor()); bool wantColor = LOW_RES_MONO && nodeData->getWantLowResMoving() && viewFrustumChanged ? false : nodeData->getWantColor();
// If we have a packet waiting, and our desired want color, doesn't match the current waiting packets color // If we have a packet waiting, and our desired want color, doesn't match the current waiting packets color
// then let's just send that waiting packet. // then let's just send that waiting packet.