Merge branch 'master' of https://github.com/worklist/hifi
Commit: d241dba573
11 changed files with 317 additions and 210 deletions
@@ -56,16 +56,9 @@ static char STAR_CACHE_FILE[] = "cachedStars.txt";
const glm::vec3 START_LOCATION(6.1f, 0, 1.4f);  // Where one's own agent begins in the world
                                                // (will be overwritten if avatar data file is found)

-const int IDLE_SIMULATE_MSECS = 16;             // How often should call simulate and other stuff
-                                                // in the idle loop? (60 FPS is default)
+const int IDLE_SIMULATE_MSECS = 16;             // How often should call simulate and other stuff
+                                                // in the idle loop? (60 FPS is default)
-
-const bool USING_MOUSE_VIEW_SHIFT = false;
-const float MOUSE_VIEW_SHIFT_RATE = 40.0f;
-const float MOUSE_VIEW_SHIFT_YAW_MARGIN = (float)(1200 * 0.2f);
-const float MOUSE_VIEW_SHIFT_PITCH_MARGIN = (float)(800 * 0.2f);
-const float MOUSE_VIEW_SHIFT_YAW_LIMIT = 45.0;
-const float MOUSE_VIEW_SHIFT_PITCH_LIMIT = 30.0;

const bool DISPLAY_HEAD_MOUSE = true;

// customized canvas that simply forwards requests/events to the singleton application
@@ -137,8 +130,6 @@ Application::Application(int& argc, char** argv) :
    _viewFrustumOffsetRoll(0.0),
    _viewFrustumOffsetDistance(25.0),
    _viewFrustumOffsetUp(0.0),
-   _mouseViewShiftYaw(0.0f),
-   _mouseViewShiftPitch(0.0f),
    _audioScope(256, 200, true),
    _myAvatar(true),
    _mouseX(0),
@@ -307,18 +298,23 @@ void Application::paintGL() {

    } else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
        _myCamera.setTargetPosition(_myAvatar.getSpringyHeadPosition());
-       _myCamera.setTargetRotation(_myAvatar.getBodyYaw() - 180.0f, 0.0f, 0.0f);
+       _myCamera.setTargetRotation(_myAvatar.getBodyYaw() - 180.0f,
+                                   0.0f,
+                                   0.0f);

    } else {
        if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
            _myCamera.setTargetPosition(_myAvatar.getSpringyHeadPosition());
-           _myCamera.setTargetRotation(_myAvatar.getAbsoluteHeadYaw() - _mouseViewShiftYaw,
-                                       _myAvatar.getRenderPitch() + _mouseViewShiftPitch, 0.0f);
+           _myCamera.setTargetRotation(_myAvatar.getAbsoluteHeadYaw(),
+                                       _myAvatar.getAbsoluteHeadPitch(),
+                                       0.0f);

        } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
            _myCamera.setTargetPosition(_myAvatar.getHeadPosition());
-           _myCamera.setTargetRotation(_myAvatar.getBodyYaw() - _mouseViewShiftYaw, _mouseViewShiftPitch, 0.0f);
+           _myCamera.setTargetRotation(_myAvatar.getBodyYaw(),
+                                       0.0f,
+                                       0.0f);
        }
    }

    // important...
@@ -538,14 +534,14 @@ void Application::keyPressEvent(QKeyEvent* event) {
            shiftPaintingColor();
            break;

        case Qt::Key_Minus:
            sendVoxelServerEraseAll();
            break;

        case Qt::Key_Percent:
            sendVoxelServerAddScene();
            break;

+       case Qt::Key_Semicolon:
+           _audio.startEchoTest();
+           break;
+
        case Qt::Key_L:
            _displayLevels = !_displayLevels;
            break;
@@ -782,11 +778,20 @@ void Application::idle() {
    if (diffclock(&_lastTimeIdle, &check) > IDLE_SIMULATE_MSECS) {

        float deltaTime = 1.f/_fps;

-       // update behaviors for avatar hand movement: handControl takes mouse values as input,
-       // and gives back 3D values modulated for smooth transitioning between interaction modes.
-       _handControl.update(_mouseX, _mouseY);
-       _myAvatar.setHandMovementValues(_handControl.getValues());
+       // Use Transmitter Hand to move hand if connected, else use mouse
+       if (_myAvatar.isTransmitterV2Connected()) {
+           const float HAND_FORCE_SCALING = 0.05f;
+           const float* handAcceleration = _myAvatar.getTransmitterHandLastAcceleration();
+           _myAvatar.setHandMovementValues(glm::vec3(-handAcceleration[0] * HAND_FORCE_SCALING,
+                                                     handAcceleration[1] * HAND_FORCE_SCALING,
+                                                     handAcceleration[2] * HAND_FORCE_SCALING));
+       } else {
+           // update behaviors for avatar hand movement: handControl takes mouse values as input,
+           // and gives back 3D values modulated for smooth transitioning between interaction modes.
+           _handControl.update(_mouseX, _mouseY);
+           _myAvatar.setHandMovementValues(_handControl.getValues());
+       }

        // tell my avatar if the mouse is being pressed...
        _myAvatar.setMousePressed(_mousePressed);
@@ -856,10 +861,11 @@ void Application::idle() {
        // walking triggers the handControl to stop
        if (_myAvatar.getMode() == AVATAR_MODE_WALKING) {
            _handControl.stop();
-           _mouseViewShiftYaw *= 0.9;
-           _mouseViewShiftPitch *= 0.9;
        }

+       // Update from Mouse
+       _myAvatar.updateFromMouse(_mouseX, _mouseY, _glWidget->width(), _glWidget->height());
+
        // Read serial port interface devices
        if (_serialPort.active) {
            _serialPort.readData();
@@ -1125,6 +1131,9 @@ void Application::initMenu() {
    _renderAtmosphereOn->setShortcut(Qt::SHIFT | Qt::Key_A);
    (_renderAvatarsOn = renderMenu->addAction("Avatars"))->setCheckable(true);
    _renderAvatarsOn->setChecked(true);
+   (_renderFrameTimerOn = renderMenu->addAction("Show Timer"))->setCheckable(true);
+   _renderFrameTimerOn->setChecked(false);
+
    renderMenu->addAction("First Person", this, SLOT(setRenderFirstPerson(bool)), Qt::Key_P)->setCheckable(true);
    (_oculusOn = renderMenu->addAction("Oculus", this, SLOT(setOculus(bool)), Qt::Key_O))->setCheckable(true);

@@ -1223,8 +1232,8 @@ void Application::init() {

    _handControl.setScreenDimensions(_glWidget->width(), _glWidget->height());

-   _headMouseX = _glWidget->width()/2;
-   _headMouseY = _glWidget->height()/2;
+   _headMouseX = _mouseX = _glWidget->width() / 2;
+   _headMouseY = _mouseY = _glWidget->height() / 2;

    _stars.readInput(STAR_FILE, STAR_CACHE_FILE, 0);

@@ -1234,7 +1243,9 @@ void Application::init() {
    a.distance = 1.5f;
    a.tightness = 8.0f;
    _myCamera.setMode(CAMERA_MODE_THIRD_PERSON, a);
-   _myAvatar.setDisplayingHead(true);
+   _myAvatar.setDisplayingHead(true);
+
+   QCursor::setPos(_headMouseX, _headMouseY);

    OculusManager::connect();

@@ -1268,6 +1279,7 @@ void Application::updateAvatar(float deltaTime) {
    // Update head and body pitch and yaw based on measured gyro rates
    if (_gyroLook->isChecked()) {
        // Render Yaw
+       /* NOTE: PER - Leave here until I get back and can modify to couple gyros to head pitch, yaw
        float renderYawSpring = fabs(_headMouseX - _glWidget->width() / 2.f) / (_glWidget->width() / 2.f);
        const float RENDER_YAW_MULTIPLY = 4.f;
        _myAvatar.setRenderYaw((1.f - renderYawSpring * deltaTime) * _myAvatar.getRenderYaw() +
@@ -1277,34 +1289,7 @@ void Application::updateAvatar(float deltaTime) {
        const float RENDER_PITCH_MULTIPLY = 4.f;
        _myAvatar.setRenderPitch((1.f - renderPitchSpring * deltaTime) * _myAvatar.getRenderPitch() +
                                 renderPitchSpring * deltaTime * -_myAvatar.getHeadPitch() * RENDER_PITCH_MULTIPLY);
        }

-       if (USING_MOUSE_VIEW_SHIFT)
-       {
-           //make it so that when your mouse hits the edge of the screen, the camera shifts
-           float rightBoundary = (float)_glWidget->width() - MOUSE_VIEW_SHIFT_YAW_MARGIN;
-           float bottomBoundary = (float)_glWidget->height() - MOUSE_VIEW_SHIFT_PITCH_MARGIN;
-
-           if (_mouseX > rightBoundary) {
-               float f = (_mouseX - rightBoundary) / ( (float)_glWidget->width() - rightBoundary);
-               _mouseViewShiftYaw += MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
-               if (_mouseViewShiftYaw > MOUSE_VIEW_SHIFT_YAW_LIMIT) { _mouseViewShiftYaw = MOUSE_VIEW_SHIFT_YAW_LIMIT; }
-           } else if (_mouseX < MOUSE_VIEW_SHIFT_YAW_MARGIN) {
-               float f = 1.0 - (_mouseX / MOUSE_VIEW_SHIFT_YAW_MARGIN);
-               _mouseViewShiftYaw -= MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
-               if (_mouseViewShiftYaw < -MOUSE_VIEW_SHIFT_YAW_LIMIT) { _mouseViewShiftYaw = -MOUSE_VIEW_SHIFT_YAW_LIMIT; }
-           }
-           if (_mouseY < MOUSE_VIEW_SHIFT_PITCH_MARGIN) {
-               float f = 1.0 - (_mouseY / MOUSE_VIEW_SHIFT_PITCH_MARGIN);
-               _mouseViewShiftPitch += MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
-               if (_mouseViewShiftPitch > MOUSE_VIEW_SHIFT_PITCH_LIMIT ) { _mouseViewShiftPitch = MOUSE_VIEW_SHIFT_PITCH_LIMIT; }
-           }
-           else if (_mouseY > bottomBoundary) {
-               float f = (_mouseY - bottomBoundary) / ((float)_glWidget->height() - bottomBoundary);
-               _mouseViewShiftPitch -= MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
-               if (_mouseViewShiftPitch < -MOUSE_VIEW_SHIFT_PITCH_LIMIT) { _mouseViewShiftPitch = -MOUSE_VIEW_SHIFT_PITCH_LIMIT; }
-           }
+       */
    }

    if (OculusManager::isConnected()) {
@@ -1673,6 +1658,7 @@ void Application::displayOverlay() {
    #ifndef _WIN32
    _audio.render(_glWidget->width(), _glWidget->height());
    _audioScope.render(20, _glWidget->height() - 200);
+   //_audio.renderEchoCompare();  // PER: Will turn back on to further test echo
    #endif

    //noiseTest(_glWidget->width(), _glWidget->height());
@@ -1695,6 +1681,10 @@ void Application::displayOverlay() {
    // Show detected levels from the serial I/O ADC channel sensors
    if (_displayLevels) _serialPort.renderLevels(_glWidget->width(), _glWidget->height());

+   // Show hand transmitter data if detected
+   if (_myAvatar.isTransmitterV2Connected()) {
+       _myAvatar.transmitterV2RenderLevels(_glWidget->width(), _glWidget->height());
+   }
    // Display stats and log text onscreen
    glLineWidth(1.0f);
    glPointSize(1.0f);
@@ -1706,6 +1696,17 @@ void Application::displayOverlay() {
    if (_chatEntryOn) {
        _chatEntry.render(_glWidget->width(), _glWidget->height());
    }

+   // Show on-screen msec timer
+   if (_renderFrameTimerOn->isChecked()) {
+       char frameTimer[10];
+       double mSecsNow = floor(usecTimestampNow() / 1000.0 + 0.5);
+       mSecsNow = mSecsNow - floor(mSecsNow / 1000.0) * 1000.0;
+       sprintf(frameTimer, "%3.0f\n", mSecsNow);
+       drawtext(_glWidget->width() - 100, _glWidget->height() - 20, 0.30, 0, 1.0, 0, frameTimer, 0, 0, 0);
+       drawtext(_glWidget->width() - 102, _glWidget->height() - 22, 0.30, 0, 1.0, 0, frameTimer, 1, 1, 1);
+   }
+
    // Stats at upper right of screen about who domain server is telling us about
    glPointSize(1.0f);
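A quick check of the timer math above: the first floor rounds microseconds to the nearest millisecond, the second strips whole seconds, leaving the millisecond within the current second. A standalone sketch, not part of the diff (the epoch of usecTimestampNow() is irrelevant to the result):

    #include <cstdio>
    #include <cmath>

    int main() {
        // e.g. 1234.5678 seconds since whatever epoch usecTimestampNow() uses
        double usecs = 1234567800.0;
        double mSecsNow = floor(usecs / 1000.0 + 0.5);             // 1234568 ms, rounded
        mSecsNow = mSecsNow - floor(mSecsNow / 1000.0) * 1000.0;   // 568: ms within the second
        printf("%3.0f\n", mSecsNow);
    }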
@@ -1967,12 +1968,13 @@ void Application::deleteVoxelUnderCursor() {

void Application::resetSensors() {
    _myAvatar.setPosition(START_LOCATION);
-   _headMouseX = _glWidget->width() / 2;
-   _headMouseY = _glWidget->height() / 2;
+   _headMouseX = _mouseX = _glWidget->width() / 2;
+   _headMouseY = _mouseY = _glWidget->height() / 2;

    if (_serialPort.active) {
        _serialPort.resetAverages();
    }
+   QCursor::setPos(_headMouseX, _headMouseY);
    _myAvatar.reset();
}

@@ -2031,17 +2033,13 @@ void* Application::networkReceive(void* args) {

            switch (app->_incomingPacket[0]) {
                case PACKET_HEADER_TRANSMITTER_DATA_V1:
+                   // Process UDP packets that are sent to the client from local sensor devices
+                   // V1 = android app, or the Google Glass
                    app->_myAvatar.processTransmitterData(app->_incomingPacket, bytesReceived);
                    break;
                case PACKET_HEADER_TRANSMITTER_DATA_V2:
-                   float rotationRates[3];
-                   float accelerations[3];
-
-                   memcpy(rotationRates, app->_incomingPacket + 2, sizeof(rotationRates));
-                   memcpy(accelerations, app->_incomingPacket + 3 + sizeof(rotationRates), sizeof(accelerations));
-
-                   printf("The rotation: %f, %f, %f\n", rotationRates[0], rotationRates[1], rotationRates[2]);
+                   // V2 = IOS transmitter app
+                   app->_myAvatar.processTransmitterDataV2(app->_incomingPacket, bytesReceived);
                    break;
                case PACKET_HEADER_MIXED_AUDIO:
                    app->_audio.addReceivedAudioToBuffer(app->_incomingPacket, bytesReceived);
@@ -138,7 +138,8 @@ private:
    QAction* _renderAtmosphereOn;       // Whether to display the atmosphere
    QAction* _renderAvatarsOn;          // Whether to render avatars
    QAction* _oculusOn;                 // Whether to configure the display for the Oculus Rift
-   QAction* _renderStatsOn;            // Whether to show onscreen text overlay with stats
+   QAction* _renderStatsOn;            // Whether to show onscreen text overlay with stats
+   QAction* _renderFrameTimerOn;       // Whether to show onscreen text overlay with stats
    QAction* _logOn;                    // Whether to show on-screen log
    QActionGroup* _voxelModeActions;    // The group of voxel edit mode actions
    QAction* _addVoxelMode;             // Whether add voxel mode is enabled
@@ -182,9 +183,6 @@ private:
    float _viewFrustumOffsetDistance;
    float _viewFrustumOffsetUp;

-   float _mouseViewShiftYaw;
-   float _mouseViewShiftPitch;
-
    Oscilloscope _audioScope;

    Avatar _myAvatar;                   // The rendered avatar of oneself

@@ -87,11 +87,26 @@ int audioCallback (const void* inputBuffer,
    Application* interface = (Application*) QCoreApplication::instance();
    Avatar* interfaceAvatar = interface->getAvatar();

-   int16_t *inputLeft = ((int16_t **) inputBuffer)[0];
+   int16_t* inputLeft = ((int16_t**) inputBuffer)[0];
+   int16_t* outputLeft = ((int16_t**) outputBuffer)[0];
+   int16_t* outputRight = ((int16_t**) outputBuffer)[1];

    // Add Procedural effects to input samples
    parentAudio->addProceduralSounds(inputLeft, BUFFER_LENGTH_SAMPLES);

+   // add output (@speakers) data to the scope
+   parentAudio->_scope->addSamples(1, outputLeft, PACKET_LENGTH_SAMPLES_PER_CHANNEL);
+   parentAudio->_scope->addSamples(2, outputRight, PACKET_LENGTH_SAMPLES_PER_CHANNEL);
+
+   // if needed, add input/output data to echo analysis buffers
+   if (parentAudio->_isGatheringEchoFrames) {
+       memcpy(parentAudio->_echoInputSamples, inputLeft,
+              PACKET_LENGTH_SAMPLES_PER_CHANNEL * sizeof(int16_t));
+       memcpy(parentAudio->_echoOutputSamples, outputLeft,
+              PACKET_LENGTH_SAMPLES_PER_CHANNEL * sizeof(int16_t));
+       parentAudio->addedPingFrame();
+   }
+
    if (inputLeft != NULL) {

        // Measure the loudness of the signal from the microphone and store in audio object
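For orientation across the echo-test hunks in this commit: startEchoTest() (bound to Qt::Key_Semicolon above) raises _isSendingEchoPing; the audio callback then writes one ping frame and raises _isGatheringEchoFrames; the next callback copies input and output into the echo buffers and addedPingFrame() lowers both flags after ECHO_PING_FRAMES frames. A condensed, standalone model of that flow, not part of the diff (flag and method names follow the diff; the loop stands in for the real PortAudio callback):

    #include <cstdio>

    // Minimal sketch of the echo-test state machine added in this commit.
    struct EchoTest {
        bool isSendingEchoPing = false;
        bool isGatheringEchoFrames = false;
        int  echoPingFrameCount = 0;

        void startEchoTest() {                  // Qt::Key_Semicolon handler
            echoPingFrameCount = 0;
            isSendingEchoPing = true;
            isGatheringEchoFrames = false;
        }
        void audioFrame() {                     // one PortAudio callback
            if (isGatheringEchoFrames) {
                // real code: memcpy input/output into _echoInputSamples/_echoOutputSamples
                addedPingFrame();
            }
            if (isSendingEchoPing) {
                // real code: write the sine ping into outputLeft/outputRight
                isGatheringEchoFrames = true;   // capture starts on the next frame
            }
        }
        void addedPingFrame() {
            const int ECHO_PING_FRAMES = 1;
            if (++echoPingFrameCount == ECHO_PING_FRAMES) {
                isGatheringEchoFrames = false;
                isSendingEchoPing = false;      // one frame captured; test done
            }
        }
    };

    int main() {
        EchoTest test;
        test.startEchoTest();
        for (int frame = 0; frame < 3; frame++) {
            test.audioFrame();
            printf("frame %d: sending=%d gathering=%d\n",
                   frame, test.isSendingEchoPing, test.isGatheringEchoFrames);
        }
    }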
@@ -103,7 +118,7 @@ int audioCallback (const void* inputBuffer,
        loudness /= BUFFER_LENGTH_SAMPLES;
        parentAudio->_lastInputLoudness = loudness;

-       // add data to the scope
+       // add input (@microphone) data to the scope
        parentAudio->_scope->addSamples(0, inputLeft, BUFFER_LENGTH_SAMPLES);

        Agent* audioMixer = agentList->soloAgentOfType(AGENT_TYPE_AUDIO_MIXER);
@@ -151,9 +166,6 @@ int audioCallback (const void* inputBuffer,

    }

-   int16_t* outputLeft = ((int16_t**) outputBuffer)[0];
-   int16_t* outputRight = ((int16_t**) outputBuffer)[1];
-
    memset(outputLeft, 0, PACKET_LENGTH_BYTES_PER_CHANNEL);
    memset(outputRight, 0, PACKET_LENGTH_BYTES_PER_CHANNEL);

@@ -184,7 +196,6 @@ int audioCallback (const void* inputBuffer,
    }
    // play whatever we have in the audio buffer
-

    // if we haven't fired off the flange effect, check if we should
    // TODO: lastMeasuredHeadYaw is now relative to body - check if this still works.

@@ -249,11 +260,6 @@ int audioCallback (const void* inputBuffer,
                outputLeft[s] = leftSample;
                outputRight[s] = rightSample;
            }
-
-           // add data to the scope
-           parentAudio->_scope->addSamples(1, outputLeft, PACKET_LENGTH_SAMPLES_PER_CHANNEL);
-           parentAudio->_scope->addSamples(2, outputRight, PACKET_LENGTH_SAMPLES_PER_CHANNEL);
-
            ringBuffer->setNextOutput(ringBuffer->getNextOutput() + PACKET_LENGTH_SAMPLES);

            if (ringBuffer->getNextOutput() == ringBuffer->getBuffer() + RING_BUFFER_SAMPLES) {
@@ -261,11 +267,19 @@ int audioCallback (const void* inputBuffer,
            }
        }
    }

+   if (parentAudio->_isSendingEchoPing) {
+       const float PING_PITCH = 4.f;
+       const float PING_VOLUME = 32000.f;
+       for (int s = 0; s < PACKET_LENGTH_SAMPLES_PER_CHANNEL; s++) {
+           outputLeft[s] = outputRight[s] = (int16_t)(sinf((float) s / PING_PITCH) * PING_VOLUME);
+       }
+       parentAudio->_isGatheringEchoFrames = true;
+   }
    gettimeofday(&parentAudio->_lastCallbackTime, NULL);
    return paContinue;
}

void outputPortAudioError(PaError error) {
    if (error != paNoError) {
        printLog("-- portaudio termination error --\n");
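A rough check on the ping tone generated above: sinf((float) s / PING_PITCH) advances 1/PING_PITCH radians per sample, so the audible pitch depends on the stream's sample rate, which Pa_OpenDefaultStream sets outside this hunk. A sketch of the arithmetic, assuming a 22050 Hz stream:

    #include <cstdio>

    int main() {
        const float PING_PITCH = 4.f;        // from the diff: sinf((float) s / PING_PITCH)
        const float SAMPLE_RATE = 22050.f;   // assumption; the real rate is set in Pa_OpenDefaultStream
        const float TWO_PI = 6.2831853f;
        float cyclesPerSample = (1.f / PING_PITCH) / TWO_PI;              // ~0.0398 cycles per sample
        printf("ping tone ~ %.0f Hz\n", cyclesPerSample * SAMPLE_RATE);   // ~877 Hz
    }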
@@ -286,8 +300,12 @@ Audio::Audio(Oscilloscope* scope) :
    _lastAcceleration(0),
    _totalPacketsReceived(0),
    _firstPlaybackTime(),
-   _packetsReceivedThisPlayback(0)
+   _packetsReceivedThisPlayback(0),
+   _shouldStartEcho(false),
+   _isSendingEchoPing(false),
+   _echoPingFrameCount(0),
+   _isGatheringEchoFrames(false)
{
    outputPortAudioError(Pa_Initialize());
    outputPortAudioError(Pa_OpenDefaultStream(&_stream,
                                              2,
@@ -300,7 +318,12 @@ Audio::Audio(Oscilloscope* scope) :

    // start the stream now that sources are good to go
    outputPortAudioError(Pa_StartStream(_stream));

+   _echoInputSamples = new int16_t[BUFFER_LENGTH_BYTES];
+   _echoOutputSamples = new int16_t[BUFFER_LENGTH_BYTES];
+   memset(_echoInputSamples, 0, BUFFER_LENGTH_SAMPLES * sizeof(int));
+   memset(_echoOutputSamples, 0, BUFFER_LENGTH_SAMPLES * sizeof(int));
+
    gettimeofday(&_lastReceiveTime, NULL);
}

@@ -311,6 +334,28 @@ Audio::~Audio() {
    }
}

+void Audio::renderEchoCompare() {
+    const int XPOS = 0;
+    const int YPOS = 500;
+    const int YSCALE = 500;
+    const int XSCALE = 2;
+    glPointSize(1.0);
+    glLineWidth(1.0);
+    glDisable(GL_LINE_SMOOTH);
+    glColor3f(1,1,1);
+    glBegin(GL_LINE_STRIP);
+    for (int i = 0; i < BUFFER_LENGTH_SAMPLES; i++) {
+        glVertex2f(XPOS + i * XSCALE, YPOS + _echoInputSamples[i]/YSCALE);
+    }
+    glEnd();
+    glColor3f(0,1,1);
+    glBegin(GL_LINE_STRIP);
+    for (int i = 0; i < BUFFER_LENGTH_SAMPLES; i++) {
+        glVertex2f(XPOS + i * XSCALE, YPOS + _echoOutputSamples[i]/YSCALE);
+    }
+    glEnd();
+}
+
// Take a pointer to the acquired microphone input samples and add procedural sounds
void Audio::addProceduralSounds(int16_t* inputBuffer, int numSamples) {
    const float MAX_AUDIBLE_VELOCITY = 6.0;
@@ -324,11 +369,61 @@ void Audio::addProceduralSounds(int16_t* inputBuffer, int numSamples) {
    // Add a noise-modulated sinewave with volume that tapers off with speed increasing
    if ((speed > MIN_AUDIBLE_VELOCITY) && (speed < MAX_AUDIBLE_VELOCITY)) {
        for (int i = 0; i < numSamples; i++) {
-           inputBuffer[i] += (int16_t)((cosf((float) i / SOUND_PITCH * speed) * randFloat()) * volume * speed);
+           inputBuffer[i] += (int16_t)((sinf((float) i / SOUND_PITCH * speed) * randFloat()) * volume * speed);
        }
    }
}

+void Audio::startEchoTest() {
+    _shouldStartEcho = true;
+    _echoPingFrameCount = 0;
+    _isSendingEchoPing = true;
+    _isGatheringEchoFrames = false;
+}
+
+void Audio::addedPingFrame() {
+    const int ECHO_PING_FRAMES = 1;
+    _echoPingFrameCount++;
+    if (_echoPingFrameCount == ECHO_PING_FRAMES) {
+        _isGatheringEchoFrames = false;
+        _isSendingEchoPing = false;
+        //startEchoTest();
+    }
+}
+void Audio::analyzeEcho(int16_t* inputBuffer, int16_t* outputBuffer, int numSamples) {
+    // Compare output and input streams, looking for evidence of correlation needing echo cancellation
+    //
+    // OFFSET_RANGE tells us how many samples to vary the analysis window when looking for correlation,
+    // and should be equal to the largest physical distance between speaker and microphone, where
+    // OFFSET_RANGE = 1 / (speedOfSound (meters / sec) / SamplingRate (samples / sec)) * distance
+    //
+    const int OFFSET_RANGE = 10;
+    const int SIGNAL_FLOOR = 1000;
+    float correlation[2 * OFFSET_RANGE + 1];
+    int numChecked = 0;
+    bool foundSignal = false;
+
+    memset(correlation, 0, sizeof(float) * (2 * OFFSET_RANGE + 1));
+
+    for (int offset = -OFFSET_RANGE; offset <= OFFSET_RANGE; offset++) {
+        for (int i = 0; i < numSamples; i++) {
+            if ((i + offset >= 0) && (i + offset < numSamples)) {
+                correlation[offset + OFFSET_RANGE] +=
+                    (float) abs(inputBuffer[i] - outputBuffer[i + offset]);
+                numChecked++;
+                foundSignal |= (inputBuffer[i] > SIGNAL_FLOOR);
+            }
+        }
+        correlation[offset + OFFSET_RANGE] /= numChecked;
+        numChecked = 0;
+        if (foundSignal) {
+            printLog("%4.2f, ", correlation[offset + OFFSET_RANGE]);
+        }
+    }
+    if (foundSignal) printLog("\n");
+}
+
void Audio::addReceivedAudioToBuffer(unsigned char* receivedData, int receivedBytes) {
    const int NUM_INITIAL_PACKETS_DISCARD = 3;

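The OFFSET_RANGE comment in analyzeEcho packs a unit conversion worth spelling out: samples of delay per meter equal the sample rate divided by the speed of sound. A worked check, not part of the diff (the sample rate is an assumption; at 22050 Hz, the diff's OFFSET_RANGE of 10 scans roughly plus or minus 16 cm of speaker-to-microphone distance):

    #include <cstdio>

    int main() {
        const float SPEED_OF_SOUND = 343.f;   // meters / sec
        const float SAMPLE_RATE = 22050.f;    // samples / sec; assumption, set in Pa_OpenDefaultStream
        float samplesPerMeter = SAMPLE_RATE / SPEED_OF_SOUND;   // ~64.3 samples of delay per meter

        // The comment's formula: offsetRange = 1 / (speedOfSound / samplingRate) * distance
        float distance = 1.f;                 // speaker-to-microphone distance in meters
        printf("1 m of travel ~ %.0f samples\n", samplesPerMeter * distance);

        // Conversely, the diff's OFFSET_RANGE = 10 covers about 10 / 64.3 meters.
        printf("OFFSET_RANGE 10 ~ %.2f m\n", 10.f / samplesPerMeter);
    }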
@@ -345,7 +440,6 @@ void Audio::addReceivedAudioToBuffer(unsigned char* receivedData, int receivedBytes) {

    if (::stdev.getSamples() > 500) {
        _measuredJitter = ::stdev.getStDev();
-       //printLog("Avg: %4.2f, Stdev: %4.2f\n", stdev.getAverage(), sharedAudioData->measuredJitter);
        ::stdev.reset();
    }

@@ -32,8 +32,15 @@ public:
    void setLastVelocity(glm::vec3 lastVelocity) { _lastVelocity = lastVelocity; };

    void addProceduralSounds(int16_t* inputBuffer, int numSamples);
+   void analyzeEcho(int16_t* inputBuffer, int16_t* outputBuffer, int numSamples);
+
    void addReceivedAudioToBuffer(unsigned char* receivedData, int receivedBytes);

+   void startEchoTest();
+   void addedPingFrame();
+   void renderEchoCompare();
+
private:
    PaStream* _stream;
    AudioRingBuffer _ringBuffer;
@@ -50,6 +57,12 @@ private:
    int _totalPacketsReceived;
    timeval _firstPlaybackTime;
    int _packetsReceivedThisPlayback;
+   bool _shouldStartEcho;
+   bool _isSendingEchoPing;
+   int _echoPingFrameCount;
+   int16_t* _echoInputSamples;
+   int16_t* _echoOutputSamples;
+   bool _isGatheringEchoFrames;

    // give access to AudioData class from audioCallback
    friend int audioCallback (const void*, void*, unsigned long, const PaStreamCallbackTimeInfo*, PaStreamCallbackFlags, void*);

@@ -10,6 +10,7 @@
#include <vector>
#include <lodepng.h>
+#include <SharedUtil.h>
#include "world.h"
#include "Avatar.h"
#include "Head.h"
#include "Log.h"
@@ -82,6 +83,7 @@ Avatar::Avatar(bool isMine) {
    _transmitterPackets = 0;
    _transmitterIsFirstData = true;
    _transmitterInitialReading = glm::vec3(0.f, 0.f, 0.f);
+   _isTransmitterV2Connected = false;
    _speed = 0.0;
    _pelvisStandingHeight = 0.0f;
    _displayingHead = true;
@@ -96,8 +98,6 @@ Avatar::Avatar(bool isMine) {
    _head.initialize();

    _movedHandOffset = glm::vec3(0.0f, 0.0f, 0.0f);
-   _renderYaw = 0.0;
-   _renderPitch = 0.0;
    _sphere = NULL;
    _handHoldingPosition = glm::vec3(0.0f, 0.0f, 0.0f);
    _distanceToNearestAvatar = std::numeric_limits<float>::max();
@@ -126,7 +126,6 @@ Avatar::Avatar(const Avatar &otherAvatar) {
    _mode = otherAvatar._mode;
    _isMine = otherAvatar._isMine;
-   _renderYaw = otherAvatar._renderYaw;
-   _renderPitch = otherAvatar._renderPitch;
    _maxArmLength = otherAvatar._maxArmLength;
    _transmitterTimer = otherAvatar._transmitterTimer;
    _transmitterIsFirstData = otherAvatar._transmitterIsFirstData;
@@ -134,6 +133,7 @@ Avatar::Avatar(const Avatar &otherAvatar) {
    _transmitterHz = otherAvatar._transmitterHz;
    _transmitterInitialReading = otherAvatar._transmitterInitialReading;
    _transmitterPackets = otherAvatar._transmitterPackets;
+   _isTransmitterV2Connected = otherAvatar._isTransmitterV2Connected;
    _TEST_bigSphereRadius = otherAvatar._TEST_bigSphereRadius;
    _TEST_bigSpherePosition = otherAvatar._TEST_bigSpherePosition;
    _movedHandOffset = otherAvatar._movedHandOffset;
@@ -291,6 +291,20 @@ bool Avatar::getIsNearInteractingOther() {
    return _avatarTouch.getAbleToReachOtherAvatar();
}

+void Avatar::updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight) {
+    // Update pitch and yaw based on mouse behavior
+    const float MOUSE_MOVE_RADIUS = 0.25f;
+    const float MOUSE_ROTATE_SPEED = 7.5f;
+    float mouseLocationX = (float)mouseX / (float)screenWidth - 0.5f;
+
+    if (fabs(mouseLocationX) > MOUSE_MOVE_RADIUS) {
+        float mouseMag = (fabs(mouseLocationX) - MOUSE_MOVE_RADIUS) / (0.5f - MOUSE_MOVE_RADIUS) * MOUSE_ROTATE_SPEED;
+        setBodyYaw(getBodyYaw() - ((mouseLocationX > 0.f) ? mouseMag : -mouseMag));
+    }
+
+    return;
+}
+
void Avatar::simulate(float deltaTime) {

    //figure out if the mouse cursor is over any body spheres...
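To make the new dead-zone math concrete: mouseLocationX runs from -0.5 at the left edge to +0.5 at the right, nothing happens across the middle half of the screen, and the turn rate ramps from 0 at the dead-zone edge up to MOUSE_ROTATE_SPEED at the screen edge. A worked check, not part of the diff (units are degrees of body yaw per idle update, which the surrounding code implies but does not state):

    #include <cstdio>
    #include <cmath>

    int main() {
        const float MOUSE_MOVE_RADIUS = 0.25f;
        const float MOUSE_ROTATE_SPEED = 7.5f;
        int screenWidth = 1200;   // example value

        const int samples[] = {600, 900, 1140, 1200};
        for (int mouseX : samples) {
            float mouseLocationX = (float)mouseX / (float)screenWidth - 0.5f;
            float mouseMag = 0.f;
            if (fabs(mouseLocationX) > MOUSE_MOVE_RADIUS) {
                mouseMag = (fabs(mouseLocationX) - MOUSE_MOVE_RADIUS) / (0.5f - MOUSE_MOVE_RADIUS)
                           * MOUSE_ROTATE_SPEED;
            }
            printf("mouseX=%4d -> yaw change %.1f per update\n", mouseX, mouseMag);
        }
        // 600 (center) -> 0.0, 900 (dead-zone edge) -> 0.0, 1140 -> 6.0, 1200 (edge) -> 7.5
    }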
@@ -595,110 +609,6 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {

void Avatar::updateHead(float deltaTime) {

-   /*
-   // Decay head back to center if turned on
-   if (_isMine && _returnHeadToCenter) {
-       // Decay back toward center
-       _headPitch *= (1.0f - DECAY * _head.returnSpringScale * 2 * deltaTime);
-       _headYaw *= (1.0f - DECAY * _head.returnSpringScale * 2 * deltaTime);
-       _headRoll *= (1.0f - DECAY * _head.returnSpringScale * 2 * deltaTime);
-   }
-
-   // For invensense gyro, decay only slightly when roughly centered
-   if (_isMine) {
-       const float RETURN_RANGE = 15.0;
-       const float RETURN_STRENGTH = 2.0;
-       if (fabs(_headPitch) < RETURN_RANGE) { _headPitch *= (1.0f - RETURN_STRENGTH * deltaTime); }
-       if (fabs(_headYaw) < RETURN_RANGE) { _headYaw *= (1.0f - RETURN_STRENGTH * deltaTime); }
-       if (fabs(_headRoll) < RETURN_RANGE) { _headRoll *= (1.0f - RETURN_STRENGTH * deltaTime); }
-   }
-
-   if (_head.noise) {
-       // Move toward new target
-       _headPitch += (_head.pitchTarget - _headPitch) * 10 * deltaTime; // (1.f - DECAY*deltaTime)*Pitch + ;
-       _headYaw += (_head.yawTarget - _headYaw ) * 10 * deltaTime; // (1.f - DECAY*deltaTime);
-       _headRoll *= 1.f - (DECAY * deltaTime);
-   }
-
-   _head.leanForward *= (1.f - DECAY * 30 * deltaTime);
-   _head.leanSideways *= (1.f - DECAY * 30 * deltaTime);
-
-   // Update where the avatar's eyes are
-   //
-   // First, decide if we are making eye contact or not
-   if (randFloat() < 0.005) {
-       _head.eyeContact = !_head.eyeContact;
-       _head.eyeContact = 1;
-       if (!_head.eyeContact) {
-           // If we just stopped making eye contact,move the eyes markedly away
-           _head.eyeballPitch[0] = _head.eyeballPitch[1] = _head.eyeballPitch[0] + 5.0 + (randFloat() - 0.5) * 10;
-           _head.eyeballYaw [0] = _head.eyeballYaw [1] = _head.eyeballYaw [0] + 5.0 + (randFloat() - 0.5) * 5;
-       } else {
-           // If now making eye contact, turn head to look right at viewer
-           SetNewHeadTarget(0,0);
-       }
-   }
-
-   const float DEGREES_BETWEEN_VIEWER_EYES = 3;
-   const float DEGREES_TO_VIEWER_MOUTH = 7;
-
-   if (_head.eyeContact) {
-       // Should we pick a new eye contact target?
-       if (randFloat() < 0.01) {
-           // Choose where to look next
-           if (randFloat() < 0.1) {
-               _head.eyeContactTarget = MOUTH;
-           } else {
-               if (randFloat() < 0.5) _head.eyeContactTarget = LEFT_EYE; else _head.eyeContactTarget = RIGHT_EYE;
-           }
-       }
-       // Set eyeball pitch and yaw to make contact
-       float eye_target_yaw_adjust = 0;
-       float eye_target_pitch_adjust = 0;
-       if (_head.eyeContactTarget == LEFT_EYE) eye_target_yaw_adjust = DEGREES_BETWEEN_VIEWER_EYES;
-       if (_head.eyeContactTarget == RIGHT_EYE) eye_target_yaw_adjust = -DEGREES_BETWEEN_VIEWER_EYES;
-       if (_head.eyeContactTarget == MOUTH) eye_target_pitch_adjust = DEGREES_TO_VIEWER_MOUTH;
-
-       _head.eyeballPitch[0] = _head.eyeballPitch[1] = -_headPitch + eye_target_pitch_adjust;
-       _head.eyeballYaw[0] = _head.eyeballYaw[1] = -_headYaw + eye_target_yaw_adjust;
-   }
-
-   if (_head.noise)
-   {
-       _headPitch += (randFloat() - 0.5) * 0.2 * _head.noiseEnvelope;
-       _headYaw += (randFloat() - 0.5) * 0.3 *_head.noiseEnvelope;
-       //PupilSize += (randFloat() - 0.5) * 0.001*NoiseEnvelope;
-
-       if (randFloat() < 0.005) _head.mouthWidth = MouthWidthChoices[rand()%3];
-
-       if (!_head.eyeContact) {
-           if (randFloat() < 0.01) _head.eyeballPitch[0] = _head.eyeballPitch[1] = (randFloat() - 0.5) * 20;
-           if (randFloat() < 0.01) _head.eyeballYaw[0] = _head.eyeballYaw[1] = (randFloat()- 0.5) * 10;
-       }
-
-       if ((randFloat() < 0.005) && (fabs(_head.pitchTarget - _headPitch) < 1.0) && (fabs(_head.yawTarget - _headYaw) < 1.0)) {
-           SetNewHeadTarget((randFloat()-0.5) * 20.0, (randFloat()-0.5) * 45.0);
-       }
-
-       if (0) {
-
-           // Pick new target
-           _head.pitchTarget = (randFloat() - 0.5) * 45;
-           _head.yawTarget = (randFloat() - 0.5) * 22;
-       }
-       if (randFloat() < 0.01)
-       {
-           _head.eyebrowPitch[0] = _head.eyebrowPitch[1] = BrowPitchAngle[rand()%3];
-           _head.eyebrowRoll [0] = _head.eyebrowRoll[1] = BrowRollAngle[rand()%5];
-           _head.eyebrowRoll [1] *=-1;
-       }
-   }
-
-   // Update audio trailing average for rendering facial animations
-   const float AUDIO_AVERAGING_SECS = 0.05;
-   _head.averageLoudness = (1.f - deltaTime / AUDIO_AVERAGING_SECS) * _head.averageLoudness +
-                           (deltaTime / AUDIO_AVERAGING_SECS) * _audioLoudness;
-   */
}

@@ -1410,6 +1320,84 @@ void Avatar::processTransmitterData(unsigned char* packetData, int numBytes) {

        }
    }
}
+//
+// Process UDP data from version 2 Transmitter acting as Hand
+//
+void Avatar::processTransmitterDataV2(unsigned char* packetData, int numBytes) {
+    if (numBytes == 3 + sizeof(_transmitterHandLastRotationRates) +
+                        sizeof(_transmitterHandLastAcceleration)) {
+        memcpy(_transmitterHandLastRotationRates, packetData + 2,
+               sizeof(_transmitterHandLastRotationRates));
+        memcpy(_transmitterHandLastAcceleration, packetData + 3 +
+               sizeof(_transmitterHandLastRotationRates),
+               sizeof(_transmitterHandLastAcceleration));
+        // Convert from transmitter units to internal units
+        for (int i = 0; i < 3; i++) {
+            _transmitterHandLastRotationRates[i] *= 180.f / PI;
+            _transmitterHandLastAcceleration[i] *= GRAVITY_EARTH;
+        }
+        if (!_isTransmitterV2Connected) {
+            printf("Transmitter V2 Connected.\n");
+            _isTransmitterV2Connected = true;
+        }
+    } else {
+        printf("Transmitter V2 packet read error.\n");
+    }
+}
+
+void Avatar::transmitterV2RenderLevels(int width, int height) {
+
+    char val[50];
+    const int LEVEL_CORNER_X = 10;
+    const int LEVEL_CORNER_Y = 400;
+
+    // Draw the numeric degree/sec values from the gyros
+    sprintf(val, "Yaw %4.1f", _transmitterHandLastRotationRates[1]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Pitch %4.1f", _transmitterHandLastRotationRates[0]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 15, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Roll %4.1f", _transmitterHandLastRotationRates[2]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 30, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "X %4.3f", _transmitterHandLastAcceleration[0]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 45, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Y %4.3f", _transmitterHandLastAcceleration[1]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 60, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+    sprintf(val, "Z %4.3f", _transmitterHandLastAcceleration[2]);
+    drawtext(LEVEL_CORNER_X, LEVEL_CORNER_Y + 75, 0.10, 0, 1.0, 1, val, 0, 1, 0);
+
+    // Draw the levels as horizontal lines
+    const int LEVEL_CENTER = 150;
+    const float ACCEL_VIEW_SCALING = 50.f;
+    glLineWidth(2.0);
+    glColor4f(1, 1, 1, 1);
+    glBegin(GL_LINES);
+    // Gyro rates
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y - 3);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + _transmitterHandLastRotationRates[1], LEVEL_CORNER_Y - 3);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 12);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + _transmitterHandLastRotationRates[0], LEVEL_CORNER_Y + 12);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 27);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + _transmitterHandLastRotationRates[2], LEVEL_CORNER_Y + 27);
+    // Acceleration
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 42);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + (int)(_transmitterHandLastAcceleration[0] * ACCEL_VIEW_SCALING),
+               LEVEL_CORNER_Y + 42);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 57);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + (int)(_transmitterHandLastAcceleration[1] * ACCEL_VIEW_SCALING),
+               LEVEL_CORNER_Y + 57);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 72);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER + (int)(_transmitterHandLastAcceleration[2] * ACCEL_VIEW_SCALING),
+               LEVEL_CORNER_Y + 72);
+
+    glEnd();
+    // Draw green vertical centerline
+    glColor4f(0, 1, 0, 0.5);
+    glBegin(GL_LINES);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y - 6);
+    glVertex2f(LEVEL_CORNER_X + LEVEL_CENTER, LEVEL_CORNER_Y + 30);
+    glEnd();
+}
+

void Avatar::setHeadFromGyros(glm::vec3* eulerAngles, glm::vec3* angularVelocity, float deltaTime, float smoothingTime) {
    //
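For reference while reading the two memcpy calls in processTransmitterDataV2: the size check of 3 + sizeof(rates) + sizeof(accels) and the offsets + 2 and + 3 + sizeof(rates) imply a 27-byte packet with two skipped bytes whose meaning the diff does not give. A standalone parser sketch built on that inference, not part of the diff, with the same unit conversions:

    #include <cstdint>
    #include <cstring>
    #include <cstdio>

    // Inferred wire layout of the V2 transmitter packet (27 bytes total):
    //   byte  0       packet type (PACKET_HEADER_TRANSMITTER_DATA_V2)
    //   byte  1       unidentified (skipped by the "+ 2" offset)
    //   bytes 2..13   float rotationRates[3]   (radians/sec on the wire, per the conversion)
    //   byte  14      unidentified (skipped by the "+ 3 + sizeof" offset)
    //   bytes 15..26  float accelerations[3]   (g on the wire, per the conversion)
    void parseTransmitterV2(const unsigned char* packetData, int numBytes) {
        float rotationRates[3];
        float accelerations[3];
        if (numBytes != (int)(3 + sizeof(rotationRates) + sizeof(accelerations))) {
            printf("Transmitter V2 packet read error.\n");
            return;
        }
        memcpy(rotationRates, packetData + 2, sizeof(rotationRates));
        memcpy(accelerations, packetData + 3 + sizeof(rotationRates), sizeof(accelerations));
        // Same unit conversion as the diff: rad/sec -> deg/sec, g -> m/sec^2.
        for (int i = 0; i < 3; i++) {
            rotationRates[i] *= 180.f / 3.14159265f;
            accelerations[i] *= 9.80665f;
        }
        // Index meanings follow transmitterV2RenderLevels: [1] yaw, [0] pitch, [2] roll.
        printf("yaw rate %.1f deg/sec, accel x %.2f m/s^2\n", rotationRates[1], accelerations[0]);
    }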
@@ -82,11 +82,8 @@ public:

    void reset();
    void updateHeadFromGyros(float frametime, SerialInterface * serialInterface, glm::vec3 * gravity);
+   void updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight);
    void setNoise (float mag) {_head.noise = mag;}
-   void setRenderYaw(float y) {_renderYaw = y;}
-   void setRenderPitch(float p) {_renderPitch = p;}
-   float getRenderYaw() {return _renderYaw;}
-   float getRenderPitch() {return _renderPitch;}
    float getLastMeasuredHeadYaw() const {return _head.yawRate;}
    float getBodyYaw() {return _bodyYaw;};
    void addBodyYaw(float y) {_bodyYaw += y;};
@@ -130,6 +127,12 @@ public:

    // Related to getting transmitter UDP data used to animate the avatar hand
    void processTransmitterData(unsigned char * packetData, int numBytes);
+   void processTransmitterDataV2(unsigned char * packetData, int numBytes);
+   const bool isTransmitterV2Connected() const { return _isTransmitterV2Connected; };
+   const float* getTransmitterHandLastAcceleration() const { return _transmitterHandLastAcceleration; };
+   const float* getTransmitterHandLastRotationRates() const { return _transmitterHandLastRotationRates; };
+   void transmitterV2RenderLevels(int width, int height);
+
    float getTransmitterHz() { return _transmitterHz; };

    void writeAvatarDataToFile();
@@ -184,6 +187,9 @@ private:
    float _transmitterHz;
    int _transmitterPackets;
    glm::vec3 _transmitterInitialReading;
+   float _transmitterHandLastRotationRates[3];
+   float _transmitterHandLastAcceleration[3];
+   bool _isTransmitterV2Connected;
    float _pelvisStandingHeight;
    float _height;
    Balls* _balls;

@@ -292,14 +292,19 @@ void Head::render(bool lookingInMirror, float bodyYaw) {
    glPopMatrix();

    // Mouth
+   const float MIN_LOUDNESS_SCALE_WIDTH = 0.7f;
+   const float WIDTH_SENSITIVITY = 60.f;
+   const float HEIGHT_SENSITIVITY = 30.f;
+   const float MIN_LOUDNESS_SCALE_HEIGHT = 1.0f;
+
    glPushMatrix();
    glTranslatef(0,-0.35,0.75);
    glColor3f(0,0,0);
    glRotatef(mouthPitch, 1, 0, 0);
    glRotatef(mouthYaw, 0, 0, 1);
-   if (averageLoudness > 1.f) {
-       glScalef(mouthWidth * (.7f + sqrt(averageLoudness) /60.f),
-                mouthHeight * (1.f + sqrt(averageLoudness) /30.f), 1);
+   if ((averageLoudness > 1.f) && (averageLoudness < 10000.f)) {
+       glScalef(mouthWidth * (MIN_LOUDNESS_SCALE_WIDTH + sqrt(averageLoudness) / WIDTH_SENSITIVITY),
+                mouthHeight * (MIN_LOUDNESS_SCALE_HEIGHT + sqrt(averageLoudness) / HEIGHT_SENSITIVITY), 1);
    } else {
        glScalef(mouthWidth, mouthHeight, 1);
    }

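Plugging numbers into the new mouth constants: at averageLoudness 900 the square root is 30, giving a mouth 1.2x as wide and 2.0x as tall, while the new upper bound at 10000 keeps extreme loudness from scaling it further. A worked check, not part of the diff:

    #include <cstdio>
    #include <cmath>

    int main() {
        const float MIN_LOUDNESS_SCALE_WIDTH = 0.7f;
        const float WIDTH_SENSITIVITY = 60.f;
        const float HEIGHT_SENSITIVITY = 30.f;
        const float MIN_LOUDNESS_SCALE_HEIGHT = 1.0f;

        float averageLoudness = 900.f;   // sqrt = 30
        printf("width x%.1f, height x%.1f\n",
               MIN_LOUDNESS_SCALE_WIDTH + sqrtf(averageLoudness) / WIDTH_SENSITIVITY,    // 1.2
               MIN_LOUDNESS_SCALE_HEIGHT + sqrtf(averageLoudness) / HEIGHT_SENSITIVITY); // 2.0
    }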
@@ -113,18 +113,19 @@ void Oscilloscope::render(int x, int y) {
        }
    }

-   glLineWidth(2.0);
+   glLineWidth(1.0);
+   glDisable(GL_LINE_SMOOTH);
    glPushMatrix();
    glTranslatef((float)x + 0.0f, (float)y + _valHeight / 2.0f, 0.0f);
    glScaled(1.0f, _valHeight / 32767.0f, 1.0f);
    glVertexPointer(2, GL_SHORT, 0, _arrVertices);
    glEnableClientState(GL_VERTEX_ARRAY);
    glColor3f(1.0f, 1.0f, 1.0f);
-   glDrawArrays(GL_LINES, MAX_SAMPLES * 0, usedWidth);
+   glDrawArrays(GL_LINE_STRIP, MAX_SAMPLES * 0, usedWidth);
    glColor3f(0.0f, 1.0f ,1.0f);
-   glDrawArrays(GL_LINES, MAX_SAMPLES * 1, usedWidth);
-   glColor3f(1.0f, 1.0f ,0.0f);
-   glDrawArrays(GL_LINES, MAX_SAMPLES * 2, usedWidth);
+   glDrawArrays(GL_LINE_STRIP, MAX_SAMPLES * 1, usedWidth);
+   glColor3f(0.0f, 1.0f ,1.0f);
+   glDrawArrays(GL_LINE_STRIP, MAX_SAMPLES * 2, usedWidth);
    glDisableClientState(GL_VERTEX_ARRAY);
    glPopMatrix();
}

@@ -187,7 +187,7 @@ void SerialInterface::readData() {
    convertHexToInt(sensorBuffer + 10, accelYRate);
    convertHexToInt(sensorBuffer + 14, accelXRate);

-   const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * 9.80665f;
+   const float LSB_TO_METERS_PER_SECOND2 = 1.f / 16384.f * GRAVITY_EARTH;
    // From MPU-9150 register map, with setting on
    // highest resolution = +/- 2G

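The replaced constant folds together the MPU-9150's plus/minus 2 g accelerometer scale (16384 LSB per g, per the register-map comment) and standard gravity. Spelled out in a standalone sketch, not part of the diff:

    #include <cstdio>

    int main() {
        const float LSB_PER_G = 16384.f;        // MPU-9150 at +/- 2 g full scale
        const float GRAVITY_EARTH = 9.80665f;   // m/s^2, matching the define added at the end of this diff
        const float LSB_TO_METERS_PER_SECOND2 = 1.f / LSB_PER_G * GRAVITY_EARTH;

        int rawReading = 16384;                 // a reading of exactly 1 g
        printf("%d LSB -> %.4f m/s^2\n", rawReading, rawReading * LSB_TO_METERS_PER_SECOND2);
        // prints: 16384 LSB -> 9.8066 m/s^2 (full scale +/-32768 maps to about +/-19.6 m/s^2)
    }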
@@ -13,6 +13,8 @@
#error "This is an implementation file - not intended for direct inclusion."
#endif

+#include <locale.h>
+
#include "Config.h"

#include "starfield/data/InputVertex.h"
@@ -99,6 +101,7 @@ namespace starfield {
            // parse
            float azi, alt;
            unsigned c;
+           setlocale(LC_NUMERIC, "C");
            if (sscanf(line, " %f %f #%x", & azi, & alt, & c) == 3) {

                if (spaceFor( getBrightness(c) )) {
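The new setlocale call matters because sscanf's %f honors LC_NUMERIC: under a decimal-comma locale, "12.5" parses as 12 and stops at the dot, which would silently corrupt the star positions. A small demonstration of what the call guards against, not part of the diff (locale availability varies by system):

    #include <clocale>
    #include <cstdio>

    int main() {
        float azi = 0.f;
        // With a decimal-comma locale active (e.g. "de_DE.UTF-8", if installed),
        // "%f" would read "12.5" as 12 and stop at the '.'.
        // Forcing LC_NUMERIC to "C" makes the dot the decimal point again:
        setlocale(LC_NUMERIC, "C");
        sscanf("12.5", " %f", &azi);
        printf("%f\n", azi);   // 12.500000
    }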
@@ -15,5 +15,6 @@
const float WORLD_SIZE = 10.0;
#define PI 3.14159265
#define PIf 3.14159265f
+#define GRAVITY_EARTH 9.80665f;

#endif
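One caution worth noting on the new define: the trailing semicolon is part of the macro body, so GRAVITY_EARTH expands cleanly only at the end of a statement. It happens to work at the two call sites this commit adds; a sketch of where it would not:

    #define GRAVITY_EARTH 9.80665f;   // trailing ';' is inside the macro, as in the hunk above

    float accel[3] = {1.f, 0.f, 0.f};

    void ok() {
        accel[0] *= GRAVITY_EARTH;    // expands to "*= 9.80665f;;" - extra empty statement, compiles
    }

    // float weight(float mass) {
    //     return mass * GRAVITY_EARTH * 0.5f;   // expands to "mass * 9.80665f; * 0.5f;" - syntax error
    // }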