mirror of https://github.com/overte-org/overte.git

Merge remote-tracking branch 'upstream/master'

commit a75f655e40

7 changed files with 78 additions and 58 deletions

@@ -437,15 +437,15 @@ float Audio::getInputLoudness() const {
 void Audio::render(int screenWidth, int screenHeight)
 {
     if (initialized) {
-        glLineWidth(3);
+        glLineWidth(2.0);
         glBegin(GL_LINES);
         glColor3f(1,1,1);
 
-        int startX = 50.0;
+        int startX = 20.0;
         int currentX = startX;
-        int topY = screenHeight - 90;
-        int bottomY = screenHeight - 50;
-        float frameWidth = 50.0;
+        int topY = screenHeight - 40;
+        int bottomY = screenHeight - 20;
+        float frameWidth = 20.0;
         float halfY = topY + ((bottomY - topY) / 2.0);
 
         //  draw the lines for the base of the ring buffer
@@ -485,10 +485,10 @@ void Audio::render(int screenWidth, int screenHeight)
         }
 
         glBegin(GL_QUADS);
-        glVertex2f(startX, topY + 5);
-        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, topY + 5);
-        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, bottomY - 5);
-        glVertex2f(startX, bottomY - 5);
+        glVertex2f(startX, topY + 2);
+        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, topY + 2);
+        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, bottomY - 2);
+        glVertex2f(startX, bottomY - 2);
         glEnd();
 
         if (audioData->averagedLatency == 0.0) audioData->averagedLatency = remainingBuffer + timeLeftInCurrentBuffer;
@@ -505,7 +505,7 @@ void Audio::render(int screenWidth, int screenHeight)
 
         char out[40];
         sprintf(out, "%3.0f\n", audioData->averagedLatency);
-        drawtext(startX + audioData->averagedLatency/AUDIO_CALLBACK_MSECS*frameWidth - 10, topY-10, 0.08, 0, 1, 0, out, 1,1,0);
+        drawtext(startX + audioData->averagedLatency/AUDIO_CALLBACK_MSECS*frameWidth - 10, topY-10, 0.10, 0, 1, 0, out, 1,1,0);
         //drawtext(startX + 0, topY-10, 0.08, 0, 1, 0, out, 1,1,0);
 
         //  Show a Cyan bar with the most recently measured jitter stdev
@@ -521,13 +521,10 @@ void Audio::render(int screenWidth, int screenHeight)
         glEnd();
 
         sprintf(out,"%3.1f\n", audioData->measuredJitter);
-        drawtext(startX + jitterPels - 5, topY-10, 0.08, 0, 1, 0, out, 0,1,1);
+        drawtext(startX + jitterPels - 5, topY-10, 0.10, 0, 1, 0, out, 0,1,1);
 
-        sprintf(out, "%3.1fms\n", JITTER_BUFFER_LENGTH_MSECS);
-        drawtext(startX - 10, bottomY + 20, 0.1, 0, 1, 0, out, 1, 0, 0);
-
         sprintf(out, "%hd samples\n", JITTER_BUFFER_SAMPLES);
-        drawtext(startX - 10, bottomY + 35, 0.1, 0, 1, 0, out, 1, 0, 0);
+        drawtext(startX - 10, bottomY + 15, 0.1, 0, 1, 0, out, 1, 0, 0);
     }
 }
 
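For context, the latency bar above maps milliseconds of buffered audio to pixels through the expression startX + msecs / AUDIO_CALLBACK_MSECS * frameWidth. A minimal standalone sketch of that mapping follows; the AUDIO_CALLBACK_MSECS value and the example input are illustrative assumptions, not values from this commit.

    // Sketch: convert buffered audio (milliseconds) into the right edge of the on-screen bar,
    // mirroring the startX + msecs / AUDIO_CALLBACK_MSECS * frameWidth expression in Audio::render().
    #include <cstdio>

    int main() {
        const float AUDIO_CALLBACK_MSECS = 23.2f;  // assumed duration of one audio callback
        const float frameWidth = 20.0f;            // pixels drawn per callback interval (new value)
        const float startX = 20.0f;                // left edge of the meter (new value)

        float bufferedMsecs = 52.3f;               // example: remainingBuffer + timeLeftInCurrentBuffer
        float barRightEdge = startX + bufferedMsecs / AUDIO_CALLBACK_MSECS * frameWidth;

        printf("%.1f ms buffered -> bar ends at x = %.1f px\n", bufferedMsecs, barRightEdge);
        return 0;
    }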
@@ -30,13 +30,14 @@ AudioData::~AudioData() {
 
 //  Take a pointer to the acquired microphone input samples and add procedural sounds
 void AudioData::addProceduralSounds(int16_t* inputBuffer, int numSamples) {
-    const float MAX_AUDIBLE_VELOCITY = 3.0;
+    const float MAX_AUDIBLE_VELOCITY = 6.0;
     const float MIN_AUDIBLE_VELOCITY = 0.1;
-    const float VOLUME = 200;
     float speed = glm::length(_lastVelocity);
+    float volume = 400 * (1.f - speed/MAX_AUDIBLE_VELOCITY);
+    //  Add a noise-modulated sinewave with volume that tapers off with speed increasing
     if ((speed > MIN_AUDIBLE_VELOCITY) && (speed < MAX_AUDIBLE_VELOCITY)) {
         for (int i = 0; i < numSamples; i++) {
-            inputBuffer[i] += (int16_t) ((randFloat() - 0.5f) * VOLUME * speed) ;
+            inputBuffer[i] += (int16_t) ((cosf((float)i / 8.f * speed) * randFloat()) * volume * speed) ;
         }
     }
 
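A condensed sketch of the new procedural-sound idea: a noise-modulated sine wave is mixed into the microphone samples, with a volume factor that falls off as speed approaches MAX_AUDIBLE_VELOCITY. The free function and the stand-in randFloat() below are illustrative assumptions, not the project's AudioData class.

    // Sketch of the "whoosh" added above: noise-modulated sine mixed into the input samples.
    #include <cmath>
    #include <cstdint>
    #include <cstdlib>

    static float randFloat() { return rand() / (float)RAND_MAX; }   // stand-in for the project's randFloat()

    void addWhooshSound(int16_t* samples, int numSamples, float speed) {
        const float MAX_AUDIBLE_VELOCITY = 6.0f;
        const float MIN_AUDIBLE_VELOCITY = 0.1f;
        float volume = 400.f * (1.f - speed / MAX_AUDIBLE_VELOCITY);   // volume factor shrinks as speed rises
        if (speed > MIN_AUDIBLE_VELOCITY && speed < MAX_AUDIBLE_VELOCITY) {
            for (int i = 0; i < numSamples; i++) {
                samples[i] += (int16_t)(std::cos((float)i / 8.f * speed) * randFloat() * volume * speed);
            }
        }
    }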
@@ -264,7 +264,7 @@ void Avatar::reset() {
 
 
 //  Update avatar head rotation with sensor data
-void Avatar::UpdateGyros(float frametime, SerialInterface* serialInterface, glm::vec3* gravity) {
+void Avatar::updateHeadFromGyros(float frametime, SerialInterface* serialInterface, glm::vec3* gravity) {
     float measuredPitchRate = 0.0f;
     float measuredRollRate = 0.0f;
     float measuredYawRate = 0.0f;
@@ -416,8 +416,8 @@ void Avatar::simulate(float deltaTime) {
         }
 
         //  If someone is near, damp velocity as a function of closeness
-        const float AVATAR_BRAKING_RANGE = 1.2f;
-        const float AVATAR_BRAKING_STRENGTH = 25.f;
+        const float AVATAR_BRAKING_RANGE = 1.6f;
+        const float AVATAR_BRAKING_STRENGTH = 35.f;
         if (_isMine && (_distanceToNearestAvatar < AVATAR_BRAKING_RANGE)) {
             _velocity *=
                 (1.f - deltaTime * AVATAR_BRAKING_STRENGTH *
@@ -954,7 +954,13 @@ void Avatar::renderHead(bool lookingInMirror) {
         glColor3f(0,0,0);
         glRotatef(_head.mouthPitch, 1, 0, 0);
         glRotatef(_head.mouthYaw, 0, 0, 1);
-        glScalef(_head.mouthWidth*(.7 + sqrt(_head.averageLoudness)/60.0), _head.mouthHeight*(1.0 + sqrt(_head.averageLoudness)/30.0), 1);
+        if (_head.averageLoudness > 1.f) {
+            glScalef(_head.mouthWidth * (.7f + sqrt(_head.averageLoudness) /60.f),
+                     _head.mouthHeight * (1.f + sqrt(_head.averageLoudness) /30.f), 1);
+        } else {
+            glScalef(_head.mouthWidth, _head.mouthHeight, 1);
+        }
+
         glutSolidCube(0.5);
         glPopMatrix();
 
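The mouth-scaling branch above adds a guard so that near-silent loudness values keep the neutral mouth size (the previous unconditional scale narrowed the width to 0.7x at zero loudness). A small sketch of that logic, with a hypothetical helper and struct used only for illustration:

    // Sketch of the loudness-driven mouth scale: apply the sqrt-based widening only
    // when averageLoudness exceeds 1, otherwise keep the neutral mouth size.
    #include <cmath>

    struct MouthScale { float width; float height; };

    MouthScale mouthScaleForLoudness(float mouthWidth, float mouthHeight, float averageLoudness) {
        if (averageLoudness > 1.f) {
            return { mouthWidth  * (0.7f + (float)std::sqrt(averageLoudness) / 60.f),
                     mouthHeight * (1.f  + (float)std::sqrt(averageLoudness) / 30.f) };
        }
        return { mouthWidth, mouthHeight };
    }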
@@ -82,8 +82,8 @@ public:
     Avatar* clone() const;
 
     void reset();
-    void UpdateGyros(float frametime, SerialInterface * serialInterface, glm::vec3 * gravity);
-
+
+    void updateHeadFromGyros(float frametime, SerialInterface * serialInterface, glm::vec3 * gravity);
     void setNoise (float mag) {_head.noise = mag;}
     void setScale(float s) {_head.scale = s; };
     void setRenderYaw(float y) {_renderYaw = y;}
@@ -80,7 +80,7 @@ void Oscilloscope::addSamples(unsigned ch, short const* data, unsigned n) {
     _arrWritePos[ch] = newWritePos;
 }
 
-void Oscilloscope::render() {
+void Oscilloscope::render(int x, int y) {
 
     if (! enabled) {
         return;
@@ -113,8 +113,9 @@ void Oscilloscope::render() {
         }
     }
 
+    glLineWidth(2.0);
     glPushMatrix();
-    glTranslatef(0.0f, _valHeight / 2.0f, 0.0f);
+    glTranslatef((float)x + 0.0f, (float)y + _valHeight / 2.0f, 0.0f);
     glScaled(1.0f, _valHeight / 32767.0f, 1.0f);
     glVertexPointer(2, GL_SHORT, 0, _arrVertices);
     glEnableClientState(GL_VERTEX_ARRAY);
@@ -36,7 +36,7 @@ public:
 
     void addSamples(unsigned ch, short const* data, unsigned n);
 
-    void render();
+    void render(int x, int y);
 
     void setLowpass(float w) { assert(w > 0.0f && w <= 1.0f); _valLowpass = w; }
     void setDownsampling(unsigned f) { assert(f > 0); _valDownsample = f; }
@@ -127,8 +127,6 @@ glm::vec3 box(WORLD_SIZE,WORLD_SIZE,WORLD_SIZE);
 
 VoxelSystem voxels;
 
-bool wantToKillLocalVoxels = false;
-
 Environment environment;
 
 
@@ -156,6 +154,8 @@ bool perfStatsOn = false; // Do we want to display perfStats?
 
 bool logOn = true; // Whether to show on-screen log
 
+bool wantToKillLocalVoxels = false;
+
 int noiseOn = 0; // Whether to add random noise
 float noise = 1.0; // Overall magnitude scaling for random noise levels
 
@@ -374,20 +374,29 @@ void reset_sensors() {
 //
 //  Using gyro data, update both view frustum and avatar head position
 //
-void updateAvatar(float frametime) {
-    float gyroPitchRate = serialPort.getRelativeValue(HEAD_PITCH_RATE);
-    float gyroYawRate = serialPort.getRelativeValue(HEAD_YAW_RATE );
-
-    myAvatar.UpdateGyros(frametime, &serialPort, &gravity);
-
+void updateAvatar(float deltaTime) {
+
+    //  Update my avatar's head position from gyros
+    myAvatar.updateHeadFromGyros(deltaTime, &serialPort, &gravity);
+
+    //  Grab latest readings from the gyros
+    float measuredYawRate, measuredPitchRate;
+    if (USING_INVENSENSE_MPU9150) {
+        measuredPitchRate = serialPort.getLastPitchRate();
+        measuredYawRate = serialPort.getLastYawRate();
+    } else {
+        measuredPitchRate = serialPort.getRelativeValue(HEAD_PITCH_RATE);
+        measuredYawRate = serialPort.getRelativeValue(HEAD_YAW_RATE);
+    }
+
     //  Update gyro-based mouse (X,Y on screen)
     const float MIN_MOUSE_RATE = 30.0;
     const float MOUSE_SENSITIVITY = 0.1f;
-    if (powf(gyroYawRate*gyroYawRate +
-             gyroPitchRate*gyroPitchRate, 0.5) > MIN_MOUSE_RATE)
+    if (powf(measuredYawRate * measuredYawRate +
+             measuredPitchRate * measuredPitchRate, 0.5) > MIN_MOUSE_RATE)
     {
-        headMouseX += gyroYawRate*MOUSE_SENSITIVITY;
-        headMouseY += gyroPitchRate*MOUSE_SENSITIVITY*(float)HEIGHT/(float)WIDTH;
+        headMouseX += measuredYawRate*MOUSE_SENSITIVITY;
+        headMouseY += measuredPitchRate*MOUSE_SENSITIVITY*(float)HEIGHT/(float)WIDTH;
     }
     headMouseX = max(headMouseX, 0);
     headMouseX = min(headMouseX, WIDTH);
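The head-mouse logic above applies a dead zone: the cursor moves only when the combined yaw/pitch gyro rate exceeds MIN_MOUSE_RATE, and is then clamped to the window. A minimal sketch of that idea; the function name, the WIDTH/HEIGHT values, and clamping both axes are assumptions for illustration.

    // Sketch of the gyro-driven head mouse: ignore small rates, then clamp to the window.
    #include <algorithm>
    #include <cmath>

    void updateHeadMouse(float& headMouseX, float& headMouseY,
                         float measuredYawRate, float measuredPitchRate) {
        const int   WIDTH = 1280, HEIGHT = 720;   // assumed window size
        const float MIN_MOUSE_RATE = 30.0f;       // dead zone: ignore small gyro rates
        const float MOUSE_SENSITIVITY = 0.1f;

        if (std::sqrt(measuredYawRate * measuredYawRate +
                      measuredPitchRate * measuredPitchRate) > MIN_MOUSE_RATE) {
            headMouseX += measuredYawRate * MOUSE_SENSITIVITY;
            headMouseY += measuredPitchRate * MOUSE_SENSITIVITY * (float)HEIGHT / (float)WIDTH;
        }
        headMouseX = std::min(std::max(headMouseX, 0.f), (float)WIDTH);
        headMouseY = std::min(std::max(headMouseY, 0.f), (float)HEIGHT);
    }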
@@ -397,26 +406,31 @@ void updateAvatar(float frametime) {
     //  Update head and body pitch and yaw based on measured gyro rates
     if (::gyroLook) {
         //  Yaw
-        const float MIN_YAW_RATE = 50;
-        const float YAW_SENSITIVITY = 1.0;
+        const float MIN_YAW_RATE = 20.f;
+        const float YAW_MAGNIFY = 3.0;
 
-        if (fabs(gyroYawRate) > MIN_YAW_RATE) {
-            float addToBodyYaw = (gyroYawRate > 0.f)
-                ? gyroYawRate - MIN_YAW_RATE : gyroYawRate + MIN_YAW_RATE;
+        if (fabs(measuredYawRate) > MIN_YAW_RATE) {
+            float addToBodyYaw = (measuredYawRate > 0.f)
+                ? measuredYawRate - MIN_YAW_RATE : measuredYawRate + MIN_YAW_RATE;
 
-            myAvatar.addBodyYaw(-addToBodyYaw * YAW_SENSITIVITY * frametime);
+            //  If we are rotating the body (render angle), move the head reverse amount to compensate
+            myAvatar.addBodyYaw(-addToBodyYaw * YAW_MAGNIFY * deltaTime);
+            myAvatar.addHeadYaw(addToBodyYaw * YAW_MAGNIFY * deltaTime);
         }
-        //  Pitch NOTE: PER - Need to make camera able to pitch first!
-        /*
-        const float MIN_PITCH_RATE = 50;
-        const float PITCH_SENSITIVITY = 1.0;
+        //  Pitch
+        const float MIN_PITCH_RATE = 20.f;
+        const float PITCH_MAGNIFY = 2.0;
 
-        if (fabs(gyroPitchRate) > MIN_PITCH_RATE) {
-            float addToBodyPitch = (gyroPitchRate > 0.f)
-                ? gyroPitchRate - MIN_PITCH_RATE : gyroPitchRate + MIN_PITCH_RATE;
+        if (fabs(measuredPitchRate) > MIN_PITCH_RATE) {
+            float addToBodyPitch = (measuredPitchRate > 0.f)
+                ? measuredPitchRate - MIN_PITCH_RATE : measuredPitchRate + MIN_PITCH_RATE;
 
-            myAvatar.addBodyPitch(addToBodyPitch * PITCH_SENSITIVITY * frametime);
-        */
+            myAvatar.setRenderPitch(myAvatar.getRenderPitch() + addToBodyPitch * PITCH_MAGNIFY * deltaTime);
+
         }
+        //  Always decay the render pitch, assuming that we are never going to want to permanently look up or down
+        const float RENDER_PITCH_DECAY = 1.0;
+        myAvatar.setRenderPitch(myAvatar.getRenderPitch() * (1.f - RENDER_PITCH_DECAY * deltaTime));
+    }
 
     //  Get audio loudness data from audio input device
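The render-pitch decay introduced above pulls the pitch offset back toward zero every frame, so looking up or down never sticks permanently. A tiny sketch of that per-frame decay, as a free function assumed here only for illustration:

    // Sketch of the render-pitch decay: the offset shrinks by RENDER_PITCH_DECAY * deltaTime each frame.
    float decayRenderPitch(float renderPitch, float deltaTime) {
        const float RENDER_PITCH_DECAY = 1.0f;    // per-second decay rate from the diff
        return renderPitch * (1.f - RENDER_PITCH_DECAY * deltaTime);
    }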
@@ -441,7 +455,6 @@ void updateAvatar(float frametime) {
     myAvatar.setCameraFarClip(::viewFrustum.getFarClip());
 
     AgentList* agentList = AgentList::getInstance();
-
     if (agentList->getOwnerID() != UNKNOWN_AGENT_ID) {
         // if I know my ID, send head/hand data to the avatar mixer and voxel server
         unsigned char broadcastString[200];
@@ -915,7 +928,7 @@ void displayOverlay() {
 
     #ifndef _WIN32
     audio.render(WIDTH, HEIGHT);
-    audioScope.render();
+    audioScope.render(20, HEIGHT - 200);
     #endif
 
     //noiseTest(WIDTH, HEIGHT);
@@ -1027,12 +1040,12 @@ void display(void)
         //float firstPersonDistance = 0.0f;
         //float firstPersonTightness = 100.0f;
 
-        float firstPersonPitch = 20.0f;
+        float firstPersonPitch = 20.0f + myAvatar.getRenderPitch();
         float firstPersonUpShift = 0.1f;
         float firstPersonDistance = 0.4f;
         float firstPersonTightness = 100.0f;
 
-        float thirdPersonPitch = 0.0f;
+        float thirdPersonPitch = 0.0f + myAvatar.getRenderPitch();
         float thirdPersonUpShift = -0.2f;
         float thirdPersonDistance = 1.2f;
         float thirdPersonTightness = 8.0f;
@@ -1098,12 +1111,14 @@ void display(void)
     myCamera.update( 1.f/FPS );
 
     //  Render anything (like HUD items) that we want to be in 3D but not in worldspace
+    /*
     const float HUD_Z_OFFSET = -5.f;
     glPushMatrix();
     glm::vec3 test(0.5, 0.5, 0.5);
     glTranslatef(1, 1, HUD_Z_OFFSET);
     drawVector(&test);
     glPopMatrix();
+    */
 
 
     //  Note: whichCamera is used to pick between the normal camera myCamera for our
@@ -1703,7 +1718,7 @@ void idle(void) {
         serialPort.readData();
     }
 
-    //  Sample hardware, update view frustum if needed, send avatar data to mixer/agents
+    //  Sample hardware, update view frustum if needed, and send avatar data to mixer/agents
    updateAvatar(deltaTime);
 
     // read incoming packets from network