Merge branch 'master' of https://github.com/worklist/hifi into render_voxels_optimization

ZappoMan 2013-05-09 10:28:05 -07:00
commit a08d89a074
12 changed files with 158 additions and 69 deletions

View file

@@ -1,3 +1,5 @@
+#!/usr/bin/env python
 #
 # gen_stars.py
 # interface
@@ -12,7 +14,7 @@ from random import random,randint
 from math import sqrt, hypot, atan2, pi, fmod, degrees
 from sys import argv,stderr
-hemisphere_only, equator, meridians= False, 1000, 1000
+hemisphere_only, equator, meridians= False, 0, 1000
 n_random = 100000
 if len(argv) > 1:
@@ -50,10 +52,13 @@ for i in range(n_random):
     g = max(0,min(255,w + randint(-20,60)))
     b = max(0,min(255,w + randint(-10,100)))
     # position
-    x,y,z = random()*2-1,random(),random()*2-1
-    if not hemisphere_only:
-        y = y*2-1
-    l = sqrt(x*x + y*y + z*z)
+    while True:
+        x,y,z = random()*2-1,random(),random()*2-1
+        if not hemisphere_only:
+            y = y*2-1
+        l = sqrt(x*x + y*y + z*z)
+        if l <= 1.0:
+            break
     x /= l; y /= l; z /= l
     xz = hypot(x,z)
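Note: the new loop is rejection sampling. Candidate points are drawn in a cube (a half-cube when hemisphere_only is set), kept only if they fall inside the unit ball, and then normalized onto the sphere, so the generated star directions stay uniformly distributed instead of clustering toward the cube's corners. A minimal C++ sketch of the same idea (function and variable names here are illustrative, not from the repo):

    #include <cmath>
    #include <random>

    // Illustrative sketch: draw a uniformly distributed direction on the unit sphere
    // by rejection sampling, mirroring the loop added to gen_stars.py.
    void randomUnitVector(std::mt19937& rng, float& x, float& y, float& z) {
        std::uniform_real_distribution<float> uniform(-1.f, 1.f);
        float l;
        do {
            x = uniform(rng);
            y = uniform(rng);
            z = uniform(rng);
            l = std::sqrt(x * x + y * y + z * z);
        } while (l > 1.f || l == 0.f);   // reject points outside the unit ball (or at the origin)
        x /= l; y /= l; z /= l;          // project the accepted point onto the sphere
    }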

View file

@@ -437,15 +437,15 @@ float Audio::getInputLoudness() const {
 void Audio::render(int screenWidth, int screenHeight)
 {
     if (initialized) {
-        glLineWidth(3);
+        glLineWidth(2.0);
         glBegin(GL_LINES);
         glColor3f(1,1,1);
-        int startX = 50.0;
+        int startX = 20.0;
         int currentX = startX;
-        int topY = screenHeight - 90;
-        int bottomY = screenHeight - 50;
-        float frameWidth = 50.0;
+        int topY = screenHeight - 40;
+        int bottomY = screenHeight - 20;
+        float frameWidth = 20.0;
         float halfY = topY + ((bottomY - topY) / 2.0);
         // draw the lines for the base of the ring buffer
@@ -485,10 +485,10 @@ void Audio::render(int screenWidth, int screenHeight)
         }
         glBegin(GL_QUADS);
-        glVertex2f(startX, topY + 5);
-        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, topY + 5);
-        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, bottomY - 5);
-        glVertex2f(startX, bottomY - 5);
+        glVertex2f(startX, topY + 2);
+        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, topY + 2);
+        glVertex2f(startX + (remainingBuffer + timeLeftInCurrentBuffer)/AUDIO_CALLBACK_MSECS*frameWidth, bottomY - 2);
+        glVertex2f(startX, bottomY - 2);
         glEnd();
         if (audioData->averagedLatency == 0.0) audioData->averagedLatency = remainingBuffer + timeLeftInCurrentBuffer;
@@ -505,7 +505,7 @@ void Audio::render(int screenWidth, int screenHeight)
         char out[40];
         sprintf(out, "%3.0f\n", audioData->averagedLatency);
-        drawtext(startX + audioData->averagedLatency/AUDIO_CALLBACK_MSECS*frameWidth - 10, topY-10, 0.08, 0, 1, 0, out, 1,1,0);
+        drawtext(startX + audioData->averagedLatency/AUDIO_CALLBACK_MSECS*frameWidth - 10, topY-10, 0.10, 0, 1, 0, out, 1,1,0);
         //drawtext(startX + 0, topY-10, 0.08, 0, 1, 0, out, 1,1,0);
         // Show a Cyan bar with the most recently measured jitter stdev
@@ -521,13 +521,10 @@ void Audio::render(int screenWidth, int screenHeight)
         glEnd();
         sprintf(out,"%3.1f\n", audioData->measuredJitter);
-        drawtext(startX + jitterPels - 5, topY-10, 0.08, 0, 1, 0, out, 0,1,1);
+        drawtext(startX + jitterPels - 5, topY-10, 0.10, 0, 1, 0, out, 0,1,1);
         sprintf(out, "%3.1fms\n", JITTER_BUFFER_LENGTH_MSECS);
-        drawtext(startX - 10, bottomY + 20, 0.1, 0, 1, 0, out, 1, 0, 0);
-        sprintf(out, "%hd samples\n", JITTER_BUFFER_SAMPLES);
-        drawtext(startX - 10, bottomY + 35, 0.1, 0, 1, 0, out, 1, 0, 0);
+        drawtext(startX - 10, bottomY + 15, 0.1, 0, 1, 0, out, 1, 0, 0);
     }
 }
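Note: the latency and jitter markers above all share one mapping from milliseconds to screen pixels, startX + ms / AUDIO_CALLBACK_MSECS * frameWidth, so one audio callback interval spans frameWidth pixels (now 20 instead of 50). A small sketch of that mapping, with an illustrative helper name rather than anything from the repo:

    // Illustrative helper mirroring the mapping used by the latency/jitter bars above:
    // one audio callback interval (callbackMsecs) maps to frameWidth pixels.
    float millisecondsToX(float ms, float startX, float frameWidth, float callbackMsecs) {
        return startX + ms / callbackMsecs * frameWidth;
    }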

View file

@@ -30,13 +30,14 @@ AudioData::~AudioData() {
 // Take a pointer to the acquired microphone input samples and add procedural sounds
 void AudioData::addProceduralSounds(int16_t* inputBuffer, int numSamples) {
-    const float MAX_AUDIBLE_VELOCITY = 3.0;
+    const float MAX_AUDIBLE_VELOCITY = 6.0;
     const float MIN_AUDIBLE_VELOCITY = 0.1;
-    const float VOLUME = 200;
     float speed = glm::length(_lastVelocity);
+    float volume = 400 * (1.f - speed/MAX_AUDIBLE_VELOCITY);
+    // Add a noise-modulated sinewave with volume that tapers off with speed increasing
     if ((speed > MIN_AUDIBLE_VELOCITY) && (speed < MAX_AUDIBLE_VELOCITY)) {
         for (int i = 0; i < numSamples; i++) {
-            inputBuffer[i] += (int16_t) ((randFloat() - 0.5f) * VOLUME * speed) ;
+            inputBuffer[i] += (int16_t) ((cosf((float)i / 8.f * speed) * randFloat()) * volume * speed) ;
         }
     }
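Note: the reworked procedural sound raises the tone's pitch with speed (the cosf argument grows faster per sample) while its volume tapers linearly to zero as speed approaches MAX_AUDIBLE_VELOCITY. A self-contained sketch of the same tapering, assuming a randFloat()-style helper that returns values in [0, 1) (names here are illustrative, not the repo's):

    #include <cmath>
    #include <cstdint>
    #include <cstdlib>

    // Illustrative: add a noise-modulated tone whose pitch rises with speed and
    // whose volume fades out as speed approaches maxAudibleVelocity.
    void addMotionSound(int16_t* buffer, int numSamples, float speed,
                        float minAudibleVelocity, float maxAudibleVelocity) {
        float volume = 400.f * (1.f - speed / maxAudibleVelocity);
        if (speed > minAudibleVelocity && speed < maxAudibleVelocity) {
            for (int i = 0; i < numSamples; i++) {
                float noise = (float)rand() / RAND_MAX;   // stand-in for randFloat()
                buffer[i] += (int16_t)(cosf((float)i / 8.f * speed) * noise * volume * speed);
            }
        }
    }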

View file

@@ -5,7 +5,6 @@
 // Created by Philip Rosedale on 9/11/12.
 // adapted by Jeffrey Ventrella
 // Copyright (c) 2013 Physical, Inc.. All rights reserved.
-//
 #include <glm/glm.hpp>
 #include <vector>
@@ -31,7 +30,7 @@ const float BODY_SPIN_FRICTION = 5.0;
 const float BODY_UPRIGHT_FORCE = 10.0;
 const float BODY_PITCH_WHILE_WALKING = 30.0;
 const float BODY_ROLL_WHILE_TURNING = 0.1;
-const float LIN_VEL_DECAY = 5.0;
+const float LIN_VEL_DECAY = 2.0;
 const float MY_HAND_HOLDING_PULL = 0.2;
 const float YOUR_HAND_HOLDING_PULL = 1.0;
 const float BODY_SPRING_DEFAULT_TIGHTNESS = 1500.0f;
@@ -265,7 +264,7 @@ void Avatar::reset() {
 // Update avatar head rotation with sensor data
-void Avatar::UpdateGyros(float frametime, SerialInterface* serialInterface, glm::vec3* gravity) {
+void Avatar::updateHeadFromGyros(float frametime, SerialInterface* serialInterface, glm::vec3* gravity) {
     float measuredPitchRate = 0.0f;
     float measuredRollRate = 0.0f;
     float measuredYawRate = 0.0f;
@@ -412,8 +411,8 @@ void Avatar::simulate(float deltaTime) {
     _velocity *= (1.0 - LIN_VEL_DECAY * deltaTime);
     // If someone is near, damp velocity as a function of closeness
-    const float AVATAR_BRAKING_RANGE = 1.2f;
-    const float AVATAR_BRAKING_STRENGTH = 25.f;
+    const float AVATAR_BRAKING_RANGE = 1.6f;
+    const float AVATAR_BRAKING_STRENGTH = 35.f;
     if (_isMine && (_distanceToNearestAvatar < AVATAR_BRAKING_RANGE)) {
         _velocity *=
             (1.f - deltaTime * AVATAR_BRAKING_STRENGTH *
@@ -950,7 +949,13 @@ void Avatar::renderHead(bool lookingInMirror) {
         glColor3f(0,0,0);
         glRotatef(_head.mouthPitch, 1, 0, 0);
         glRotatef(_head.mouthYaw, 0, 0, 1);
-        glScalef(_head.mouthWidth*(.7 + sqrt(_head.averageLoudness)/60.0), _head.mouthHeight*(1.0 + sqrt(_head.averageLoudness)/30.0), 1);
+        if (_head.averageLoudness > 1.f) {
+            glScalef(_head.mouthWidth * (.7f + sqrt(_head.averageLoudness) /60.f),
+                     _head.mouthHeight * (1.f + sqrt(_head.averageLoudness) /30.f), 1);
+        } else {
+            glScalef(_head.mouthWidth, _head.mouthHeight, 1);
+        }
         glutSolidCube(0.5);
         glPopMatrix();

View file

@@ -82,8 +82,8 @@ public:
     Avatar* clone() const;
     void reset();
-    void UpdateGyros(float frametime, SerialInterface * serialInterface, glm::vec3 * gravity);
+    void updateHeadFromGyros(float frametime, SerialInterface * serialInterface, glm::vec3 * gravity);
     void setNoise (float mag) {_head.noise = mag;}
     void setScale(float s) {_head.scale = s; };
     void setRenderYaw(float y) {_renderYaw = y;}
@@ -102,7 +102,10 @@ public:
     void setLeanSideways(float dist);
     void addLean(float x, float z);
     const glm::vec3& getHeadPosition() const ;
-    const glm::vec3& getJointPosition(AvatarJointID j) const { return _joint[j].position; };
+    //const glm::vec3& getJointPosition(AvatarJointID j) const { return _joint[j].position; };
+    const glm::vec3& getJointPosition(AvatarJointID j) const { return _joint[j].springyPosition; };
     const glm::vec3& getBodyUpDirection() const { return _orientation.getUp(); };
     float getSpeed() const { return _speed; };
     const glm::vec3& getVelocity() const { return _velocity; };

View file

@@ -15,5 +15,62 @@ AvatarRenderer::AvatarRenderer() {
 }
 // this method renders the avatar
-void AvatarRenderer::render(Avatar *avatar, bool lookingInMirror) {
-}
+void AvatarRenderer::render(Avatar *avatarToRender, bool lookingInMirror, glm::vec3 cameraPosition) {
+    avatar = avatarToRender;
+    /*
+    // show avatar position
+    glColor4f(0.5f, 0.5f, 0.5f, 0.6);
+    glPushMatrix();
+    glm::vec3 j( avatar->getJointPosition( AVATAR_JOINT_PELVIS ) );
+    glTranslatef(j.x, j.y, j.z);
+    glScalef(0.08, 0.08, 0.08);
+    glutSolidSphere(1, 10, 10);
+    glPopMatrix();
+    */
+    //renderDiskShadow(avatar->getJointPosition( AVATAR_JOINT_PELVIS ), glm::vec3(0.0f, 1.0f, 0.0f), 0.1f, 0.2f);
+    //renderBody();
+}
+
+void AvatarRenderer::renderBody() {
+    /*
+    // Render joint positions as spheres
+    for (int b = 0; b < NUM_AVATAR_JOINTS; b++) {
+        if (b != AVATAR_JOINT_HEAD_BASE) { // the head is rendered as a special case in "renderHead"
+            //show direction vectors of the bone orientation
+            //renderOrientationDirections(_joint[b].springyPosition, _joint[b].orientation, _joint[b].radius * 2.0);
+            glm::vec3 j( avatar->getJointPosition( AVATAR_JOINT_PELVIS ) );
+            glColor3fv(skinColor);
+            glPushMatrix();
+            glTranslatef(j.x, j.y, j.z);
+            glutSolidSphere(_joint[b].radius, 20.0f, 20.0f);
+            glPopMatrix();
+        }
+    }
+
+    // Render lines connecting the joint positions
+    glColor3f(0.4f, 0.5f, 0.6f);
+    glLineWidth(3.0);
+
+    for (int b = 1; b < NUM_AVATAR_JOINTS; b++) {
+        if (_joint[b].parent != AVATAR_JOINT_NULL)
+            if (b != AVATAR_JOINT_HEAD_TOP) {
+                glBegin(GL_LINE_STRIP);
+                glVertex3fv(&_joint[ _joint[ b ].parent ].springyPosition.x);
+                glVertex3fv(&_joint[ b ].springyPosition.x);
+                glEnd();
+            }
+    }
+    */
+}

View file

@@ -16,10 +16,12 @@ class AvatarRenderer {
 public:
     AvatarRenderer();
-    void render(Avatar *avatar, bool lookingInMirror);
+    void render(Avatar *avatarToRender, bool lookingInMirror, glm::vec3 cameraPosition );
 private:
+    Avatar *avatar;
+    void renderBody();
 };
 #endif

View file

@@ -80,7 +80,7 @@ void Oscilloscope::addSamples(unsigned ch, short const* data, unsigned n) {
     _arrWritePos[ch] = newWritePos;
 }
-void Oscilloscope::render() {
+void Oscilloscope::render(int x, int y) {
     if (! enabled) {
         return;
@@ -113,8 +113,9 @@ void Oscilloscope::render() {
         }
     }
+    glLineWidth(2.0);
     glPushMatrix();
-    glTranslatef(0.0f, _valHeight / 2.0f, 0.0f);
+    glTranslatef((float)x + 0.0f, (float)y + _valHeight / 2.0f, 0.0f);
     glScaled(1.0f, _valHeight / 32767.0f, 1.0f);
     glVertexPointer(2, GL_SHORT, 0, _arrVertices);
     glEnableClientState(GL_VERTEX_ARRAY);

View file

@@ -36,7 +36,7 @@ public:
     void addSamples(unsigned ch, short const* data, unsigned n);
-    void render();
+    void render(int x, int y);
     void setLowpass(float w) { assert(w > 0.0f && w <= 1.0f); _valLowpass = w; }
     void setDownsampling(unsigned f) { assert(f > 0); _valDownsample = f; }

View file

@@ -78,6 +78,7 @@
 #include "ViewFrustum.h"
 #include "HandControl.h"
+#include "AvatarRenderer.h"
 using namespace std;
@@ -115,6 +116,8 @@ Avatar myAvatar(true); // The rendered avatar of oneself
 Camera myCamera; // My view onto the world (sometimes on myself :)
 Camera viewFrustumOffsetCamera; // The camera we use to sometimes show the view frustum from an offset mode
+AvatarRenderer avatarRenderer;
 // Starfield information
 char starFile[] = "https://s3-us-west-1.amazonaws.com/highfidelity/stars.txt";
 char starCacheFile[] = "cachedStars.txt";
@@ -124,8 +127,6 @@ glm::vec3 box(WORLD_SIZE,WORLD_SIZE,WORLD_SIZE);
 VoxelSystem voxels;
-bool wantToKillLocalVoxels = false;
 Environment environment;
@@ -153,6 +154,8 @@ bool perfStatsOn = false; // Do we want to display perfStats?
 bool logOn = true; // Whether to show on-screen log
+bool wantToKillLocalVoxels = false;
 int noiseOn = 0; // Whether to add random noise
 float noise = 1.0; // Overall magnitude scaling for random noise levels
@@ -371,20 +374,29 @@ void reset_sensors() {
 //
 // Using gyro data, update both view frustum and avatar head position
 //
-void updateAvatar(float frametime) {
-    float gyroPitchRate = serialPort.getRelativeValue(HEAD_PITCH_RATE);
-    float gyroYawRate = serialPort.getRelativeValue(HEAD_YAW_RATE );
-    myAvatar.UpdateGyros(frametime, &serialPort, &gravity);
+void updateAvatar(float deltaTime) {
+    // Update my avatar's head position from gyros
+    myAvatar.updateHeadFromGyros(deltaTime, &serialPort, &gravity);
+
+    // Grab latest readings from the gyros
+    float measuredYawRate, measuredPitchRate;
+    if (USING_INVENSENSE_MPU9150) {
+        measuredPitchRate = serialPort.getLastPitchRate();
+        measuredYawRate = serialPort.getLastYawRate();
+    } else {
+        measuredPitchRate = serialPort.getRelativeValue(HEAD_PITCH_RATE);
+        measuredYawRate = serialPort.getRelativeValue(HEAD_YAW_RATE);
+    }
     // Update gyro-based mouse (X,Y on screen)
     const float MIN_MOUSE_RATE = 30.0;
     const float MOUSE_SENSITIVITY = 0.1f;
-    if (powf(gyroYawRate*gyroYawRate +
-             gyroPitchRate*gyroPitchRate, 0.5) > MIN_MOUSE_RATE)
+    if (powf(measuredYawRate * measuredYawRate +
+             measuredPitchRate * measuredPitchRate, 0.5) > MIN_MOUSE_RATE)
     {
-        headMouseX += gyroYawRate*MOUSE_SENSITIVITY;
-        headMouseY += gyroPitchRate*MOUSE_SENSITIVITY*(float)HEIGHT/(float)WIDTH;
+        headMouseX += measuredYawRate*MOUSE_SENSITIVITY;
+        headMouseY += measuredPitchRate*MOUSE_SENSITIVITY*(float)HEIGHT/(float)WIDTH;
     }
     headMouseX = max(headMouseX, 0);
     headMouseX = min(headMouseX, WIDTH);
@@ -394,26 +406,31 @@ void updateAvatar(float frametime) {
     // Update head and body pitch and yaw based on measured gyro rates
     if (::gyroLook) {
         // Yaw
-        const float MIN_YAW_RATE = 50;
-        const float YAW_SENSITIVITY = 1.0;
-        if (fabs(gyroYawRate) > MIN_YAW_RATE) {
-            float addToBodyYaw = (gyroYawRate > 0.f)
-                ? gyroYawRate - MIN_YAW_RATE : gyroYawRate + MIN_YAW_RATE;
-            myAvatar.addBodyYaw(-addToBodyYaw * YAW_SENSITIVITY * frametime);
+        const float MIN_YAW_RATE = 20.f;
+        const float YAW_MAGNIFY = 3.0;
+        if (fabs(measuredYawRate) > MIN_YAW_RATE) {
+            float addToBodyYaw = (measuredYawRate > 0.f)
+                ? measuredYawRate - MIN_YAW_RATE : measuredYawRate + MIN_YAW_RATE;
+            // If we are rotating the body (render angle), move the head reverse amount to compensate
+            myAvatar.addBodyYaw(-addToBodyYaw * YAW_MAGNIFY * deltaTime);
+            myAvatar.addHeadYaw(addToBodyYaw * YAW_MAGNIFY * deltaTime);
         }
-        // Pitch NOTE: PER - Need to make camera able to pitch first!
-        /*
-        const float MIN_PITCH_RATE = 50;
-        const float PITCH_SENSITIVITY = 1.0;
-        if (fabs(gyroPitchRate) > MIN_PITCH_RATE) {
-            float addToBodyPitch = (gyroPitchRate > 0.f)
-                ? gyroPitchRate - MIN_PITCH_RATE : gyroPitchRate + MIN_PITCH_RATE;
-            myAvatar.addBodyPitch(addToBodyPitch * PITCH_SENSITIVITY * frametime);
-        */
+        // Pitch
+        const float MIN_PITCH_RATE = 20.f;
+        const float PITCH_MAGNIFY = 2.0;
+        if (fabs(measuredPitchRate) > MIN_PITCH_RATE) {
+            float addToBodyPitch = (measuredPitchRate > 0.f)
+                ? measuredPitchRate - MIN_PITCH_RATE : measuredPitchRate + MIN_PITCH_RATE;
+            myAvatar.setRenderPitch(myAvatar.getRenderPitch() + addToBodyPitch * PITCH_MAGNIFY * deltaTime);
+        }
+        // Always decay the render pitch, assuming that we are never going to want to permanently look up or down
+        const float RENDER_PITCH_DECAY = 1.0;
+        myAvatar.setRenderPitch(myAvatar.getRenderPitch() * (1.f - RENDER_PITCH_DECAY * deltaTime));
     }
     // Get audio loudness data from audio input device
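Note: the gyro-look logic in the hunk above follows a dead-zone-plus-decay pattern: rates below MIN_YAW_RATE / MIN_PITCH_RATE are ignored, only the excess above the threshold is applied, and the render pitch relaxes back toward zero each frame via RENDER_PITCH_DECAY. A minimal C++ sketch of that pattern, with illustrative names rather than the repo's:

    #include <cmath>

    // Illustrative: apply a dead zone to a measured rate, keeping only the excess.
    float applyDeadZone(float rate, float minRate) {
        if (fabsf(rate) <= minRate) {
            return 0.f;
        }
        return (rate > 0.f) ? rate - minRate : rate + minRate;
    }

    // Illustrative: per-frame exponential decay toward zero; stable while decay * deltaTime < 1.
    float decayTowardZero(float value, float decay, float deltaTime) {
        return value * (1.f - decay * deltaTime);
    }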
@@ -438,7 +455,6 @@ void updateAvatar(float frametime) {
     myAvatar.setCameraFarClip(::viewFrustum.getFarClip());
     AgentList* agentList = AgentList::getInstance();
     if (agentList->getOwnerID() != UNKNOWN_AGENT_ID) {
         // if I know my ID, send head/hand data to the avatar mixer and voxel server
         unsigned char broadcastString[200];
@@ -912,7 +928,7 @@ void displayOverlay() {
 #ifndef _WIN32
     audio.render(WIDTH, HEIGHT);
-    audioScope.render();
+    audioScope.render(20, HEIGHT - 200);
 #endif
     //noiseTest(WIDTH, HEIGHT);
@@ -1024,12 +1040,12 @@ void display(void)
         //float firstPersonDistance = 0.0f;
         //float firstPersonTightness = 100.0f;
-        float firstPersonPitch = 20.0f;
+        float firstPersonPitch = 20.0f + myAvatar.getRenderPitch();
         float firstPersonUpShift = 0.1f;
         float firstPersonDistance = 0.4f;
         float firstPersonTightness = 100.0f;
-        float thirdPersonPitch = 0.0f;
+        float thirdPersonPitch = 0.0f + myAvatar.getRenderPitch();
         float thirdPersonUpShift = -0.2f;
         float thirdPersonDistance = 1.2f;
         float thirdPersonTightness = 8.0f;
@@ -1095,12 +1111,14 @@ void display(void)
         myCamera.update( 1.f/FPS );
         // Render anything (like HUD items) that we want to be in 3D but not in worldspace
+        /*
         const float HUD_Z_OFFSET = -5.f;
         glPushMatrix();
         glm::vec3 test(0.5, 0.5, 0.5);
         glTranslatef(1, 1, HUD_Z_OFFSET);
         drawVector(&test);
         glPopMatrix();
+        */
         // Note: whichCamera is used to pick between the normal camera myCamera for our
@@ -1700,7 +1718,7 @@ void idle(void) {
         serialPort.readData();
     }
-    // Sample hardware, update view frustum if needed, send avatar data to mixer/agents
+    // Sample hardware, update view frustum if needed, and send avatar data to mixer/agents
     updateAvatar(deltaTime);
     // read incoming packets from network