Merge remote-tracking branch 'upstream/master'
This commit is contained in: commit dec7e5921c
13 changed files with 644 additions and 210 deletions
@@ -31,6 +31,8 @@ const int HAND_TIMER_SLEEP_ITERATIONS = 50;
const float EVE_PELVIS_HEIGHT = 0.565925f;

const float AUDIO_INJECT_PROXIMITY = 0.4f;

bool stopReceiveAgentDataThread;
bool injectAudioThreadRunning = false;

@@ -43,20 +45,14 @@ void *receiveAgentData(void *args) {
unsigned char incomingPacket[MAX_PACKET_SIZE];

AgentList* agentList = AgentList::getInstance();
Agent* avatarMixer = NULL;

while (!::stopReceiveAgentDataThread) {
if (agentList->getAgentSocket().receive(&senderAddress, incomingPacket, &bytesReceived)) {
switch (incomingPacket[0]) {
case PACKET_HEADER_BULK_AVATAR_DATA:
// this is the positional data for other agents
// eve doesn't care about this for now, so let's just update the receive time for the
// avatar mixer - this makes sure it won't be killed during silent agent removal
avatarMixer = agentList->soloAgentOfType(AGENT_TYPE_AVATAR_MIXER);

if (avatarMixer) {
avatarMixer->setLastHeardMicrostamp(usecTimestampNow());
}
// pass that off to the agentList processBulkAgentData method
agentList->processBulkAgentData(&senderAddress, incomingPacket, bytesReceived);

break;
default:
@@ -94,6 +90,12 @@ void *injectAudio(void *args) {
return NULL;
}

void createAvatarDataForAgent(Agent* agent) {
if (!agent->getLinkedData()) {
agent->setLinkedData(new AvatarData());
}
}

int main(int argc, const char* argv[]) {
// new seed for random audio sleep times
srand(time(0));

@@ -133,13 +135,18 @@ int main(int argc, const char* argv[]) {
// read eve's audio data
AudioInjector eveAudioInjector("/etc/highfidelity/eve/resources/eve.raw");

// lower Eve's volume by setting the attentuation modifier (this is a value out of 255)
eveAudioInjector.setAttenuationModifier(190);

// register the callback for agent data creation
agentList->linkedDataCreateCallback = createAvatarDataForAgent;

unsigned char broadcastPacket[MAX_PACKET_SIZE];
broadcastPacket[0] = PACKET_HEADER_HEAD_DATA;

timeval thisSend;
double numMicrosecondsSleep = 0;

int numIterationsLeftBeforeAudioSend = 0;

pthread_t injectAudioThread;

int handStateTimer = 0;
@@ -162,24 +169,23 @@ int main(int argc, const char* argv[]) {
// use the UDPSocket instance attached to our agent list to send avatar data to mixer
agentList->getAgentSocket().send(avatarMixer->getActiveSocket(), broadcastPacket, packetPosition - broadcastPacket);
}

// temporarily disable Eve's audio sending until the file is actually available on EC2 box
if (numIterationsLeftBeforeAudioSend == 0) {
if (!::injectAudioThreadRunning) {
pthread_create(&injectAudioThread, NULL, injectAudio, (void*) &eveAudioInjector);

if (!::injectAudioThreadRunning) {
// enumerate the other agents to decide if one is close enough that eve should talk
for (AgentList::iterator agent = agentList->begin(); agent != agentList->end(); agent++) {
AvatarData* avatarData = (AvatarData*) agent->getLinkedData();

numIterationsLeftBeforeAudioSend = randIntInRange(MIN_ITERATIONS_BETWEEN_AUDIO_SENDS,
MAX_ITERATIONS_BETWEEN_AUDIO_SENDS);
if (avatarData) {
glm::vec3 tempVector = eve.getPosition() - avatarData->getPosition();
float squareDistance = glm::dot(tempVector, tempVector);

if (squareDistance <= AUDIO_INJECT_PROXIMITY) {
pthread_create(&injectAudioThread, NULL, injectAudio, (void*) &eveAudioInjector);
}
}
}
} else {
numIterationsLeftBeforeAudioSend--;
}

// sleep for the correct amount of time to have data send be consistently timed
if ((numMicrosecondsSleep = (DATA_SEND_INTERVAL_MSECS * 1000) - (usecTimestampNow() - usecTimestamp(&thisSend))) > 0) {
usleep(numMicrosecondsSleep);
}

// simulate the effect of pressing and un-pressing the mouse button/pad
handStateTimer++;
@@ -189,6 +195,11 @@ int main(int argc, const char* argv[]) {
eve.setHandState(0);
} else if (handStateTimer >= ITERATIONS_BEFORE_HAND_GRAB + HAND_GRAB_DURATION_ITERATIONS + HAND_TIMER_SLEEP_ITERATIONS) {
handStateTimer = 0;
}

// sleep for the correct amount of time to have data send be consistently timed
if ((numMicrosecondsSleep = (DATA_SEND_INTERVAL_MSECS * 1000) - (usecTimestampNow() - usecTimestamp(&thisSend))) > 0) {
usleep(numMicrosecondsSleep);
}
}
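Editor's note: the hunk above gates eve's audio playback on how close another avatar is, using a squared distance (glm::dot of the offset with itself) so no square root is needed; the code then compares that squared value against AUDIO_INJECT_PROXIMITY. A minimal standalone sketch of the same pattern, with an illustrative threshold expressed as a plain distance and squared before the comparison, is:

#include <glm/glm.hpp>

// Illustrative threshold in meters; the diff above compares the squared
// distance directly against AUDIO_INJECT_PROXIMITY (0.4f).
const float TRIGGER_DISTANCE = 0.4f;

bool isWithinTriggerDistance(const glm::vec3& a, const glm::vec3& b) {
    glm::vec3 offset = a - b;                        // vector between the two positions
    float squareDistance = glm::dot(offset, offset); // |offset|^2, avoids sqrtf
    return squareDistance <= TRIGGER_DISTANCE * TRIGGER_DISTANCE;
}
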
Binary file not shown.
Binary file not shown.
@@ -71,11 +71,6 @@ float flangeIntensity = 0;
float flangeRate = 0;
float flangeWeight = 0;

int16_t *walkingSoundArray;
int walkingSoundSamples;
int samplesLeftForWalk = 0;
int16_t *sampleWalkPointer;

timeval firstPlaybackTimer;
int packetsReceivedThisPlayback = 0;
float usecsAtStartup = 0;

@@ -357,10 +352,6 @@ bool Audio::getMixerLoopbackFlag() {
return audioData->mixerLoopbackFlag;
}

void Audio::setWalkingState(bool newWalkState) {
audioData->playWalkSound = newWalkState;
}

/**
* Initialize portaudio and start an audio stream.
* Should be called at the beginning of program exection.
@@ -368,23 +359,7 @@ void Audio::setWalkingState(bool newWalkState) {
* @return Returns true if successful or false if an error occurred.
Use Audio::getError() to retrieve the error code.
*/
Audio::Audio(Oscilloscope *s, Avatar *linkedAvatar)
{
// read the walking sound from the raw file and store it
// in the in memory array

switchToResourcesParentIfRequired();
FILE *soundFile = fopen("resources/audio/walking.raw", "r");

// get length of file:
std::fseek(soundFile, 0, SEEK_END);
walkingSoundSamples = std::ftell(soundFile) / sizeof(int16_t);
walkingSoundArray = new int16_t[walkingSoundSamples];
std::rewind(soundFile);

std::fread(walkingSoundArray, sizeof(int16_t), walkingSoundSamples, soundFile);
std::fclose(soundFile);

Audio::Audio(Oscilloscope* s, Avatar* linkedAvatar) {
paError = Pa_Initialize();
if (paError != paNoError) goto error;
@@ -28,8 +28,6 @@ public:
float getInputLoudness() const;
void updateMixerParams(in_addr_t mixerAddress, in_port_t mixerPort);

void setWalkingState(bool newWalkState);

void setLastAcceleration(glm::vec3 a) { audioData->setLastAcceleration(a); };
void setLastVelocity(glm::vec3 v) { audioData->setLastVelocity(v); };
@@ -38,7 +38,6 @@ class AudioData {
float lastInputLoudness;

bool mixerLoopbackFlag;
bool playWalkSound;

// Added avatar acceleration and velocity for procedural effects sounds from client
void setLastVelocity(glm::vec3 v) { _lastVelocity = v; };
@@ -11,6 +11,7 @@
#include <lodepng.h>
#include <SharedUtil.h>
#include "Avatar.h"
#include "Head.h"
#include "Log.h"
#include "ui/TextRenderer.h"
#include <AgentList.h>

@@ -23,12 +24,11 @@ const bool BALLS_ON = false;
const bool USING_AVATAR_GRAVITY = true;
const float GRAVITY_SCALE = 10.0f;
const float BOUNCE = 0.3f;
const float DECAY = 0.1;
const float THRUST_MAG = 1200.0;
const float YAW_MAG = 500.0;
const float BODY_SPIN_FRICTION = 5.0;
const float BODY_UPRIGHT_FORCE = 10.0;
const float BODY_PITCH_WHILE_WALKING = 30.0;
const float BODY_PITCH_WHILE_WALKING = 40.0;
const float BODY_ROLL_WHILE_TURNING = 0.1;
const float VELOCITY_DECAY = 5.0;
const float MY_HAND_HOLDING_PULL = 0.2;
@@ -52,6 +52,8 @@ const float AVATAR_BRAKING_STRENGTH = 30.0f;

float skinColor [] = {1.0, 0.84, 0.66};
float lightBlue [] = {0.7, 0.8, 1.0};

/*
float browColor [] = {210.0/255.0, 105.0/255.0, 30.0/255.0};
float mouthColor[] = {1, 0, 0};

@@ -64,16 +66,19 @@ float MouthWidthChoices[3] = {0.5, 0.77, 0.3};
float browWidth = 0.8;
float browThickness = 0.16;

bool usingBigSphereCollisionTest = true;

char iris_texture_file[] = "resources/images/green_eye.png";
//char iris_texture_file[] = "resources/images/green_eye.png";
*/
bool usingBigSphereCollisionTest = true;

float chatMessageScale = 0.0015;
float chatMessageHeight = 0.45;

/*
vector<unsigned char> iris_texture;
unsigned int iris_texture_width = 512;
unsigned int iris_texture_height = 256;
*/

Avatar::Avatar(bool isMine) {
@@ -104,43 +109,8 @@ Avatar::Avatar(bool isMine) {

for (int i = 0; i < MAX_DRIVE_KEYS; i++) _driveKeys[i] = false;

_head.pupilSize = 0.10;
_head.interPupilDistance = 0.6;
_head.interBrowDistance = 0.75;
_head.nominalPupilSize = 0.10;
_head.pitchRate = 0.0;
_head.yawRate = 0.0;
_head.rollRate = 0.0;
_head.eyebrowPitch[0] = -30;
_head.eyebrowPitch[1] = -30;
_head.eyebrowRoll [0] = 20;
_head.eyebrowRoll [1] = -20;
_head.mouthPitch = 0;
_head.mouthYaw = 0;
_head.mouthWidth = 1.0;
_head.mouthHeight = 0.2;
_head.eyeballPitch[0] = 0;
_head.eyeballPitch[1] = 0;
_head.eyeballScaleX = 1.2;
_head.eyeballScaleY = 1.5;
_head.eyeballScaleZ = 1.0;
_head.eyeballYaw[0] = 0;
_head.eyeballYaw[1] = 0;
_head.pitchTarget = 0;
_head.yawTarget = 0;
_head.noiseEnvelope = 1.0;
_head.pupilConverge = 10.0;
_head.leanForward = 0.0;
_head.leanSideways = 0.0;
_head.eyeContact = 1;
_head.eyeContactTarget = LEFT_EYE;
_head.scale = 1.0;
_head.audioAttack = 0.0;
_head.averageLoudness = 0.0;
_head.lastLoudness = 0.0;
_head.browAudioLift = 0.0;
_head.noise = 0;
_head.returnSpringScale = 1.0;
_head.initialize();

_movedHandOffset = glm::vec3(0.0f, 0.0f, 0.0f);
_renderYaw = 0.0;
_renderPitch = 0.0;

@@ -153,6 +123,7 @@ Avatar::Avatar(bool isMine) {

_avatarTouch.setReachableRadius(0.6);

/*
if (iris_texture.size() == 0) {
switchToResourcesParentIfRequired();
unsigned error = lodepng::decode(iris_texture, iris_texture_width, iris_texture_height, iris_texture_file);
@@ -160,6 +131,7 @@ Avatar::Avatar(bool isMine) {
printLog("error %u: %s\n", error, lodepng_error_text(error));
}
}
*/

if (BALLS_ON) { _balls = new Balls(100); }
else { _balls = NULL; }

@@ -239,7 +211,8 @@ Avatar::Avatar(const Avatar &otherAvatar) {
_distanceToNearestAvatar = otherAvatar._distanceToNearestAvatar;

initializeSkeleton();


/*
if (iris_texture.size() == 0) {
switchToResourcesParentIfRequired();
unsigned error = lodepng::decode(iris_texture, iris_texture_width, iris_texture_height, iris_texture_file);

@@ -247,6 +220,7 @@ Avatar::Avatar(const Avatar &otherAvatar) {
printLog("error %u: %s\n", error, lodepng_error_text(error));
}
}
*/
}

Avatar::~Avatar() {
@@ -452,8 +426,52 @@ void Avatar::simulate(float deltaTime) {
}
}



// Get head position data from network for other people
if (!_isMine) {
_head.leanSideways = getHeadLeanSideways();
_head.leanForward = getHeadLeanForward();
}

//apply the head lean values to the springy position...
if (fabs(_head.leanSideways + _head.leanForward) > 0.0f) {
glm::vec3 headLean =
_orientation.getRight() * _head.leanSideways +
_orientation.getFront() * _head.leanForward;

// this is not a long-term solution, but it works ok for initial purposes of making the avatar lean
_joint[ AVATAR_JOINT_TORSO ].springyPosition += headLean * 0.1f;
_joint[ AVATAR_JOINT_CHEST ].springyPosition += headLean * 0.4f;
_joint[ AVATAR_JOINT_NECK_BASE ].springyPosition += headLean * 0.7f;
_joint[ AVATAR_JOINT_HEAD_BASE ].springyPosition += headLean * 1.0f;

_joint[ AVATAR_JOINT_LEFT_COLLAR ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_LEFT_SHOULDER ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_LEFT_ELBOW ].springyPosition += headLean * 0.2f;
_joint[ AVATAR_JOINT_LEFT_WRIST ].springyPosition += headLean * 0.1f;
_joint[ AVATAR_JOINT_LEFT_FINGERTIPS ].springyPosition += headLean * 0.0f;

_joint[ AVATAR_JOINT_RIGHT_COLLAR ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_RIGHT_SHOULDER ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_RIGHT_ELBOW ].springyPosition += headLean * 0.2f;
_joint[ AVATAR_JOINT_RIGHT_WRIST ].springyPosition += headLean * 0.1f;
_joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].springyPosition += headLean * 0.0f;
}


// update head state
updateHead(deltaTime);
_head.setPositionRotationAndScale(
_joint[ AVATAR_JOINT_HEAD_BASE ].springyPosition,
glm::vec3(_headYaw, _headPitch, _headRoll),
_joint[ AVATAR_JOINT_HEAD_BASE ].radius
);

_head.setAudioLoudness(_audioLoudness);
_head.setSkinColor(glm::vec3(skinColor[0], skinColor[1], skinColor[2]));
_head.simulate(deltaTime, _isMine);

// use speed and angular velocity to determine walking vs. standing
if (_speed + fabs(_bodyYawDelta) > 0.2) {
@@ -463,7 +481,6 @@ void Avatar::simulate(float deltaTime) {
}
}


void Avatar::updateHandMovementAndTouching(float deltaTime) {

// reset hand and arm positions according to hand movement

@@ -540,7 +557,6 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {
_avatarTouch.setHoldingHands(false);
}


//if holding hands, apply the appropriate forces
if (_avatarTouch.getHoldingHands()) {
_joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].position +=

@@ -549,8 +565,8 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {
- _joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].position
) * 0.5f;

if (distanceBetweenOurHands > 0.2) {
float force = 700.0f * deltaTime;
if (distanceBetweenOurHands > 0.3) {
float force = 10.0f * deltaTime;
if (force > 1.0f) {force = 1.0f;}
_velocity += vectorFromMyHandToYourHand * force;
}
@@ -579,37 +595,7 @@ void Avatar::updateHandMovementAndTouching(float deltaTime) {

void Avatar::updateHead(float deltaTime) {

// Get head position data from network for other people
if (!_isMine) {
_head.leanSideways = getHeadLeanSideways();
_head.leanForward = getHeadLeanForward();
}

//apply the head lean values to the springy position...
if (fabs(_head.leanSideways + _head.leanForward) > 0.0f) {
glm::vec3 headLean =
_orientation.getRight() * _head.leanSideways +
_orientation.getFront() * _head.leanForward;

// this is not a long-term solution, but it works ok for initial purposes of making the avatar lean
_joint[ AVATAR_JOINT_TORSO ].springyPosition += headLean * 0.1f;
_joint[ AVATAR_JOINT_CHEST ].springyPosition += headLean * 0.4f;
_joint[ AVATAR_JOINT_NECK_BASE ].springyPosition += headLean * 0.7f;
_joint[ AVATAR_JOINT_HEAD_BASE ].springyPosition += headLean * 1.0f;

_joint[ AVATAR_JOINT_LEFT_COLLAR ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_LEFT_SHOULDER ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_LEFT_ELBOW ].springyPosition += headLean * 0.2f;
_joint[ AVATAR_JOINT_LEFT_WRIST ].springyPosition += headLean * 0.1f;
_joint[ AVATAR_JOINT_LEFT_FINGERTIPS ].springyPosition += headLean * 0.0f;

_joint[ AVATAR_JOINT_RIGHT_COLLAR ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_RIGHT_SHOULDER ].springyPosition += headLean * 0.6f;
_joint[ AVATAR_JOINT_RIGHT_ELBOW ].springyPosition += headLean * 0.2f;
_joint[ AVATAR_JOINT_RIGHT_WRIST ].springyPosition += headLean * 0.1f;
_joint[ AVATAR_JOINT_RIGHT_FINGERTIPS ].springyPosition += headLean * 0.0f;
}

/*
// Decay head back to center if turned on
if (_isMine && _returnHeadToCenter) {
// Decay back toward center

@@ -712,6 +698,7 @@ void Avatar::updateHead(float deltaTime) {
const float AUDIO_AVERAGING_SECS = 0.05;
_head.averageLoudness = (1.f - deltaTime / AUDIO_AVERAGING_SECS) * _head.averageLoudness +
(deltaTime / AUDIO_AVERAGING_SECS) * _audioLoudness;
*/
}
@@ -894,7 +881,7 @@ void Avatar::render(bool lookingInMirror, glm::vec3 cameraPosition) {

// render head
if (_displayingHead) {
renderHead(lookingInMirror);
_head.render(lookingInMirror, _bodyYaw);
}

// if this is my avatar, then render my interactions with the other avatar

@@ -952,7 +939,10 @@ void Avatar::render(bool lookingInMirror, glm::vec3 cameraPosition) {
}
}


void Avatar::renderHead(bool lookingInMirror) {
/*
int side = 0;

glEnable(GL_DEPTH_TEST);

@@ -1075,7 +1065,7 @@ void Avatar::renderHead(bool lookingInMirror) {
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
gluQuadricOrientation(_sphere, GLU_OUTSIDE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, iris_texture_width, iris_texture_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &iris_texture[0]);
//glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, iris_texture_width, iris_texture_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &iris_texture[0]);
}

glPushMatrix();

@@ -1121,8 +1111,10 @@ void Avatar::renderHead(bool lookingInMirror) {


glPopMatrix();
*/
}


void Avatar::setHandMovementValues(glm::vec3 handOffset) {
_movedHandOffset = handOffset;
}
@@ -1479,11 +1471,6 @@ void Avatar::renderBody() {
}
}

void Avatar::SetNewHeadTarget(float pitch, float yaw) {
_head.pitchTarget = pitch;
_head.yawTarget = yaw;
}

//
// Process UDP interface data from Android transmitter or Google Glass
//

@@ -1522,12 +1509,12 @@ void Avatar::processTransmitterData(unsigned char* packetData, int numBytes) {
// If first packet received, note time, turn head spring return OFF, get start rotation
gettimeofday(&_transmitterTimer, NULL);
if (deviceType == DEVICE_GLASS) {
setHeadReturnToCenter(true);
setHeadSpringScale(10.f);
_head.setReturnToCenter(true);
_head.setSpringScale(10.f);
printLog("Using Google Glass to drive head, springs ON.\n");

} else {
setHeadReturnToCenter(false);
_head.setReturnToCenter(false);
printLog("Using Transmitter %s to drive head, springs OFF.\n", device);

}

@@ -1536,6 +1523,7 @@ void Avatar::processTransmitterData(unsigned char* packetData, int numBytes) {

_transmitterInitialReading = glm::vec3(rot3, rot2, rot1);
}

const int TRANSMITTER_COUNT = 100;
if (_transmitterPackets % TRANSMITTER_COUNT == 0) {
// Every 100 packets, record the observed Hz of the transmitter data
@@ -18,8 +18,7 @@
#include "InterfaceConfig.h"
#include "SerialInterface.h"
#include "Balls.h"

enum eyeContactTargets {LEFT_EYE, RIGHT_EYE, MOUTH};
#include "Head.h"

enum DriveKeys
{

@@ -82,10 +81,8 @@ public:
Avatar* clone() const;

void reset();

void updateHeadFromGyros(float frametime, SerialInterface * serialInterface, glm::vec3 * gravity);
void setNoise (float mag) {_head.noise = mag;}
void setScale(float s) {_head.scale = s; };
void setRenderYaw(float y) {_renderYaw = y;}
void setRenderPitch(float p) {_renderPitch = p;}
float getRenderYaw() {return _renderYaw;}

@@ -124,11 +121,6 @@ public:
void updateArmIKAndConstraints( float deltaTime );
void setDisplayingHead( bool displayingHead );

float getAverageLoudness() {return _head.averageLoudness;};
void setAverageLoudness(float al) {_head.averageLoudness = al;};

void SetNewHeadTarget(float, float);

// Set what driving keys are being pressed to control thrust levels
void setDriveKeys(int key, bool val) { _driveKeys[key] = val; };
bool getDriveKeys(int key) { return _driveKeys[key]; };
@@ -165,48 +157,7 @@ private:
bool isCollidable; // when false, the joint position will not register a collision
};

struct AvatarHead
{
float pitchRate;
float yawRate;
float rollRate;
float noise;
float eyeballPitch[2];
float eyeballYaw [2];
float eyebrowPitch[2];
float eyebrowRoll [2];
float eyeballScaleX;
float eyeballScaleY;
float eyeballScaleZ;
float interPupilDistance;
float interBrowDistance;
float nominalPupilSize;
float pupilSize;
float mouthPitch;
float mouthYaw;
float mouthWidth;
float mouthHeight;
float leanForward;
float leanSideways;
float pitchTarget;
float yawTarget;
float noiseEnvelope;
float pupilConverge;
float scale;
int eyeContact;
float browAudioLift;
eyeContactTargets eyeContactTarget;

// Sound loudness information
float lastLoudness;
float averageLoudness;
float audioAttack;

// Strength of return springs
float returnSpringScale;
};

AvatarHead _head;
Head _head;
bool _isMine;
glm::vec3 _TEST_bigSpherePosition;
float _TEST_bigSphereRadius;
@@ -239,7 +190,6 @@ private:
Balls* _balls;
AvatarTouch _avatarTouch;
bool _displayingHead; // should be false if in first-person view
bool _returnHeadToCenter;
float _distanceToNearestAvatar; // How close is the nearest avatar?
glm::vec3 _gravity;

@@ -256,11 +206,6 @@ private:
void updateCollisionWithSphere( glm::vec3 position, float radius, float deltaTime );
void applyCollisionWithOtherAvatar( Avatar * other, float deltaTime );
void setHeadFromGyros(glm::vec3 * eulerAngles, glm::vec3 * angularVelocity, float deltaTime, float smoothingTime);
void setHeadSpringScale(float s) { _head.returnSpringScale = s; }

// Do you want head to try to return to center (depends on interface detected)
void setHeadReturnToCenter(bool r) { _returnHeadToCenter = r; };
const bool getHeadReturnToCenter() const { return _returnHeadToCenter; };
};

#endif
interface/src/Head.cpp (new file, 378 lines)
@@ -0,0 +1,378 @@
//
// Head.cpp
// hifi
//
// Created by Jeffrey on May, 10, 2013
// Copyright (c) 2013 Physical, Inc.. All rights reserved.
//

#include "Head.h"
#include <vector>
#include <SharedUtil.h>
#include <lodepng.h>

using namespace std;

const float HEAD_MOTION_DECAY = 0.1;

float _browColor [] = {210.0/255.0, 105.0/255.0, 30.0/255.0};
float _mouthColor[] = {1, 0, 0};

float _BrowRollAngle [5] = {0, 15, 30, -30, -15};
float _BrowPitchAngle[3] = {-70, -60, -50};
float _eyeColor [3] = {1,1,1};

float _MouthWidthChoices[3] = {0.5, 0.77, 0.3};

float _browWidth = 0.8;
float _browThickness = 0.16;

char iris_texture_file[] = "resources/images/green_eye.png";

vector<unsigned char> iris_texture;
unsigned int iris_texture_width = 512;
unsigned int iris_texture_height = 256;

Head::Head() {
if (iris_texture.size() == 0) {
switchToResourcesParentIfRequired();
unsigned error = lodepng::decode(iris_texture, iris_texture_width, iris_texture_height, iris_texture_file);
if (error != 0) {
printLog("error %u: %s\n", error, lodepng_error_text(error));
}
}
}

void Head::initialize() {

audioLoudness = 0.0;
skinColor = glm::vec3(0.0f, 0.0f, 0.0f);
position = glm::vec3(0.0f, 0.0f, 0.0f);
yaw = 0.0f;
pitch = 0.0f;
roll = 0.0f;
pupilSize = 0.10;
interPupilDistance = 0.6;
interBrowDistance = 0.75;
nominalPupilSize = 0.10;
pitchRate = 0.0;
yawRate = 0.0;
rollRate = 0.0;
eyebrowPitch[0] = -30;
eyebrowPitch[1] = -30;
eyebrowRoll [0] = 20;
eyebrowRoll [1] = -20;
mouthPitch = 0;
mouthYaw = 0;
mouthWidth = 1.0;
mouthHeight = 0.2;
eyeballPitch[0] = 0;
eyeballPitch[1] = 0;
eyeballScaleX = 1.2;
eyeballScaleY = 1.5;
eyeballScaleZ = 1.0;
eyeballYaw[0] = 0;
eyeballYaw[1] = 0;
pitchTarget = 0;
yawTarget = 0;
noiseEnvelope = 1.0;
pupilConverge = 10.0;
leanForward = 0.0;
leanSideways = 0.0;
eyeContact = 1;
eyeContactTarget = LEFT_EYE;
scale = 1.0;
audioAttack = 0.0;
averageLoudness = 0.0;
lastLoudness = 0.0;
browAudioLift = 0.0;
noise = 0;
returnSpringScale = 1.0;
sphere = NULL;
}

void Head::setPositionRotationAndScale(glm::vec3 p, glm::vec3 r, float s) {

position = p;
scale = s;
yaw = r.x;
pitch = r.y;
roll = r.z;
}

void Head::setSkinColor(glm::vec3 c) {
skinColor = c;
}

void Head::setAudioLoudness(float loudness) {
audioLoudness = loudness;
}


void Head::setNewTarget(float pitch, float yaw) {
pitchTarget = pitch;
yawTarget = yaw;
}

void Head::simulate(float deltaTime, bool isMine) {

// Decay head back to center if turned on
if (isMine && returnHeadToCenter) {
// Decay back toward center
pitch *= (1.0f - HEAD_MOTION_DECAY * returnSpringScale * 2 * deltaTime);
yaw *= (1.0f - HEAD_MOTION_DECAY * returnSpringScale * 2 * deltaTime);
roll *= (1.0f - HEAD_MOTION_DECAY * returnSpringScale * 2 * deltaTime);
}

// For invensense gyro, decay only slightly when roughly centered
if (isMine) {
const float RETURN_RANGE = 15.0;
const float RETURN_STRENGTH = 2.0;
if (fabs(pitch) < RETURN_RANGE) { pitch *= (1.0f - RETURN_STRENGTH * deltaTime); }
if (fabs(yaw) < RETURN_RANGE) { yaw *= (1.0f - RETURN_STRENGTH * deltaTime); }
if (fabs(roll) < RETURN_RANGE) { roll *= (1.0f - RETURN_STRENGTH * deltaTime); }
}

if (noise) {
// Move toward new target
pitch += (pitchTarget - pitch) * 10 * deltaTime; // (1.f - DECAY*deltaTime)*Pitch + ;
yaw += (yawTarget - yaw ) * 10 * deltaTime; // (1.f - DECAY*deltaTime);
roll *= 1.f - (HEAD_MOTION_DECAY * deltaTime);
}

leanForward *= (1.f - HEAD_MOTION_DECAY * 30 * deltaTime);
leanSideways *= (1.f - HEAD_MOTION_DECAY * 30 * deltaTime);

// Update where the avatar's eyes are
//
// First, decide if we are making eye contact or not
if (randFloat() < 0.005) {
eyeContact = !eyeContact;
eyeContact = 1;
if (!eyeContact) {
// If we just stopped making eye contact,move the eyes markedly away
eyeballPitch[0] = eyeballPitch[1] = eyeballPitch[0] + 5.0 + (randFloat() - 0.5) * 10;
eyeballYaw [0] = eyeballYaw [1] = eyeballYaw [0] + 5.0 + (randFloat() - 0.5) * 5;
} else {
// If now making eye contact, turn head to look right at viewer
setNewTarget(0,0);
}
}

const float DEGREES_BETWEEN_VIEWER_EYES = 3;
const float DEGREES_TO_VIEWER_MOUTH = 7;

if (eyeContact) {
// Should we pick a new eye contact target?
if (randFloat() < 0.01) {
// Choose where to look next
if (randFloat() < 0.1) {
eyeContactTarget = MOUTH;
} else {
if (randFloat() < 0.5) eyeContactTarget = LEFT_EYE; else eyeContactTarget = RIGHT_EYE;
}
}
// Set eyeball pitch and yaw to make contact
float eye_target_yaw_adjust = 0;
float eye_target_pitch_adjust = 0;
if (eyeContactTarget == LEFT_EYE) eye_target_yaw_adjust = DEGREES_BETWEEN_VIEWER_EYES;
if (eyeContactTarget == RIGHT_EYE) eye_target_yaw_adjust = -DEGREES_BETWEEN_VIEWER_EYES;
if (eyeContactTarget == MOUTH) eye_target_pitch_adjust = DEGREES_TO_VIEWER_MOUTH;

eyeballPitch[0] = eyeballPitch[1] = -pitch + eye_target_pitch_adjust;
eyeballYaw [0] = eyeballYaw [1] = yaw + eye_target_yaw_adjust;
}

if (noise)
{
pitch += (randFloat() - 0.5) * 0.2 * noiseEnvelope;
yaw += (randFloat() - 0.5) * 0.3 *noiseEnvelope;
//PupilSize += (randFloat() - 0.5) * 0.001*NoiseEnvelope;

if (randFloat() < 0.005) mouthWidth = _MouthWidthChoices[rand()%3];

if (!eyeContact) {
if (randFloat() < 0.01) eyeballPitch[0] = eyeballPitch[1] = (randFloat() - 0.5) * 20;
if (randFloat() < 0.01) eyeballYaw[0] = eyeballYaw[1] = (randFloat()- 0.5) * 10;
}

if ((randFloat() < 0.005) && (fabs(pitchTarget - pitch) < 1.0) && (fabs(yawTarget - yaw) < 1.0)) {
setNewTarget((randFloat()-0.5) * 20.0, (randFloat()-0.5) * 45.0);
}

if (0) {

// Pick new target
pitchTarget = (randFloat() - 0.5) * 45;
yawTarget = (randFloat() - 0.5) * 22;
}
if (randFloat() < 0.01)
{
eyebrowPitch[0] = eyebrowPitch[1] = _BrowPitchAngle[rand()%3];
eyebrowRoll [0] = eyebrowRoll[1] = _BrowRollAngle[rand()%5];
eyebrowRoll [1] *=-1;
}
}

// Update audio trailing average for rendering facial animations
const float AUDIO_AVERAGING_SECS = 0.05;
averageLoudness = (1.f - deltaTime / AUDIO_AVERAGING_SECS) * averageLoudness +
(deltaTime / AUDIO_AVERAGING_SECS) * audioLoudness;
}


void Head::render(bool lookingInMirror, float bodyYaw) {

int side = 0;

glEnable(GL_DEPTH_TEST);
glEnable(GL_RESCALE_NORMAL);

glPushMatrix();

glTranslatef(position.x, position.y, position.z);

glScalef(scale, scale, scale);

if (lookingInMirror) {
glRotatef(bodyYaw - yaw, 0, 1, 0);
glRotatef(pitch, 1, 0, 0);
glRotatef(-roll, 0, 0, 1);
} else {
glRotatef(bodyYaw + yaw, 0, 1, 0);
glRotatef(pitch, 1, 0, 0);
glRotatef(roll, 0, 0, 1);
}

//glScalef(2.0, 2.0, 2.0);
glColor3f(skinColor.x, skinColor.y, skinColor.z);

glutSolidSphere(1, 30, 30);

// Ears
glPushMatrix();
glTranslatef(1.0, 0, 0);
for(side = 0; side < 2; side++) {
glPushMatrix();
glScalef(0.3, 0.65, .65);
glutSolidSphere(0.5, 30, 30);
glPopMatrix();
glTranslatef(-2.0, 0, 0);
}
glPopMatrix();

// Update audio attack data for facial animation (eyebrows and mouth)
audioAttack = 0.9 * audioAttack + 0.1 * fabs(audioLoudness - lastLoudness);
lastLoudness = audioLoudness;

const float BROW_LIFT_THRESHOLD = 100;
if (audioAttack > BROW_LIFT_THRESHOLD)
browAudioLift += sqrt(audioAttack) / 1000.0;

browAudioLift *= .90;

// Render Eyebrows
glPushMatrix();
glTranslatef(-interBrowDistance / 2.0,0.4,0.45);
for(side = 0; side < 2; side++) {
glColor3fv(_browColor);
glPushMatrix();
glTranslatef(0, 0.35 + browAudioLift, 0);
glRotatef(eyebrowPitch[side]/2.0, 1, 0, 0);
glRotatef(eyebrowRoll[side]/2.0, 0, 0, 1);
glScalef(_browWidth, _browThickness, 1);
glutSolidCube(0.5);
glPopMatrix();
glTranslatef(interBrowDistance, 0, 0);
}
glPopMatrix();

// Mouth
glPushMatrix();
glTranslatef(0,-0.35,0.75);
glColor3f(0,0,0);
glRotatef(mouthPitch, 1, 0, 0);
glRotatef(mouthYaw, 0, 0, 1);
if (averageLoudness > 1.f) {
glScalef(mouthWidth * (.7f + sqrt(averageLoudness) /60.f),
mouthHeight * (1.f + sqrt(averageLoudness) /30.f), 1);
} else {
glScalef(mouthWidth, mouthHeight, 1);
}

glutSolidCube(0.5);
glPopMatrix();

glTranslatef(0, 1.0, 0);

glTranslatef(-interPupilDistance/2.0,-0.68,0.7);
// Right Eye
glRotatef(-10, 1, 0, 0);
glColor3fv(_eyeColor);
glPushMatrix();
{
glTranslatef(interPupilDistance/10.0, 0, 0.05);
glRotatef(20, 0, 0, 1);
glScalef(eyeballScaleX, eyeballScaleY, eyeballScaleZ);
glutSolidSphere(0.25, 30, 30);
}
glPopMatrix();

// Right Pupil
if (sphere == NULL) {
sphere = gluNewQuadric();
gluQuadricTexture(sphere, GL_TRUE);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
gluQuadricOrientation(sphere, GLU_OUTSIDE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, iris_texture_width, iris_texture_height, 0, GL_RGBA, GL_UNSIGNED_BYTE, &iris_texture[0]);
}

glPushMatrix();
{
glRotatef(eyeballPitch[1], 1, 0, 0);
glRotatef(eyeballYaw[1] + yaw + pupilConverge, 0, 1, 0);
glTranslatef(0,0,.35);
glRotatef(-75,1,0,0);
glScalef(1.0, 0.4, 1.0);

glEnable(GL_TEXTURE_2D);
gluSphere(sphere, pupilSize, 15, 15);
glDisable(GL_TEXTURE_2D);
}

glPopMatrix();
// Left Eye
glColor3fv(_eyeColor);
glTranslatef(interPupilDistance, 0, 0);
glPushMatrix();
{
glTranslatef(-interPupilDistance/10.0, 0, .05);
glRotatef(-20, 0, 0, 1);
glScalef(eyeballScaleX, eyeballScaleY, eyeballScaleZ);
glutSolidSphere(0.25, 30, 30);
}
glPopMatrix();
// Left Pupil
glPushMatrix();
{
glRotatef(eyeballPitch[0], 1, 0, 0);
glRotatef(eyeballYaw[0] + yaw - pupilConverge, 0, 1, 0);
glTranslatef(0, 0, .35);
glRotatef(-75, 1, 0, 0);
glScalef(1.0, 0.4, 1.0);

glEnable(GL_TEXTURE_2D);
gluSphere(sphere, pupilSize, 15, 15);
glDisable(GL_TEXTURE_2D);
}
glPopMatrix();

glPopMatrix();

}
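Editor's note: the trailing-average update at the end of Head::simulate above is an exponential moving average whose blend factor is deltaTime / AUDIO_AVERAGING_SECS. A small self-contained sketch of the same update rule (the function wrapper and the clamp are illustrative additions, not part of the diff):

// Exponential moving average of loudness, mirroring the Head::simulate update above.
float updateAverageLoudness(float average, float current, float deltaTime) {
    const float AUDIO_AVERAGING_SECS = 0.05f;
    float alpha = deltaTime / AUDIO_AVERAGING_SECS;   // fraction of the new sample to blend in
    if (alpha > 1.0f) { alpha = 1.0f; }               // clamp for very long frames
    return (1.0f - alpha) * average + alpha * current;
}
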
interface/src/Head.h (new file, 93 lines)
@@ -0,0 +1,93 @@
//
// Head.h
// hifi
//
// Created by Jeffrey on May, 10, 2013
// Copyright (c) 2013 Physical, Inc.. All rights reserved.
//

#ifndef hifi_Head_h
#define hifi_Head_h

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <AvatarData.h>
#include "world.h"
#include "InterfaceConfig.h"
#include "SerialInterface.h"

enum eyeContactTargets {LEFT_EYE, RIGHT_EYE, MOUTH};

class Head {
public:
Head();

void initialize();
void simulate(float deltaTime, bool isMine);
void setPositionRotationAndScale(glm::vec3 position, glm::vec3 rotation, float scale);
void setSkinColor(glm::vec3 color);
void setAudioLoudness(float loudness);
void render(bool lookingInMirror, float bodyYaw);
void setNewTarget(float, float);
void setSpringScale(float s) { returnSpringScale = s; }

// Do you want head to try to return to center (depends on interface detected)
void setReturnToCenter(bool r) { returnHeadToCenter = r; }
const bool getReturnToCenter() const { return returnHeadToCenter; }

float getAverageLoudness() {return averageLoudness;};
void setAverageLoudness(float al) { averageLoudness = al;};

//private:
// I am making these public for now - just to get the code moved over quickly!

bool returnHeadToCenter;
float audioLoudness;
glm::vec3 skinColor;
glm::vec3 position;
glm::vec3 rotation;
float yaw;
float pitch;
float roll;
float pitchRate;
float yawRate;
float rollRate;
float noise;
float eyeballPitch[2];
float eyeballYaw [2];
float eyebrowPitch[2];
float eyebrowRoll [2];
float eyeballScaleX;
float eyeballScaleY;
float eyeballScaleZ;
float interPupilDistance;
float interBrowDistance;
float nominalPupilSize;
float pupilSize;
float mouthPitch;
float mouthYaw;
float mouthWidth;
float mouthHeight;
float leanForward;
float leanSideways;
float pitchTarget;
float yawTarget;
float noiseEnvelope;
float pupilConverge;
float scale;
int eyeContact;
float browAudioLift;
eyeContactTargets eyeContactTarget;

// Sound loudness information
float lastLoudness;
float averageLoudness;
float audioAttack;

GLUquadric* sphere;

// Strength of return springs
float returnSpringScale;
};

#endif
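Editor's note: the Avatar.cpp hunks earlier in this diff show how the new Head class is driven each frame. A condensed, hypothetical usage sketch assembled from those calls (variable names are placeholders, not a verbatim excerpt of Avatar.cpp):

#include <glm/glm.hpp>
#include "Head.h"

Head head;   // in Avatar.cpp this is the _head member
// head.initialize(); is called once, in the Avatar constructor

void simulateHeadOneFrame(float deltaTime, bool isMine,
                          const glm::vec3& headBasePosition, float headBaseRadius,
                          float headYaw, float headPitch, float headRoll,
                          float audioLoudness) {
    // rotation is packed as (yaw, pitch, roll), matching Head::setPositionRotationAndScale
    head.setPositionRotationAndScale(headBasePosition,
                                     glm::vec3(headYaw, headPitch, headRoll),
                                     headBaseRadius);
    head.setAudioLoudness(audioLoudness);
    head.setSkinColor(glm::vec3(1.0f, 0.84f, 0.66f)); // skinColor[] from Avatar.cpp
    head.simulate(deltaTime, isMine);
}

// Later, in the render pass (see Avatar::render above):
// head.render(lookingInMirror, bodyYaw);
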
@@ -102,6 +102,13 @@ int screenHeight = 800;
int fullscreen = 0;
float aspectRatio = 1.0f;

bool USING_MOUSE_VIEW_SHIFT = false;
float MOUSE_VIEW_SHIFT_RATE = 40.0f;
float MOUSE_VIEW_SHIFT_YAW_MARGIN = (float)(WIDTH * 0.2f);
float MOUSE_VIEW_SHIFT_PITCH_MARGIN = (float)(HEIGHT * 0.2f);
float MOUSE_VIEW_SHIFT_YAW_LIMIT = 45.0;
float MOUSE_VIEW_SHIFT_PITCH_LIMIT = 30.0;

//CameraMode defaultCameraMode = CAMERA_MODE_FIRST_PERSON;
CameraMode defaultCameraMode = CAMERA_MODE_THIRD_PERSON;

@@ -206,7 +213,6 @@ double elapsedTime;
timeval applicationStartupTime;
bool justStarted = true;


// Every second, check the frame rates and other stuff
void Timer(int extra) {
gettimeofday(&timerEnd, NULL);

@@ -370,9 +376,7 @@ void sendVoxelEditMessage(PACKET_HEADER header, VoxelDetail& detail) {
}
}

//
// Using gyro data, update both view frustum and avatar head position
//
void updateAvatar(float deltaTime) {

// Update my avatar's head position from gyros
@@ -409,7 +413,34 @@ void updateAvatar(float deltaTime) {
const float RENDER_PITCH_MULTIPLY = 4.f;
myAvatar.setRenderPitch((1.f - renderPitchSpring * deltaTime) * myAvatar.getRenderPitch() +
renderPitchSpring * deltaTime * -myAvatar.getHeadPitch() * RENDER_PITCH_MULTIPLY);

}


if (USING_MOUSE_VIEW_SHIFT)
{
//make it so that when your mouse hits the edge of the screen, the camera shifts
float rightBoundary = (float)WIDTH - MOUSE_VIEW_SHIFT_YAW_MARGIN;
float bottomBoundary = (float)HEIGHT - MOUSE_VIEW_SHIFT_PITCH_MARGIN;

if (mouseX > rightBoundary) {
float f = (mouseX - rightBoundary) / ( (float)WIDTH - rightBoundary);
mouseViewShiftYaw += MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
if (mouseViewShiftYaw > MOUSE_VIEW_SHIFT_YAW_LIMIT) { mouseViewShiftYaw = MOUSE_VIEW_SHIFT_YAW_LIMIT; }
} else if (mouseX < MOUSE_VIEW_SHIFT_YAW_MARGIN) {
float f = 1.0 - (mouseX / MOUSE_VIEW_SHIFT_YAW_MARGIN);
mouseViewShiftYaw -= MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
if (mouseViewShiftYaw < -MOUSE_VIEW_SHIFT_YAW_LIMIT) { mouseViewShiftYaw = -MOUSE_VIEW_SHIFT_YAW_LIMIT; }
}
if (mouseY < MOUSE_VIEW_SHIFT_PITCH_MARGIN) {
float f = 1.0 - (mouseY / MOUSE_VIEW_SHIFT_PITCH_MARGIN);
mouseViewShiftPitch += MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
if ( mouseViewShiftPitch > MOUSE_VIEW_SHIFT_PITCH_LIMIT ) { mouseViewShiftPitch = MOUSE_VIEW_SHIFT_PITCH_LIMIT; }
}
else if (mouseY > bottomBoundary) {
float f = (mouseY - bottomBoundary) / ((float)HEIGHT - bottomBoundary);
mouseViewShiftPitch -= MOUSE_VIEW_SHIFT_RATE * f * deltaTime;
if (mouseViewShiftPitch < -MOUSE_VIEW_SHIFT_PITCH_LIMIT) { mouseViewShiftPitch = -MOUSE_VIEW_SHIFT_PITCH_LIMIT; }
}
}

if (OculusManager::isConnected()) {
@@ -1032,7 +1063,7 @@ void display(void)
myCamera.setDistance (1.5f);
myCamera.setTightness (8.0f);
myCamera.setTargetPosition(myAvatar.getHeadPosition());
myCamera.setTargetRotation(myAvatar.getBodyYaw(), 0.0f, 0.0f);
myCamera.setTargetRotation(myAvatar.getBodyYaw() - mouseViewShiftYaw, mouseViewShiftPitch, 0.0f);
}

// important...

@@ -1540,9 +1571,6 @@ void specialkey(int k, int x, int y) {
if (glutGetModifiers() == GLUT_ACTIVE_SHIFT) myAvatar.setDriveKeys(RIGHT, 1);
else myAvatar.setDriveKeys(ROT_RIGHT, 1);
}
#ifndef _WIN32
audio.setWalkingState(true);
#endif
}
}

@@ -1800,6 +1828,8 @@ void idle(void) {
// walking triggers the handControl to stop
if (myAvatar.getMode() == AVATAR_MODE_WALKING) {
handControl.stop();
mouseViewShiftYaw *= 0.9;
mouseViewShiftPitch *= 0.9;
}

// Read serial port interface devices

@@ -1836,7 +1866,6 @@ void idle(void) {
glutPostRedisplay();
lastTimeIdle = check;
}

}

void reshape(int width, int height) {
@@ -262,8 +262,21 @@ ViewFrustum::location VoxelNode::inFrustum(const ViewFrustum& viewFrustum) const

float VoxelNode::distanceToCamera(const ViewFrustum& viewFrustum) const {
glm::vec3 center = _box.getCenter() * (float)TREE_SCALE;
float distanceToVoxelCenter = sqrtf(powf(viewFrustum.getPosition().x - center.x, 2) +
powf(viewFrustum.getPosition().y - center.y, 2) +
powf(viewFrustum.getPosition().z - center.z, 2));
glm::vec3 temp = viewFrustum.getPosition() - center;
float distanceSquared = glm::dot(temp, temp);
float distanceToVoxelCenter = sqrtf(distanceSquared);
return distanceToVoxelCenter;
}

float VoxelNode::distanceSquareToPoint(const glm::vec3& point) const {
glm::vec3 temp = point - _box.getCenter();
float distanceSquare = glm::dot(temp, temp);
return distanceSquare;
}

float VoxelNode::distanceToPoint(const glm::vec3& point) const {
glm::vec3 temp = point - _box.getCenter();
float distanceSquare = glm::dot(temp, temp);
float distance = sqrtf(distanceSquare);
return distance;
}
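Editor's note: the VoxelNode hunk above replaces three powf calls with a dot product of the offset with itself, which is exactly the squared distance; distanceSquareToPoint then lets callers skip the sqrtf entirely when they only need to compare distances. A small sketch of the identity, assuming glm:

#include <cmath>
#include <glm/glm.hpp>

// For v = a - b, glm::dot(v, v) == (a.x-b.x)^2 + (a.y-b.y)^2 + (a.z-b.z)^2,
// so both formulations in the hunk above compute the same value.
float distanceSquared(const glm::vec3& a, const glm::vec3& b) {
    glm::vec3 v = a - b;
    return glm::dot(v, v);
}

float distance(const glm::vec3& a, const glm::vec3& b) {
    return sqrtf(distanceSquared(a, b));   // take the root only when the true distance is needed
}
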
@@ -61,6 +61,11 @@ public:
bool isInView(const ViewFrustum& viewFrustum) const;
ViewFrustum::location inFrustum(const ViewFrustum& viewFrustum) const;
float distanceToCamera(const ViewFrustum& viewFrustum) const;

// points are assumed to be in Voxel Coordinates (not TREE_SCALE'd)
float distanceSquareToPoint(const glm::vec3& point) const; // when you don't need the actual distance, use this.
float distanceToPoint(const glm::vec3& point) const;

bool isLeaf() const { return _childCount == 0; }
void printDebugDetails(const char* label) const;
bool isDirty() const { return _isDirty; };