Fixes per review

commit 78bd5ab40f
parent 1b039c3755

6 changed files with 42 additions and 45 deletions
Application.cpp

@@ -295,17 +295,22 @@ void Application::paintGL() {
 
     } else if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {
         _myCamera.setTargetPosition(_myAvatar.getSpringyHeadPosition());
-        _myCamera.setTargetRotation(_myAvatar.getBodyYaw() - 180.0f, 0.0f, 0.0f);
+        _myCamera.setTargetRotation(_myAvatar.getBodyYaw() - 180.0f,
+                                    0.0f,
+                                    0.0f);
 
     } else {
         if (_myCamera.getMode() == CAMERA_MODE_FIRST_PERSON) {
             _myCamera.setTargetPosition(_myAvatar.getSpringyHeadPosition());
             _myCamera.setTargetRotation(_myAvatar.getAbsoluteHeadYaw(),
-                                        _myAvatar.getAbsoluteHeadPitch(), 0.0f);
+                                        _myAvatar.getAbsoluteHeadPitch(),
+                                        0.0f);
 
         } else if (_myCamera.getMode() == CAMERA_MODE_THIRD_PERSON) {
             _myCamera.setTargetPosition(_myAvatar.getHeadPosition());
-            _myCamera.setTargetRotation(_myAvatar.getBodyYaw(), 0.0f, 0.0f);
+            _myCamera.setTargetRotation(_myAvatar.getBodyYaw(),
+                                        0.0f,
+                                        0.0f);
         }
     }
 
@@ -776,7 +781,7 @@ void Application::idle() {
     float deltaTime = 1.f/_fps;
 
     // Use Transmitter Hand to move hand if connected, else use mouse
-    if (_myAvatar.transmitterV2IsConnected()) {
+    if (_myAvatar.isTransmitterV2Connected()) {
         const float HAND_FORCE_SCALING = 0.05f;
         const float* handAcceleration = _myAvatar.getTransmitterHandLastAcceleration();
         _myAvatar.setHandMovementValues(glm::vec3(-handAcceleration[0] * HAND_FORCE_SCALING,

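Note (illustrative, not part of this commit): the setHandMovementValues() call above is cut off at the hunk boundary after the X component. A minimal sketch of the kind of mapping it performs, scaling a raw three-axis hand acceleration into a hand-movement vector; the Y/Z components and their signs are assumptions, not taken from the codebase.

    #include <glm/glm.hpp>

    // Sketch only: follows the visible X component of the call above;
    // the remaining components and their signs are assumed.
    glm::vec3 handMovementFromAcceleration(const float* handAcceleration) {
        const float HAND_FORCE_SCALING = 0.05f;   // same constant as in idle()
        return glm::vec3(-handAcceleration[0] * HAND_FORCE_SCALING,
                         -handAcceleration[1] * HAND_FORCE_SCALING,
                         -handAcceleration[2] * HAND_FORCE_SCALING);
    }
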
@@ -1678,7 +1683,7 @@ void Application::displayOverlay() {
     if (_displayLevels) _serialPort.renderLevels(_glWidget->width(), _glWidget->height());
 
     // Show hand transmitter data if detected
-    if (_myAvatar.transmitterV2IsConnected()) {
+    if (_myAvatar.isTransmitterV2Connected()) {
        _myAvatar.transmitterV2RenderLevels(_glWidget->width(), _glWidget->height());
    }
    // Display stats and log text onscreen

@@ -2029,19 +2034,11 @@ void* Application::networkReceive(void* args) {
 
        switch (app->_incomingPacket[0]) {
            case PACKET_HEADER_TRANSMITTER_DATA_V1:
                // Process UDP packets that are sent to the client from local sensor devices
+               // V1 = android app, or the Google Glass
                app->_myAvatar.processTransmitterData(app->_incomingPacket, bytesReceived);
                break;
            case PACKET_HEADER_TRANSMITTER_DATA_V2:
-               /*
-               float rotationRates[3];
-               float accelerations[3];
-
-               memcpy(rotationRates, app->_incomingPacket + 2, sizeof(rotationRates));
-               memcpy(accelerations, app->_incomingPacket + 3 + sizeof(rotationRates), sizeof(accelerations));
-
-               printf("Acceleration: %f, %f, %f\n", accelerations[0], accelerations[1], accelerations[2]);
-               */
+               // V2 = IOS transmitter app
                app->_myAvatar.processTransmitterDataV2(app->_incomingPacket, bytesReceived);
-
                break;

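Note (illustrative, not part of this commit): the commented-out block deleted above documented the layout it was peeking at — three rotation-rate floats starting at byte 2 and three acceleration floats after a further one-byte gap. A standalone sketch of that parse with a length check; whether processTransmitterDataV2() uses exactly these offsets is an assumption carried over from the deleted debug code.

    #include <cstring>
    #include <cstdio>

    // Sketch only: offsets come from the debug code removed in this hunk.
    void debugPrintTransmitterV2(const unsigned char* packet, int numBytes) {
        float rotationRates[3];
        float accelerations[3];
        if (numBytes < (int) (3 + sizeof(rotationRates) + sizeof(accelerations))) {
            printf("Transmitter V2 packet too short (%d bytes).\n", numBytes);
            return;
        }
        memcpy(rotationRates, packet + 2, sizeof(rotationRates));
        memcpy(accelerations, packet + 3 + sizeof(rotationRates), sizeof(accelerations));
        printf("Rotation rates: %f, %f, %f\n", rotationRates[0], rotationRates[1], rotationRates[2]);
        printf("Acceleration:   %f, %f, %f\n", accelerations[0], accelerations[1], accelerations[2]);
    }
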
Audio.cpp

@@ -87,9 +87,9 @@ int audioCallback (const void* inputBuffer,
     Application* interface = (Application*) QCoreApplication::instance();
     Avatar* interfaceAvatar = interface->getAvatar();
 
-    int16_t *inputLeft = ((int16_t **) inputBuffer)[0];
-    int16_t *outputLeft = ((int16_t **) outputBuffer)[0];
-    int16_t *outputRight = ((int16_t **) outputBuffer)[1];
+    int16_t* inputLeft = ((int16_t**) inputBuffer)[0];
+    int16_t* outputLeft = ((int16_t**) outputBuffer)[0];
+    int16_t* outputRight = ((int16_t**) outputBuffer)[1];
 
     // Add Procedural effects to input samples
     parentAudio->addProceduralSounds(inputLeft, BUFFER_LENGTH_SAMPLES);

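Note (illustrative, not part of this commit): the casts above treat inputBuffer and outputBuffer as arrays of per-channel sample pointers, which is how PortAudio presents buffers when the stream's sample format includes the paNonInterleaved flag (presumably how this stream is opened, given the [0]/[1] channel indexing). A minimal sketch of the same access pattern:

    #include <cstdint>

    // Sketch only: with non-interleaved buffers the void* arguments point to an
    // array of per-channel buffers, so index 0 is left and index 1 is right.
    // Here a mono input is copied to both output channels.
    void copyInputToBothOutputs(const void* inputBuffer, void* outputBuffer, unsigned long frameCount) {
        const int16_t* inputLeft = ((int16_t**) inputBuffer)[0];
        int16_t* outputLeft  = ((int16_t**) outputBuffer)[0];
        int16_t* outputRight = ((int16_t**) outputBuffer)[1];
        for (unsigned long i = 0; i < frameCount; i++) {
            outputLeft[i]  = inputLeft[i];
            outputRight[i] = inputLeft[i];
        }
    }
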
@@ -99,7 +99,7 @@ int audioCallback (const void* inputBuffer,
     parentAudio->_scope->addSamples(2, outputRight, PACKET_LENGTH_SAMPLES_PER_CHANNEL);
 
     // if needed, add input/output data to echo analysis buffers
-    if (parentAudio->_gatheringEchoFrames) {
+    if (parentAudio->_isGatheringEchoFrames) {
         memcpy(parentAudio->_echoInputSamples, inputLeft,
                PACKET_LENGTH_SAMPLES_PER_CHANNEL * sizeof(int16_t));
         memcpy(parentAudio->_echoOutputSamples, outputLeft,

@@ -267,13 +267,13 @@ int audioCallback (const void* inputBuffer,
             }
         }
     }
-    if (parentAudio->_sendingEchoPing) {
+    if (parentAudio->_isSendingEchoPing) {
         const float PING_PITCH = 4.f;
         const float PING_VOLUME = 32000.f;
         for (int s = 0; s < PACKET_LENGTH_SAMPLES_PER_CHANNEL; s++) {
             outputLeft[s] = outputRight[s] = (int16_t)(sinf((float) s / PING_PITCH) * PING_VOLUME);
         }
-        parentAudio->_gatheringEchoFrames = true;
+        parentAudio->_isGatheringEchoFrames = true;
     }
     gettimeofday(&parentAudio->_lastCallbackTime, NULL);
     return paContinue;

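Note (illustrative, not part of this commit): the ping written above is a pure sine tone with angular frequency 1/PING_PITCH radians per sample, so its pitch in Hz is sampleRate / (2 * pi * PING_PITCH). The 22050 Hz rate below is an assumption used only to show the arithmetic; substitute whatever rate the stream actually uses.

    #include <cstdio>

    int main() {
        const float PING_PITCH = 4.f;                 // as in the callback above
        const float ASSUMED_SAMPLE_RATE = 22050.f;    // assumption, for illustration only
        const float PI = 3.14159265f;
        float pingHz = ASSUMED_SAMPLE_RATE / (2.f * PI * PING_PITCH);
        printf("Echo ping tone: about %.0f Hz\n", pingHz);   // ~877 Hz at 22050 Hz
        return 0;
    }
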
@@ -301,10 +301,10 @@ Audio::Audio(Oscilloscope* scope) :
     _totalPacketsReceived(0),
     _firstPlaybackTime(),
     _packetsReceivedThisPlayback(0),
-    _startEcho(false),
-    _sendingEchoPing(false),
+    _shouldStartEcho(false),
+    _isSendingEchoPing(false),
     _echoPingFrameCount(0),
-    _gatheringEchoFrames(false)
+    _isGatheringEchoFrames(false)
 {
     outputPortAudioError(Pa_Initialize());
     outputPortAudioError(Pa_OpenDefaultStream(&_stream,

@@ -375,18 +375,18 @@ void Audio::addProceduralSounds(int16_t* inputBuffer, int numSamples) {
 }
 
 void Audio::startEchoTest() {
-    _startEcho = true;
+    _shouldStartEcho = true;
     _echoPingFrameCount = 0;
-    _sendingEchoPing = true;
-    _gatheringEchoFrames = false;
+    _isSendingEchoPing = true;
+    _isGatheringEchoFrames = false;
 }
 
 void Audio::addedPingFrame() {
     const int ECHO_PING_FRAMES = 1;
     _echoPingFrameCount++;
     if (_echoPingFrameCount == ECHO_PING_FRAMES) {
-        _gatheringEchoFrames = false;
-        _sendingEchoPing = false;
+        _isGatheringEchoFrames = false;
+        _isSendingEchoPing = false;
         //startEchoTest();
     }
 }

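Note (a reading of the hunks above, not code from this commit): the renamed flags sequence the echo test across audio callbacks — startEchoTest() arms the ping, the callback writes the tone while _isSendingEchoPing is set and then turns on _isGatheringEchoFrames, and addedPingFrame() clears both once ECHO_PING_FRAMES ping frames have gone out. A compact stand-in that mirrors that sequence:

    #include <cstdio>

    // Sketch only: the fields are named after the Audio members; main()
    // stands in for successive audio callbacks.
    struct EchoTestFlags {
        bool shouldStartEcho = false;
        bool isSendingEchoPing = false;
        bool isGatheringEchoFrames = false;
        int echoPingFrameCount = 0;

        void startEchoTest() {                    // cf. Audio::startEchoTest()
            shouldStartEcho = true;
            echoPingFrameCount = 0;
            isSendingEchoPing = true;
            isGatheringEchoFrames = false;
        }
        void addedPingFrame() {                   // cf. Audio::addedPingFrame()
            const int ECHO_PING_FRAMES = 1;
            if (++echoPingFrameCount == ECHO_PING_FRAMES) {
                isGatheringEchoFrames = false;
                isSendingEchoPing = false;
            }
        }
    };

    int main() {
        EchoTestFlags flags;
        flags.startEchoTest();                    // arm the test
        flags.isGatheringEchoFrames = true;       // the callback does this after writing the ping
        flags.addedPingFrame();                   // one ping frame sent: both flags drop again
        printf("sending=%d gathering=%d\n", flags.isSendingEchoPing, flags.isGatheringEchoFrames);
        return 0;
    }
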
Audio.h

@@ -57,12 +57,12 @@ private:
     int _totalPacketsReceived;
     timeval _firstPlaybackTime;
     int _packetsReceivedThisPlayback;
-    bool _startEcho;
-    bool _sendingEchoPing;
+    bool _shouldStartEcho;
+    bool _isSendingEchoPing;
     int _echoPingFrameCount;
     int16_t* _echoInputSamples;
     int16_t* _echoOutputSamples;
-    bool _gatheringEchoFrames;
+    bool _isGatheringEchoFrames;
 
     // give access to AudioData class from audioCallback
     friend int audioCallback (const void*, void*, unsigned long, const PaStreamCallbackTimeInfo*, PaStreamCallbackFlags, void*);

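Note (sketch under a stated assumption, not part of this commit): _echoInputSamples and _echoOutputSamples are filled from the callback while _isGatheringEchoFrames is set, presumably so the played ping can be compared against what the microphone picked up. One simple analysis for such buffers is a brute-force cross-correlation to find the offset at which the recorded signal best matches the played one, e.g. as a round-trip delay estimate; nothing below comes from the commit.

    #include <cstdint>

    // Sketch only: returns the sample offset with the largest dot product
    // between the played signal and the recorded signal.
    int bestMatchOffset(const int16_t* played, const int16_t* recorded, int numSamples, int maxOffset) {
        long long bestScore = 0;
        int bestOffset = 0;
        for (int offset = 0; offset < maxOffset; offset++) {
            long long score = 0;
            for (int i = 0; i + offset < numSamples; i++) {
                score += (long long) played[i] * recorded[i + offset];
            }
            if (score > bestScore) {
                bestScore = score;
                bestOffset = offset;
            }
        }
        return bestOffset;
    }
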
Avatar.cpp

@@ -83,7 +83,7 @@ Avatar::Avatar(bool isMine) {
     _transmitterPackets = 0;
     _transmitterIsFirstData = true;
     _transmitterInitialReading = glm::vec3(0.f, 0.f, 0.f);
-    _transmitterV2IsConnected = false;
+    _isTransmitterV2Connected = false;
     _speed = 0.0;
     _pelvisStandingHeight = 0.0f;
     _displayingHead = true;

@@ -133,7 +133,7 @@ Avatar::Avatar(const Avatar &otherAvatar) {
     _transmitterHz = otherAvatar._transmitterHz;
     _transmitterInitialReading = otherAvatar._transmitterInitialReading;
     _transmitterPackets = otherAvatar._transmitterPackets;
-    _transmitterV2IsConnected = otherAvatar._transmitterV2IsConnected;
+    _isTransmitterV2Connected = otherAvatar._isTransmitterV2Connected;
     _TEST_bigSphereRadius = otherAvatar._TEST_bigSphereRadius;
     _TEST_bigSpherePosition = otherAvatar._TEST_bigSpherePosition;
     _movedHandOffset = otherAvatar._movedHandOffset;

@@ -299,11 +299,7 @@ void Avatar::updateFromMouse(int mouseX, int mouseY, int screenWidth, int screenHeight) {
 
     if (fabs(mouseLocationX) > MOUSE_MOVE_RADIUS) {
         float mouseMag = (fabs(mouseLocationX) - MOUSE_MOVE_RADIUS) / (0.5f - MOUSE_MOVE_RADIUS) * MOUSE_ROTATE_SPEED;
-        setBodyYaw(getBodyYaw() -
-                   ((mouseLocationX > 0.f) ?
-                    mouseMag :
-                    -mouseMag) );
-        //printLog("yaw = %f\n", getBodyYaw());
+        setBodyYaw(getBodyYaw() - ((mouseLocationX > 0.f) ? mouseMag : -mouseMag));
     }
 
     return;

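Note (illustrative, not part of this commit): the one-liner above applies a dead zone and a linear ramp to the horizontal mouse position. Restated as a standalone function; mouseLocationX is assumed to be normalized to [-0.5, 0.5] (the 0.5f in the formula suggests this), and the two constants are not visible in this hunk, so the values below are placeholders.

    #include <cmath>

    // Sketch only: dead zone of MOUSE_MOVE_RADIUS, then a linear ramp up to
    // MOUSE_ROTATE_SPEED at the screen edge (|x| = 0.5), turning away from the cursor side.
    float yawAfterMouse(float bodyYaw, float mouseLocationX) {
        const float MOUSE_MOVE_RADIUS = 0.25f;    // placeholder dead-zone half-width
        const float MOUSE_ROTATE_SPEED = 1.0f;    // placeholder yaw step at full deflection
        if (fabsf(mouseLocationX) > MOUSE_MOVE_RADIUS) {
            float mouseMag = (fabsf(mouseLocationX) - MOUSE_MOVE_RADIUS) / (0.5f - MOUSE_MOVE_RADIUS) * MOUSE_ROTATE_SPEED;
            bodyYaw -= (mouseLocationX > 0.f) ? mouseMag : -mouseMag;
        }
        return bodyYaw;
    }
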
@@ -1340,9 +1336,9 @@ void Avatar::processTransmitterDataV2(unsigned char* packetData, int numBytes) {
             _transmitterHandLastRotationRates[i] *= 180.f / PI;
             _transmitterHandLastAcceleration[i] *= GRAVITY_EARTH;
         }
-        if (!_transmitterV2IsConnected) {
+        if (!_isTransmitterV2Connected) {
             printf("Transmitter V2 Connected.\n");
-            _transmitterV2IsConnected = true;
+            _isTransmitterV2Connected = true;
         }
     } else {
         printf("Transmitter V2 packet read error.\n");

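Note (illustrative, not part of this commit): the two in-place scalings in the loop above read as unit conversions — radians per second to degrees per second, and g to m/s², assuming that is what the transmitter reports. Pulled out for clarity; the GRAVITY_EARTH value below is the standard-gravity constant and is an assumption here, not taken from the codebase.

    // Sketch only: PI and GRAVITY_EARTH stand in for the project's constants.
    const float PI = 3.14159265f;
    const float GRAVITY_EARTH = 9.80665f;

    float toDegreesPerSecond(float radiansPerSecond) { return radiansPerSecond * 180.f / PI; }
    float toMetersPerSecondSquared(float accelerationInGs) { return accelerationInGs * GRAVITY_EARTH; }
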
Avatar.h

@@ -128,7 +128,7 @@ public:
     // Related to getting transmitter UDP data used to animate the avatar hand
     void processTransmitterData(unsigned char * packetData, int numBytes);
     void processTransmitterDataV2(unsigned char * packetData, int numBytes);
-    const bool transmitterV2IsConnected() const { return _transmitterV2IsConnected; };
+    const bool isTransmitterV2Connected() const { return _isTransmitterV2Connected; };
     const float* getTransmitterHandLastAcceleration() const { return _transmitterHandLastAcceleration; };
     const float* getTransmitterHandLastRotationRates() const { return _transmitterHandLastRotationRates; };
     void transmitterV2RenderLevels(int width, int height);

@@ -189,7 +189,7 @@ private:
     glm::vec3 _transmitterInitialReading;
     float _transmitterHandLastRotationRates[3];
     float _transmitterHandLastAcceleration[3];
-    bool _transmitterV2IsConnected;
+    bool _isTransmitterV2Connected;
     float _pelvisStandingHeight;
     float _height;
     Balls* _balls;

Head.cpp

@@ -292,6 +292,10 @@ void Head::render(bool lookingInMirror, float bodyYaw) {
     glPopMatrix();
 
     // Mouth
+    const float MIN_LOUDNESS_SCALE_WIDTH = 0.7f;
+    const float WIDTH_SENSITIVITY = 60.f;
+    const float HEIGHT_SENSITIVITY = 30.f;
+    const float MIN_LOUDNESS_SCALE_HEIGHT = 1.0f;
     glPushMatrix();
     glTranslatef(0,-0.35,0.75);
     glColor3f(0,0,0);

@@ -299,8 +303,8 @@ void Head::render(bool lookingInMirror, float bodyYaw) {
     glRotatef(mouthYaw, 0, 0, 1);
 
     if ((averageLoudness > 1.f) && (averageLoudness < 10000.f)) {
-        glScalef(mouthWidth * (.7f + sqrt(averageLoudness) / 60.f),
-                 mouthHeight * (1.f + sqrt(averageLoudness) / 30.f), 1);
+        glScalef(mouthWidth * (MIN_LOUDNESS_SCALE_WIDTH + sqrt(averageLoudness) / WIDTH_SENSITIVITY),
+                 mouthHeight * (MIN_LOUDNESS_SCALE_HEIGHT + sqrt(averageLoudness) / HEIGHT_SENSITIVITY), 1);
     } else {
         glScalef(mouthWidth, mouthHeight, 1);
     }

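Note (illustrative, not part of this commit): a worked evaluation of the renamed mouth-scale constants. At averageLoudness = 3600, sqrt(averageLoudness) = 60, so the mouth width scales by 0.7 + 60/60 = 1.7 and the height by 1.0 + 60/30 = 3.0; outside the (1, 10000) loudness window the mouth keeps its base size.

    #include <cmath>
    #include <cstdio>

    int main() {
        const float MIN_LOUDNESS_SCALE_WIDTH = 0.7f;
        const float WIDTH_SENSITIVITY = 60.f;
        const float MIN_LOUDNESS_SCALE_HEIGHT = 1.0f;
        const float HEIGHT_SENSITIVITY = 30.f;
        float averageLoudness = 3600.f;                 // example value inside the (1, 10000) window
        float widthScale  = MIN_LOUDNESS_SCALE_WIDTH  + sqrtf(averageLoudness) / WIDTH_SENSITIVITY;
        float heightScale = MIN_LOUDNESS_SCALE_HEIGHT + sqrtf(averageLoudness) / HEIGHT_SENSITIVITY;
        printf("width x%.2f, height x%.2f\n", widthScale, heightScale);   // width x1.70, height x3.00
        return 0;
    }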