From 825c1c8876ee6d3a5594746891bcc4eb52266e9a Mon Sep 17 00:00:00 2001
From: Brad Hefta-Gaub <brad@highfidelity.io>
Date: Sat, 11 Jan 2014 00:07:35 -0800
Subject: [PATCH] fix windows warnings

---
 assignment-client/src/audio/AudioMixer.cpp    | 106 +++---
 .../src/audio/AudioMixerClientData.cpp        |  34 +-
 domain-server/src/DomainServer.cpp            |   4 +-
 interface/src/Audio.cpp                       | 264 +++++++-------
 interface/src/DataServerClient.cpp            |  72 ++--
 interface/src/avatar/MyAvatar.cpp             |   2 +-
 interface/src/avatar/SkeletonModel.cpp        |  64 ++--
 interface/src/devices/Faceshift.cpp           |  30 +-
 interface/src/renderer/FBXReader.cpp          | 334 +++++++++---------
 libraries/octree/src/OctreeElement.cpp        |   8 +-
 10 files changed, 459 insertions(+), 459 deletions(-)

diff --git a/assignment-client/src/audio/AudioMixer.cpp b/assignment-client/src/audio/AudioMixer.cpp
index e315d366f8..4410994aa6 100644
--- a/assignment-client/src/audio/AudioMixer.cpp
+++ b/assignment-client/src/audio/AudioMixer.cpp
@@ -64,7 +64,7 @@ void attachNewBufferToNode(Node *newNode) {
 AudioMixer::AudioMixer(const unsigned char* dataBuffer, int numBytes) :
     ThreadedAssignment(dataBuffer, numBytes)
 {
-    
+
 }
 
 void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuffer* bufferToAdd,
@@ -73,79 +73,79 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
     float attenuationCoefficient = 1.0f;
     int numSamplesDelay = 0;
     float weakChannelAmplitudeRatio = 1.0f;
-    
+
     const int PHASE_DELAY_AT_90 = 20;
-    
+
     if (bufferToAdd != listeningNodeBuffer) {
         // if the two buffer pointers do not match then these are different buffers
-        
+
         glm::vec3 listenerPosition = listeningNodeBuffer->getPosition();
         glm::vec3 relativePosition = bufferToAdd->getPosition() - listeningNodeBuffer->getPosition();
         glm::quat inverseOrientation = glm::inverse(listeningNodeBuffer->getOrientation());
-        
+
         float distanceSquareToSource = glm::dot(relativePosition, relativePosition);
         float radius = 0.0f;
-        
+
         if (bufferToAdd->getType() == PositionalAudioRingBuffer::Injector) {
             InjectedAudioRingBuffer* injectedBuffer = (InjectedAudioRingBuffer*) bufferToAdd;
             radius = injectedBuffer->getRadius();
             attenuationCoefficient *= injectedBuffer->getAttenuationRatio();
         }
-        
+
         if (radius == 0 || (distanceSquareToSource > radius * radius)) {
             // this is either not a spherical source, or the listener is outside the sphere
-            
+
             if (radius > 0) {
                 // this is a spherical source - the distance used for the coefficient
                 // needs to be the closest point on the boundary to the source
-                
+
                 // ovveride the distance to the node with the distance to the point on the
                 // boundary of the sphere
                 distanceSquareToSource -= (radius * radius);
-                
+
             } else {
                 // calculate the angle delivery for off-axis attenuation
                 glm::vec3 rotatedListenerPosition = glm::inverse(bufferToAdd->getOrientation()) * relativePosition;
-                
+
                 float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
                                                    glm::normalize(rotatedListenerPosition));
-                
+
                 const float MAX_OFF_AXIS_ATTENUATION = 0.2f;
                 const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
-                
+
                 float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION +
                     (OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / 90.0f));
-                
+
                 // multiply the current attenuation coefficient by the calculated off axis coefficient
                 attenuationCoefficient *= offAxisCoefficient;
             }
-            
+
             glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
-            
+
             const float DISTANCE_SCALE = 2.5f;
             const float GEOMETRIC_AMPLITUDE_SCALAR = 0.3f;
             const float DISTANCE_LOG_BASE = 2.5f;
             const float DISTANCE_SCALE_LOG = logf(DISTANCE_SCALE) / logf(DISTANCE_LOG_BASE);
-            
+
             // calculate the distance coefficient using the distance to this node
             float distanceCoefficient = powf(GEOMETRIC_AMPLITUDE_SCALAR,
                                              DISTANCE_SCALE_LOG +
                                              (0.5f * logf(distanceSquareToSource) / logf(DISTANCE_LOG_BASE)) - 1);
             distanceCoefficient = std::min(1.0f, distanceCoefficient);
-            
+
             // multiply the current attenuation coefficient by the distance coefficient
             attenuationCoefficient *= distanceCoefficient;
-            
+
             // project the rotated source position vector onto the XZ plane
             rotatedSourcePosition.y = 0.0f;
-            
+
             // produce an oriented angle about the y-axis
             bearingRelativeAngleToSource = glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f),
                                                               glm::normalize(rotatedSourcePosition),
                                                               glm::vec3(0.0f, 1.0f, 0.0f));
-            
+
             const float PHASE_AMPLITUDE_RATIO_AT_90 = 0.5;
-            
+
             // figure out the number of samples of delay and the ratio of the amplitude
             // in the weak channel for audio spatialization
             float sinRatio = fabsf(sinf(glm::radians(bearingRelativeAngleToSource)));
@@ -153,11 +153,11 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
             weakChannelAmplitudeRatio = 1 - (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio);
         }
     }
-    
+
     // if the bearing relative angle to source is > 0 then the delayed channel is the right one
     int delayedChannelOffset = (bearingRelativeAngleToSource > 0.0f) ? 1 : 0;
     int goodChannelOffset = delayedChannelOffset == 0 ? 1 : 0;
-    
+
     for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 2) {
         if ((s / 2) < numSamplesDelay) {
             // pull the earlier sample for the delayed channel
@@ -165,12 +165,12 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
             _clientSamples[s + delayedChannelOffset] = glm::clamp(_clientSamples[s + delayedChannelOffset] + earlierSample,
                                                                     MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
         }
-        
+
         // pull the current sample for the good channel
         int16_t currentSample = (*bufferToAdd)[s / 2] * attenuationCoefficient;
         _clientSamples[s + goodChannelOffset] = glm::clamp(_clientSamples[s + goodChannelOffset] + currentSample,
                                                            MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
-        
+
         if ((s / 2) + numSamplesDelay < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
             // place the current sample at the right spot in the delayed channel
             int16_t clampedSample = glm::clamp((int) (_clientSamples[s + (numSamplesDelay * 2) + delayedChannelOffset]
@@ -183,22 +183,22 @@ void AudioMixer::addBufferToMixForListeningNodeWithBuffer(PositionalAudioRingBuf
 
 void AudioMixer::prepareMixForListeningNode(Node* node) {
 	NodeList* nodeList = NodeList::getInstance();
-    
+
     AvatarAudioRingBuffer* nodeRingBuffer = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioRingBuffer();
-    
+
     // zero out the client mix for this node
     memset(_clientSamples, 0, sizeof(_clientSamples));
-    
+
     // loop through all other nodes that have sufficient audio to mix
     for (NodeList::iterator otherNode = nodeList->begin(); otherNode != nodeList->end(); otherNode++) {
         if (otherNode->getLinkedData()) {
-            
+
             AudioMixerClientData* otherNodeClientData = (AudioMixerClientData*) otherNode->getLinkedData();
-            
+
             // enumerate the ARBs attached to the otherNode and add all that should be added to mix
-            for (int i = 0; i < otherNodeClientData->getRingBuffers().size(); i++) {
+            for (unsigned int i = 0; i < otherNodeClientData->getRingBuffers().size(); i++) {
                 PositionalAudioRingBuffer* otherNodeBuffer = otherNodeClientData->getRingBuffers()[i];
-                
+
                 if ((*otherNode != *node
                     || otherNodeBuffer->shouldLoopbackForNode())
                     && otherNodeBuffer->willBeAddedToMix()) {
@@ -217,14 +217,14 @@ void AudioMixer::processDatagram(const QByteArray& dataByteArray, const HifiSock
         || dataByteArray[0] == PACKET_TYPE_INJECT_AUDIO) {
         QUuid nodeUUID = QUuid::fromRfc4122(dataByteArray.mid(numBytesForPacketHeader((unsigned char*) dataByteArray.data()),
                                                               NUM_BYTES_RFC4122_UUID));
-        
+
         NodeList* nodeList = NodeList::getInstance();
-        
+
         Node* matchingNode = nodeList->nodeWithUUID(nodeUUID);
-        
+
         if (matchingNode) {
             nodeList->updateNodeWithData(matchingNode, senderSockAddr, (unsigned char*) dataByteArray.data(), dataByteArray.size());
-            
+
             if (!matchingNode->getActiveSocket()) {
                 // we don't have an active socket for this node, but they're talking to us
                 // this means they've heard from us and can reply, let's assume public is active
@@ -238,29 +238,29 @@ void AudioMixer::processDatagram(const QByteArray& dataByteArray, const HifiSock
 }
 
 void AudioMixer::run() {
-    
+
     commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NODE_TYPE_AUDIO_MIXER);
-    
+
     NodeList* nodeList = NodeList::getInstance();
-    
+
     const char AUDIO_MIXER_NODE_TYPES_OF_INTEREST[2] = { NODE_TYPE_AGENT, NODE_TYPE_AUDIO_INJECTOR };
     nodeList->setNodeTypesOfInterest(AUDIO_MIXER_NODE_TYPES_OF_INTEREST, sizeof(AUDIO_MIXER_NODE_TYPES_OF_INTEREST));
-    
+
     nodeList->linkedDataCreateCallback = attachNewBufferToNode;
-    
+
     int nextFrame = 0;
     timeval startTime;
-    
+
     gettimeofday(&startTime, NULL);
-    
+
     int numBytesPacketHeader = numBytesForPacketHeader((unsigned char*) &PACKET_TYPE_MIXED_AUDIO);
     unsigned char clientPacket[NETWORK_BUFFER_LENGTH_BYTES_STEREO + numBytesPacketHeader];
     populateTypeAndVersion(clientPacket, PACKET_TYPE_MIXED_AUDIO);
-    
+
     while (!_isFinished) {
-        
+
         QCoreApplication::processEvents();
-        
+
         if (_isFinished) {
             break;
         }
@@ -270,33 +270,33 @@ void AudioMixer::run() {
                 ((AudioMixerClientData*) node->getLinkedData())->checkBuffersBeforeFrameSend(JITTER_BUFFER_SAMPLES);
             }
         }
-        
+
         for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
             if (node->getType() == NODE_TYPE_AGENT && node->getActiveSocket() && node->getLinkedData()
                 && ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioRingBuffer()) {
                 prepareMixForListeningNode(&(*node));
-                
+
                 memcpy(clientPacket + numBytesPacketHeader, _clientSamples, sizeof(_clientSamples));
                 nodeList->getNodeSocket().writeDatagram((char*) clientPacket, sizeof(clientPacket),
                                                         node->getActiveSocket()->getAddress(),
                                                         node->getActiveSocket()->getPort());
             }
         }
-        
+
         // push forward the next output pointers for any audio buffers we used
         for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
             if (node->getLinkedData()) {
                 ((AudioMixerClientData*) node->getLinkedData())->pushBuffersAfterFrameSend();
             }
         }
-        
+
         int usecToSleep = usecTimestamp(&startTime) + (++nextFrame * BUFFER_SEND_INTERVAL_USECS) - usecTimestampNow();
-        
+
         if (usecToSleep > 0) {
             usleep(usecToSleep);
         } else {
             qDebug("Took too much time, not sleeping!\n");
         }
-        
+
     }
 }
diff --git a/assignment-client/src/audio/AudioMixerClientData.cpp b/assignment-client/src/audio/AudioMixerClientData.cpp
index b876e5290d..0838de4f45 100644
--- a/assignment-client/src/audio/AudioMixerClientData.cpp
+++ b/assignment-client/src/audio/AudioMixerClientData.cpp
@@ -14,19 +14,19 @@
 #include "AudioMixerClientData.h"
 
 AudioMixerClientData::~AudioMixerClientData() {
-    for (int i = 0; i < _ringBuffers.size(); i++) {
+    for (unsigned int i = 0; i < _ringBuffers.size(); i++) {
         // delete this attached PositionalAudioRingBuffer
         delete _ringBuffers[i];
     }
 }
 
 AvatarAudioRingBuffer* AudioMixerClientData::getAvatarAudioRingBuffer() const {
-    for (int i = 0; i < _ringBuffers.size(); i++) {
+    for (unsigned int i = 0; i < _ringBuffers.size(); i++) {
         if (_ringBuffers[i]->getType() == PositionalAudioRingBuffer::Microphone) {
             return (AvatarAudioRingBuffer*) _ringBuffers[i];
         }
     }
-    
+
     // no AvatarAudioRingBuffer found - return NULL
     return NULL;
 }
@@ -34,49 +34,49 @@ AvatarAudioRingBuffer* AudioMixerClientData::getAvatarAudioRingBuffer() const {
 int AudioMixerClientData::parseData(unsigned char* packetData, int numBytes) {
     if (packetData[0] == PACKET_TYPE_MICROPHONE_AUDIO_WITH_ECHO
         || packetData[0] == PACKET_TYPE_MICROPHONE_AUDIO_NO_ECHO) {
-        
+
         // grab the AvatarAudioRingBuffer from the vector (or create it if it doesn't exist)
         AvatarAudioRingBuffer* avatarRingBuffer = getAvatarAudioRingBuffer();
-        
+
         if (!avatarRingBuffer) {
             // we don't have an AvatarAudioRingBuffer yet, so add it
             avatarRingBuffer = new AvatarAudioRingBuffer();
             _ringBuffers.push_back(avatarRingBuffer);
         }
-        
+
         // ask the AvatarAudioRingBuffer instance to parse the data
         avatarRingBuffer->parseData(packetData, numBytes);
     } else {
         // this is injected audio
-        
+
         // grab the stream identifier for this injected audio
         QByteArray rfcUUID = QByteArray((char*) packetData +  numBytesForPacketHeader(packetData) + NUM_BYTES_RFC4122_UUID,
                                         NUM_BYTES_RFC4122_UUID);
         QUuid streamIdentifier = QUuid::fromRfc4122(rfcUUID);
-        
+
         InjectedAudioRingBuffer* matchingInjectedRingBuffer = NULL;
-        
-        for (int i = 0; i < _ringBuffers.size(); i++) {
+
+        for (unsigned int i = 0; i < _ringBuffers.size(); i++) {
             if (_ringBuffers[i]->getType() == PositionalAudioRingBuffer::Injector
                 && ((InjectedAudioRingBuffer*) _ringBuffers[i])->getStreamIdentifier() == streamIdentifier) {
                 matchingInjectedRingBuffer = (InjectedAudioRingBuffer*) _ringBuffers[i];
             }
         }
-        
+
         if (!matchingInjectedRingBuffer) {
             // we don't have a matching injected audio ring buffer, so add it
             matchingInjectedRingBuffer = new InjectedAudioRingBuffer(streamIdentifier);
             _ringBuffers.push_back(matchingInjectedRingBuffer);
         }
-        
+
         matchingInjectedRingBuffer->parseData(packetData, numBytes);
     }
-    
+
     return 0;
 }
 
 void AudioMixerClientData::checkBuffersBeforeFrameSend(int jitterBufferLengthSamples) {
-    for (int i = 0; i < _ringBuffers.size(); i++) {
+    for (unsigned int i = 0; i < _ringBuffers.size(); i++) {
         if (_ringBuffers[i]->shouldBeAddedToMix(jitterBufferLengthSamples)) {
             // this is a ring buffer that is ready to go
             // set its flag so we know to push its buffer when all is said and done
@@ -86,13 +86,13 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend(int jitterBufferLengthSam
 }
 
 void AudioMixerClientData::pushBuffersAfterFrameSend() {
-    for (int i = 0; i < _ringBuffers.size(); i++) {
+    for (unsigned int i = 0; i < _ringBuffers.size(); i++) {
         // this was a used buffer, push the output pointer forwards
         PositionalAudioRingBuffer* audioBuffer = _ringBuffers[i];
-        
+
         if (audioBuffer->willBeAddedToMix()) {
             audioBuffer->shiftReadPosition(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
-            
+
             audioBuffer->setWillBeAddedToMix(false);
         } else if (audioBuffer->getType() == PositionalAudioRingBuffer::Injector
                    && audioBuffer->hasStarted() && audioBuffer->isStarved()) {
diff --git a/domain-server/src/DomainServer.cpp b/domain-server/src/DomainServer.cpp
index 06c2dd2446..3987ca0c71 100644
--- a/domain-server/src/DomainServer.cpp
+++ b/domain-server/src/DomainServer.cpp
@@ -542,7 +542,7 @@ void DomainServer::prepopulateStaticAssignmentFile() {
         QStringList multiConfigList = multiConfig.split(";");
 
         // read each config to a payload for a VS assignment
-        for (int i = 0; i < multiConfigList.size(); i++) {
+        for (unsigned int i = 0; i < multiConfigList.size(); i++) {
             QString config = multiConfigList.at(i);
 
             qDebug("config[%d]=%s\n", i, config.toLocal8Bit().constData());
@@ -584,7 +584,7 @@ void DomainServer::prepopulateStaticAssignmentFile() {
         QStringList multiConfigList = multiConfig.split(";");
 
         // read each config to a payload for a VS assignment
-        for (int i = 0; i < multiConfigList.size(); i++) {
+        for (unsigned int i = 0; i < multiConfigList.size(); i++) {
             QString config = multiConfigList.at(i);
 
             qDebug("config[%d]=%s\n", i, config.toLocal8Bit().constData());
diff --git a/interface/src/Audio.cpp b/interface/src/Audio.cpp
index 429b0ec66a..e7148d37e3 100644
--- a/interface/src/Audio.cpp
+++ b/interface/src/Audio.cpp
@@ -96,26 +96,26 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
             kAudioObjectPropertyScopeGlobal,
             kAudioObjectPropertyElementMaster
         };
-        
+
         if (mode == QAudio::AudioOutput) {
             propertyAddress.mSelector = kAudioHardwarePropertyDefaultOutputDevice;
         }
-        
-        
+
+
         OSStatus getPropertyError = AudioObjectGetPropertyData(kAudioObjectSystemObject,
                                                                &propertyAddress,
                                                                0,
                                                                NULL,
                                                                &propertySize,
                                                                &defaultDeviceID);
-        
+
         if (!getPropertyError && propertySize) {
             CFStringRef deviceName = NULL;
             propertySize = sizeof(deviceName);
             propertyAddress.mSelector = kAudioDevicePropertyDeviceNameCFString;
             getPropertyError = AudioObjectGetPropertyData(defaultDeviceID, &propertyAddress, 0,
                                                           NULL, &propertySize, &deviceName);
-            
+
             if (!getPropertyError && propertySize) {
                 // find a device in the list that matches the name we have and return it
                 foreach(QAudioDeviceInfo audioDevice, QAudioDeviceInfo::availableDevices(mode)) {
@@ -127,7 +127,7 @@ QAudioDeviceInfo defaultAudioDeviceForMode(QAudio::Mode mode) {
         }
     }
 #endif
-    
+
     // fallback for failed lookup is the default device
     return (mode == QAudio::AudioInput) ? QAudioDeviceInfo::defaultInputDevice() : QAudioDeviceInfo::defaultOutputDevice();
 }
@@ -138,28 +138,28 @@ bool adjustedFormatForAudioDevice(const QAudioDeviceInfo& audioDevice,
     if (!audioDevice.isFormatSupported(desiredAudioFormat)) {
         qDebug() << "The desired format for audio I/O is" << desiredAudioFormat << "\n";
         qDebug() << "The desired audio format is not supported by this device.\n";
-        
+
         if (desiredAudioFormat.channelCount() == 1) {
             adjustedAudioFormat = desiredAudioFormat;
             adjustedAudioFormat.setChannelCount(2);
-            
+
             if (audioDevice.isFormatSupported(adjustedAudioFormat)) {
                 return true;
             } else {
                 adjustedAudioFormat.setChannelCount(1);
             }
         }
-        
+
         if (audioDevice.supportedSampleRates().contains(SAMPLE_RATE * 2)) {
             // use 48, which is a sample downsample, upsample
             adjustedAudioFormat = desiredAudioFormat;
             adjustedAudioFormat.setSampleRate(SAMPLE_RATE * 2);
-            
+
             // return the nearest in case it needs 2 channels
             adjustedAudioFormat = audioDevice.nearestFormat(adjustedAudioFormat);
             return true;
         }
-        
+
         return false;
     } else {
         // set the adjustedAudioFormat to the desiredAudioFormat, since it will work
@@ -176,15 +176,15 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
     } else {
         float sourceToDestinationFactor = (sourceAudioFormat.sampleRate() / (float) destinationAudioFormat.sampleRate())
             * (sourceAudioFormat.channelCount() / (float) destinationAudioFormat.channelCount());
-        
+
         // take into account the number of channels in source and destination
         // accomodate for the case where have an output with > 2 channels
         // this is the case with our HDMI capture
-        
+
         if (sourceToDestinationFactor >= 2) {
             // we need to downsample from 48 to 24
             // for now this only supports a mono output - this would be the case for audio input
-            
+
             for (int i = sourceAudioFormat.channelCount(); i < numSourceSamples; i += 2 * sourceAudioFormat.channelCount()) {
                 if (i + (sourceAudioFormat.channelCount()) >= numSourceSamples) {
                     destinationSamples[(i - sourceAudioFormat.channelCount()) / (int) sourceToDestinationFactor] =
@@ -197,7 +197,7 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
                         + (sourceSamples[i + sourceAudioFormat.channelCount()] / 4);
                 }
             }
-            
+
         } else {
             // upsample from 24 to 48
             // for now this only supports a stereo to stereo conversion - this is our case for network audio to output
@@ -205,10 +205,10 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
             int dtsSampleRateFactor = (destinationAudioFormat.sampleRate() / sourceAudioFormat.sampleRate());
             int sampleShift = destinationAudioFormat.channelCount() * dtsSampleRateFactor;
             int destinationToSourceFactor = (1 / sourceToDestinationFactor);
-            
+
             for (int i = 0; i < numDestinationSamples; i += sampleShift) {
                 sourceIndex = (i / destinationToSourceFactor);
-                
+
                 // fill the L/R channels and make the rest silent
                 for (int j = i; j < i + sampleShift; j++) {
                     if (j % destinationAudioFormat.channelCount() == 0) {
@@ -230,7 +230,7 @@ void linearResampling(int16_t* sourceSamples, int16_t* destinationSamples,
 const int CALLBACK_ACCELERATOR_RATIO = 2;
 
 void Audio::start() {
-    
+
     // set up the desired audio format
     _desiredInputFormat.setSampleRate(SAMPLE_RATE);
     _desiredInputFormat.setSampleSize(16);
@@ -238,102 +238,102 @@ void Audio::start() {
     _desiredInputFormat.setSampleType(QAudioFormat::SignedInt);
     _desiredInputFormat.setByteOrder(QAudioFormat::LittleEndian);
     _desiredInputFormat.setChannelCount(1);
-    
+
     _desiredOutputFormat = _desiredInputFormat;
     _desiredOutputFormat.setChannelCount(2);
-    
+
     QAudioDeviceInfo inputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioInput);
-    
+
     qDebug() << "The audio input device is" << inputDeviceInfo.deviceName() << "\n";
-    
+
     if (adjustedFormatForAudioDevice(inputDeviceInfo, _desiredInputFormat, _inputFormat)) {
         qDebug() << "The format to be used for audio input is" << _inputFormat << "\n";
-        
+
         _audioInput = new QAudioInput(inputDeviceInfo, _inputFormat, this);
         _numInputCallbackBytes = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL * _inputFormat.channelCount()
             * (_inputFormat.sampleRate() / SAMPLE_RATE)
             / CALLBACK_ACCELERATOR_RATIO;
         _audioInput->setBufferSize(_numInputCallbackBytes);
-        
+
         QAudioDeviceInfo outputDeviceInfo = defaultAudioDeviceForMode(QAudio::AudioOutput);
-        
+
         qDebug() << "The audio output device is" << outputDeviceInfo.deviceName() << "\n";
 
         if (adjustedFormatForAudioDevice(outputDeviceInfo, _desiredOutputFormat, _outputFormat)) {
             qDebug() << "The format to be used for audio output is" << _outputFormat << "\n";
-            
+
             _inputRingBuffer.resizeForFrameSize(_numInputCallbackBytes * CALLBACK_ACCELERATOR_RATIO / sizeof(int16_t));
             _inputDevice = _audioInput->start();
             connect(_inputDevice, SIGNAL(readyRead()), this, SLOT(handleAudioInput()));
-            
+
             // setup our general output device for audio-mixer audio
             _audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
             _outputDevice = _audioOutput->start();
-            
+
             // setup a loopback audio output device
             _loopbackAudioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
-            
+
             gettimeofday(&_lastReceiveTime, NULL);
         }
-        
+
         return;
     }
-    
+
     qDebug() << "Unable to set up audio I/O because of a problem with input or output formats.\n";
 }
 
 void Audio::handleAudioInput() {
     static char monoAudioDataPacket[MAX_PACKET_SIZE];
-    
+
     static int numBytesPacketHeader = numBytesForPacketHeader((unsigned char*) &PACKET_TYPE_MICROPHONE_AUDIO_NO_ECHO);
     static int leadingBytes = numBytesPacketHeader + sizeof(glm::vec3) + sizeof(glm::quat) +  NUM_BYTES_RFC4122_UUID;
-    
+
     static int16_t* monoAudioSamples = (int16_t*) (monoAudioDataPacket + leadingBytes);
-    
+
     static float inputToNetworkInputRatio = _numInputCallbackBytes * CALLBACK_ACCELERATOR_RATIO
         / NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL;
-    
+
     static int inputSamplesRequired = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * inputToNetworkInputRatio;
-    
+
     QByteArray inputByteArray = _inputDevice->readAll();
-    
+
     if (Menu::getInstance()->isOptionChecked(MenuOption::EchoLocalAudio)) {
         // if this person wants local loopback add that to the locally injected audio
-        
+
         if (!_loopbackOutputDevice) {
             // we didn't have the loopback output device going so set that up now
             _loopbackOutputDevice = _loopbackAudioOutput->start();
         }
-        
+
         if (_inputFormat == _outputFormat) {
             _loopbackOutputDevice->write(inputByteArray);
         } else {
             static float loopbackOutputToInputRatio = (_outputFormat.sampleRate() / (float) _inputFormat.sampleRate())
                 * (_outputFormat.channelCount() / _inputFormat.channelCount());
-            
+
             QByteArray loopBackByteArray(inputByteArray.size() * loopbackOutputToInputRatio, 0);
-            
+
             linearResampling((int16_t*) inputByteArray.data(), (int16_t*) loopBackByteArray.data(),
                              inputByteArray.size() / sizeof(int16_t),
                              loopBackByteArray.size() / sizeof(int16_t), _inputFormat, _outputFormat);
-            
+
             _loopbackOutputDevice->write(loopBackByteArray);
         }
     }
-    
+
     _inputRingBuffer.writeData(inputByteArray.data(), inputByteArray.size());
-    
+
     while (_inputRingBuffer.samplesAvailable() > inputSamplesRequired) {
-        
+
         int16_t inputAudioSamples[inputSamplesRequired];
         _inputRingBuffer.readSamples(inputAudioSamples, inputSamplesRequired);
-        
+
         // zero out the monoAudioSamples array and the locally injected audio
         memset(monoAudioSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
-        
+
         // zero out the locally injected audio in preparation for audio procedural sounds
         memset(_localInjectedSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
-        
+
         if (!_muted) {
             // we aren't muted, downsample the input audio
             linearResampling((int16_t*) inputAudioSamples,
@@ -341,15 +341,15 @@ void Audio::handleAudioInput() {
                              inputSamplesRequired,
                              NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
                              _inputFormat, _desiredInputFormat);
-            
+
             float loudness = 0;
-            
+
             for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
                 loudness += fabsf(monoAudioSamples[i]);
             }
-            
+
             _lastInputLoudness = loudness / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL;
-            
+
             // add input data just written to the scope
             QMetaObject::invokeMethod(_scope, "addSamples", Qt::QueuedConnection,
                                       Q_ARG(QByteArray, QByteArray((char*) monoAudioSamples,
@@ -359,47 +359,47 @@ void Audio::handleAudioInput() {
             // our input loudness is 0, since we're muted
             _lastInputLoudness = 0;
         }
-        
+
         // add procedural effects to the appropriate input samples
         addProceduralSounds(monoAudioSamples,
                             NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
-        
+
         NodeList* nodeList = NodeList::getInstance();
         Node* audioMixer = nodeList->soloNodeOfType(NODE_TYPE_AUDIO_MIXER);
-        
+
         if (audioMixer && nodeList->getNodeActiveSocketOrPing(audioMixer)) {
             MyAvatar* interfaceAvatar = Application::getInstance()->getAvatar();
-            
+
             glm::vec3 headPosition = interfaceAvatar->getHeadJointPosition();
             glm::quat headOrientation = interfaceAvatar->getHead().getOrientation();
-            
+
             // we need the amount of bytes in the buffer + 1 for type
             // + 12 for 3 floats for position + float for bearing + 1 attenuation byte
-            
+
             PACKET_TYPE packetType = Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)
                 ? PACKET_TYPE_MICROPHONE_AUDIO_WITH_ECHO : PACKET_TYPE_MICROPHONE_AUDIO_NO_ECHO;
-            
+
             char* currentPacketPtr = monoAudioDataPacket + populateTypeAndVersion((unsigned char*) monoAudioDataPacket,
                                                                                   packetType);
-            
+
             // pack Source Data
             QByteArray rfcUUID = NodeList::getInstance()->getOwnerUUID().toRfc4122();
             memcpy(currentPacketPtr, rfcUUID.constData(), rfcUUID.size());
             currentPacketPtr += rfcUUID.size();
-            
+
             // memcpy the three float positions
             memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
             currentPacketPtr += (sizeof(headPosition));
-            
+
             // memcpy our orientation
             memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
             currentPacketPtr += sizeof(headOrientation);
-            
+
             nodeList->getNodeSocket().writeDatagram(monoAudioDataPacket,
                                                     NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL + leadingBytes,
                                                     audioMixer->getActiveSocket()->getAddress(),
                                                     audioMixer->getActiveSocket()->getPort());
-            
+
             Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO)
                 .updateValue(NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL + leadingBytes);
         }
@@ -409,18 +409,18 @@ void Audio::handleAudioInput() {
 void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
     const int NUM_INITIAL_PACKETS_DISCARD = 3;
     const int STANDARD_DEVIATION_SAMPLE_COUNT = 500;
-    
+
     timeval currentReceiveTime;
     gettimeofday(&currentReceiveTime, NULL);
     _totalPacketsReceived++;
-    
+
     double timeDiff = diffclock(&_lastReceiveTime, &currentReceiveTime);
-    
+
     //  Discard first few received packets for computing jitter (often they pile up on start)
     if (_totalPacketsReceived > NUM_INITIAL_PACKETS_DISCARD) {
         _stdev.addValue(timeDiff);
     }
-    
+
     if (_stdev.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) {
         _measuredJitter = _stdev.getStDev();
         _stdev.reset();
@@ -432,17 +432,17 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
             setJitterBufferSamples(glm::clamp((int)newJitterBufferSamples, 0, MAX_JITTER_BUFFER_SAMPLES));
         }
     }
-    
+
     _ringBuffer.parseData((unsigned char*) audioByteArray.data(), audioByteArray.size());
-    
+
     static float networkOutputToOutputRatio = (_desiredOutputFormat.sampleRate() / (float) _outputFormat.sampleRate())
         * (_desiredOutputFormat.channelCount() / (float) _outputFormat.channelCount());
-    
+
     static int numRequiredOutputSamples = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / networkOutputToOutputRatio;
-    
+
     QByteArray outputBuffer;
     outputBuffer.resize(numRequiredOutputSamples * sizeof(int16_t));
-    
+
     // if there is anything in the ring buffer, decide what to do
     if (_ringBuffer.samplesAvailable() > 0) {
         if (!_ringBuffer.isNotStarvedOrHasMinimumSamples(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO
@@ -452,61 +452,61 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
         } else {
             //  We are either already playing back, or we have enough audio to start playing back.
             _ringBuffer.setIsStarved(false);
-            
+
             // copy the samples we'll resample from the ring buffer - this also
             // pushes the read pointer of the ring buffer forwards
             int16_t ringBufferSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO];
             _ringBuffer.readSamples(ringBufferSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO);
-            
+
             // add the next NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL from each QByteArray
             // in our _localInjectionByteArrays QVector to the _localInjectedSamples
-            
+
             // add to the output samples whatever is in the _localAudioOutput byte array
             // that lets this user hear sound effects and loopback (if enabled)
-            
-            for (int b = 0; b < _localInjectionByteArrays.size(); b++) {
+
+            for (int b = 0; b < _localInjectionByteArrays.size(); b++) {
                 QByteArray audioByteArray = _localInjectionByteArrays.at(b);
-                
+
                 int16_t* byteArraySamples = (int16_t*) audioByteArray.data();
-                
+
                 int samplesToRead = MIN(audioByteArray.size() / sizeof(int16_t),
                                         NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
-                
+
                 for (int i = 0; i < samplesToRead; i++) {
                     _localInjectedSamples[i] = glm::clamp(_localInjectedSamples[i] + byteArraySamples[i],
                                                           MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
                 }
-                
+
                 if (samplesToRead < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) {
                     // there isn't anything left to inject from this byte array, remove it from the vector
                     _localInjectionByteArrays.remove(b);
                 } else {
                     // pull out the bytes we just read for outputs
                     audioByteArray.remove(0, samplesToRead * sizeof(int16_t));
-                    
+
                     // still data left to read - replace the byte array in the QVector with the smaller one
                     _localInjectionByteArrays.replace(b, audioByteArray);
                 }
             }
-            
+
             for (int i = 0; i < NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL; i++) {
                 ringBufferSamples[i * 2] = glm::clamp(ringBufferSamples[i * 2] + _localInjectedSamples[i],
                                                       MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
                 ringBufferSamples[(i * 2) + 1] = glm::clamp(ringBufferSamples[(i * 2) + 1] + _localInjectedSamples[i],
                                                             MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
             }
-            
+
             // copy the packet from the RB to the output
             linearResampling(ringBufferSamples,
                              (int16_t*) outputBuffer.data(),
                              NETWORK_BUFFER_LENGTH_SAMPLES_STEREO,
                              numRequiredOutputSamples,
                              _desiredOutputFormat, _outputFormat);
-            
+
             if (_outputDevice) {
-                
+
                 _outputDevice->write(outputBuffer);
-                
+
                 // add output (@speakers) data just written to the scope
                 QMetaObject::invokeMethod(_scope, "addSamples", Qt::QueuedConnection,
                                           Q_ARG(QByteArray, QByteArray((char*) ringBufferSamples,
@@ -514,7 +514,7 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
                                           Q_ARG(bool, true), Q_ARG(bool, false));
             }
         }
-        
+
     } else if (_audioOutput->bytesFree() == _audioOutput->bufferSize()) {
         // we don't have any audio data left in the output buffer, and the ring buffer from
         // the network has nothing in it either - we just starved
@@ -522,9 +522,9 @@ void Audio::addReceivedAudioToBuffer(const QByteArray& audioByteArray) {
         _ringBuffer.setIsStarved(true);
         _numFramesDisplayStarve = 10;
     }
-    
+
     Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO).updateValue(audioByteArray.size());
-    
+
     _lastReceiveTime = currentReceiveTime;
 }
 
@@ -541,59 +541,59 @@ void Audio::render(int screenWidth, int screenHeight) {
         glLineWidth(2.0);
         glBegin(GL_LINES);
         glColor3f(1,1,1);
-        
+
         int startX = 20.0;
         int currentX = startX;
         int topY = screenHeight - 40;
         int bottomY = screenHeight - 20;
         float frameWidth = 20.0;
         float halfY = topY + ((bottomY - topY) / 2.0);
-        
+
         // draw the lines for the base of the ring buffer
-        
+
         glVertex2f(currentX, topY);
         glVertex2f(currentX, bottomY);
-        
+
         for (int i = 0; i < RING_BUFFER_LENGTH_FRAMES; i++) {
             glVertex2f(currentX, halfY);
             glVertex2f(currentX + frameWidth, halfY);
             currentX += frameWidth;
-            
+
             glVertex2f(currentX, topY);
             glVertex2f(currentX, bottomY);
         }
         glEnd();
-        
+
         // show a bar with the amount of audio remaining in ring buffer and output device
         // beyond the current playback
-        
+
         int bytesLeftInAudioOutput = _audioOutput->bufferSize() - _audioOutput->bytesFree();
         float secondsLeftForAudioOutput = (bytesLeftInAudioOutput / sizeof(int16_t))
             / ((float) _outputFormat.sampleRate() * _outputFormat.channelCount());
         float secondsLeftForRingBuffer = _ringBuffer.samplesAvailable()
             / ((float) _desiredOutputFormat.sampleRate() * _desiredOutputFormat.channelCount());
         float msLeftForAudioOutput = (secondsLeftForAudioOutput + secondsLeftForRingBuffer) * 1000;
-        
+
         if (_numFramesDisplayStarve == 0) {
             glColor3f(0, 1, 0);
         } else {
             glColor3f(0.5 + (_numFramesDisplayStarve / 20.0f), 0, 0);
             _numFramesDisplayStarve--;
         }
-        
+
         if (_averagedLatency == 0.0) {
             _averagedLatency = msLeftForAudioOutput;
         } else {
             _averagedLatency = 0.99f * _averagedLatency + 0.01f * (msLeftForAudioOutput);
         }
-        
+
         glBegin(GL_QUADS);
         glVertex2f(startX, topY + 2);
         glVertex2f(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth, topY + 2);
         glVertex2f(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth, bottomY - 2);
         glVertex2f(startX, bottomY - 2);
         glEnd();
-        
+
         //  Show a yellow bar with the averaged msecs latency you are hearing (from time of packet receipt)
         glColor3f(1,1,0);
         glBegin(GL_QUADS);
@@ -602,13 +602,13 @@ void Audio::render(int screenWidth, int screenHeight) {
         glVertex2f(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth + 2, bottomY + 2);
         glVertex2f(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth - 2, bottomY + 2);
         glEnd();
-        
+
         char out[40];
         sprintf(out, "%3.0f\n", _averagedLatency);
         drawtext(startX + _averagedLatency / AUDIO_CALLBACK_MSECS * frameWidth - 10, topY - 9, 0.10, 0, 1, 0, out, 1,1,0);
-        
+
         //  Show a red bar with the 'start' point of one frame plus the jitter buffer
-        
+
         glColor3f(1, 0, 0);
         int jitterBufferPels = (1.f + (float)getJitterBufferSamples()
                                 / (float) NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL) * frameWidth;
@@ -620,14 +620,14 @@ void Audio::render(int screenWidth, int screenHeight) {
         } else {
             drawtext(startX, bottomY + 12, 0.10, 0, 1, 0, out, 1, 0, 0);
         }
-        
+
         glBegin(GL_QUADS);
         glVertex2f(startX + jitterBufferPels - 2, topY - 2);
         glVertex2f(startX + jitterBufferPels + 2, topY - 2);
         glVertex2f(startX + jitterBufferPels + 2, bottomY + 2);
         glVertex2f(startX + jitterBufferPels - 2, bottomY + 2);
         glEnd();
-        
+
     }
     renderToolIcon(screenHeight);
 }
@@ -638,12 +638,12 @@ void Audio::addProceduralSounds(int16_t* monoInput, int numSamples) {
     const float MIN_AUDIBLE_VELOCITY = 0.1;
     const int VOLUME_BASELINE = 400;
     const float SOUND_PITCH = 8.f;
-    
+
     float speed = glm::length(_lastVelocity);
     float volume = VOLUME_BASELINE * (1.f - speed / MAX_AUDIBLE_VELOCITY);
-    
+
     float sample;
-    
+
     // Travelling noise
     //  Add a noise-modulated sinewave with volume that tapers off with speed increasing
     if ((speed > MIN_AUDIBLE_VELOCITY) && (speed < MAX_AUDIBLE_VELOCITY)) {
@@ -661,23 +661,23 @@ void Audio::addProceduralSounds(int16_t* monoInput, int numSamples) {
     if (_collisionSoundMagnitude > COLLISION_SOUND_CUTOFF_LEVEL) {
         for (int i = 0; i < numSamples; i++) {
             t = (float) _proceduralEffectSample + (float) i;
-            
+
             sample = sinf(t * _collisionSoundFrequency)
                 + sinf(t * _collisionSoundFrequency / DOWN_TWO_OCTAVES)
                 + sinf(t * _collisionSoundFrequency / DOWN_FOUR_OCTAVES * UP_MAJOR_FIFTH);
             sample *= _collisionSoundMagnitude * COLLISION_SOUND_MAX_VOLUME;
-            
+
             int16_t collisionSample = (int16_t) sample;
-            
+
             monoInput[i] = glm::clamp(monoInput[i] + collisionSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
             _localInjectedSamples[i] = glm::clamp(_localInjectedSamples[i] + collisionSample,
                                                   MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
-            
+
             _collisionSoundMagnitude *= _collisionSoundDuration;
         }
     }
     _proceduralEffectSample += numSamples;
-    
+
     //  Add a drum sound
     const float MAX_VOLUME = 32000.f;
     const float MAX_DURATION = 2.f;
@@ -690,13 +690,13 @@ void Audio::addProceduralSounds(int16_t* monoInput, int numSamples) {
             sample = sinf(t * frequency);
             sample += ((randFloat() - 0.5f) * NOISE_MAGNITUDE);
             sample *= _drumSoundVolume * MAX_VOLUME;
-            
+
             int16_t collisionSample = (int16_t) sample;
-            
+
             monoInput[i] = glm::clamp(monoInput[i] + collisionSample, MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
             _localInjectedSamples[i] = glm::clamp(_localInjectedSamples[i] + collisionSample,
                                                   MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
-            
+
             _drumSoundVolume *= (1.f - _drumSoundDecay);
         }
         _drumSoundSample += numSamples;
@@ -730,46 +730,46 @@ void Audio::handleAudioByteArray(const QByteArray& audioByteArray) {
 }
 
 void Audio::renderToolIcon(int screenHeight) {
-    
+
     _iconBounds = QRect(ICON_LEFT, screenHeight - BOTTOM_PADDING, ICON_SIZE, ICON_SIZE);
     glEnable(GL_TEXTURE_2D);
-    
+
     glBindTexture(GL_TEXTURE_2D, _micTextureId);
     glColor3f(1, 1, 1);
     glBegin(GL_QUADS);
-    
+
     glTexCoord2f(1, 1);
     glVertex2f(_iconBounds.left(), _iconBounds.top());
-    
+
     glTexCoord2f(0, 1);
     glVertex2f(_iconBounds.right(), _iconBounds.top());
-    
+
     glTexCoord2f(0, 0);
     glVertex2f(_iconBounds.right(), _iconBounds.bottom());
-    
+
     glTexCoord2f(1, 0);
     glVertex2f(_iconBounds.left(), _iconBounds.bottom());
-    
+
     glEnd();
-    
+
     if (_muted) {
         glBindTexture(GL_TEXTURE_2D, _muteTextureId);
         glBegin(GL_QUADS);
-        
+
         glTexCoord2f(1, 1);
         glVertex2f(_iconBounds.left(), _iconBounds.top());
-        
+
         glTexCoord2f(0, 1);
         glVertex2f(_iconBounds.right(), _iconBounds.top());
-        
+
         glTexCoord2f(0, 0);
         glVertex2f(_iconBounds.right(), _iconBounds.bottom());
-        
+
         glTexCoord2f(1, 0);
         glVertex2f(_iconBounds.left(), _iconBounds.bottom());
-        
+
         glEnd();
     }
-    
+
     glDisable(GL_TEXTURE_2D);
 }
\ No newline at end of file
diff --git a/interface/src/DataServerClient.cpp b/interface/src/DataServerClient.cpp
index 33fa59e3e4..525b63a5a5 100644
--- a/interface/src/DataServerClient.cpp
+++ b/interface/src/DataServerClient.cpp
@@ -34,33 +34,33 @@ const HifiSockAddr& DataServerClient::dataServerSockAddr() {
 void DataServerClient::putValueForKey(const QString& key, const char* value) {
     QString clientString = Application::getInstance()->getProfile()->getUserString();
     if (!clientString.isEmpty()) {
-        
+
         unsigned char* putPacket = new unsigned char[MAX_PACKET_SIZE];
-        
+
         // setup the header for this packet
         int numPacketBytes = populateTypeAndVersion(putPacket, PACKET_TYPE_DATA_SERVER_PUT);
-        
+
         // pack the client UUID, null terminated
         memcpy(putPacket + numPacketBytes, clientString.toLocal8Bit().constData(), clientString.toLocal8Bit().size());
         numPacketBytes += clientString.toLocal8Bit().size();
         putPacket[numPacketBytes++] = '\0';
-        
+
         // pack a 1 to designate that we are putting a single value
         putPacket[numPacketBytes++] = 1;
-        
+
         // pack the key, null terminated
         strcpy((char*) putPacket + numPacketBytes, key.toLocal8Bit().constData());
         numPacketBytes += key.size();
         putPacket[numPacketBytes++] = '\0';
-        
+
         // pack the value, null terminated
         strcpy((char*) putPacket + numPacketBytes, value);
         numPacketBytes += strlen(value);
         putPacket[numPacketBytes++] = '\0';
-        
+
         // add the putPacket to our vector of unconfirmed packets, will be deleted once put is confirmed
         // _unmatchedPackets.insert(std::pair<unsigned char*, int>(putPacket, numPacketBytes));
-        
+
         // send this put request to the data server
         NodeList::getInstance()->getNodeSocket().writeDatagram((char*) putPacket, numPacketBytes,
                                                                dataServerSockAddr().getAddress(),
@@ -81,27 +81,27 @@ void DataServerClient::getValuesForKeysAndUUID(const QStringList& keys, const QU
 void DataServerClient::getValuesForKeysAndUserString(const QStringList& keys, const QString& userString) {
     if (!userString.isEmpty() && keys.size() <= UCHAR_MAX) {
         unsigned char* getPacket = new unsigned char[MAX_PACKET_SIZE];
-        
+
         // setup the header for this packet
         int numPacketBytes = populateTypeAndVersion(getPacket, PACKET_TYPE_DATA_SERVER_GET);
-        
+
         // pack the user string (could be username or UUID string), null-terminate
         memcpy(getPacket + numPacketBytes, userString.toLocal8Bit().constData(), userString.toLocal8Bit().size());
         numPacketBytes += userString.toLocal8Bit().size();
         getPacket[numPacketBytes++] = '\0';
-        
+
         // pack one byte to designate the number of keys
         getPacket[numPacketBytes++] = keys.size();
-        
+
         QString keyString = keys.join(MULTI_KEY_VALUE_SEPARATOR);
-        
+
         // pack the key string, null terminated
         strcpy((char*) getPacket + numPacketBytes, keyString.toLocal8Bit().constData());
         numPacketBytes += keyString.size() + sizeof('\0');
-        
+
         // add the getPacket to our vector of uncofirmed packets, will be deleted once we get a response from the nameserver
         // _unmatchedPackets.insert(std::pair<unsigned char*, int>(getPacket, numPacketBytes));
-        
+
         // send the get to the data server
         NodeList::getInstance()->getNodeSocket().writeDatagram((char*) getPacket, numPacketBytes,
                                                                dataServerSockAddr().getAddress(),
@@ -120,25 +120,25 @@ void DataServerClient::processConfirmFromDataServer(unsigned char* packetData, i
 void DataServerClient::processSendFromDataServer(unsigned char* packetData, int numPacketBytes) {
     // pull the user string from the packet so we know who to associate this with
     int numHeaderBytes = numBytesForPacketHeader(packetData);
-    
+
     char* userStringPosition = (char*) packetData + numHeaderBytes;
-    
+
     QString userString(QByteArray(userStringPosition, strlen(userStringPosition)));
-    
+
     QUuid userUUID(userString);
-    
+
     char* keysPosition = (char*) packetData + numHeaderBytes + strlen(userStringPosition)
         + sizeof('\0') + sizeof(unsigned char);
     char* valuesPosition =  keysPosition + strlen(keysPosition) + sizeof('\0');
-    
+
     QStringList keyList = QString(keysPosition).split(MULTI_KEY_VALUE_SEPARATOR);
     QStringList valueList = QString(valuesPosition).split(MULTI_KEY_VALUE_SEPARATOR);
-    
+
     // user string was UUID, find matching avatar and associate data
-    for (int i = 0; i < keyList.size(); i++) {
+    for (int i = 0; i < keyList.size(); i++) {
         if (valueList[i] != " ") {
             if (keyList[i] == DataServerKey::FaceMeshURL) {
-                
+
                 if (userUUID.isNull() || userUUID == Application::getInstance()->getProfile()->getUUID()) {
                     qDebug("Changing user's face model URL to %s\n", valueList[i].toLocal8Bit().constData());
                     Application::getInstance()->getProfile()->setFaceModelURL(QUrl(valueList[i]));
@@ -148,7 +148,7 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
                     for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
                         if (node->getLinkedData() != NULL && node->getType() == NODE_TYPE_AGENT) {
                             Avatar* avatar = (Avatar *) node->getLinkedData();
-                            
+
                             if (avatar->getUUID() == userUUID) {
                                 QMetaObject::invokeMethod(&avatar->getHead().getFaceModel(),
                                     "setURL", Q_ARG(QUrl, QUrl(valueList[i])));
@@ -157,7 +157,7 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
                     }
                 }
             } else if (keyList[i] == DataServerKey::SkeletonURL) {
-                
+
                 if (userUUID.isNull() || userUUID == Application::getInstance()->getProfile()->getUUID()) {
                     qDebug("Changing user's skeleton URL to %s\n", valueList[i].toLocal8Bit().constData());
                     Application::getInstance()->getProfile()->setSkeletonModelURL(QUrl(valueList[i]));
@@ -167,7 +167,7 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
                     for (NodeList::iterator node = nodeList->begin(); node != nodeList->end(); node++) {
                         if (node->getLinkedData() != NULL && node->getType() == NODE_TYPE_AGENT) {
                             Avatar* avatar = (Avatar *) node->getLinkedData();
-                            
+
                             if (avatar->getUUID() == userUUID) {
                                 QMetaObject::invokeMethod(&avatar->getSkeletonModel(), "setURL",
                                     Q_ARG(QUrl, QUrl(valueList[i])));
@@ -177,33 +177,33 @@ void DataServerClient::processSendFromDataServer(unsigned char* packetData, int
                 }
             } else if (keyList[i] == DataServerKey::Domain && keyList[i + 1] == DataServerKey::Position
                        && valueList[i] != " " && valueList[i + 1] != " ") {
-                
+
                 QStringList coordinateItems = valueList[i + 1].split(',');
-                
+
                 if (coordinateItems.size() == 3) {
-                    
+
                     // send a node kill request, indicating to other clients that they should play the "disappeared" effect
                     NodeList::getInstance()->sendKillNode(&NODE_TYPE_AVATAR_MIXER, 1);
-                    
+
                     qDebug() << "Changing domain to" << valueList[i].toLocal8Bit().constData() <<
                         "and position to" << valueList[i + 1].toLocal8Bit().constData() <<
                         "to go to" << userString << "\n";
-                    
+
                     NodeList::getInstance()->setDomainHostname(valueList[i]);
-                    
+
                     glm::vec3 newPosition(coordinateItems[0].toFloat(),
                                           coordinateItems[1].toFloat(),
                                           coordinateItems[2].toFloat());
                     Application::getInstance()->getAvatar()->setPosition(newPosition);
                 }
-                
+
             } else if (keyList[i] == DataServerKey::UUID) {
                 // this is the user's UUID - set it on the profile
                 Application::getInstance()->getProfile()->setUUID(valueList[i]);
             }
         }
     }
-    
+
     // remove the matched packet from  our map so it isn't re-sent to the data-server
     // removeMatchedPacketFromMap(packetData, numPacketBytes);
 }
@@ -228,12 +228,12 @@ void DataServerClient::removeMatchedPacketFromMap(unsigned char* packetData, int
         if (memcmp(mapIterator->first + sizeof(PACKET_TYPE),
                    packetData + sizeof(PACKET_TYPE),
                    numPacketBytes - sizeof(PACKET_TYPE)) == 0) {
-            
+
             // this is a match - remove the confirmed packet from the vector and delete associated member
             // so it isn't sent back out
             delete[] mapIterator->first;
             _unmatchedPackets.erase(mapIterator);
-            
+
             // we've matched the packet - bail out
             break;
         }
diff --git a/interface/src/avatar/MyAvatar.cpp b/interface/src/avatar/MyAvatar.cpp
index 566e957c3b..3b6bfd83c5 100644
--- a/interface/src/avatar/MyAvatar.cpp
+++ b/interface/src/avatar/MyAvatar.cpp
@@ -919,7 +919,7 @@ void MyAvatar::updateChatCircle(float deltaTime) {
 
     // compute the accumulated centers
     glm::vec3 center = _position;
-    for (int i = 0; i < sortedAvatars.size(); i++) {
+    for (int i = 0; i < sortedAvatars.size(); i++) {
         SortedAvatar& sortedAvatar = sortedAvatars[i];
         sortedAvatar.accumulatedCenter = (center += sortedAvatar.avatar->getPosition()) / (i + 2.0f);
     }
diff --git a/interface/src/avatar/SkeletonModel.cpp b/interface/src/avatar/SkeletonModel.cpp
index 6723b766e8..04fe879aa2 100644
--- a/interface/src/avatar/SkeletonModel.cpp
+++ b/interface/src/avatar/SkeletonModel.cpp
@@ -21,21 +21,21 @@ void SkeletonModel::simulate(float deltaTime) {
     if (!isActive()) {
         return;
     }
-    
+
     setTranslation(_owningAvatar->getPosition());
     setRotation(_owningAvatar->getOrientation() * glm::angleAxis(180.0f, 0.0f, 1.0f, 0.0f));
     const float MODEL_SCALE = 0.0006f;
     setScale(glm::vec3(1.0f, 1.0f, 1.0f) * _owningAvatar->getScale() * MODEL_SCALE);
-    
+
     Model::simulate(deltaTime);
 
     // find the left and rightmost active Leap palms
-    int leftPalmIndex, rightPalmIndex;   
-    HandData& hand = _owningAvatar->getHand(); 
+    int leftPalmIndex, rightPalmIndex;
+    HandData& hand = _owningAvatar->getHand();
     hand.getLeftRightPalmIndices(leftPalmIndex, rightPalmIndex);
-    
+
     const float HAND_RESTORATION_RATE = 0.25f;
-    
+
     const FBXGeometry& geometry = _geometry->getFBXGeometry();
     if (leftPalmIndex == -1) {
         // no Leap data; set hands from mouse
@@ -45,13 +45,13 @@ void SkeletonModel::simulate(float deltaTime) {
             applyHandPosition(geometry.rightHandJointIndex, _owningAvatar->getHandPosition());
         }
         restoreLeftHandPosition(HAND_RESTORATION_RATE);
-    
+
     } else if (leftPalmIndex == rightPalmIndex) {
         // right hand only
-        applyPalmData(geometry.rightHandJointIndex, geometry.rightFingerJointIndices, geometry.rightFingertipJointIndices, 
+        applyPalmData(geometry.rightHandJointIndex, geometry.rightFingerJointIndices, geometry.rightFingertipJointIndices,
             hand.getPalms()[leftPalmIndex]);
         restoreLeftHandPosition(HAND_RESTORATION_RATE);
-        
+
     } else {
         applyPalmData(geometry.leftHandJointIndex, geometry.leftFingerJointIndices, geometry.leftFingertipJointIndices,
             hand.getPalms()[leftPalmIndex]);
@@ -65,39 +65,39 @@ bool SkeletonModel::render(float alpha) {
     if (_jointStates.isEmpty()) {
         return false;
     }
-    
+
     // only render the balls and sticks if the skeleton has no meshes
     if (_meshStates.isEmpty()) {
         const FBXGeometry& geometry = _geometry->getFBXGeometry();
-        
+
         glm::vec3 skinColor, darkSkinColor;
         _owningAvatar->getSkinColors(skinColor, darkSkinColor);
-        
-        for (int i = 0; i < _jointStates.size(); i++) {
+
+        for (int i = 0; i < _jointStates.size(); i++) {
             glPushMatrix();
-            
+
             glm::vec3 position;
             getJointPosition(i, position);
             Application::getInstance()->loadTranslatedViewMatrix(position);
-            
+
             glm::quat rotation;
             getJointRotation(i, rotation);
             glm::vec3 axis = glm::axis(rotation);
             glRotatef(glm::angle(rotation), axis.x, axis.y, axis.z);
-            
+
             glColor4f(skinColor.r, skinColor.g, skinColor.b, alpha);
             const float BALL_RADIUS = 0.005f;
             const int BALL_SUBDIVISIONS = 10;
             glutSolidSphere(BALL_RADIUS * _owningAvatar->getScale(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS);
-            
+
             glPopMatrix();
-            
+
             int parentIndex = geometry.joints[i].parentIndex;
             if (parentIndex == -1) {
                 continue;
             }
             glColor4f(darkSkinColor.r, darkSkinColor.g, darkSkinColor.b, alpha);
-            
+
             glm::vec3 parentPosition;
             getJointPosition(parentIndex, parentPosition);
             const float STICK_RADIUS = BALL_RADIUS * 0.1f;
@@ -105,13 +105,13 @@ bool SkeletonModel::render(float alpha) {
                                               STICK_RADIUS * _owningAvatar->getScale());
         }
     }
-    
+
     Model::render(alpha);
-    
+
     if (Menu::getInstance()->isOptionChecked(MenuOption::CollisionProxies)) {
         renderCollisionProxies(alpha);
     }
-    
+
     return true;
 }
 
@@ -130,7 +130,7 @@ void SkeletonModel::applyHandPosition(int jointIndex, const glm::vec3& position)
         return;
     }
     setJointPosition(jointIndex, position);
-    
+
     const FBXGeometry& geometry = _geometry->getFBXGeometry();
     glm::vec3 handPosition, elbowPosition;
     getJointPosition(jointIndex, handPosition);
@@ -142,7 +142,7 @@ void SkeletonModel::applyHandPosition(int jointIndex, const glm::vec3& position)
     }
     glm::quat handRotation;
     getJointRotation(jointIndex, handRotation, true);
-    
+
     // align hand with forearm
     float sign = (jointIndex == geometry.rightHandJointIndex) ? 1.0f : -1.0f;
     applyRotationDelta(jointIndex, rotationBetween(handRotation * glm::vec3(-sign, 0.0f, 0.0f), forearmVector), false);
@@ -160,7 +160,7 @@ void SkeletonModel::applyPalmData(int jointIndex, const QVector<int>& fingerJoin
     getJointRotation(jointIndex, palmRotation, true);
     applyRotationDelta(jointIndex, rotationBetween(palmRotation * geometry.palmDirection, palm.getNormal()), false);
     getJointRotation(jointIndex, palmRotation, true);
-    
+
     // sort the finger indices by raw x, get the average direction
     QVector<IndexValue> fingerIndices;
     glm::vec3 direction;
@@ -175,7 +175,7 @@ void SkeletonModel::applyPalmData(int jointIndex, const QVector<int>& fingerJoin
         fingerIndices.append(indexValue);
     }
     qSort(fingerIndices.begin(), fingerIndices.end());
-    
+
     // rotate palm according to average finger direction
     float directionLength = glm::length(direction);
     const int MIN_ROTATION_FINGERS = 3;
@@ -183,31 +183,31 @@ void SkeletonModel::applyPalmData(int jointIndex, const QVector<int>& fingerJoin
         applyRotationDelta(jointIndex, rotationBetween(palmRotation * glm::vec3(-sign, 0.0f, 0.0f), direction), false);
         getJointRotation(jointIndex, palmRotation, true);
     }
-    
+
     // no point in continuing if there are no fingers
     if (palm.getNumFingers() == 0 || fingerJointIndices.isEmpty()) {
         return;
     }
-     
+
     // match them up as best we can
     float proportion = fingerIndices.size() / (float)fingerJointIndices.size();
-    for (int i = 0; i < fingerJointIndices.size(); i++) {
+    for (int i = 0; i < fingerJointIndices.size(); i++) {
         int fingerIndex = fingerIndices.at(roundf(i * proportion)).index;
         glm::vec3 fingerVector = palm.getFingers()[fingerIndex].getTipPosition() -
             palm.getFingers()[fingerIndex].getRootPosition();
-        
+
         int fingerJointIndex = fingerJointIndices.at(i);
         int fingertipJointIndex = fingertipJointIndices.at(i);
         glm::vec3 jointVector = extractTranslation(geometry.joints.at(fingertipJointIndex).bindTransform) -
             extractTranslation(geometry.joints.at(fingerJointIndex).bindTransform);
-        
+
         setJointRotation(fingerJointIndex, rotationBetween(palmRotation * jointVector, fingerVector) * palmRotation, true);
     }
 }
 
 void SkeletonModel::updateJointState(int index) {
     Model::updateJointState(index);
-    
+
     if (index == _geometry->getFBXGeometry().rootJointIndex) {
         JointState& state = _jointStates[index];
         state.transform[3][0] = 0.0f;
diff --git a/interface/src/devices/Faceshift.cpp b/interface/src/devices/Faceshift.cpp
index 347bb82bc4..075898982d 100644
--- a/interface/src/devices/Faceshift.cpp
+++ b/interface/src/devices/Faceshift.cpp
@@ -32,7 +32,7 @@ Faceshift::Faceshift() :
     _rightBlinkIndex(1),
     _leftEyeOpenIndex(8),
     _rightEyeOpenIndex(9),
-    _browDownLeftIndex(14), 
+    _browDownLeftIndex(14),
     _browDownRightIndex(15),
     _browUpCenterIndex(16),
     _browUpLeftIndex(17),
@@ -49,9 +49,9 @@ Faceshift::Faceshift() :
     connect(&_tcpSocket, SIGNAL(connected()), SLOT(noteConnected()));
     connect(&_tcpSocket, SIGNAL(error(QAbstractSocket::SocketError)), SLOT(noteError(QAbstractSocket::SocketError)));
     connect(&_tcpSocket, SIGNAL(readyRead()), SLOT(readFromSocket()));
-    
+
     connect(&_udpSocket, SIGNAL(readyRead()), SLOT(readPendingDatagrams()));
-    
+
     _udpSocket.bind(FACESHIFT_PORT);
 }
 
@@ -67,14 +67,14 @@ void Faceshift::update() {
     // get the euler angles relative to the window
     glm::vec3 eulers = safeEulerAngles(_headRotation * glm::quat(glm::radians(glm::vec3(
         (_eyeGazeLeftPitch + _eyeGazeRightPitch) / 2.0f, (_eyeGazeLeftYaw + _eyeGazeRightYaw) / 2.0f, 0.0f))));
-    
+
     // compute and subtract the long term average
     const float LONG_TERM_AVERAGE_SMOOTHING = 0.999f;
     if (!_longTermAverageInitialized) {
         _longTermAverageEyePitch = eulers.x;
         _longTermAverageEyeYaw = eulers.y;
         _longTermAverageInitialized = true;
-        
+
     } else {
         _longTermAverageEyePitch = glm::mix(eulers.x, _longTermAverageEyePitch, LONG_TERM_AVERAGE_SMOOTHING);
         _longTermAverageEyeYaw = glm::mix(eulers.y, _longTermAverageEyeYaw, LONG_TERM_AVERAGE_SMOOTHING);
@@ -107,7 +107,7 @@ void Faceshift::updateFakeCoefficients(float leftBlink, float rightBlink, float
 void Faceshift::setTCPEnabled(bool enabled) {
     if ((_tcpEnabled = enabled)) {
         connectSocket();
-    
+
     } else {
         _tcpSocket.disconnectFromHost();
     }
@@ -118,7 +118,7 @@ void Faceshift::connectSocket() {
         if (!_tcpRetryCount) {
             qDebug("Faceshift: Connecting...\n");
         }
-    
+
         _tcpSocket.connectToHost("localhost", FACESHIFT_PORT);
         _tracking = false;
     }
@@ -126,7 +126,7 @@ void Faceshift::connectSocket() {
 
 void Faceshift::noteConnected() {
     qDebug("Faceshift: Connected.\n");
-    
+
     // request the list of blendshape names
     string message;
     fsBinaryStream::encode_message(message, fsMsgSendBlendshapeNames());
@@ -176,7 +176,7 @@ void Faceshift::receive(const QByteArray& buffer) {
                 if ((_tracking = data.m_trackingSuccessful)) {
                     glm::quat newRotation = glm::quat(data.m_headRotation.w, -data.m_headRotation.x,
                                                       data.m_headRotation.y, -data.m_headRotation.z);
-                    // Compute angular velocity of the head 
+                    // Compute angular velocity of the head
                     glm::quat r = newRotation * glm::inverse(_headRotation);
                     float theta = 2 * acos(r.w);
                     if (theta > EPSILON) {
@@ -187,7 +187,7 @@ void Faceshift::receive(const QByteArray& buffer) {
                         _headAngularVelocity = glm::vec3(0,0,0);
                     }
                     _headRotation = newRotation;
-                    
+
                     const float TRANSLATION_SCALE = 0.02f;
                     _headTranslation = glm::vec3(data.m_headTranslation.x, data.m_headTranslation.y,
                         -data.m_headTranslation.z) * TRANSLATION_SCALE;
@@ -196,17 +196,17 @@ void Faceshift::receive(const QByteArray& buffer) {
                     _eyeGazeRightPitch = -data.m_eyeGazeRightPitch;
                     _eyeGazeRightYaw = data.m_eyeGazeRightYaw;
                     _blendshapeCoefficients = data.m_coeffs;
-                    
+
                     _lastTrackingStateReceived = usecTimestampNow();
                 }
                 break;
             }
             case fsMsg::MSG_OUT_BLENDSHAPE_NAMES: {
                 const vector<string>& names = static_cast<fsMsgBlendshapeNames*>(msg.get())->blendshape_names();
-                for (int i = 0; i < names.size(); i++) {
+                for (int i = 0; i < (int)names.size(); i++) {
                     if (names[i] == "EyeBlink_L") {
                         _leftBlinkIndex = i;
-                    
+
                     } else if (names[i] == "EyeBlink_R") {
                         _rightBlinkIndex = i;
 
@@ -233,10 +233,10 @@ void Faceshift::receive(const QByteArray& buffer) {
 
                     } else if (names[i] == "JawOpen") {
                         _jawOpenIndex = i;
-                        
+
                     } else if (names[i] == "MouthSmile_L") {
                         _mouthSmileLeftIndex = i;
-                        
+
                     } else if (names[i] == "MouthSmile_R") {
                         _mouthSmileRightIndex = i;
                     }
diff --git a/interface/src/renderer/FBXReader.cpp b/interface/src/renderer/FBXReader.cpp
index 1058bd02d9..f49b2b4d19 100644
--- a/interface/src/renderer/FBXReader.cpp
+++ b/interface/src/renderer/FBXReader.cpp
@@ -32,11 +32,11 @@ template<class T> QVariant readBinaryArray(QDataStream& in) {
     quint32 arrayLength;
     quint32 encoding;
     quint32 compressedLength;
-    
+
     in >> arrayLength;
     in >> encoding;
     in >> compressedLength;
-    
+
     QVector<T> values;
     const int DEFLATE_ENCODING = 1;
     if (encoding == DEFLATE_ENCODING) {
@@ -70,7 +70,7 @@ QVariant parseBinaryFBXProperty(QDataStream& in) {
         case 'Y': {
             qint16 value;
             in >> value;
-            return QVariant::fromValue(value);   
+            return QVariant::fromValue(value);
         }
         case 'C': {
             bool value;
@@ -128,12 +128,12 @@ FBXNode parseBinaryFBXNode(QDataStream& in) {
     quint32 propertyCount;
     quint32 propertyListLength;
     quint8 nameLength;
-    
+
     in >> endOffset;
     in >> propertyCount;
     in >> propertyListLength;
     in >> nameLength;
-    
+
     FBXNode node;
     const int MIN_VALID_OFFSET = 40;
     if (endOffset < MIN_VALID_OFFSET || nameLength == 0) {
@@ -141,38 +141,38 @@ FBXNode parseBinaryFBXNode(QDataStream& in) {
         return node;
     }
     node.name = in.device()->read(nameLength);
-    
+
     for (int i = 0; i < propertyCount; i++) {
-        node.properties.append(parseBinaryFBXProperty(in));    
+        node.properties.append(parseBinaryFBXProperty(in));
     }
-    
+
     while (endOffset > in.device()->pos()) {
         FBXNode child = parseBinaryFBXNode(in);
         if (child.name.isNull()) {
             return node;
-            
+
         } else {
             node.children.append(child);
         }
     }
-    
+
     return node;
 }
 
 class Tokenizer {
 public:
-    
+
     Tokenizer(QIODevice* device) : _device(device), _pushedBackToken(-1) { }
-    
+
     enum SpecialToken { DATUM_TOKEN = 0x100 };
-    
+
     int nextToken();
     const QByteArray& getDatum() const { return _datum; }
-    
+
     void pushBackToken(int token) { _pushedBackToken = token; }
-    
+
 private:
-    
+
     QIODevice* _device;
     QByteArray _datum;
     int _pushedBackToken;
@@ -194,13 +194,13 @@ int Tokenizer::nextToken() {
             case ';':
                 _device->readLine(); // skip the comment
                 break;
-                
+
             case ':':
             case '{':
             case '}':
             case ',':
                 return ch; // special punctuation
-            
+
             case '\"':
                 _datum = "";
                 while (_device->getChar(&ch)) {
@@ -214,8 +214,8 @@ int Tokenizer::nextToken() {
                     }
                     _datum.append(ch);
                 }
-                return DATUM_TOKEN;   
-                
+                return DATUM_TOKEN;
+
             default:
                 _datum = "";
                 _datum.append(ch);
@@ -234,16 +234,16 @@ int Tokenizer::nextToken() {
 
 FBXNode parseTextFBXNode(Tokenizer& tokenizer) {
     FBXNode node;
-    
+
     if (tokenizer.nextToken() != Tokenizer::DATUM_TOKEN) {
         return node;
     }
     node.name = tokenizer.getDatum();
-    
+
     if (tokenizer.nextToken() != ':') {
         return node;
     }
-    
+
     int token;
     bool expectingDatum = true;
     while ((token = tokenizer.nextToken()) != -1) {
@@ -255,17 +255,17 @@ FBXNode parseTextFBXNode(Tokenizer& tokenizer) {
         }
         if (token == ',') {
             expectingDatum = true;
-            
+
         } else if (token == Tokenizer::DATUM_TOKEN && expectingDatum) {
             node.properties.append(tokenizer.getDatum());
             expectingDatum = false;
-        
+
         } else {
             tokenizer.pushBackToken(token);
             return node;
         }
     }
-    
+
     return node;
 }
 
@@ -276,11 +276,11 @@ FBXNode parseFBX(QIODevice* device) {
         // parse as a text file
         FBXNode top;
         Tokenizer tokenizer(device);
-        while (device->bytesAvailable()) {    
+        while (device->bytesAvailable()) {
             FBXNode next = parseTextFBXNode(tokenizer);
             if (next.name.isNull()) {
                 return top;
-                
+
             } else {
                 top.children.append(next);
             }
@@ -290,32 +290,32 @@ FBXNode parseFBX(QIODevice* device) {
     QDataStream in(device);
     in.setByteOrder(QDataStream::LittleEndian);
     in.setVersion(QDataStream::Qt_4_5); // for single/double precision switch
-    
+
     // see http://code.blender.org/index.php/2013/08/fbx-binary-file-format-specification/ for an explanation
     // of the FBX binary format
-    
+
     // skip the rest of the header
     const int HEADER_SIZE = 27;
     in.skipRawData(HEADER_SIZE);
-    
+
     // parse the top-level node
     FBXNode top;
     while (device->bytesAvailable()) {
         FBXNode next = parseBinaryFBXNode(in);
         if (next.name.isNull()) {
             return top;
-            
+
         } else {
             top.children.append(next);
         }
     }
-    
+
     return top;
 }
 
 QVariantHash parseMapping(QIODevice* device) {
     QVariantHash properties;
-    
+
     QByteArray line;
     while (!(line = device->readLine()).isEmpty()) {
         if ((line = line.trimmed()).startsWith('#')) {
@@ -328,23 +328,23 @@ QVariantHash parseMapping(QIODevice* device) {
         QByteArray name = sections.at(0).trimmed();
         if (sections.size() == 2) {
             properties.insertMulti(name, sections.at(1).trimmed());
-        
+
         } else if (sections.size() == 3) {
             QVariantHash heading = properties.value(name).toHash();
             heading.insertMulti(sections.at(1).trimmed(), sections.at(2).trimmed());
             properties.insert(name, heading);
-            
+
         } else if (sections.size() >= 4) {
             QVariantHash heading = properties.value(name).toHash();
             QVariantList contents;
-            for (int i = 2; i < sections.size(); i++) {
+            for (int i = 2; i < sections.size(); i++) {
                 contents.append(sections.at(i).trimmed());
             }
             heading.insertMulti(sections.at(1).trimmed(), contents);
             properties.insert(name, heading);
         }
     }
-    
+
     return properties;
 }
 
@@ -481,15 +481,15 @@ const char* FACESHIFT_BLENDSHAPES[] = {
 class FBXModel {
 public:
     QString name;
-    
+
     int parentIndex;
-    
+
     glm::mat4 preTransform;
     glm::quat preRotation;
     glm::quat rotation;
     glm::quat postRotation;
     glm::mat4 postTransform;
-    
+
     glm::vec3 rotationMin;
     glm::vec3 rotationMax;
 };
@@ -501,7 +501,7 @@ glm::mat4 getGlobalTransform(const QMultiHash<QString, QString>& parentMap,
         const FBXModel& model = models.value(nodeID);
         globalTransform = model.preTransform * glm::mat4_cast(model.preRotation * model.rotation * model.postRotation) *
             model.postTransform * globalTransform;
-        
+
         QList<QString> parentIDs = parentMap.values(nodeID);
         nodeID = QString();
         foreach (const QString& parentID, parentIDs) {
@@ -511,7 +511,7 @@ glm::mat4 getGlobalTransform(const QMultiHash<QString, QString>& parentMap,
             }
         }
     }
-    
+
     return globalTransform;
 }
 
@@ -596,7 +596,7 @@ public:
     QVector<int> normalIndices;
     QVector<glm::vec2> texCoords;
     QVector<int> texCoordIndices;
-    
+
     QHash<Vertex, int> indices;
 };
 
@@ -605,14 +605,14 @@ void appendIndex(MeshData& data, QVector<int>& indices, int index) {
     if (vertexIndex < 0) {
         vertexIndex = -vertexIndex - 1;
     }
-    
+
     Vertex vertex;
     vertex.originalIndex = vertexIndex;
 
     glm::vec3 normal;
     if (data.normalIndices.isEmpty()) {
         normal = data.normals.at(data.normalsByVertex ? vertexIndex : index);
-        
+
     } else {
         int normalIndex = data.normalIndices.at(data.normalsByVertex ? vertexIndex : index);
         if (normalIndex >= 0) {
@@ -622,15 +622,15 @@ void appendIndex(MeshData& data, QVector<int>& indices, int index) {
 
     if (data.texCoordIndices.isEmpty()) {
         if (index < data.texCoords.size()) {
-            vertex.texCoord = data.texCoords.at(index);    
+            vertex.texCoord = data.texCoords.at(index);
         }
     } else {
         int texCoordIndex = data.texCoordIndices.at(index);
         if (texCoordIndex >= 0) {
-            vertex.texCoord = data.texCoords.at(texCoordIndex); 
+            vertex.texCoord = data.texCoords.at(texCoordIndex);
         }
     }
-    
+
     QHash<Vertex, int>::const_iterator it = data.indices.find(vertex);
     if (it == data.indices.constEnd()) {
         int newIndex = data.extracted.mesh.vertices.size();
@@ -640,32 +640,32 @@ void appendIndex(MeshData& data, QVector<int>& indices, int index) {
         data.extracted.mesh.vertices.append(data.vertices.at(vertexIndex));
         data.extracted.mesh.normals.append(normal);
         data.extracted.mesh.texCoords.append(vertex.texCoord);
-        
+
     } else {
         indices.append(*it);
         data.extracted.mesh.normals[*it] += normal;
     }
 }
 
-ExtractedMesh extractMesh(const FBXNode& object) {                
+ExtractedMesh extractMesh(const FBXNode& object) {
     MeshData data;
     QVector<int> materials;
     foreach (const FBXNode& child, object.children) {
         if (child.name == "Vertices") {
             data.vertices = createVec3Vector(getDoubleVector(child.properties, 0));
-            
+
         } else if (child.name == "PolygonVertexIndex") {
             data.polygonIndices = getIntVector(child.properties, 0);
-        
+
         } else if (child.name == "LayerElementNormal") {
             data.normalsByVertex = false;
             foreach (const FBXNode& subdata, child.children) {
                 if (subdata.name == "Normals") {
                     data.normals = createVec3Vector(getDoubleVector(subdata.properties, 0));
-                
+
                 } else if (subdata.name == "NormalsIndex") {
                     data.normalIndices = getIntVector(subdata.properties, 0);
-                    
+
                 } else if (subdata.name == "MappingInformationType" &&
                         subdata.properties.at(0) == "ByVertice") {
                     data.normalsByVertex = true;
@@ -675,7 +675,7 @@ ExtractedMesh extractMesh(const FBXNode& object) {
             foreach (const FBXNode& subdata, child.children) {
                 if (subdata.name == "UV") {
                     data.texCoords = createVec2Vector(getDoubleVector(subdata.properties, 0));
-                    
+
                 } else if (subdata.name == "UVIndex") {
                     data.texCoordIndices = getIntVector(subdata.properties, 0);
                 }
@@ -683,28 +683,28 @@ ExtractedMesh extractMesh(const FBXNode& object) {
         } else if (child.name == "LayerElementMaterial") {
             foreach (const FBXNode& subdata, child.children) {
                 if (subdata.name == "Materials") {
-                    materials = getIntVector(subdata.properties, 0);   
+                    materials = getIntVector(subdata.properties, 0);
                 }
             }
         }
     }
-    
+
     // convert the polygons to quads and triangles
     int polygonIndex = 0;
     for (int beginIndex = 0; beginIndex < data.polygonIndices.size(); polygonIndex++) {
         int endIndex = beginIndex;
         while (data.polygonIndices.at(endIndex++) >= 0);
-        
+
         int materialIndex = (polygonIndex < materials.size()) ? materials.at(polygonIndex) : 0;
         data.extracted.mesh.parts.resize(max(data.extracted.mesh.parts.size(), materialIndex + 1));
         FBXMeshPart& part = data.extracted.mesh.parts[materialIndex];
-        
+
         if (endIndex - beginIndex == 4) {
             appendIndex(data, part.quadIndices, beginIndex++);
             appendIndex(data, part.quadIndices, beginIndex++);
             appendIndex(data, part.quadIndices, beginIndex++);
             appendIndex(data, part.quadIndices, beginIndex++);
-            
+
         } else {
             for (int nextIndex = beginIndex + 1;; ) {
                 appendIndex(data, part.triangleIndices, beginIndex);
@@ -717,7 +717,7 @@ ExtractedMesh extractMesh(const FBXNode& object) {
             beginIndex = endIndex;
         }
     }
-    
+
     return data.extracted;
 }
 
@@ -754,7 +754,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
     QHash<QString, Material> materials;
     QHash<QString, QString> diffuseTextures;
     QHash<QString, QString> bumpTextures;
-    
+
     QVariantHash joints = mapping.value("joint").toHash();
     QString jointEyeLeftName = processID(joints.value("jointEyeLeft", "jointEyeLeft").toString());
     QString jointEyeRightName = processID(joints.value("jointEyeRight", "jointEyeRight").toString());
@@ -780,7 +780,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
     QVector<QString> jointRightFingerIDs(jointRightFingerNames.size());
     QVector<QString> jointLeftFingertipIDs(jointLeftFingertipNames.size());
     QVector<QString> jointRightFingertipIDs(jointRightFingertipNames.size());
-    
+
     QVariantHash blendshapeMappings = mapping.value("bs").toHash();
     typedef QPair<int, float> WeightedIndex;
     QMultiHash<QByteArray, WeightedIndex> blendshapeIndices;
@@ -801,31 +801,31 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
         }
     }
     QMultiHash<QString, WeightedIndex> blendshapeChannelIndices;
-    
+
     foreach (const FBXNode& child, node.children) {
         if (child.name == "Objects") {
-            foreach (const FBXNode& object, child.children) {    
+            foreach (const FBXNode& object, child.children) {
                 if (object.name == "Geometry") {
                     if (object.properties.at(2) == "Mesh") {
                         meshes.insert(getID(object.properties), extractMesh(object));
-                        
+
                     } else { // object.properties.at(2) == "Shape"
                         ExtractedBlendshape extracted = { getID(object.properties) };
-                        
+
                         foreach (const FBXNode& data, object.children) {
                             if (data.name == "Indexes") {
                                 extracted.blendshape.indices = getIntVector(data.properties, 0);
-                                
+
                             } else if (data.name == "Vertices") {
                                 extracted.blendshape.vertices = createVec3Vector(
                                     getDoubleVector(data.properties, 0));
-                                
+
                             } else if (data.name == "Normals") {
                                 extracted.blendshape.normals = createVec3Vector(
                                     getDoubleVector(data.properties, 0));
                             }
                         }
-                        
+
                         blendshapes.append(extracted);
                     }
                 } else if (object.name == "Model") {
@@ -833,44 +833,44 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                     if (object.properties.size() == 3) {
                         name = object.properties.at(1).toString();
                         name = name.left(name.indexOf(QChar('\0')));
-                        
+
                     } else {
                         name = getID(object.properties);
                     }
                     int index;
                     if (name == jointEyeLeftName || name == "EyeL" || name == "joint_Leye") {
                         jointEyeLeftID = getID(object.properties);
-                        
+
                     } else if (name == jointEyeRightName || name == "EyeR" || name == "joint_Reye") {
                         jointEyeRightID = getID(object.properties);
-                        
+
                     } else if (name == jointNeckName || name == "NeckRot" || name == "joint_neck") {
                         jointNeckID = getID(object.properties);
-                        
+
                     } else if (name == jointRootName) {
                         jointRootID = getID(object.properties);
-                        
+
                     } else if (name == jointLeanName) {
                         jointLeanID = getID(object.properties);
-                        
+
                     } else if (name == jointHeadName) {
                         jointHeadID = getID(object.properties);
-                        
+
                     } else if (name == jointLeftHandName) {
                         jointLeftHandID = getID(object.properties);
-                        
+
                     } else if (name == jointRightHandName) {
                         jointRightHandID = getID(object.properties);
-                        
+
                     } else if ((index = jointLeftFingerNames.indexOf(name)) != -1) {
                         jointLeftFingerIDs[index] = getID(object.properties);
-                        
+
                     } else if ((index = jointRightFingerNames.indexOf(name)) != -1) {
                         jointRightFingerIDs[index] = getID(object.properties);
-                    
+
                     } else if ((index = jointLeftFingertipNames.indexOf(name)) != -1) {
                         jointLeftFingertipIDs[index] = getID(object.properties);
-                        
+
                     } else if ((index = jointRightFingertipNames.indexOf(name)) != -1) {
                         jointRightFingertipIDs[index] = getID(object.properties);
                     }
@@ -891,7 +891,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                             properties = true;
                             propertyName = "Property";
                             index = 3;
-                            
+
                         } else if (subobject.name == "Properties70") {
                             properties = true;
                             propertyName = "P";
@@ -905,46 +905,46 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
 
                                     } else if (property.properties.at(0) == "RotationOffset") {
                                         rotationOffset = getVec3(property.properties, index);
-                                            
+
                                     } else if (property.properties.at(0) == "RotationPivot") {
                                         rotationPivot = getVec3(property.properties, index);
-                                    
+
                                     } else if (property.properties.at(0) == "PreRotation") {
                                         preRotation = getVec3(property.properties, index);
-                                            
+
                                     } else if (property.properties.at(0) == "Lcl Rotation") {
                                         rotation = getVec3(property.properties, index);
-                                    
+
                                     } else if (property.properties.at(0) == "PostRotation") {
                                         postRotation = getVec3(property.properties, index);
-                                        
+
                                     } else if (property.properties.at(0) == "ScalingPivot") {
                                         scalePivot = getVec3(property.properties, index);
-                                            
+
                                     } else if (property.properties.at(0) == "Lcl Scaling") {
                                         scale = getVec3(property.properties, index);
-                                        
+
                                     } else if (property.properties.at(0) == "RotationMin") {
                                         rotationMin = getVec3(property.properties, index);
-                                        
+
                                     } else if (property.properties.at(0) == "RotationMax") {
                                         rotationMax = getVec3(property.properties, index);
-                                    
+
                                     } else if (property.properties.at(0) == "RotationMinX") {
                                         rotationMinX = property.properties.at(index).toBool();
-                                        
+
                                     } else if (property.properties.at(0) == "RotationMinY") {
                                         rotationMinY = property.properties.at(index).toBool();
-                                    
+
                                     } else if (property.properties.at(0) == "RotationMinZ") {
                                         rotationMinZ = property.properties.at(index).toBool();
-                                    
+
                                     } else if (property.properties.at(0) == "RotationMaxX") {
                                         rotationMaxX = property.properties.at(index).toBool();
-                                        
+
                                     } else if (property.properties.at(0) == "RotationMaxY") {
                                         rotationMaxY = property.properties.at(index).toBool();
-                                    
+
                                     } else if (property.properties.at(0) == "RotationMaxZ") {
                                         rotationMaxZ = property.properties.at(index).toBool();
                                     }
@@ -957,8 +957,8 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                     }
                     // see FBX documentation, http://download.autodesk.com/us/fbx/20112/FBX_SDK_HELP/index.html
                     model.preTransform = glm::translate(translation) * glm::translate(rotationOffset) *
-                        glm::translate(rotationPivot);      
-                    model.preRotation = glm::quat(glm::radians(preRotation));            
+                        glm::translate(rotationPivot);
+                    model.preRotation = glm::quat(glm::radians(preRotation));
                     model.rotation = glm::quat(glm::radians(rotation));
                     model.postRotation = glm::quat(glm::radians(postRotation));
                     model.postTransform = glm::translate(-rotationPivot) * glm::translate(scalePivot) *
@@ -988,21 +988,21 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                             properties = true;
                             propertyName = "Property";
                             index = 3;
-                            
+
                         } else if (subobject.name == "Properties70") {
                             properties = true;
                             propertyName = "P";
                             index = 4;
                         }
-                        if (properties) {        
+                        if (properties) {
                             foreach (const FBXNode& property, subobject.children) {
                                 if (property.name == propertyName) {
                                     if (property.properties.at(0) == "DiffuseColor") {
                                         material.diffuse = getVec3(property.properties, index);
-                                        
+
                                     } else if (property.properties.at(0) == "SpecularColor") {
                                         material.specular = getVec3(property.properties, index);
-                                    
+
                                     } else if (property.properties.at(0) == "Shininess") {
                                         material.shininess = property.properties.at(index).value<double>();
                                     }
@@ -1011,24 +1011,24 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                         }
                     }
                     materials.insert(getID(object.properties), material);
-                    
+
                 } else if (object.name == "Deformer") {
                     if (object.properties.last() == "Cluster") {
                         Cluster cluster;
                         foreach (const FBXNode& subobject, object.children) {
                             if (subobject.name == "Indexes") {
                                 cluster.indices = getIntVector(subobject.properties, 0);
-                                
+
                             } else if (subobject.name == "Weights") {
                                 cluster.weights = getDoubleVector(subobject.properties, 0);
-                            
+
                             } else if (subobject.name == "TransformLink") {
                                 QVector<double> values = getDoubleVector(subobject.properties, 0);
                                 cluster.transformLink = createMat4(values);
                             }
                         }
                         clusters.insert(getID(object.properties), cluster);
-                        
+
                     } else if (object.properties.last() == "BlendShapeChannel") {
                         QByteArray name = object.properties.at(1).toByteArray();
                         name = name.left(name.indexOf('\0'));
@@ -1044,13 +1044,13 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                 }
             }
         } else if (child.name == "Connections") {
-            foreach (const FBXNode& connection, child.children) {    
+            foreach (const FBXNode& connection, child.children) {
                 if (connection.name == "C" || connection.name == "Connect") {
                     if (connection.properties.at(0) == "OP") {
                         QByteArray type = connection.properties.at(3).toByteArray().toLower();
-                        if (type.contains("diffuse")) { 
+                        if (type.contains("diffuse")) {
                             diffuseTextures.insert(getID(connection.properties, 2), getID(connection.properties, 1));
-                                                    
+
                         } else if (type.contains("bump")) {
                             bumpTextures.insert(getID(connection.properties, 2), getID(connection.properties, 1));
                         }
@@ -1061,7 +1061,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             }
         }
     }
-    
+
     // assign the blendshapes to their corresponding meshes
     foreach (const ExtractedBlendshape& extracted, blendshapes) {
         QString blendshapeChannelID = parentMap.value(extracted.id);
@@ -1087,11 +1087,11 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                         blendshape.vertices[*blendshapeIndex] += extracted.blendshape.vertices.at(i) * index.second;
                         blendshape.normals[*blendshapeIndex] += extracted.blendshape.normals.at(i) * index.second;
                     }
-                } 
+                }
             }
         }
     }
-    
+
     // get offset transform from mapping
     FBXGeometry geometry;
     float offsetScale = mapping.value("scale", 1.0f).toFloat();
@@ -1099,7 +1099,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             mapping.value("ry").toFloat(), mapping.value("rz").toFloat())));
     geometry.offset = glm::translate(mapping.value("tx").toFloat(), mapping.value("ty").toFloat(),
         mapping.value("tz").toFloat()) * glm::mat4_cast(offsetRotation) * glm::scale(offsetScale, offsetScale, offsetScale);
-    
+
     // get the list of models in depth-first traversal order
     QVector<QString> modelIDs;
     QSet<QString> remainingModels;
@@ -1117,12 +1117,12 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             }
             top = parentMap.value(top);
             break;
-            
+
             outerContinue: ;
         }
         appendModelIDs(top, childMap, models, remainingModels, modelIDs);
     }
-    
+
     // convert the models to joints
     QVariantList freeJoints = mapping.values("freeJoint");
     foreach (const QString& modelID, modelIDs) {
@@ -1130,7 +1130,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
         FBXJoint joint;
         joint.isFree = freeJoints.contains(model.name);
         joint.parentIndex = model.parentIndex;
-        
+
         // get the indices of all ancestors starting with the first free one (if any)
         joint.freeLineage.append(geometry.joints.size());
         int lastFreeIndex = joint.isFree ? 0 : -1;
@@ -1141,7 +1141,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             joint.freeLineage.append(index);
         }
         joint.freeLineage.remove(lastFreeIndex + 1, joint.freeLineage.size() - lastFreeIndex - 1);
-        
+
         joint.preTransform = model.preTransform;
         joint.preRotation = model.preRotation;
         joint.rotation = model.rotation;
@@ -1150,11 +1150,11 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
         joint.rotationMin = model.rotationMin;
         joint.rotationMax = model.rotationMax;
         glm::quat combinedRotation = model.preRotation * model.rotation * model.postRotation;
-        if (joint.parentIndex == -1) {    
+        if (joint.parentIndex == -1) {
             joint.transform = geometry.offset * model.preTransform * glm::mat4_cast(combinedRotation) * model.postTransform;
             joint.inverseDefaultRotation = glm::inverse(combinedRotation);
             joint.distanceToParent = 0.0f;
-            
+
         } else {
             const FBXJoint& parentJoint = geometry.joints.at(joint.parentIndex);
             joint.transform = parentJoint.transform *
@@ -1166,9 +1166,9 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
         joint.boneRadius = 0.0f;
         joint.inverseBindRotation = joint.inverseDefaultRotation;
         geometry.joints.append(joint);
-        geometry.jointIndices.insert(model.name, geometry.joints.size() - 1);  
+        geometry.jointIndices.insert(model.name, geometry.joints.size() - 1);
     }
-    
+
     // find our special joints
     geometry.leftEyeJointIndex = modelIDs.indexOf(jointEyeLeftID);
     geometry.rightEyeJointIndex = modelIDs.indexOf(jointEyeRightID);
@@ -1182,23 +1182,23 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
     geometry.rightFingerJointIndices = getIndices(jointRightFingerIDs, modelIDs);
     geometry.leftFingertipJointIndices = getIndices(jointLeftFingertipIDs, modelIDs);
     geometry.rightFingertipJointIndices = getIndices(jointRightFingertipIDs, modelIDs);
-    
+
     // extract the translation component of the neck transform
     if (geometry.neckJointIndex != -1) {
         const glm::mat4& transform = geometry.joints.at(geometry.neckJointIndex).transform;
         geometry.neckPivot = glm::vec3(transform[3][0], transform[3][1], transform[3][2]);
     }
-    
+
     QVariantHash springs = mapping.value("spring").toHash();
     QVariant defaultSpring = springs.value("default");
     for (QHash<QString, ExtractedMesh>::iterator it = meshes.begin(); it != meshes.end(); it++) {
         ExtractedMesh& extracted = it.value();
-        
+
         // accumulate local transforms
         QString modelID = models.contains(it.key()) ? it.key() : parentMap.value(it.key());
         extracted.mesh.springiness = springs.value(models.value(modelID).name, defaultSpring).toFloat();
         glm::mat4 modelTransform = getGlobalTransform(parentMap, models, modelID);
-        
+
         // look for textures, material properties
         int partIndex = extracted.mesh.parts.size() - 1;
         bool generateTangents = false;
@@ -1221,7 +1221,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             QString diffuseTextureID = diffuseTextures.value(childID);
             if (!diffuseTextureID.isNull()) {
                 part.diffuseFilename = textureFilenames.value(diffuseTextureID);
-                
+
                 // FBX files generated by 3DSMax have an intermediate texture parent, apparently
                 foreach (const QString& childTextureID, childMap.values(diffuseTextureID)) {
                     if (textureFilenames.contains(childTextureID)) {
@@ -1236,7 +1236,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             }
             partIndex--;
         }
-        
+
         // if we have a normal map (and texture coordinates), we must compute tangents
         if (generateTangents && !extracted.mesh.texCoords.isEmpty()) {
             extracted.mesh.tangents.resize(extracted.mesh.vertices.size());
@@ -1254,7 +1254,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                 }
             }
         }
-        
+
         // find the clusters with which the mesh is associated
         QVector<QString> clusterIDs;
         foreach (const QString& childID, childMap.values(it.key())) {
@@ -1265,7 +1265,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                 FBXCluster fbxCluster;
                 const Cluster& cluster = clusters[clusterID];
                 clusterIDs.append(clusterID);
-                
+
                 // see http://stackoverflow.com/questions/13566608/loading-skinning-information-from-fbx for a discussion
                 // of skinning information in FBX
                 QString jointID = childMap.value(clusterID);
@@ -1276,14 +1276,14 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                 }
                 fbxCluster.inverseBindMatrix = glm::inverse(cluster.transformLink) * modelTransform;
                 extracted.mesh.clusters.append(fbxCluster);
-                
+
                 // override the bind rotation with the transform link
                 FBXJoint& joint = geometry.joints[fbxCluster.jointIndex];
                 joint.inverseBindRotation = glm::inverse(extractRotation(cluster.transformLink));
                 joint.bindTransform = cluster.transformLink;
             }
         }
-        
+
         // if we don't have a skinned joint, parent to the model itself
         if (extracted.mesh.clusters.isEmpty()) {
             FBXCluster cluster;
@@ -1294,7 +1294,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             }
             extracted.mesh.clusters.append(cluster);
         }
-        
+
         // whether we're skinned depends on how many clusters are attached
         const FBXCluster& firstFBXCluster = extracted.mesh.clusters.at(0);
         int maxJointIndex = firstFBXCluster.jointIndex;
@@ -1338,7 +1338,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                                     vertex, boneEnd + boneDirection * proj));
                             }
                         }
-                        
+
                         // look for an unused slot in the weights vector
                         glm::vec4& weights = extracted.mesh.clusterWeights[it.value()];
                         for (int k = 0; k < 4; k++) {
@@ -1379,11 +1379,11 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
             }
         }
         extracted.mesh.isEye = (maxJointIndex == geometry.leftEyeJointIndex || maxJointIndex == geometry.rightEyeJointIndex);
-        
+
         // extract spring edges, connections if springy
         if (extracted.mesh.springiness > 0.0f) {
             QSet<QPair<int, int> > edges;
-            
+
             extracted.mesh.vertexConnections.resize(extracted.mesh.vertices.size());
             foreach (const FBXMeshPart& part, extracted.mesh.parts) {
                 for (int i = 0; i < part.quadIndices.size(); i += 4) {
@@ -1391,12 +1391,12 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                     int index1 = part.quadIndices.at(i + 1);
                     int index2 = part.quadIndices.at(i + 2);
                     int index3 = part.quadIndices.at(i + 3);
-                    
+
                     edges.insert(QPair<int, int>(qMin(index0, index1), qMax(index0, index1)));
                     edges.insert(QPair<int, int>(qMin(index1, index2), qMax(index1, index2)));
                     edges.insert(QPair<int, int>(qMin(index2, index3), qMax(index2, index3)));
                     edges.insert(QPair<int, int>(qMin(index3, index0), qMax(index3, index0)));
-               
+
                     extracted.mesh.vertexConnections[index0].append(QPair<int, int>(index3, index1));
                     extracted.mesh.vertexConnections[index1].append(QPair<int, int>(index0, index2));
                     extracted.mesh.vertexConnections[index2].append(QPair<int, int>(index1, index3));
@@ -1406,46 +1406,46 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
                     int index0 = part.triangleIndices.at(i);
                     int index1 = part.triangleIndices.at(i + 1);
                     int index2 = part.triangleIndices.at(i + 2);
-                    
+
                     edges.insert(QPair<int, int>(qMin(index0, index1), qMax(index0, index1)));
                     edges.insert(QPair<int, int>(qMin(index1, index2), qMax(index1, index2)));
                     edges.insert(QPair<int, int>(qMin(index2, index0), qMax(index2, index0)));
-                    
+
                     extracted.mesh.vertexConnections[index0].append(QPair<int, int>(index2, index1));
                     extracted.mesh.vertexConnections[index1].append(QPair<int, int>(index0, index2));
                     extracted.mesh.vertexConnections[index2].append(QPair<int, int>(index1, index0));
                 }
             }
-            
+
             for (QSet<QPair<int, int> >::const_iterator edge = edges.constBegin(); edge != edges.constEnd(); edge++) {
                 extracted.mesh.springEdges.append(*edge);
             }
         }
-        
+
         geometry.meshes.append(extracted.mesh);
     }
-    
+
     geometry.palmDirection = parseVec3(mapping.value("palmDirection", "0, -1, 0").toString());
-    
+
     // process attachments
     QVariantHash attachments = mapping.value("attach").toHash();
     for (QVariantHash::const_iterator it = attachments.constBegin(); it != attachments.constEnd(); it++) {
         FBXAttachment attachment;
         attachment.jointIndex = modelIDs.indexOf(processID(it.key()));
         attachment.scale = glm::vec3(1.0f, 1.0f, 1.0f);
-        
+
         QVariantList properties = it->toList();
         if (properties.isEmpty()) {
             attachment.url = it->toString();
         } else {
             attachment.url = properties.at(0).toString();
-            
+
             if (properties.size() >= 2) {
                 attachment.translation = parseVec3(properties.at(1).toString());
-            
+
                 if (properties.size() >= 3) {
                     attachment.rotation = glm::quat(glm::radians(parseVec3(properties.at(2).toString())));
-                
+
                     if (properties.size() >= 4) {
                         attachment.scale = parseVec3(properties.at(3).toString());
                     }
@@ -1454,17 +1454,17 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
         }
         geometry.attachments.append(attachment);
     }
-    
+
     return geometry;
 }
 
 FBXGeometry readFBX(const QByteArray& model, const QByteArray& mapping) {
     QBuffer modelBuffer(const_cast<QByteArray*>(&model));
     modelBuffer.open(QIODevice::ReadOnly);
-    
+
     QBuffer mappingBuffer(const_cast<QByteArray*>(&mapping));
     mappingBuffer.open(QIODevice::ReadOnly);
-    
+
     return extractFBXGeometry(parseFBX(&modelBuffer), parseMapping(&mappingBuffer));
 }
 
@@ -1475,7 +1475,7 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     }
     FBXMesh& mesh = *static_cast<FBXMesh*>(extraData);
     FBXMeshPart& part = mesh.parts[0];
-    
+
     const int FACE_COUNT = 6;
     const int VERTICES_PER_FACE = 4;
     const int VERTEX_COUNT = FACE_COUNT * VERTICES_PER_FACE;
@@ -1487,7 +1487,7 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     }
     glm::vec3 corner = voxel->getCorner();
     float scale = voxel->getScale();
-    
+
     mesh.vertices.append(glm::vec3(corner.x, corner.y, corner.z));
     mesh.vertices.append(glm::vec3(corner.x, corner.y, corner.z + scale));
     mesh.vertices.append(glm::vec3(corner.x, corner.y + scale, corner.z + scale));
@@ -1495,7 +1495,7 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     for (int i = 0; i < VERTICES_PER_FACE; i++) {
         mesh.normals.append(glm::vec3(-1.0f, 0.0f, 0.0f));
     }
-    
+
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y, corner.z));
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y + scale, corner.z));
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y + scale, corner.z + scale));
@@ -1503,7 +1503,7 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     for (int i = 0; i < VERTICES_PER_FACE; i++) {
         mesh.normals.append(glm::vec3(1.0f, 0.0f, 0.0f));
     }
-    
+
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y, corner.z));
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y, corner.z + scale));
     mesh.vertices.append(glm::vec3(corner.x, corner.y, corner.z + scale));
@@ -1511,7 +1511,7 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     for (int i = 0; i < VERTICES_PER_FACE; i++) {
         mesh.normals.append(glm::vec3(0.0f, -1.0f, 0.0f));
     }
-    
+
     mesh.vertices.append(glm::vec3(corner.x, corner.y + scale, corner.z));
     mesh.vertices.append(glm::vec3(corner.x, corner.y + scale, corner.z + scale));
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y + scale, corner.z + scale));
@@ -1519,7 +1519,7 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     for (int i = 0; i < VERTICES_PER_FACE; i++) {
         mesh.normals.append(glm::vec3(0.0f, 1.0f, 0.0f));
     }
-    
+
     mesh.vertices.append(glm::vec3(corner.x, corner.y + scale, corner.z));
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y + scale, corner.z));
     mesh.vertices.append(glm::vec3(corner.x + scale, corner.y, corner.z));
@@ -1534,38 +1534,38 @@ bool addMeshVoxelsOperation(OctreeElement* element, void* extraData) {
     mesh.vertices.append(glm::vec3(corner.x, corner.y + scale, corner.z + scale));
     for (int i = 0; i < VERTICES_PER_FACE; i++) {
         mesh.normals.append(glm::vec3(0.0f, 0.0f, 1.0f));
-    }       
-    
+    }
+
     return true;
 }
 
 FBXGeometry readSVO(const QByteArray& model) {
     FBXGeometry geometry;
-    
+
     // we have one joint
     FBXJoint joint = { false };
     joint.parentIndex = -1;
     geometry.joints.append(joint);
-    
+
     // and one mesh with one cluster and one part
     FBXMesh mesh;
     mesh.isEye = false;
     mesh.springiness = 0.0f;
-    
+
     FBXCluster cluster = { 0 };
     mesh.clusters.append(cluster);
-    
+
     FBXMeshPart part;
     part.diffuseColor = glm::vec3(1.0f, 1.0f, 1.0f);
     part.shininess = 96.0f;
     mesh.parts.append(part);
-    
+
     VoxelTree tree;
     ReadBitstreamToTreeParams args(WANT_COLOR, NO_EXISTS_BITS);
     tree.readBitstreamToTree((unsigned char*)model.data(), model.size(), args);
     tree.recurseTreeWithOperation(addMeshVoxelsOperation, &mesh);
-    
+
     geometry.meshes.append(mesh);
-    
+
     return geometry;
 }
diff --git a/libraries/octree/src/OctreeElement.cpp b/libraries/octree/src/OctreeElement.cpp
index 0fad79e625..31eb9a94bb 100644
--- a/libraries/octree/src/OctreeElement.cpp
+++ b/libraries/octree/src/OctreeElement.cpp
@@ -1251,7 +1251,7 @@ void OctreeElement::addDeleteHook(OctreeElementDeleteHook* hook) {
 
 void OctreeElement::removeDeleteHook(OctreeElementDeleteHook* hook) {
     _deleteHooksLock.lockForWrite();
-    for (int i = 0; i < _deleteHooks.size(); i++) {
+    for (unsigned int i = 0; i < _deleteHooks.size(); i++) {
         if (_deleteHooks[i] == hook) {
             _deleteHooks.erase(_deleteHooks.begin() + i);
             break;
@@ -1262,7 +1262,7 @@ void OctreeElement::removeDeleteHook(OctreeElementDeleteHook* hook) {
 
 void OctreeElement::notifyDeleteHooks() {
     _deleteHooksLock.lockForRead();
-    for (int i = 0; i < _deleteHooks.size(); i++) {
+    for (unsigned int i = 0; i < _deleteHooks.size(); i++) {
         _deleteHooks[i]->elementDeleted(this);
     }
     _deleteHooksLock.unlock();
@@ -1275,7 +1275,7 @@ void OctreeElement::addUpdateHook(OctreeElementUpdateHook* hook) {
 }
 
 void OctreeElement::removeUpdateHook(OctreeElementUpdateHook* hook) {
-    for (int i = 0; i < _updateHooks.size(); i++) {
+    for (unsigned int i = 0; i < _updateHooks.size(); i++) {
         if (_updateHooks[i] == hook) {
             _updateHooks.erase(_updateHooks.begin() + i);
             return;
@@ -1284,7 +1284,7 @@ void OctreeElement::removeUpdateHook(OctreeElementUpdateHook* hook) {
 }
 
 void OctreeElement::notifyUpdateHooks() {
-    for (int i = 0; i < _updateHooks.size(); i++) {
+    for (unsigned int i = 0; i < _updateHooks.size(); i++) {
         _updateHooks[i]->elementUpdated(this);
     }
 }