merge upstream/master into andrew/ragdoll

Andrew Meadows 2014-09-17 17:56:35 -07:00
commit 8359e672bb
57 changed files with 1359 additions and 536 deletions


@@ -57,6 +57,8 @@
#include "AvatarAudioStream.h"
#include "InjectedAudioStream.h"
#include "AudioMixer.h"
const float LOUDNESS_TO_DISTANCE_RATIO = 0.00001f;
@@ -92,7 +94,9 @@ AudioMixer::AudioMixer(const QByteArray& packet) :
_timeSpentPerHashMatchCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
_readPendingCallsPerSecondStats(1, READ_DATAGRAMS_STATS_WINDOW_SECONDS)
{
// constant defined in AudioMixer.h. However, we don't want to include this here
// we will soon find a better common home for these audio-related constants
_penumbraFilter.initialize(SAMPLE_RATE, (NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)) / 2);
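// Sizing note: the second argument is presumably one stereo network frame plus the
// worst-case phase-delay margin (SAMPLE_PHASE_DELAY_AT_90 samples on each channel),
// divided by 2 to get a per-channel length for the filter.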
}
AudioMixer::~AudioMixer() {
@@ -102,7 +106,7 @@ AudioMixer::~AudioMixer() {
const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
const float ATTENUATION_AMOUNT_PER_DOUBLING_IN_DISTANCE = 0.18f;
const float ATTENUATION_EPSILON_DISTANCE = 0.1f;
const float RADIUS_OF_HEAD = 0.076f;
int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
AvatarAudioStream* listeningNodeStream) {
@@ -112,6 +116,8 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
// Basically, we'll repeat that last frame until it has a frame to mix. Depending on how many times
// we've repeated that frame in a row, we'll gradually fade that repeated frame into silence.
// This improves the perceived quality of the audio slightly.
bool showDebug = false; // (randFloat() < 0.05f);
float repeatedFrameFadeFactor = 1.0f;
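// A minimal sketch (not from this commit) of how such a fade factor might be derived,
// assuming a linear ramp to silence over a fixed number of repeats; the lambda and its
// constant are hypothetical illustrations:
auto repeatedFrameFade = [](int timesRepeated) -> float {
    const int REPEATS_UNTIL_SILENCE = 10; // assumed for illustration
    float fade = 1.0f - (float)timesRepeated / (float)REPEATS_UNTIL_SILENCE;
    return fade > 0.0f ? fade : 0.0f;
};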
@@ -140,188 +146,221 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
int numSamplesDelay = 0;
float weakChannelAmplitudeRatio = 1.0f;
bool shouldAttenuate = (streamToAdd != listeningNodeStream);
bool shouldDistanceAttenuate = true;
if (shouldAttenuate) {
// if the two stream pointers do not match then these are different streams
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition();
float distanceBetween = glm::length(relativePosition);
if (distanceBetween < EPSILON) {
distanceBetween = EPSILON;
}
if (streamToAdd->getLastPopOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) {
// according to mixer performance we have decided this does not get to be mixed in
// bail out
return 0;
}
++_sumMixes;
if (streamToAdd->getListenerUnattenuatedZone()) {
shouldAttenuate = !streamToAdd->getListenerUnattenuatedZone()->contains(listeningNodeStream->getPosition());
}
if (streamToAdd->getType() == PositionalAudioStream::Injector) {
attenuationCoefficient *= reinterpret_cast<InjectedAudioStream*>(streamToAdd)->getAttenuationRatio();
}
shouldAttenuate = shouldAttenuate && distanceBetween > ATTENUATION_EPSILON_DISTANCE;
if (shouldAttenuate) {
glm::quat inverseOrientation = glm::inverse(listeningNodeStream->getOrientation());
float distanceSquareToSource = glm::dot(relativePosition, relativePosition);
float radius = 0.0f;
if (streamToAdd->getType() == PositionalAudioStream::Injector) {
radius = reinterpret_cast<InjectedAudioStream*>(streamToAdd)->getRadius();
}
if (radius == 0 || (distanceSquareToSource > radius * radius)) {
// this is either not a spherical source, or the listener is outside the sphere
if (radius > 0) {
// this is a spherical source - the distance used for the coefficient
// needs to be the closest point on the boundary to the source
// override the distance to the node with the distance to the point on the
// boundary of the sphere
distanceSquareToSource -= (radius * radius);
} else {
// calculate the angle delivery for off-axis attenuation
glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd->getOrientation()) * relativePosition;
float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(rotatedListenerPosition));
const float MAX_OFF_AXIS_ATTENUATION = 0.2f;
const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION +
(OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / PI_OVER_TWO));
// multiply the current attenuation coefficient by the calculated off axis coefficient
attenuationCoefficient *= offAxisCoefficient;
}
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
if (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE) {
// calculate the distance coefficient using the distance to this node
float distanceCoefficient = 1 - (logf(distanceBetween / ATTENUATION_BEGINS_AT_DISTANCE) / logf(2.0f)
* ATTENUATION_AMOUNT_PER_DOUBLING_IN_DISTANCE);
if (distanceCoefficient < 0) {
distanceCoefficient = 0;
}
// multiply the current attenuation coefficient by the distance coefficient
attenuationCoefficient *= distanceCoefficient;
}
// project the rotated source position vector onto the XZ plane
rotatedSourcePosition.y = 0.0f;
// produce an oriented angle about the y-axis
bearingRelativeAngleToSource = glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(rotatedSourcePosition),
glm::vec3(0.0f, 1.0f, 0.0f));
const float PHASE_AMPLITUDE_RATIO_AT_90 = 0.5;
// figure out the number of samples of delay and the ratio of the amplitude
// in the weak channel for audio spatialization
float sinRatio = fabsf(sinf(bearingRelativeAngleToSource));
numSamplesDelay = SAMPLE_PHASE_DELAY_AT_90 * sinRatio;
weakChannelAmplitudeRatio = 1 - (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio);
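// Worked example: with SAMPLE_PHASE_DELAY_AT_90 = 20, a bearing of 45 degrees gives
// sinRatio = sin(45°) ≈ 0.707, so numSamplesDelay ≈ 14 samples and
// weakChannelAmplitudeRatio = 1 - 0.5 * 0.707 ≈ 0.65.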
}
// Is the source that I am mixing my own?
bool sourceIsSelf = (streamToAdd == listeningNodeStream);
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition();
float distanceBetween = glm::length(relativePosition);
if (distanceBetween < EPSILON) {
distanceBetween = EPSILON;
}
if (streamToAdd->getLastPopOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) {
// according to mixer performance we have decided this does not get to be mixed in
// bail out
return 0;
}
++_sumMixes;
if (streamToAdd->getListenerUnattenuatedZone()) {
shouldDistanceAttenuate = !streamToAdd->getListenerUnattenuatedZone()->contains(listeningNodeStream->getPosition());
}
if (streamToAdd->getType() == PositionalAudioStream::Injector) {
attenuationCoefficient *= reinterpret_cast<InjectedAudioStream*>(streamToAdd)->getAttenuationRatio();
if (showDebug) {
qDebug() << "AttenuationRatio: " << reinterpret_cast<InjectedAudioStream*>(streamToAdd)->getAttenuationRatio();
}
}
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
if (showDebug) {
qDebug() << "distance: " << distanceBetween;
}
if (!streamToAdd->isStereo() && shouldAttenuate) {
// this is a mono stream, which means it gets full attenuation and spatialization
glm::quat inverseOrientation = glm::inverse(listeningNodeStream->getOrientation());
if (!sourceIsSelf && (streamToAdd->getType() == PositionalAudioStream::Microphone)) {
// source is another avatar, apply fixed off-axis attenuation to make them quieter as they turn away from listener
glm::vec3 rotatedListenerPosition = glm::inverse(streamToAdd->getOrientation()) * relativePosition;
// if the bearing relative angle to source is > 0 then the delayed channel is the right one
int delayedChannelOffset = (bearingRelativeAngleToSource > 0.0f) ? 1 : 0;
int goodChannelOffset = delayedChannelOffset == 0 ? 1 : 0;
float angleOfDelivery = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(rotatedListenerPosition));
int16_t correctStreamSample[2], delayStreamSample[2];
int delayedChannelIndex = 0;
const float MAX_OFF_AXIS_ATTENUATION = 0.2f;
const float OFF_AXIS_ATTENUATION_FORMULA_STEP = (1 - MAX_OFF_AXIS_ATTENUATION) / 2.0f;
const int SINGLE_STEREO_OFFSET = 2;
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION +
(OFF_AXIS_ATTENUATION_FORMULA_STEP * (angleOfDelivery / PI_OVER_TWO));
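// Worked example: with MAX_OFF_AXIS_ATTENUATION = 0.2 and a step of (1 - 0.2) / 2 = 0.4,
// angleOfDelivery = PI yields 0.2 + 0.4 * 2 = 1.0 (source facing the listener, no loss),
// while angleOfDelivery = 0 yields the 0.2 floor for a source turned fully away.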
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) {
if (showDebug) {
qDebug() << "angleOfDelivery" << angleOfDelivery << "offAxisCoefficient: " << offAxisCoefficient;
// setup the int16_t variables for the two sample sets
correctStreamSample[0] = streamPopOutput[s / 2] * attenuationAndFade;
correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationAndFade;
delayedChannelIndex = s + (numSamplesDelay * 2) + delayedChannelOffset;
delayStreamSample[0] = correctStreamSample[0] * weakChannelAmplitudeRatio;
delayStreamSample[1] = correctStreamSample[1] * weakChannelAmplitudeRatio;
_clientSamples[s + goodChannelOffset] += correctStreamSample[0];
_clientSamples[s + goodChannelOffset + SINGLE_STEREO_OFFSET] += correctStreamSample[1];
_clientSamples[delayedChannelIndex] += delayStreamSample[0];
_clientSamples[delayedChannelIndex + SINGLE_STEREO_OFFSET] += delayStreamSample[1];
}
// multiply the current attenuation coefficient by the calculated off axis coefficient
attenuationCoefficient *= offAxisCoefficient;
}
if (shouldDistanceAttenuate && (distanceBetween >= ATTENUATION_BEGINS_AT_DISTANCE)) {
// calculate the distance coefficient using the distance to this node
float distanceCoefficient = 1 - (logf(distanceBetween / ATTENUATION_BEGINS_AT_DISTANCE) / logf(2.0f)
* ATTENUATION_AMOUNT_PER_DOUBLING_IN_DISTANCE);
if (distanceCoefficient < 0) {
distanceCoefficient = 0;
}
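// Worked example: a source 4 m away is log2(4 / 1.0) = 2 doublings out, so
// distanceCoefficient = 1 - 2 * 0.18 = 0.64; past roughly 47 m (1 / 0.18 ≈ 5.6
// doublings) the coefficient clamps to zero.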
if (numSamplesDelay > 0) {
// if there was a sample delay for this stream, we need to pull samples prior to the popped output
// to stick at the beginning
float attenuationAndWeakChannelRatioAndFade = attenuationCoefficient * weakChannelAmplitudeRatio * repeatedFrameFadeFactor;
AudioRingBuffer::ConstIterator delayStreamPopOutput = streamPopOutput - numSamplesDelay;
// TODO: delayStreamPopOutput may be inside the last frame written if the ringbuffer is completely full
// maybe make AudioRingBuffer have 1 extra frame in its buffer
for (int i = 0; i < numSamplesDelay; i++) {
int parentIndex = i * 2;
_clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatioAndFade;
++delayStreamPopOutput;
}
}
} else {
int stereoDivider = streamToAdd->isStereo() ? 1 : 2;
if (!shouldAttenuate) {
attenuationCoefficient = 1.0f;
}
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
_clientSamples[s] = glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationAndFade),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
// multiply the current attenuation coefficient by the distance coefficient
attenuationCoefficient *= distanceCoefficient;
if (showDebug) {
qDebug() << "distanceCoefficient: " << distanceCoefficient;
}
}
if (_enableFilter && shouldAttenuate) {
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition();
glm::quat inverseOrientation = glm::inverse(listeningNodeStream->getOrientation());
if (!sourceIsSelf) {
// Compute sample delay for the two ears to create phase panning
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
// project the rotated source position vector onto the XZ plane
rotatedSourcePosition.y = 0.0f;
// produce an oriented angle about the y-axis
float bearingAngleToSource = glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(rotatedSourcePosition),
glm::vec3(0.0f, -1.0f, 0.0f));
bearingRelativeAngleToSource = glm::orientedAngle(glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(rotatedSourcePosition),
glm::vec3(0.0f, 1.0f, 0.0f));
const float PHASE_AMPLITUDE_RATIO_AT_90 = 0.5;
// figure out the number of samples of delay and the ratio of the amplitude
// in the weak channel for audio spatialization
float sinRatio = fabsf(sinf(bearingRelativeAngleToSource));
numSamplesDelay = SAMPLE_PHASE_DELAY_AT_90 * sinRatio;
weakChannelAmplitudeRatio = 1 - (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio);
if (distanceBetween < RADIUS_OF_HEAD) {
// Diminish phase panning if source would be inside head
numSamplesDelay *= distanceBetween / RADIUS_OF_HEAD;
weakChannelAmplitudeRatio += (PHASE_AMPLITUDE_RATIO_AT_90 * sinRatio) * distanceBetween / RADIUS_OF_HEAD;
}
}
if (showDebug) {
qDebug() << "attenuation: " << attenuationCoefficient;
qDebug() << "bearingRelativeAngleToSource: " << bearingRelativeAngleToSource << " numSamplesDelay: " << numSamplesDelay;
}
AudioRingBuffer::ConstIterator streamPopOutput = streamToAdd->getLastPopOutput();
if (!streamToAdd->isStereo()) {
// this is a mono stream, which means it gets full attenuation and spatialization
// we need to do several things in this process:
// 1) convert from mono to stereo by copying each input sample into the left and right output samples
// 2) apply an attenuation AND fade to all samples (left and right)
// 3) based on the bearing relative angle to the source we will weaken and delay either the left or
// right channel of the input into the output
// 4) because one of these channels is delayed, we will need to use historical samples from
// the input stream for that delayed channel
// Mono input to stereo output (item 1 above)
int OUTPUT_SAMPLES_PER_INPUT_SAMPLE = 2;
int inputSampleCount = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
int maxOutputIndex = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO;
// attenuation and fade applied to all samples (item 2 above)
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
// determine which side is weak and delayed (item 3 above)
bool rightSideWeakAndDelayed = (bearingRelativeAngleToSource > 0.0f);
// since we're converting from mono to stereo, we'll use these two indices to step through
// the output samples. we'll increment each index independently in the loop
int leftDestinationIndex = 0;
int rightDestinationIndex = 1;
// One of our two channels will be delayed (determined below). We'll use this index to step
// through filling in our output with the historical samples for the delayed channel. (item 4 above)
int delayedChannelHistoricalAudioOutputIndex;
// All samples will be attenuated by at least this much
float leftSideAttenuation = attenuationAndFade;
float rightSideAttenuation = attenuationAndFade;
// The weak/delayed channel will be attenuated by this additional amount
float attenuationAndWeakChannelRatioAndFade = attenuationAndFade * weakChannelAmplitudeRatio;
// Now, based on the determination of which side is weak and delayed, set up our true starting point
// for our indexes, as well as the appropriate attenuation for each channel
if (rightSideWeakAndDelayed) {
delayedChannelHistoricalAudioOutputIndex = rightDestinationIndex;
rightSideAttenuation = attenuationAndWeakChannelRatioAndFade;
rightDestinationIndex += (numSamplesDelay * OUTPUT_SAMPLES_PER_INPUT_SAMPLE);
} else {
delayedChannelHistoricalAudioOutputIndex = leftDestinationIndex;
leftSideAttenuation = attenuationAndWeakChannelRatioAndFade;
leftDestinationIndex += (numSamplesDelay * OUTPUT_SAMPLES_PER_INPUT_SAMPLE);
}
// If there was a sample delay for this stream, we need to pull samples prior to the official start of the input
// and stick those samples at the beginning of the output. We only need to loop through this for the weak/delayed
// side, since the normal side is fully handled below. (item 4 above)
if (numSamplesDelay > 0) {
// TODO: delayStreamSourceSamples may be inside the last frame written if the ringbuffer is completely full
// maybe make AudioRingBuffer have 1 extra frame in its buffer
AudioRingBuffer::ConstIterator delayStreamSourceSamples = streamPopOutput - numSamplesDelay;
for (int i = 0; i < numSamplesDelay; i++) {
int16_t originalHistoricalSample = *delayStreamSourceSamples;
_preMixSamples[delayedChannelHistoricalAudioOutputIndex] += originalHistoricalSample
* attenuationAndWeakChannelRatioAndFade;
++delayStreamSourceSamples; // move our input pointer
delayedChannelHistoricalAudioOutputIndex += OUTPUT_SAMPLES_PER_INPUT_SAMPLE; // move our output sample
}
}
// Here's where we copy the MONO input to the STEREO output, and account for delay and weak side attenuation
for (int inputSample = 0; inputSample < inputSampleCount; inputSample++) {
int16_t originalSample = streamPopOutput[inputSample];
int16_t leftSideSample = originalSample * leftSideAttenuation;
int16_t rightSideSample = originalSample * rightSideAttenuation;
// since we might be delayed, don't write beyond our maxOutputIndex
if (leftDestinationIndex <= maxOutputIndex) {
_preMixSamples[leftDestinationIndex] += leftSideSample;
}
if (rightDestinationIndex <= maxOutputIndex) {
_preMixSamples[rightDestinationIndex] += rightSideSample;
}
leftDestinationIndex += OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
rightDestinationIndex += OUTPUT_SAMPLES_PER_INPUT_SAMPLE;
}
} else {
int stereoDivider = streamToAdd->isStereo() ? 1 : 2;
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
_preMixSamples[s] = glm::clamp(_preMixSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationAndFade),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}
}
if (!sourceIsSelf && _enableFilter) {
const float TWO_OVER_PI = 2.0f / PI;
const float ZERO_DB = 1.0f;
// const float NEGATIVE_ONE_DB = 0.891f;
//const float NEGATIVE_ONE_DB = 0.891f;
const float NEGATIVE_THREE_DB = 0.708f;
const float NEGATIVE_SIX_DB = 0.501f;
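// These linear gains follow the standard amplitude conversion gain = 10^(dB / 20):
// 10^(-1/20) ≈ 0.891, 10^(-3/20) ≈ 0.708, 10^(-6/20) ≈ 0.501.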
@@ -337,45 +376,53 @@ int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
float penumbraFilterGainL;
float penumbraFilterGainR;
// variable gain calculation broken down by quadrent
if (bearingAngleToSource < -PI_OVER_TWO && bearingAngleToSource > -PI) {
// variable gain calculation broken down by quadrant
if (-bearingRelativeAngleToSource < -PI_OVER_TWO && -bearingRelativeAngleToSource > -PI) {
penumbraFilterGainL = TWO_OVER_PI *
(FILTER_GAIN_AT_0 - FILTER_GAIN_AT_180) * (bearingAngleToSource + PI_OVER_TWO) + FILTER_GAIN_AT_0;
(FILTER_GAIN_AT_0 - FILTER_GAIN_AT_180) * (-bearingRelativeAngleToSource + PI_OVER_TWO) + FILTER_GAIN_AT_0;
penumbraFilterGainR = TWO_OVER_PI *
(FILTER_GAIN_AT_90 - FILTER_GAIN_AT_180) * (bearingAngleToSource + PI_OVER_TWO) + FILTER_GAIN_AT_90;
} else if (bearingAngleToSource <= PI && bearingAngleToSource > PI_OVER_TWO) {
(FILTER_GAIN_AT_90 - FILTER_GAIN_AT_180) * (-bearingRelativeAngleToSource + PI_OVER_TWO) + FILTER_GAIN_AT_90;
} else if (-bearingRelativeAngleToSource <= PI && -bearingRelativeAngleToSource > PI_OVER_TWO) {
penumbraFilterGainL = TWO_OVER_PI *
(FILTER_GAIN_AT_180 - FILTER_GAIN_AT_90) * (bearingAngleToSource - PI) + FILTER_GAIN_AT_180;
(FILTER_GAIN_AT_180 - FILTER_GAIN_AT_90) * (-bearingRelativeAngleToSource - PI) + FILTER_GAIN_AT_180;
penumbraFilterGainR = TWO_OVER_PI *
(FILTER_GAIN_AT_180 - FILTER_GAIN_AT_0) * (bearingAngleToSource - PI) + FILTER_GAIN_AT_180;
} else if (bearingAngleToSource <= PI_OVER_TWO && bearingAngleToSource > 0) {
(FILTER_GAIN_AT_180 - FILTER_GAIN_AT_0) * (-bearingRelativeAngleToSource - PI) + FILTER_GAIN_AT_180;
} else if (-bearingRelativeAngleToSource <= PI_OVER_TWO && -bearingRelativeAngleToSource > 0) {
penumbraFilterGainL = TWO_OVER_PI *
(FILTER_GAIN_AT_90 - FILTER_GAIN_AT_0) * (bearingAngleToSource - PI_OVER_TWO) + FILTER_GAIN_AT_90;
(FILTER_GAIN_AT_90 - FILTER_GAIN_AT_0) * (-bearingRelativeAngleToSource - PI_OVER_TWO) + FILTER_GAIN_AT_90;
penumbraFilterGainR = FILTER_GAIN_AT_0;
} else {
penumbraFilterGainL = FILTER_GAIN_AT_0;
penumbraFilterGainR = TWO_OVER_PI *
(FILTER_GAIN_AT_0 - FILTER_GAIN_AT_90) * (bearingAngleToSource) + FILTER_GAIN_AT_0;
(FILTER_GAIN_AT_0 - FILTER_GAIN_AT_90) * (-bearingRelativeAngleToSource) + FILTER_GAIN_AT_0;
}
if (distanceBetween < RADIUS_OF_HEAD) {
// Diminish effect if source would be inside head
penumbraFilterGainL += (1.f - penumbraFilterGainL) * (1.f - distanceBetween / RADIUS_OF_HEAD);
penumbraFilterGainR += (1.f - penumbraFilterGainR) * (1.f - distanceBetween / RADIUS_OF_HEAD);
}
#if 0
qDebug() << "avatar="
<< listeningNodeStream
<< "gainL="
qDebug() << "gainL="
<< penumbraFilterGainL
<< "gainR="
<< penumbraFilterGainR
<< "angle="
<< bearingAngleToSource;
<< -bearingRelativeAngleToSource;
#endif
// set the gain on both filter channels
AudioFilterHSF1s& penumbraFilter = streamToAdd->getFilter();
penumbraFilter.setParameters(0, 0, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
penumbraFilter.setParameters(0, 1, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
penumbraFilter.render(_clientSamples, _clientSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2);
_penumbraFilter.reset();
_penumbraFilter.setParameters(0, 0, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
_penumbraFilter.setParameters(0, 1, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
_penumbraFilter.render(_preMixSamples, _preMixSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2);
}
// Actually mix the _preMixSamples into the _mixSamples here.
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
_mixSamples[s] = glm::clamp(_mixSamples[s] + _preMixSamples[s], MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}
return 1;
@@ -385,7 +432,8 @@ int AudioMixer::prepareMixForListeningNode(Node* node) {
AvatarAudioStream* nodeAudioStream = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioStream();
// zero out the client mix for this node
memset(_clientSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
memset(_preMixSamples, 0, sizeof(_preMixSamples));
memset(_mixSamples, 0, sizeof(_mixSamples));
// loop through all other nodes that have sufficient audio to mix
int streamsMixed = 0;
@@ -778,7 +826,7 @@ void AudioMixer::run() {
dataAt += sizeof(quint16);
// pack mixed audio samples
memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
memcpy(dataAt, _mixSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
} else {
// pack header


@@ -13,6 +13,10 @@
#define hifi_AudioMixer_h
#include <AABox.h>
#include <AudioFormat.h> // For AudioFilterHSF1s and _penumbraFilter
#include <AudioBuffer.h> // For AudioFilterHSF1s and _penumbraFilter
#include <AudioFilter.h> // For AudioFilterHSF1s and _penumbraFilter
#include <AudioFilterBank.h> // For AudioFilterHSF1s and _penumbraFilter
#include <AudioRingBuffer.h>
#include <ThreadedAssignment.h>
@@ -23,7 +27,6 @@ const int SAMPLE_PHASE_DELAY_AT_90 = 20;
const int READ_DATAGRAMS_STATS_WINDOW_SECONDS = 30;
/// Handles assignments of type AudioMixer - mixing streams of audio and re-distributing to various clients.
class AudioMixer : public ThreadedAssignment {
Q_OBJECT
@@ -48,11 +51,17 @@ private:
/// prepares and sends a mix to one Node
int prepareMixForListeningNode(Node* node);
// used on a per stream basis to run the filter on before mixing, large enough to handle the historical
// data from a phase delay as well as an entire network buffer
int16_t _preMixSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
// client samples capacity is larger than what will be sent to optimize mixing
// we are MMX adding 4 samples at a time so we need client samples to have an extra 4
int16_t _clientSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
int16_t _mixSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
AudioFilterHSF1s _penumbraFilter;
void perSecondActions();
QString getReadPendingDatagramsCallsPerSecondsStatsString() const;


@@ -19,28 +19,38 @@ AvatarAudioStream::AvatarAudioStream(bool isStereo, const InboundAudioStream::Se
}
int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
_shouldLoopbackForNode = (type == PacketTypeMicrophoneAudioWithEcho);
int readBytes = 0;
// read the channel flag
quint8 channelFlag = packetAfterSeqNum.at(readBytes);
bool isStereo = channelFlag == 1;
readBytes += sizeof(quint8);
if (type == PacketTypeSilentAudioFrame) {
const char* dataAt = packetAfterSeqNum.constData();
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
readBytes += sizeof(quint16);
numAudioSamples = (int)numSilentSamples;
// if isStereo value has changed, restart the ring buffer with new frame size
if (isStereo != _isStereo) {
_ringBuffer.resizeForFrameSize(isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
_isStereo = isStereo;
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
} else {
_shouldLoopbackForNode = (type == PacketTypeMicrophoneAudioWithEcho);
// read the channel flag
quint8 channelFlag = packetAfterSeqNum.at(readBytes);
bool isStereo = channelFlag == 1;
readBytes += sizeof(quint8);
// if isStereo value has changed, restart the ring buffer with new frame size
if (isStereo != _isStereo) {
_ringBuffer.resizeForFrameSize(isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
_isStereo = isStereo;
}
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
// calculate how many samples are in this packet
int numAudioBytes = packetAfterSeqNum.size() - readBytes;
numAudioSamples = numAudioBytes / sizeof(int16_t);
}
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
// calculate how many samples are in this packet
int numAudioBytes = packetAfterSeqNum.size() - readBytes;
numAudioSamples = numAudioBytes / sizeof(int16_t);
return readBytes;
}
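Read together with the Audio::handleAudioInput() change further below (which now writes
the head position and orientation into silent frames), the wire layout this parser
expects appears to be (field names descriptive, not from the source):

    silent frame: [quint16 numSilentSamples][positional data: position + orientation]
    audio frame:  [quint8 channelFlag][positional data][int16_t samples ...]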


@@ -17,7 +17,7 @@
<label for="<%- setting_id %>" class="col-sm-2 control-label"><%- setting.label %></label>
<div class="col-sm-10">
<% if (setting.type === "checkbox") { %>
<% var checked_box = (values[group_key] || {})[setting_key] || setting.default %>
<% var checked_box = _.has(values, group_key) ? values[group_key][setting_key] : setting.default %>
<input type="checkbox" id="<%- setting_id %>" <%- checked_box ? "checked" : "" %>>
<% } else { %>
<% if (setting.input_addon) { %>


@@ -72,6 +72,9 @@ DomainServer::DomainServer(int argc, char* argv[]) :
setupNodeListAndAssignments();
loadExistingSessionsFromSettings();
// check if we have the flag that enables dynamic IP
setupDynamicIPAddressUpdating();
}
}
@@ -162,6 +165,8 @@ bool DomainServer::optionallySetupOAuth() {
return true;
}
const QString DOMAIN_CONFIG_ID_KEY = "id";
void DomainServer::setupNodeListAndAssignments(const QUuid& sessionUUID) {
const QString CUSTOM_PORT_OPTION = "port";
@@ -190,8 +195,6 @@ void DomainServer::setupNodeListAndAssignments(const QUuid& sessionUUID) {
LimitedNodeList* nodeList = LimitedNodeList::createInstance(domainServerPort, domainServerDTLSPort);
const QString DOMAIN_CONFIG_ID_KEY = "id";
// set our LimitedNodeList UUID to match the UUID from our config
// nodes will currently use this to add resources to data-web that relate to our domain
nodeList->setSessionUUID(_argumentVariantMap.value(DOMAIN_CONFIG_ID_KEY).toString());
@@ -209,27 +212,29 @@ void DomainServer::setupNodeListAndAssignments(const QUuid& sessionUUID) {
addStaticAssignmentsToQueue();
}
bool DomainServer::optionallySetupAssignmentPayment() {
// check if we have a username and password set via env
const QString PAY_FOR_ASSIGNMENTS_OPTION = "pay-for-assignments";
const QString HIFI_USERNAME_ENV_KEY = "DOMAIN_SERVER_USERNAME";
const QString HIFI_PASSWORD_ENV_KEY = "DOMAIN_SERVER_PASSWORD";
if (_argumentVariantMap.contains(PAY_FOR_ASSIGNMENTS_OPTION) &&
_argumentVariantMap.value(PAY_FOR_ASSIGNMENTS_OPTION).toBool()) {
if (!_oauthProviderURL.isEmpty()) {
const QString HIFI_USERNAME_ENV_KEY = "DOMAIN_SERVER_USERNAME";
const QString HIFI_PASSWORD_ENV_KEY = "DOMAIN_SERVER_PASSWORD";
bool DomainServer::hasOAuthProviderAndAuthInformation() {
if (!_oauthProviderURL.isEmpty()) {
static bool hasAttemptedAuthWithOAuthProvider = false;
if (!hasAttemptedAuthWithOAuthProvider) {
AccountManager& accountManager = AccountManager::getInstance();
accountManager.setAuthURL(_oauthProviderURL);
if (!accountManager.hasValidAccessToken()) {
// we don't have a valid access token so we need to get one
// check if we have a username and password set via env
QString username = QProcessEnvironment::systemEnvironment().value(HIFI_USERNAME_ENV_KEY);
QString password = QProcessEnvironment::systemEnvironment().value(HIFI_PASSWORD_ENV_KEY);
if (!username.isEmpty() && !password.isEmpty()) {
accountManager.requestAccessToken(username, password);
// connect to loginFailed signal from AccountManager so we can quit if that is the case
connect(&accountManager, &AccountManager::loginFailed, this, &DomainServer::loginFailed);
} else {
@@ -238,34 +243,92 @@ bool DomainServer::optionallySetupAssignmentPayment() {
return false;
}
}
qDebug() << "Assignments will be paid for via" << qPrintable(_oauthProviderURL.toString());
// assume that the fact we are authing against HF data server means we will pay for assignments
// setup a timer to send transactions to pay assigned nodes every 30 seconds
QTimer* creditSetupTimer = new QTimer(this);
connect(creditSetupTimer, &QTimer::timeout, this, &DomainServer::setupPendingAssignmentCredits);
const qint64 CREDIT_CHECK_INTERVAL_MSECS = 5 * 1000;
creditSetupTimer->start(CREDIT_CHECK_INTERVAL_MSECS);
QTimer* nodePaymentTimer = new QTimer(this);
connect(nodePaymentTimer, &QTimer::timeout, this, &DomainServer::sendPendingTransactionsToServer);
const qint64 TRANSACTION_SEND_INTERVAL_MSECS = 30 * 1000;
nodePaymentTimer->start(TRANSACTION_SEND_INTERVAL_MSECS);
} else {
qDebug() << "Missing OAuth provider URL, but assigned node payment was enabled. domain-server will now quit.";
QMetaObject::invokeMethod(this, "quit", Qt::QueuedConnection);
return false;
hasAttemptedAuthWithOAuthProvider = true;
}
return true;
} else {
qDebug() << "Missing OAuth provider URL, but a domain-server feature was required that requires authentication." <<
"domain-server will now quit.";
QMetaObject::invokeMethod(this, "quit", Qt::QueuedConnection);
return false;
}
}
bool DomainServer::optionallySetupAssignmentPayment() {
const QString PAY_FOR_ASSIGNMENTS_OPTION = "pay-for-assignments";
if (_argumentVariantMap.contains(PAY_FOR_ASSIGNMENTS_OPTION) &&
_argumentVariantMap.value(PAY_FOR_ASSIGNMENTS_OPTION).toBool() &&
hasOAuthProviderAndAuthInformation()) {
qDebug() << "Assignments will be paid for via" << qPrintable(_oauthProviderURL.toString());
// assume that the fact we are authing against HF data server means we will pay for assignments
// setup a timer to send transactions to pay assigned nodes every 30 seconds
QTimer* creditSetupTimer = new QTimer(this);
connect(creditSetupTimer, &QTimer::timeout, this, &DomainServer::setupPendingAssignmentCredits);
const qint64 CREDIT_CHECK_INTERVAL_MSECS = 5 * 1000;
creditSetupTimer->start(CREDIT_CHECK_INTERVAL_MSECS);
QTimer* nodePaymentTimer = new QTimer(this);
connect(nodePaymentTimer, &QTimer::timeout, this, &DomainServer::sendPendingTransactionsToServer);
const qint64 TRANSACTION_SEND_INTERVAL_MSECS = 30 * 1000;
nodePaymentTimer->start(TRANSACTION_SEND_INTERVAL_MSECS);
}
return true;
}
void DomainServer::setupDynamicIPAddressUpdating() {
const QString ENABLE_DYNAMIC_IP_UPDATING_OPTION = "update-ip";
if (_argumentVariantMap.contains(ENABLE_DYNAMIC_IP_UPDATING_OPTION) &&
_argumentVariantMap.value(ENABLE_DYNAMIC_IP_UPDATING_OPTION).toBool() &&
hasOAuthProviderAndAuthInformation()) {
LimitedNodeList* nodeList = LimitedNodeList::getInstance();
const QUuid& domainID = nodeList->getSessionUUID();
if (!domainID.isNull()) {
qDebug() << "domain-server IP address will be updated for domain with ID"
<< uuidStringWithoutCurlyBraces(domainID) << "via" << _oauthProviderURL.toString();
const int STUN_IP_ADDRESS_CHECK_INTERVAL_MSECS = 30 * 1000;
// setup our timer to check our IP via stun every 30 seconds
QTimer* dynamicIPTimer = new QTimer(this);
connect(dynamicIPTimer, &QTimer::timeout, this, &DomainServer::requestCurrentIPAddressViaSTUN);
dynamicIPTimer->start(STUN_IP_ADDRESS_CHECK_INTERVAL_MSECS);
// send public socket changes to the data server so nodes can find us at our new IP
connect(nodeList, &LimitedNodeList::publicSockAddrChanged, this, &DomainServer::sendNewPublicSocketToDataServer);
if (!AccountManager::getInstance().hasValidAccessToken()) {
// we don't have an access token to talk to data-web yet, so
// check our IP address as soon as we get an AccountManager access token
connect(&AccountManager::getInstance(), &AccountManager::loginComplete,
this, &DomainServer::requestCurrentIPAddressViaSTUN);
} else {
// access token good to go, attempt to update our IP now
requestCurrentIPAddressViaSTUN();
}
} else {
qDebug() << "Cannot enable dynamic domain-server IP address updating without a domain ID."
<< "Please add an id to your config.json or pass it with the command line argument --id.";
qDebug() << "Failed dynamic IP address update setup. domain-server will now quit.";
QMetaObject::invokeMethod(this, "quit", Qt::QueuedConnection);
}
}
}
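// A hypothetical invocation that exercises this path, given the "update-ip" option and
// "id" config key referenced above:
//
//     domain-server --update-ip --id <domain-uuid>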
void DomainServer::loginFailed() {
qDebug() << "Login to data server has failed. domain-server will now quit";
QMetaObject::invokeMethod(this, "quit", Qt::QueuedConnection);
@@ -836,37 +899,68 @@ void DomainServer::transactionJSONCallback(const QJsonObject& data) {
}
}
void DomainServer::requestCurrentIPAddressViaSTUN() {
LimitedNodeList::getInstance()->sendSTUNRequest();
}
void DomainServer::sendNewPublicSocketToDataServer(const HifiSockAddr& newPublicSockAddr) {
const QString DOMAIN_UPDATE = "/api/v1/domains/%1";
const QUuid& domainID = LimitedNodeList::getInstance()->getSessionUUID();
// setup the domain object to send to the data server
const QString DOMAIN_JSON_OBJECT = "{\"domain\":{\"network_address\":\"%1\"}}";
QString domainUpdateJSON = DOMAIN_JSON_OBJECT.arg(newPublicSockAddr.getAddress().toString());
AccountManager::getInstance().authenticatedRequest(DOMAIN_UPDATE.arg(uuidStringWithoutCurlyBraces(domainID)),
QNetworkAccessManager::PutOperation,
JSONCallbackParameters(),
domainUpdateJSON.toUtf8());
}
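// Illustrative result: for a new public address of 203.0.113.7 this issues
// PUT /api/v1/domains/<domain-id> with body {"domain":{"network_address":"203.0.113.7"}}.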
void DomainServer::processDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
LimitedNodeList* nodeList = LimitedNodeList::getInstance();
if (nodeList->packetVersionAndHashMatch(receivedPacket)) {
PacketType requestType = packetTypeForPacket(receivedPacket);
if (requestType == PacketTypeDomainConnectRequest) {
handleConnectRequest(receivedPacket, senderSockAddr);
} else if (requestType == PacketTypeDomainListRequest) {
QUuid nodeUUID = uuidFromPacketHeader(receivedPacket);
if (!nodeUUID.isNull() && nodeList->nodeWithUUID(nodeUUID)) {
NodeType_t throwawayNodeType;
HifiSockAddr nodePublicAddress, nodeLocalAddress;
int numNodeInfoBytes = parseNodeDataFromByteArray(throwawayNodeType, nodePublicAddress, nodeLocalAddress,
receivedPacket, senderSockAddr);
SharedNodePointer checkInNode = nodeList->updateSocketsForNode(nodeUUID, nodePublicAddress, nodeLocalAddress);
// update last receive to now
quint64 timeNow = usecTimestampNow();
checkInNode->setLastHeardMicrostamp(timeNow);
sendDomainListToNode(checkInNode, senderSockAddr, nodeInterestListFromPacket(receivedPacket, numNodeInfoBytes));
switch (requestType) {
case PacketTypeDomainConnectRequest:
handleConnectRequest(receivedPacket, senderSockAddr);
break;
case PacketTypeDomainListRequest: {
QUuid nodeUUID = uuidFromPacketHeader(receivedPacket);
if (!nodeUUID.isNull() && nodeList->nodeWithUUID(nodeUUID)) {
NodeType_t throwawayNodeType;
HifiSockAddr nodePublicAddress, nodeLocalAddress;
int numNodeInfoBytes = parseNodeDataFromByteArray(throwawayNodeType, nodePublicAddress, nodeLocalAddress,
receivedPacket, senderSockAddr);
SharedNodePointer checkInNode = nodeList->updateSocketsForNode(nodeUUID, nodePublicAddress, nodeLocalAddress);
// update last receive to now
quint64 timeNow = usecTimestampNow();
checkInNode->setLastHeardMicrostamp(timeNow);
sendDomainListToNode(checkInNode, senderSockAddr, nodeInterestListFromPacket(receivedPacket, numNodeInfoBytes));
}
break;
}
} else if (requestType == PacketTypeNodeJsonStats) {
SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
if (matchingNode) {
reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->parseJSONStatsPacket(receivedPacket);
case PacketTypeNodeJsonStats: {
SharedNodePointer matchingNode = nodeList->sendingNodeForPacket(receivedPacket);
if (matchingNode) {
reinterpret_cast<DomainServerNodeData*>(matchingNode->getLinkedData())->parseJSONStatsPacket(receivedPacket);
}
break;
}
case PacketTypeStunResponse:
nodeList->processSTUNResponse(receivedPacket);
break;
default:
break;
}
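// The new PacketTypeStunResponse case feeds LimitedNodeList::processSTUNResponse(),
// which presumably fires publicSockAddrChanged and so drives the dynamic IP update
// path set up in setupDynamicIPAddressUpdating().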
}
}


@@ -58,11 +58,16 @@ private slots:
void readAvailableDatagrams();
void setupPendingAssignmentCredits();
void sendPendingTransactionsToServer();
void requestCurrentIPAddressViaSTUN();
void sendNewPublicSocketToDataServer(const HifiSockAddr& newPublicSockAddr);
private:
void setupNodeListAndAssignments(const QUuid& sessionUUID = QUuid::createUuid());
bool optionallySetupOAuth();
bool optionallyReadX509KeyAndCertificate();
bool hasOAuthProviderAndAuthInformation();
bool optionallySetupAssignmentPayment();
void setupDynamicIPAddressUpdating();
void processDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr);

examples/leapHands.js (new file, 395 lines)

@@ -0,0 +1,395 @@
//
// leapHands.js
// examples
//
// Created by David Rowe on 8 Sep 2014.
// Copyright 2014 High Fidelity, Inc.
//
// This is an example script that uses the Leap Motion to make the avatar's hands replicate the user's hand actions.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var leapHands = (function () {
var hands,
wrists,
NUM_HANDS = 2, // 0 = left; 1 = right
fingers,
NUM_FINGERS = 5, // 0 = thumb; ...; 4 = pinky
THUMB = 0,
NUM_FINGER_JOINTS = 3, // 0 = metacarpal(hand)-proximal(finger) joint; ...; 2 = intermediate-distal(tip) joint
MAX_HAND_INACTIVE_COUNT = 20,
calibrationStatus,
UNCALIBRATED = 0,
CALIBRATING = 1,
CALIBRATED = 2,
CALIBRATION_TIME = 1000, // milliseconds
PI = 3.141593,
isWindows;
function printSkeletonJointNames() {
var jointNames,
i;
print(MyAvatar.skeletonModelURL);
print("Skeleton joint names ...");
jointNames = MyAvatar.getJointNames();
for (i = 0; i < jointNames.length; i += 1) {
print(i + ": " + jointNames[i]);
}
print("... skeleton joint names");
/*
http://public.highfidelity.io/models/skeletons/ron_standing.fst
Skeleton joint names ...
0: Hips
1: RightUpLeg
2: RightLeg
3: RightFoot
4: RightToeBase
5: RightToe_End
6: LeftUpLeg
7: LeftLeg
8: LeftFoot
9: LeftToeBase
10: LeftToe_End
11: Spine
12: Spine1
13: Spine2
14: RightShoulder
15: RightArm
16: RightForeArm
17: RightHand
18: RightHandPinky1
19: RightHandPinky2
20: RightHandPinky3
21: RightHandPinky4
22: RightHandRing1
23: RightHandRing2
24: RightHandRing3
25: RightHandRing4
26: RightHandMiddle1
27: RightHandMiddle2
28: RightHandMiddle3
29: RightHandMiddle4
30: RightHandIndex1
31: RightHandIndex2
32: RightHandIndex3
33: RightHandIndex4
34: RightHandThumb1
35: RightHandThumb2
36: RightHandThumb3
37: RightHandThumb4
38: LeftShoulder
39: LeftArm
40: LeftForeArm
41: LeftHand
42: LeftHandPinky1
43: LeftHandPinky2
44: LeftHandPinky3
45: LeftHandPinky4
46: LeftHandRing1
47: LeftHandRing2
48: LeftHandRing3
49: LeftHandRing4
50: LeftHandMiddle1
51: LeftHandMiddle2
52: LeftHandMiddle3
53: LeftHandMiddle4
54: LeftHandIndex1
55: LeftHandIndex2
56: LeftHandIndex3
57: LeftHandIndex4
58: LeftHandThumb1
59: LeftHandThumb2
60: LeftHandThumb3
61: LeftHandThumb4
62: Neck
63: Head
64: HeadTop_End
65: body
... skeleton joint names
*/
}
function finishCalibration() {
var avatarPosition,
avatarOrientation,
avatarHandPosition,
leapHandHeight,
h;
if (hands[0].controller.isActive() && hands[1].controller.isActive()) {
leapHandHeight = (hands[0].controller.getAbsTranslation().y + hands[1].controller.getAbsTranslation().y) / 2.0;
// TODO: Temporary detection of Windows to work around Leap Controller problem.
isWindows = (hands[1].controller.getAbsRotation().z > (0.25 * PI));
} else {
calibrationStatus = UNCALIBRATED;
return;
}
avatarPosition = MyAvatar.position;
avatarOrientation = MyAvatar.orientation;
for (h = 0; h < NUM_HANDS; h += 1) {
avatarHandPosition = MyAvatar.getJointPosition(hands[h].jointName);
hands[h].zeroPosition = {
x: avatarHandPosition.x - avatarPosition.x,
y: avatarHandPosition.y - avatarPosition.y,
z: avatarPosition.z - avatarHandPosition.z
};
hands[h].zeroPosition = Vec3.multiplyQbyV(MyAvatar.orientation, hands[h].zeroPosition);
hands[h].zeroPosition.y = hands[h].zeroPosition.y - leapHandHeight;
}
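// In effect, zeroPosition captures each avatar hand's rest offset from the avatar
// origin (rotated into avatar space, z flipped, and lowered by the average Leap hand
// height) so later Leap translations can be applied relative to it.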
MyAvatar.clearJointData("LeftHand");
MyAvatar.clearJointData("LeftForeArm");
MyAvatar.clearJointData("LeftArm");
MyAvatar.clearJointData("RightHand");
MyAvatar.clearJointData("RightForeArm");
MyAvatar.clearJointData("RightArm");
calibrationStatus = CALIBRATED;
print("Leap Motion: Calibrated");
}
function calibrate() {
calibrationStatus = CALIBRATING;
// Set avatar arms vertical, forearms horizontal, as "zero" position for calibration
MyAvatar.setJointData("LeftArm", Quat.fromPitchYawRollDegrees(90.0, 0.0, -90.0));
MyAvatar.setJointData("LeftForeArm", Quat.fromPitchYawRollDegrees(90.0, 0.0, 180.0));
MyAvatar.setJointData("LeftHand", Quat.fromPitchYawRollRadians(0.0, 0.0, 0.0));
MyAvatar.setJointData("RightArm", Quat.fromPitchYawRollDegrees(90.0, 0.0, 90.0));
MyAvatar.setJointData("RightForeArm", Quat.fromPitchYawRollDegrees(90.0, 0.0, 180.0));
MyAvatar.setJointData("RightHand", Quat.fromPitchYawRollRadians(0.0, 0.0, 0.0));
// Wait for arms to assume their positions before calculating
Script.setTimeout(finishCalibration, CALIBRATION_TIME);
}
function checkCalibration() {
if (calibrationStatus === CALIBRATED) {
return true;
}
if (calibrationStatus !== CALIBRATING) {
calibrate();
}
return false;
}
function setUp() {
calibrationStatus = UNCALIBRATED;
// TODO: Leap Motion controller joint naming doesn't match up with skeleton joint naming; numbers are out by 1.
hands = [
{
jointName: "LeftHand",
controller: Controller.createInputController("Spatial", "joint_L_hand"),
inactiveCount: 0
},
{
jointName: "RightHand",
controller: Controller.createInputController("Spatial", "joint_R_hand"),
inactiveCount: 0
}
];
wrists = [
{ controller: Controller.createInputController("Spatial", "joint_L_wrist") },
{ controller: Controller.createInputController("Spatial", "joint_R_wrist") }
];
fingers = [{}, {}];
fingers[0] = [
[
{ jointName: "LeftHandThumb1", controller: Controller.createInputController("Spatial", "joint_L_thumb2") },
{ jointName: "LeftHandThumb2", controller: Controller.createInputController("Spatial", "joint_L_thumb3") },
{ jointName: "LeftHandThumb3", controller: Controller.createInputController("Spatial", "joint_L_thumb4") }
],
[
{ jointName: "LeftHandIndex1", controller: Controller.createInputController("Spatial", "joint_L_index2") },
{ jointName: "LeftHandIndex2", controller: Controller.createInputController("Spatial", "joint_L_index3") },
{ jointName: "LeftHandIndex3", controller: Controller.createInputController("Spatial", "joint_L_index4") }
],
[
{ jointName: "LeftHandMiddle1", controller: Controller.createInputController("Spatial", "joint_L_middle2") },
{ jointName: "LeftHandMiddle2", controller: Controller.createInputController("Spatial", "joint_L_middle3") },
{ jointName: "LeftHandMiddle3", controller: Controller.createInputController("Spatial", "joint_L_middle4") }
],
[
{ jointName: "LeftHandRing1", controller: Controller.createInputController("Spatial", "joint_L_ring2") },
{ jointName: "LeftHandRing2", controller: Controller.createInputController("Spatial", "joint_L_ring3") },
{ jointName: "LeftHandRing3", controller: Controller.createInputController("Spatial", "joint_L_ring4") }
],
[
{ jointName: "LeftHandPinky1", controller: Controller.createInputController("Spatial", "joint_L_pinky2") },
{ jointName: "LeftHandPinky2", controller: Controller.createInputController("Spatial", "joint_L_pinky3") },
{ jointName: "LeftHandPinky3", controller: Controller.createInputController("Spatial", "joint_L_pinky4") }
]
];
fingers[1] = [
[
{ jointName: "RightHandThumb1", controller: Controller.createInputController("Spatial", "joint_R_thumb2") },
{ jointName: "RightHandThumb2", controller: Controller.createInputController("Spatial", "joint_R_thumb3") },
{ jointName: "RightHandThumb3", controller: Controller.createInputController("Spatial", "joint_R_thumb4") }
],
[
{ jointName: "RightHandIndex1", controller: Controller.createInputController("Spatial", "joint_R_index2") },
{ jointName: "RightHandIndex2", controller: Controller.createInputController("Spatial", "joint_R_index3") },
{ jointName: "RightHandIndex3", controller: Controller.createInputController("Spatial", "joint_R_index4") }
],
[
{ jointName: "RightHandMiddle1", controller: Controller.createInputController("Spatial", "joint_R_middle2") },
{ jointName: "RightHandMiddle2", controller: Controller.createInputController("Spatial", "joint_R_middle3") },
{ jointName: "RightHandMiddle3", controller: Controller.createInputController("Spatial", "joint_R_middle4") }
],
[
{ jointName: "RightHandRing1", controller: Controller.createInputController("Spatial", "joint_R_ring2") },
{ jointName: "RightHandRing2", controller: Controller.createInputController("Spatial", "joint_R_ring3") },
{ jointName: "RightHandRing3", controller: Controller.createInputController("Spatial", "joint_R_ring4") }
],
[
{ jointName: "RightHandPinky1", controller: Controller.createInputController("Spatial", "joint_R_pinky2") },
{ jointName: "RightHandPinky2", controller: Controller.createInputController("Spatial", "joint_R_pinky3") },
{ jointName: "RightHandPinky3", controller: Controller.createInputController("Spatial", "joint_R_pinky4") }
]
];
}
function moveHands() {
var h,
i,
j,
side,
handOffset,
handRoll,
handPitch,
handYaw,
handRotation,
wristAbsRotation,
locRotation;
for (h = 0; h < NUM_HANDS; h += 1) {
side = h === 0 ? -1.0 : 1.0;
if (hands[h].controller.isActive()) {
// Calibrate when and if a controller is first active.
if (!checkCalibration()) {
return;
}
// Hand position ...
handOffset = hands[h].controller.getAbsTranslation();
handOffset = {
x: -handOffset.x,
y: hands[h].zeroPosition.y + handOffset.y,
z: hands[h].zeroPosition.z - handOffset.z
};
// TODO: 2.0* scale factor should not be necessary; Leap Motion controller code needs investigating.
handRoll = 2.0 * -hands[h].controller.getAbsRotation().z;
wristAbsRotation = wrists[h].controller.getAbsRotation();
handPitch = 2.0 * -wristAbsRotation.x;
handYaw = 2.0 * wristAbsRotation.y;
// TODO: Leap Motion controller's right-hand roll calculation only works if physical hand is upside down.
// Approximate fix is to add a fudge factor.
if (h === 1 && isWindows) {
handRoll = handRoll + 0.6 * PI;
}
// Hand position and orientation ...
if (h === 0) {
handRotation = Quat.multiply(Quat.angleAxis(-90.0, { x: 0, y: 1, z: 0 }),
Quat.fromVec3Radians({ x: handRoll, y: handYaw, z: -handPitch }));
} else {
handRotation = Quat.multiply(Quat.angleAxis(90.0, { x: 0, y: 1, z: 0 }),
Quat.fromVec3Radians({ x: -handRoll, y: handYaw, z: handPitch }));
}
MyAvatar.setJointModelPositionAndOrientation(hands[h].jointName, handOffset, handRotation, true);
// Finger joints ...
// TODO: 2.0 * scale factors should not be necessary; Leap Motion controller code needs investigating.
for (i = 0; i < NUM_FINGERS; i += 1) {
for (j = 0; j < NUM_FINGER_JOINTS; j += 1) {
if (fingers[h][i][j].controller !== null) {
locRotation = fingers[h][i][j].controller.getLocRotation();
if (i === THUMB) {
MyAvatar.setJointData(fingers[h][i][j].jointName,
Quat.fromPitchYawRollRadians(2.0 * side * locRotation.y, 2.0 * -locRotation.z,
2.0 * side * -locRotation.x));
} else {
MyAvatar.setJointData(fingers[h][i][j].jointName,
Quat.fromPitchYawRollRadians(2.0 * -locRotation.x, 0.0, 2.0 * -locRotation.y));
}
}
}
}
hands[h].inactiveCount = 0;
} else {
hands[h].inactiveCount += 1;
if (hands[h].inactiveCount === MAX_HAND_INACTIVE_COUNT) {
if (h === 0) {
MyAvatar.clearJointData("LeftHand");
MyAvatar.clearJointData("LeftForeArm");
MyAvatar.clearJointData("LeftArm");
} else {
MyAvatar.clearJointData("RightHand");
MyAvatar.clearJointData("RightForeArm");
MyAvatar.clearJointData("RightArm");
}
}
}
}
}
function tearDown() {
var h,
i,
j;
for (h = 0; h < NUM_HANDS; h += 1) {
Controller.releaseInputController(hands[h].controller);
Controller.releaseInputController(wrists[h].controller);
for (i = 0; i < NUM_FINGERS; i += 1) {
for (j = 0; j < NUM_FINGER_JOINTS; j += 1) {
if (fingers[h][i][j].controller !== null) {
Controller.releaseInputController(fingers[h][i][j].controller);
}
}
}
}
}
return {
printSkeletonJointNames: printSkeletonJointNames,
setUp : setUp,
moveHands : moveHands,
tearDown : tearDown
};
}());
//leapHands.printSkeletonJointNames();
leapHands.setUp();
Script.update.connect(leapHands.moveHands);
Script.scriptEnding.connect(leapHands.tearDown);


@@ -5,39 +5,51 @@
// Created by David Rowe on 5/29/14.
// Copyright 2014 High Fidelity, Inc.
//
// This example script plays a sound in a continuous loop.
// This example script plays a sound in a continuous loop, and creates a red sphere in front of you at the origin of the sound.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// A few sample files you may want to try:
var sound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Guitars/Guitar+-+Nylon+A.raw");
//var sound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/220Sine.wav");
//var sound = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Cocktail+Party+Snippets/Bandcamp.wav");
var soundPlaying = false;
var options = new AudioInjectionOptions();
options.position = Vec3.sum(Camera.getPosition(), Quat.getFront(MyAvatar.orientation));
options.volume = 0.5;
options.loop = true;
var playing = false;
var ball = false;
function keyPressEvent(event) {
if (event.text === "1") {
if (!Audio.isInjectorPlaying(soundPlaying)) {
var options = new AudioInjectionOptions();
options.position = MyAvatar.position;
options.volume = 0.5;
options.loop = true;
soundPlaying = Audio.playSound(sound, options);
print("Started sound loop");
} else {
Audio.stopInjector(soundPlaying);
print("Stopped sound loop");
}
function maybePlaySound(deltaTime) {
if (sound.downloaded) {
var properties = {
type: "Sphere",
position: options.position,
dimensions: { x: 0.2, y: 0.2, z: 0.2 },
color: { red: 200, green: 0, blue: 0 }
};
ball = Entities.addEntity(properties);
soundPlaying = Audio.playSound(sound, options);
print("Started sound looping.");
Script.update.disconnect(maybePlaySound);
}
}
function scriptEnding() {
if (Audio.isInjectorPlaying(soundPlaying)) {
Audio.stopInjector(soundPlaying);
print("Stopped sound loop");
Entities.deleteEntity(ball);
print("Stopped sound.");
}
}
// Connect a callback that happens every frame
Script.scriptEnding.connect(scriptEnding);
Controller.keyPressEvent.connect(keyPressEvent);
Script.update.connect(maybePlaySound);


@@ -48,6 +48,6 @@ void main(void) {
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}
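// Note on the change above: vec4(base.rgb, gl_FrontMaterial.diffuse.a) keeps the
// material's own diffuse alpha in the fragment output instead of the lighting-modulated
// base alpha; the same substitution is applied in each model shader below.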


@@ -66,6 +66,6 @@ void main(void) {
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}


@@ -79,6 +79,6 @@ void main(void) {
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}


@@ -82,6 +82,7 @@ void main(void) {
normalize(vec4(interpolatedPosition.xyz, 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb *
texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}


@@ -69,6 +69,7 @@ void main(void) {
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb *
texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}


@@ -60,6 +60,6 @@ void main(void) {
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}


@@ -63,6 +63,7 @@ void main(void) {
normalize(vec4(interpolatedPosition.xyz, 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb *
texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}


@@ -58,6 +58,6 @@ void main(void) {
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}


@@ -70,6 +70,6 @@ void main(void) {
normalize(vec4(vec3(interpolatedPosition), 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) +
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb, 0.0);
}


@@ -73,6 +73,7 @@ void main(void) {
normalize(vec4(interpolatedPosition.xyz, 0.0))), viewNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb *
texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}


@@ -61,6 +61,7 @@ void main(void) {
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb *
texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}


@@ -51,7 +51,7 @@ void main(void) {
normalizedNormal));
// modulate texture by base color and add specular contribution
gl_FragColor = base * texture2D(diffuseMap, gl_TexCoord[0].st) + vec4(pow(specular, gl_FrontMaterial.shininess) *
gl_FrontLightProduct[0].specular.rgb * texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
gl_FragColor = vec4(base.rgb, gl_FrontMaterial.diffuse.a) * texture2D(diffuseMap, gl_TexCoord[0].st) +
vec4(pow(specular, gl_FrontMaterial.shininess) * gl_FrontLightProduct[0].specular.rgb *
texture2D(specularMap, gl_TexCoord[0].st).rgb, 0.0);
}

View file

@@ -288,6 +288,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
// set the account manager's root URL and trigger a login request if we don't have the access token
accountManager.setAuthURL(DEFAULT_NODE_AUTH_URL);
UserActivityLogger::getInstance().launch(applicationVersion());
// grab the location manager instance early so it lives in our thread
LocationManager::getInstance();
// once the event loop has started, check and signal for an access token
QMetaObject::invokeMethod(&accountManager, "checkAndSignalForAccessToken", Qt::QueuedConnection);
@@ -927,6 +930,13 @@ void Application::keyPressEvent(QKeyEvent* event) {
}
break;
case Qt::Key_N:
if (isMeta) {
Menu::getInstance()->triggerOption(MenuOption::NameLocation);
}
break;
case Qt::Key_Up:
if (_myCamera.getMode() == CAMERA_MODE_MIRROR) {

View file

@@ -753,6 +753,15 @@ void Audio::handleAudioInput() {
quint16 numSilentSamples = numNetworkSamples;
memcpy(currentPacketPtr, &numSilentSamples, sizeof(quint16));
currentPacketPtr += sizeof(quint16);
// memcpy the position (three floats)
memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
currentPacketPtr += (sizeof(headPosition));
// memcpy our orientation
memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
currentPacketPtr += sizeof(headOrientation);
} else {
// set the mono/stereo byte
*currentPacketPtr++ = isStereo;
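With this change a silent audio frame carries the sample count followed by the sender's head pose, so the mixer can keep positional attenuation up to date while no audio is flowing. A minimal sketch of the resulting payload layout, with illustrative field names (the real packet is written field by field with memcpy, not through a struct):

    // Hypothetical layout of the PacketTypeSilentAudioFrame payload after this change;
    // field order mirrors the memcpy sequence above, names are for illustration only.
    #include <cstdint>
    #include <glm/glm.hpp>
    #include <glm/gtc/quaternion.hpp>

    #pragma pack(push, 1)
    struct SilentAudioFramePayload {
        uint16_t numSilentSamples;  // how many samples of silence this frame stands in for
        glm::vec3 headPosition;     // three floats copied from &headPosition
        glm::quat headOrientation;  // four floats copied from &headOrientation
    };
    #pragma pack(pop)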

View file

@@ -332,9 +332,9 @@ Menu::Menu() :
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersVoxelNodes,
Qt::CTRL | Qt::SHIFT | Qt::Key_1, false,
&nodeBounds, SLOT(setShowVoxelNodes(bool)));
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersModelNodes,
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersEntityNodes,
Qt::CTRL | Qt::SHIFT | Qt::Key_2, false,
&nodeBounds, SLOT(setShowModelNodes(bool)));
&nodeBounds, SLOT(setShowEntityNodes(bool)));
addCheckableActionToQMenuAndActionHash(nodeBordersMenu, MenuOption::ShowBordersParticleNodes,
Qt::CTRL | Qt::SHIFT | Qt::Key_3, false,
&nodeBounds, SLOT(setShowParticleNodes(bool)));

View file

@@ -441,7 +441,7 @@ namespace MenuOption {
const QString ScriptedMotorControl = "Enable Scripted Motor Control";
const QString SettingsExport = "Export Settings";
const QString SettingsImport = "Import Settings";
const QString ShowBordersModelNodes = "Show Model Nodes";
const QString ShowBordersEntityNodes = "Show Entity Nodes";
const QString ShowBordersParticleNodes = "Show Particle Nodes";
const QString ShowBordersVoxelNodes = "Show Voxel Nodes";
const QString ShowIKConstraints = "Show IK Constraints";

View file

@@ -42,17 +42,15 @@ ScriptsModel::ScriptsModel(QObject* parent) :
_localDirectory(),
_fsWatcher(),
_localFiles(),
_remoteFiles() {
QString scriptPath = Menu::getInstance()->getScriptsLocation();
_localDirectory.setPath(scriptPath);
_remoteFiles()
{
_localDirectory.setFilter(QDir::Files | QDir::Readable);
_localDirectory.setNameFilters(QStringList("*.js"));
_fsWatcher.addPath(_localDirectory.absolutePath());
updateScriptsLocation(Menu::getInstance()->getScriptsLocation());
connect(&_fsWatcher, &QFileSystemWatcher::directoryChanged, this, &ScriptsModel::reloadLocalFiles);
connect(Menu::getInstance(), &Menu::scriptLocationChanged, this, &ScriptsModel::updateScriptsLocation);
reloadLocalFiles();
@@ -88,8 +86,13 @@ int ScriptsModel::rowCount(const QModelIndex& parent) const {
void ScriptsModel::updateScriptsLocation(const QString& newPath) {
_fsWatcher.removePath(_localDirectory.absolutePath());
_localDirectory.setPath(newPath);
_fsWatcher.addPath(_localDirectory.absolutePath());
if (!_localDirectory.absolutePath().isEmpty()) {
_fsWatcher.addPath(_localDirectory.absolutePath());
}
reloadLocalFiles();
}

View file

@@ -830,6 +830,28 @@ glm::quat Avatar::getJointCombinedRotation(const QString& name) const {
return rotation;
}
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
void Avatar::setJointModelPositionAndOrientation(int index, glm::vec3 position, const glm::quat& rotation) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "setJointModelPositionAndOrientation",
Qt::BlockingQueuedConnection, Q_ARG(const int, index), Q_ARG(const glm::vec3, position),
Q_ARG(const glm::quat&, rotation));
} else {
_skeletonModel.inverseKinematics(index, position, rotation, SCRIPT_PRIORITY);
}
}
void Avatar::setJointModelPositionAndOrientation(const QString& name, glm::vec3 position, const glm::quat& rotation) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(const_cast<Avatar*>(this), "setJointModelPositionAndOrientation",
Qt::BlockingQueuedConnection, Q_ARG(const QString&, name), Q_ARG(const glm::vec3, position),
Q_ARG(const glm::quat&, rotation));
} else {
_skeletonModel.inverseKinematics(getJointIndex(name), position, rotation, SCRIPT_PRIORITY);
}
}
void Avatar::scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const {
// Scale a world-space vector as if it were relative to the position
positionToScale = _position + _scale * (positionToScale - _position);
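Both setJointModelPositionAndOrientation overloads above follow the same Qt idiom: if the caller is not on the object's owning thread, re-invoke the method there over a blocking queued connection and return; otherwise do the work directly. A generic sketch of that guard (MyObject and doWork are illustrative; the method must be invokable, e.g. Q_INVOKABLE or a slot):

    #include <QObject>
    #include <QThread>
    #include <QMetaObject>

    class MyObject : public QObject {
        Q_OBJECT
    public:
        Q_INVOKABLE void doWork(int value) {
            if (QThread::currentThread() != thread()) {
                // marshal the call onto the owning thread and block until it completes
                QMetaObject::invokeMethod(this, "doWork", Qt::BlockingQueuedConnection,
                                          Q_ARG(int, value));
                return;
            }
            // ... actual work, now guaranteed to run on the owning thread ...
        }
    };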

View file

@@ -151,6 +151,10 @@ public:
Q_INVOKABLE glm::quat getJointCombinedRotation(int index) const;
Q_INVOKABLE glm::quat getJointCombinedRotation(const QString& name) const;
Q_INVOKABLE void setJointModelPositionAndOrientation(int index, const glm::vec3 position, const glm::quat& rotation);
Q_INVOKABLE void setJointModelPositionAndOrientation(const QString& name, const glm::vec3 position,
const glm::quat& rotation);
Q_INVOKABLE glm::vec3 getVelocity() const { return _velocity; }
Q_INVOKABLE glm::vec3 getAcceleration() const { return _acceleration; }
Q_INVOKABLE glm::vec3 getAngularVelocity() const { return _angularVelocity; }

View file

@@ -57,15 +57,15 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
* joint.rotation, DEFAULT_PRIORITY);
}
void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
void FaceModel::maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, JointState& state) {
// likewise with the eye joints
// NOTE: at the moment we do the math in the world-frame, hence the inverse transform is more complex than usual.
glm::mat4 inverse = glm::inverse(glm::mat4_cast(_rotation) * parentState.getTransform() *
glm::mat4 inverse = glm::inverse(glm::mat4_cast(model->getRotation()) * parentState.getTransform() *
glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation));
glm::vec3 front = glm::vec3(inverse * glm::vec4(_owningHead->getFinalOrientationInWorldFrame() * IDENTITY_FRONT, 0.0f));
glm::vec3 lookAt = glm::vec3(inverse * glm::vec4(_owningHead->getLookAtPosition() +
_owningHead->getSaccade() - _translation, 1.0f));
_owningHead->getSaccade() - model->getTranslation(), 1.0f));
glm::quat between = rotationBetween(front, lookAt);
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
state.setRotationInConstrainedFrame(glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
@@ -82,7 +82,7 @@ void FaceModel::updateJointState(int index) {
maybeUpdateNeckRotation(parentState, joint, state);
} else if (index == geometry.leftEyeJointIndex || index == geometry.rightEyeJointIndex) {
maybeUpdateEyeRotation(parentState, joint, state);
maybeUpdateEyeRotation(this, parentState, joint, state);
}
}

View file

@@ -27,7 +27,7 @@ public:
virtual void simulate(float deltaTime, bool fullUpdate = true);
virtual void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void maybeUpdateEyeRotation(Model* model, const JointState& parentState, const FBXJoint& joint, JointState& state);
virtual void updateJointState(int index);
/// Retrieve the positions of up to two eye meshes.

View file

@@ -184,8 +184,8 @@ void Head::relaxLean(float deltaTime) {
}
void Head::render(float alpha, Model::RenderMode mode) {
if (_faceModel.render(alpha, mode, Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows)) &&
_renderLookatVectors && mode != Model::SHADOW_RENDER_MODE) {
_faceModel.render(alpha, mode, Menu::getInstance()->isOptionChecked(MenuOption::AvatarsReceiveShadows));
if (_renderLookatVectors && mode != Model::SHADOW_RENDER_MODE) {
renderLookatVectors(_leftEyePosition, _rightEyePosition, _lookAtPosition);
}
}

View file

@@ -1002,6 +1002,7 @@ void MyAvatar::clearJointData(int index) {
if (QThread::currentThread() == thread()) {
// HACK: ATM only JS scripts call clearJointData() on MyAvatar so we hardcode the priority
_skeletonModel.setJointState(index, false, glm::quat(), 0.0f);
_skeletonModel.clearJointAnimationPriority(index);
}
}
@@ -1833,25 +1834,34 @@ void MyAvatar::resetSize() {
qDebug("Reseted scale to %f", _targetScale);
}
void MyAvatar::goToLocation(const glm::vec3& newPosition, bool hasOrientation, const glm::vec3& newOrientation) {
glm::quat quatOrientation = getOrientation();
void MyAvatar::goToLocation(const glm::vec3& newPosition,
bool hasOrientation, const glm::quat& newOrientation,
bool shouldFaceLocation) {
qDebug().nospace() << "MyAvatar goToLocation - moving to " << newPosition.x << ", "
<< newPosition.y << ", " << newPosition.z;
glm::vec3 shiftedPosition = newPosition;
if (hasOrientation) {
qDebug().nospace() << "MyAvatar goToLocation - new orientation is "
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z;
<< newOrientation.x << ", " << newOrientation.y << ", " << newOrientation.z << ", " << newOrientation.w;
// orient the user to face the target
glm::quat quatOrientation = glm::quat(glm::radians(newOrientation))
* glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
glm::quat quatOrientation = newOrientation;
if (shouldFaceLocation) {
quatOrientation = newOrientation * glm::angleAxis(PI, glm::vec3(0.0f, 1.0f, 0.0f));
// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
}
setOrientation(quatOrientation);
}
// move the user a couple units away
const float DISTANCE_TO_USER = 2.0f;
glm::vec3 shiftedPosition = newPosition - quatOrientation * IDENTITY_FRONT * DISTANCE_TO_USER;
slamPosition(shiftedPosition);
emit transformChanged();
}
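The shouldFaceLocation branch above reduces to two steps: rotate the target orientation half a turn about +Y so the avatar looks back at the location, then step two units away along the resulting front vector. A standalone sketch of that math, assuming IDENTITY_FRONT is the engine's canonical front direction:

    // Sketch only: reproduces the face-location math in isolation.
    #include <glm/glm.hpp>
    #include <glm/gtc/constants.hpp>
    #include <glm/gtc/quaternion.hpp>

    glm::vec3 standoffFacing(const glm::vec3& target, const glm::quat& targetOrientation,
                             const glm::vec3& identityFront, glm::quat& facingOut) {
        const float DISTANCE_TO_USER = 2.0f;  // stand a couple of units back
        // face the opposite way from the target's own orientation (half-turn about +Y)
        facingOut = targetOrientation * glm::angleAxis(glm::pi<float>(), glm::vec3(0.0f, 1.0f, 0.0f));
        // back away from the target along the new front vector
        return target - facingOut * identityFront * DISTANCE_TO_USER;
    }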

View file

@@ -163,7 +163,9 @@ public slots:
void decreaseSize();
void resetSize();
void goToLocation(const glm::vec3& newPosition, bool hasOrientation = false, const glm::vec3& newOrientation = glm::vec3());
void goToLocation(const glm::vec3& newPosition,
bool hasOrientation = false, const glm::quat& newOrientation = glm::quat(),
bool shouldFaceLocation = false);
// Set/Get update the thrust that will move the avatar around
void addThrust(glm::vec3 newThrust) { _thrust += newThrust; };

View file

@@ -281,7 +281,7 @@ void SkeletonModel::maybeUpdateNeckRotation(const JointState& parentState, const
}
void SkeletonModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
_owningAvatar->getHead()->getFaceModel().maybeUpdateEyeRotation(parentState, joint, state);
_owningAvatar->getHead()->getFaceModel().maybeUpdateEyeRotation(this, parentState, joint, state);
}
void SkeletonModel::renderJointConstraints(int jointIndex) {

View file

@@ -9,10 +9,14 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <qhttpmultipart.h>
#include <qjsonobject.h>
#include <AccountManager.h>
#include "Application.h"
#include "ui/Snapshot.h"
#include "LocationManager.h"
const QString POST_LOCATION_CREATE = "/api/v1/locations/";
@@ -24,13 +28,17 @@ LocationManager& LocationManager::getInstance() {
const QString UNKNOWN_ERROR_MESSAGE = "Unknown error creating named location. Please try again!";
void LocationManager::namedLocationDataReceived(const QJsonObject& data) {
if (data.isEmpty()) {
return;
}
const QString LOCATION_OBJECT_KEY = "location";
const QString LOCATION_ID_KEY = "id";
if (data.contains("status") && data["status"].toString() == "success") {
void LocationManager::namedLocationDataReceived(const QJsonObject& rootObject) {
if (rootObject.contains("status") && rootObject["status"].toString() == "success") {
emit creationCompleted(QString());
// successfully created a location - grab the ID from the response and create a snapshot to upload
QString locationIDString = rootObject[LOCATION_OBJECT_KEY].toObject()[LOCATION_ID_KEY].toString();
updateSnapshotForExistingLocation(locationIDString);
} else {
emit creationCompleted(UNKNOWN_ERROR_MESSAGE);
}
@@ -87,3 +95,57 @@ void LocationManager::errorDataReceived(QNetworkReply& errorReply) {
creationCompleted(UNKNOWN_ERROR_MESSAGE);
}
}
void LocationManager::locationImageUpdateSuccess(const QJsonObject& rootObject) {
qDebug() << "Successfuly updated a location image.";
}
void LocationManager::updateSnapshotForExistingLocation(const QString& locationID) {
// first create a snapshot and save it
Application* application = Application::getInstance();
QTemporaryFile* tempImageFile = Snapshot::saveTempSnapshot(application->getGLWidget(), application->getAvatar());
if (tempImageFile && tempImageFile->open()) {
AccountManager& accountManager = AccountManager::getInstance();
// set up a multipart that lives on the AccountManager thread - we need this so it can be cleaned up after the QNetworkReply
QHttpMultiPart* imageFileMultiPart = new QHttpMultiPart(QHttpMultiPart::FormDataType);
imageFileMultiPart->moveToThread(accountManager.thread());
// parent the temp file to the QHttpMultipart after moving it to account manager thread
tempImageFile->moveToThread(accountManager.thread());
tempImageFile->setParent(imageFileMultiPart);
qDebug() << "Uploading a snapshot from" << QFileInfo(*tempImageFile).absoluteFilePath()
<< "as location image for" << locationID;
const QString LOCATION_IMAGE_NAME = "location[image]";
QHttpPart imagePart;
imagePart.setHeader(QNetworkRequest::ContentDispositionHeader,
QVariant("form-data; name=\"" + LOCATION_IMAGE_NAME + "\";"
" filename=\"" + QFileInfo(tempImageFile->fileName()).fileName().toUtf8() + "\""));
imagePart.setHeader(QNetworkRequest::ContentTypeHeader, QVariant("application/octet-stream"));
imagePart.setBodyDevice(tempImageFile);
imageFileMultiPart->append(imagePart);
const QString LOCATION_IMAGE_PUT_PATH = "api/v1/locations/%1/image";
JSONCallbackParameters imageCallbackParams;
imageCallbackParams.jsonCallbackReceiver = this;
imageCallbackParams.jsonCallbackMethod = "locationImageUpdateSuccess";
// make an authenticated request via account manager to upload the image,
// logging on success via locationImageUpdateSuccess
AccountManager::getInstance().authenticatedRequest(LOCATION_IMAGE_PUT_PATH.arg(locationID),
QNetworkAccessManager::PutOperation,
imageCallbackParams, QByteArray(), imageFileMultiPart);
} else {
qDebug() << "Couldn't open snapshot file to upload as location image. No location image will be stored.";
return;
}
}

View file

@@ -35,8 +35,12 @@ signals:
void creationCompleted(const QString& errorMessage);
private slots:
void namedLocationDataReceived(const QJsonObject& data);
void namedLocationDataReceived(const QJsonObject& jsonObject);
void errorDataReceived(QNetworkReply& errorReply);
void locationImageUpdateSuccess(const QJsonObject& jsonObject);
private:
void updateSnapshotForExistingLocation(const QString& locationID);
};

View file

@@ -684,10 +684,10 @@ bool NetworkMeshPart::isTranslucent() const {
return diffuseTexture && diffuseTexture->isTranslucent();
}
int NetworkMesh::getTranslucentPartCount() const {
int NetworkMesh::getTranslucentPartCount(const FBXMesh& fbxMesh) const {
int count = 0;
foreach (const NetworkMeshPart& part, parts) {
if (part.isTranslucent()) {
for (int i = 0; i < parts.size(); i++) {
if (parts.at(i).isTranslucent() || fbxMesh.parts.at(i).opacity != 1.0f) {
count++;
}
}

View file

@@ -145,7 +145,7 @@ public:
QVector<NetworkMeshPart> parts;
int getTranslucentPartCount() const;
int getTranslucentPartCount(const FBXMesh& fbxMesh) const;
};
#endif // hifi_GeometryCache_h

View file

@@ -608,13 +608,13 @@ bool Model::render(float alpha, RenderMode mode, bool receiveShadows) {
glAlphaFunc(GL_GREATER, 0.5f * alpha);
receiveShadows &= Menu::getInstance()->getShadowsEnabled();
renderMeshes(alpha, mode, false, receiveShadows);
renderMeshes(mode, false, receiveShadows);
glDisable(GL_ALPHA_TEST);
// render translucent meshes afterwards
renderMeshes(alpha, mode, true, receiveShadows);
renderMeshes(mode, true, receiveShadows);
glDisable(GL_CULL_FACE);
@@ -894,6 +894,10 @@ void Model::setScaleToFit(bool scaleToFit, const glm::vec3& dimensions) {
}
void Model::setScaleToFit(bool scaleToFit, float largestDimension) {
if (!isActive()) {
return;
}
if (_scaleToFit != scaleToFit || glm::length(_scaleToFitDimensions) != largestDimension) {
_scaleToFit = scaleToFit;
@@ -1356,7 +1360,7 @@ void Model::deleteGeometry() {
}
}
void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool receiveShadows) {
void Model::renderMeshes(RenderMode mode, bool translucent, bool receiveShadows) {
updateVisibleJointStates();
const FBXGeometry& geometry = _geometry->getFBXGeometry();
const QVector<NetworkMesh>& networkMeshes = _geometry->getMeshes();
@@ -1365,13 +1369,13 @@ void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool re
for (int i = 0; i < networkMeshes.size(); i++) {
// exit early if the translucency doesn't match what we're drawing
const NetworkMesh& networkMesh = networkMeshes.at(i);
if (translucent ? (networkMesh.getTranslucentPartCount() == 0) :
(networkMesh.getTranslucentPartCount() == networkMesh.parts.size())) {
const FBXMesh& mesh = geometry.meshes.at(i);
if (translucent ? (networkMesh.getTranslucentPartCount(mesh) == 0) :
(networkMesh.getTranslucentPartCount(mesh) == networkMesh.parts.size())) {
continue;
}
const_cast<QOpenGLBuffer&>(networkMesh.indexBuffer).bind();
const FBXMesh& mesh = geometry.meshes.at(i);
int vertexCount = mesh.vertices.size();
if (vertexCount == 0) {
// sanity check
@@ -1528,7 +1532,7 @@ void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool re
if (!mesh.colors.isEmpty()) {
glEnableClientState(GL_COLOR_ARRAY);
} else {
glColor4f(1.0f, 1.0f, 1.0f, alpha);
glColor4f(1.0f, 1.0f, 1.0f, 1.0f);
}
if (!mesh.texCoords.isEmpty()) {
glEnableClientState(GL_TEXTURE_COORD_ARRAY);
@@ -1538,7 +1542,7 @@ void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool re
for (int j = 0; j < networkMesh.parts.size(); j++) {
const NetworkMeshPart& networkPart = networkMesh.parts.at(j);
const FBXMeshPart& part = mesh.parts.at(j);
if (networkPart.isTranslucent() != translucent) {
if ((networkPart.isTranslucent() || part.opacity != 1.0f) != translucent) {
offset += (part.quadIndices.size() + part.triangleIndices.size()) * sizeof(int);
continue;
}
@@ -1547,8 +1551,8 @@ void Model::renderMeshes(float alpha, RenderMode mode, bool translucent, bool re
glBindTexture(GL_TEXTURE_2D, 0);
} else {
glm::vec4 diffuse = glm::vec4(part.diffuseColor, alpha);
glm::vec4 specular = glm::vec4(part.specularColor, alpha);
glm::vec4 diffuse = glm::vec4(part.diffuseColor, part.opacity);
glm::vec4 specular = glm::vec4(part.specularColor, part.opacity);
glMaterialfv(GL_FRONT, GL_AMBIENT, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_DIFFUSE, (const float*)&diffuse);
glMaterialfv(GL_FRONT, GL_SPECULAR, (const float*)&specular);
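After this change a mesh part counts as translucent on either of two triggers: its diffuse texture carries alpha, or its FBX material opacity is below one. The predicate shared by getTranslucentPartCount and renderMeshes is effectively the one sketched here with stand-in types (field names are illustrative):

    // Stand-in sketch of the translucency test used above.
    struct PartInfo {
        bool textureTranslucent;  // what NetworkMeshPart::isTranslucent() reports
        float opacity;            // what FBXMeshPart::opacity carries
    };

    bool isPartTranslucent(const PartInfo& part) {
        return part.textureTranslucent || part.opacity != 1.0f;
    }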

View file

@@ -183,6 +183,8 @@ public:
QVector<JointState>& getJointStates() { return _jointStates; }
const QVector<JointState>& getJointStates() const { return _jointStates; }
void inverseKinematics(int jointIndex, glm::vec3 position, const glm::quat& rotation, float priority);
protected:
QSharedPointer<NetworkGeometry> _geometry;
@@ -238,8 +240,6 @@ protected:
bool useRotation = false, int lastFreeIndex = -1, bool allIntermediatesFree = false,
const glm::vec3& alignment = glm::vec3(0.0f, -1.0f, 0.0f), float priority = 1.0f);
void inverseKinematics(int jointIndex, glm::vec3 position, const glm::quat& rotation, float priority);
/// Restores the indexed joint to its default position.
/// \param fraction the fraction of the default position to apply (i.e., 0.25f to slerp one fourth of the way to
/// the original position
@@ -256,7 +256,7 @@ private:
void applyNextGeometry();
void deleteGeometry();
void renderMeshes(float alpha, RenderMode mode, bool translucent, bool receiveShadows);
void renderMeshes(RenderMode mode, bool translucent, bool receiveShadows);
QVector<JointState> createJointStates(const FBXGeometry& geometry);
void initJointTransforms();

View file

@@ -312,7 +312,8 @@ void ControllerScriptingInterface::updateInputControllers() {
InputController::InputController(int deviceTrackerId, int subTrackerId, QObject* parent) :
AbstractInputController(),
_deviceTrackerId(deviceTrackerId),
_subTrackerId(subTrackerId)
_subTrackerId(subTrackerId),
_isActive(false)
{
}

View file

@@ -65,6 +65,24 @@ SnapshotMetaData* Snapshot::parseSnapshotData(QString snapshotPath) {
}
QString Snapshot::saveSnapshot(QGLWidget* widget, Avatar* avatar) {
QFile* snapshotFile = savedFileForSnapshot(widget, avatar, false);
// we don't need the snapshot file, so close it, grab its filename and delete it
snapshotFile->close();
QString snapshotPath = QFileInfo(*snapshotFile).absoluteFilePath();
delete snapshotFile;
return snapshotPath;
}
QTemporaryFile* Snapshot::saveTempSnapshot(QGLWidget* widget, Avatar* avatar) {
// return whatever we get back from saved file for snapshot
return static_cast<QTemporaryFile*>(savedFileForSnapshot(widget, avatar, true));
}
QFile* Snapshot::savedFileForSnapshot(QGLWidget* widget, Avatar* avatar, bool isTemporary) {
QImage shot = widget->grabFrameBuffer();
glm::vec3 location = avatar->getPosition();
@@ -91,16 +109,40 @@ QString Snapshot::saveSnapshot(QGLWidget* widget, Avatar* avatar) {
username.replace(QRegExp("[^A-Za-z0-9_]"), "-");
QDateTime now = QDateTime::currentDateTime();
QString fileName = Menu::getInstance()->getSnapshotsLocation();
if (!fileName.endsWith(QDir::separator())) {
fileName.append(QDir::separator());
}
fileName.append(QString(FILENAME_PATH_FORMAT.arg(username, now.toString(DATETIME_FORMAT), formattedLocation)));
shot.save(fileName, 0, 100);
return fileName;
QString filename = FILENAME_PATH_FORMAT.arg(username, now.toString(DATETIME_FORMAT), formattedLocation);
const int IMAGE_QUALITY = 100;
if (!isTemporary) {
QString snapshotFullPath = Menu::getInstance()->getSnapshotsLocation();
if (!snapshotFullPath.endsWith(QDir::separator())) {
snapshotFullPath.append(QDir::separator());
}
snapshotFullPath.append(filename);
QFile* imageFile = new QFile(snapshotFullPath);
imageFile->open(QIODevice::WriteOnly);
shot.save(imageFile, 0, IMAGE_QUALITY);
imageFile->close();
return imageFile;
} else {
QTemporaryFile* imageTempFile = new QTemporaryFile(QDir::tempPath() + "/XXXXXX-" + filename);
if (!imageTempFile->open()) {
qDebug() << "Unable to open QTemporaryFile for temp snapshot. Will not save.";
return NULL;
}
shot.save(imageTempFile, 0, IMAGE_QUALITY);
imageTempFile->close();
return imageTempFile;
}
}

View file

@@ -42,7 +42,11 @@ class Snapshot {
public:
static QString saveSnapshot(QGLWidget* widget, Avatar* avatar);
static QTemporaryFile* saveTempSnapshot(QGLWidget* widget, Avatar* avatar);
static SnapshotMetaData* parseSnapshotData(QString snapshotPath);
private:
static QFile* savedFileForSnapshot(QGLWidget* widget, Avatar* avatar, bool isTemporary);
};
#endif // hifi_Snapshot_h

View file

@@ -109,14 +109,8 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
int networkSamples;
if (packetType == PacketTypeSilentAudioFrame) {
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
readBytes += sizeof(quint16);
networkSamples = (int)numSilentSamples;
} else {
// parse the info after the seq number and before the audio data (the stream properties)
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), networkSamples);
}
// parse the info after the seq number and before the audio data (the stream properties)
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), networkSamples);
// handle this packet based on its arrival status.
switch (arrivalInfo._status) {

View file

@@ -33,10 +33,6 @@ PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, b
_lastPopOutputLoudness(0.0f),
_listenerUnattenuatedZone(NULL)
{
// constant defined in AudioMixer.h. However, we don't want to include this here
// we will soon find a better common home for these audio-related constants
const int SAMPLE_PHASE_DELAY_AT_90 = 20;
_filter.initialize(SAMPLE_RATE, (NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)) / 2);
}
void PositionalAudioStream::resetStats() {

View file

@@ -16,10 +16,6 @@
#include <AABox.h>
#include "InboundAudioStream.h"
#include "AudioFormat.h"
#include "AudioBuffer.h"
#include "AudioFilter.h"
#include "AudioFilterBank.h"
const int AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
@@ -50,8 +46,6 @@ public:
void setListenerUnattenuatedZone(AABox* listenerUnattenuatedZone) { _listenerUnattenuatedZone = listenerUnattenuatedZone; }
AudioFilterHSF1s& getFilter() { return _filter; }
protected:
// disallow copying of PositionalAudioStream objects
PositionalAudioStream(const PositionalAudioStream&);
@@ -70,8 +64,6 @@ protected:
float _lastPopOutputTrailingLoudness;
float _lastPopOutputLoudness;
AABox* _listenerUnattenuatedZone;
AudioFilterHSF1s _filter;
};
#endif // hifi_PositionalAudioStream_h

View file

@@ -657,6 +657,7 @@ public:
glm::vec3 diffuse;
glm::vec3 specular;
float shininess;
float opacity;
};
class Cluster {
@@ -1280,7 +1281,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
textureContent.insert(filename, content);
}
} else if (object.name == "Material") {
Material material = { glm::vec3(1.0f, 1.0f, 1.0f), glm::vec3(1.0f, 1.0f, 1.0f), 96.0f };
Material material = { glm::vec3(1.0f, 1.0f, 1.0f), glm::vec3(1.0f, 1.0f, 1.0f), 96.0f, 1.0f };
foreach (const FBXNode& subobject, object.children) {
bool properties = false;
QByteArray propertyName;
@@ -1306,6 +1307,9 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
} else if (property.properties.at(0) == "Shininess") {
material.shininess = property.properties.at(index).value<double>();
} else if (property.properties.at(0) == "Opacity") {
material.opacity = property.properties.at(index).value<double>();
}
}
}
@@ -1602,6 +1606,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
part.diffuseColor = material.diffuse;
part.specularColor = material.specular;
part.shininess = material.shininess;
part.opacity = material.opacity;
if (!diffuseTexture.filename.isNull()) {
part.diffuseTexture = diffuseTexture;
}
@@ -1750,12 +1755,24 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
// look for an unused slot in the weights vector
glm::vec4& weights = extracted.mesh.clusterWeights[it.value()];
for (int k = 0; k < 4; k++) {
int lowestIndex = -1;
float lowestWeight = FLT_MAX;
int k = 0;
for (; k < 4; k++) {
if (weights[k] == 0.0f) {
extracted.mesh.clusterIndices[it.value()][k] = i;
weights[k] = weight;
break;
}
if (weights[k] < lowestWeight) {
lowestIndex = k;
lowestWeight = weights[k];
}
}
if (k == 4) {
// no space for an additional weight; we must replace the lowest
weights[lowestIndex] = weight;
extracted.mesh.clusterIndices[it.value()][lowestIndex] = i;
}
}
}
@@ -1764,6 +1781,14 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
maxJointIndex = jointIndex;
}
}
// normalize the weights if they don't add up to one
for (int i = 0; i < extracted.mesh.clusterWeights.size(); i++) {
glm::vec4& weights = extracted.mesh.clusterWeights[i];
float total = weights.x + weights.y + weights.z + weights.w;
if (total != 1.0f && total != 0.0f) {
weights /= total;
}
}
} else {
int jointIndex = maxJointIndex;
FBXJoint& joint = geometry.joints[jointIndex];
@@ -2042,6 +2067,7 @@ FBXGeometry readSVO(const QByteArray& model) {
FBXMeshPart part;
part.diffuseColor = glm::vec3(1.0f, 1.0f, 1.0f);
part.shininess = 96.0f;
part.opacity = 1.0f;
mesh.parts.append(part);
VoxelTree tree;
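The cluster-weight change in extractFBXGeometry above is a standard top-four skinning reduction: each vertex keeps at most four joint influences, a fifth influence evicts the smallest one, and the survivors are renormalized to sum to one. A compact sketch of the same idea, detached from the FBX reader's data structures (names are illustrative):

    #include <glm/glm.hpp>

    // Insert (jointIndex, weight) into a vertex's four influence slots, evicting the
    // smallest existing weight when all slots are taken (as in the change above, the
    // new weight replaces the smallest even if it is itself smaller).
    void addInfluence(glm::ivec4& indices, glm::vec4& weights, int jointIndex, float weight) {
        int lowest = 0;
        for (int k = 0; k < 4; k++) {
            if (weights[k] == 0.0f) {
                indices[k] = jointIndex;
                weights[k] = weight;
                return;
            }
            if (weights[k] < weights[lowest]) {
                lowest = k;
            }
        }
        indices[lowest] = jointIndex;
        weights[lowest] = weight;
    }

    // Once all influences are gathered, make the weights sum to one.
    void normalizeInfluences(glm::vec4& weights) {
        float total = weights.x + weights.y + weights.z + weights.w;
        if (total != 0.0f && total != 1.0f) {
            weights /= total;
        }
    }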

View file

@@ -109,6 +109,7 @@ public:
glm::vec3 diffuseColor;
glm::vec3 specularColor;
float shininess;
float opacity;
FBXTexture diffuseTexture;
FBXTexture normalTexture;

View file

@@ -28,7 +28,7 @@ QString AddressManager::pathForPositionAndOrientation(const glm::vec3& position,
QString pathString = "/" + createByteArray(position);
if (hasOrientation) {
QString orientationString = createByteArray(glm::degrees(safeEulerAngles(orientation)));
QString orientationString = createByteArray(orientation);
pathString += "/" + orientationString;
}
@@ -61,25 +61,25 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl) {
// 3. location string (posX,posY,posZ/quatX,quatY,quatZ,quatW)
// 4. domain network address (IP or dns resolvable hostname)
if (lookupUrl.isRelative()) {
// if this is a relative path then handle it as a relative viewpoint
handleRelativeViewpoint(lookupUrl.path());
} else {
// use our regex'ed helpers to figure out what we're supposed to do with this
if (!handleUsername(lookupUrl.authority())) {
// we're assuming this is either a network address or global place name
// check if it is a network address first
if (!handleNetworkAddress(lookupUrl.host())) {
// wasn't an address - lookup the place name
attemptPlaceNameLookup(lookupUrl.host());
}
// we may have a path that defines a relative viewpoint - if so we should jump to that now
handleRelativeViewpoint(lookupUrl.path());
// use our regex'ed helpers to figure out what we're supposed to do with this
if (!handleUsername(lookupUrl.authority())) {
// we're assuming this is either a network address or global place name
// check if it is a network address first
if (!handleNetworkAddress(lookupUrl.host())) {
// wasn't an address - lookup the place name
attemptPlaceNameLookup(lookupUrl.host());
}
// we may have a path that defines a relative viewpoint - if so we should jump to that now
handleRelativeViewpoint(lookupUrl.path());
}
return true;
} else if (lookupUrl.toString().startsWith('/')) {
qDebug() << "Going to relative path" << lookupUrl.path();
// if this is a relative path then handle it as a relative viewpoint
handleRelativeViewpoint(lookupUrl.path());
}
return false;
@@ -89,10 +89,18 @@ void AddressManager::handleLookupString(const QString& lookupString) {
if (!lookupString.isEmpty()) {
// make this a valid hifi URL and hand it off to handleUrl
QString sanitizedString = lookupString;
const QRegExp HIFI_SCHEME_REGEX = QRegExp(HIFI_URL_SCHEME + ":\\/{1,2}", Qt::CaseInsensitive);
sanitizedString = sanitizedString.remove(HIFI_SCHEME_REGEX);
QUrl lookupURL;
handleUrl(QUrl(HIFI_URL_SCHEME + "://" + sanitizedString));
if (!lookupString.startsWith('/')) {
const QRegExp HIFI_SCHEME_REGEX = QRegExp(HIFI_URL_SCHEME + ":\\/{1,2}", Qt::CaseInsensitive);
sanitizedString = sanitizedString.remove(HIFI_SCHEME_REGEX);
lookupURL = QUrl(HIFI_URL_SCHEME + "://" + sanitizedString);
} else {
lookupURL = QUrl(lookupString);
}
handleUrl(lookupURL);
}
}
@@ -124,9 +132,11 @@ void AddressManager::handleAPIResponse(const QJsonObject &jsonObject) {
returnedPath = domainObject[LOCATION_KEY].toObject()[LOCATION_PATH_KEY].toString();
}
bool shouldFaceViewpoint = dataObject.contains(ADDRESS_API_ONLINE_KEY);
if (!returnedPath.isEmpty()) {
// try to parse this returned path as a viewpoint, that's the only thing it could be for now
if (!handleRelativeViewpoint(returnedPath)) {
if (!handleRelativeViewpoint(returnedPath, shouldFaceViewpoint)) {
qDebug() << "Received a location path that was could not be handled as a viewpoint -" << returnedPath;
}
}
@@ -183,38 +193,47 @@ bool AddressManager::handleNetworkAddress(const QString& lookupString) {
return false;
}
bool AddressManager::handleRelativeViewpoint(const QString& lookupString) {
bool AddressManager::handleRelativeViewpoint(const QString& lookupString, bool shouldFace) {
const QString FLOAT_REGEX_STRING = "([-+]?[0-9]*\\.?[0-9]+(?:[eE][-+]?[0-9]+)?)";
const QString TRIPLE_FLOAT_REGEX_STRING = QString("\\/") + FLOAT_REGEX_STRING + "\\s*,\\s*" +
FLOAT_REGEX_STRING + "\\s*,\\s*" + FLOAT_REGEX_STRING + "\\s*(?:$|\\/)";
const QString SPACED_COMMA_REGEX_STRING = "\\s*,\\s*";
const QString POSITION_REGEX_STRING = QString("\\/") + FLOAT_REGEX_STRING + SPACED_COMMA_REGEX_STRING +
FLOAT_REGEX_STRING + SPACED_COMMA_REGEX_STRING + FLOAT_REGEX_STRING + "\\s*(?:$|\\/)";
const QString QUAT_REGEX_STRING = QString("\\/") + FLOAT_REGEX_STRING + SPACED_COMMA_REGEX_STRING +
FLOAT_REGEX_STRING + SPACED_COMMA_REGEX_STRING + FLOAT_REGEX_STRING + SPACED_COMMA_REGEX_STRING +
FLOAT_REGEX_STRING + "\\s*$";
QRegExp tripleFloatRegex(TRIPLE_FLOAT_REGEX_STRING);
QRegExp positionRegex(POSITION_REGEX_STRING);
if (tripleFloatRegex.indexIn(lookupString) != -1) {
if (positionRegex.indexIn(lookupString) != -1) {
// we have at least a position, so emit our signal to say we need to change position
glm::vec3 newPosition(tripleFloatRegex.cap(1).toFloat(),
tripleFloatRegex.cap(2).toFloat(),
tripleFloatRegex.cap(3).toFloat());
glm::vec3 newPosition(positionRegex.cap(1).toFloat(),
positionRegex.cap(2).toFloat(),
positionRegex.cap(3).toFloat());
if (!isNaN(newPosition.x) && !isNaN(newPosition.y) && !isNaN(newPosition.z)) {
glm::vec3 newOrientation;
glm::quat newOrientation;
QRegExp orientationRegex(QUAT_REGEX_STRING);
// we may also have an orientation
if (lookupString[tripleFloatRegex.matchedLength() - 1] == QChar('/')
&& tripleFloatRegex.indexIn(lookupString, tripleFloatRegex.matchedLength() - 1) != -1) {
if (lookupString[positionRegex.matchedLength() - 1] == QChar('/')
&& orientationRegex.indexIn(lookupString, positionRegex.matchedLength() - 1) != -1) {
glm::vec3 newOrientation(tripleFloatRegex.cap(1).toFloat(),
tripleFloatRegex.cap(2).toFloat(),
tripleFloatRegex.cap(3).toFloat());
glm::quat newOrientation = glm::normalize(glm::quat(orientationRegex.cap(4).toFloat(),
orientationRegex.cap(1).toFloat(),
orientationRegex.cap(2).toFloat(),
orientationRegex.cap(3).toFloat()));
if (!isNaN(newOrientation.x) && !isNaN(newOrientation.y) && !isNaN(newOrientation.z)) {
emit locationChangeRequired(newPosition, true, newOrientation);
if (!isNaN(newOrientation.x) && !isNaN(newOrientation.y) && !isNaN(newOrientation.z)
&& !isNaN(newOrientation.w)) {
emit locationChangeRequired(newPosition, true, newOrientation, shouldFace);
return true;
} else {
qDebug() << "Orientation parsed from lookup string is invalid. Will not use for location change.";
}
}
emit locationChangeRequired(newPosition, false, newOrientation);
emit locationChangeRequired(newPosition, false, newOrientation, shouldFace);
} else {
qDebug() << "Could not jump to position from lookup string because it has an invalid value.";

View file

@@ -40,14 +40,16 @@ signals:
void lookupResultIsOffline();
void lookupResultIsNotFound();
void possibleDomainChangeRequired(const QString& newHostname);
void locationChangeRequired(const glm::vec3& newPosition, bool hasOrientationChange, const glm::vec3& newOrientation);
void locationChangeRequired(const glm::vec3& newPosition,
bool hasOrientationChange, const glm::quat& newOrientation,
bool shouldFaceLocation);
private:
const JSONCallbackParameters& apiCallbackParameters();
bool handleUrl(const QUrl& lookupUrl);
bool handleNetworkAddress(const QString& lookupString);
bool handleRelativeViewpoint(const QString& pathSubsection);
bool handleRelativeViewpoint(const QString& pathSubsection, bool shouldFace = false);
bool handleUsername(const QString& lookupString);
};

View file

@@ -67,6 +67,7 @@ LimitedNodeList::LimitedNodeList(unsigned short socketListenPort, unsigned short
_nodeHashMutex(QMutex::Recursive),
_nodeSocket(this),
_dtlsSocket(NULL),
_publicSockAddr(),
_numCollectedPackets(0),
_numCollectedBytes(0),
_packetStatTimer()
@@ -502,3 +503,115 @@ void LimitedNodeList::removeSilentNodes() {
_nodeHashMutex.unlock();
}
const uint32_t RFC_5389_MAGIC_COOKIE = 0x2112A442;
const int NUM_BYTES_STUN_HEADER = 20;
void LimitedNodeList::sendSTUNRequest() {
unsigned char stunRequestPacket[NUM_BYTES_STUN_HEADER];
int packetIndex = 0;
const uint32_t RFC_5389_MAGIC_COOKIE_NETWORK_ORDER = htonl(RFC_5389_MAGIC_COOKIE);
// leading zeros + message type
const uint16_t REQUEST_MESSAGE_TYPE = htons(0x0001);
memcpy(stunRequestPacket + packetIndex, &REQUEST_MESSAGE_TYPE, sizeof(REQUEST_MESSAGE_TYPE));
packetIndex += sizeof(REQUEST_MESSAGE_TYPE);
// message length (no additional attributes are included)
uint16_t messageLength = 0;
memcpy(stunRequestPacket + packetIndex, &messageLength, sizeof(messageLength));
packetIndex += sizeof(messageLength);
memcpy(stunRequestPacket + packetIndex, &RFC_5389_MAGIC_COOKIE_NETWORK_ORDER, sizeof(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER));
packetIndex += sizeof(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER);
// transaction ID (random 12-byte unsigned integer)
const uint NUM_TRANSACTION_ID_BYTES = 12;
QUuid randomUUID = QUuid::createUuid();
memcpy(stunRequestPacket + packetIndex, randomUUID.toRfc4122().data(), NUM_TRANSACTION_ID_BYTES);
// lookup the IP for the STUN server
static HifiSockAddr stunSockAddr(STUN_SERVER_HOSTNAME, STUN_SERVER_PORT);
_nodeSocket.writeDatagram((char*) stunRequestPacket, sizeof(stunRequestPacket),
stunSockAddr.getAddress(), stunSockAddr.getPort());
}
bool LimitedNodeList::processSTUNResponse(const QByteArray& packet) {
// check the cookie to make sure this is actually a STUN response
// and read the first attribute and make sure it is a XOR_MAPPED_ADDRESS
const int NUM_BYTES_MESSAGE_TYPE_AND_LENGTH = 4;
const uint16_t XOR_MAPPED_ADDRESS_TYPE = htons(0x0020);
const uint32_t RFC_5389_MAGIC_COOKIE_NETWORK_ORDER = htonl(RFC_5389_MAGIC_COOKIE);
int attributeStartIndex = NUM_BYTES_STUN_HEADER;
if (memcmp(packet.data() + NUM_BYTES_MESSAGE_TYPE_AND_LENGTH,
&RFC_5389_MAGIC_COOKIE_NETWORK_ORDER,
sizeof(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER)) == 0) {
// enumerate the attributes to find XOR_MAPPED_ADDRESS_TYPE
while (attributeStartIndex < packet.size()) {
if (memcmp(packet.data() + attributeStartIndex, &XOR_MAPPED_ADDRESS_TYPE, sizeof(XOR_MAPPED_ADDRESS_TYPE)) == 0) {
const int NUM_BYTES_STUN_ATTR_TYPE_AND_LENGTH = 4;
const int NUM_BYTES_FAMILY_ALIGN = 1;
const uint8_t IPV4_FAMILY_NETWORK_ORDER = htons(0x01) >> 8;
int byteIndex = attributeStartIndex + NUM_BYTES_STUN_ATTR_TYPE_AND_LENGTH + NUM_BYTES_FAMILY_ALIGN;
uint8_t addressFamily = 0;
memcpy(&addressFamily, packet.data() + byteIndex, sizeof(addressFamily));
byteIndex += sizeof(addressFamily);
if (addressFamily == IPV4_FAMILY_NETWORK_ORDER) {
// grab the X-Port
uint16_t xorMappedPort = 0;
memcpy(&xorMappedPort, packet.data() + byteIndex, sizeof(xorMappedPort));
uint16_t newPublicPort = ntohs(xorMappedPort) ^ (ntohl(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER) >> 16);
byteIndex += sizeof(xorMappedPort);
// grab the X-Address
uint32_t xorMappedAddress = 0;
memcpy(&xorMappedAddress, packet.data() + byteIndex, sizeof(xorMappedAddress));
uint32_t stunAddress = ntohl(xorMappedAddress) ^ ntohl(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER);
QHostAddress newPublicAddress = QHostAddress(stunAddress);
if (newPublicAddress != _publicSockAddr.getAddress() || newPublicPort != _publicSockAddr.getPort()) {
_publicSockAddr = HifiSockAddr(newPublicAddress, newPublicPort);
qDebug("New public socket received from STUN server is %s:%hu",
_publicSockAddr.getAddress().toString().toLocal8Bit().constData(),
_publicSockAddr.getPort());
emit publicSockAddrChanged(_publicSockAddr);
}
return true;
}
} else {
// push forward attributeStartIndex by the length of this attribute
const int NUM_BYTES_ATTRIBUTE_TYPE = 2;
uint16_t attributeLength = 0;
memcpy(&attributeLength, packet.data() + attributeStartIndex + NUM_BYTES_ATTRIBUTE_TYPE,
sizeof(attributeLength));
attributeLength = ntohs(attributeLength);
attributeStartIndex += NUM_BYTES_MESSAGE_TYPE_AND_LENGTH + attributeLength;
}
}
}
return false;
}
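To make the XOR decode concrete: per RFC 5389, the X-Port is XORed with the most significant 16 bits of the magic cookie and the X-Address with the whole cookie. A hypothetical worked example in host byte order, using a documentation address:

    #include <cassert>
    #include <cstdint>

    int main() {
        const uint32_t MAGIC_COOKIE = 0x2112A442;
        uint16_t xPort = 0x7EA2;      // X-Port as carried in the attribute
        uint32_t xAddr = 0xE112A643;  // X-Address as carried in the attribute
        uint16_t publicPort = xPort ^ (uint16_t)(MAGIC_COOKIE >> 16);  // XOR with 0x2112
        uint32_t publicAddr = xAddr ^ MAGIC_COOKIE;
        assert(publicPort == 0x5FB0);      // port 24496
        assert(publicAddr == 0xC0000201);  // 192.0.2.1
        return 0;
    }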

View file

@@ -40,6 +40,9 @@ extern const QUrl DEFAULT_NODE_AUTH_URL;
const char DEFAULT_ASSIGNMENT_SERVER_HOSTNAME[] = "localhost";
const char STUN_SERVER_HOSTNAME[] = "stun.highfidelity.io";
const unsigned short STUN_SERVER_PORT = 3478;
class HifiSockAddr;
typedef QSet<NodeType_t> NodeSet;
@@ -99,6 +102,9 @@ public:
void getPacketStats(float &packetsPerSecond, float &bytesPerSecond);
void resetPacketStats();
virtual void sendSTUNRequest();
virtual bool processSTUNResponse(const QByteArray& packet);
public slots:
void reset();
void eraseAllNodes();
@@ -110,6 +116,7 @@ signals:
void uuidChanged(const QUuid& ownerUUID, const QUuid& oldUUID);
void nodeAdded(SharedNodePointer);
void nodeKilled(SharedNodePointer);
void publicSockAddrChanged(const HifiSockAddr& publicSockAddr);
protected:
static LimitedNodeList* _sharedInstance;
@@ -130,6 +137,7 @@ protected:
QMutex _nodeHashMutex;
QUdpSocket _nodeSocket;
QUdpSocket* _dtlsSocket;
HifiSockAddr _publicSockAddr;
int _numCollectedPackets;
int _numCollectedBytes;
QElapsedTimer _packetStatTimer;

View file

@@ -57,7 +57,6 @@ NodeList::NodeList(char newOwnerType, unsigned short socketListenPort, unsigned
_domainHandler(this),
_numNoReplyDomainCheckIns(0),
_assignmentServerSocket(),
_publicSockAddr(),
_hasCompletedInitialSTUNFailure(false),
_stunRequestsSinceSuccess(0)
{
@@ -195,47 +194,16 @@ void NodeList::addSetOfNodeTypesToNodeInterestSet(const NodeSet& setOfNodeTypes)
_nodeTypesOfInterest.unite(setOfNodeTypes);
}
const uint32_t RFC_5389_MAGIC_COOKIE = 0x2112A442;
const int NUM_BYTES_STUN_HEADER = 20;
const unsigned int NUM_STUN_REQUESTS_BEFORE_FALLBACK = 5;
void NodeList::sendSTUNRequest() {
const char STUN_SERVER_HOSTNAME[] = "stun.highfidelity.io";
const unsigned short STUN_SERVER_PORT = 3478;
unsigned char stunRequestPacket[NUM_BYTES_STUN_HEADER];
int packetIndex = 0;
const uint32_t RFC_5389_MAGIC_COOKIE_NETWORK_ORDER = htonl(RFC_5389_MAGIC_COOKIE);
// leading zeros + message type
const uint16_t REQUEST_MESSAGE_TYPE = htons(0x0001);
memcpy(stunRequestPacket + packetIndex, &REQUEST_MESSAGE_TYPE, sizeof(REQUEST_MESSAGE_TYPE));
packetIndex += sizeof(REQUEST_MESSAGE_TYPE);
// message length (no additional attributes are included)
uint16_t messageLength = 0;
memcpy(stunRequestPacket + packetIndex, &messageLength, sizeof(messageLength));
packetIndex += sizeof(messageLength);
memcpy(stunRequestPacket + packetIndex, &RFC_5389_MAGIC_COOKIE_NETWORK_ORDER, sizeof(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER));
packetIndex += sizeof(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER);
// transaction ID (random 12-byte unsigned integer)
const uint NUM_TRANSACTION_ID_BYTES = 12;
QUuid randomUUID = QUuid::createUuid();
memcpy(stunRequestPacket + packetIndex, randomUUID.toRfc4122().data(), NUM_TRANSACTION_ID_BYTES);
// lookup the IP for the STUN server
static HifiSockAddr stunSockAddr(STUN_SERVER_HOSTNAME, STUN_SERVER_PORT);
if (!_hasCompletedInitialSTUNFailure) {
qDebug("Sending intial stun request to %s", stunSockAddr.getAddress().toString().toLocal8Bit().constData());
qDebug() << "Sending intial stun request to" << STUN_SERVER_HOSTNAME;
}
_nodeSocket.writeDatagram((char*) stunRequestPacket, sizeof(stunRequestPacket),
stunSockAddr.getAddress(), stunSockAddr.getPort());
LimitedNodeList::sendSTUNRequest();
_stunRequestsSinceSuccess++;
@@ -255,79 +223,16 @@ void NodeList::sendSTUNRequest() {
}
}
void NodeList::processSTUNResponse(const QByteArray& packet) {
// check the cookie to make sure this is actually a STUN response
// and read the first attribute and make sure it is a XOR_MAPPED_ADDRESS
const int NUM_BYTES_MESSAGE_TYPE_AND_LENGTH = 4;
const uint16_t XOR_MAPPED_ADDRESS_TYPE = htons(0x0020);
const uint32_t RFC_5389_MAGIC_COOKIE_NETWORK_ORDER = htonl(RFC_5389_MAGIC_COOKIE);
int attributeStartIndex = NUM_BYTES_STUN_HEADER;
if (memcmp(packet.data() + NUM_BYTES_MESSAGE_TYPE_AND_LENGTH,
&RFC_5389_MAGIC_COOKIE_NETWORK_ORDER,
sizeof(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER)) == 0) {
// enumerate the attributes to find XOR_MAPPED_ADDRESS_TYPE
while (attributeStartIndex < packet.size()) {
if (memcmp(packet.data() + attributeStartIndex, &XOR_MAPPED_ADDRESS_TYPE, sizeof(XOR_MAPPED_ADDRESS_TYPE)) == 0) {
const int NUM_BYTES_STUN_ATTR_TYPE_AND_LENGTH = 4;
const int NUM_BYTES_FAMILY_ALIGN = 1;
const uint8_t IPV4_FAMILY_NETWORK_ORDER = htons(0x01) >> 8;
// reset the number of failed STUN requests since last success
_stunRequestsSinceSuccess = 0;
int byteIndex = attributeStartIndex + NUM_BYTES_STUN_ATTR_TYPE_AND_LENGTH + NUM_BYTES_FAMILY_ALIGN;
uint8_t addressFamily = 0;
memcpy(&addressFamily, packet.data() + byteIndex, sizeof(addressFamily));
byteIndex += sizeof(addressFamily);
if (addressFamily == IPV4_FAMILY_NETWORK_ORDER) {
// grab the X-Port
uint16_t xorMappedPort = 0;
memcpy(&xorMappedPort, packet.data() + byteIndex, sizeof(xorMappedPort));
uint16_t newPublicPort = ntohs(xorMappedPort) ^ (ntohl(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER) >> 16);
byteIndex += sizeof(xorMappedPort);
// grab the X-Address
uint32_t xorMappedAddress = 0;
memcpy(&xorMappedAddress, packet.data() + byteIndex, sizeof(xorMappedAddress));
uint32_t stunAddress = ntohl(xorMappedAddress) ^ ntohl(RFC_5389_MAGIC_COOKIE_NETWORK_ORDER);
QHostAddress newPublicAddress = QHostAddress(stunAddress);
if (newPublicAddress != _publicSockAddr.getAddress() || newPublicPort != _publicSockAddr.getPort()) {
_publicSockAddr = HifiSockAddr(newPublicAddress, newPublicPort);
qDebug("New public socket received from STUN server is %s:%hu",
_publicSockAddr.getAddress().toString().toLocal8Bit().constData(),
_publicSockAddr.getPort());
}
_hasCompletedInitialSTUNFailure = true;
break;
}
} else {
// push forward attributeStartIndex by the length of this attribute
const int NUM_BYTES_ATTRIBUTE_TYPE = 2;
uint16_t attributeLength = 0;
memcpy(&attributeLength, packet.data() + attributeStartIndex + NUM_BYTES_ATTRIBUTE_TYPE,
sizeof(attributeLength));
attributeLength = ntohs(attributeLength);
attributeStartIndex += NUM_BYTES_MESSAGE_TYPE_AND_LENGTH + attributeLength;
}
}
bool NodeList::processSTUNResponse(const QByteArray& packet) {
if (LimitedNodeList::processSTUNResponse(packet)) {
// reset the number of failed STUN requests since last success
_stunRequestsSinceSuccess = 0;
_hasCompletedInitialSTUNFailure = true;
return true;
} else {
return false;
}
}

View file

@@ -89,8 +89,9 @@ private:
NodeList(char ownerType, unsigned short socketListenPort, unsigned short dtlsListenPort);
NodeList(NodeList const&); // Don't implement, needed to avoid copies of singleton
void operator=(NodeList const&); // Don't implement, needed to avoid copies of singleton
void sendSTUNRequest();
void processSTUNResponse(const QByteArray& packet);
bool processSTUNResponse(const QByteArray& packet);
void processDomainServerAuthRequest(const QByteArray& packet);
void requestAuthForDomainServer();
@@ -102,7 +103,6 @@ private:
DomainHandler _domainHandler;
int _numNoReplyDomainCheckIns;
HifiSockAddr _assignmentServerSocket;
HifiSockAddr _publicSockAddr;
bool _hasCompletedInitialSTUNFailure;
unsigned int _stunRequestsSinceSuccess;
};

View file

@@ -51,7 +51,7 @@ PacketVersion versionForPacketType(PacketType type) {
case PacketTypeMicrophoneAudioWithEcho:
return 2;
case PacketTypeSilentAudioFrame:
return 3;
return 4;
case PacketTypeMixedAudio:
return 1;
case PacketTypeAvatarData:

View file

@@ -505,6 +505,11 @@ void ScriptEngine::run() {
// write the number of silent samples so the audio-mixer can uphold timing
packetStream.writeRawData(reinterpret_cast<const char*>(&SCRIPT_AUDIO_BUFFER_SAMPLES), sizeof(int16_t));
// use the position and orientation of this avatar as the source of this audio
packetStream.writeRawData(reinterpret_cast<const char*>(&_avatarData->getPosition()), sizeof(glm::vec3));
glm::quat headOrientation = _avatarData->getHeadOrientation();
packetStream.writeRawData(reinterpret_cast<const char*>(&headOrientation), sizeof(glm::quat));
} else if (nextSoundOutput) {
// assume scripted avatar audio is mono and set channel flag to zero
packetStream << (quint8)0;
@@ -609,6 +614,7 @@ void ScriptEngine::timerFired() {
if (!callingTimer->isActive()) {
// this timer is done, we can kill it
_timerFunctionMap.remove(callingTimer);
delete callingTimer;
}
}

View file

@@ -284,6 +284,11 @@ QByteArray createByteArray(const glm::vec3& vector) {
return QByteArray::number(vector.x) + ',' + QByteArray::number(vector.y) + ',' + QByteArray::number(vector.z);
}
QByteArray createByteArray(const glm::quat& quat) {
return QByteArray::number(quat.x) + ',' + QByteArray::number(quat.y) + ',' + QByteArray::number(quat.z) + ','
+ QByteArray::number(quat.w);
}
bool isSimilarOrientation(const glm::quat& orientationA, const glm::quat& orientationB, float similarEnough) {
// Compute the angular distance between the two orientations
float angleOrientation = orientationA == orientationB ? 0.0f : glm::degrees(glm::angle(orientationA * glm::inverse(orientationB)));
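The quaternion serializer above emits components in x,y,z,w order, which is exactly the order AddressManager's quaternion regex consumes, so a viewpoint can round-trip through a hifi:// path. A small usage sketch, assuming the GLMHelpers declarations are in scope:

    // the identity rotation serializes with w last
    QByteArray s = createByteArray(glm::quat(1.0f, 0.0f, 0.0f, 0.0f));  // "0,0,0,1"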

View file

@@ -77,6 +77,7 @@ float extractUniformScale(const glm::mat4& matrix);
float extractUniformScale(const glm::vec3& scale);
QByteArray createByteArray(const glm::vec3& vector);
QByteArray createByteArray(const glm::quat& quat);
/// \return true if the two orientations are within similarEnough degrees of each other
const float ORIENTATION_SIMILAR_ENOUGH = 5.0f; // 5 degrees in any direction