Merge branch 'master' of https://github.com/highfidelity/hifi into 19964

Conflicts: interface/ui/preferencesDialog.ui

Commit b527966db8
83 changed files with 3341 additions and 1615 deletions
12
BUILD.md
@ -96,7 +96,9 @@ Currently building on Windows has been tested using the following compilers:

#####Windows SDK 7.1

Whichever version of Visual Studio you use, first install [Microsoft Windows SDK for Windows 7 and .NET Framework 4](http://www.microsoft.com/en-us/download/details.aspx?id=8279).
Whichever version of Visual Studio you use, you will need [Microsoft Windows SDK for Windows 7 and .NET Framework 4](http://www.microsoft.com/en-us/download/details.aspx?id=8279).

NOTE: If using Visual Studio C++ 2010 Express, you need to follow a specific install order. See below before installing the Windows SDK.

######Windows 8.1

You may have already downloaded the Windows 8 SDK (e.g. if you have previously installed Visual Studio 2013). If so, change CMAKE_PREFIX_PATH in %HIFI_DIR%\CMakeLists.txt to point to the Windows 8 SDK binaries. The default path is `C:\Program Files (x86)\Windows Kits\8.1\Lib\winv6.3\um\x86`
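As an illustration only (the actual variable layout in %HIFI_DIR%\CMakeLists.txt may differ), the CMAKE_PREFIX_PATH change described above might look like:

```cmake
# Hypothetical sketch: prepend the already-installed Windows 8.1 SDK library path
# so CMake finds it instead of the Windows SDK 7.1 default.
if (WIN32)
  set(CMAKE_PREFIX_PATH "C:/Program Files (x86)/Windows Kits/8.1/Lib/winv6.3/um/x86" ${CMAKE_PREFIX_PATH})
endif ()
```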
@ -109,6 +111,14 @@ The following patches/service packs are also required:

* [VS2010 SP1](http://www.microsoft.com/en-us/download/details.aspx?id=23691)
* [VS2010 SP1 Compiler Update](http://www.microsoft.com/en-us/download/details.aspx?id=4422)

IMPORTANT: Use the following install order:
Visual Studio C++ 2010 Express
Windows SDK 7.1
VS2010 SP1
VS2010 SP1 Compiler Update

If you get an error while installing the VS2010 SP1 Compiler Update saying that you don't have the Windows SDK installed, then uninstall all of the above and start again in the correct order.

Some of the build instructions will ask you to start a Visual Studio Command Prompt. You should have a shortcut in your Start menu called "Open Visual Studio Command Prompt (2010)" which will do so.

#####Visual Studio 2013
@ -33,12 +33,17 @@

#include "Agent.h"

static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;

Agent::Agent(const QByteArray& packet) :
    ThreadedAssignment(packet),
    _voxelEditSender(),
    _particleEditSender(),
    _modelEditSender(),
    _receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, 1, false, 1, 0, false),
    _receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES,
        InboundAudioStream::Settings(0, false, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, false,
            DEFAULT_WINDOW_STARVE_THRESHOLD, DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES,
            DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION, false)),
    _avatarHashMap()
{
    // be the parent of the script engine so it gets moved when we do
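For context, InboundAudioStream::Settings (introduced by this change) bundles the jitter-buffer tuning values that used to be passed as separate constructor arguments. A rough sketch of its shape, inferred from the eight-argument constructor call above and the _streamSettings fields used later in this diff (not the actual declaration in InboundAudioStream.h), is:

```cpp
// Illustrative sketch only -- field names and order are inferred from this diff.
struct Settings {
    int  _maxFramesOverDesired;                         // frames allowed above the desired length
    bool _dynamicJitterBuffers;                         // recompute desired frames from observed jitter
    int  _staticDesiredJitterBufferFrames;              // desired frames when dynamic mode is off
    bool _useStDevForJitterCalc;                        // stdev-of-timegaps vs. max-timegap method
    int  _windowStarveThreshold;                        // starves within window A that force a recalc
    int  _windowSecondsForDesiredCalcOnTooManyStarves;  // window A length (seconds)
    int  _windowSecondsForDesiredReduction;             // window B length (seconds)
    bool _repetitionWithFade;                           // repeat-and-fade the last frame when starved
};
```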
@ -148,7 +153,7 @@ void Agent::readPendingDatagrams() {

            _voxelViewer.processDatagram(mutablePacket, sourceNode);
        }

    } else if (datagramPacketType == PacketTypeMixedAudio) {
    } else if (datagramPacketType == PacketTypeMixedAudio || datagramPacketType == PacketTypeSilentAudioFrame) {

        _receivedAudioStream.parseData(receivedPacket);
@ -69,12 +69,12 @@ void attachNewNodeDataToNode(Node *newNode) {
|
|||
}
|
||||
}
|
||||
|
||||
bool AudioMixer::_useDynamicJitterBuffers = false;
|
||||
int AudioMixer::_staticDesiredJitterBufferFrames = 0;
|
||||
int AudioMixer::_maxFramesOverDesired = 0;
|
||||
InboundAudioStream::Settings AudioMixer::_streamSettings;
|
||||
|
||||
bool AudioMixer::_printStreamStats = false;
|
||||
|
||||
bool AudioMixer::_enableFilter = false;
|
||||
|
||||
AudioMixer::AudioMixer(const QByteArray& packet) :
|
||||
ThreadedAssignment(packet),
|
||||
_trailingSleepRatio(1.0f),
|
||||
|
@ -85,7 +85,12 @@ AudioMixer::AudioMixer(const QByteArray& packet) :
|
|||
_sumMixes(0),
|
||||
_sourceUnattenuatedZone(NULL),
|
||||
_listenerUnattenuatedZone(NULL),
|
||||
_lastSendAudioStreamStatsTime(usecTimestampNow())
|
||||
_lastPerSecondCallbackTime(usecTimestampNow()),
|
||||
_sendAudioStreamStats(false),
|
||||
_datagramsReadPerCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
|
||||
_timeSpentPerCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
|
||||
_timeSpentPerHashMatchCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
|
||||
_readPendingCallsPerSecondStats(1, READ_DATAGRAMS_STATS_WINDOW_SECONDS)
|
||||
{
|
||||
|
||||
}
|
||||
|
@ -99,15 +104,44 @@ const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
|
|||
const float ATTENUATION_AMOUNT_PER_DOUBLING_IN_DISTANCE = 0.18f;
|
||||
const float ATTENUATION_EPSILON_DISTANCE = 0.1f;
|
||||
|
||||
void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
                                                          AvatarAudioStream* listeningNodeStream) {
    // If repetition with fade is enabled:
    // If streamToAdd could not provide a frame (it was starved), then we'll mix its previously-mixed frame.
    // This is preferable to not mixing it at all, since that would be equivalent to inserting silence.
    // Basically, we'll repeat that last frame until the stream has a new frame to mix. Depending on how many times
    // we've repeated that frame in a row, we'll gradually fade the repeated frame into silence.
    // This improves the perceived quality of the audio slightly.

    float repeatedFrameFadeFactor = 1.0f;

    if (!streamToAdd->lastPopSucceeded()) {
        if (_streamSettings._repetitionWithFade && !streamToAdd->getLastPopOutput().isNull()) {
            // repetition with fade is enabled, and we do have a valid previous frame to repeat.
            // calculate its fade factor, which depends on how many times it's already been repeated.
            repeatedFrameFadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
            if (repeatedFrameFadeFactor == 0.0f) {
                return 0;
            }
        } else {
            return 0;
        }
    }
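calculateRepeatedFrameFadeFactor itself is not part of this diff; purely for illustration, a fade curve with the behavior the comment above describes could look like the following (thresholds and shape are assumptions, not the project's implementation):

```cpp
// Illustrative sketch: fade a repeated frame toward silence the more times
// it has been repeated consecutively. Thresholds are placeholder values.
float calculateRepeatedFrameFadeFactor(int indexOfRepeat) {
    const int REPEATS_AT_FULL_VOLUME = 1;   // the first repeat is mixed at full volume
    const int REPEATS_UNTIL_SILENCE = 10;   // beyond this, the repeat contributes nothing

    if (indexOfRepeat < REPEATS_AT_FULL_VOLUME) {
        return 1.0f;
    }
    if (indexOfRepeat >= REPEATS_UNTIL_SILENCE) {
        return 0.0f;
    }
    // linear ramp from 1.0 down to 0.0 between the two thresholds
    return 1.0f - float(indexOfRepeat - REPEATS_AT_FULL_VOLUME)
                / float(REPEATS_UNTIL_SILENCE - REPEATS_AT_FULL_VOLUME);
}
```

A factor of 0.0f is why the caller can bail out early above: a fully faded repeat would add nothing to the mix.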
|
||||
|
||||
// at this point, we know streamToAdd's last pop output is valid
|
||||
|
||||
// if the frame we're about to mix is silent, bail
|
||||
if (streamToAdd->getLastPopOutputLoudness() == 0.0f) {
|
||||
return 0;
|
||||
}
|
||||
|
||||
float bearingRelativeAngleToSource = 0.0f;
|
||||
float attenuationCoefficient = 1.0f;
|
||||
int numSamplesDelay = 0;
|
||||
float weakChannelAmplitudeRatio = 1.0f;
|
||||
|
||||
bool shouldAttenuate = (streamToAdd != listeningNodeStream);
|
||||
|
||||
|
||||
if (shouldAttenuate) {
|
||||
|
||||
// if the two stream pointers do not match then these are different streams
|
||||
|
@ -122,7 +156,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
|
|||
if (streamToAdd->getLastPopOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) {
|
||||
// according to mixer performance we have decided this does not get to be mixed in
|
||||
// bail out
|
||||
return;
|
||||
return 0;
|
||||
}
|
||||
|
||||
++_sumMixes;
|
||||
|
@ -222,12 +256,13 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
|
|||
int delayedChannelIndex = 0;
|
||||
|
||||
const int SINGLE_STEREO_OFFSET = 2;
|
||||
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
|
||||
|
||||
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) {
|
||||
|
||||
// setup the int16_t variables for the two sample sets
|
||||
correctStreamSample[0] = streamPopOutput[s / 2] * attenuationCoefficient;
|
||||
correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationCoefficient;
|
||||
correctStreamSample[0] = streamPopOutput[s / 2] * attenuationAndFade;
|
||||
correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationAndFade;
|
||||
|
||||
delayedChannelIndex = s + (numSamplesDelay * 2) + delayedChannelOffset;
|
||||
|
||||
|
@ -243,7 +278,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
|
|||
if (numSamplesDelay > 0) {
|
||||
// if there was a sample delay for this stream, we need to pull samples prior to the popped output
|
||||
// to stick at the beginning
|
||||
float attenuationAndWeakChannelRatio = attenuationCoefficient * weakChannelAmplitudeRatio;
|
||||
float attenuationAndWeakChannelRatioAndFade = attenuationCoefficient * weakChannelAmplitudeRatio * repeatedFrameFadeFactor;
|
||||
AudioRingBuffer::ConstIterator delayStreamPopOutput = streamPopOutput - numSamplesDelay;
|
||||
|
||||
// TODO: delayStreamPopOutput may be inside the last frame written if the ringbuffer is completely full
|
||||
|
@ -251,7 +286,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
|
|||
|
||||
for (int i = 0; i < numSamplesDelay; i++) {
|
||||
int parentIndex = i * 2;
|
||||
_clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatio;
|
||||
_clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatioAndFade;
|
||||
++delayStreamPopOutput;
|
||||
}
|
||||
}
|
||||
|
@ -262,41 +297,82 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
|
|||
attenuationCoefficient = 1.0f;
|
||||
}
|
||||
|
||||
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
|
||||
|
||||
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
|
||||
_clientSamples[s] = glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationCoefficient),
|
||||
_clientSamples[s] = glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationAndFade),
|
||||
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
|
||||
}
|
||||
}
|
||||
|
||||
if (_enableFilter && shouldAttenuate) {
|
||||
|
||||
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition();
|
||||
if (relativePosition.z < 0) { // if the source is behind us
|
||||
AudioFilterHSF1s& penumbraFilter = streamToAdd->getFilter();
|
||||
|
||||
// calculate penumbra angle
|
||||
float headPenumbraAngle = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
|
||||
glm::normalize(relativePosition));
|
||||
|
||||
if (relativePosition.x < 0) {
|
||||
headPenumbraAngle *= -1.0f; // [-pi/2,+pi/2]
|
||||
}
|
||||
|
||||
const float SQUARE_ROOT_OF_TWO_OVER_TWO = 0.71f; // half power
|
||||
const float ONE_OVER_TWO_PI = 1.0f / TWO_PI;
|
||||
const float FILTER_CUTOFF_FREQUENCY_HZ = 4000.0f;
|
||||
|
||||
// calculate the updated gain, frequency and slope. this will be tuned over time.
|
||||
const float penumbraFilterGainL = (-1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO;
|
||||
const float penumbraFilterGainR = (+1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO;
|
||||
const float penumbraFilterFrequency = FILTER_CUTOFF_FREQUENCY_HZ; // constant frequency
|
||||
const float penumbraFilterSlope = SQUARE_ROOT_OF_TWO_OVER_TWO; // constant slope
|
||||
|
||||
qDebug() << "penumbra gainL="
|
||||
<< penumbraFilterGainL
|
||||
<< "penumbra gainR="
|
||||
<< penumbraFilterGainR
|
||||
<< "penumbraAngle="
|
||||
<< headPenumbraAngle;
|
||||
|
||||
// set the gain on both filter channels
|
||||
penumbraFilter.setParameters(0, 0, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
|
||||
penumbraFilter.setParameters(0, 1, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
|
||||
|
||||
penumbraFilter.render(_clientSamples, _clientSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2);
|
||||
}
|
||||
}
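For a sense of the numbers the gain formulas above produce: a source directly behind the listener (headPenumbraAngle = 0) gets gainL = gainR = 0.71 (half power on both channels), while a source at headPenumbraAngle = ±π/2 gets roughly 0.71 ∓ 0.25, i.e. about 0.46 on one channel and 0.96 on the other, since (1/(2π)) · (π/2) = 0.25.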
|
||||
|
||||
return 1;
|
||||
}
|
||||
|
||||
void AudioMixer::prepareMixForListeningNode(Node* node) {
|
||||
int AudioMixer::prepareMixForListeningNode(Node* node) {
|
||||
AvatarAudioStream* nodeAudioStream = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioStream();
|
||||
|
||||
|
||||
// zero out the client mix for this node
|
||||
memset(_clientSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
|
||||
|
||||
// loop through all other nodes that have sufficient audio to mix
|
||||
int streamsMixed = 0;
|
||||
foreach (const SharedNodePointer& otherNode, NodeList::getInstance()->getNodeHash()) {
|
||||
if (otherNode->getLinkedData()) {
|
||||
|
||||
AudioMixerClientData* otherNodeClientData = (AudioMixerClientData*) otherNode->getLinkedData();
|
||||
|
||||
// enumerate the ARBs attached to the otherNode and add all that should be added to mix
|
||||
|
||||
const QHash<QUuid, PositionalAudioStream*>& otherNodeAudioStreams = otherNodeClientData->getAudioStreams();
|
||||
QHash<QUuid, PositionalAudioStream*>::ConstIterator i;
|
||||
for (i = otherNodeAudioStreams.begin(); i != otherNodeAudioStreams.constEnd(); i++) {
|
||||
for (i = otherNodeAudioStreams.constBegin(); i != otherNodeAudioStreams.constEnd(); i++) {
|
||||
PositionalAudioStream* otherNodeStream = i.value();
|
||||
|
||||
if ((*otherNode != *node || otherNodeStream->shouldLoopbackForNode())
|
||||
&& otherNodeStream->lastPopSucceeded()
|
||||
&& otherNodeStream->getLastPopOutputTrailingLoudness() > 0.0f) {
|
||||
|
||||
addStreamToMixForListeningNodeWithStream(otherNodeStream, nodeAudioStream);
|
||||
|
||||
if (*otherNode != *node || otherNodeStream->shouldLoopbackForNode()) {
|
||||
streamsMixed += addStreamToMixForListeningNodeWithStream(otherNodeStream, nodeAudioStream);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return streamsMixed;
|
||||
}
|
||||
|
||||
void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
|
||||
|
@ -332,7 +408,7 @@ void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const Hif
|
|||
void AudioMixer::sendStatsPacket() {
|
||||
static QJsonObject statsObject;
|
||||
|
||||
statsObject["useDynamicJitterBuffers"] = _useDynamicJitterBuffers;
|
||||
statsObject["useDynamicJitterBuffers"] = _streamSettings._dynamicJitterBuffers;
|
||||
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
|
||||
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
|
||||
|
||||
|
@ -358,9 +434,42 @@ void AudioMixer::sendStatsPacket() {
|
|||
int sizeOfStats = 0;
|
||||
int TOO_BIG_FOR_MTU = 1200; // some extra space for JSONification
|
||||
|
||||
QString property = "readPendingDatagram_calls_stats";
|
||||
QString value = getReadPendingDatagramsCallsPerSecondsStatsString();
|
||||
statsObject2[qPrintable(property)] = value;
|
||||
somethingToSend = true;
|
||||
sizeOfStats += property.size() + value.size();
|
||||
|
||||
property = "readPendingDatagram_packets_per_call_stats";
|
||||
value = getReadPendingDatagramsPacketsPerCallStatsString();
|
||||
statsObject2[qPrintable(property)] = value;
|
||||
somethingToSend = true;
|
||||
sizeOfStats += property.size() + value.size();
|
||||
|
||||
property = "readPendingDatagram_packets_time_per_call_stats";
|
||||
value = getReadPendingDatagramsTimeStatsString();
|
||||
statsObject2[qPrintable(property)] = value;
|
||||
somethingToSend = true;
|
||||
sizeOfStats += property.size() + value.size();
|
||||
|
||||
property = "readPendingDatagram_hashmatch_time_per_call_stats";
|
||||
value = getReadPendingDatagramsHashMatchTimeStatsString();
|
||||
statsObject2[qPrintable(property)] = value;
|
||||
somethingToSend = true;
|
||||
sizeOfStats += property.size() + value.size();
|
||||
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
int clientNumber = 0;
|
||||
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
|
||||
|
||||
// if we're too large, send the packet
|
||||
if (sizeOfStats > TOO_BIG_FOR_MTU) {
|
||||
nodeList->sendStatsToDomainServer(statsObject2);
|
||||
sizeOfStats = 0;
|
||||
statsObject2 = QJsonObject(); // clear it
|
||||
somethingToSend = false;
|
||||
}
|
||||
|
||||
clientNumber++;
|
||||
AudioMixerClientData* clientData = static_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
if (clientData) {
|
||||
|
@ -370,14 +479,6 @@ void AudioMixer::sendStatsPacket() {
|
|||
somethingToSend = true;
|
||||
sizeOfStats += property.size() + value.size();
|
||||
}
|
||||
|
||||
// if we're too large, send the packet
|
||||
if (sizeOfStats > TOO_BIG_FOR_MTU) {
|
||||
nodeList->sendStatsToDomainServer(statsObject2);
|
||||
sizeOfStats = 0;
|
||||
statsObject2 = QJsonObject(); // clear it
|
||||
somethingToSend = false;
|
||||
}
|
||||
}
|
||||
|
||||
if (somethingToSend) {
|
||||
|
@ -448,41 +549,81 @@ void AudioMixer::run() {
|
|||
|
||||
if (settingsObject.contains(AUDIO_GROUP_KEY)) {
|
||||
QJsonObject audioGroupObject = settingsObject[AUDIO_GROUP_KEY].toObject();
|
||||
|
||||
|
||||
// check the payload to see if we have asked for dynamicJitterBuffer support
|
||||
const QString DYNAMIC_JITTER_BUFFER_JSON_KEY = "A-dynamic-jitter-buffer";
|
||||
bool shouldUseDynamicJitterBuffers = audioGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
|
||||
if (shouldUseDynamicJitterBuffers) {
|
||||
_streamSettings._dynamicJitterBuffers = audioGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
|
||||
if (_streamSettings._dynamicJitterBuffers) {
|
||||
qDebug() << "Enable dynamic jitter buffers.";
|
||||
_useDynamicJitterBuffers = true;
|
||||
} else {
|
||||
qDebug() << "Dynamic jitter buffers disabled.";
|
||||
_useDynamicJitterBuffers = false;
|
||||
}
|
||||
|
||||
|
||||
bool ok;
|
||||
|
||||
const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "B-desired-jitter-buffer-frames";
|
||||
_staticDesiredJitterBufferFrames = audioGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok);
|
||||
const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "B-static-desired-jitter-buffer-frames";
|
||||
_streamSettings._staticDesiredJitterBufferFrames = audioGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_staticDesiredJitterBufferFrames = DEFAULT_DESIRED_JITTER_BUFFER_FRAMES;
|
||||
_streamSettings._staticDesiredJitterBufferFrames = DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES;
|
||||
}
|
||||
qDebug() << "Static desired jitter buffer frames:" << _staticDesiredJitterBufferFrames;
|
||||
qDebug() << "Static desired jitter buffer frames:" << _streamSettings._staticDesiredJitterBufferFrames;
|
||||
|
||||
const QString MAX_FRAMES_OVER_DESIRED_JSON_KEY = "C-max-frames-over-desired";
|
||||
_maxFramesOverDesired = audioGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok);
|
||||
_streamSettings._maxFramesOverDesired = audioGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED;
|
||||
_streamSettings._maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED;
|
||||
}
|
||||
qDebug() << "Max frames over desired:" << _streamSettings._maxFramesOverDesired;
|
||||
|
||||
const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "D-use-stdev-for-desired-calc";
|
||||
_streamSettings._useStDevForJitterCalc = audioGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool();
|
||||
if (_streamSettings._useStDevForJitterCalc) {
|
||||
qDebug() << "Using Philip's stdev method for jitter calc if dynamic jitter buffers enabled";
|
||||
} else {
|
||||
qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled";
|
||||
}
|
||||
qDebug() << "Max frames over desired:" << _maxFramesOverDesired;
|
||||
|
||||
const QString PRINT_STREAM_STATS_JSON_KEY = "H-print-stream-stats";
|
||||
const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "E-window-starve-threshold";
|
||||
_streamSettings._windowStarveThreshold = audioGroupObject[WINDOW_STARVE_THRESHOLD_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._windowStarveThreshold = DEFAULT_WINDOW_STARVE_THRESHOLD;
|
||||
}
|
||||
qDebug() << "Window A starve threshold:" << _streamSettings._windowStarveThreshold;
|
||||
|
||||
const QString WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY = "F-window-seconds-for-desired-calc-on-too-many-starves";
|
||||
_streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = audioGroupObject[WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES;
|
||||
}
|
||||
qDebug() << "Window A length:" << _streamSettings._windowSecondsForDesiredCalcOnTooManyStarves << "seconds";
|
||||
|
||||
const QString WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY = "G-window-seconds-for-desired-reduction";
|
||||
_streamSettings._windowSecondsForDesiredReduction = audioGroupObject[WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY].toString().toInt(&ok);
|
||||
if (!ok) {
|
||||
_streamSettings._windowSecondsForDesiredReduction = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION;
|
||||
}
|
||||
qDebug() << "Window B length:" << _streamSettings._windowSecondsForDesiredReduction << "seconds";
|
||||
|
||||
const QString REPETITION_WITH_FADE_JSON_KEY = "H-repetition-with-fade";
|
||||
_streamSettings._repetitionWithFade = audioGroupObject[REPETITION_WITH_FADE_JSON_KEY].toBool();
|
||||
if (_streamSettings._repetitionWithFade) {
|
||||
qDebug() << "Repetition with fade enabled";
|
||||
} else {
|
||||
qDebug() << "Repetition with fade disabled";
|
||||
}
|
||||
|
||||
const QString PRINT_STREAM_STATS_JSON_KEY = "I-print-stream-stats";
|
||||
_printStreamStats = audioGroupObject[PRINT_STREAM_STATS_JSON_KEY].toBool();
|
||||
if (_printStreamStats) {
|
||||
qDebug() << "Stream stats will be printed to stdout";
|
||||
}
|
||||
|
||||
const QString UNATTENUATED_ZONE_KEY = "D-unattenuated-zone";
|
||||
const QString FILTER_KEY = "J-enable-filter";
|
||||
_enableFilter = audioGroupObject[FILTER_KEY].toBool();
|
||||
if (_enableFilter) {
|
||||
qDebug() << "Filter enabled";
|
||||
}
|
||||
|
||||
const QString UNATTENUATED_ZONE_KEY = "Z-unattenuated-zone";
|
||||
|
||||
QString unattenuatedZoneString = audioGroupObject[UNATTENUATED_ZONE_KEY].toString();
|
||||
if (!unattenuatedZoneString.isEmpty()) {
|
||||
|
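To make the key lookups above concrete, the audio group object being parsed here might look roughly like the following (values are placeholders taken from the defaults in describe-settings.json later in this diff; note that the numeric fields arrive as strings, which is why they are parsed with toString().toInt(&ok)):

```json
{
    "A-dynamic-jitter-buffer": false,
    "B-static-desired-jitter-buffer-frames": "1",
    "C-max-frames-over-desired": "10",
    "D-use-stdev-for-desired-calc": false,
    "E-window-starve-threshold": "3",
    "F-window-seconds-for-desired-calc-on-too-many-starves": "50",
    "G-window-seconds-for-desired-reduction": "10",
    "H-repetition-with-fade": false,
    "I-print-stream-stats": false,
    "J-enable-filter": false,
    "Z-unattenuated-zone": ""
}
```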
@ -510,9 +651,8 @@ void AudioMixer::run() {
|
|||
int nextFrame = 0;
|
||||
QElapsedTimer timer;
|
||||
timer.start();
|
||||
|
||||
char* clientMixBuffer = new char[NETWORK_BUFFER_LENGTH_BYTES_STEREO + sizeof(quint16)
|
||||
+ numBytesForPacketHeaderGivenPacketType(PacketTypeMixedAudio)];
|
||||
|
||||
char clientMixBuffer[MAX_PACKET_SIZE];
|
||||
|
||||
int usecToSleep = BUFFER_SEND_INTERVAL_USECS;
|
||||
|
||||
|
@ -571,15 +711,13 @@ void AudioMixer::run() {
|
|||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
|
||||
bool sendAudioStreamStats = false;
|
||||
quint64 now = usecTimestampNow();
|
||||
if (now - _lastSendAudioStreamStatsTime > TOO_LONG_SINCE_LAST_SEND_AUDIO_STREAM_STATS) {
|
||||
_lastSendAudioStreamStatsTime = now;
|
||||
sendAudioStreamStats = true;
|
||||
}
|
||||
|
||||
bool streamStatsPrinted = false;
|
||||
quint64 now = usecTimestampNow();
|
||||
if (now - _lastPerSecondCallbackTime > USECS_PER_SECOND) {
|
||||
perSecondActions();
|
||||
_lastPerSecondCallbackTime = now;
|
||||
}
|
||||
|
||||
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
|
||||
if (node->getLinkedData()) {
|
||||
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
|
||||
|
@ -592,43 +730,52 @@ void AudioMixer::run() {
|
|||
if (node->getType() == NodeType::Agent && node->getActiveSocket()
|
||||
&& nodeData->getAvatarAudioStream()) {
|
||||
|
||||
prepareMixForListeningNode(node.data());
|
||||
int streamsMixed = prepareMixForListeningNode(node.data());
|
||||
|
||||
// pack header
|
||||
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeMixedAudio);
|
||||
char* dataAt = clientMixBuffer + numBytesPacketHeader;
|
||||
char* dataAt;
|
||||
if (streamsMixed > 0) {
|
||||
// pack header
|
||||
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeMixedAudio);
|
||||
dataAt = clientMixBuffer + numBytesPacketHeader;
|
||||
|
||||
// pack sequence number
|
||||
quint16 sequence = nodeData->getOutgoingSequenceNumber();
|
||||
memcpy(dataAt, &sequence, sizeof(quint16));
|
||||
dataAt += sizeof(quint16);
|
||||
// pack sequence number
|
||||
quint16 sequence = nodeData->getOutgoingSequenceNumber();
|
||||
memcpy(dataAt, &sequence, sizeof(quint16));
|
||||
dataAt += sizeof(quint16);
|
||||
|
||||
// pack mixed audio samples
|
||||
memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
|
||||
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
|
||||
// pack mixed audio samples
|
||||
memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
|
||||
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
|
||||
} else {
|
||||
// pack header
|
||||
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeSilentAudioFrame);
|
||||
dataAt = clientMixBuffer + numBytesPacketHeader;
|
||||
|
||||
// pack sequence number
|
||||
quint16 sequence = nodeData->getOutgoingSequenceNumber();
|
||||
memcpy(dataAt, &sequence, sizeof(quint16));
|
||||
dataAt += sizeof(quint16);
|
||||
|
||||
// pack number of silent audio samples
|
||||
quint16 numSilentSamples = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO;
|
||||
memcpy(dataAt, &numSilentSamples, sizeof(quint16));
|
||||
dataAt += sizeof(quint16);
|
||||
}
|
||||
|
||||
// send mixed audio packet
|
||||
nodeList->writeDatagram(clientMixBuffer, dataAt - clientMixBuffer, node);
|
||||
nodeData->incrementOutgoingMixedAudioSequenceNumber();
|
||||
|
||||
// send an audio stream stats packet if it's time
|
||||
if (sendAudioStreamStats) {
|
||||
if (_sendAudioStreamStats) {
|
||||
nodeData->sendAudioStreamStatsPackets(node);
|
||||
|
||||
if (_printStreamStats) {
|
||||
printf("\nStats for agent %s:\n", node->getUUID().toString().toLatin1().data());
|
||||
nodeData->printUpstreamDownstreamStats();
|
||||
streamStatsPrinted = true;
|
||||
}
|
||||
_sendAudioStreamStats = false;
|
||||
}
|
||||
|
||||
++_sumListeners;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (streamStatsPrinted) {
|
||||
printf("\n----------------------------------------------------------------\n");
|
||||
}
|
||||
|
||||
++_numStatFrames;
|
||||
|
||||
|
@ -644,6 +791,90 @@ void AudioMixer::run() {
|
|||
usleep(usecToSleep);
|
||||
}
|
||||
}
|
||||
|
||||
delete[] clientMixBuffer;
|
||||
}
|
||||
|
||||
void AudioMixer::perSecondActions() {
|
||||
_sendAudioStreamStats = true;
|
||||
|
||||
int callsLastSecond = _datagramsReadPerCallStats.getCurrentIntervalSamples();
|
||||
_readPendingCallsPerSecondStats.update(callsLastSecond);
|
||||
|
||||
if (_printStreamStats) {
|
||||
|
||||
printf("\n================================================================================\n\n");
|
||||
|
||||
printf(" readPendingDatagram() calls per second | avg: %.2f, avg_30s: %.2f, last_second: %d\n",
|
||||
_readPendingCallsPerSecondStats.getAverage(),
|
||||
_readPendingCallsPerSecondStats.getWindowAverage(),
|
||||
callsLastSecond);
|
||||
|
||||
printf(" Datagrams read per call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n",
|
||||
_datagramsReadPerCallStats.getAverage(),
|
||||
_datagramsReadPerCallStats.getWindowAverage(),
|
||||
_datagramsReadPerCallStats.getCurrentIntervalAverage());
|
||||
|
||||
printf(" Usecs spent per readPendingDatagram() call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n",
|
||||
_timeSpentPerCallStats.getAverage(),
|
||||
_timeSpentPerCallStats.getWindowAverage(),
|
||||
_timeSpentPerCallStats.getCurrentIntervalAverage());
|
||||
|
||||
printf(" Usecs spent per packetVersionAndHashMatch() call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n",
|
||||
_timeSpentPerHashMatchCallStats.getAverage(),
|
||||
_timeSpentPerHashMatchCallStats.getWindowAverage(),
|
||||
_timeSpentPerHashMatchCallStats.getCurrentIntervalAverage());
|
||||
|
||||
double WINDOW_LENGTH_USECS = READ_DATAGRAMS_STATS_WINDOW_SECONDS * USECS_PER_SECOND;
|
||||
|
||||
printf(" %% time spent in readPendingDatagram() calls | avg_30s: %.6f%%, last_second: %.6f%%\n",
|
||||
_timeSpentPerCallStats.getWindowSum() / WINDOW_LENGTH_USECS * 100.0,
|
||||
_timeSpentPerCallStats.getCurrentIntervalSum() / USECS_PER_SECOND * 100.0);
|
||||
|
||||
printf("%% time spent in packetVersionAndHashMatch() calls: | avg_30s: %.6f%%, last_second: %.6f%%\n",
|
||||
_timeSpentPerHashMatchCallStats.getWindowSum() / WINDOW_LENGTH_USECS * 100.0,
|
||||
_timeSpentPerHashMatchCallStats.getCurrentIntervalSum() / USECS_PER_SECOND * 100.0);
|
||||
|
||||
foreach(const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) {
|
||||
if (node->getLinkedData()) {
|
||||
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
|
||||
|
||||
if (node->getType() == NodeType::Agent && node->getActiveSocket()) {
|
||||
printf("\nStats for agent %s --------------------------------\n",
|
||||
node->getUUID().toString().toLatin1().data());
|
||||
nodeData->printUpstreamDownstreamStats();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
_datagramsReadPerCallStats.currentIntervalComplete();
|
||||
_timeSpentPerCallStats.currentIntervalComplete();
|
||||
_timeSpentPerHashMatchCallStats.currentIntervalComplete();
|
||||
}
|
||||
|
||||
QString AudioMixer::getReadPendingDatagramsCallsPerSecondsStatsString() const {
|
||||
QString result = "calls_per_sec_avg_30s: " + QString::number(_readPendingCallsPerSecondStats.getWindowAverage(), 'f', 2)
|
||||
+ " calls_last_sec: " + QString::number(_readPendingCallsPerSecondStats.getLastCompleteIntervalStats().getSum() + 0.5, 'f', 0);
|
||||
return result;
|
||||
}
|
||||
|
||||
QString AudioMixer::getReadPendingDatagramsPacketsPerCallStatsString() const {
|
||||
QString result = "pkts_per_call_avg_30s: " + QString::number(_datagramsReadPerCallStats.getWindowAverage(), 'f', 2)
|
||||
+ " pkts_per_call_avg_1s: " + QString::number(_datagramsReadPerCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2);
|
||||
return result;
|
||||
}
|
||||
|
||||
QString AudioMixer::getReadPendingDatagramsTimeStatsString() const {
|
||||
QString result = "usecs_per_call_avg_30s: " + QString::number(_timeSpentPerCallStats.getWindowAverage(), 'f', 2)
|
||||
+ " usecs_per_call_avg_1s: " + QString::number(_timeSpentPerCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2)
|
||||
+ " prct_time_in_call_30s: " + QString::number(_timeSpentPerCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0, 'f', 6) + "%"
|
||||
+ " prct_time_in_call_1s: " + QString::number(_timeSpentPerCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0, 'f', 6) + "%";
|
||||
return result;
|
||||
}
|
||||
|
||||
QString AudioMixer::getReadPendingDatagramsHashMatchTimeStatsString() const {
|
||||
QString result = "usecs_per_hashmatch_avg_30s: " + QString::number(_timeSpentPerHashMatchCallStats.getWindowAverage(), 'f', 2)
|
||||
+ " usecs_per_hashmatch_avg_1s: " + QString::number(_timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2)
|
||||
+ " prct_time_in_hashmatch_30s: " + QString::number(_timeSpentPerHashMatchCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0, 'f', 6) + "%"
|
||||
+ " prct_time_in_hashmatch_1s: " + QString::number(_timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0, 'f', 6) + "%";
|
||||
return result;
|
||||
}
|
||||
|
|
|
@ -21,7 +21,8 @@ class AvatarAudioStream;
|
|||
|
||||
const int SAMPLE_PHASE_DELAY_AT_90 = 20;
|
||||
|
||||
const quint64 TOO_LONG_SINCE_LAST_SEND_AUDIO_STREAM_STATS = 1 * USECS_PER_SECOND;
|
||||
const int READ_DATAGRAMS_STATS_WINDOW_SECONDS = 30;
|
||||
|
||||
|
||||
/// Handles assignments of type AudioMixer - mixing streams of audio and re-distributing to various clients.
|
||||
class AudioMixer : public ThreadedAssignment {
|
||||
|
@ -38,21 +39,26 @@ public slots:
|
|||
|
||||
void sendStatsPacket();
|
||||
|
||||
static bool getUseDynamicJitterBuffers() { return _useDynamicJitterBuffers; }
|
||||
static int getStaticDesiredJitterBufferFrames() { return _staticDesiredJitterBufferFrames; }
|
||||
static int getMaxFramesOverDesired() { return _maxFramesOverDesired; }
|
||||
|
||||
static const InboundAudioStream::Settings& getStreamSettings() { return _streamSettings; }
|
||||
|
||||
private:
|
||||
/// adds one stream to the mix for a listening node
|
||||
void addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
|
||||
int addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
|
||||
AvatarAudioStream* listeningNodeStream);
|
||||
|
||||
/// prepares and sends a mix to one Node
|
||||
void prepareMixForListeningNode(Node* node);
|
||||
int prepareMixForListeningNode(Node* node);
|
||||
|
||||
// client samples capacity is larger than what will be sent to optimize mixing
|
||||
// we are MMX adding 4 samples at a time so we need client samples to have an extra 4
|
||||
int16_t _clientSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
|
||||
|
||||
void perSecondActions();
|
||||
|
||||
QString getReadPendingDatagramsCallsPerSecondsStatsString() const;
|
||||
QString getReadPendingDatagramsPacketsPerCallStatsString() const;
|
||||
QString getReadPendingDatagramsTimeStatsString() const;
|
||||
QString getReadPendingDatagramsHashMatchTimeStatsString() const;
|
||||
|
||||
float _trailingSleepRatio;
|
||||
float _minAudibilityThreshold;
|
||||
|
@ -63,13 +69,21 @@ private:
|
|||
AABox* _sourceUnattenuatedZone;
|
||||
AABox* _listenerUnattenuatedZone;
|
||||
|
||||
static bool _useDynamicJitterBuffers;
|
||||
static int _staticDesiredJitterBufferFrames;
|
||||
static int _maxFramesOverDesired;
|
||||
static InboundAudioStream::Settings _streamSettings;
|
||||
|
||||
static bool _printStreamStats;
|
||||
static bool _enableFilter;
|
||||
|
||||
quint64 _lastPerSecondCallbackTime;
|
||||
|
||||
quint64 _lastSendAudioStreamStatsTime;
|
||||
bool _sendAudioStreamStats;
|
||||
|
||||
// stats
|
||||
MovingMinMaxAvg<int> _datagramsReadPerCallStats; // update with # of datagrams read for each readPendingDatagrams call
|
||||
MovingMinMaxAvg<quint64> _timeSpentPerCallStats; // update with usecs spent inside each readPendingDatagrams call
|
||||
MovingMinMaxAvg<quint64> _timeSpentPerHashMatchCallStats; // update with usecs spent inside each packetVersionAndHashMatch call
|
||||
|
||||
MovingMinMaxAvg<int> _readPendingCallsPerSecondStats; // update with # of readPendingDatagrams calls in the last second
|
||||
};
|
||||
|
||||
#endif // hifi_AudioMixer_h
|
||||
|
|
|
@ -74,9 +74,7 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
|
|||
quint8 channelFlag = *(reinterpret_cast<const quint8*>(channelFlagAt));
|
||||
bool isStereo = channelFlag == 1;
|
||||
|
||||
_audioStreams.insert(nullUUID,
|
||||
matchingStream = new AvatarAudioStream(isStereo, AudioMixer::getUseDynamicJitterBuffers(),
|
||||
AudioMixer::getStaticDesiredJitterBufferFrames(), AudioMixer::getMaxFramesOverDesired()));
|
||||
_audioStreams.insert(nullUUID, matchingStream = new AvatarAudioStream(isStereo, AudioMixer::getStreamSettings()));
|
||||
} else {
|
||||
matchingStream = _audioStreams.value(nullUUID);
|
||||
}
|
||||
|
@ -88,9 +86,8 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
|
|||
QUuid streamIdentifier = QUuid::fromRfc4122(packet.mid(bytesBeforeStreamIdentifier, NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
if (!_audioStreams.contains(streamIdentifier)) {
|
||||
_audioStreams.insert(streamIdentifier,
|
||||
matchingStream = new InjectedAudioStream(streamIdentifier, AudioMixer::getUseDynamicJitterBuffers(),
|
||||
AudioMixer::getStaticDesiredJitterBufferFrames(), AudioMixer::getMaxFramesOverDesired()));
|
||||
// we don't have this injected stream yet, so add it
|
||||
_audioStreams.insert(streamIdentifier, matchingStream = new InjectedAudioStream(streamIdentifier, AudioMixer::getStreamSettings()));
|
||||
} else {
|
||||
matchingStream = _audioStreams.value(streamIdentifier);
|
||||
}
|
||||
|
@ -105,18 +102,15 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend(AABox* checkSourceZone, A
|
|||
QHash<QUuid, PositionalAudioStream*>::ConstIterator i;
|
||||
for (i = _audioStreams.constBegin(); i != _audioStreams.constEnd(); i++) {
|
||||
PositionalAudioStream* stream = i.value();
|
||||
|
||||
if (stream->popFrames(1, true) > 0) {
|
||||
// this is a ring buffer that is ready to go
|
||||
|
||||
// calculate the trailing avg loudness for the next frame
|
||||
// that would be mixed in
|
||||
stream->updateLastPopOutputTrailingLoudness();
|
||||
|
||||
if (checkSourceZone && checkSourceZone->contains(stream->getPosition())) {
|
||||
stream->setListenerUnattenuatedZone(listenerZone);
|
||||
} else {
|
||||
stream->setListenerUnattenuatedZone(NULL);
|
||||
}
|
||||
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
|
||||
}
|
||||
|
||||
if (checkSourceZone && checkSourceZone->contains(stream->getPosition())) {
|
||||
stream->setListenerUnattenuatedZone(listenerZone);
|
||||
} else {
|
||||
stream->setListenerUnattenuatedZone(NULL);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -185,7 +179,9 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
|
|||
|
||||
// pack the calculated number of stream stats
|
||||
for (int i = 0; i < numStreamStatsToPack; i++) {
|
||||
AudioStreamStats streamStats = audioStreamsIterator.value()->updateSeqHistoryAndGetAudioStreamStats();
|
||||
PositionalAudioStream* stream = audioStreamsIterator.value();
|
||||
stream->perSecondCallbackForUpdatingStats();
|
||||
AudioStreamStats streamStats = stream->getAudioStreamStats();
|
||||
memcpy(dataAt, &streamStats, sizeof(AudioStreamStats));
|
||||
dataAt += sizeof(AudioStreamStats);
|
||||
|
||||
|
|
|
@ -13,8 +13,8 @@
|
|||
|
||||
#include "AvatarAudioStream.h"
|
||||
|
||||
AvatarAudioStream::AvatarAudioStream(bool isStereo, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired) :
|
||||
PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, dynamicJitterBuffer, staticDesiredJitterBufferFrames, maxFramesOverDesired)
|
||||
AvatarAudioStream::AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings) :
|
||||
PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, settings)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -38,26 +38,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
|
|||
// read the positional data
|
||||
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
|
||||
|
||||
if (type == PacketTypeSilentAudioFrame) {
|
||||
int16_t numSilentSamples;
|
||||
memcpy(&numSilentSamples, packetAfterSeqNum.data() + readBytes, sizeof(int16_t));
|
||||
readBytes += sizeof(int16_t);
|
||||
|
||||
numAudioSamples = numSilentSamples;
|
||||
} else {
|
||||
int numAudioBytes = packetAfterSeqNum.size() - readBytes;
|
||||
numAudioSamples = numAudioBytes / sizeof(int16_t);
|
||||
}
|
||||
return readBytes;
|
||||
}
|
||||
|
||||
int AvatarAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
|
||||
int readBytes = 0;
|
||||
if (type == PacketTypeSilentAudioFrame) {
|
||||
writeDroppableSilentSamples(numAudioSamples);
|
||||
} else {
|
||||
// there is audio data to read
|
||||
readBytes += _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t));
|
||||
}
|
||||
// calculate how many samples are in this packet
|
||||
int numAudioBytes = packetAfterSeqNum.size() - readBytes;
|
||||
numAudioSamples = numAudioBytes / sizeof(int16_t);
|
||||
|
||||
return readBytes;
|
||||
}
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
|
||||
class AvatarAudioStream : public PositionalAudioStream {
|
||||
public:
|
||||
AvatarAudioStream(bool isStereo, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired);
|
||||
AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings);
|
||||
|
||||
private:
|
||||
// disallow copying of AvatarAudioStream objects
|
||||
|
@ -26,7 +26,6 @@ private:
|
|||
AvatarAudioStream& operator= (const AvatarAudioStream&);
|
||||
|
||||
int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
|
||||
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples);
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarAudioStream_h
|
||||
|
|
|
@ -9,8 +9,8 @@
|
|||
"help": "Dynamically buffer client audio based on perceived jitter in packet receipt timing",
|
||||
"default": false
|
||||
},
|
||||
"B-desired-jitter-buffer-frames": {
|
||||
"label": "Desired Jitter Buffer Frames",
|
||||
"B-static-desired-jitter-buffer-frames": {
|
||||
"label": "Static Desired Jitter Buffer Frames",
|
||||
"help": "If dynamic jitter buffers is disabled, this determines the target number of frames maintained by the AudioMixer's jitter buffers",
|
||||
"placeholder": "1",
|
||||
"default": "1"
|
||||
|
@ -21,18 +21,54 @@
|
|||
"placeholder": "10",
|
||||
"default": "10"
|
||||
},
|
||||
"H-print-stream-stats": {
|
||||
"D-use-stdev-for-desired-calc": {
|
||||
"type": "checkbox",
|
||||
"label": "Use Stdev for Desired Jitter Frames Calc:",
|
||||
"help": "If checked, Philip's method (stdev of timegaps) is used to calculate desired jitter frames. Otherwise, Fred's method (max timegap) is used",
|
||||
"default": false
|
||||
},
|
||||
"E-window-starve-threshold": {
|
||||
"label": "Window Starve Threshold",
|
||||
"help": "If this many starves occur in an N-second window (N is the number in the next field), then the desired jitter frames will be re-evaluated using Window A.",
|
||||
"placeholder": "3",
|
||||
"default": "3"
|
||||
},
|
||||
"F-window-seconds-for-desired-calc-on-too-many-starves": {
|
||||
"label": "Timegaps Window (A) Seconds:",
|
||||
"help": "Window A contains a history of timegaps. Its max timegap is used to re-evaluate the desired jitter frames when too many starves occur within it.",
|
||||
"placeholder": "50",
|
||||
"default": "50"
|
||||
},
|
||||
"G-window-seconds-for-desired-reduction": {
|
||||
"label": "Timegaps Window (B) Seconds:",
|
||||
"help": "Window B contains a history of timegaps. Its max timegap is used as a ceiling for the desired jitter frames value.",
|
||||
"placeholder": "10",
|
||||
"default": "10"
|
||||
},
|
||||
"H-repetition-with-fade": {
|
||||
"type": "checkbox",
|
||||
"label": "Repetition with Fade:",
|
||||
"help": "If enabled, dropped frames and mixing during starves will repeat the last frame, eventually fading to silence",
|
||||
"default": false
|
||||
},
|
||||
"I-print-stream-stats": {
|
||||
"type": "checkbox",
|
||||
"label": "Print Stream Stats:",
|
||||
"help": "If enabled, audio upstream and downstream stats of each agent will be printed each second to stdout",
|
||||
"default": false
|
||||
},
|
||||
"D-unattenuated-zone": {
|
||||
"Z-unattenuated-zone": {
|
||||
"label": "Unattenuated Zone",
|
||||
"help": "Boxes for source and listener (corner x, corner y, corner z, size x, size y, size z, corner x, corner y, corner z, size x, size y, size z)",
|
||||
"placeholder": "no zone",
|
||||
"default": ""
|
||||
},
|
||||
"J-enable-filter": {
|
||||
"type": "checkbox",
|
||||
"label": "Enable Positional Filter",
|
||||
"help": "If enabled, positional audio stream uses lowpass filter",
|
||||
"default": false
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
47
examples/PlayRecordingOnAC.js
Normal file
|
@ -0,0 +1,47 @@
|
|||
//
|
||||
// PlayRecordingOnAC.js
|
||||
// examples
|
||||
//
|
||||
// Created by Clément Brisset on 8/24/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
|
||||
var filename = "http://your.recording.url";
|
||||
var playFromCurrentLocation = true;
|
||||
|
||||
Avatar.faceModelURL = "http://public.highfidelity.io/models/heads/EvilPhilip_v7.fst";
|
||||
Avatar.skeletonModelURL = "http://public.highfidelity.io/models/skeletons/Philip_Carl_Body_A-Pose.fst";
|
||||
|
||||
// Set position here if playFromCurrentLocation is true
|
||||
Avatar.position = { x:1, y: 1, z: 1 };
|
||||
|
||||
Agent.isAvatar = true;
|
||||
|
||||
Avatar.loadRecording(filename);
|
||||
|
||||
count = 300; // This is necessary to wait for the audio mixer to connect
|
||||
function update(event) {
|
||||
if (count > 0) {
|
||||
count--;
|
||||
return;
|
||||
}
|
||||
if (count == 0) {
|
||||
Avatar.startPlaying(playFromCurrentLocation);
|
||||
Avatar.play();
|
||||
Vec3.print("Playing from ", Avatar.position);
|
||||
|
||||
count--;
|
||||
}
|
||||
|
||||
if (Avatar.isPlaying()) {
|
||||
Avatar.play();
|
||||
} else {
|
||||
Script.update.disconnect(update);
|
||||
}
|
||||
}
|
||||
|
||||
Script.update.connect(update);
|
|
@ -139,7 +139,9 @@ function moveUI() {
|
|||
function mousePressEvent(event) {
|
||||
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
|
||||
|
||||
if (recordIcon === toolBar.clicked(clickedOverlay)) {
|
||||
print("Status: isPlaying=" + MyAvatar.isPlaying() + ", isRecording=" + MyAvatar.isRecording());
|
||||
|
||||
if (recordIcon === toolBar.clicked(clickedOverlay) && !MyAvatar.isPlaying()) {
|
||||
if (!MyAvatar.isRecording()) {
|
||||
MyAvatar.startRecording();
|
||||
toolBar.setBack(COLOR_ON, ALPHA_ON);
|
||||
|
@ -148,14 +150,14 @@ function mousePressEvent(event) {
|
|||
MyAvatar.loadLastRecording();
|
||||
toolBar.setBack(COLOR_OFF, ALPHA_OFF);
|
||||
}
|
||||
} else if (playIcon === toolBar.clicked(clickedOverlay)) {
|
||||
if (!MyAvatar.isRecording()) {
|
||||
if (MyAvatar.isPlaying()) {
|
||||
MyAvatar.stopPlaying();
|
||||
} else {
|
||||
MyAvatar.startPlaying();
|
||||
}
|
||||
}
|
||||
} else if (playIcon === toolBar.clicked(clickedOverlay) && !MyAvatar.isRecording()) {
|
||||
if (MyAvatar.isPlaying()) {
|
||||
MyAvatar.stopPlaying();
|
||||
} else {
|
||||
MyAvatar.setPlayFromCurrentLocation(true);
|
||||
MyAvatar.setPlayerLoop(true);
|
||||
MyAvatar.startPlaying(true);
|
||||
}
|
||||
} else if (saveIcon === toolBar.clicked(clickedOverlay)) {
|
||||
if (!MyAvatar.isRecording()) {
|
||||
recordingFile = Window.save("Save recording to file", ".", "*.rec");
|
||||
|
|
|
@ -51,9 +51,6 @@ var lastVoxelScale = 0;
|
|||
var dragStart = { x: 0, y: 0 };
|
||||
var wheelPixelsMoved = 0;
|
||||
|
||||
var mouseX = 0;
|
||||
var mouseY = 0;
|
||||
|
||||
// Create a table of the different colors you can choose
|
||||
var colors = new Array();
|
||||
colors[0] = { red: 120, green: 181, blue: 126 };
|
||||
|
@ -1041,8 +1038,6 @@ function mousePressEvent(event) {
|
|||
|
||||
// TODO: does any of this stuff need to execute if we're panning or orbiting?
|
||||
trackMouseEvent(event); // used by preview support
|
||||
mouseX = event.x;
|
||||
mouseY = event.y;
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var intersection = Voxels.findRayIntersection(pickRay);
|
||||
audioOptions.position = Vec3.sum(pickRay.origin, pickRay.direction);
|
||||
|
@ -1296,40 +1291,30 @@ function mouseMoveEvent(event) {
|
|||
}
|
||||
|
||||
if (isAdding) {
|
||||
// Watch the drag direction to tell which way to 'extrude' this voxel
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var distance = Vec3.length(Vec3.subtract(pickRay.origin, lastVoxelPosition));
|
||||
var mouseSpot = Vec3.sum(Vec3.multiply(pickRay.direction, distance), pickRay.origin);
|
||||
var delta = Vec3.subtract(mouseSpot, lastVoxelPosition);
|
||||
|
||||
if (!isExtruding) {
|
||||
var pickRay = Camera.computePickRay(event.x, event.y);
|
||||
var lastVoxelDistance = { x: pickRay.origin.x - lastVoxelPosition.x,
|
||||
y: pickRay.origin.y - lastVoxelPosition.y,
|
||||
z: pickRay.origin.z - lastVoxelPosition.z };
|
||||
var distance = Vec3.length(lastVoxelDistance);
|
||||
var mouseSpot = { x: pickRay.direction.x * distance, y: pickRay.direction.y * distance, z: pickRay.direction.z * distance };
|
||||
mouseSpot.x += pickRay.origin.x;
|
||||
mouseSpot.y += pickRay.origin.y;
|
||||
mouseSpot.z += pickRay.origin.z;
|
||||
var dx = mouseSpot.x - lastVoxelPosition.x;
|
||||
var dy = mouseSpot.y - lastVoxelPosition.y;
|
||||
var dz = mouseSpot.z - lastVoxelPosition.z;
|
||||
// Use the drag direction to tell which way to 'extrude' this voxel
|
||||
extrudeScale = lastVoxelScale;
|
||||
extrudeDirection = { x: 0, y: 0, z: 0 };
|
||||
isExtruding = true;
|
||||
if (dx > lastVoxelScale) extrudeDirection.x = extrudeScale;
|
||||
else if (dx < -lastVoxelScale) extrudeDirection.x = -extrudeScale;
|
||||
else if (dy > lastVoxelScale) extrudeDirection.y = extrudeScale;
|
||||
else if (dy < -lastVoxelScale) extrudeDirection.y = -extrudeScale;
|
||||
else if (dz > lastVoxelScale) extrudeDirection.z = extrudeScale;
|
||||
else if (dz < -lastVoxelScale) extrudeDirection.z = -extrudeScale;
|
||||
if (delta.x > lastVoxelScale) extrudeDirection.x = 1;
|
||||
else if (delta.x < -lastVoxelScale) extrudeDirection.x = -1;
|
||||
else if (delta.y > lastVoxelScale) extrudeDirection.y = 1;
|
||||
else if (delta.y < -lastVoxelScale) extrudeDirection.y = -1;
|
||||
else if (delta.z > lastVoxelScale) extrudeDirection.z = 1;
|
||||
else if (delta.z < -lastVoxelScale) extrudeDirection.z = -1;
|
||||
else isExtruding = false;
|
||||
} else {
|
||||
// We have got an extrusion direction, now look for mouse move beyond threshold to add new voxel
|
||||
var dx = event.x - mouseX;
|
||||
var dy = event.y - mouseY;
|
||||
if (Math.sqrt(dx*dx + dy*dy) > PIXELS_PER_EXTRUDE_VOXEL) {
|
||||
lastVoxelPosition = Vec3.sum(lastVoxelPosition, extrudeDirection);
|
||||
Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z,
|
||||
extrudeScale, lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue);
|
||||
mouseX = event.x;
|
||||
mouseY = event.y;
|
||||
// Extrude if mouse has moved by a voxel in the extrude direction
|
||||
var distanceInDirection = Vec3.dot(delta, extrudeDirection);
|
||||
if (distanceInDirection > extrudeScale) {
|
||||
lastVoxelPosition = Vec3.sum(lastVoxelPosition, Vec3.multiply(extrudeDirection, extrudeScale));
|
||||
Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z, extrudeScale,
|
||||
lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
42
examples/playSoundOrbit.js
Normal file
42
examples/playSoundOrbit.js
Normal file
|
@ -0,0 +1,42 @@
|
|||
//
|
||||
// playSoundPath.js
|
||||
// examples
|
||||
//
|
||||
// Created by Craig Hansen-Sturm on 05/27/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
var soundClip = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Voxels/voxel create 3.raw");
|
||||
|
||||
var currentTime = 1.570079; // pi/2
|
||||
var deltaTime = 0.05;
|
||||
var distance = 1;
|
||||
var debug = 0;
|
||||
|
||||
function playSound() {
|
||||
var options = new AudioInjectionOptions();
|
||||
currentTime += deltaTime;
|
||||
|
||||
var s = distance * Math.sin(currentTime);
|
||||
var c = distance * Math.cos(currentTime);
|
||||
|
||||
var soundOffset = { x:s, y:0, z:c };
|
||||
|
||||
if (debug) {
|
||||
print("t=" + currentTime + "offset=" + soundOffset.x + "," + soundOffset.y + "," + soundOffset.z);
|
||||
}
|
||||
|
||||
var avatarPosition = MyAvatar.position;
|
||||
var soundPosition = Vec3.sum(avatarPosition,soundOffset);
|
||||
|
||||
options.position = soundPosition
|
||||
options.volume = 1.0;
|
||||
Audio.playSound(soundClip, options);
|
||||
}
|
||||
|
||||
Script.setInterval(playSound, 250);
|
||||
|
||||
|
|
@ -269,8 +269,7 @@ function update(deltaTime){
|
|||
}
|
||||
|
||||
var locationChanged = false;
|
||||
if (location.hostname != oldHost) {
|
||||
print("Changed domain");
|
||||
if (location.hostname != oldHost || !location.isConnected) {
|
||||
for (model in models) {
|
||||
removeIndicators(models[model]);
|
||||
}
|
||||
|
|
|
@ -11,6 +11,7 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
// the number of splats per pass
|
||||
const int SPLAT_COUNT = 4;
|
||||
|
||||
// the splat textures
|
||||
|
@ -22,7 +23,7 @@ varying vec4 alphaValues;
|
|||
void main(void) {
|
||||
// blend the splat textures
|
||||
gl_FragColor = gl_Color * (texture2D(diffuseMaps[0], gl_TexCoord[0].st) * alphaValues.x +
|
||||
texture2D(diffuseMaps[1], gl_TexCoord[0].st) * alphaValues.y +
|
||||
texture2D(diffuseMaps[2], gl_TexCoord[0].st) * alphaValues.z +
|
||||
texture2D(diffuseMaps[3], gl_TexCoord[0].st) * alphaValues.w);
|
||||
texture2D(diffuseMaps[1], gl_TexCoord[1].st) * alphaValues.y +
|
||||
texture2D(diffuseMaps[2], gl_TexCoord[2].st) * alphaValues.z +
|
||||
texture2D(diffuseMaps[3], gl_TexCoord[3].st) * alphaValues.w);
|
||||
}
|
||||
|
|
|
@ -23,6 +23,15 @@ uniform float heightScale;
|
|||
// the scale between height and texture textures
|
||||
uniform float textureScale;
|
||||
|
||||
// the splat texture offset
|
||||
uniform vec2 splatTextureOffset;
|
||||
|
||||
// the splat textures scales on the S axis
|
||||
uniform vec4 splatTextureScalesS;
|
||||
|
||||
// the splat texture scales on the T axis
|
||||
uniform vec4 splatTextureScalesT;
|
||||
|
||||
// the lower bounds of the values corresponding to the splat textures
|
||||
uniform vec4 textureValueMinima;
|
||||
|
||||
|
@ -35,16 +44,21 @@ varying vec4 alphaValues;
|
|||
void main(void) {
|
||||
// add the height to the position
|
||||
float height = texture2D(heightMap, gl_MultiTexCoord0.st).r;
|
||||
gl_Position = gl_ModelViewProjectionMatrix * (gl_Vertex + vec4(0.0, height, 0.0, 0.0));
|
||||
vec4 modelSpacePosition = gl_Vertex + vec4(0.0, height, 0.0, 0.0);
|
||||
gl_Position = gl_ModelViewProjectionMatrix * modelSpacePosition;
|
||||
|
||||
// the zero height should be invisible
|
||||
gl_FrontColor = vec4(1.0, 1.0, 1.0, 1.0 - step(height, 0.0));
|
||||
|
||||
// pass along the scaled/offset texture coordinates
|
||||
gl_TexCoord[0] = (gl_MultiTexCoord0 - vec4(heightScale, heightScale, 0.0, 0.0)) * textureScale;
|
||||
vec4 textureSpacePosition = vec4(modelSpacePosition.xz, 0.0, 1.0) + vec4(splatTextureOffset, 0.0, 0.0);
|
||||
gl_TexCoord[0] = textureSpacePosition * vec4(splatTextureScalesS[0], splatTextureScalesT[0], 0.0, 1.0);
|
||||
gl_TexCoord[1] = textureSpacePosition * vec4(splatTextureScalesS[1], splatTextureScalesT[1], 0.0, 1.0);
|
||||
gl_TexCoord[2] = textureSpacePosition * vec4(splatTextureScalesS[2], splatTextureScalesT[2], 0.0, 1.0);
|
||||
gl_TexCoord[3] = textureSpacePosition * vec4(splatTextureScalesS[3], splatTextureScalesT[3], 0.0, 1.0);
|
||||
|
||||
// compute the alpha values for each texture
|
||||
float value = texture2D(textureMap, gl_TexCoord[0].st).r;
|
||||
float value = texture2D(textureMap, (gl_MultiTexCoord0.st - vec2(heightScale, heightScale)) * textureScale).r;
|
||||
vec4 valueVector = vec4(value, value, value, value);
|
||||
alphaValues = step(textureValueMinima, valueVector) * step(valueVector, textureValueMaxima);
|
||||
}
|
||||
|
|
|
@ -246,7 +246,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
    connect(&domainHandler, SIGNAL(connectedToDomain(const QString&)), SLOT(updateWindowTitle()));
    connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle()));
    connect(&domainHandler, &DomainHandler::settingsReceived, this, &Application::domainSettingsReceived);

    connect(&domainHandler, &DomainHandler::hostnameChanged, Menu::getInstance(), &Menu::clearLoginDialogDisplayedFlag);

    // hookup VoxelEditSender to PaymentManager so we can pay for octree edits
    const PaymentManager& paymentManager = PaymentManager::getInstance();
    connect(&_voxelEditSender, &VoxelEditPacketSender::octreePaymentRequired,

@ -1772,14 +1773,7 @@ void Application::init() {
    _lastTimeUpdated.start();

    Menu::getInstance()->loadSettings();
    if (Menu::getInstance()->getAudioJitterBufferFrames() != 0) {
        _audio.setDynamicJitterBuffers(false);
        _audio.setStaticDesiredJitterBufferFrames(Menu::getInstance()->getAudioJitterBufferFrames());
    } else {
        _audio.setDynamicJitterBuffers(true);
    }
    _audio.setMaxFramesOverDesired(Menu::getInstance()->getMaxFramesOverDesired());
    _audio.setReceivedAudioStreamSettings(Menu::getInstance()->getReceivedAudioStreamSettings());

    qDebug("Loaded settings");
@ -72,7 +72,7 @@ Audio::Audio(QObject* parent) :
    _proceduralAudioOutput(NULL),
    _proceduralOutputDevice(NULL),
    _inputRingBuffer(0),
    _receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, true, 0, 0, true),
    _receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, InboundAudioStream::Settings()),
    _isStereoInput(false),
    _averagedLatency(0.0),
    _lastInputLoudness(0),

@ -105,6 +105,7 @@ Audio::Audio(QObject* parent) :
    _scopeInput(0),
    _scopeOutputLeft(0),
    _scopeOutputRight(0),
    _scopeLastFrame(),
    _statsEnabled(false),
    _statsShowInjectedStreams(false),
    _outgoingAvatarAudioSequenceNumber(0),

@ -113,14 +114,17 @@ Audio::Audio(QObject* parent) :
    _audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
    _lastSentAudioPacket(0),
    _packetSentTimeGaps(1, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS),
    _audioOutputIODevice(*this)
    _audioOutputIODevice(_receivedAudioStream)
{
    // clear the array of locally injected samples
    memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
    // Create the noise sample array
    _noiseSampleFrames = new float[NUMBER_OF_NOISE_SAMPLE_FRAMES];

    connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedAudioStreamSamples, Qt::DirectConnection);
    connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedSilence, this, &Audio::addStereoSilenceToScope, Qt::DirectConnection);
    connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade, this, &Audio::addLastFrameRepeatedWithFadeToScope, Qt::DirectConnection);
    connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedStereoSamples, this, &Audio::addStereoSamplesToScope, Qt::DirectConnection);
    connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedSamples, Qt::DirectConnection);
}

void Audio::init(QGLWidget *parent) {
@ -460,9 +464,12 @@ void Audio::handleAudioInput() {
|
|||
static char audioDataPacket[MAX_PACKET_SIZE];
|
||||
|
||||
static int numBytesPacketHeader = numBytesForPacketHeaderGivenPacketType(PacketTypeMicrophoneAudioNoEcho);
|
||||
static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
|
||||
|
||||
static int16_t* networkAudioSamples = (int16_t*) (audioDataPacket + leadingBytes);
|
||||
// NOTE: we assume PacketTypeMicrophoneAudioWithEcho has same size headers as
|
||||
// PacketTypeMicrophoneAudioNoEcho. If not, then networkAudioSamples will be pointing to the wrong place for writing
|
||||
// audio samples with echo.
|
||||
static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
|
||||
static int16_t* networkAudioSamples = (int16_t*)(audioDataPacket + leadingBytes);
|
||||
|
||||
float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio(_numInputCallbackBytes);
|
||||
|
||||
|
@ -668,9 +675,7 @@ void Audio::handleAudioInput() {
|
|||
if (!_isStereoInput && _scopeEnabled && !_scopeEnabledPause) {
|
||||
unsigned int numMonoAudioChannels = 1;
|
||||
unsigned int monoAudioChannel = 0;
|
||||
addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, monoAudioChannel, numMonoAudioChannels);
|
||||
_scopeInputOffset += NETWORK_SAMPLES_PER_FRAME;
|
||||
_scopeInputOffset %= _samplesPerScope;
|
||||
_scopeInputOffset = addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, NETWORK_SAMPLES_PER_FRAME, monoAudioChannel, numMonoAudioChannels);
|
||||
}
|
||||
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
|
@ -686,19 +691,11 @@ void Audio::handleAudioInput() {
        glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition();
        glm::quat headOrientation = interfaceAvatar->getHead()->getFinalOrientationInWorldFrame();
        quint8 isStereo = _isStereoInput ? 1 : 0;

        int numAudioBytes = 0;

        PacketType packetType;
        if (_lastInputLoudness == 0) {
            packetType = PacketTypeSilentAudioFrame;

            // we need to indicate how many silent samples this is to the audio mixer
            networkAudioSamples[0] = numNetworkSamples;
            numAudioBytes = sizeof(int16_t);
        } else {
            numAudioBytes = numNetworkBytes;

            if (Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)) {
                packetType = PacketTypeMicrophoneAudioWithEcho;
            } else {

@ -707,21 +704,31 @@ void Audio::handleAudioInput() {
        }

        char* currentPacketPtr = audioDataPacket + populatePacketHeader(audioDataPacket, packetType);

        // pack sequence number
        memcpy(currentPacketPtr, &_outgoingAvatarAudioSequenceNumber, sizeof(quint16));
        currentPacketPtr += sizeof(quint16);

        // set the mono/stereo byte
        *currentPacketPtr++ = isStereo;
        if (packetType == PacketTypeSilentAudioFrame) {
            // pack num silent samples
            quint16 numSilentSamples = numNetworkSamples;
            memcpy(currentPacketPtr, &numSilentSamples, sizeof(quint16));
            currentPacketPtr += sizeof(quint16);
        } else {
            // set the mono/stereo byte
            *currentPacketPtr++ = isStereo;

            // memcpy the three float positions
            memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
            currentPacketPtr += (sizeof(headPosition));
        // memcpy the three float positions
        memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
        currentPacketPtr += (sizeof(headPosition));

            // memcpy our orientation
            memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
            currentPacketPtr += sizeof(headOrientation);
        // memcpy our orientation
        memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
        currentPacketPtr += sizeof(headOrientation);

            // audio samples have already been packed (written to networkAudioSamples)
            currentPacketPtr += numNetworkBytes;
        }

        // first time this is 0
        if (_lastSentAudioPacket == 0) {

@ -733,18 +740,58 @@ void Audio::handleAudioInput() {

            _lastSentAudioPacket = now;
        }

        nodeList->writeDatagram(audioDataPacket, numAudioBytes + leadingBytes, audioMixer);

        int packetBytes = currentPacketPtr - audioDataPacket;
        nodeList->writeDatagram(audioDataPacket, packetBytes, audioMixer);
        _outgoingAvatarAudioSequenceNumber++;

        Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO)
            .updateValue(numAudioBytes + leadingBytes);
            .updateValue(packetBytes);
    }
    delete[] inputAudioSamples;
  }
}
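After this change a silent frame carries only a sequence number and a silent-sample count, while an audible frame still carries the stereo flag, the head pose, and the samples themselves. A small standalone sketch of the resulting payload sizes (a hypothetical helper, not code from this commit; the packet-type header written by populatePacketHeader() is excluded, and Qt's quint16/quint8 are shown as their fixed-width equivalents):

    #include <cstddef>
    #include <cstdint>

    // Payload after the packet header for PacketTypeSilentAudioFrame
    size_t silentFramePayloadBytes() {
        return sizeof(uint16_t)    // outgoing audio sequence number
             + sizeof(uint16_t);   // number of silent samples being represented
    }

    // Payload after the packet header for PacketTypeMicrophoneAudio{NoEcho,WithEcho}
    size_t audioFramePayloadBytes(size_t numNetworkBytes) {
        return sizeof(uint16_t)    // outgoing audio sequence number
             + sizeof(uint8_t)     // mono/stereo flag
             + 3 * sizeof(float)   // head position (glm::vec3)
             + 4 * sizeof(float)   // head orientation (glm::quat)
             + numNetworkBytes;    // the encoded audio samples
    }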
void Audio::processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
const int STEREO_FACTOR = 2;

void Audio::addStereoSilenceToScope(int silentSamplesPerChannel) {
    if (!_scopeEnabled || _scopeEnabledPause) {
        return;
    }
    addSilenceToScope(_scopeOutputLeft, _scopeOutputOffset, silentSamplesPerChannel);
    _scopeOutputOffset = addSilenceToScope(_scopeOutputRight, _scopeOutputOffset, silentSamplesPerChannel);
}

void Audio::addStereoSamplesToScope(const QByteArray& samples) {
    if (!_scopeEnabled || _scopeEnabledPause) {
        return;
    }
    const int16_t* samplesData = reinterpret_cast<const int16_t*>(samples.data());
    int samplesPerChannel = samples.size() / sizeof(int16_t) / STEREO_FACTOR;

    addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, STEREO_FACTOR);
    _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, STEREO_FACTOR);

    _scopeLastFrame = samples.right(NETWORK_BUFFER_LENGTH_BYTES_STEREO);
}

void Audio::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
    const int16_t* lastFrameData = reinterpret_cast<const int16_t*>(_scopeLastFrame.data());

    int samplesRemaining = samplesPerChannel;
    int indexOfRepeat = 0;
    do {
        int samplesToWriteThisIteration = std::min(samplesRemaining, (int)NETWORK_SAMPLES_PER_FRAME);
        float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
        addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, lastFrameData, samplesToWriteThisIteration, 0, STEREO_FACTOR, fade);
        _scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, lastFrameData, samplesToWriteThisIteration, 1, STEREO_FACTOR, fade);

        samplesRemaining -= samplesToWriteThisIteration;
        indexOfRepeat++;
    } while (samplesRemaining > 0);
}
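All three scope writers above follow the same discipline: write the left channel at the current shared offset, then let the right-channel write advance the offset, so both traces stay aligned on a single cursor. A minimal illustration of that discipline (the std::function parameter is a stand-in for addBufferToScope/addSilenceToScope; this helper is not part of the commit):

    #include <functional>

    // Writes one stereo pair into the scope and returns the advanced cursor.
    // writeChannel(offset, channel) is assumed to return the offset after writing.
    int writeStereoPairToScope(int scopeOffset, const std::function<int(int, int)>& writeChannel) {
        writeChannel(scopeOffset, 0);          // left: its returned offset is intentionally ignored
        return writeChannel(scopeOffset, 1);   // right: its return value becomes the new shared cursor
    }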
void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
|
||||
|
||||
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
|
||||
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
|
||||
|
@ -789,30 +836,6 @@ void Audio::processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QBy
|
|||
numNetworkOutputSamples,
|
||||
numDeviceOutputSamples,
|
||||
_desiredOutputFormat, _outputFormat);
|
||||
|
||||
|
||||
if (_scopeEnabled && !_scopeEnabledPause) {
|
||||
unsigned int numAudioChannels = _desiredOutputFormat.channelCount();
|
||||
const int16_t* samples = receivedSamples;
|
||||
for (int numSamples = numNetworkOutputSamples / numAudioChannels; numSamples > 0; numSamples -= NETWORK_SAMPLES_PER_FRAME) {
|
||||
|
||||
unsigned int audioChannel = 0;
|
||||
addBufferToScope(
|
||||
_scopeOutputLeft,
|
||||
_scopeOutputOffset,
|
||||
samples, audioChannel, numAudioChannels);
|
||||
|
||||
audioChannel = 1;
|
||||
addBufferToScope(
|
||||
_scopeOutputRight,
|
||||
_scopeOutputOffset,
|
||||
samples, audioChannel, numAudioChannels);
|
||||
|
||||
_scopeOutputOffset += NETWORK_SAMPLES_PER_FRAME;
|
||||
_scopeOutputOffset %= _samplesPerScope;
|
||||
samples += NETWORK_SAMPLES_PER_FRAME * numAudioChannels;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
|
||||
|
@ -825,9 +848,6 @@ void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
|
|||
Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO).updateValue(audioByteArray.size());
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
void Audio::parseAudioStreamStatsPacket(const QByteArray& packet) {
|
||||
|
||||
int numBytesPacketHeader = numBytesForPacketHeader(packet);
|
||||
|
@ -860,12 +880,13 @@ void Audio::parseAudioStreamStatsPacket(const QByteArray& packet) {
|
|||
|
||||
void Audio::sendDownstreamAudioStatsPacket() {
|
||||
|
||||
// since this function is called every second, we'll sample some of our stats here
|
||||
|
||||
// since this function is called every second, we'll sample for some of our stats here
|
||||
_inputRingBufferMsecsAvailableStats.update(getInputRingBufferMsecsAvailable());
|
||||
|
||||
_audioOutputMsecsUnplayedStats.update(getAudioOutputMsecsUnplayed());
|
||||
|
||||
// also, call _receivedAudioStream's per-second callback
|
||||
_receivedAudioStream.perSecondCallbackForUpdatingStats();
|
||||
|
||||
char packet[MAX_PACKET_SIZE];
|
||||
|
||||
// pack header
|
||||
|
@ -883,7 +904,7 @@ void Audio::sendDownstreamAudioStatsPacket() {
|
|||
dataAt += sizeof(quint16);
|
||||
|
||||
// pack downstream audio stream stats
|
||||
AudioStreamStats stats = _receivedAudioStream.updateSeqHistoryAndGetAudioStreamStats();
|
||||
AudioStreamStats stats = _receivedAudioStream.getAudioStreamStats();
|
||||
memcpy(dataAt, &stats, sizeof(AudioStreamStats));
|
||||
dataAt += sizeof(AudioStreamStats);
|
||||
|
||||
|
@ -916,7 +937,7 @@ void Audio::addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& s
|
|||
unsigned int delayCount = delay * _desiredOutputFormat.channelCount();
|
||||
unsigned int silentCount = (remaining < delayCount) ? remaining : delayCount;
|
||||
if (silentCount) {
|
||||
_spatialAudioRingBuffer.addSilentFrame(silentCount);
|
||||
_spatialAudioRingBuffer.addSilentSamples(silentCount);
|
||||
}
|
||||
|
||||
// Recalculate the number of remaining samples
|
||||
|
@ -1220,8 +1241,6 @@ void Audio::selectAudioFilterSmiley() {
|
|||
void Audio::toggleScope() {
|
||||
_scopeEnabled = !_scopeEnabled;
|
||||
if (_scopeEnabled) {
|
||||
_scopeInputOffset = 0;
|
||||
_scopeOutputOffset = 0;
|
||||
allocateScope();
|
||||
} else {
|
||||
freeScope();
|
||||
|
@ -1259,6 +1278,8 @@ void Audio::selectAudioScopeFiftyFrames() {
|
|||
}
|
||||
|
||||
void Audio::allocateScope() {
|
||||
_scopeInputOffset = 0;
|
||||
_scopeOutputOffset = 0;
|
||||
int num = _samplesPerScope * sizeof(int16_t);
|
||||
_scopeInput = new QByteArray(num, 0);
|
||||
_scopeOutputLeft = new QByteArray(num, 0);
|
||||
|
@ -1290,12 +1311,18 @@ void Audio::freeScope() {
|
|||
}
|
||||
}
|
||||
|
||||
void Audio::addBufferToScope(
    QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels) {
int Audio::addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamplesPerChannel,
    unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade) {
    if (!_scopeEnabled || _scopeEnabledPause) {
        return 0;
    }

    // Constant multiplier to map sample value to vertical size of scope
    float multiplier = (float)MULTIPLIER_SCOPE_HEIGHT / logf(2.0f);

    // Used to scale each sample. (logf(sample) + fadeOffset) is same as logf(sample * fade).
    float fadeOffset = logf(fade);

    // Temporary variable receives sample value
    float sample;

@ -1306,17 +1333,41 @@ void Audio::addBufferToScope(
    // Short int pointer to mapped samples in byte array
    int16_t* destination = (int16_t*) byteArray->data();

    for (unsigned int i = 0; i < NETWORK_SAMPLES_PER_FRAME; i++) {
    for (int i = 0; i < sourceSamplesPerChannel; i++) {
        sample = (float)source[i * sourceNumberOfChannels + sourceChannel];
        if (sample > 0) {
            value = (int16_t)(multiplier * logf(sample));
        } else if (sample < 0) {
            value = (int16_t)(-multiplier * logf(-sample));
        if (sample > 1) {
            value = (int16_t)(multiplier * (logf(sample) + fadeOffset));
        } else if (sample < -1) {
            value = (int16_t)(-multiplier * (logf(-sample) + fadeOffset));
        } else {
            value = 0;
        }
        destination[i + frameOffset] = value;
        destination[frameOffset] = value;
        frameOffset = (frameOffset == _samplesPerScope - 1) ? 0 : frameOffset + 1;
    }
    return frameOffset;
}
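The fadeOffset trick above relies on the identity logf(sample * fade) == logf(sample) + logf(fade), so a single addition per sample replaces a multiplication before the logarithm; samples with magnitude at most 1 map to zero, since their logarithm would be non-positive. A quick standalone check of the identity (illustrative only, not part of the commit):

    #include <cassert>
    #include <cmath>

    void checkFadeOffsetIdentity() {
        const float sample = 1000.0f;
        const float fade = 0.5f;
        const float direct = logf(sample * fade);            // scale first, then take the log
        const float viaOffset = logf(sample) + logf(fade);   // take the log, then add the precomputed offset
        assert(fabsf(direct - viaOffset) < 1e-4f);            // the two paths agree to rounding error
    }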
int Audio::addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples) {

    QMutexLocker lock(&_guard);
    // Short int pointer to mapped samples in byte array
    int16_t* destination = (int16_t*)byteArray->data();

    if (silentSamples >= _samplesPerScope) {
        memset(destination, 0, byteArray->size());
        return frameOffset;
    }

    int samplesToBufferEnd = _samplesPerScope - frameOffset;
    if (silentSamples > samplesToBufferEnd) {
        memset(destination + frameOffset, 0, samplesToBufferEnd * sizeof(int16_t));
        memset(destination, 0, (silentSamples - samplesToBufferEnd) * sizeof(int16_t));
    } else {
        memset(destination + frameOffset, 0, silentSamples * sizeof(int16_t));
    }

    return (frameOffset + silentSamples) % _samplesPerScope;
}
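The wrap-around in addSilenceToScope is easiest to see with small numbers; a standalone check of the cursor arithmetic, assuming a hypothetical scope of 10 samples (names and values chosen for illustration only):

    #include <cassert>

    int advanceScopeCursor(int frameOffset, int silentSamples, int samplesPerScope) {
        return (frameOffset + silentSamples) % samplesPerScope;
    }

    void checkScopeWraparound() {
        // With frameOffset = 7 and silentSamples = 5 in a 10-sample scope, the zeroing
        // covers samples [7..9], wraps to cover [0..1], and leaves the cursor at 2.
        assert(advanceScopeCursor(7, 5, 10) == 2);
    }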
|
||||
void Audio::renderStats(const float* color, int width, int height) {
|
||||
|
@ -1517,17 +1568,17 @@ void Audio::renderScope(int width, int height) {
|
|||
return;
|
||||
|
||||
static const float backgroundColor[4] = { 0.4f, 0.4f, 0.4f, 0.6f };
|
||||
static const float gridColor[4] = { 0.3f, 0.3f, 0.3f, 0.6f };
|
||||
static const float gridColor[4] = { 0.7f, 0.7f, 0.7f, 1.0f };
|
||||
static const float inputColor[4] = { 0.3f, 1.0f, 0.3f, 1.0f };
|
||||
static const float outputLeftColor[4] = { 1.0f, 0.3f, 0.3f, 1.0f };
|
||||
static const float outputRightColor[4] = { 0.3f, 0.3f, 1.0f, 1.0f };
|
||||
static const int gridRows = 2;
|
||||
int gridCols = _framesPerScope;
|
||||
|
||||
int x = (width - SCOPE_WIDTH) / 2;
|
||||
int y = (height - SCOPE_HEIGHT) / 2;
|
||||
int w = SCOPE_WIDTH;
|
||||
int h = SCOPE_HEIGHT;
|
||||
int x = (width - (int)SCOPE_WIDTH) / 2;
|
||||
int y = (height - (int)SCOPE_HEIGHT) / 2;
|
||||
int w = (int)SCOPE_WIDTH;
|
||||
int h = (int)SCOPE_HEIGHT;
|
||||
|
||||
renderBackground(backgroundColor, x, y, w, h);
|
||||
renderGrid(gridColor, x, y, w, h, gridRows, gridCols);
|
||||
|
@ -1717,7 +1768,7 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo)
|
|||
// setup our general output device for audio-mixer audio
|
||||
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
|
||||
_audioOutput->setBufferSize(AUDIO_OUTPUT_BUFFER_SIZE_FRAMES * _outputFrameSize * sizeof(int16_t));
|
||||
qDebug() << "Ring Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;
|
||||
qDebug() << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;
|
||||
|
||||
_audioOutputIODevice.start();
|
||||
_audioOutput->start(&_audioOutputIODevice);
|
||||
|
@ -1792,13 +1843,11 @@ float Audio::getInputRingBufferMsecsAvailable() const {
}

qint64 Audio::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
    MixedProcessedAudioStream& receivedAUdioStream = _parent._receivedAudioStream;

    int samplesRequested = maxSize / sizeof(int16_t);
    int samplesPopped;
    int bytesWritten;
    if ((samplesPopped = receivedAUdioStream.popSamples(samplesRequested, false)) > 0) {
        AudioRingBuffer::ConstIterator lastPopOutput = receivedAUdioStream.getLastPopOutput();
    if ((samplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) {
        AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
        lastPopOutput.readSamples((int16_t*)data, samplesPopped);
        bytesWritten = samplesPopped * sizeof(int16_t);
    } else {
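readData() implements QIODevice's pull contract: the audio output asks for up to maxSize bytes and the device reports how many it actually produced. The underrun branch is truncated in this hunk, so the silence fill in the sketch below is an assumption for illustration, not the commit's code, and the generic popSamples callback stands in for MixedProcessedAudioStream:

    #include <cstdint>
    #include <cstring>
    #include <functional>

    // popSamples(dest, requested) is assumed to return the number of samples it actually copied.
    int64_t readDataSketch(const std::function<int(int16_t*, int)>& popSamples, char* data, int64_t maxSize) {
        int samplesRequested = (int)(maxSize / sizeof(int16_t));
        int samplesPopped = popSamples((int16_t*)data, samplesRequested);
        if (samplesPopped > 0) {
            return samplesPopped * (int64_t)sizeof(int16_t);
        }
        // Assumed underrun behavior: hand the output device silence so playback keeps pulling.
        memset(data, 0, (size_t)maxSize);
        return maxSize;
    }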
@ -21,6 +21,7 @@
|
|||
#include "RingBufferHistory.h"
|
||||
#include "MovingMinMaxAvg.h"
|
||||
#include "AudioFilter.h"
|
||||
#include "AudioFilterBank.h"
|
||||
|
||||
#include <QAudio>
|
||||
#include <QAudioInput>
|
||||
|
@ -50,14 +51,14 @@ public:

    class AudioOutputIODevice : public QIODevice {
    public:
        AudioOutputIODevice(Audio& parent) : _parent(parent) {};
        AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream) : _receivedAudioStream(receivedAudioStream) {};

        void start() { open(QIODevice::ReadOnly); }
        void stop() { close(); }
        qint64 readData(char * data, qint64 maxSize);
        qint64 writeData(const char * data, qint64 maxSize) { return 0; }
    private:
        Audio& _parent;
        MixedProcessedAudioStream& _receivedAudioStream;
    };

@ -73,10 +74,7 @@ public:
    virtual void startCollisionSound(float magnitude, float frequency, float noise, float duration, bool flashScreen);
    virtual void startDrumSound(float volume, float frequency, float duration, float decay);

    void setDynamicJitterBuffers(bool dynamicJitterBuffers) { _receivedAudioStream.setDynamicJitterBuffers(dynamicJitterBuffers); }
    void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames) { _receivedAudioStream.setStaticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames); }
    void setMaxFramesOverDesired(int maxFramesOverDesired) { _receivedAudioStream.setMaxFramesOverDesired(maxFramesOverDesired); }
    void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& settings) { _receivedAudioStream.setSettings(settings); }

    int getDesiredJitterBufferFrames() const { return _receivedAudioStream.getDesiredJitterBufferFrames(); }
@ -105,7 +103,6 @@ public:
|
|||
float getAudioOutputAverageMsecsUnplayed() const { return (float)_audioOutputMsecsUnplayedStats.getWindowAverage(); }
|
||||
|
||||
void setRecorder(RecorderPointer recorder) { _recorder = recorder; }
|
||||
void setPlayer(PlayerPointer player) { _player = player; }
|
||||
|
||||
public slots:
|
||||
void start();
|
||||
|
@ -113,7 +110,6 @@ public slots:
|
|||
void addReceivedAudioToStream(const QByteArray& audioByteArray);
|
||||
void parseAudioStreamStatsPacket(const QByteArray& packet);
|
||||
void addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& spatialAudio, unsigned int numSamples);
|
||||
void processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
|
||||
void handleAudioInput();
|
||||
void reset();
|
||||
void resetStats();
|
||||
|
@ -130,6 +126,10 @@ public slots:
|
|||
void selectAudioScopeFiveFrames();
|
||||
void selectAudioScopeTwentyFrames();
|
||||
void selectAudioScopeFiftyFrames();
|
||||
void addStereoSilenceToScope(int silentSamplesPerChannel);
|
||||
void addLastFrameRepeatedWithFadeToScope(int samplesPerChannel);
|
||||
void addStereoSamplesToScope(const QByteArray& samples);
|
||||
void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
|
||||
void toggleAudioFilter();
|
||||
void selectAudioFilterFlat();
|
||||
void selectAudioFilterTrebleCut();
|
||||
|
@ -256,8 +256,9 @@ private:
|
|||
void reallocateScope(int frames);
|
||||
|
||||
// Audio scope methods for data acquisition
|
||||
void addBufferToScope(
|
||||
QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels);
|
||||
int addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamples,
|
||||
unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade = 1.0f);
|
||||
int addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples);
|
||||
|
||||
// Audio scope methods for rendering
|
||||
void renderBackground(const float* color, int x, int y, int width, int height);
|
||||
|
@ -282,13 +283,14 @@ private:
|
|||
int _samplesPerScope;
|
||||
|
||||
// Multi-band parametric EQ
|
||||
bool _peqEnabled;
|
||||
AudioFilterPEQ3 _peq;
|
||||
bool _peqEnabled;
|
||||
AudioFilterPEQ3m _peq;
|
||||
|
||||
QMutex _guard;
|
||||
QByteArray* _scopeInput;
|
||||
QByteArray* _scopeOutputLeft;
|
||||
QByteArray* _scopeOutputRight;
|
||||
QByteArray _scopeLastFrame;
|
||||
#ifdef _WIN32
|
||||
static const unsigned int STATS_WIDTH = 1500;
|
||||
#else
|
||||
|
@ -314,7 +316,6 @@ private:
|
|||
AudioOutputIODevice _audioOutputIODevice;
|
||||
|
||||
WeakRecorderPointer _recorder;
|
||||
WeakPlayerPointer _player;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -48,6 +48,7 @@ void DatagramProcessor::processDatagrams() {
|
|||
// only process this packet if we have a match on the packet version
|
||||
switch (packetTypeForPacket(incomingPacket)) {
|
||||
case PacketTypeMixedAudio:
|
||||
case PacketTypeSilentAudioFrame:
|
||||
QMetaObject::invokeMethod(&application->_audio, "addReceivedAudioToStream", Qt::QueuedConnection,
|
||||
Q_ARG(QByteArray, incomingPacket));
|
||||
break;
|
||||
|
|
|
@ -82,8 +82,7 @@ const int CONSOLE_HEIGHT = 200;
|
|||
|
||||
Menu::Menu() :
|
||||
_actionHash(),
|
||||
_audioJitterBufferFrames(0),
|
||||
_maxFramesOverDesired(0),
|
||||
_receivedAudioStreamSettings(),
|
||||
_bandwidthDialog(NULL),
|
||||
_fieldOfView(DEFAULT_FIELD_OF_VIEW_DEGREES),
|
||||
_realWorldFieldOfView(DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
|
||||
|
@ -115,6 +114,7 @@ Menu::Menu() :
|
|||
_loginAction(NULL),
|
||||
_preferencesDialog(NULL),
|
||||
_loginDialog(NULL),
|
||||
_hasLoginDialogDisplayed(false),
|
||||
_snapshotsLocation(),
|
||||
_scriptsLocation(),
|
||||
_walletPrivateKey()
|
||||
|
@ -680,8 +680,15 @@ void Menu::loadSettings(QSettings* settings) {
        lockedSettings = true;
    }

    _audioJitterBufferFrames = loadSetting(settings, "audioJitterBufferFrames", 0);
    _maxFramesOverDesired = loadSetting(settings, "maxFramesOverDesired", DEFAULT_MAX_FRAMES_OVER_DESIRED);
    _receivedAudioStreamSettings._dynamicJitterBuffers = settings->value("dynamicJitterBuffers", DEFAULT_DYNAMIC_JITTER_BUFFERS).toBool();
    _receivedAudioStreamSettings._maxFramesOverDesired = settings->value("maxFramesOverDesired", DEFAULT_MAX_FRAMES_OVER_DESIRED).toInt();
    _receivedAudioStreamSettings._staticDesiredJitterBufferFrames = settings->value("staticDesiredJitterBufferFrames", DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES).toInt();
    _receivedAudioStreamSettings._useStDevForJitterCalc = settings->value("useStDevForJitterCalc", DEFAULT_USE_STDEV_FOR_JITTER_CALC).toBool();
    _receivedAudioStreamSettings._windowStarveThreshold = settings->value("windowStarveThreshold", DEFAULT_WINDOW_STARVE_THRESHOLD).toInt();
    _receivedAudioStreamSettings._windowSecondsForDesiredCalcOnTooManyStarves = settings->value("windowSecondsForDesiredCalcOnTooManyStarves", DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES).toInt();
    _receivedAudioStreamSettings._windowSecondsForDesiredReduction = settings->value("windowSecondsForDesiredReduction", DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION).toInt();
    _receivedAudioStreamSettings._repetitionWithFade = settings->value("repetitionWithFade", DEFAULT_REPETITION_WITH_FADE).toBool();

    _fieldOfView = loadSetting(settings, "fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES);
    _realWorldFieldOfView = loadSetting(settings, "realWorldFieldOfView", DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES);
    _faceshiftEyeDeflection = loadSetting(settings, "faceshiftEyeDeflection", DEFAULT_FACESHIFT_EYE_DEFLECTION);

@ -735,8 +742,15 @@ void Menu::saveSettings(QSettings* settings) {
        lockedSettings = true;
    }

    settings->setValue("audioJitterBufferFrames", _audioJitterBufferFrames);
    settings->setValue("maxFramesOverDesired", _maxFramesOverDesired);
    settings->setValue("dynamicJitterBuffers", _receivedAudioStreamSettings._dynamicJitterBuffers);
    settings->setValue("maxFramesOverDesired", _receivedAudioStreamSettings._maxFramesOverDesired);
    settings->setValue("staticDesiredJitterBufferFrames", _receivedAudioStreamSettings._staticDesiredJitterBufferFrames);
    settings->setValue("useStDevForJitterCalc", _receivedAudioStreamSettings._useStDevForJitterCalc);
    settings->setValue("windowStarveThreshold", _receivedAudioStreamSettings._windowStarveThreshold);
    settings->setValue("windowSecondsForDesiredCalcOnTooManyStarves", _receivedAudioStreamSettings._windowSecondsForDesiredCalcOnTooManyStarves);
    settings->setValue("windowSecondsForDesiredReduction", _receivedAudioStreamSettings._windowSecondsForDesiredReduction);
    settings->setValue("repetitionWithFade", _receivedAudioStreamSettings._repetitionWithFade);

    settings->setValue("fieldOfView", _fieldOfView);
    settings->setValue("faceshiftEyeDeflection", _faceshiftEyeDeflection);
    settings->setValue("maxVoxels", _maxVoxels);
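The exact definition of InboundAudioStream::Settings does not appear in this diff; based on the fields read and written here, it is roughly the following. Field types are inferred from the toBool()/toInt() conversions above, so treat this as a hedged reconstruction rather than the audio library's actual header:

    // Reconstruction for reference only; see the audio library for the real declaration.
    class InboundAudioStream {
    public:
        class Settings {
        public:
            bool _dynamicJitterBuffers;                        // adapt the jitter buffer at runtime
            int _staticDesiredJitterBufferFrames;              // fixed target when not dynamic
            int _maxFramesOverDesired;                          // frames allowed above the target
            bool _useStDevForJitterCalc;                        // std-dev vs. min/max window method
            int _windowStarveThreshold;                         // starves tolerated per window
            int _windowSecondsForDesiredCalcOnTooManyStarves;   // window for raising the target
            int _windowSecondsForDesiredReduction;              // window for lowering the target
            bool _repetitionWithFade;                           // repeat-and-fade dropped frames
        };
    };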
@ -1039,12 +1053,24 @@ void sendFakeEnterEvent() {
|
|||
|
||||
const float DIALOG_RATIO_OF_WINDOW = 0.30f;
|
||||
|
||||
void Menu::clearLoginDialogDisplayedFlag() {
|
||||
// Needed for domains that don't require login.
|
||||
_hasLoginDialogDisplayed = false;
|
||||
}
|
||||
|
||||
void Menu::loginForCurrentDomain() {
|
||||
if (!_loginDialog) {
|
||||
if (!_loginDialog && !_hasLoginDialogDisplayed) {
|
||||
_loginDialog = new LoginDialog(Application::getInstance()->getWindow());
|
||||
_loginDialog->show();
|
||||
_loginDialog->resizeAndPosition(false);
|
||||
}
|
||||
|
||||
_hasLoginDialogDisplayed = true;
|
||||
}
|
||||
|
||||
void Menu::showLoginForCurrentDomain() {
|
||||
_hasLoginDialogDisplayed = false;
|
||||
loginForCurrentDomain();
|
||||
}
|
||||
|
||||
void Menu::editPreferences() {
|
||||
|
@ -1391,7 +1417,7 @@ void Menu::toggleLoginMenuItem() {
|
|||
// change the menu item to login
|
||||
_loginAction->setText("Login");
|
||||
|
||||
connect(_loginAction, &QAction::triggered, this, &Menu::loginForCurrentDomain);
|
||||
connect(_loginAction, &QAction::triggered, this, &Menu::showLoginForCurrentDomain);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -89,10 +89,8 @@ public:
|
|||
void triggerOption(const QString& menuOption);
|
||||
QAction* getActionForOption(const QString& menuOption);
|
||||
|
||||
float getAudioJitterBufferFrames() const { return _audioJitterBufferFrames; }
|
||||
void setAudioJitterBufferFrames(float audioJitterBufferSamples) { _audioJitterBufferFrames = audioJitterBufferSamples; }
|
||||
int getMaxFramesOverDesired() const { return _maxFramesOverDesired; }
|
||||
void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; }
|
||||
const InboundAudioStream::Settings& getReceivedAudioStreamSettings() const { return _receivedAudioStreamSettings; }
|
||||
void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& receivedAudioStreamSettings) { _receivedAudioStreamSettings = receivedAudioStreamSettings; }
|
||||
float getFieldOfView() const { return _fieldOfView; }
|
||||
void setFieldOfView(float fieldOfView) { _fieldOfView = fieldOfView; }
|
||||
float getRealWorldFieldOfView() const { return _realWorldFieldOfView; }
|
||||
|
@ -177,7 +175,9 @@ signals:
|
|||
|
||||
public slots:
|
||||
|
||||
void clearLoginDialogDisplayedFlag();
|
||||
void loginForCurrentDomain();
|
||||
void showLoginForCurrentDomain();
|
||||
void bandwidthDetails();
|
||||
void octreeStatsDetails();
|
||||
void lodTools();
|
||||
|
@ -265,8 +265,7 @@ private:
|
|||
|
||||
|
||||
QHash<QString, QAction*> _actionHash;
|
||||
int _audioJitterBufferFrames; /// number of extra samples to wait before starting audio playback
|
||||
int _maxFramesOverDesired;
|
||||
InboundAudioStream::Settings _receivedAudioStreamSettings;
|
||||
BandwidthDialog* _bandwidthDialog;
|
||||
float _fieldOfView; /// in Degrees, doesn't apply to HMD like Oculus
|
||||
float _realWorldFieldOfView; // The actual FOV set by the user's monitor size and view distance
|
||||
|
@ -305,6 +304,7 @@ private:
|
|||
QPointer<AttachmentsDialog> _attachmentsDialog;
|
||||
QPointer<AnimationsDialog> _animationsDialog;
|
||||
QPointer<LoginDialog> _loginDialog;
|
||||
bool _hasLoginDialogDisplayed;
|
||||
QAction* _chatAction;
|
||||
QString _snapshotsLocation;
|
||||
QString _scriptsLocation;
|
||||
|
|
|
@ -822,22 +822,38 @@ void HeightfieldBuffer::render(bool cursor) {
|
|||
DefaultMetavoxelRendererImplementation::getSplatHeightScaleLocation(), 1.0f / _heightSize);
|
||||
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
|
||||
DefaultMetavoxelRendererImplementation::getSplatTextureScaleLocation(), (float)_heightSize / innerSize);
|
||||
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
|
||||
DefaultMetavoxelRendererImplementation::getSplatTextureOffsetLocation(),
|
||||
_translation.x / _scale, _translation.z / _scale);
|
||||
|
||||
glBindTexture(GL_TEXTURE_2D, _textureTextureID);
|
||||
|
||||
const int TEXTURES_PER_SPLAT = 4;
|
||||
for (int i = 0; i < _textures.size(); i += TEXTURES_PER_SPLAT) {
|
||||
QVector4D scalesS, scalesT;
|
||||
|
||||
for (int j = 0; j < SPLAT_COUNT; j++) {
|
||||
glActiveTexture(GL_TEXTURE0 + SPLAT_TEXTURE_UNITS[j]);
|
||||
int index = i + j;
|
||||
if (index < _networkTextures.size()) {
|
||||
const NetworkTexturePointer& texture = _networkTextures.at(index);
|
||||
glBindTexture(GL_TEXTURE_2D, texture ? texture->getID() : 0);
|
||||
if (texture) {
|
||||
HeightfieldTexture* heightfieldTexture = static_cast<HeightfieldTexture*>(_textures.at(index).data());
|
||||
scalesS[j] = _scale / heightfieldTexture->getScaleS();
|
||||
scalesT[j] = _scale / heightfieldTexture->getScaleT();
|
||||
glBindTexture(GL_TEXTURE_2D, texture->getID());
|
||||
} else {
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
} else {
|
||||
glBindTexture(GL_TEXTURE_2D, 0);
|
||||
}
|
||||
}
|
||||
const float QUARTER_STEP = 0.25f * EIGHT_BIT_MAXIMUM_RECIPROCAL;
|
||||
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
|
||||
DefaultMetavoxelRendererImplementation::getSplatTextureScalesSLocation(), scalesS);
|
||||
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
|
||||
DefaultMetavoxelRendererImplementation::getSplatTextureScalesTLocation(), scalesT);
|
||||
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
|
||||
DefaultMetavoxelRendererImplementation::getSplatTextureValueMinimaLocation(),
|
||||
(i + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP, (i + 2) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP,
|
||||
|
@ -1050,6 +1066,9 @@ void DefaultMetavoxelRendererImplementation::init() {
|
|||
_splatHeightfieldProgram.setUniformValueArray("diffuseMaps", SPLAT_TEXTURE_UNITS, SPLAT_COUNT);
|
||||
_splatHeightScaleLocation = _splatHeightfieldProgram.uniformLocation("heightScale");
|
||||
_splatTextureScaleLocation = _splatHeightfieldProgram.uniformLocation("textureScale");
|
||||
_splatTextureOffsetLocation = _splatHeightfieldProgram.uniformLocation("splatTextureOffset");
|
||||
_splatTextureScalesSLocation = _splatHeightfieldProgram.uniformLocation("splatTextureScalesS");
|
||||
_splatTextureScalesTLocation = _splatHeightfieldProgram.uniformLocation("splatTextureScalesT");
|
||||
_splatTextureValueMinimaLocation = _splatHeightfieldProgram.uniformLocation("textureValueMinima");
|
||||
_splatTextureValueMaximaLocation = _splatHeightfieldProgram.uniformLocation("textureValueMaxima");
|
||||
_splatHeightfieldProgram.release();
|
||||
|
@ -1695,6 +1714,9 @@ int DefaultMetavoxelRendererImplementation::_baseColorScaleLocation;
|
|||
ProgramObject DefaultMetavoxelRendererImplementation::_splatHeightfieldProgram;
|
||||
int DefaultMetavoxelRendererImplementation::_splatHeightScaleLocation;
|
||||
int DefaultMetavoxelRendererImplementation::_splatTextureScaleLocation;
|
||||
int DefaultMetavoxelRendererImplementation::_splatTextureOffsetLocation;
|
||||
int DefaultMetavoxelRendererImplementation::_splatTextureScalesSLocation;
|
||||
int DefaultMetavoxelRendererImplementation::_splatTextureScalesTLocation;
|
||||
int DefaultMetavoxelRendererImplementation::_splatTextureValueMinimaLocation;
|
||||
int DefaultMetavoxelRendererImplementation::_splatTextureValueMaximaLocation;
|
||||
ProgramObject DefaultMetavoxelRendererImplementation::_lightHeightfieldProgram;
|
||||
|
|
|
@ -252,6 +252,9 @@ public:
|
|||
static ProgramObject& getSplatHeightfieldProgram() { return _splatHeightfieldProgram; }
|
||||
static int getSplatHeightScaleLocation() { return _splatHeightScaleLocation; }
|
||||
static int getSplatTextureScaleLocation() { return _splatTextureScaleLocation; }
|
||||
static int getSplatTextureOffsetLocation() { return _splatTextureOffsetLocation; }
|
||||
static int getSplatTextureScalesSLocation() { return _splatTextureScalesSLocation; }
|
||||
static int getSplatTextureScalesTLocation() { return _splatTextureScalesTLocation; }
|
||||
static int getSplatTextureValueMinimaLocation() { return _splatTextureValueMinimaLocation; }
|
||||
static int getSplatTextureValueMaximaLocation() { return _splatTextureValueMaximaLocation; }
|
||||
|
||||
|
@ -297,6 +300,9 @@ private:
|
|||
static ProgramObject _splatHeightfieldProgram;
|
||||
static int _splatHeightScaleLocation;
|
||||
static int _splatTextureScaleLocation;
|
||||
static int _splatTextureOffsetLocation;
|
||||
static int _splatTextureScalesSLocation;
|
||||
static int _splatTextureScalesTLocation;
|
||||
static int _splatTextureValueMinimaLocation;
|
||||
static int _splatTextureValueMaximaLocation;
|
||||
|
||||
|
|
|
@ -222,6 +222,18 @@ glm::vec3 Head::getScalePivot() const {
|
|||
return _faceModel.isActive() ? _faceModel.getTranslation() : _position;
|
||||
}
|
||||
|
||||
void Head::setFinalPitch(float finalPitch) {
|
||||
_deltaPitch = glm::clamp(finalPitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH) - _basePitch;
|
||||
}
|
||||
|
||||
void Head::setFinalYaw(float finalYaw) {
|
||||
_deltaYaw = glm::clamp(finalYaw, MIN_HEAD_YAW, MAX_HEAD_YAW) - _baseYaw;
|
||||
}
|
||||
|
||||
void Head::setFinalRoll(float finalRoll) {
|
||||
_deltaRoll = glm::clamp(finalRoll, MIN_HEAD_ROLL, MAX_HEAD_ROLL) - _baseRoll;
|
||||
}
|
||||
|
||||
float Head::getFinalYaw() const {
|
||||
return glm::clamp(_baseYaw + _deltaYaw, MIN_HEAD_YAW, MAX_HEAD_YAW);
|
||||
}
|
||||
|
|
|
@ -95,6 +95,9 @@ public:
|
|||
void setDeltaRoll(float roll) { _deltaRoll = roll; }
|
||||
float getDeltaRoll() const { return _deltaRoll; }
|
||||
|
||||
virtual void setFinalYaw(float finalYaw);
|
||||
virtual void setFinalPitch(float finalPitch);
|
||||
virtual void setFinalRoll(float finalRoll);
|
||||
virtual float getFinalPitch() const;
|
||||
virtual float getFinalYaw() const;
|
||||
virtual float getFinalRoll() const;
|
||||
|
|
|
@ -81,6 +81,7 @@ MyAvatar::MyAvatar() :
|
|||
_billboardValid(false),
|
||||
_physicsSimulation()
|
||||
{
|
||||
ShapeCollider::initDispatchTable();
|
||||
for (int i = 0; i < MAX_DRIVE_KEYS; i++) {
|
||||
_driveKeys[i] = 0.0f;
|
||||
}
|
||||
|
@ -271,10 +272,12 @@ void MyAvatar::simulate(float deltaTime) {
|
|||
// Update avatar head rotation with sensor data
|
||||
void MyAvatar::updateFromTrackers(float deltaTime) {
|
||||
glm::vec3 estimatedPosition, estimatedRotation;
|
||||
|
||||
if (isPlaying()) {
|
||||
estimatedRotation = glm::degrees(safeEulerAngles(_player->getHeadRotation()));
|
||||
} else if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
|
||||
|
||||
if (isPlaying() && !OculusManager::isConnected()) {
|
||||
return;
|
||||
}
|
||||
|
||||
if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
|
||||
estimatedRotation = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getHeadRotation()));
|
||||
estimatedRotation.x *= -1.0f;
|
||||
estimatedRotation.z *= -1.0f;
|
||||
|
@ -326,11 +329,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
|
|||
}
|
||||
head->setDeltaRoll(estimatedRotation.z);
|
||||
|
||||
if (isPlaying()) {
|
||||
head->setLeanSideways(_player->getLeanSideways());
|
||||
head->setLeanForward(_player->getLeanForward());
|
||||
return;
|
||||
}
|
||||
// the priovr can give us exact lean
|
||||
if (Application::getInstance()->getPrioVR()->isActive()) {
|
||||
glm::vec3 eulers = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getTorsoRotation()));
|
||||
|
@ -575,58 +573,6 @@ void MyAvatar::saveRecording(QString filename) {
|
|||
}
|
||||
}
|
||||
|
||||
bool MyAvatar::isPlaying() {
|
||||
if (!_player) {
|
||||
return false;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
bool result;
|
||||
QMetaObject::invokeMethod(this, "isPlaying", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(bool, result));
|
||||
return result;
|
||||
}
|
||||
return _player && _player->isPlaying();
|
||||
}
|
||||
|
||||
qint64 MyAvatar::playerElapsed() {
|
||||
if (!_player) {
|
||||
return 0;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
qint64 result;
|
||||
QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(qint64, result));
|
||||
return result;
|
||||
}
|
||||
return _player->elapsed();
|
||||
}
|
||||
|
||||
qint64 MyAvatar::playerLength() {
|
||||
if (!_player) {
|
||||
return 0;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
qint64 result;
|
||||
QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(qint64, result));
|
||||
return result;
|
||||
}
|
||||
return _player->getRecording()->getLength();
|
||||
}
|
||||
|
||||
void MyAvatar::loadRecording(QString filename) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
|
||||
Q_ARG(QString, filename));
|
||||
return;
|
||||
}
|
||||
if (!_player) {
|
||||
_player = PlayerPointer(new Player(this));
|
||||
}
|
||||
|
||||
_player->loadFromFile(filename);
|
||||
}
|
||||
|
||||
void MyAvatar::loadLastRecording() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "loadLastRecording", Qt::BlockingQueuedConnection);
|
||||
|
@ -643,32 +589,6 @@ void MyAvatar::loadLastRecording() {
|
|||
_player->loadRecording(_recorder->getRecording());
|
||||
}
|
||||
|
||||
void MyAvatar::startPlaying() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
|
||||
return;
|
||||
}
|
||||
if (!_player) {
|
||||
_player = PlayerPointer(new Player(this));
|
||||
}
|
||||
|
||||
Application::getInstance()->getAudio()->setPlayer(_player);
|
||||
_player->startPlaying();
|
||||
}
|
||||
|
||||
void MyAvatar::stopPlaying() {
|
||||
if (!_player) {
|
||||
return;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "stopPlaying", Qt::BlockingQueuedConnection);
|
||||
return;
|
||||
}
|
||||
if (_player) {
|
||||
_player->stopPlaying();
|
||||
}
|
||||
}
|
||||
|
||||
void MyAvatar::setLocalGravity(glm::vec3 gravity) {
|
||||
_motionBehaviors |= AVATAR_MOTION_OBEY_LOCAL_GRAVITY;
|
||||
// Environmental and Local gravities are incompatible. Since Local is being set here
|
||||
|
@ -1976,12 +1896,8 @@ void MyAvatar::resetSize() {
|
|||
}
|
||||
|
||||
void MyAvatar::goToLocationFromResponse(const QJsonObject& jsonObject) {
|
||||
if (jsonObject["status"].toString() == "success") {
|
||||
QJsonObject locationObject = jsonObject["data"].toObject()["address"].toObject();
|
||||
goToLocationFromAddress(locationObject);
|
||||
} else {
|
||||
QMessageBox::warning(Application::getInstance()->getWindow(), "", "That user or location could not be found.");
|
||||
}
|
||||
QJsonObject locationObject = jsonObject["data"].toObject()["address"].toObject();
|
||||
goToLocationFromAddress(locationObject);
|
||||
}
|
||||
|
||||
void MyAvatar::goToLocationFromAddress(const QJsonObject& locationObject) {
|
||||
|
|
|
@ -176,15 +176,7 @@ public slots:
|
|||
void startRecording();
|
||||
void stopRecording();
|
||||
void saveRecording(QString filename);
|
||||
|
||||
bool isPlaying();
|
||||
qint64 playerElapsed();
|
||||
qint64 playerLength();
|
||||
void loadRecording(QString filename);
|
||||
void loadLastRecording();
|
||||
void startPlaying();
|
||||
void stopPlaying();
|
||||
|
||||
|
||||
signals:
|
||||
void transformChanged();
|
||||
|
@ -224,7 +216,6 @@ private:
|
|||
PhysicsSimulation _physicsSimulation;
|
||||
|
||||
RecorderPointer _recorder;
|
||||
PlayerPointer _player;
|
||||
|
||||
// private methods
|
||||
float computeDistanceToFloor(const glm::vec3& startPoint);
|
||||
|
|
|
@ -620,19 +620,19 @@ void SkeletonModel::buildShapes() {
|
|||
Shape::Type type = joint.shapeType;
|
||||
int parentIndex = joint.parentIndex;
|
||||
if (parentIndex == -1 || radius < EPSILON) {
|
||||
type = Shape::UNKNOWN_SHAPE;
|
||||
} else if (type == Shape::CAPSULE_SHAPE && halfHeight < EPSILON) {
|
||||
type = UNKNOWN_SHAPE;
|
||||
} else if (type == CAPSULE_SHAPE && halfHeight < EPSILON) {
|
||||
// this shape is forced to be a sphere
|
||||
type = Shape::SPHERE_SHAPE;
|
||||
type = SPHERE_SHAPE;
|
||||
}
|
||||
Shape* shape = NULL;
|
||||
if (type == Shape::SPHERE_SHAPE) {
|
||||
if (type == SPHERE_SHAPE) {
|
||||
shape = new VerletSphereShape(radius, &(points[i]));
|
||||
shape->setEntity(this);
|
||||
float mass = massScale * glm::max(MIN_JOINT_MASS, DENSITY_OF_WATER * shape->getVolume());
|
||||
points[i].setMass(mass);
|
||||
totalMass += mass;
|
||||
} else if (type == Shape::CAPSULE_SHAPE) {
|
||||
} else if (type == CAPSULE_SHAPE) {
|
||||
assert(parentIndex != -1);
|
||||
shape = new VerletCapsuleShape(radius, &(points[parentIndex]), &(points[i]));
|
||||
shape->setEntity(this);
|
||||
|
@ -731,7 +731,7 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) {
|
|||
shapeExtents.reset();
|
||||
glm::vec3 localPosition = shape->getTranslation();
|
||||
int type = shape->getType();
|
||||
if (type == Shape::CAPSULE_SHAPE) {
|
||||
if (type == CAPSULE_SHAPE) {
|
||||
// add the two furthest surface points of the capsule
|
||||
CapsuleShape* capsule = static_cast<CapsuleShape*>(shape);
|
||||
glm::vec3 axis;
|
||||
|
@ -743,7 +743,7 @@ void SkeletonModel::computeBoundingShape(const FBXGeometry& geometry) {
|
|||
shapeExtents.addPoint(localPosition + axis);
|
||||
shapeExtents.addPoint(localPosition - axis);
|
||||
totalExtents.addExtents(shapeExtents);
|
||||
} else if (type == Shape::SPHERE_SHAPE) {
|
||||
} else if (type == SPHERE_SHAPE) {
|
||||
float radius = shape->getBoundingRadius();
|
||||
glm::vec3 axis = glm::vec3(radius);
|
||||
shapeExtents.addPoint(localPosition + axis);
|
||||
|
@ -847,13 +847,13 @@ void SkeletonModel::renderJointCollisionShapes(float alpha) {
|
|||
|
||||
glPushMatrix();
|
||||
// shapes are stored in simulation-frame but we want position to be model-relative
|
||||
if (shape->getType() == Shape::SPHERE_SHAPE) {
|
||||
if (shape->getType() == SPHERE_SHAPE) {
|
||||
glm::vec3 position = shape->getTranslation() - simulationTranslation;
|
||||
glTranslatef(position.x, position.y, position.z);
|
||||
// draw a grey sphere at shape position
|
||||
glColor4f(0.75f, 0.75f, 0.75f, alpha);
|
||||
glutSolidSphere(shape->getBoundingRadius(), BALL_SUBDIVISIONS, BALL_SUBDIVISIONS);
|
||||
} else if (shape->getType() == Shape::CAPSULE_SHAPE) {
|
||||
} else if (shape->getType() == CAPSULE_SHAPE) {
|
||||
CapsuleShape* capsule = static_cast<CapsuleShape*>(shape);
|
||||
|
||||
// draw a blue sphere at the capsule endpoint
|
||||
|
|
|
@ -85,9 +85,13 @@ void LocationManager::goTo(QString destination) {
|
|||
if (!goToDestination(destination)) {
|
||||
destination = QString(QUrl::toPercentEncoding(destination));
|
||||
UserActivityLogger::getInstance().wentTo(OTHER_DESTINATION_TYPE, destination);
|
||||
|
||||
JSONCallbackParameters callbackParams;
|
||||
callbackParams.jsonCallbackReceiver = this;
|
||||
callbackParams.jsonCallbackMethod = "goToAddressFromResponse";
|
||||
callbackParams.errorCallbackReceiver = this;
|
||||
callbackParams.errorCallbackMethod = "handleAddressLookupError";
|
||||
|
||||
AccountManager::getInstance().authenticatedRequest(GET_ADDRESSES.arg(destination),
|
||||
QNetworkAccessManager::GetOperation,
|
||||
callbackParams);
|
||||
|
@ -96,21 +100,17 @@ void LocationManager::goTo(QString destination) {
|
|||
|
||||
void LocationManager::goToAddressFromResponse(const QJsonObject& responseData) {
|
||||
QJsonValue status = responseData["status"];
|
||||
qDebug() << responseData;
|
||||
if (!status.isUndefined() && status.toString() == "success") {
|
||||
const QJsonObject& data = responseData["data"].toObject();
|
||||
const QJsonValue& userObject = data["user"];
|
||||
const QJsonValue& placeObject = data["place"];
|
||||
|
||||
if (!placeObject.isUndefined() && !userObject.isUndefined()) {
|
||||
emit multipleDestinationsFound(userObject.toObject(), placeObject.toObject());
|
||||
} else if (placeObject.isUndefined()) {
|
||||
Application::getInstance()->getAvatar()->goToLocationFromAddress(userObject.toObject()["address"].toObject());
|
||||
} else {
|
||||
Application::getInstance()->getAvatar()->goToLocationFromAddress(placeObject.toObject()["address"].toObject());
|
||||
}
|
||||
|
||||
const QJsonObject& data = responseData["data"].toObject();
|
||||
const QJsonValue& userObject = data["user"];
|
||||
const QJsonValue& placeObject = data["place"];
|
||||
|
||||
if (!placeObject.isUndefined() && !userObject.isUndefined()) {
|
||||
emit multipleDestinationsFound(userObject.toObject(), placeObject.toObject());
|
||||
} else if (placeObject.isUndefined()) {
|
||||
Application::getInstance()->getAvatar()->goToLocationFromAddress(userObject.toObject()["address"].toObject());
|
||||
} else {
|
||||
QMessageBox::warning(Application::getInstance()->getWindow(), "", "That user or location could not be found.");
|
||||
Application::getInstance()->getAvatar()->goToLocationFromAddress(placeObject.toObject()["address"].toObject());
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -118,6 +118,8 @@ void LocationManager::goToUser(QString userName) {
|
|||
JSONCallbackParameters callbackParams;
|
||||
callbackParams.jsonCallbackReceiver = Application::getInstance()->getAvatar();
|
||||
callbackParams.jsonCallbackMethod = "goToLocationFromResponse";
|
||||
callbackParams.errorCallbackReceiver = this;
|
||||
callbackParams.errorCallbackMethod = "handleAddressLookupError";
|
||||
|
||||
userName = QString(QUrl::toPercentEncoding(userName));
|
||||
AccountManager::getInstance().authenticatedRequest(GET_USER_ADDRESS.arg(userName),
|
||||
|
@ -129,6 +131,8 @@ void LocationManager::goToPlace(QString placeName) {
|
|||
JSONCallbackParameters callbackParams;
|
||||
callbackParams.jsonCallbackReceiver = Application::getInstance()->getAvatar();
|
||||
callbackParams.jsonCallbackMethod = "goToLocationFromResponse";
|
||||
callbackParams.errorCallbackReceiver = this;
|
||||
callbackParams.errorCallbackMethod = "handleAddressLookupError";
|
||||
|
||||
placeName = QString(QUrl::toPercentEncoding(placeName));
|
||||
AccountManager::getInstance().authenticatedRequest(GET_PLACE_ADDRESS.arg(placeName),
|
||||
|
@ -212,6 +216,19 @@ bool LocationManager::goToDestination(QString destination) {
|
|||
return false;
|
||||
}
|
||||
|
||||
void LocationManager::handleAddressLookupError(QNetworkReply::NetworkError networkError,
|
||||
const QString& errorString) {
|
||||
QString messageBoxString;
|
||||
|
||||
if (networkError == QNetworkReply::ContentNotFoundError) {
|
||||
messageBoxString = "That address could not be found.";
|
||||
} else {
|
||||
messageBoxString = errorString;
|
||||
}
|
||||
|
||||
QMessageBox::warning(Application::getInstance()->getWindow(), "", messageBoxString);
|
||||
}
|
||||
|
||||
void LocationManager::replaceLastOccurrence(const QChar search, const QChar replace, QString& string) {
|
||||
int lastIndex;
|
||||
lastIndex = string.lastIndexOf(search);
|
||||
|
|
|
@ -37,6 +37,9 @@ public:
|
|||
void goToPlace(QString placeName);
|
||||
void goToOrientation(QString orientation);
|
||||
bool goToDestination(QString destination);
|
||||
|
||||
public slots:
|
||||
void handleAddressLookupError(QNetworkReply::NetworkError networkError, const QString& errorString);
|
||||
|
||||
private:
|
||||
void replaceLastOccurrence(const QChar search, const QChar replace, QString& string);
|
||||
|
|
|
@ -73,7 +73,7 @@ void JointState::setFBXJoint(const FBXJoint* joint) {
|
|||
}
|
||||
}
|
||||
|
||||
void JointState::updateConstraint() {
|
||||
void JointState::buildConstraint() {
|
||||
if (_constraint) {
|
||||
delete _constraint;
|
||||
_constraint = NULL;
|
||||
|
|
|
@ -32,7 +32,7 @@ public:
|
|||
void setFBXJoint(const FBXJoint* joint);
|
||||
const FBXJoint& getFBXJoint() const { return *_fbxJoint; }
|
||||
|
||||
void updateConstraint();
|
||||
void buildConstraint();
|
||||
void copyState(const JointState& state);
|
||||
|
||||
void initTransform(const glm::mat4& parentTransform);
|
||||
|
|
|
@ -547,7 +547,7 @@ void Model::setJointStates(QVector<JointState> states) {
|
|||
if (distance > radius) {
|
||||
radius = distance;
|
||||
}
|
||||
_jointStates[i].updateConstraint();
|
||||
_jointStates[i].buildConstraint();
|
||||
}
|
||||
for (int i = 0; i < _jointStates.size(); i++) {
|
||||
_jointStates[i].slaveVisibleTransform();
|
||||
|
@ -708,12 +708,10 @@ void Model::clearJointAnimationPriority(int index) {
|
|||
void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority) {
|
||||
if (index != -1 && index < _jointStates.size()) {
|
||||
JointState& state = _jointStates[index];
|
||||
if (priority >= state._animationPriority) {
|
||||
if (valid) {
|
||||
state.setRotationInConstrainedFrame(rotation, priority);
|
||||
} else {
|
||||
state.restoreRotation(1.0f, priority);
|
||||
}
|
||||
if (valid) {
|
||||
state.setRotationInConstrainedFrame(rotation, priority);
|
||||
} else {
|
||||
state.restoreRotation(1.0f, priority);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1196,7 +1194,7 @@ void Model::inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm:
|
|||
}
|
||||
|
||||
// Apply the rotation, but use mixRotationDelta() which blends a bit of the default pose
|
||||
// at in the process. This provides stability to the IK solution for most models.
|
||||
// in the process. This provides stability to the IK solution for most models.
|
||||
glm::quat oldNextRotation = nextState.getRotation();
|
||||
float mixFactor = 0.03f;
|
||||
nextState.mixRotationDelta(deltaRotation, mixFactor, priority);
|
||||
|
|
|
@ -320,6 +320,7 @@ Texture::~Texture() {
|
|||
|
||||
NetworkTexture::NetworkTexture(const QUrl& url, TextureType type, const QByteArray& content) :
|
||||
Resource(url, !content.isEmpty()),
|
||||
_type(type),
|
||||
_translucent(false) {
|
||||
|
||||
if (!url.isValid()) {
|
||||
|
@ -415,8 +416,12 @@ void ImageReader::run() {
                blueTotal += qBlue(rgb);
            }
        }
        QColor averageColor(EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM);
        if (imageArea > 0) {
            averageColor.setRgb(redTotal / imageArea, greenTotal / imageArea, blueTotal / imageArea);
        }
        QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), Q_ARG(bool, false),
            Q_ARG(const QColor&, QColor(redTotal / imageArea, greenTotal / imageArea, blueTotal / imageArea)));
            Q_ARG(const QColor&, averageColor));
        return;
    }
    if (image.format() != QImage::Format_ARGB32) {
@ -474,7 +479,13 @@ void NetworkTexture::setImage(const QImage& image, bool translucent, const QColo
            GL_RGB, GL_UNSIGNED_BYTE, image.constBits());
    }
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    if (_type == SPLAT_TEXTURE) {
        // generate mipmaps for splat textures
        glGenerateMipmap(GL_TEXTURE_2D);
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
    } else {
        glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    }
    glBindTexture(GL_TEXTURE_2D, 0);
}
|
||||
|
|
|
@ -145,6 +145,7 @@ protected:
|
|||
|
||||
private:
|
||||
|
||||
TextureType _type;
|
||||
bool _translucent;
|
||||
QColor _averageColor;
|
||||
};
|
||||
|
|
|
@ -20,6 +20,10 @@ LocationScriptingInterface* LocationScriptingInterface::getInstance() {
|
|||
return &sharedInstance;
|
||||
}
|
||||
|
||||
bool LocationScriptingInterface::isConnected() {
|
||||
return NodeList::getInstance()->getDomainHandler().isConnected();
|
||||
}
|
||||
|
||||
QString LocationScriptingInterface::getHref() {
|
||||
return getProtocol() + "//" + getHostname() + getPathname();
|
||||
}
|
||||
|
|
|
@ -22,6 +22,7 @@
|
|||
|
||||
class LocationScriptingInterface : public QObject {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(bool isConnected READ isConnected)
|
||||
Q_PROPERTY(QString href READ getHref)
|
||||
Q_PROPERTY(QString protocol READ getProtocol)
|
||||
Q_PROPERTY(QString hostname READ getHostname)
|
||||
|
@ -30,6 +31,7 @@ class LocationScriptingInterface : public QObject {
|
|||
public:
|
||||
static LocationScriptingInterface* getInstance();
|
||||
|
||||
bool isConnected();
|
||||
QString getHref();
|
||||
QString getProtocol() { return CUSTOM_URL_SCHEME; };
|
||||
QString getPathname();
|
||||
|
|
|
@ -116,9 +116,16 @@ void PreferencesDialog::loadPreferences() {
|
|||
ui.faceshiftEyeDeflectionSider->setValue(menuInstance->getFaceshiftEyeDeflection() *
|
||||
ui.faceshiftEyeDeflectionSider->maximum());
|
||||
|
||||
ui.audioJitterSpin->setValue(menuInstance->getAudioJitterBufferFrames());
|
||||
const InboundAudioStream::Settings& streamSettings = menuInstance->getReceivedAudioStreamSettings();
|
||||
|
||||
ui.maxFramesOverDesiredSpin->setValue(menuInstance->getMaxFramesOverDesired());
|
||||
ui.dynamicJitterBuffersCheckBox->setChecked(streamSettings._dynamicJitterBuffers);
|
||||
ui.staticDesiredJitterBufferFramesSpin->setValue(streamSettings._staticDesiredJitterBufferFrames);
|
||||
ui.maxFramesOverDesiredSpin->setValue(streamSettings._maxFramesOverDesired);
|
||||
ui.useStdevForJitterCalcCheckBox->setChecked(streamSettings._useStDevForJitterCalc);
|
||||
ui.windowStarveThresholdSpin->setValue(streamSettings._windowStarveThreshold);
|
||||
ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->setValue(streamSettings._windowSecondsForDesiredCalcOnTooManyStarves);
|
||||
ui.windowSecondsForDesiredReductionSpin->setValue(streamSettings._windowSecondsForDesiredReduction);
|
||||
ui.repetitionWithFadeCheckBox->setChecked(streamSettings._repetitionWithFade);
|
||||
|
||||
ui.realWorldFieldOfViewSpin->setValue(menuInstance->getRealWorldFieldOfView());
|
||||
|
||||
|
@ -208,16 +215,18 @@ void PreferencesDialog::savePreferences() {
|
|||
|
||||
Menu::getInstance()->setInvertSixenseButtons(ui.invertSixenseButtonsCheckBox->isChecked());
|
||||
|
||||
Menu::getInstance()->setAudioJitterBufferFrames(ui.audioJitterSpin->value());
|
||||
if (Menu::getInstance()->getAudioJitterBufferFrames() != 0) {
|
||||
Application::getInstance()->getAudio()->setDynamicJitterBuffers(false);
|
||||
Application::getInstance()->getAudio()->setStaticDesiredJitterBufferFrames(Menu::getInstance()->getAudioJitterBufferFrames());
|
||||
} else {
|
||||
Application::getInstance()->getAudio()->setDynamicJitterBuffers(true);
|
||||
}
|
||||
InboundAudioStream::Settings streamSettings;
|
||||
streamSettings._dynamicJitterBuffers = ui.dynamicJitterBuffersCheckBox->isChecked();
|
||||
streamSettings._staticDesiredJitterBufferFrames = ui.staticDesiredJitterBufferFramesSpin->value();
|
||||
streamSettings._maxFramesOverDesired = ui.maxFramesOverDesiredSpin->value();
|
||||
streamSettings._useStDevForJitterCalc = ui.useStdevForJitterCalcCheckBox->isChecked();
|
||||
streamSettings._windowStarveThreshold = ui.windowStarveThresholdSpin->value();
|
||||
streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->value();
|
||||
streamSettings._windowSecondsForDesiredReduction = ui.windowSecondsForDesiredReductionSpin->value();
|
||||
streamSettings._repetitionWithFade = ui.repetitionWithFadeCheckBox->isChecked();
|
||||
|
||||
Menu::getInstance()->setMaxFramesOverDesired(ui.maxFramesOverDesiredSpin->value());
|
||||
Application::getInstance()->getAudio()->setMaxFramesOverDesired(Menu::getInstance()->getMaxFramesOverDesired());
|
||||
Menu::getInstance()->setReceivedAudioStreamSettings(streamSettings);
|
||||
Application::getInstance()->getAudio()->setReceivedAudioStreamSettings(streamSettings);
|
||||
|
||||
Application::getInstance()->resizeGL(Application::getInstance()->getGLWidget()->width(),
|
||||
Application::getInstance()->getGLWidget()->height());
|
||||
|
|
|
@ -1192,6 +1192,97 @@
|
|||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
<!-- dynamic jitter buffers ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_23">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item>
|
||||
<widget class="QLabel" name="label_20">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Enable Dynamic Jitter Buffers</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>dynamicJitterBuffersCheckBox</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_17">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QCheckBox" name="dynamicJitterBuffersCheckBox">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>32</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="baseSize">
|
||||
<size>
|
||||
<width>0</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="iconSize">
|
||||
<size>
|
||||
<width>32</width>
|
||||
<height>32</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
|
||||
<!-- static desired jitter frames____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_8">
|
||||
<property name="spacing">
|
||||
|
@ -1217,13 +1308,13 @@
|
|||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Audio Jitter Buffer Frames (0 for automatic)</string>
|
||||
<string>Static Jitter Buffer Frames</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>audioJitterSpin</cstring>
|
||||
<cstring>staticDesiredJitterBufferFramesSpin</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
|
@ -1246,7 +1337,13 @@
|
|||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QSpinBox" name="audioJitterSpin">
|
||||
<widget class="QSpinBox" name="staticDesiredJitterBufferFramesSpin">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>100</width>
|
||||
|
@ -1350,6 +1447,560 @@
|
|||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
<!-- max frames over desired ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_13">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item alignment="Qt::AlignLeft">
|
||||
<widget class="QLabel" name="label_10">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Max Frames Over Desired</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>maxFramesOverDesiredSpin</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_20">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QSpinBox" name="maxFramesOverDesiredSpin">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>95</width>
|
||||
<height>36</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>70</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="minimum">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="maximum">
|
||||
<number>10000</number>
|
||||
</property>
|
||||
<property name="value">
|
||||
<number>1</number>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
|
||||
<!-- use stdev for jitter calc ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_19">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item>
|
||||
<widget class="QLabel" name="label_16">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Use Stdev for Dynamic Jitter Calc</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>useStdevForJitterCalcCheckBox</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_21">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QCheckBox" name="useStdevForJitterCalcCheckBox">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>32</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="baseSize">
|
||||
<size>
|
||||
<width>0</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="iconSize">
|
||||
<size>
|
||||
<width>32</width>
|
||||
<height>32</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
|
||||
|
||||
<!-- window starve threshold ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_20">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item alignment="Qt::AlignLeft">
|
||||
<widget class="QLabel" name="label_17">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Window A Starve Threshold</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>windowStarveThresholdSpin</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_22">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QSpinBox" name="windowStarveThresholdSpin">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>95</width>
|
||||
<height>36</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>70</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="minimum">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="maximum">
|
||||
<number>10000</number>
|
||||
</property>
|
||||
<property name="value">
|
||||
<number>1</number>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
|
||||
<!-- window A seconds ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_21">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item alignment="Qt::AlignLeft">
|
||||
<widget class="QLabel" name="label_18">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Window A (raise desired on N starves) Seconds</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>windowSecondsForDesiredCalcOnTooManyStarvesSpin</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_23">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QSpinBox" name="windowSecondsForDesiredCalcOnTooManyStarvesSpin">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>95</width>
|
||||
<height>36</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>70</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="minimum">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="maximum">
|
||||
<number>10000</number>
|
||||
</property>
|
||||
<property name="value">
|
||||
<number>1</number>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
|
||||
<!-- window B seconds ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_22">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item alignment="Qt::AlignLeft">
|
||||
<widget class="QLabel" name="label_19">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Window B (desired ceiling) Seconds</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>windowSecondsForDesiredReductionSpin</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_24">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QSpinBox" name="windowSecondsForDesiredReductionSpin">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>95</width>
|
||||
<height>36</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="maximumSize">
|
||||
<size>
|
||||
<width>70</width>
|
||||
<height>16777215</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="minimum">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="maximum">
|
||||
<number>10000</number>
|
||||
</property>
|
||||
<property name="value">
|
||||
<number>1</number>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
|
||||
<!-- repetition with fade ____________________________________________________________________________ -->
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_24">
|
||||
<property name="spacing">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="topMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<property name="rightMargin">
|
||||
<number>0</number>
|
||||
</property>
|
||||
<property name="bottomMargin">
|
||||
<number>10</number>
|
||||
</property>
|
||||
<item>
|
||||
<widget class="QLabel" name="label_21">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="styleSheet">
|
||||
<string notr="true">color: rgb(51, 51, 51)</string>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string>Repetition with Fade</string>
|
||||
</property>
|
||||
<property name="indent">
|
||||
<number>15</number>
|
||||
</property>
|
||||
<property name="buddy">
|
||||
<cstring>repetitionWithFadeCheckBox</cstring>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
<item>
|
||||
<spacer name="horizontalSpacer_25">
|
||||
<property name="font">
|
||||
<font>
|
||||
<family>Arial</family>
|
||||
</font>
|
||||
</property>
|
||||
<property name="orientation">
|
||||
<enum>Qt::Horizontal</enum>
|
||||
</property>
|
||||
<property name="sizeHint" stdset="0">
|
||||
<size>
|
||||
<width>40</width>
|
||||
<height>20</height>
|
||||
</size>
|
||||
</property>
|
||||
</spacer>
|
||||
</item>
|
||||
<item>
|
||||
<widget class="QCheckBox" name="repetitionWithFadeCheckBox">
|
||||
<property name="sizePolicy">
|
||||
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
|
||||
<horstretch>0</horstretch>
|
||||
<verstretch>0</verstretch>
|
||||
</sizepolicy>
|
||||
</property>
|
||||
<property name="minimumSize">
|
||||
<size>
|
||||
<width>32</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="baseSize">
|
||||
<size>
|
||||
<width>0</width>
|
||||
<height>0</height>
|
||||
</size>
|
||||
</property>
|
||||
<property name="text">
|
||||
<string/>
|
||||
</property>
|
||||
<property name="iconSize">
|
||||
<size>
|
||||
<width>32</width>
|
||||
<height>32</height>
|
||||
</size>
|
||||
</property>
|
||||
</widget>
|
||||
</item>
|
||||
</layout>
|
||||
</item>
|
||||
|
||||
<item>
|
||||
<layout class="QHBoxLayout" name="horizontalLayout_6">
|
||||
<property name="spacing">
|
||||
|
|
|
@ -1,26 +0,0 @@
|
|||
//
|
||||
// AudioFilter.cpp
|
||||
// hifi
|
||||
//
|
||||
// Created by Craig Hansen-Sturm on 8/10/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <math.h>
|
||||
#include <vector>
|
||||
#include <SharedUtil.h>
|
||||
#include "AudioRingBuffer.h"
|
||||
#include "AudioFilter.h"
|
||||
|
||||
template<>
|
||||
AudioFilterPEQ3::FilterParameter AudioFilterPEQ3::_profiles[ AudioFilterPEQ3::_profileCount ][ AudioFilterPEQ3::_filterCount ] = {
|
||||
|
||||
// Freq Gain Q Freq Gain Q Freq Gain Q
|
||||
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // flat response (default)
|
||||
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 0.1f, 1.0f } }, // treble cut
|
||||
{ { 300.0f, 0.1f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // bass cut
|
||||
{ { 300.0f, 1.5f, 0.71f }, { 1000.0f, 0.5f, 1.0f }, { 4000.0f, 1.50f, 0.71f } } // smiley curve
|
||||
};
|
|
@ -12,7 +12,7 @@
|
|||
#ifndef hifi_AudioFilter_h
|
||||
#define hifi_AudioFilter_h
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////
|
||||
//
|
||||
// Implements a standard biquad filter in "Direct Form 1"
|
||||
// Reference http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt
|
||||
//
|
||||
|
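For reference, a minimal sketch of the Direct Form 1 update the comment above refers to, using the same coefficient convention as AudioBiquad::setParameters(a0, a1, a2, b1, b2) (a0..a2 feed forward, b1..b2 feed back); the helper name and history variables are illustrative:

// y[n] = a0*x[n] + a1*x[n-1] + a2*x[n-2] - b1*y[n-1] - b2*y[n-2]
float biquadStep(float x, float& xm1, float& xm2, float& ym1, float& ym2,
                 float a0, float a1, float a2, float b1, float b2) {
    float y = a0 * x + a1 * xm1 + a2 * xm2 - b1 * ym1 - b2 * ym2;
    xm2 = xm1; xm1 = x;   // shift input history
    ym2 = ym1; ym1 = y;   // shift output history
    return y;
}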
@ -51,15 +51,15 @@ public:
|
|||
//
|
||||
// public interface
|
||||
//
|
||||
void setParameters( const float a0, const float a1, const float a2, const float b1, const float b2 ) {
|
||||
void setParameters(const float a0, const float a1, const float a2, const float b1, const float b2) {
|
||||
_a0 = a0; _a1 = a1; _a2 = a2; _b1 = b1; _b2 = b2;
|
||||
}
|
||||
|
||||
void getParameters( float& a0, float& a1, float& a2, float& b1, float& b2 ) {
|
||||
void getParameters(float& a0, float& a1, float& a2, float& b1, float& b2) {
|
||||
a0 = _a0; a1 = _a1; a2 = _a2; b1 = _b1; b2 = _b2;
|
||||
}
|
||||
|
||||
void render( const float* in, float* out, const int frames) {
|
||||
void render(const float* in, float* out, const int frames) {
|
||||
|
||||
float x;
|
||||
float y;
|
||||
|
@ -90,209 +90,223 @@ public:
|
|||
}
|
||||
};
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Implements a single-band parametric EQ using a biquad "peaking EQ" configuration
|
||||
//
|
||||
// gain > 1.0 boosts the center frequency
|
||||
// gain < 1.0 cuts the center frequency
|
||||
//
|
||||
class AudioParametricEQ {
|
||||
|
||||
//
|
||||
// Implements common base class interface for all Audio Filter Objects
|
||||
//
|
||||
template< class T >
|
||||
class AudioFilter {
|
||||
|
||||
protected:
|
||||
|
||||
//
|
||||
// private data
|
||||
// data
|
||||
//
|
||||
AudioBiquad _kernel;
|
||||
float _sampleRate;
|
||||
float _frequency;
|
||||
float _gain;
|
||||
float _slope;
|
||||
|
||||
|
||||
//
|
||||
// helpers
|
||||
//
|
||||
void updateKernel() {
|
||||
|
||||
/*
|
||||
a0 = 1 + alpha*A
|
||||
a1 = -2*cos(w0)
|
||||
a2 = 1 - alpha*A
|
||||
b1 = -2*cos(w0)
|
||||
b2 = 1 - alpha/A
|
||||
*/
|
||||
|
||||
const float a = _gain;
|
||||
const float omega = TWO_PI * _frequency / _sampleRate;
|
||||
const float alpha = 0.5f * sinf(omega) / _slope;
|
||||
const float gamma = 1.0f / ( 1.0f + (alpha/a) );
|
||||
|
||||
const float a0 = 1.0f + (alpha*a);
|
||||
const float a1 = -2.0f * cosf(omega);
|
||||
const float a2 = 1.0f - (alpha*a);
|
||||
const float b1 = a1;
|
||||
const float b2 = 1.0f - (alpha/a);
|
||||
|
||||
_kernel.setParameters( a0*gamma,a1*gamma,a2*gamma,b1*gamma,b2*gamma );
|
||||
static_cast<T*>(this)->updateKernel();
|
||||
}
|
||||
|
||||
|
||||
public:
|
||||
//
|
||||
// ctor/dtor
|
||||
//
|
||||
AudioParametricEQ() {
|
||||
|
||||
AudioFilter() {
|
||||
setParameters(0.,0.,0.,0.);
|
||||
updateKernel();
|
||||
}
|
||||
|
||||
~AudioParametricEQ() {
|
||||
|
||||
~AudioFilter() {
|
||||
}
|
||||
|
||||
|
||||
//
|
||||
// public interface
|
||||
//
|
||||
void setParameters( const float sampleRate, const float frequency, const float gain, const float slope ) {
|
||||
|
||||
_sampleRate = std::max(sampleRate,1.0f);
|
||||
_frequency = std::max(frequency,2.0f);
|
||||
_gain = std::max(gain,0.0f);
|
||||
_slope = std::max(slope,0.00001f);
|
||||
|
||||
void setParameters(const float sampleRate, const float frequency, const float gain, const float slope) {
|
||||
|
||||
_sampleRate = std::max(sampleRate, 1.0f);
|
||||
_frequency = std::max(frequency, 2.0f);
|
||||
_gain = std::max(gain, 0.0f);
|
||||
_slope = std::max(slope, 0.00001f);
|
||||
|
||||
updateKernel();
|
||||
}
|
||||
|
||||
void getParameters( float& sampleRate, float& frequency, float& gain, float& slope ) {
|
||||
|
||||
void getParameters(float& sampleRate, float& frequency, float& gain, float& slope) {
|
||||
sampleRate = _sampleRate; frequency = _frequency; gain = _gain; slope = _slope;
|
||||
}
|
||||
|
||||
void render(const float* in, float* out, const int frames ) {
|
||||
|
||||
void render(const float* in, float* out, const int frames) {
|
||||
_kernel.render(in,out,frames);
|
||||
}
|
||||
|
||||
|
||||
void reset() {
|
||||
_kernel.reset();
|
||||
}
|
||||
};
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Helper/convenience class that implements a bank of EQ objects
|
||||
//
|
||||
template< typename T, const int N>
|
||||
class AudioFilterBank {
|
||||
|
||||
//
|
||||
// types
|
||||
//
|
||||
struct FilterParameter {
|
||||
float _p1;
|
||||
float _p2;
|
||||
float _p3;
|
||||
};
|
||||
|
||||
//
|
||||
// private static data
|
||||
//
|
||||
static const int _filterCount = N;
|
||||
static const int _profileCount = 4;
|
||||
|
||||
static FilterParameter _profiles[_profileCount][_filterCount];
|
||||
|
||||
//
|
||||
// private data
|
||||
//
|
||||
T _filters[ _filterCount ];
|
||||
float* _buffer;
|
||||
float _sampleRate;
|
||||
uint16_t _frameCount;
|
||||
|
||||
// Implements a low-shelf filter using a biquad
|
||||
//
|
||||
class AudioFilterLSF : public AudioFilter< AudioFilterLSF >
|
||||
{
|
||||
public:
|
||||
|
||||
|
||||
//
|
||||
// ctor/dtor
|
||||
// helpers
|
||||
//
|
||||
AudioFilterBank()
|
||||
: _buffer(NULL)
|
||||
, _sampleRate(0.)
|
||||
, _frameCount(0) {
|
||||
}
|
||||
|
||||
~AudioFilterBank() {
|
||||
finalize();
|
||||
}
|
||||
|
||||
//
|
||||
// public interface
|
||||
//
|
||||
void initialize( const float sampleRate, const int frameCount ) {
|
||||
finalize();
|
||||
void updateKernel() {
|
||||
|
||||
_buffer = (float*)malloc( frameCount * sizeof(float) );
|
||||
if(!_buffer) {
|
||||
return;
|
||||
}
|
||||
const float a = _gain;
|
||||
const float aAdd1 = a + 1.0f;
|
||||
const float aSub1 = a - 1.0f;
|
||||
const float omega = TWO_PI * _frequency / _sampleRate;
|
||||
const float aAdd1TimesCosOmega = aAdd1 * cosf(omega);
|
||||
const float aSub1TimesCosOmega = aSub1 * cosf(omega);
|
||||
const float alpha = 0.5f * sinf(omega) / _slope;
|
||||
const float zeta = 2.0f * sqrtf(a) * alpha;
|
||||
/*
|
||||
b0 = A*( (A+1) - (A-1)*cos(w0) + 2*sqrt(A)*alpha )
|
||||
b1 = 2*A*( (A-1) - (A+1)*cos(w0) )
|
||||
b2 = A*( (A+1) - (A-1)*cos(w0) - 2*sqrt(A)*alpha )
|
||||
a0 = (A+1) + (A-1)*cos(w0) + 2*sqrt(A)*alpha
|
||||
a1 = -2*( (A-1) + (A+1)*cos(w0) )
|
||||
a2 = (A+1) + (A-1)*cos(w0) - 2*sqrt(A)*alpha
|
||||
*/
|
||||
const float b0 = +1.0f * (aAdd1 - aSub1TimesCosOmega + zeta) * a;
|
||||
const float b1 = +2.0f * (aSub1 - aAdd1TimesCosOmega + ZERO) * a;
|
||||
const float b2 = +1.0f * (aAdd1 - aSub1TimesCosOmega - zeta) * a;
|
||||
const float a0 = +1.0f * (aAdd1 + aSub1TimesCosOmega + zeta);
|
||||
const float a1 = -2.0f * (aSub1 + aAdd1TimesCosOmega + ZERO);
|
||||
const float a2 = +1.0f * (aAdd1 + aSub1TimesCosOmega - zeta);
|
||||
|
||||
_sampleRate = sampleRate;
|
||||
_frameCount = frameCount;
|
||||
|
||||
reset();
|
||||
loadProfile(0); // load default profile "flat response" into the bank (see AudioFilter.cpp)
|
||||
const float normA0 = 1.0f / a0;
|
||||
|
||||
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
|
||||
}
|
||||
|
||||
void finalize() {
|
||||
if (_buffer ) {
|
||||
free (_buffer);
|
||||
_buffer = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
void loadProfile( int profileIndex ) {
|
||||
if (profileIndex >= 0 && profileIndex < _profileCount) {
|
||||
|
||||
for (int i = 0; i < _filterCount; ++i) {
|
||||
FilterParameter p = _profiles[profileIndex][i];
|
||||
|
||||
_filters[i].setParameters(_sampleRate,p._p1,p._p2,p._p3);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void render( const float* in, float* out, const int frameCount ) {
|
||||
for (int i = 0; i < _filterCount; ++i) {
|
||||
_filters[i].render( in, out, frameCount );
|
||||
}
|
||||
}
|
||||
|
||||
void render( const int16_t* in, int16_t* out, const int frameCount ) {
|
||||
if (!_buffer || ( frameCount > _frameCount ))
|
||||
return;
|
||||
|
||||
const int scale = (2 << ((8*sizeof(int16_t))-1));
|
||||
|
||||
// convert int16_t to float32 (normalized to -1. ... 1.)
|
||||
for (int i = 0; i < frameCount; ++i) {
|
||||
_buffer[i] = ((float)(*in++)) / scale;
|
||||
}
|
||||
// for this filter, we share input/output buffers at each stage, but our design does not mandate this
|
||||
render( _buffer, _buffer, frameCount );
|
||||
|
||||
// convert float32 to int16_t
|
||||
for (int i = 0; i < frameCount; ++i) {
|
||||
*out++ = (int16_t)(_buffer[i] * scale);
|
||||
}
|
||||
}
|
||||
|
||||
void reset() {
|
||||
for (int i = 0; i < _filterCount; ++i ) {
|
||||
_filters[i].reset();
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
////////////////////////////////////////////////////////////////////////////////////////////
|
||||
// Specializations of AudioFilterBank
|
||||
//
|
||||
typedef AudioFilterBank< AudioParametricEQ, 1> AudioFilterPEQ1; // bank with one band of PEQ
|
||||
typedef AudioFilterBank< AudioParametricEQ, 2> AudioFilterPEQ2; // bank with two bands of PEQ
|
||||
typedef AudioFilterBank< AudioParametricEQ, 3> AudioFilterPEQ3; // bank with three bands of PEQ
|
||||
// etc....
|
||||
// Implements a hi-shelf filter using a biquad
|
||||
//
|
||||
class AudioFilterHSF : public AudioFilter< AudioFilterHSF >
|
||||
{
|
||||
public:
|
||||
|
||||
//
|
||||
// helpers
|
||||
//
|
||||
void updateKernel() {
|
||||
|
||||
const float a = _gain;
|
||||
const float aAdd1 = a + 1.0f;
|
||||
const float aSub1 = a - 1.0f;
|
||||
const float omega = TWO_PI * _frequency / _sampleRate;
|
||||
const float aAdd1TimesCosOmega = aAdd1 * cosf(omega);
|
||||
const float aSub1TimesCosOmega = aSub1 * cosf(omega);
|
||||
const float alpha = 0.5f * sinf(omega) / _slope;
|
||||
const float zeta = 2.0f * sqrtf(a) * alpha;
|
||||
/*
|
||||
b0 = A*( (A+1) + (A-1)*cos(w0) + 2*sqrt(A)*alpha )
|
||||
b1 = -2*A*( (A-1) + (A+1)*cos(w0) )
|
||||
b2 = A*( (A+1) + (A-1)*cos(w0) - 2*sqrt(A)*alpha )
|
||||
a0 = (A+1) - (A-1)*cos(w0) + 2*sqrt(A)*alpha
|
||||
a1 = 2*( (A-1) - (A+1)*cos(w0) )
|
||||
a2 = (A+1) - (A-1)*cos(w0) - 2*sqrt(A)*alpha
|
||||
*/
|
||||
const float b0 = +1.0f * (aAdd1 + aSub1TimesCosOmega + zeta) * a;
|
||||
const float b1 = -2.0f * (aSub1 + aAdd1TimesCosOmega + ZERO) * a;
|
||||
const float b2 = +1.0f * (aAdd1 + aSub1TimesCosOmega - zeta) * a;
|
||||
const float a0 = +1.0f * (aAdd1 - aSub1TimesCosOmega + zeta);
|
||||
const float a1 = +2.0f * (aSub1 - aAdd1TimesCosOmega + ZERO);
|
||||
const float a2 = +1.0f * (aAdd1 - aSub1TimesCosOmega - zeta);
|
||||
|
||||
const float normA0 = 1.0f / a0;
|
||||
|
||||
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
|
||||
}
|
||||
};
|
||||
|
||||
//
|
||||
// Implements an all-pass filter using a biquad
|
||||
//
|
||||
class AudioFilterALL : public AudioFilter< AudioFilterALL >
|
||||
{
|
||||
public:
|
||||
|
||||
//
|
||||
// helpers
|
||||
//
|
||||
void updateKernel() {
|
||||
|
||||
const float omega = TWO_PI * _frequency / _sampleRate;
|
||||
const float cosOmega = cosf(omega);
|
||||
const float alpha = 0.5f * sinf(omega) / _slope;
|
||||
/*
|
||||
b0 = 1 - alpha
|
||||
b1 = -2*cos(w0)
|
||||
b2 = 1 + alpha
|
||||
a0 = 1 + alpha
|
||||
a1 = -2*cos(w0)
|
||||
a2 = 1 - alpha
|
||||
*/
|
||||
const float b0 = +1.0f - alpha;
|
||||
const float b1 = -2.0f * cosOmega;
|
||||
const float b2 = +1.0f + alpha;
|
||||
const float a0 = +1.0f + alpha;
|
||||
const float a1 = -2.0f * cosOmega;
|
||||
const float a2 = +1.0f - alpha;
|
||||
|
||||
const float normA0 = 1.0f / a0;
|
||||
|
||||
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
|
||||
}
|
||||
};
|
||||
|
||||
//
|
||||
// Implements a single-band parametric EQ using a biquad "peaking EQ" configuration
|
||||
//
|
||||
class AudioFilterPEQ : public AudioFilter< AudioFilterPEQ >
|
||||
{
|
||||
public:
|
||||
|
||||
//
|
||||
// helpers
|
||||
//
|
||||
void updateKernel() {
|
||||
|
||||
const float a = _gain;
|
||||
const float omega = TWO_PI * _frequency / _sampleRate;
|
||||
const float cosOmega = cosf(omega);
|
||||
const float alpha = 0.5f * sinf(omega) / _slope;
|
||||
const float alphaMulA = alpha * a;
|
||||
const float alphaDivA = alpha / a;
|
||||
/*
|
||||
b0 = 1 + alpha*A
|
||||
b1 = -2*cos(w0)
|
||||
b2 = 1 - alpha*A
|
||||
a0 = 1 + alpha/A
|
||||
a1 = -2*cos(w0)
|
||||
a2 = 1 - alpha/A
|
||||
*/
|
||||
const float b0 = +1.0f + alphaMulA;
|
||||
const float b1 = -2.0f * cosOmega;
|
||||
const float b2 = +1.0f - alphaMulA;
|
||||
const float a0 = +1.0f + alphaDivA;
|
||||
const float a1 = -2.0f * cosOmega;
|
||||
const float a2 = +1.0f - alphaDivA;
|
||||
|
||||
const float normA0 = 1.0f / a0;
|
||||
|
||||
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
|
||||
}
|
||||
};
|
||||
|
||||
#endif // hifi_AudioFilter_h
|
||||
|
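If it helps to see one of these filter classes in use, here is a hedged sketch of a single peaking-EQ stage boosting 2 kHz by roughly +6 dB; the buffer names, frame count, and 24 kHz rate are illustrative values, not taken from this commit:

#include "AudioFilter.h"

// inputFloats/outputFloats/frameCount are assumed to exist and hold float samples.
AudioFilterPEQ presence;
presence.setParameters(24000.0f,   // sample rate in Hz (assumed)
                       2000.0f,    // center frequency in Hz
                       2.0f,       // linear gain, roughly a +6 dB boost
                       1.0f);      // slope/Q
presence.render(inputFloats, outputFloats, frameCount);
presence.reset();                  // clear filter history between streams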
|
48
libraries/audio/src/AudioFilterBank.cpp
Normal file
|
@ -0,0 +1,48 @@
|
|||
//
|
||||
// AudioFilterBank.cpp
|
||||
// hifi
|
||||
//
|
||||
// Created by Craig Hansen-Sturm on 8/10/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <math.h>
|
||||
#include <SharedUtil.h>
|
||||
#include "AudioRingBuffer.h"
|
||||
#include "AudioFilter.h"
|
||||
#include "AudioFilterBank.h"
|
||||
|
||||
template<>
|
||||
AudioFilterLSF1s::FilterParameter
|
||||
AudioFilterLSF1s::_profiles[ AudioFilterLSF1s::_profileCount ][ AudioFilterLSF1s::_filterCount ] = {
|
||||
// Freq Gain Slope
|
||||
{ { 1000.0f, 1.0f, 1.0f } } // flat response (default)
|
||||
};
|
||||
|
||||
template<>
|
||||
AudioFilterHSF1s::FilterParameter
|
||||
AudioFilterHSF1s::_profiles[ AudioFilterHSF1s::_profileCount ][ AudioFilterHSF1s::_filterCount ] = {
|
||||
// Freq Gain Slope
|
||||
{ { 1000.0f, 1.0f, 1.0f } } // flat response (default)
|
||||
};
|
||||
|
||||
template<>
|
||||
AudioFilterPEQ1s::FilterParameter
|
||||
AudioFilterPEQ1s::_profiles[ AudioFilterPEQ1s::_profileCount ][ AudioFilterPEQ1s::_filterCount ] = {
|
||||
// Freq Gain Q
|
||||
{ { 1000.0f, 1.0f, 1.0f } } // flat response (default)
|
||||
};
|
||||
|
||||
template<>
|
||||
AudioFilterPEQ3m::FilterParameter
|
||||
AudioFilterPEQ3m::_profiles[ AudioFilterPEQ3m::_profileCount ][ AudioFilterPEQ3m::_filterCount ] = {
|
||||
|
||||
// Freq Gain Q Freq Gain Q Freq Gain Q
|
||||
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // flat response (default)
|
||||
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 0.1f, 1.0f } }, // treble cut
|
||||
{ { 300.0f, 0.1f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // bass cut
|
||||
{ { 300.0f, 1.5f, 0.71f }, { 1000.0f, 0.5f, 1.0f }, { 4000.0f, 1.50f, 0.71f } } // smiley curve
|
||||
};
|
170
libraries/audio/src/AudioFilterBank.h
Normal file
|
@ -0,0 +1,170 @@
|
|||
//
|
||||
// AudioFilterBank.h
|
||||
// hifi
|
||||
//
|
||||
// Created by Craig Hansen-Sturm on 8/23/14.
|
||||
// Copyright 2014 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_AudioFilterBank_h
|
||||
#define hifi_AudioFilterBank_h
|
||||
|
||||
//
|
||||
// Helper/convenience class that implements a bank of Filter objects
|
||||
//
|
||||
template< typename T, const int N, const int C >
|
||||
class AudioFilterBank {
|
||||
|
||||
//
|
||||
// types
|
||||
//
|
||||
struct FilterParameter {
|
||||
float _p1;
|
||||
float _p2;
|
||||
float _p3;
|
||||
};
|
||||
|
||||
//
|
||||
// private static data
|
||||
//
|
||||
static const int _filterCount = N;
|
||||
static const int _channelCount = C;
|
||||
static const int _profileCount = 4;
|
||||
|
||||
static FilterParameter _profiles[ _profileCount ][ _filterCount ];
|
||||
|
||||
//
|
||||
// private data
|
||||
//
|
||||
T _filters[ _filterCount ][ _channelCount ];
|
||||
float* _buffer[ _channelCount ];
|
||||
float _sampleRate;
|
||||
uint16_t _frameCount;
|
||||
|
||||
public:
|
||||
|
||||
//
|
||||
// ctor/dtor
|
||||
//
|
||||
AudioFilterBank()
|
||||
: _sampleRate(0.)
|
||||
, _frameCount(0) {
|
||||
for (int i = 0; i < _channelCount; ++i) {
|
||||
_buffer[ i ] = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
~AudioFilterBank() {
|
||||
finalize();
|
||||
}
|
||||
|
||||
//
|
||||
// public interface
|
||||
//
|
||||
void initialize(const float sampleRate, const int frameCount) {
|
||||
finalize();
|
||||
|
||||
for (int i = 0; i < _channelCount; ++i) {
|
||||
_buffer[i] = (float*)malloc(frameCount * sizeof(float));
|
||||
}
|
||||
|
||||
_sampleRate = sampleRate;
|
||||
_frameCount = frameCount;
|
||||
|
||||
reset();
|
||||
loadProfile(0); // load default profile "flat response" into the bank (see AudioFilterBank.cpp)
|
||||
}
|
||||
|
||||
void finalize() {
|
||||
for (int i = 0; i < _channelCount; ++i) {
|
||||
if (_buffer[i]) {
|
||||
free (_buffer[i]);
|
||||
_buffer[i] = NULL;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void loadProfile(int profileIndex) {
|
||||
if (profileIndex >= 0 && profileIndex < _profileCount) {
|
||||
|
||||
for (int i = 0; i < _filterCount; ++i) {
|
||||
FilterParameter p = _profiles[profileIndex][i];
|
||||
|
||||
for (int j = 0; j < _channelCount; ++j) {
|
||||
_filters[i][j].setParameters(_sampleRate,p._p1,p._p2,p._p3);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void setParameters(int filterStage, int filterChannel, const float sampleRate, const float frequency, const float gain,
|
||||
const float slope) {
|
||||
if (filterStage >= 0 && filterStage < _filterCount && filterChannel >= 0 && filterChannel < _channelCount) {
|
||||
_filters[filterStage][filterChannel].setParameters(sampleRate,frequency,gain,slope);
|
||||
}
|
||||
}
|
||||
|
||||
void getParameters(int filterStage, int filterChannel, float& sampleRate, float& frequency, float& gain, float& slope) {
|
||||
if (filterStage >= 0 && filterStage < _filterCount && filterChannel >= 0 && filterChannel < _channelCount) {
|
||||
_filters[filterStage][filterChannel].getParameters(sampleRate,frequency,gain,slope);
|
||||
}
|
||||
}
|
||||
|
||||
void render(const int16_t* in, int16_t* out, const int frameCount) {
|
||||
if (!_buffer || (frameCount > _frameCount))
|
||||
return;
|
||||
|
||||
const int scale = (2 << ((8 * sizeof(int16_t)) - 1));
|
||||
|
||||
// de-interleave and convert int16_t to float32 (normalized to -1. ... 1.)
|
||||
for (int i = 0; i < frameCount; ++i) {
|
||||
for (int j = 0; j < _channelCount; ++j) {
|
||||
_buffer[j][i] = ((float)(*in++)) / scale;
|
||||
}
|
||||
}
|
||||
|
||||
// now step through each filter
|
||||
for (int i = 0; i < _channelCount; ++i) {
|
||||
for (int j = 0; j < _filterCount; ++j) {
|
||||
_filters[j][i].render( &_buffer[i][0], &_buffer[i][0], frameCount );
|
||||
}
|
||||
}
|
||||
|
||||
// convert float32 to int16_t and interleave
|
||||
for (int i = 0; i < frameCount; ++i) {
|
||||
for (int j = 0; j < _channelCount; ++j) {
|
||||
*out++ = (int16_t)(_buffer[j][i] * scale);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void reset() {
|
||||
for (int i = 0; i < _filterCount; ++i) {
|
||||
for (int j = 0; j < _channelCount; ++j) {
|
||||
_filters[i][j].reset();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
//
|
||||
// Specializations of AudioFilterBank
|
||||
//
|
||||
typedef AudioFilterBank< AudioFilterLSF, 1, 1> AudioFilterLSF1m; // mono bank with one band of LSF
|
||||
typedef AudioFilterBank< AudioFilterLSF, 1, 2> AudioFilterLSF1s; // stereo bank with one band of LSF
|
||||
typedef AudioFilterBank< AudioFilterHSF, 1, 1> AudioFilterHSF1m; // mono bank with one band of HSF
|
||||
typedef AudioFilterBank< AudioFilterHSF, 1, 2> AudioFilterHSF1s; // stereo bank with one band of HSF
|
||||
typedef AudioFilterBank< AudioFilterPEQ, 1, 1> AudioFilterPEQ1m; // mono bank with one band of PEQ
|
||||
typedef AudioFilterBank< AudioFilterPEQ, 2, 1> AudioFilterPEQ2m; // mono bank with two bands of PEQ
|
||||
typedef AudioFilterBank< AudioFilterPEQ, 3, 1> AudioFilterPEQ3m; // mono bank with three bands of PEQ
|
||||
typedef AudioFilterBank< AudioFilterPEQ, 1, 2> AudioFilterPEQ1s; // stereo bank with one band of PEQ
|
||||
typedef AudioFilterBank< AudioFilterPEQ, 2, 2> AudioFilterPEQ2s; // stereo bank with two bands of PEQ
|
||||
typedef AudioFilterBank< AudioFilterPEQ, 3, 2> AudioFilterPEQ3s; // stereo bank with three bands of PEQ
|
||||
// etc....
|
||||
|
||||
|
||||
#endif // hifi_AudioFilter_h
|
|
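A hedged usage sketch of one of these bank specializations; the in/out buffer names are placeholders, while the profile index refers to the table added in AudioFilterBank.cpp above, and SAMPLE_RATE / NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL are the audio constants used elsewhere in this commit:

#include "AudioFilterBank.h"

// networkSamplesIn/networkSamplesOut are assumed int16_t buffers holding one mono network frame.
AudioFilterPEQ3m eq;                       // mono bank, three parametric EQ bands
eq.initialize(SAMPLE_RATE, NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
eq.loadProfile(3);                         // profile 3 is the "smiley curve" in AudioFilterBank.cpp
eq.render(networkSamplesIn, networkSamplesOut, NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);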
@ -57,8 +57,6 @@ void AudioInjector::injectAudio() {
|
|||
|
||||
}
|
||||
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
|
||||
// setup the packet for injected audio
|
||||
QByteArray injectAudioPacket = byteArrayWithPopulatedHeader(PacketTypeInjectAudio);
|
||||
QDataStream packetStream(&injectAudioPacket, QIODevice::Append);
|
||||
|
@ -122,6 +120,7 @@ void AudioInjector::injectAudio() {
|
|||
memcpy(injectAudioPacket.data() + numPreAudioDataBytes, soundByteArray.data() + currentSendPosition, bytesToCopy);
|
||||
|
||||
// grab our audio mixer from the NodeList, if it exists
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||
|
||||
// send off this audio packet
|
||||
|
|
|
@ -20,18 +20,16 @@
|
|||
#include "AudioRingBuffer.h"
|
||||
|
||||
AudioRingBuffer::AudioRingBuffer(int numFrameSamples, bool randomAccessMode, int numFramesCapacity) :
|
||||
_frameCapacity(numFramesCapacity),
|
||||
_sampleCapacity(numFrameSamples * numFramesCapacity),
|
||||
_isFull(false),
|
||||
_numFrameSamples(numFrameSamples),
|
||||
_randomAccessMode(randomAccessMode),
|
||||
_overflowCount(0)
|
||||
_frameCapacity(numFramesCapacity),
|
||||
_sampleCapacity(numFrameSamples * numFramesCapacity),
|
||||
_bufferLength(numFrameSamples * (numFramesCapacity + 1)),
|
||||
_numFrameSamples(numFrameSamples),
|
||||
_randomAccessMode(randomAccessMode),
|
||||
_overflowCount(0)
|
||||
{
|
||||
if (numFrameSamples) {
|
||||
_buffer = new int16_t[_sampleCapacity];
|
||||
if (_randomAccessMode) {
|
||||
memset(_buffer, 0, _sampleCapacity * sizeof(int16_t));
|
||||
}
|
||||
_buffer = new int16_t[_bufferLength];
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
_nextOutput = _buffer;
|
||||
_endOfLastWrite = _buffer;
|
||||
} else {
|
||||
|
@ -53,28 +51,29 @@ void AudioRingBuffer::reset() {
|
|||
void AudioRingBuffer::resizeForFrameSize(int numFrameSamples) {
|
||||
delete[] _buffer;
|
||||
_sampleCapacity = numFrameSamples * _frameCapacity;
|
||||
_bufferLength = numFrameSamples * (_frameCapacity + 1);
|
||||
_numFrameSamples = numFrameSamples;
|
||||
_buffer = new int16_t[_sampleCapacity];
|
||||
_buffer = new int16_t[_bufferLength];
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
memset(_buffer, 0, _sampleCapacity * sizeof(int16_t));
|
||||
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
|
||||
}
|
||||
reset();
|
||||
}
|
||||
|
||||
void AudioRingBuffer::clear() {
|
||||
_isFull = false;
|
||||
_endOfLastWrite = _buffer;
|
||||
_nextOutput = _buffer;
|
||||
}
|
||||
|
||||
int AudioRingBuffer::readSamples(int16_t* destination, int maxSamples) {
|
||||
return readData((char*) destination, maxSamples * sizeof(int16_t));
|
||||
return readData((char*)destination, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::readData(char *data, int maxSize) {
|
||||
|
||||
// only copy up to the number of samples we have available
|
||||
int numReadSamples = std::min((int) (maxSize / sizeof(int16_t)), samplesAvailable());
|
||||
int numReadSamples = std::min((int)(maxSize / sizeof(int16_t)), samplesAvailable());
|
||||
|
||||
// If we're in random access mode, then we consider our number of available read samples slightly
|
||||
// differently. Namely, if anything has been written, we say we have as many samples as they ask for
|
||||
|
@ -83,16 +82,16 @@ int AudioRingBuffer::readData(char *data, int maxSize) {
|
|||
numReadSamples = _endOfLastWrite ? (maxSize / sizeof(int16_t)) : 0;
|
||||
}
|
||||
|
||||
if (_nextOutput + numReadSamples > _buffer + _sampleCapacity) {
|
||||
if (_nextOutput + numReadSamples > _buffer + _bufferLength) {
|
||||
// we're going to need to do two reads to get this data, it wraps around the edge
|
||||
|
||||
// read to the end of the buffer
|
||||
int numSamplesToEnd = (_buffer + _sampleCapacity) - _nextOutput;
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput;
|
||||
memcpy(data, _nextOutput, numSamplesToEnd * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
memset(_nextOutput, 0, numSamplesToEnd * sizeof(int16_t)); // clear it
|
||||
}
|
||||
|
||||
|
||||
// read the rest from the beginning of the buffer
|
||||
memcpy(data + (numSamplesToEnd * sizeof(int16_t)), _buffer, (numReadSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
if (_randomAccessMode) {
|
||||
|
@ -108,22 +107,19 @@ int AudioRingBuffer::readData(char *data, int maxSize) {
|
|||
|
||||
// push the position of _nextOutput by the number of samples read
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numReadSamples);
|
||||
if (numReadSamples > 0) {
|
||||
_isFull = false;
|
||||
}
|
||||
|
||||
return numReadSamples * sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
|
||||
return writeData((const char*) source, maxSamples * sizeof(int16_t));
|
||||
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
|
||||
return writeData((const char*)source, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeData(const char* data, int maxSize) {
|
||||
// make sure we have enough bytes left for this to be the right amount of audio
|
||||
// otherwise we should not copy that data, and leave the buffer pointers where they are
|
||||
int samplesToCopy = std::min((int)(maxSize / sizeof(int16_t)), _sampleCapacity);
|
||||
|
||||
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
if (samplesToCopy > samplesRoomFor) {
|
||||
// there's not enough room for this write. erase old data to make room for this new data
|
||||
|
@ -132,19 +128,16 @@ int AudioRingBuffer::writeData(const char* data, int maxSize) {
|
|||
_overflowCount++;
|
||||
qDebug() << "Overflowed ring buffer! Overwriting old data";
|
||||
}
|
||||
|
||||
if (_endOfLastWrite + samplesToCopy <= _buffer + _sampleCapacity) {
|
||||
|
||||
if (_endOfLastWrite + samplesToCopy <= _buffer + _bufferLength) {
|
||||
memcpy(_endOfLastWrite, data, samplesToCopy * sizeof(int16_t));
|
||||
} else {
|
||||
int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite;
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
|
||||
memcpy(_endOfLastWrite, data, numSamplesToEnd * sizeof(int16_t));
|
||||
memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (samplesToCopy - numSamplesToEnd) * sizeof(int16_t));
|
||||
}
|
||||
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, samplesToCopy);
|
||||
if (samplesToCopy > 0 && _endOfLastWrite == _nextOutput) {
|
||||
_isFull = true;
|
||||
}
|
||||
|
||||
return samplesToCopy * sizeof(int16_t);
|
||||
}
|
||||
|
@ -158,61 +151,52 @@ const int16_t& AudioRingBuffer::operator[] (const int index) const {
|
|||
}
|
||||
|
||||
void AudioRingBuffer::shiftReadPosition(unsigned int numSamples) {
|
||||
if (numSamples > 0) {
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples);
|
||||
_isFull = false;
|
||||
}
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::samplesAvailable() const {
|
||||
if (!_endOfLastWrite) {
|
||||
return 0;
|
||||
}
|
||||
if (_isFull) {
|
||||
return _sampleCapacity;
|
||||
}
|
||||
|
||||
int sampleDifference = _endOfLastWrite - _nextOutput;
|
||||
if (sampleDifference < 0) {
|
||||
sampleDifference += _sampleCapacity;
|
||||
sampleDifference += _bufferLength;
|
||||
}
|
||||
return sampleDifference;
|
||||
}
|
||||
|
||||
int AudioRingBuffer::addSilentFrame(int numSilentSamples) {
|
||||
int AudioRingBuffer::addSilentSamples(int silentSamples) {
|
||||
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
if (numSilentSamples > samplesRoomFor) {
|
||||
if (silentSamples > samplesRoomFor) {
|
||||
// there's not enough room for this write. write as many silent samples as we have room for
|
||||
numSilentSamples = samplesRoomFor;
|
||||
silentSamples = samplesRoomFor;
|
||||
qDebug() << "Dropping some silent samples to prevent ring buffer overflow";
|
||||
}
|
||||
|
||||
// memset zeroes into the buffer, accommodate a wrap around the end
|
||||
// push the _endOfLastWrite to the correct spot
|
||||
if (_endOfLastWrite + numSilentSamples <= _buffer + _sampleCapacity) {
|
||||
memset(_endOfLastWrite, 0, numSilentSamples * sizeof(int16_t));
|
||||
if (_endOfLastWrite + silentSamples <= _buffer + _bufferLength) {
|
||||
memset(_endOfLastWrite, 0, silentSamples * sizeof(int16_t));
|
||||
} else {
|
||||
int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite;
|
||||
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
|
||||
memset(_endOfLastWrite, 0, numSamplesToEnd * sizeof(int16_t));
|
||||
memset(_buffer, 0, (numSilentSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
}
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numSilentSamples);
|
||||
if (numSilentSamples > 0 && _nextOutput == _endOfLastWrite) {
|
||||
_isFull = true;
|
||||
memset(_buffer, 0, (silentSamples - numSamplesToEnd) * sizeof(int16_t));
|
||||
}
|
||||
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, silentSamples);
|
||||
|
||||
return numSilentSamples * sizeof(int16_t);
|
||||
return silentSamples;
|
||||
}
|
||||
|
||||
int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const {
|
||||
|
||||
if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _sampleCapacity) {
|
||||
if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _bufferLength) {
|
||||
// this shift will wrap the position around to the beginning of the ring
|
||||
return position + numSamplesShift - _sampleCapacity;
|
||||
return position + numSamplesShift - _bufferLength;
|
||||
} else if (numSamplesShift < 0 && position + numSamplesShift < _buffer) {
|
||||
// this shift will go around to the end of the ring
|
||||
return position + numSamplesShift + _sampleCapacity;
|
||||
return position + numSamplesShift + _bufferLength;
|
||||
} else {
|
||||
return position + numSamplesShift;
|
||||
}
|
||||
|
@ -221,7 +205,7 @@ int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int
|
|||
float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
|
||||
float loudness = 0.0f;
|
||||
const int16_t* sampleAt = frameStart;
|
||||
const int16_t* _bufferLastAt = _buffer + _sampleCapacity - 1;
|
||||
const int16_t* _bufferLastAt = _buffer + _bufferLength - 1;
|
||||
|
||||
for (int i = 0; i < _numFrameSamples; ++i) {
|
||||
loudness += fabsf(*sampleAt);
|
||||
|
@ -229,11 +213,14 @@ float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
|
|||
}
|
||||
loudness /= _numFrameSamples;
|
||||
loudness /= MAX_SAMPLE_VALUE;
|
||||
|
||||
|
||||
return loudness;
|
||||
}
|
||||
|
||||
float AudioRingBuffer::getFrameLoudness(ConstIterator frameStart) const {
|
||||
if (frameStart.isNull()) {
|
||||
return 0.0f;
|
||||
}
|
||||
return getFrameLoudness(&(*frameStart));
|
||||
}
|
||||
|
||||
|
@ -241,3 +228,44 @@ float AudioRingBuffer::getNextOutputFrameLoudness() const {
|
|||
return getFrameLoudness(_nextOutput);
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) {
|
||||
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
if (samplesToCopy > samplesRoomFor) {
|
||||
// there's not enough room for this write. erase old data to make room for this new data
|
||||
int samplesToDelete = samplesToCopy - samplesRoomFor;
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete);
|
||||
_overflowCount++;
|
||||
qDebug() << "Overflowed ring buffer! Overwriting old data";
|
||||
}
|
||||
|
||||
int16_t* bufferLast = _buffer + _bufferLength - 1;
|
||||
for (int i = 0; i < samplesToCopy; i++) {
|
||||
*_endOfLastWrite = *source;
|
||||
_endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1;
|
||||
++source;
|
||||
}
|
||||
|
||||
return samplesToCopy;
|
||||
}
|
||||
|
||||
int AudioRingBuffer::writeSamplesWithFade(ConstIterator source, int maxSamples, float fade) {
|
||||
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
|
||||
int samplesRoomFor = _sampleCapacity - samplesAvailable();
|
||||
if (samplesToCopy > samplesRoomFor) {
|
||||
// there's not enough room for this write. erase old data to make room for this new data
|
||||
int samplesToDelete = samplesToCopy - samplesRoomFor;
|
||||
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete);
|
||||
_overflowCount++;
|
||||
qDebug() << "Overflowed ring buffer! Overwriting old data";
|
||||
}
|
||||
|
||||
int16_t* bufferLast = _buffer + _bufferLength - 1;
|
||||
for (int i = 0; i < samplesToCopy; i++) {
|
||||
*_endOfLastWrite = (int16_t)((float)(*source) * fade);
|
||||
_endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1;
|
||||
++source;
|
||||
}
|
||||
|
||||
return samplesToCopy;
|
||||
}
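
Both write paths above share the same overflow policy: if the incoming block is larger than the free space, the oldest unread samples are discarded by advancing the read position before copying, and the faded variant additionally scales each copied sample. A rough standalone sketch of that bookkeeping, simplified to counts (the real class also increments the overflow counter and logs):

#include <algorithm>
#include <vector>
#include <cstdint>
#include <iostream>

// Illustrative only: how many old samples must be dropped so that `incoming`
// samples fit into a ring of `capacity` that already holds `available` samples.
int samplesToDrop(int incoming, int available, int capacity) {
    int roomFor = capacity - available;
    return std::max(0, std::min(incoming, capacity) - roomFor);
}

int main() {
    // ring holds 700 of 960 samples; a 480-sample write must evict 220 old samples
    std::cout << samplesToDrop(480, 700, 960) << "\n"; // prints 220

    // the faded variant scales each copied sample, e.g. fade = 0.5f
    std::vector<int16_t> frame(4, 1000);
    float fade = 0.5f;
    for (int16_t& s : frame) {
        s = (int16_t)((float)s * fade);  // same per-sample scaling as writeSamplesWithFade
    }
    std::cout << frame[0] << "\n"; // prints 500
    return 0;
}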
@ -28,7 +28,7 @@ const int NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL = 512;
|
|||
const int NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL / sizeof(int16_t);
|
||||
|
||||
const unsigned int BUFFER_SEND_INTERVAL_USECS = floorf((NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL
|
||||
/ (float) SAMPLE_RATE) * USECS_PER_SECOND);
|
||||
/ (float)SAMPLE_RATE) * USECS_PER_SECOND);
|
||||
|
||||
const int MAX_SAMPLE_VALUE = std::numeric_limits<int16_t>::max();
|
||||
const int MIN_SAMPLE_VALUE = std::numeric_limits<int16_t>::min();
|
||||
|
@ -42,33 +42,33 @@ public:
|
|||
|
||||
void reset();
|
||||
void resizeForFrameSize(int numFrameSamples);
|
||||
|
||||
|
||||
void clear();
|
||||
|
||||
int getSampleCapacity() const { return _sampleCapacity; }
|
||||
int getFrameCapacity() const { return _frameCapacity; }
|
||||
|
||||
|
||||
int readSamples(int16_t* destination, int maxSamples);
|
||||
int writeSamples(const int16_t* source, int maxSamples);
|
||||
|
||||
|
||||
int readData(char* data, int maxSize);
|
||||
int writeData(const char* data, int maxSize);
|
||||
|
||||
|
||||
int16_t& operator[](const int index);
|
||||
const int16_t& operator[] (const int index) const;
|
||||
|
||||
|
||||
void shiftReadPosition(unsigned int numSamples);
|
||||
|
||||
float getNextOutputFrameLoudness() const;
|
||||
|
||||
|
||||
int samplesAvailable() const;
|
||||
int framesAvailable() const { return samplesAvailable() / _numFrameSamples; }
|
||||
|
||||
int getNumFrameSamples() const { return _numFrameSamples; }
|
||||
|
||||
|
||||
int getOverflowCount() const { return _overflowCount; } /// how many times the ring buffer has overwritten old data
|
||||
|
||||
int addSilentFrame(int numSilentSamples);
|
||||
|
||||
int addSilentSamples(int samples);
|
||||
|
||||
private:
|
||||
float getFrameLoudness(const int16_t* frameStart) const;
|
||||
|
@ -77,12 +77,12 @@ protected:
|
|||
// disallow copying of AudioRingBuffer objects
|
||||
AudioRingBuffer(const AudioRingBuffer&);
|
||||
AudioRingBuffer& operator= (const AudioRingBuffer&);
|
||||
|
||||
|
||||
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
|
||||
|
||||
int _frameCapacity;
|
||||
int _sampleCapacity;
|
||||
bool _isFull;
|
||||
int _bufferLength; // actual length of _buffer: will be one frame larger than _sampleCapacity
|
||||
int _numFrameSamples;
|
||||
int16_t* _nextOutput;
|
||||
int16_t* _endOfLastWrite;
|
||||
|
@ -95,23 +95,25 @@ public:
|
|||
class ConstIterator { //public std::iterator < std::forward_iterator_tag, int16_t > {
|
||||
public:
|
||||
ConstIterator()
|
||||
: _capacity(0),
|
||||
: _bufferLength(0),
|
||||
_bufferFirst(NULL),
|
||||
_bufferLast(NULL),
|
||||
_at(NULL) {}
|
||||
|
||||
ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at)
|
||||
: _capacity(capacity),
|
||||
: _bufferLength(capacity),
|
||||
_bufferFirst(bufferFirst),
|
||||
_bufferLast(bufferFirst + capacity - 1),
|
||||
_at(at) {}
|
||||
|
||||
bool isNull() const { return _at == NULL; }
|
||||
|
||||
bool operator==(const ConstIterator& rhs) { return _at == rhs._at; }
|
||||
bool operator!=(const ConstIterator& rhs) { return _at != rhs._at; }
|
||||
const int16_t& operator*() { return *_at; }
|
||||
|
||||
ConstIterator& operator=(const ConstIterator& rhs) {
|
||||
_capacity = rhs._capacity;
|
||||
_bufferLength = rhs._bufferLength;
|
||||
_bufferFirst = rhs._bufferFirst;
|
||||
_bufferLast = rhs._bufferLast;
|
||||
_at = rhs._at;
|
||||
|
@ -145,40 +147,54 @@ public:
|
|||
}
|
||||
|
||||
ConstIterator operator+(int i) {
|
||||
return ConstIterator(_bufferFirst, _capacity, atShiftedBy(i));
|
||||
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i));
|
||||
}
|
||||
|
||||
ConstIterator operator-(int i) {
|
||||
return ConstIterator(_bufferFirst, _capacity, atShiftedBy(-i));
|
||||
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i));
|
||||
}
|
||||
|
||||
    void readSamples(int16_t* dest, int numSamples) {
+        int16_t* at = _at;
        for (int i = 0; i < numSamples; i++) {
-            *dest = *(*this);
+            *dest = *at;
            ++dest;
-            ++(*this);
+            at = (at == _bufferLast) ? _bufferFirst : at + 1;
        }
    }
|
||||
|
||||
|
||||
void readSamplesWithFade(int16_t* dest, int numSamples, float fade) {
|
||||
int16_t* at = _at;
|
||||
for (int i = 0; i < numSamples; i++) {
|
||||
*dest = (float)*at * fade;
|
||||
++dest;
|
||||
at = (at == _bufferLast) ? _bufferFirst : at + 1;
|
||||
}
|
||||
}
|
||||
|
||||
private:
|
||||
int16_t* atShiftedBy(int i) {
|
||||
i = (_at - _bufferFirst + i) % _capacity;
|
||||
i = (_at - _bufferFirst + i) % _bufferLength;
|
||||
if (i < 0) {
|
||||
i += _capacity;
|
||||
i += _bufferLength;
|
||||
}
|
||||
return _bufferFirst + i;
|
||||
}
|
||||
|
||||
private:
|
||||
int _capacity;
|
||||
int _bufferLength;
|
||||
int16_t* _bufferFirst;
|
||||
int16_t* _bufferLast;
|
||||
int16_t* _at;
|
||||
};
|
||||
|
||||
ConstIterator nextOutput() const { return ConstIterator(_buffer, _sampleCapacity, _nextOutput); }
|
||||
ConstIterator nextOutput() const { return ConstIterator(_buffer, _bufferLength, _nextOutput); }
|
||||
ConstIterator lastFrameWritten() const { return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples; }
|
||||
|
||||
float getFrameLoudness(ConstIterator frameStart) const;
|
||||
|
||||
int writeSamples(ConstIterator source, int maxSamples);
|
||||
int writeSamplesWithFade(ConstIterator source, int maxSamples, float fade);
|
||||
};
|
||||
|
||||
#endif // hifi_AudioRingBuffer_h
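
Taken together, the header above gives the buffer a frame-oriented read/write API plus a non-consuming iterator. A hedged usage sketch; the constructor arguments mirror the `_ringBuffer(numFrameSamples, false, numFramesCapacity)` initializer used by InboundAudioStream in this change, and the meaning of the middle bool is not shown in these hunks:

#include "AudioRingBuffer.h"
#include <vector>
#include <cstdint>

// Sketch only: exercises the public API visible in the header above.
void ringBufferRoundTrip() {
    AudioRingBuffer buffer(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL, false, 10);

    std::vector<int16_t> frame(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL, 42);
    buffer.writeSamples(frame.data(), (int)frame.size());   // one frame of real samples
    buffer.addSilentSamples((int)frame.size());              // one frame of silence

    int framesQueued = buffer.framesAvailable();              // expect 2

    std::vector<int16_t> out(NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL);
    buffer.readSamples(out.data(), (int)out.size());          // oldest frame back out

    // peek at what will be read next without consuming it
    AudioRingBuffer::ConstIterator peek = buffer.nextOutput();
    int16_t firstPending = peek.isNull() ? 0 : *peek;
    (void)framesQueued; (void)firstPending;
}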
|
||||
|
|
|
@ -14,30 +14,37 @@
|
|||
#include "InboundAudioStream.h"
|
||||
#include "PacketHeaders.h"
|
||||
|
||||
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity,
|
||||
bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc) :
|
||||
const int STARVE_HISTORY_CAPACITY = 50;
|
||||
|
||||
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings) :
|
||||
_ringBuffer(numFrameSamples, false, numFramesCapacity),
|
||||
_lastPopSucceeded(false),
|
||||
_lastPopOutput(),
|
||||
_dynamicJitterBuffers(dynamicJitterBuffers),
|
||||
_staticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames),
|
||||
_useStDevForJitterCalc(useStDevForJitterCalc),
|
||||
_calculatedJitterBufferFramesUsingMaxGap(0),
|
||||
_calculatedJitterBufferFramesUsingStDev(0),
|
||||
_desiredJitterBufferFrames(dynamicJitterBuffers ? 1 : staticDesiredJitterBufferFrames),
|
||||
_maxFramesOverDesired(maxFramesOverDesired),
|
||||
_dynamicJitterBuffers(settings._dynamicJitterBuffers),
|
||||
_staticDesiredJitterBufferFrames(settings._staticDesiredJitterBufferFrames),
|
||||
_useStDevForJitterCalc(settings._useStDevForJitterCalc),
|
||||
_desiredJitterBufferFrames(settings._dynamicJitterBuffers ? 1 : settings._staticDesiredJitterBufferFrames),
|
||||
_maxFramesOverDesired(settings._maxFramesOverDesired),
|
||||
_isStarved(true),
|
||||
_hasStarted(false),
|
||||
_consecutiveNotMixedCount(0),
|
||||
_starveCount(0),
|
||||
_silentFramesDropped(0),
|
||||
_oldFramesDropped(0),
|
||||
_incomingSequenceNumberStats(INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS),
|
||||
_lastFrameReceivedTime(0),
|
||||
_interframeTimeGapStatsForJitterCalc(TIME_GAPS_FOR_JITTER_CALC_INTERVAL_SAMPLES, TIME_GAPS_FOR_JITTER_CALC_WINDOW_INTERVALS),
|
||||
_interframeTimeGapStatsForStatsPacket(TIME_GAPS_FOR_STATS_PACKET_INTERVAL_SAMPLES, TIME_GAPS_FOR_STATS_PACKET_WINDOW_INTERVALS),
|
||||
_incomingSequenceNumberStats(STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
|
||||
_lastPacketReceivedTime(0),
|
||||
_timeGapStatsForDesiredCalcOnTooManyStarves(0, settings._windowSecondsForDesiredCalcOnTooManyStarves),
|
||||
_calculatedJitterBufferFramesUsingMaxGap(0),
|
||||
_stdevStatsForDesiredCalcOnTooManyStarves(),
|
||||
_calculatedJitterBufferFramesUsingStDev(0),
|
||||
_timeGapStatsForDesiredReduction(0, settings._windowSecondsForDesiredReduction),
|
||||
_starveHistoryWindowSeconds(settings._windowSecondsForDesiredCalcOnTooManyStarves),
|
||||
_starveHistory(STARVE_HISTORY_CAPACITY),
|
||||
_starveThreshold(settings._windowStarveThreshold),
|
||||
_framesAvailableStat(),
|
||||
_currentJitterBufferFrames(0)
|
||||
_currentJitterBufferFrames(0),
|
||||
_timeGapStatsForStatsPacket(0, STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
|
||||
_repetitionWithFade(settings._repetitionWithFade)
|
||||
{
|
||||
}
|
||||
|
||||
|
@ -59,11 +66,14 @@ void InboundAudioStream::resetStats() {
|
|||
_silentFramesDropped = 0;
|
||||
_oldFramesDropped = 0;
|
||||
_incomingSequenceNumberStats.reset();
|
||||
_lastFrameReceivedTime = 0;
|
||||
_interframeTimeGapStatsForJitterCalc.reset();
|
||||
_interframeTimeGapStatsForStatsPacket.reset();
|
||||
_lastPacketReceivedTime = 0;
|
||||
_timeGapStatsForDesiredCalcOnTooManyStarves.reset();
|
||||
_stdevStatsForDesiredCalcOnTooManyStarves = StDev();
|
||||
_timeGapStatsForDesiredReduction.reset();
|
||||
_starveHistory.clear();
|
||||
_framesAvailableStat.reset();
|
||||
_currentJitterBufferFrames = 0;
|
||||
_timeGapStatsForStatsPacket.reset();
|
||||
}
|
||||
|
||||
void InboundAudioStream::clearBuffer() {
|
||||
|
@ -72,8 +82,11 @@ void InboundAudioStream::clearBuffer() {
|
|||
_currentJitterBufferFrames = 0;
|
||||
}
|
||||
|
||||
int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
|
||||
return _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t));
|
||||
void InboundAudioStream::perSecondCallbackForUpdatingStats() {
|
||||
_incomingSequenceNumberStats.pushStatsToHistory();
|
||||
_timeGapStatsForDesiredCalcOnTooManyStarves.currentIntervalComplete();
|
||||
_timeGapStatsForDesiredReduction.currentIntervalComplete();
|
||||
_timeGapStatsForStatsPacket.currentIntervalComplete();
|
||||
}
|
||||
|
||||
int InboundAudioStream::parseData(const QByteArray& packet) {
|
||||
|
@ -83,36 +96,51 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
|
|||
|
||||
// parse header
|
||||
int numBytesHeader = numBytesForPacketHeader(packet);
|
||||
const char* sequenceAt = packet.constData() + numBytesHeader;
|
||||
const char* dataAt = packet.constData() + numBytesHeader;
|
||||
int readBytes = numBytesHeader;
|
||||
|
||||
// parse sequence number and track it
|
||||
quint16 sequence = *(reinterpret_cast<const quint16*>(sequenceAt));
|
||||
quint16 sequence = *(reinterpret_cast<const quint16*>(dataAt));
|
||||
dataAt += sizeof(quint16);
|
||||
readBytes += sizeof(quint16);
|
||||
SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence, senderUUID);
|
||||
|
||||
frameReceivedUpdateTimingStats();
|
||||
packetReceivedUpdateTimingStats();
|
||||
|
||||
// TODO: handle generalized silent packet here?????
|
||||
int networkSamples;
|
||||
|
||||
// parse the info after the seq number and before the audio data.(the stream properties)
|
||||
int numAudioSamples;
|
||||
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), numAudioSamples);
|
||||
if (packetType == PacketTypeSilentAudioFrame) {
|
||||
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
|
||||
readBytes += sizeof(quint16);
|
||||
networkSamples = (int)numSilentSamples;
|
||||
} else {
|
||||
// parse the info after the seq number and before the audio data (the stream properties)
|
||||
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), networkSamples);
|
||||
}
|
||||
|
||||
// handle this packet based on its arrival status.
|
||||
// For now, late packets are ignored. It may be good in the future to insert the late audio frame
|
||||
// into the ring buffer to fill in the missing frame if it hasn't been mixed yet.
|
||||
switch (arrivalInfo._status) {
|
||||
case SequenceNumberStats::Early: {
|
||||
// Packet is early; write droppable silent samples for each of the skipped packets.
|
||||
// NOTE: we assume that each dropped packet contains the same number of samples
|
||||
// as the packet we just received.
|
||||
int packetsDropped = arrivalInfo._seqDiffFromExpected;
|
||||
writeSamplesForDroppedPackets(packetsDropped * numAudioSamples);
|
||||
writeSamplesForDroppedPackets(packetsDropped * networkSamples);
|
||||
|
||||
// fall through to OnTime case
|
||||
}
|
||||
case SequenceNumberStats::OnTime: {
|
||||
readBytes += parseAudioData(packetType, packet.mid(readBytes), numAudioSamples);
|
||||
// Packet is on time; parse its data to the ringbuffer
|
||||
if (packetType == PacketTypeSilentAudioFrame) {
|
||||
writeDroppableSilentSamples(networkSamples);
|
||||
} else {
|
||||
readBytes += parseAudioData(packetType, packet.mid(readBytes), networkSamples);
|
||||
}
|
||||
break;
|
||||
}
|
||||
default: {
|
||||
// For now, late packets are ignored. It may be good in the future to insert the late audio packet data
|
||||
// into the ring buffer to fill in the missing frame if it hasn't been mixed yet.
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -139,6 +167,43 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
|
|||
return readBytes;
|
||||
}
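
In prose, the parsing path above is: read the sequence number, classify the packet as early, on-time, or late, backfill the gap implied by an early packet, then either write droppable silence (PacketTypeSilentAudioFrame) or hand the payload to parseAudioData(). A compressed sketch of that decision flow, with the class internals replaced by stand-in callbacks (names and signatures here are illustrative, not the library's):

#include <functional>

// Illustrative control flow only; the real method also tracks timing stats,
// handles refills after starves, and updates _framesAvailableStat.
enum class Arrival { Early, OnTime, Late };

int handleAudioPacket(Arrival status, bool isSilentFrame, int networkSamples, int packetsDropped,
                      const std::function<int(int)>& writeSamplesForDroppedPackets,
                      const std::function<int(int)>& writeDroppableSilentSamples,
                      const std::function<int(int)>& parseAudioData) {
    int samplesWritten = 0;
    switch (status) {
        case Arrival::Early:
            // assume each missing packet carried the same number of samples as this one
            samplesWritten += writeSamplesForDroppedPackets(packetsDropped * networkSamples);
            // fall through: an early packet is still usable
        case Arrival::OnTime:
            samplesWritten += isSilentFrame ? writeDroppableSilentSamples(networkSamples)
                                            : parseAudioData(networkSamples);
            break;
        case Arrival::Late:
            break; // late packets are currently discarded
    }
    return samplesWritten;
}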
|
||||
|
||||
int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
|
||||
// mixed audio packets do not have any info between the seq num and the audio data.
|
||||
numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t);
|
||||
return 0;
|
||||
}
|
||||
|
||||
int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
|
||||
return _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t));
|
||||
}
|
||||
|
||||
int InboundAudioStream::writeDroppableSilentSamples(int silentSamples) {
|
||||
// calculate how many silent frames we should drop.
|
||||
int samplesPerFrame = _ringBuffer.getNumFrameSamples();
|
||||
int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
|
||||
int numSilentFramesToDrop = 0;
|
||||
|
||||
if (silentSamples >= samplesPerFrame && _currentJitterBufferFrames > desiredJitterBufferFramesPlusPadding) {
|
||||
|
||||
// our avg jitter buffer size exceeds its desired value, so ignore some silent
|
||||
// frames to get that size as close to desired as possible
|
||||
int numSilentFramesToDropDesired = _currentJitterBufferFrames - desiredJitterBufferFramesPlusPadding;
|
||||
int numSilentFramesReceived = silentSamples / samplesPerFrame;
|
||||
numSilentFramesToDrop = std::min(numSilentFramesToDropDesired, numSilentFramesReceived);
|
||||
|
||||
// don't reset _currentJitterBufferFrames here; we want to be able to drop further silent frames
|
||||
// without waiting for _framesAvailableStat to fill up to 10s of samples.
|
||||
_currentJitterBufferFrames -= numSilentFramesToDrop;
|
||||
_silentFramesDropped += numSilentFramesToDrop;
|
||||
|
||||
_framesAvailableStat.reset();
|
||||
}
|
||||
|
||||
int ret = _ringBuffer.addSilentSamples(silentSamples - numSilentFramesToDrop * samplesPerFrame);
|
||||
|
||||
return ret;
|
||||
}
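
The dropping rule above is plain arithmetic: if the time-weighted average of the buffer length sits above the desired length plus padding, whole silent frames are discarded until either the excess or the received silence runs out. A small worked sketch of just that calculation (the values in main are made up for illustration):

#include <algorithm>
#include <iostream>

int silentFramesToDrop(int silentSamples, int samplesPerFrame,
                       int currentJitterBufferFrames, int desiredJitterBufferFrames) {
    const int DESIRED_JITTER_BUFFER_FRAMES_PADDING = 1; // same padding constant as above
    int target = desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
    if (silentSamples < samplesPerFrame || currentJitterBufferFrames <= target) {
        return 0; // nothing to trim, or not even one full silent frame received
    }
    int wantToDrop = currentJitterBufferFrames - target;   // excess over the padded target
    int received = silentSamples / samplesPerFrame;        // whole silent frames received
    return std::min(wantToDrop, received);
}

int main() {
    // buffer averages 8 frames, target is 3 + 1 padding, and 2 full frames of
    // silence just arrived: we can only drop the 2 frames we actually received.
    std::cout << silentFramesToDrop(2 * 480, 480, 8, 3) << "\n"; // prints 2
    return 0;
}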
|
||||
|
||||
int InboundAudioStream::popSamples(int maxSamples, bool allOrNothing, bool starveIfNoSamplesPopped) {
|
||||
int samplesPopped = 0;
|
||||
int samplesAvailable = _ringBuffer.samplesAvailable();
|
||||
|
@ -216,12 +281,61 @@ void InboundAudioStream::framesAvailableChanged() {
|
|||
}
|
||||
|
||||
void InboundAudioStream::setToStarved() {
|
||||
_isStarved = true;
|
||||
_consecutiveNotMixedCount = 0;
|
||||
_starveCount++;
|
||||
// if we have more than the desired frames when setToStarved() is called, then we'll immediately
|
||||
// be considered refilled. in that case, there's no need to set _isStarved to true.
|
||||
_isStarved = (_ringBuffer.framesAvailable() < _desiredJitterBufferFrames);
|
||||
|
||||
// record the time of this starve in the starve history
|
||||
quint64 now = usecTimestampNow();
|
||||
_starveHistory.insert(now);
|
||||
|
||||
if (_dynamicJitterBuffers) {
|
||||
// dynamic jitter buffers are enabled. check if this starve put us over the window
|
||||
// starve threshold
|
||||
quint64 windowEnd = now - _starveHistoryWindowSeconds * USECS_PER_SECOND;
|
||||
RingBufferHistory<quint64>::Iterator starvesIterator = _starveHistory.begin();
|
||||
RingBufferHistory<quint64>::Iterator end = _starveHistory.end();
|
||||
int starvesInWindow = 1;
|
||||
do {
|
||||
++starvesIterator;
|
||||
if (*starvesIterator < windowEnd) {
|
||||
break;
|
||||
}
|
||||
starvesInWindow++;
|
||||
} while (starvesIterator != end);
|
||||
|
||||
// this starve put us over the starve threshold. update _desiredJitterBufferFrames to
|
||||
// value determined by window A.
|
||||
if (starvesInWindow >= _starveThreshold) {
|
||||
int calculatedJitterBufferFrames;
|
||||
if (_useStDevForJitterCalc) {
|
||||
calculatedJitterBufferFrames = _calculatedJitterBufferFramesUsingStDev;
|
||||
} else {
|
||||
// we don't know when the next packet will arrive, so it's possible the gap between the last packet and the
|
||||
// next packet will exceed the max time gap in the window. If the time since the last packet has already exceeded
|
||||
// the window max gap, then we should use that value to calculate desired frames.
|
||||
int framesSinceLastPacket = ceilf((float)(now - _lastPacketReceivedTime) / (float)BUFFER_SEND_INTERVAL_USECS);
|
||||
calculatedJitterBufferFrames = std::max(_calculatedJitterBufferFramesUsingMaxGap, framesSinceLastPacket);
|
||||
}
|
||||
// make sure _desiredJitterBufferFrames does not become lower here
|
||||
if (calculatedJitterBufferFrames >= _desiredJitterBufferFrames) {
|
||||
_desiredJitterBufferFrames = calculatedJitterBufferFrames;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
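
The starve handling above boils down to: remember when each starve happened, count how many fall inside the last N seconds, and only raise the desired jitter buffer size once that count reaches the threshold. A standalone sketch of the windowed count, using a plain container in place of RingBufferHistory (illustrative only):

#include <deque>
#include <cstdint>
#include <iostream>

typedef uint64_t usecs;

// Count starves newer than (now - windowSeconds); older entries are pruned as we go.
int starvesInWindow(std::deque<usecs>& starveHistory, usecs now, int windowSeconds) {
    const usecs USECS_PER_SECOND = 1000000ULL;
    usecs windowStart = now - (usecs)windowSeconds * USECS_PER_SECOND;
    while (!starveHistory.empty() && starveHistory.front() < windowStart) {
        starveHistory.pop_front();  // this starve fell out of the window
    }
    return (int)starveHistory.size();
}

int main() {
    std::deque<usecs> history;
    usecs now = 100 * 1000000ULL;
    history.push_back(now - 60 * 1000000ULL);  // outside a 50 s window
    history.push_back(now - 20 * 1000000ULL);
    history.push_back(now);                    // the starve that just happened
    // with the default threshold of 3, two starves in the window is not yet enough
    std::cout << starvesInWindow(history, now, 50) << "\n"; // prints 2
    return 0;
}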
|
||||
|
||||
void InboundAudioStream::setSettings(const Settings& settings) {
|
||||
setMaxFramesOverDesired(settings._maxFramesOverDesired);
|
||||
setDynamicJitterBuffers(settings._dynamicJitterBuffers);
|
||||
setStaticDesiredJitterBufferFrames(settings._staticDesiredJitterBufferFrames);
|
||||
setUseStDevForJitterCalc(settings._useStDevForJitterCalc);
|
||||
setWindowStarveThreshold(settings._windowStarveThreshold);
|
||||
setWindowSecondsForDesiredCalcOnTooManyStarves(settings._windowSecondsForDesiredCalcOnTooManyStarves);
|
||||
setWindowSecondsForDesiredReduction(settings._windowSecondsForDesiredReduction);
|
||||
setRepetitionWithFade(settings._repetitionWithFade);
|
||||
}
|
||||
|
||||
void InboundAudioStream::setDynamicJitterBuffers(bool dynamicJitterBuffers) {
|
||||
|
@ -229,6 +343,7 @@ void InboundAudioStream::setDynamicJitterBuffers(bool dynamicJitterBuffers) {
|
|||
_desiredJitterBufferFrames = _staticDesiredJitterBufferFrames;
|
||||
} else {
|
||||
if (!_dynamicJitterBuffers) {
|
||||
// if we're enabling dynamic jitter buffer frames, start desired frames at 1
|
||||
_desiredJitterBufferFrames = 1;
|
||||
}
|
||||
}
|
||||
|
@ -242,90 +357,102 @@ void InboundAudioStream::setStaticDesiredJitterBufferFrames(int staticDesiredJit
|
|||
}
|
||||
}
|
||||
|
||||
void InboundAudioStream::setWindowSecondsForDesiredCalcOnTooManyStarves(int windowSecondsForDesiredCalcOnTooManyStarves) {
|
||||
_timeGapStatsForDesiredCalcOnTooManyStarves.setWindowIntervals(windowSecondsForDesiredCalcOnTooManyStarves);
|
||||
_starveHistoryWindowSeconds = windowSecondsForDesiredCalcOnTooManyStarves;
|
||||
}
|
||||
|
||||
void InboundAudioStream::setWindowSecondsForDesiredReduction(int windowSecondsForDesiredReduction) {
|
||||
_timeGapStatsForDesiredReduction.setWindowIntervals(windowSecondsForDesiredReduction);
|
||||
}
|
||||
|
||||
|
||||
int InboundAudioStream::clampDesiredJitterBufferFramesValue(int desired) const {
|
||||
const int MIN_FRAMES_DESIRED = 0;
|
||||
const int MAX_FRAMES_DESIRED = _ringBuffer.getFrameCapacity();
|
||||
return glm::clamp(desired, MIN_FRAMES_DESIRED, MAX_FRAMES_DESIRED);
|
||||
}
|
||||
|
||||
void InboundAudioStream::frameReceivedUpdateTimingStats() {
|
||||
|
||||
void InboundAudioStream::packetReceivedUpdateTimingStats() {
|
||||
|
||||
// update our timegap stats and desired jitter buffer frames if necessary
|
||||
// discard the first few packets we receive, since they usually have gaps that aren't representative of normal jitter
|
||||
const int NUM_INITIAL_PACKETS_DISCARD = 3;
|
||||
quint64 now = usecTimestampNow();
|
||||
if (_incomingSequenceNumberStats.getReceived() > NUM_INITIAL_PACKETS_DISCARD) {
|
||||
quint64 gap = now - _lastFrameReceivedTime;
|
||||
_interframeTimeGapStatsForStatsPacket.update(gap);
|
||||
quint64 gap = now - _lastPacketReceivedTime;
|
||||
_timeGapStatsForStatsPacket.update(gap);
|
||||
|
||||
const float USECS_PER_FRAME = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * USECS_PER_SECOND / (float)SAMPLE_RATE;
|
||||
// update all stats used for desired frames calculations under dynamic jitter buffer mode
|
||||
_timeGapStatsForDesiredCalcOnTooManyStarves.update(gap);
|
||||
_stdevStatsForDesiredCalcOnTooManyStarves.addValue(gap);
|
||||
_timeGapStatsForDesiredReduction.update(gap);
|
||||
|
||||
// update stats for Freddy's method of jitter calc
|
||||
_interframeTimeGapStatsForJitterCalc.update(gap);
|
||||
if (_interframeTimeGapStatsForJitterCalc.getNewStatsAvailableFlag()) {
|
||||
_calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_interframeTimeGapStatsForJitterCalc.getWindowMax() / USECS_PER_FRAME);
|
||||
_interframeTimeGapStatsForJitterCalc.clearNewStatsAvailableFlag();
|
||||
|
||||
if (_dynamicJitterBuffers && !_useStDevForJitterCalc) {
|
||||
_desiredJitterBufferFrames = clampDesiredJitterBufferFramesValue(_calculatedJitterBufferFramesUsingMaxGap);
|
||||
}
|
||||
if (_timeGapStatsForDesiredCalcOnTooManyStarves.getNewStatsAvailableFlag()) {
|
||||
_calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_timeGapStatsForDesiredCalcOnTooManyStarves.getWindowMax()
|
||||
/ (float)BUFFER_SEND_INTERVAL_USECS);
|
||||
_timeGapStatsForDesiredCalcOnTooManyStarves.clearNewStatsAvailableFlag();
|
||||
}
|
||||
|
||||
// update stats for Philip's method of jitter calc
|
||||
_stdev.addValue(gap);
|
||||
const int STANDARD_DEVIATION_SAMPLE_COUNT = 500;
|
||||
if (_stdev.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) {
|
||||
if (_stdevStatsForDesiredCalcOnTooManyStarves.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) {
|
||||
const float NUM_STANDARD_DEVIATIONS = 3.0f;
|
||||
_calculatedJitterBufferFramesUsingStDev = (int)ceilf(NUM_STANDARD_DEVIATIONS * _stdev.getStDev() / USECS_PER_FRAME);
|
||||
_stdev.reset();
|
||||
_calculatedJitterBufferFramesUsingStDev = ceilf(NUM_STANDARD_DEVIATIONS * _stdevStatsForDesiredCalcOnTooManyStarves.getStDev()
|
||||
/ (float)BUFFER_SEND_INTERVAL_USECS);
|
||||
_stdevStatsForDesiredCalcOnTooManyStarves.reset();
|
||||
}
|
||||
|
||||
if (_dynamicJitterBuffers && _useStDevForJitterCalc) {
|
||||
_desiredJitterBufferFrames = clampDesiredJitterBufferFramesValue(_calculatedJitterBufferFramesUsingStDev);
|
||||
if (_dynamicJitterBuffers) {
|
||||
// if the max gap in window B (_timeGapStatsForDesiredReduction) corresponds to a smaller number of frames than _desiredJitterBufferFrames,
|
||||
// then reduce _desiredJitterBufferFrames to that number of frames.
|
||||
if (_timeGapStatsForDesiredReduction.getNewStatsAvailableFlag() && _timeGapStatsForDesiredReduction.isWindowFilled()) {
|
||||
int calculatedJitterBufferFrames = ceilf((float)_timeGapStatsForDesiredReduction.getWindowMax() / (float)BUFFER_SEND_INTERVAL_USECS);
|
||||
if (calculatedJitterBufferFrames < _desiredJitterBufferFrames) {
|
||||
_desiredJitterBufferFrames = calculatedJitterBufferFrames;
|
||||
}
|
||||
_timeGapStatsForDesiredReduction.clearNewStatsAvailableFlag();
|
||||
}
|
||||
}
|
||||
}
|
||||
_lastFrameReceivedTime = now;
|
||||
|
||||
_lastPacketReceivedTime = now;
|
||||
}
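
Both estimators above reduce to dividing an observed inter-packet gap by the nominal frame interval: the max-gap method (Freddy's) uses the largest gap in the recent window, the standard-deviation method (Philip's) uses three standard deviations of the gap distribution. A hedged sketch of the two formulas; the example numbers in the comment assume a frame interval around 10.7 ms purely for illustration, the real value is whatever BUFFER_SEND_INTERVAL_USECS works out to:

#include <cmath>

// Desired frames from the max gap observed in the window (Freddy's method).
int desiredFramesFromMaxGap(float windowMaxGapUsecs, float frameIntervalUsecs) {
    return (int)ceilf(windowMaxGapUsecs / frameIntervalUsecs);
}

// Desired frames from the gap standard deviation (Philip's method).
int desiredFramesFromStDev(float gapStDevUsecs, float frameIntervalUsecs) {
    const float NUM_STANDARD_DEVIATIONS = 3.0f;  // same constant as above
    return (int)ceilf(NUM_STANDARD_DEVIATIONS * gapStDevUsecs / frameIntervalUsecs);
}

// e.g. with ~10,667 us frames, a 35,000 us worst-case gap asks for ceil(3.28) = 4 frames,
// while a 6,000 us gap standard deviation asks for ceil(18,000 / 10,667) = 2 frames.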
|
||||
|
||||
int InboundAudioStream::writeDroppableSilentSamples(int numSilentSamples) {
|
||||
|
||||
// calculate how many silent frames we should drop.
|
||||
int samplesPerFrame = _ringBuffer.getNumFrameSamples();
|
||||
int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
|
||||
int numSilentFramesToDrop = 0;
|
||||
|
||||
if (numSilentSamples >= samplesPerFrame && _currentJitterBufferFrames > desiredJitterBufferFramesPlusPadding) {
|
||||
|
||||
// our avg jitter buffer size exceeds its desired value, so ignore some silent
|
||||
// frames to get that size as close to desired as possible
|
||||
int numSilentFramesToDropDesired = _currentJitterBufferFrames - desiredJitterBufferFramesPlusPadding;
|
||||
int numSilentFramesReceived = numSilentSamples / samplesPerFrame;
|
||||
numSilentFramesToDrop = std::min(numSilentFramesToDropDesired, numSilentFramesReceived);
|
||||
|
||||
// don't reset _currentJitterBufferFrames here; we want to be able to drop further silent frames
|
||||
// without waiting for _framesAvailableStat to fill up to 10s of samples.
|
||||
_currentJitterBufferFrames -= numSilentFramesToDrop;
|
||||
_silentFramesDropped += numSilentFramesToDrop;
|
||||
|
||||
_framesAvailableStat.reset();
|
||||
int InboundAudioStream::writeSamplesForDroppedPackets(int networkSamples) {
|
||||
if (_repetitionWithFade) {
|
||||
return writeLastFrameRepeatedWithFade(networkSamples);
|
||||
}
|
||||
|
||||
return _ringBuffer.addSilentFrame(numSilentSamples - numSilentFramesToDrop * samplesPerFrame);
|
||||
return writeDroppableSilentSamples(networkSamples);
|
||||
}
|
||||
|
||||
int InboundAudioStream::writeSamplesForDroppedPackets(int numSamples) {
|
||||
return writeDroppableSilentSamples(numSamples);
|
||||
int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) {
|
||||
AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten();
|
||||
int frameSize = _ringBuffer.getNumFrameSamples();
|
||||
int samplesToWrite = samples;
|
||||
int indexOfRepeat = 0;
|
||||
do {
|
||||
int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize);
|
||||
float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
|
||||
if (fade == 1.0f) {
|
||||
samplesToWrite -= _ringBuffer.writeSamples(frameToRepeat, samplesToWriteThisIteration);
|
||||
} else {
|
||||
samplesToWrite -= _ringBuffer.writeSamplesWithFade(frameToRepeat, samplesToWriteThisIteration, fade);
|
||||
}
|
||||
indexOfRepeat++;
|
||||
} while (samplesToWrite > 0);
|
||||
|
||||
return samples;
|
||||
}
|
||||
|
||||
AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
|
||||
AudioStreamStats streamStats;
|
||||
|
||||
streamStats._timeGapMin = _interframeTimeGapStatsForStatsPacket.getMin();
|
||||
streamStats._timeGapMax = _interframeTimeGapStatsForStatsPacket.getMax();
|
||||
streamStats._timeGapAverage = _interframeTimeGapStatsForStatsPacket.getAverage();
|
||||
streamStats._timeGapWindowMin = _interframeTimeGapStatsForStatsPacket.getWindowMin();
|
||||
streamStats._timeGapWindowMax = _interframeTimeGapStatsForStatsPacket.getWindowMax();
|
||||
streamStats._timeGapWindowAverage = _interframeTimeGapStatsForStatsPacket.getWindowAverage();
|
||||
streamStats._timeGapMin = _timeGapStatsForStatsPacket.getMin();
|
||||
streamStats._timeGapMax = _timeGapStatsForStatsPacket.getMax();
|
||||
streamStats._timeGapAverage = _timeGapStatsForStatsPacket.getAverage();
|
||||
streamStats._timeGapWindowMin = _timeGapStatsForStatsPacket.getWindowMin();
|
||||
streamStats._timeGapWindowMax = _timeGapStatsForStatsPacket.getWindowMax();
|
||||
streamStats._timeGapWindowAverage = _timeGapStatsForStatsPacket.getWindowAverage();
|
||||
|
||||
streamStats._framesAvailable = _ringBuffer.framesAvailable();
|
||||
streamStats._framesAvailableAverage = _framesAvailableStat.getAverage();
|
||||
|
@ -341,7 +468,24 @@ AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
|
|||
return streamStats;
|
||||
}
|
||||
|
||||
AudioStreamStats InboundAudioStream::updateSeqHistoryAndGetAudioStreamStats() {
|
||||
_incomingSequenceNumberStats.pushStatsToHistory();
|
||||
return getAudioStreamStats();
|
||||
float calculateRepeatedFrameFadeFactor(int indexOfRepeat) {
|
||||
// fade factor scheme is from this paper:
|
||||
// http://inst.eecs.berkeley.edu/~ee290t/sp04/lectures/packet_loss_recov_paper11.pdf
|
||||
|
||||
const float INITIAL_MSECS_NO_FADE = 20.0f;
|
||||
const float MSECS_FADE_TO_ZERO = 320.0f;
|
||||
|
||||
const float INITIAL_FRAMES_NO_FADE = INITIAL_MSECS_NO_FADE * (float)USECS_PER_MSEC / (float)BUFFER_SEND_INTERVAL_USECS;
|
||||
const float FRAMES_FADE_TO_ZERO = MSECS_FADE_TO_ZERO * (float)USECS_PER_MSEC / (float)BUFFER_SEND_INTERVAL_USECS;
|
||||
|
||||
const float SAMPLE_RANGE = std::numeric_limits<int16_t>::max();
|
||||
|
||||
if (indexOfRepeat <= INITIAL_FRAMES_NO_FADE) {
|
||||
return 1.0f;
|
||||
} else if (indexOfRepeat <= INITIAL_FRAMES_NO_FADE + FRAMES_FADE_TO_ZERO) {
|
||||
return pow(SAMPLE_RANGE, -(indexOfRepeat - INITIAL_FRAMES_NO_FADE) / FRAMES_FADE_TO_ZERO);
|
||||
|
||||
//return 1.0f - ((indexOfRepeat - INITIAL_FRAMES_NO_FADE) / FRAMES_FADE_TO_ZERO);
|
||||
}
|
||||
return 0.0f;
|
||||
}
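
Concretely, the factor above stays at 1.0 for roughly the first 20 ms worth of repeats, then decays exponentially so that it reaches 1/32767 (effectively silence) after a further 320 ms. A standalone sketch of the same curve, with the frame interval passed in explicitly since its exact value depends on the configured sample rate; the 10,667 us figure in main is an assumption for illustration:

#include <cmath>
#include <cstdint>
#include <limits>
#include <cstdio>

// Same shape as calculateRepeatedFrameFadeFactor, parameterized by the frame interval (usecs).
float repeatedFrameFade(int indexOfRepeat, float frameIntervalUsecs) {
    const float INITIAL_MSECS_NO_FADE = 20.0f;
    const float MSECS_FADE_TO_ZERO = 320.0f;
    const float framesNoFade = INITIAL_MSECS_NO_FADE * 1000.0f / frameIntervalUsecs;
    const float framesFadeToZero = MSECS_FADE_TO_ZERO * 1000.0f / frameIntervalUsecs;
    const float SAMPLE_RANGE = std::numeric_limits<int16_t>::max();

    if (indexOfRepeat <= framesNoFade) {
        return 1.0f;
    } else if (indexOfRepeat <= framesNoFade + framesFadeToZero) {
        // exponential decay: reaches 1/SAMPLE_RANGE by the end of the fade window
        return powf(SAMPLE_RANGE, -(indexOfRepeat - framesNoFade) / framesFadeToZero);
    }
    return 0.0f;
}

int main() {
    // repeat #1 is full volume, repeat #16 is heavily attenuated, repeat #40 is silent
    printf("%f %f %f\n", repeatedFrameFade(1, 10667.0f),
                         repeatedFrameFade(16, 10667.0f),
                         repeatedFrameFade(40, 10667.0f));
    return 0;
}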
@ -22,43 +22,84 @@
|
|||
#include "TimeWeightedAvg.h"
|
||||
|
||||
// This adds some number of frames to the desired jitter buffer frames target we use when we're dropping frames.
|
||||
// The larger this value is, the less aggressive we are about reducing the jitter buffer length.
|
||||
// Setting this to 0 will try to get the jitter buffer to be exactly _desiredJitterBufferFrames long when dropping frames,
|
||||
// The larger this value is, the fewer frames we drop when attempting to reduce the jitter buffer length.
|
||||
// Setting this to 0 will try to get the jitter buffer to be exactly _desiredJitterBufferFrames when dropping frames,
|
||||
// which could lead to a starve soon after.
|
||||
const int DESIRED_JITTER_BUFFER_FRAMES_PADDING = 1;
|
||||
|
||||
// the time gaps stats for _desiredJitterBufferFrames calculation
|
||||
// will recalculate the max for the past 5000 samples every 500 samples
|
||||
const int TIME_GAPS_FOR_JITTER_CALC_INTERVAL_SAMPLES = 500;
|
||||
const int TIME_GAPS_FOR_JITTER_CALC_WINDOW_INTERVALS = 10;
|
||||
|
||||
// the time gap stats for constructing AudioStreamStats will
|
||||
// recalculate min/max/avg every ~1 second for the past ~30 seconds of time gap data
|
||||
const int TIME_GAPS_FOR_STATS_PACKET_INTERVAL_SAMPLES = USECS_PER_SECOND / BUFFER_SEND_INTERVAL_USECS;
|
||||
const int TIME_GAPS_FOR_STATS_PACKET_WINDOW_INTERVALS = 30;
|
||||
// this controls the length of the window for stats used in the stats packet (not the stats used in
|
||||
// _desiredJitterBufferFrames calculation)
|
||||
const int STATS_FOR_STATS_PACKET_WINDOW_SECONDS = 30;
|
||||
|
||||
// this controls the window size of the time-weighted avg of frames available. Every time the window fills up,
|
||||
// _currentJitterBufferFrames is updated with the time-weighted avg and the running time-weighted avg is reset.
|
||||
const int FRAMES_AVAILABLE_STAT_WINDOW_USECS = 2 * USECS_PER_SECOND;
|
||||
|
||||
// the internal history buffer of the incoming seq stats will cover 30s to calculate
|
||||
// packet loss % over last 30s
|
||||
const int INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS = 30;
|
||||
|
||||
const int INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
|
||||
const int FRAMES_AVAILABLE_STAT_WINDOW_USECS = 10 * USECS_PER_SECOND;
|
||||
|
||||
// default values for members of the Settings struct
|
||||
const int DEFAULT_MAX_FRAMES_OVER_DESIRED = 10;
|
||||
const int DEFAULT_DESIRED_JITTER_BUFFER_FRAMES = 1;
|
||||
const bool DEFAULT_DYNAMIC_JITTER_BUFFERS = true;
|
||||
const int DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES = 1;
|
||||
const bool DEFAULT_USE_STDEV_FOR_JITTER_CALC = false;
|
||||
const int DEFAULT_WINDOW_STARVE_THRESHOLD = 3;
|
||||
const int DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES = 50;
|
||||
const int DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION = 10;
|
||||
const bool DEFAULT_REPETITION_WITH_FADE = true;
|
||||
|
||||
class InboundAudioStream : public NodeData {
|
||||
Q_OBJECT
|
||||
public:
|
||||
InboundAudioStream(int numFrameSamples, int numFramesCapacity,
|
||||
bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired,
|
||||
bool useStDevForJitterCalc = false);
|
||||
class Settings {
|
||||
public:
|
||||
Settings()
|
||||
: _maxFramesOverDesired(DEFAULT_MAX_FRAMES_OVER_DESIRED),
|
||||
_dynamicJitterBuffers(DEFAULT_DYNAMIC_JITTER_BUFFERS),
|
||||
_staticDesiredJitterBufferFrames(DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES),
|
||||
_useStDevForJitterCalc(DEFAULT_USE_STDEV_FOR_JITTER_CALC),
|
||||
_windowStarveThreshold(DEFAULT_WINDOW_STARVE_THRESHOLD),
|
||||
_windowSecondsForDesiredCalcOnTooManyStarves(DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES),
|
||||
_windowSecondsForDesiredReduction(DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION),
|
||||
_repetitionWithFade(DEFAULT_REPETITION_WITH_FADE)
|
||||
{}
|
||||
|
||||
Settings(int maxFramesOverDesired, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames,
|
||||
bool useStDevForJitterCalc, int windowStarveThreshold, int windowSecondsForDesiredCalcOnTooManyStarves,
|
||||
int windowSecondsForDesiredReduction, bool repetitionWithFade)
|
||||
: _maxFramesOverDesired(maxFramesOverDesired),
|
||||
_dynamicJitterBuffers(dynamicJitterBuffers),
|
||||
_staticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames),
|
||||
_useStDevForJitterCalc(useStDevForJitterCalc),
|
||||
_windowStarveThreshold(windowStarveThreshold),
|
||||
_windowSecondsForDesiredCalcOnTooManyStarves(windowSecondsForDesiredCalcOnTooManyStarves),
|
||||
_windowSecondsForDesiredReduction(windowSecondsForDesiredReduction),
|
||||
_repetitionWithFade(repetitionWithFade)
|
||||
{}
|
||||
|
||||
// max number of frames over desired in the ringbuffer.
|
||||
int _maxFramesOverDesired;
|
||||
|
||||
// if false, _desiredJitterBufferFrames will always be _staticDesiredJitterBufferFrames. Otherwise,
|
||||
// either Fred's or Philip's method will be used to calculate _desiredJitterBufferFrames based on packet timegaps.
|
||||
bool _dynamicJitterBuffers;
|
||||
|
||||
// settings for static jitter buffer mode
|
||||
int _staticDesiredJitterBufferFrames;
|
||||
|
||||
// settings for dynamic jitter buffer mode
|
||||
bool _useStDevForJitterCalc; // if true, philip's method is used. otherwise, fred's method is used.
|
||||
int _windowStarveThreshold;
|
||||
int _windowSecondsForDesiredCalcOnTooManyStarves;
|
||||
int _windowSecondsForDesiredReduction;
|
||||
|
||||
// if true, the prev frame will be repeated (fading to silence) for dropped frames.
|
||||
// otherwise, silence will be inserted.
|
||||
bool _repetitionWithFade;
|
||||
};
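
Downstream code can either take the defaults above or override individual fields before handing a Settings to a stream constructor. A brief usage sketch, assuming the MixedAudioStream constructor declared later in this change and an arbitrary ring capacity:

#include "InboundAudioStream.h"
#include "MixedAudioStream.h"

// Sketch: start from the DEFAULT_* values, then pin the jitter buffer to a fixed
// 3-frame length for a stream that prefers predictable latency over adaptivity.
void makeFixedLatencyStream() {
    InboundAudioStream::Settings settings;            // defaults from the constants above
    settings._dynamicJitterBuffers = false;
    settings._staticDesiredJitterBufferFrames = 3;
    settings._repetitionWithFade = false;             // plain silence for dropped packets

    const int RING_FRAME_CAPACITY = 100;              // illustrative capacity
    MixedAudioStream stream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, RING_FRAME_CAPACITY, settings);
    (void)stream;
}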
|
||||
|
||||
public:
|
||||
InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings);
|
||||
|
||||
void reset();
|
||||
void resetStats();
|
||||
virtual void resetStats();
|
||||
void clearBuffer();
|
||||
|
||||
virtual int parseData(const QByteArray& packet);
|
||||
|
@ -72,14 +113,18 @@ public:
|
|||
|
||||
void setToStarved();
|
||||
|
||||
|
||||
void setDynamicJitterBuffers(bool dynamicJitterBuffers);
|
||||
void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames);
|
||||
|
||||
/// this function should be called once per second to ensure the seq num stats history spans ~30 seconds
|
||||
AudioStreamStats updateSeqHistoryAndGetAudioStreamStats();
|
||||
void setSettings(const Settings& settings);
|
||||
|
||||
void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; }
|
||||
void setDynamicJitterBuffers(bool setDynamicJitterBuffers);
|
||||
void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames);
|
||||
void setUseStDevForJitterCalc(bool useStDevForJitterCalc) { _useStDevForJitterCalc = useStDevForJitterCalc; }
|
||||
void setWindowStarveThreshold(int windowStarveThreshold) { _starveThreshold = windowStarveThreshold; }
|
||||
void setWindowSecondsForDesiredCalcOnTooManyStarves(int windowSecondsForDesiredCalcOnTooManyStarves);
|
||||
void setWindowSecondsForDesiredReduction(int windowSecondsForDesiredReduction);
|
||||
void setRepetitionWithFade(bool repetitionWithFade) { _repetitionWithFade = repetitionWithFade; }
|
||||
|
||||
|
||||
virtual AudioStreamStats getAudioStreamStats() const;
|
||||
|
||||
|
@ -110,11 +155,17 @@ public:
|
|||
|
||||
int getPacketsReceived() const { return _incomingSequenceNumberStats.getReceived(); }
|
||||
|
||||
public slots:
|
||||
/// This function should be called every second for all the stats to function properly. If dynamic jitter buffers
|
||||
/// is enabled, those stats are used to calculate _desiredJitterBufferFrames.
|
||||
/// If the stats are not used and dynamic jitter buffers is disabled, it's not necessary to call this function.
|
||||
void perSecondCallbackForUpdatingStats();
|
||||
|
||||
private:
|
||||
void frameReceivedUpdateTimingStats();
|
||||
void packetReceivedUpdateTimingStats();
|
||||
int clampDesiredJitterBufferFramesValue(int desired) const;
|
||||
|
||||
int writeSamplesForDroppedPackets(int numSamples);
|
||||
int writeSamplesForDroppedPackets(int networkSamples);
|
||||
|
||||
void popSamplesNoCheck(int samples);
|
||||
void framesAvailableChanged();
|
||||
|
@ -126,13 +177,19 @@ protected:
|
|||
|
||||
/// parses the info between the seq num and the audio data in the network packet and calculates
|
||||
/// how many audio samples this packet contains (used when filling in samples for dropped packets).
|
||||
virtual int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) = 0;
|
||||
/// default implementation assumes no stream properties and raw audio samples after the sequence number
|
||||
virtual int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& networkSamples);
|
||||
|
||||
/// parses the audio data in the network packet.
|
||||
/// default implementation assumes packet contains raw audio samples after stream properties
|
||||
virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples);
|
||||
virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples);
|
||||
|
||||
int writeDroppableSilentSamples(int numSilentSamples);
|
||||
/// writes silent samples to the buffer that may be dropped to reduce latency caused by the buffer
|
||||
virtual int writeDroppableSilentSamples(int silentSamples);
|
||||
|
||||
/// writes the last written frame repeatedly, gradually fading to silence.
|
||||
/// used for writing samples for dropped packets.
|
||||
virtual int writeLastFrameRepeatedWithFade(int samples);
|
||||
|
||||
protected:
|
||||
|
||||
|
@ -147,8 +204,6 @@ protected:
|
|||
// if jitter buffer is dynamic, this determines what method of calculating _desiredJitterBufferFrames
|
||||
// if true, Philip's timegap std dev calculation is used. Otherwise, Freddy's max timegap calculation is used
|
||||
bool _useStDevForJitterCalc;
|
||||
int _calculatedJitterBufferFramesUsingMaxGap;
|
||||
int _calculatedJitterBufferFramesUsingStDev;
|
||||
|
||||
int _desiredJitterBufferFrames;
|
||||
|
||||
|
@ -168,16 +223,28 @@ protected:
|
|||
|
||||
SequenceNumberStats _incomingSequenceNumberStats;
|
||||
|
||||
quint64 _lastFrameReceivedTime;
|
||||
MovingMinMaxAvg<quint64> _interframeTimeGapStatsForJitterCalc;
|
||||
StDev _stdev;
|
||||
MovingMinMaxAvg<quint64> _interframeTimeGapStatsForStatsPacket;
|
||||
|
||||
quint64 _lastPacketReceivedTime;
|
||||
MovingMinMaxAvg<quint64> _timeGapStatsForDesiredCalcOnTooManyStarves; // for Freddy's method
|
||||
int _calculatedJitterBufferFramesUsingMaxGap;
|
||||
StDev _stdevStatsForDesiredCalcOnTooManyStarves; // for Philip's method
|
||||
int _calculatedJitterBufferFramesUsingStDev; // the most recent desired frames calculated by Philip's method
|
||||
MovingMinMaxAvg<quint64> _timeGapStatsForDesiredReduction;
|
||||
|
||||
int _starveHistoryWindowSeconds;
|
||||
RingBufferHistory<quint64> _starveHistory;
|
||||
int _starveThreshold;
|
||||
|
||||
TimeWeightedAvg<int> _framesAvailableStat;
|
||||
|
||||
// this value is based on the time-weighted avg from _framesAvailableStat. it is only used for
|
||||
// this value is periodically updated with the time-weighted avg from _framesAvailableStat. it is only used for
|
||||
// dropping silent frames right now.
|
||||
int _currentJitterBufferFrames;
|
||||
|
||||
MovingMinMaxAvg<quint64> _timeGapStatsForStatsPacket;
|
||||
|
||||
bool _repetitionWithFade;
|
||||
};
|
||||
|
||||
float calculateRepeatedFrameFadeFactor(int indexOfRepeat);
|
||||
|
||||
#endif // hifi_InboundAudioStream_h
@ -19,8 +19,8 @@
|
|||
|
||||
#include "InjectedAudioStream.h"
|
||||
|
||||
InjectedAudioStream::InjectedAudioStream(const QUuid& streamIdentifier, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired) :
|
||||
PositionalAudioStream(PositionalAudioStream::Injector, false, dynamicJitterBuffer, staticDesiredJitterBufferFrames, maxFramesOverDesired),
|
||||
InjectedAudioStream::InjectedAudioStream(const QUuid& streamIdentifier, const InboundAudioStream::Settings& settings) :
|
||||
PositionalAudioStream(PositionalAudioStream::Injector, false, settings),
|
||||
_streamIdentifier(streamIdentifier),
|
||||
_radius(0.0f),
|
||||
_attenuationRatio(0)
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
|
||||
class InjectedAudioStream : public PositionalAudioStream {
|
||||
public:
|
||||
InjectedAudioStream(const QUuid& streamIdentifier, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired);
|
||||
InjectedAudioStream(const QUuid& streamIdentifier, const InboundAudioStream::Settings& settings);
|
||||
|
||||
float getRadius() const { return _radius; }
|
||||
float getAttenuationRatio() const { return _attenuationRatio; }
|
||||
|
|
|
@ -11,13 +11,7 @@
|
|||
|
||||
#include "MixedAudioStream.h"
|
||||
|
||||
MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc)
|
||||
: InboundAudioStream(numFrameSamples, numFramesCapacity, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired, useStDevForJitterCalc)
|
||||
MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings)
|
||||
: InboundAudioStream(numFrameSamples, numFramesCapacity, settings)
|
||||
{
|
||||
}
|
||||
|
||||
int MixedAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
|
||||
// mixed audio packets do not have any info between the seq num and the audio data.
|
||||
numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t);
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -17,12 +17,9 @@
|
|||
|
||||
class MixedAudioStream : public InboundAudioStream {
|
||||
public:
|
||||
MixedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc);
|
||||
MixedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings);
|
||||
|
||||
float getNextOutputFrameLoudness() const { return _ringBuffer.getNextOutputFrameLoudness(); }
|
||||
|
||||
protected:
|
||||
int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
|
||||
};
|
||||
|
||||
#endif // hifi_MixedAudioStream_h
|
||||
|
|
|
@ -11,35 +11,53 @@
|
|||
|
||||
#include "MixedProcessedAudioStream.h"
|
||||
|
||||
MixedProcessedAudioStream ::MixedProcessedAudioStream (int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc)
|
||||
: InboundAudioStream(numFrameSamples, numFramesCapacity, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired, useStDevForJitterCalc)
|
||||
static const int STEREO_FACTOR = 2;
|
||||
|
||||
MixedProcessedAudioStream::MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings)
|
||||
: InboundAudioStream(numFrameSamples, numFramesCapacity, settings)
|
||||
{
|
||||
}
|
||||
|
||||
void MixedProcessedAudioStream::outputFormatChanged(int outputFormatChannelCountTimesSampleRate) {
|
||||
_outputFormatChannelsTimesSampleRate = outputFormatChannelCountTimesSampleRate;
|
||||
int deviceOutputFrameSize = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * _outputFormatChannelsTimesSampleRate / SAMPLE_RATE;
|
||||
int deviceOutputFrameSize = networkToDeviceSamples(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO);
|
||||
_ringBuffer.resizeForFrameSize(deviceOutputFrameSize);
|
||||
}
|
||||
|
||||
int MixedProcessedAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
|
||||
// mixed audio packets do not have any info between the seq num and the audio data.
|
||||
int numNetworkSamples = packetAfterSeqNum.size() / sizeof(int16_t);
|
||||
int MixedProcessedAudioStream::writeDroppableSilentSamples(int silentSamples) {
|
||||
|
||||
int deviceSilentSamplesWritten = InboundAudioStream::writeDroppableSilentSamples(networkToDeviceSamples(silentSamples));
|
||||
|
||||
emit addedSilence(deviceToNetworkSamples(deviceSilentSamplesWritten) / STEREO_FACTOR);
|
||||
|
||||
// since numAudioSamples is used to know how many samples to add for each dropped packet before this one,
|
||||
// we want to set it to the number of device audio samples since this stream contains device audio samples, not network samples.
|
||||
const int STEREO_DIVIDER = 2;
|
||||
numAudioSamples = numNetworkSamples * _outputFormatChannelsTimesSampleRate / (STEREO_DIVIDER * SAMPLE_RATE);
|
||||
|
||||
return 0;
|
||||
return deviceSilentSamplesWritten;
|
||||
}
|
||||
|
||||
int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
|
||||
int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int samples) {
|
||||
|
||||
int deviceSamplesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(networkToDeviceSamples(samples));
|
||||
|
||||
emit addedLastFrameRepeatedWithFade(deviceToNetworkSamples(deviceSamplesWritten) / STEREO_FACTOR);
|
||||
|
||||
return deviceSamplesWritten;
|
||||
}
|
||||
|
||||
int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples) {
|
||||
|
||||
emit addedStereoSamples(packetAfterStreamProperties);
|
||||
|
||||
QByteArray outputBuffer;
|
||||
emit processSamples(packetAfterStreamProperties, outputBuffer);
|
||||
|
||||
_ringBuffer.writeData(outputBuffer.data(), outputBuffer.size());
|
||||
|
||||
return packetAfterStreamProperties.size();
|
||||
}
|
||||
|
||||
int MixedProcessedAudioStream::networkToDeviceSamples(int networkSamples) {
|
||||
return (quint64)networkSamples * (quint64)_outputFormatChannelsTimesSampleRate / (quint64)(STEREO_FACTOR * SAMPLE_RATE);
|
||||
}
|
||||
|
||||
int MixedProcessedAudioStream::deviceToNetworkSamples(int deviceSamples) {
|
||||
return (quint64)deviceSamples * (quint64)(STEREO_FACTOR * SAMPLE_RATE) / (quint64)_outputFormatChannelsTimesSampleRate;
|
||||
}
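
The two helpers above apply a single ratio in opposite directions: deviceSamples / networkSamples = _outputFormatChannelsTimesSampleRate / (STEREO_FACTOR * SAMPLE_RATE). A worked sketch with made-up numbers; a 2-channel 48 kHz output device against a 24 kHz network rate is an assumption for illustration, not something this diff states:

#include <cstdint>
#include <iostream>

// Same ratio as networkToDeviceSamples(), made standalone for the example.
int64_t networkToDevice(int64_t networkSamples, int outputChannelsTimesSampleRate, int networkSampleRate) {
    const int STEREO_FACTOR = 2; // the network stream on this path is stereo
    return networkSamples * outputChannelsTimesSampleRate / (int64_t)(STEREO_FACTOR * networkSampleRate);
}

int main() {
    // 480 stereo network samples become 960 device samples (each channel resampled 1:2)
    std::cout << networkToDevice(480, 2 * 48000, 24000) << "\n"; // prints 960
    return 0;
}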
|
||||
|
|
|
@ -14,21 +14,32 @@
|
|||
|
||||
#include "InboundAudioStream.h"
|
||||
|
||||
class Audio;
|
||||
|
||||
class MixedProcessedAudioStream : public InboundAudioStream {
|
||||
Q_OBJECT
|
||||
public:
|
||||
MixedProcessedAudioStream (int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc);
|
||||
MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings);
|
||||
|
||||
signals:
|
||||
|
||||
void addedSilence(int silentSamplesPerChannel);
|
||||
void addedLastFrameRepeatedWithFade(int samplesPerChannel);
|
||||
void addedStereoSamples(const QByteArray& samples);
|
||||
|
||||
void processSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
|
||||
|
||||
public:
|
||||
void outputFormatChanged(int outputFormatChannelCountTimesSampleRate);
|
||||
|
||||
protected:
|
||||
int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
|
||||
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples);
|
||||
int writeDroppableSilentSamples(int silentSamples);
|
||||
int writeLastFrameRepeatedWithFade(int samples);
|
||||
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples);
|
||||
|
||||
private:
|
||||
int networkToDeviceSamples(int networkSamples);
|
||||
int deviceToNetworkSamples(int deviceSamples);
|
||||
|
||||
private:
|
||||
int _outputFormatChannelsTimesSampleRate;
|
||||
|
|
|
@ -21,32 +21,41 @@
|
|||
#include <PacketHeaders.h>
|
||||
#include <UUID.h>
|
||||
|
||||
PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, bool dynamicJitterBuffers,
|
||||
int staticDesiredJitterBufferFrames, int maxFramesOverDesired) :
|
||||
PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, const InboundAudioStream::Settings& settings) :
|
||||
InboundAudioStream(isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
|
||||
AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired),
|
||||
AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, settings),
|
||||
_type(type),
|
||||
_position(0.0f, 0.0f, 0.0f),
|
||||
_orientation(0.0f, 0.0f, 0.0f, 0.0f),
|
||||
_shouldLoopbackForNode(false),
|
||||
_isStereo(isStereo),
|
||||
_lastPopOutputTrailingLoudness(0.0f),
|
||||
_lastPopOutputLoudness(0.0f),
|
||||
_listenerUnattenuatedZone(NULL)
|
||||
{
|
||||
// constant defined in AudioMixer.h. However, we don't want to include this here
|
||||
// we will soon find a better common home for these audio-related constants
|
||||
const int SAMPLE_PHASE_DELAY_AT_90 = 20;
|
||||
_filter.initialize(SAMPLE_RATE, (NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)) / 2);
|
||||
}
|
||||
|
||||
void PositionalAudioStream::updateLastPopOutputTrailingLoudness() {
|
||||
float lastPopLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput);
|
||||
void PositionalAudioStream::resetStats() {
|
||||
_lastPopOutputTrailingLoudness = 0.0f;
|
||||
_lastPopOutputLoudness = 0.0f;
|
||||
}
|
||||
|
||||
void PositionalAudioStream::updateLastPopOutputLoudnessAndTrailingLoudness() {
|
||||
_lastPopOutputLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput);
|
||||
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
const float LOUDNESS_EPSILON = 0.000001f;
|
||||
|
||||
if (lastPopLoudness >= _lastPopOutputTrailingLoudness) {
|
||||
_lastPopOutputTrailingLoudness = lastPopLoudness;
|
||||
if (_lastPopOutputLoudness >= _lastPopOutputTrailingLoudness) {
|
||||
_lastPopOutputTrailingLoudness = _lastPopOutputLoudness;
|
||||
} else {
|
||||
_lastPopOutputTrailingLoudness = (_lastPopOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * lastPopLoudness);
|
||||
_lastPopOutputTrailingLoudness = (_lastPopOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * _lastPopOutputLoudness);
|
||||
|
||||
if (_lastPopOutputTrailingLoudness < LOUDNESS_EPSILON) {
|
||||
_lastPopOutputTrailingLoudness = 0;
|
||||
|
|
|
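
The trailing loudness above is a one-pole average that jumps up instantly on louder frames but decays with weight 1/100 per frame, snapping to zero below a small epsilon so silent streams don't linger. A standalone sketch of that update rule:

#include <iostream>

// Same update rule as updateLastPopOutputLoudnessAndTrailingLoudness(), isolated.
float updateTrailingLoudness(float trailing, float current) {
    const int TRAILING_AVERAGE_FRAMES = 100;
    const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
    const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
    const float LOUDNESS_EPSILON = 0.000001f;

    if (current >= trailing) {
        return current;                               // attack: follow louder frames immediately
    }
    trailing = trailing * PREVIOUS_FRAMES_RATIO + current * CURRENT_FRAME_RATIO;
    return (trailing < LOUDNESS_EPSILON) ? 0.0f : trailing;  // release: slow decay, clamp tiny values
}

int main() {
    // a loud frame (0.5) followed by a silent frame decays only slightly: 0.5 * 0.99 = 0.495
    float trailing = updateTrailingLoudness(0.0f, 0.5f);
    trailing = updateTrailingLoudness(trailing, 0.0f);
    std::cout << trailing << "\n"; // prints 0.495
    return 0;
}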
@ -16,6 +16,8 @@
|
|||
#include <AABox.h>
|
||||
|
||||
#include "InboundAudioStream.h"
|
||||
#include "AudioFilter.h"
|
||||
#include "AudioFilterBank.h"
|
||||
|
||||
const int AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
|
||||
|
||||
|
@ -27,13 +29,15 @@ public:
|
|||
Injector
|
||||
};
|
||||
|
||||
PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames,
|
||||
int maxFramesOverDesired);
|
||||
PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, const InboundAudioStream::Settings& settings);
|
||||
|
||||
virtual void resetStats();
|
||||
|
||||
virtual AudioStreamStats getAudioStreamStats() const;
|
||||
|
||||
void updateLastPopOutputTrailingLoudness();
|
||||
void updateLastPopOutputLoudnessAndTrailingLoudness();
|
||||
float getLastPopOutputTrailingLoudness() const { return _lastPopOutputTrailingLoudness; }
|
||||
float getLastPopOutputLoudness() const { return _lastPopOutputLoudness; }
|
||||
|
||||
bool shouldLoopbackForNode() const { return _shouldLoopbackForNode; }
|
||||
bool isStereo() const { return _isStereo; }
|
||||
|
@ -44,6 +48,8 @@ public:
|
|||
|
||||
void setListenerUnattenuatedZone(AABox* listenerUnattenuatedZone) { _listenerUnattenuatedZone = listenerUnattenuatedZone; }
|
||||
|
||||
AudioFilterHSF1s& getFilter() { return _filter; }
|
||||
|
||||
protected:
|
||||
// disallow copying of PositionalAudioStream objects
|
||||
PositionalAudioStream(const PositionalAudioStream&);
|
||||
|
@ -60,7 +66,10 @@ protected:
|
|||
bool _isStereo;
|
||||
|
||||
float _lastPopOutputTrailingLoudness;
|
||||
float _lastPopOutputLoudness;
|
||||
AABox* _listenerUnattenuatedZone;
|
||||
|
||||
AudioFilterHSF1s _filter;
|
||||
};
|
||||
|
||||
#endif // hifi_PositionalAudioStream_h
@ -135,9 +135,9 @@ QByteArray AvatarData::toByteArray() {
|
|||
// lazily allocate memory for HeadData in case we're not an Avatar instance
|
||||
if (!_headData) {
|
||||
_headData = new HeadData(this);
|
||||
if (_forceFaceshiftConnected) {
|
||||
_headData->_isFaceshiftConnected = true;
|
||||
}
|
||||
}
|
||||
if (_forceFaceshiftConnected) {
|
||||
_headData->_isFaceshiftConnected = true;
|
||||
}
|
||||
|
||||
QByteArray avatarDataByteArray;
|
||||
|
@ -153,7 +153,7 @@ QByteArray AvatarData::toByteArray() {
|
|||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyYaw);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyPitch);
|
||||
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyRoll);
|
||||
|
||||
|
||||
// Body scale
|
||||
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);
|
||||
|
||||
|
@ -585,6 +585,101 @@ bool AvatarData::hasReferential() {
|
|||
return _referential != NULL;
|
||||
}
|
||||
|
||||
bool AvatarData::isPlaying() {
|
||||
if (!_player) {
|
||||
return false;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
bool result;
|
||||
QMetaObject::invokeMethod(this, "isPlaying", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(bool, result));
|
||||
return result;
|
||||
}
|
||||
return _player && _player->isPlaying();
|
||||
}
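
isPlaying() and the other player accessors below all use the same guard: if the caller is not on the object's owning thread, re-invoke the method on that thread with a blocking queued connection and return the marshalled result. A hedged sketch of the pattern on a hypothetical QObject (the class and member names are invented for illustration):

#include <QObject>
#include <QThread>
#include <QMetaObject>

// Hypothetical example class showing the same thread-bounce pattern used here.
class PlayerFacade : public QObject {
    Q_OBJECT
public slots:
    bool isBusy() {
        if (QThread::currentThread() != thread()) {
            bool result = false;
            // block the calling thread until the owning thread has run this slot
            QMetaObject::invokeMethod(this, "isBusy", Qt::BlockingQueuedConnection,
                                      Q_RETURN_ARG(bool, result));
            return result;
        }
        return _busy;   // safe: we are on the owning thread here
    }
private:
    bool _busy = false;
};

The thread check is also what keeps Qt::BlockingQueuedConnection from deadlocking: it is only used when the caller is on a different thread than the object.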
|
||||
|
||||
qint64 AvatarData::playerElapsed() {
|
||||
if (!_player) {
|
||||
return 0;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
qint64 result;
|
||||
QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(qint64, result));
|
||||
return result;
|
||||
}
|
||||
return _player->elapsed();
|
||||
}
|
||||
|
||||
qint64 AvatarData::playerLength() {
|
||||
if (!_player) {
|
||||
return 0;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
qint64 result;
|
||||
QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(qint64, result));
|
||||
return result;
|
||||
}
|
||||
return _player->getRecording()->getLength();
|
||||
}
|
||||
|
||||
void AvatarData::loadRecording(QString filename) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
|
||||
Q_ARG(QString, filename));
|
||||
return;
|
||||
}
|
||||
if (!_player) {
|
||||
_player = PlayerPointer(new Player(this));
|
||||
}
|
||||
|
||||
_player->loadFromFile(filename);
|
||||
}
|
||||
|
||||
void AvatarData::startPlaying() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
|
||||
return;
|
||||
}
|
||||
if (!_player) {
|
||||
_player = PlayerPointer(new Player(this));
|
||||
}
|
||||
_player->startPlaying();
|
||||
}
|
||||
|
||||
void AvatarData::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
|
||||
_player->setPlayFromCurrentLocation(playFromCurrentLocation);
|
||||
}
|
||||
|
||||
void AvatarData::setPlayerLoop(bool loop) {
|
||||
_player->setLoop(loop);
|
||||
}
|
||||
|
||||
void AvatarData::play() {
|
||||
if (isPlaying()) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "play", Qt::BlockingQueuedConnection);
|
||||
return;
|
||||
}
|
||||
|
||||
_player->play();
|
||||
}
|
||||
}
|
||||
|
||||
void AvatarData::stopPlaying() {
|
||||
if (!_player) {
|
||||
return;
|
||||
}
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "stopPlaying", Qt::BlockingQueuedConnection);
|
||||
return;
|
||||
}
|
||||
if (_player) {
|
||||
_player->stopPlaying();
|
||||
}
|
||||
}
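The playback slots above (isPlaying(), playerElapsed(), playerLength(), loadRecording(), startPlaying(), stopPlaying()) all follow the same Qt thread-marshaling idiom: when the caller is not on the object's home thread, the slot re-invokes itself there with Qt::BlockingQueuedConnection and returns the marshaled result. A minimal stand-alone sketch of that idiom, using a hypothetical Playback class and slot name (illustration only, not part of this change):

#include <QMetaObject>
#include <QObject>
#include <QThread>

class Playback : public QObject {
    Q_OBJECT
public slots:
    bool isActive() {
        if (QThread::currentThread() != thread()) {
            // Not on the owning thread: re-run this slot there and block until it returns.
            bool result;
            QMetaObject::invokeMethod(this, "isActive", Qt::BlockingQueuedConnection,
                                      Q_RETURN_ARG(bool, result));
            return result;
        }
        // Now running on the owning thread; safe to touch thread-affine state.
        return true;
    }
};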
|
||||
|
||||
void AvatarData::changeReferential(Referential *ref) {
|
||||
delete _referential;
|
||||
_referential = ref;
|
||||
|
@ -705,6 +800,9 @@ void AvatarData::setJointRotations(QVector<glm::quat> jointRotations) {
|
|||
"setJointRotations", Qt::BlockingQueuedConnection,
|
||||
Q_ARG(QVector<glm::quat>, jointRotations));
|
||||
}
|
||||
if (_jointData.size() < jointRotations.size()) {
|
||||
_jointData.resize(jointRotations.size());
|
||||
}
|
||||
for (int i = 0; i < jointRotations.size(); ++i) {
|
||||
if (i < _jointData.size()) {
|
||||
setJointData(i, jointRotations[i]);
|
||||
|
|
|
@ -49,6 +49,7 @@ typedef unsigned long long quint64;
|
|||
|
||||
#include <Node.h>
|
||||
|
||||
#include "Recorder.h"
|
||||
#include "Referential.h"
|
||||
#include "HeadData.h"
|
||||
#include "HandData.h"
|
||||
|
@ -298,6 +299,16 @@ public slots:
|
|||
void setSessionUUID(const QUuid& sessionUUID) { _sessionUUID = sessionUUID; }
|
||||
bool hasReferential();
|
||||
|
||||
bool isPlaying();
|
||||
qint64 playerElapsed();
|
||||
qint64 playerLength();
|
||||
void loadRecording(QString filename);
|
||||
void startPlaying();
|
||||
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
|
||||
void setPlayerLoop(bool loop);
|
||||
void play();
|
||||
void stopPlaying();
|
||||
|
||||
protected:
|
||||
QUuid _sessionUUID;
|
||||
glm::vec3 _position;
|
||||
|
@ -351,6 +362,8 @@ protected:
|
|||
QWeakPointer<Node> _owningAvatarMixer;
|
||||
QElapsedTimer _lastUpdateTimer;
|
||||
|
||||
PlayerPointer _player;
|
||||
|
||||
/// Loads the joint indices, names from the FST file (if any)
|
||||
virtual void updateJointMappings();
|
||||
void changeReferential(Referential* ref);
|
||||
|
|
|
@ -41,6 +41,10 @@ public:
|
|||
void setBasePitch(float pitch) { _basePitch = glm::clamp(pitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH); }
|
||||
float getBaseRoll() const { return _baseRoll; }
|
||||
void setBaseRoll(float roll) { _baseRoll = glm::clamp(roll, MIN_HEAD_ROLL, MAX_HEAD_ROLL); }
|
||||
|
||||
virtual void setFinalYaw(float finalYaw) { _baseYaw = finalYaw; }
|
||||
virtual void setFinalPitch(float finalPitch) { _basePitch = finalPitch; }
|
||||
virtual void setFinalRoll(float finalRoll) { _baseRoll = finalRoll; }
|
||||
virtual float getFinalYaw() const { return _baseYaw; }
|
||||
virtual float getFinalPitch() const { return _basePitch; }
|
||||
virtual float getFinalRoll() const { return _baseRoll; }
|
||||
|
|
|
@ -15,6 +15,7 @@
|
|||
#include <QMetaObject>
|
||||
#include <QObject>
|
||||
|
||||
#include "AvatarData.h"
|
||||
#include "Recorder.h"
|
||||
|
||||
void RecordingFrame::setBlendshapeCoefficients(QVector<float> blendshapeCoefficients) {
|
||||
|
@ -164,7 +165,10 @@ void Recorder::record(char* samples, int size) {
|
|||
Player::Player(AvatarData* avatar) :
|
||||
_recording(new Recording()),
|
||||
_avatar(avatar),
|
||||
_audioThread(NULL)
|
||||
_audioThread(NULL),
|
||||
_startingScale(1.0f),
|
||||
_playFromCurrentPosition(true),
|
||||
_loop(false)
|
||||
{
|
||||
_timer.invalidate();
|
||||
_options.setLoop(false);
|
||||
|
@ -228,6 +232,19 @@ void Player::startPlaying() {
|
|||
_audioThread->start();
|
||||
QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection);
|
||||
|
||||
// Fake faceshift connection
|
||||
_avatar->setForceFaceshiftConnected(true);
|
||||
|
||||
if (_playFromCurrentPosition) {
|
||||
_startingPosition = _avatar->getPosition();
|
||||
_startingRotation = _avatar->getOrientation();
|
||||
_startingScale = _avatar->getTargetScale();
|
||||
} else {
|
||||
_startingPosition = _recording->getFrame(0).getTranslation();
|
||||
_startingRotation = _recording->getFrame(0).getRotation();
|
||||
_startingScale = _recording->getFrame(0).getScale();
|
||||
}
|
||||
|
||||
_timer.start();
|
||||
}
|
||||
}
|
||||
|
@ -251,6 +268,10 @@ void Player::stopPlaying() {
|
|||
_audioThread, &QThread::deleteLater);
|
||||
_injector.clear();
|
||||
_audioThread = NULL;
|
||||
|
||||
// Turn off fake faceshift connection
|
||||
_avatar->setForceFaceshiftConnected(false);
|
||||
|
||||
qDebug() << "Recorder::stopPlaying()";
|
||||
}
|
||||
|
||||
|
@ -269,29 +290,40 @@ void Player::loadRecording(RecordingPointer recording) {
|
|||
|
||||
void Player::play() {
|
||||
computeCurrentFrame();
|
||||
if (_currentFrame < 0 || _currentFrame >= _recording->getFrameNumber() - 1) {
|
||||
if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 1)) {
|
||||
// If it's the end of the recording, stop playing
|
||||
stopPlaying();
|
||||
|
||||
if (_loop) {
|
||||
startPlaying();
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (_currentFrame == 0) {
|
||||
_avatar->setPosition(_recording->getFrame(_currentFrame).getTranslation());
|
||||
_avatar->setOrientation(_recording->getFrame(_currentFrame).getRotation());
|
||||
_avatar->setTargetScale(_recording->getFrame(_currentFrame).getScale());
|
||||
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
|
||||
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
|
||||
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
|
||||
} else {
|
||||
_avatar->setPosition(_recording->getFrame(0).getTranslation() +
|
||||
_recording->getFrame(_currentFrame).getTranslation());
|
||||
_avatar->setOrientation(_recording->getFrame(0).getRotation() *
|
||||
_recording->getFrame(_currentFrame).getRotation());
|
||||
_avatar->setTargetScale(_recording->getFrame(0).getScale() *
|
||||
_recording->getFrame(_currentFrame).getScale());
|
||||
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
|
||||
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
|
||||
// Don't play frame 0
|
||||
// only meant to store absolute values
|
||||
return;
|
||||
}
|
||||
|
||||
_avatar->setPosition(_startingPosition +
|
||||
glm::inverse(_recording->getFrame(0).getRotation()) * _startingRotation *
|
||||
_recording->getFrame(_currentFrame).getTranslation());
|
||||
_avatar->setOrientation(_startingRotation *
|
||||
_recording->getFrame(_currentFrame).getRotation());
|
||||
_avatar->setTargetScale(_startingScale *
|
||||
_recording->getFrame(_currentFrame).getScale());
|
||||
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
|
||||
|
||||
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
|
||||
if (head) {
|
||||
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
|
||||
head->setLeanSideways(_recording->getFrame(_currentFrame).getLeanSideways());
|
||||
head->setLeanForward(_recording->getFrame(_currentFrame).getLeanForward());
|
||||
glm::vec3 eulers = glm::degrees(safeEulerAngles(_recording->getFrame(_currentFrame).getHeadRotation()));
|
||||
head->setFinalPitch(eulers.x);
|
||||
head->setFinalYaw(eulers.y);
|
||||
head->setFinalRoll(eulers.z);
|
||||
}
|
||||
|
||||
_options.setPosition(_avatar->getPosition());
|
||||
|
@ -299,6 +331,14 @@ void Player::play() {
|
|||
_injector->setOptions(_options);
|
||||
}
|
||||
|
||||
void Player::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
|
||||
_playFromCurrentPosition = playFromCurrentLocation;
|
||||
}
|
||||
|
||||
void Player::setLoop(bool loop) {
|
||||
_loop = loop;
|
||||
}
|
||||
|
||||
bool Player::computeCurrentFrame() {
|
||||
if (!isPlaying()) {
|
||||
_currentFrame = -1;
|
|
@ -23,10 +23,10 @@
|
|||
#include <glm/gtx/quaternion.hpp>
|
||||
|
||||
#include <AudioInjector.h>
|
||||
#include <AvatarData.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <Sound.h>
|
||||
|
||||
class AvatarData;
|
||||
class Recorder;
|
||||
class Recording;
|
||||
class Player;
|
||||
|
@ -97,6 +97,7 @@ private:
|
|||
QVector<qint32> _timestamps;
|
||||
QVector<RecordingFrame> _frames;
|
||||
|
||||
bool _stereo;
|
||||
Sound* _audio;
|
||||
|
||||
friend class Recorder;
|
||||
|
@ -152,6 +153,9 @@ public slots:
|
|||
void loadRecording(RecordingPointer recording);
|
||||
void play();
|
||||
|
||||
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
|
||||
void setLoop(bool loop);
|
||||
|
||||
private:
|
||||
bool computeCurrentFrame();
|
||||
|
||||
|
@ -164,6 +168,13 @@ private:
|
|||
|
||||
AvatarData* _avatar;
|
||||
QThread* _audioThread;
|
||||
|
||||
glm::vec3 _startingPosition;
|
||||
glm::quat _startingRotation;
|
||||
float _startingScale;
|
||||
|
||||
bool _playFromCurrentPosition;
|
||||
bool _loop;
|
||||
};
|
||||
|
||||
void writeRecordingToFile(RecordingPointer recording, QString file);
|
|
@ -107,7 +107,7 @@ int Referential::packExtraData(unsigned char *destinationBuffer) const {
|
|||
|
||||
int Referential::unpackExtraData(const unsigned char* sourceBuffer, int size) {
|
||||
_extraDataBuffer.clear();
|
||||
_extraDataBuffer.setRawData(reinterpret_cast<const char*>(sourceBuffer), size);
|
||||
_extraDataBuffer.append(reinterpret_cast<const char*>(sourceBuffer), size);
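// (QByteArray::append() deep-copies the incoming bytes, whereas the old setRawData() only
// referenced the caller's buffer for its lifetime -- presumably the motivation for this change.)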
|
||||
return size;
|
||||
}
|
||||
|
||||
|
|
|
@ -1503,7 +1503,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
|
|||
joint.inverseBindRotation = joint.inverseDefaultRotation;
|
||||
joint.name = model.name;
|
||||
joint.shapePosition = glm::vec3(0.f);
|
||||
joint.shapeType = Shape::UNKNOWN_SHAPE;
|
||||
joint.shapeType = UNKNOWN_SHAPE;
|
||||
geometry.joints.append(joint);
|
||||
geometry.jointIndices.insert(model.name, geometry.joints.size());
|
||||
|
||||
|
@ -1848,10 +1848,10 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
|
|||
if (collideLikeCapsule) {
|
||||
joint.shapeRotation = rotationBetween(defaultCapsuleAxis, jointShapeInfo.boneBegin);
|
||||
joint.shapePosition = 0.5f * jointShapeInfo.boneBegin;
|
||||
joint.shapeType = Shape::CAPSULE_SHAPE;
|
||||
joint.shapeType = CAPSULE_SHAPE;
|
||||
} else {
|
||||
// collide the joint like a sphere
|
||||
joint.shapeType = Shape::SPHERE_SHAPE;
|
||||
joint.shapeType = SPHERE_SHAPE;
|
||||
if (jointShapeInfo.numVertices > 0) {
|
||||
jointShapeInfo.averageVertex /= (float)jointShapeInfo.numVertices;
|
||||
joint.shapePosition = jointShapeInfo.averageVertex;
|
||||
|
@ -1872,7 +1872,7 @@ FBXGeometry extractFBXGeometry(const FBXNode& node, const QVariantHash& mapping)
|
|||
// The shape is further from both joint endpoints than the endpoints are from each other
|
||||
// which probably means the model has a bad transform somewhere. We disable this shape
|
||||
// by setting its type to UNKNOWN_SHAPE.
|
||||
joint.shapeType = Shape::UNKNOWN_SHAPE;
|
||||
joint.shapeType = UNKNOWN_SHAPE;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -1055,7 +1055,9 @@ void HeightfieldTextureData::read(Bitstream& in, int bytes) {
|
|||
in >> _textures;
|
||||
}
|
||||
|
||||
HeightfieldTexture::HeightfieldTexture() {
|
||||
HeightfieldTexture::HeightfieldTexture() :
|
||||
_scaleS(1.0f),
|
||||
_scaleT(1.0f) {
|
||||
}
|
||||
|
||||
HeightfieldAttribute::HeightfieldAttribute(const QString& name) :
|
||||
|
|
|
@ -548,16 +548,23 @@ private:
|
|||
class HeightfieldTexture : public SharedObject {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(QUrl url MEMBER _url)
|
||||
|
||||
Q_PROPERTY(float scaleS MEMBER _scaleS)
|
||||
Q_PROPERTY(float scaleT MEMBER _scaleT)
|
||||
|
||||
public:
|
||||
|
||||
Q_INVOKABLE HeightfieldTexture();
|
||||
|
||||
const QUrl& getURL() const { return _url; }
|
||||
|
||||
float getScaleS() const { return _scaleS; }
|
||||
float getScaleT() const { return _scaleT; }
|
||||
|
||||
private:
|
||||
|
||||
QUrl _url;
|
||||
float _scaleS;
|
||||
float _scaleT;
|
||||
};
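Since the new scale factors are exposed both through accessors and as Q_PROPERTYs, they can be read or written either way. A small sketch, assuming a HeightfieldTexture* named texture obtained elsewhere (illustration only):

float s = texture->getScaleS();                    // direct accessor, defaults to 1.0f
float t = texture->property("scaleT").toFloat();   // through the Qt meta-object system
texture->setProperty("scaleS", 2.0f);              // MEMBER properties are writable by default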
|
||||
|
||||
/// An attribute that stores heightfield data.
|
||||
|
|
|
@ -49,8 +49,9 @@ PacketVersion versionForPacketType(PacketType type) {
|
|||
switch (type) {
|
||||
case PacketTypeMicrophoneAudioNoEcho:
|
||||
case PacketTypeMicrophoneAudioWithEcho:
|
||||
case PacketTypeSilentAudioFrame:
|
||||
return 2;
|
||||
case PacketTypeSilentAudioFrame:
|
||||
return 3;
|
||||
case PacketTypeMixedAudio:
|
||||
return 1;
|
||||
case PacketTypeAvatarData:
|
||||
|
@ -81,7 +82,7 @@ PacketVersion versionForPacketType(PacketType type) {
|
|||
case PacketTypeAudioStreamStats:
|
||||
return 1;
|
||||
case PacketTypeMetavoxelData:
|
||||
return 2;
|
||||
return 3;
|
||||
default:
|
||||
return 0;
|
||||
}
|
||||
|
|
|
@ -486,14 +486,6 @@ void ScriptEngine::run() {
|
|||
// pack a placeholder value for sequence number for now, will be packed when destination node is known
|
||||
int numPreSequenceNumberBytes = audioPacket.size();
|
||||
packetStream << (quint16) 0;
|
||||
|
||||
// assume scripted avatar audio is mono and set channel flag to zero
|
||||
packetStream << (quint8) 0;
|
||||
|
||||
// use the orientation and position of this avatar for the source of this audio
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(&_avatarData->getPosition()), sizeof(glm::vec3));
|
||||
glm::quat headOrientation = _avatarData->getHeadOrientation();
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(&headOrientation), sizeof(glm::quat));
|
||||
|
||||
if (silentFrame) {
|
||||
if (!_isListeningToAudioStream) {
|
||||
|
@ -503,12 +495,20 @@ void ScriptEngine::run() {
|
|||
|
||||
// write the number of silent samples so the audio-mixer can uphold timing
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(&SCRIPT_AUDIO_BUFFER_SAMPLES), sizeof(int16_t));
|
||||
} else if (nextSoundOutput) {
|
||||
// write the raw audio data
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(nextSoundOutput),
|
||||
numAvailableSamples * sizeof(int16_t));
|
||||
}
|
||||
|
||||
} else if (nextSoundOutput) {
|
||||
// assume scripted avatar audio is mono and set channel flag to zero
|
||||
packetStream << (quint8)0;
|
||||
|
||||
// use the orientation and position of this avatar for the source of this audio
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(&_avatarData->getPosition()), sizeof(glm::vec3));
|
||||
glm::quat headOrientation = _avatarData->getHeadOrientation();
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(&headOrientation), sizeof(glm::quat));
|
||||
|
||||
// write the raw audio data
|
||||
packetStream.writeRawData(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples * sizeof(int16_t));
|
||||
}
|
||||
|
||||
// write audio packet to AudioMixer nodes
|
||||
NodeList* nodeList = NodeList::getInstance();
|
||||
foreach(const SharedNodePointer& node, nodeList->getNodeHash()) {
|
||||
|
|
|
@ -18,20 +18,20 @@
|
|||
#include "SharedUtil.h"
|
||||
|
||||
|
||||
CapsuleShape::CapsuleShape() : Shape(Shape::CAPSULE_SHAPE), _radius(0.0f), _halfHeight(0.0f) {}
|
||||
CapsuleShape::CapsuleShape() : Shape(CAPSULE_SHAPE), _radius(0.0f), _halfHeight(0.0f) {}
|
||||
|
||||
CapsuleShape::CapsuleShape(float radius, float halfHeight) : Shape(Shape::CAPSULE_SHAPE),
|
||||
CapsuleShape::CapsuleShape(float radius, float halfHeight) : Shape(CAPSULE_SHAPE),
|
||||
_radius(radius), _halfHeight(halfHeight) {
|
||||
updateBoundingRadius();
|
||||
}
|
||||
|
||||
CapsuleShape::CapsuleShape(float radius, float halfHeight, const glm::vec3& position, const glm::quat& rotation) :
|
||||
Shape(Shape::CAPSULE_SHAPE, position, rotation), _radius(radius), _halfHeight(halfHeight) {
|
||||
Shape(CAPSULE_SHAPE, position, rotation), _radius(radius), _halfHeight(halfHeight) {
|
||||
updateBoundingRadius();
|
||||
}
|
||||
|
||||
CapsuleShape::CapsuleShape(float radius, const glm::vec3& startPoint, const glm::vec3& endPoint) :
|
||||
Shape(Shape::CAPSULE_SHAPE), _radius(radius), _halfHeight(0.0f) {
|
||||
Shape(CAPSULE_SHAPE), _radius(radius), _halfHeight(0.0f) {
|
||||
setEndPoints(startPoint, endPoint);
|
||||
}
|
||||
|
||||
|
|
|
@ -18,45 +18,63 @@
|
|||
#include "RingBufferHistory.h"
|
||||
|
||||
template <typename T>
|
||||
class MovingMinMaxAvg {
|
||||
class MinMaxAvg {
|
||||
public:
|
||||
MinMaxAvg()
|
||||
: _min(std::numeric_limits<T>::max()),
|
||||
_max(std::numeric_limits<T>::min()),
|
||||
_average(0.0),
|
||||
_samples(0)
|
||||
{}
|
||||
|
||||
void reset() {
|
||||
_min = std::numeric_limits<T>::max();
|
||||
_max = std::numeric_limits<T>::min();
|
||||
_average = 0.0;
|
||||
_samples = 0;
|
||||
}
|
||||
|
||||
void update(T sample) {
|
||||
if (sample < _min) {
|
||||
_min = sample;
|
||||
}
|
||||
if (sample > _max) {
|
||||
_max = sample;
|
||||
}
|
||||
double totalSamples = _samples + 1;
|
||||
_average = _average * ((double)_samples / totalSamples)
|
||||
+ (double)sample / totalSamples;
|
||||
_samples++;
|
||||
}
|
||||
|
||||
void update(const MinMaxAvg<T>& other) {
|
||||
if (other._min < _min) {
|
||||
_min = other._min;
|
||||
}
|
||||
if (other._max > _max) {
|
||||
_max = other._max;
|
||||
}
|
||||
double totalSamples = _samples + other._samples;
|
||||
_average = _average * ((double)_samples / totalSamples)
|
||||
+ other._average * ((double)other._samples / totalSamples);
|
||||
_samples += other._samples;
|
||||
}
|
||||
|
||||
T getMin() const { return _min; }
|
||||
T getMax() const { return _max; }
|
||||
double getAverage() const { return _average; }
|
||||
int getSamples() const { return _samples; }
|
||||
double getSum() const { return _samples * _average; }
|
||||
|
||||
private:
|
||||
class Stats {
|
||||
public:
|
||||
Stats()
|
||||
: _min(std::numeric_limits<T>::max()),
|
||||
_max(std::numeric_limits<T>::min()),
|
||||
_average(0.0) {}
|
||||
|
||||
void updateWithSample(T sample, int& numSamplesInAverage) {
|
||||
if (sample < _min) {
|
||||
_min = sample;
|
||||
}
|
||||
if (sample > _max) {
|
||||
_max = sample;
|
||||
}
|
||||
_average = _average * ((double)numSamplesInAverage / (numSamplesInAverage + 1))
|
||||
+ (double)sample / (numSamplesInAverage + 1);
|
||||
numSamplesInAverage++;
|
||||
}
|
||||
|
||||
void updateWithOtherStats(const Stats& other, int& numStatsInAverage) {
|
||||
if (other._min < _min) {
|
||||
_min = other._min;
|
||||
}
|
||||
if (other._max > _max) {
|
||||
_max = other._max;
|
||||
}
|
||||
_average = _average * ((double)numStatsInAverage / (numStatsInAverage + 1))
|
||||
+ other._average / (numStatsInAverage + 1);
|
||||
numStatsInAverage++;
|
||||
}
|
||||
|
||||
T _min;
|
||||
T _max;
|
||||
double _average;
|
||||
};
|
||||
T _min;
|
||||
T _max;
|
||||
double _average;
|
||||
int _samples;
|
||||
};
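For reference, update() above folds each sample into the average incrementally, mean_new = mean_old * n/(n+1) + sample/(n+1), instead of keeping a running sum. A tiny worked example with made-up values:

MinMaxAvg<int> stats;
stats.update(2);                 // min 2, max 2, average 2.0
stats.update(4);                 // average = 2.0 * (1/2) + 4 * (1/2) = 3.0
stats.update(9);                 // average = 3.0 * (2/3) + 9 * (1/3) = 5.0
double total = stats.getSum();   // 5.0 average * 3 samples == 15.0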
|
||||
|
||||
template <typename T>
|
||||
class MovingMinMaxAvg {
|
||||
public:
|
||||
// This class collects 3 stats (min, max, avg) over a moving window of samples.
|
||||
// The moving window contains _windowIntervals * _intervalLength samples.
|
||||
|
@ -66,66 +84,98 @@ public:
|
|||
// this class with MovingMinMaxAvg(100, 50). If you want a moving min of the past 100 samples updated on every
|
||||
// new sample, instantiate this class with MovingMinMaxAvg(1, 100).
|
||||
|
||||
|
||||
/// use intervalLength = 0 to use in manual mode, where the currentIntervalComplete() function must
|
||||
/// be called to complete an interval
|
||||
MovingMinMaxAvg(int intervalLength, int windowIntervals)
|
||||
: _intervalLength(intervalLength),
|
||||
_windowIntervals(windowIntervals),
|
||||
_overallStats(),
|
||||
_samplesCollected(0),
|
||||
_windowStats(),
|
||||
_existingSamplesInCurrentInterval(0),
|
||||
_currentIntervalStats(),
|
||||
_intervalStats(windowIntervals),
|
||||
_newStatsAvailable(false)
|
||||
{}
|
||||
|
||||
void reset() {
|
||||
_overallStats = Stats();
|
||||
_samplesCollected = 0;
|
||||
_windowStats = Stats();
|
||||
_existingSamplesInCurrentInterval = 0;
|
||||
_currentIntervalStats = Stats();
|
||||
_overallStats.reset();
|
||||
_windowStats.reset();
|
||||
_currentIntervalStats.reset();
|
||||
_intervalStats.clear();
|
||||
_newStatsAvailable = false;
|
||||
}
|
||||
|
||||
void setWindowIntervals(int windowIntervals) {
|
||||
_windowIntervals = windowIntervals;
|
||||
_overallStats.reset();
|
||||
_windowStats.reset();
|
||||
_currentIntervalStats.reset();
|
||||
_intervalStats.setCapacity(_windowIntervals);
|
||||
_newStatsAvailable = false;
|
||||
}
|
||||
|
||||
void update(T newSample) {
|
||||
// update overall stats
|
||||
_overallStats.updateWithSample(newSample, _samplesCollected);
|
||||
_overallStats.update(newSample);
|
||||
|
||||
// update the current interval stats
|
||||
_currentIntervalStats.updateWithSample(newSample, _existingSamplesInCurrentInterval);
|
||||
_currentIntervalStats.update(newSample);
|
||||
|
||||
// if the current interval of samples is now full, record its stats into our past intervals' stats
|
||||
if (_existingSamplesInCurrentInterval == _intervalLength) {
|
||||
|
||||
// record current interval's stats, then reset them
|
||||
_intervalStats.insert(_currentIntervalStats);
|
||||
_currentIntervalStats = Stats();
|
||||
_existingSamplesInCurrentInterval = 0;
|
||||
|
||||
// update the window's stats by combining the intervals' stats
|
||||
typename RingBufferHistory<Stats>::Iterator i = _intervalStats.begin();
|
||||
typename RingBufferHistory<Stats>::Iterator end = _intervalStats.end();
|
||||
_windowStats = Stats();
|
||||
int intervalsIncludedInWindowStats = 0;
|
||||
while (i != end) {
|
||||
_windowStats.updateWithOtherStats(*i, intervalsIncludedInWindowStats);
|
||||
i++;
|
||||
}
|
||||
|
||||
_newStatsAvailable = true;
|
||||
// NOTE: if _intervalLength is 0 (manual mode), currentIntervalComplete() will not be called here.
|
||||
if (_currentIntervalStats.getSamples() == _intervalLength) {
|
||||
currentIntervalComplete();
|
||||
}
|
||||
}
|
||||
|
||||
/// This function can be called to manually control when each interval ends. For example, if each interval
|
||||
/// needs to last T seconds as opposed to N samples, this function should be called every T seconds.
|
||||
void currentIntervalComplete() {
|
||||
// record current interval's stats, then reset them
|
||||
_intervalStats.insert(_currentIntervalStats);
|
||||
_currentIntervalStats.reset();
|
||||
|
||||
// update the window's stats by combining the intervals' stats
|
||||
typename RingBufferHistory< MinMaxAvg<T> >::Iterator i = _intervalStats.begin();
|
||||
typename RingBufferHistory< MinMaxAvg<T> >::Iterator end = _intervalStats.end();
|
||||
_windowStats.reset();
|
||||
while (i != end) {
|
||||
_windowStats.update(*i);
|
||||
++i;
|
||||
}
|
||||
|
||||
_newStatsAvailable = true;
|
||||
}
|
||||
|
||||
bool getNewStatsAvailableFlag() const { return _newStatsAvailable; }
|
||||
void clearNewStatsAvailableFlag() { _newStatsAvailable = false; }
|
||||
|
||||
T getMin() const { return _overallStats._min; }
|
||||
T getMax() const { return _overallStats._max; }
|
||||
double getAverage() const { return _overallStats._average; }
|
||||
T getWindowMin() const { return _windowStats._min; }
|
||||
T getWindowMax() const { return _windowStats._max; }
|
||||
double getWindowAverage() const { return _windowStats._average; }
|
||||
T getMin() const { return _overallStats.getMin(); }
|
||||
T getMax() const { return _overallStats.getMax(); }
|
||||
double getAverage() const { return _overallStats.getAverage(); }
|
||||
int getSamples() const { return _overallStats.getSamples(); }
|
||||
double getSum() const { return _overallStats.getSum(); }
|
||||
|
||||
T getWindowMin() const { return _windowStats.getMin(); }
|
||||
T getWindowMax() const { return _windowStats.getMax(); }
|
||||
double getWindowAverage() const { return _windowStats.getAverage(); }
|
||||
int getWindowSamples() const { return _windowStats.getSamples(); }
|
||||
double getWindowSum() const { return _windowStats.getSum(); }
|
||||
|
||||
T getCurrentIntervalMin() const { return _currentIntervalStats.getMin(); }
|
||||
T getCurrentIntervalMax() const { return _currentIntervalStats.getMax(); }
|
||||
double getCurrentIntervalAverage() const { return _currentIntervalStats.getAverage(); }
|
||||
int getCurrentIntervalSamples() const { return _currentIntervalStats.getSamples(); }
|
||||
double getCurrentIntervalSum() const { return _currentIntervalStats.getSum(); }
|
||||
|
||||
const MinMaxAvg<T>& getOverallStats() const{ return _overallStats; }
|
||||
const MinMaxAvg<T>& getWindowStats() const{ return _windowStats; }
|
||||
const MinMaxAvg<T>& getCurrentIntervalStats() const { return _currentIntervalStats; }
|
||||
|
||||
MinMaxAvg<T> getLastCompleteIntervalStats() const {
|
||||
const MinMaxAvg<T>* stats = _intervalStats.getNewestEntry();
|
||||
return stats == NULL ? MinMaxAvg<T>() : *stats;
|
||||
}
|
||||
|
||||
bool isWindowFilled() const { return _intervalStats.isFilled(); }
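A short usage sketch of the window configurations described in the class comment above, plus the new manual-interval mode; the variable names and sample values are made up for illustration:

MovingMinMaxAvg<float> coarse(100, 50);    // stats over the last 50 intervals of 100 samples each
MovingMinMaxAvg<float> perSample(1, 100);  // stats over the last 100 samples, refreshed every sample
MovingMinMaxAvg<quint64> timed(0, 30);     // intervalLength of 0: intervals are closed manually

quint64 sampleValue = 42;
timed.update(sampleValue);                 // collect samples as they arrive...
timed.currentIntervalComplete();           // ...and close an interval every T seconds (caller's timer)
if (timed.getNewStatsAvailableFlag()) {
    double windowAverage = timed.getWindowAverage();   // min/max/avg over the last 30 closed intervals
    timed.clearNewStatsAvailableFlag();
}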
|
||||
|
||||
|
@ -134,18 +184,16 @@ private:
|
|||
int _windowIntervals;
|
||||
|
||||
// these are min/max/avg stats for all samples collected.
|
||||
Stats _overallStats;
|
||||
int _samplesCollected;
|
||||
MinMaxAvg<T> _overallStats;
|
||||
|
||||
// these are the min/max/avg stats for the samples in the moving window
|
||||
Stats _windowStats;
|
||||
int _existingSamplesInCurrentInterval;
|
||||
MinMaxAvg<T> _windowStats;
|
||||
|
||||
// these are the min/max/avg stats for the current interval
|
||||
Stats _currentIntervalStats;
|
||||
// these are the min/max/avg stats for the samples in the current interval
|
||||
MinMaxAvg<T> _currentIntervalStats;
|
||||
|
||||
// these are stored stats for the past intervals in the window
|
||||
RingBufferHistory<Stats> _intervalStats;
|
||||
RingBufferHistory< MinMaxAvg<T> > _intervalStats;
|
||||
|
||||
bool _newStatsAvailable;
|
||||
};
|
||||
|
|
|
@ -12,8 +12,10 @@
|
|||
#include "PhysicsEntity.h"
|
||||
|
||||
#include "PhysicsSimulation.h"
|
||||
#include "PlaneShape.h"
|
||||
#include "Shape.h"
|
||||
#include "ShapeCollider.h"
|
||||
#include "SphereShape.h"
|
||||
|
||||
PhysicsEntity::PhysicsEntity() :
|
||||
_translation(0.0f),
|
||||
|
|
|
@ -16,8 +16,9 @@
|
|||
#include "PerfStat.h"
|
||||
#include "PhysicsEntity.h"
|
||||
#include "Ragdoll.h"
|
||||
#include "SharedUtil.h"
|
||||
#include "Shape.h"
|
||||
#include "ShapeCollider.h"
|
||||
#include "SharedUtil.h"
|
||||
|
||||
int MAX_DOLLS_PER_SIMULATION = 16;
|
||||
int MAX_ENTITIES_PER_SIMULATION = 64;
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
const glm::vec3 UNROTATED_NORMAL(0.0f, 1.0f, 0.0f);
|
||||
|
||||
PlaneShape::PlaneShape(const glm::vec4& coefficients) :
|
||||
Shape(Shape::PLANE_SHAPE) {
|
||||
Shape(PLANE_SHAPE) {
|
||||
|
||||
glm::vec3 normal = glm::vec3(coefficients);
|
||||
_translation = -normal * coefficients.w;
|
||||
|
|
|
@ -35,6 +35,14 @@ public:
|
|||
_numEntries = 0;
|
||||
}
|
||||
|
||||
void setCapacity(int capacity) {
|
||||
_size = capacity + 1;
|
||||
_capacity = capacity;
|
||||
_newestEntryAtIndex = 0;
|
||||
_numEntries = 0;
|
||||
_buffer.resize(_size);
|
||||
}
|
||||
|
||||
void insert(const T& entry) {
|
||||
// increment newest entry index cyclically
|
||||
_newestEntryAtIndex = (_newestEntryAtIndex == _size - 1) ? 0 : _newestEntryAtIndex + 1;
|
||||
|
@ -83,9 +91,14 @@ private:
|
|||
QVector<T> _buffer;
|
||||
|
||||
public:
|
||||
class Iterator : public std::iterator < std::forward_iterator_tag, T > {
|
||||
class Iterator : public std::iterator < std::random_access_iterator_tag, T > {
|
||||
public:
|
||||
Iterator(T* bufferFirst, T* bufferLast, T* at) : _bufferFirst(bufferFirst), _bufferLast(bufferLast), _at(at) {}
|
||||
Iterator(T* bufferFirst, T* bufferLast, T* newestAt, T* at)
|
||||
: _bufferFirst(bufferFirst),
|
||||
_bufferLast(bufferLast),
|
||||
_bufferLength(bufferLast - bufferFirst + 1),
|
||||
_newestAt(newestAt),
|
||||
_at(at) {}
|
||||
|
||||
bool operator==(const Iterator& rhs) { return _at == rhs._at; }
|
||||
bool operator!=(const Iterator& rhs) { return _at != rhs._at; }
|
||||
|
@ -103,20 +116,95 @@ public:
|
|||
return tmp;
|
||||
}
|
||||
|
||||
Iterator& operator--() {
|
||||
_at = (_at == _bufferLast) ? _bufferFirst : _at + 1;
|
||||
return *this;
|
||||
}
|
||||
|
||||
Iterator operator--(int) {
|
||||
Iterator tmp(*this);
|
||||
--(*this);
|
||||
return tmp;
|
||||
}
|
||||
|
||||
Iterator operator+(int add) {
|
||||
Iterator sum(*this);
|
||||
sum._at = atShiftedBy(add);
|
||||
return sum;
|
||||
}
|
||||
|
||||
Iterator operator-(int sub) {
|
||||
Iterator sum(*this);
|
||||
sum._at = atShiftedBy(-sub);
|
||||
return sum;
|
||||
}
|
||||
|
||||
Iterator& operator+=(int add) {
|
||||
_at = atShiftedBy(add);
|
||||
return *this;
|
||||
}
|
||||
|
||||
Iterator& operator-=(int sub) {
|
||||
_at = atShiftedBy(-sub);
|
||||
return *this;
|
||||
}
|
||||
|
||||
T& operator[](int i) {
|
||||
return *(atShiftedBy(i));
|
||||
}
|
||||
|
||||
bool operator<(const Iterator& rhs) {
|
||||
return age() < rhs.age();
|
||||
}
|
||||
|
||||
bool operator>(const Iterator& rhs) {
|
||||
return age() > rhs.age();
|
||||
}
|
||||
|
||||
bool operator<=(const Iterator& rhs) {
|
||||
return age() <= rhs.age();
|
||||
}
|
||||
|
||||
bool operator>=(const Iterator& rhs) {
|
||||
return age() >= rhs.age();
|
||||
}
|
||||
|
||||
int operator-(const Iterator& rhs) {
|
||||
return age() - rhs.age();
|
||||
}
|
||||
|
||||
private:
|
||||
T* const _bufferFirst;
|
||||
T* const _bufferLast;
|
||||
T* atShiftedBy(int i) { // shifts i places towards _bufferFirst (towards older entries)
|
||||
i = (_at - _bufferFirst - i) % _bufferLength;
|
||||
if (i < 0) {
|
||||
i += _bufferLength;
|
||||
}
|
||||
return _bufferFirst + i;
|
||||
}
|
||||
|
||||
int age() {
|
||||
int age = _newestAt - _at;
|
||||
if (age < 0) {
|
||||
age += _bufferLength;
|
||||
}
|
||||
return age;
|
||||
}
|
||||
|
||||
T* _bufferFirst;
|
||||
T* _bufferLast;
|
||||
int _bufferLength;
|
||||
T* _newestAt;
|
||||
T* _at;
|
||||
};
|
||||
|
||||
Iterator begin() { return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex]); }
|
||||
Iterator begin() { return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex], &_buffer[_newestEntryAtIndex]); }
|
||||
|
||||
Iterator end() {
|
||||
int endAtIndex = _newestEntryAtIndex - _numEntries;
|
||||
if (endAtIndex < 0) {
|
||||
endAtIndex += _size;
|
||||
}
|
||||
return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[endAtIndex]);
|
||||
return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex], &_buffer[endAtIndex]);
|
||||
}
|
||||
};
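An illustrative walk over the history using the iterator above; entries are visited newest first, and the new random-access operators step toward older entries (the stored type and values are hypothetical):

RingBufferHistory<int> history(3);     // capacity of 3 entries
history.insert(1);
history.insert(2);
history.insert(3);                     // buffer now holds 3 (newest), 2, 1 (oldest)

RingBufferHistory<int>::Iterator it = history.begin();
int newest = *it;                      // 3
int oldest = it[2];                    // operator[] shifts toward older entries -> 1
for (; it != history.end(); ++it) {
    // visits 3, 2, 1 in that order
}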
|
||||
|
||||
|
|
|
@ -22,17 +22,18 @@ class VerletPoint;
|
|||
|
||||
const float MAX_SHAPE_MASS = 1.0e18f; // something less than sqrt(FLT_MAX)
|
||||
|
||||
const quint8 SPHERE_SHAPE = 0;
|
||||
const quint8 CAPSULE_SHAPE = 1;
|
||||
const quint8 PLANE_SHAPE = 2;
|
||||
const quint8 LIST_SHAPE = 3;
|
||||
const quint8 UNKNOWN_SHAPE = 4;
|
||||
|
||||
class Shape {
|
||||
public:
|
||||
static quint32 getNextID() { static quint32 nextID = 0; return ++nextID; }
|
||||
|
||||
enum Type{
|
||||
UNKNOWN_SHAPE = 0,
|
||||
SPHERE_SHAPE,
|
||||
CAPSULE_SHAPE,
|
||||
PLANE_SHAPE,
|
||||
LIST_SHAPE
|
||||
};
|
||||
typedef quint8 Type;
|
||||
|
||||
static quint32 getNextID() { static quint32 nextID = 0; return ++nextID; }
|
||||
|
||||
Shape() : _type(UNKNOWN_SHAPE), _owningEntity(NULL), _boundingRadius(0.f),
|
||||
_translation(0.f), _rotation(), _mass(MAX_SHAPE_MASS) {
|
||||
|
@ -40,7 +41,7 @@ public:
|
|||
}
|
||||
virtual ~Shape() { }
|
||||
|
||||
int getType() const { return _type; }
|
||||
Type getType() const { return _type; }
|
||||
quint32 getID() const { return _id; }
|
||||
|
||||
void setEntity(PhysicsEntity* entity) { _owningEntity = entity; }
|
||||
|
@ -95,8 +96,8 @@ protected:
|
|||
|
||||
void setBoundingRadius(float radius) { _boundingRadius = radius; }
|
||||
|
||||
int _type;
|
||||
unsigned int _id;
|
||||
Type _type;
|
||||
quint32 _id;
|
||||
PhysicsEntity* _owningEntity;
|
||||
float _boundingRadius;
|
||||
glm::vec3 _translation;
|
||||
|
|
|
@ -15,85 +15,70 @@
|
|||
|
||||
#include "GeometryUtil.h"
|
||||
#include "ShapeCollider.h"
|
||||
#include "CapsuleShape.h"
|
||||
#include "ListShape.h"
|
||||
#include "PlaneShape.h"
|
||||
#include "SphereShape.h"
|
||||
|
||||
// NOTE:
|
||||
//
|
||||
// * Large ListShapes are inefficient; keep the lists short.
|
||||
// * Collisions between lists of lists work in theory but are not recommended.
|
||||
|
||||
const Shape::Type NUM_SHAPE_TYPES = 5;
|
||||
const quint8 NUM__DISPATCH_CELLS = NUM_SHAPE_TYPES * NUM_SHAPE_TYPES;
|
||||
|
||||
Shape::Type getDispatchKey(Shape::Type typeA, Shape::Type typeB) {
|
||||
return typeA + NUM_SHAPE_TYPES * typeB;
|
||||
}
|
||||
|
||||
// dummy dispatch for any non-implemented pairings
|
||||
bool notImplemented(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// NOTE: hardcode the number of dispatchTable entries (NUM_SHAPE_TYPES ^2)
|
||||
bool (*dispatchTable[NUM__DISPATCH_CELLS])(const Shape*, const Shape*, CollisionList&);
|
||||
|
||||
namespace ShapeCollider {
|
||||
|
||||
bool collideShapes(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
// TODO: make a fast lookup for correct method
|
||||
int typeA = shapeA->getType();
|
||||
int typeB = shapeB->getType();
|
||||
if (typeA == Shape::SPHERE_SHAPE) {
|
||||
const SphereShape* sphereA = static_cast<const SphereShape*>(shapeA);
|
||||
if (typeB == Shape::SPHERE_SHAPE) {
|
||||
return sphereSphere(sphereA, static_cast<const SphereShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::CAPSULE_SHAPE) {
|
||||
return sphereCapsule(sphereA, static_cast<const CapsuleShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::PLANE_SHAPE) {
|
||||
return spherePlane(sphereA, static_cast<const PlaneShape*>(shapeB), collisions);
|
||||
}
|
||||
} else if (typeA == Shape::CAPSULE_SHAPE) {
|
||||
const CapsuleShape* capsuleA = static_cast<const CapsuleShape*>(shapeA);
|
||||
if (typeB == Shape::SPHERE_SHAPE) {
|
||||
return capsuleSphere(capsuleA, static_cast<const SphereShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::CAPSULE_SHAPE) {
|
||||
return capsuleCapsule(capsuleA, static_cast<const CapsuleShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::PLANE_SHAPE) {
|
||||
return capsulePlane(capsuleA, static_cast<const PlaneShape*>(shapeB), collisions);
|
||||
}
|
||||
} else if (typeA == Shape::PLANE_SHAPE) {
|
||||
const PlaneShape* planeA = static_cast<const PlaneShape*>(shapeA);
|
||||
if (typeB == Shape::SPHERE_SHAPE) {
|
||||
return planeSphere(planeA, static_cast<const SphereShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::CAPSULE_SHAPE) {
|
||||
return planeCapsule(planeA, static_cast<const CapsuleShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::PLANE_SHAPE) {
|
||||
return planePlane(planeA, static_cast<const PlaneShape*>(shapeB), collisions);
|
||||
}
|
||||
} else if (typeA == Shape::LIST_SHAPE) {
|
||||
const ListShape* listA = static_cast<const ListShape*>(shapeA);
|
||||
if (typeB == Shape::SPHERE_SHAPE) {
|
||||
return listSphere(listA, static_cast<const SphereShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::CAPSULE_SHAPE) {
|
||||
return listCapsule(listA, static_cast<const CapsuleShape*>(shapeB), collisions);
|
||||
} else if (typeB == Shape::PLANE_SHAPE) {
|
||||
return listPlane(listA, static_cast<const PlaneShape*>(shapeB), collisions);
|
||||
}
|
||||
// NOTE: the dispatch table must be initialized before the ShapeCollider is used.
|
||||
void initDispatchTable() {
|
||||
for (Shape::Type i = 0; i < NUM__DISPATCH_CELLS; ++i) {
|
||||
dispatchTable[i] = ¬Implemented;
|
||||
}
|
||||
return false;
|
||||
|
||||
// NOTE: no need to update any that are notImplemented, but we leave them
|
||||
// commented out in the code so that we remember that they exist.
|
||||
dispatchTable[getDispatchKey(SPHERE_SHAPE, SPHERE_SHAPE)] = &sphereVsSphere;
|
||||
dispatchTable[getDispatchKey(SPHERE_SHAPE, CAPSULE_SHAPE)] = &sphereVsCapsule;
|
||||
dispatchTable[getDispatchKey(SPHERE_SHAPE, PLANE_SHAPE)] = &sphereVsPlane;
|
||||
dispatchTable[getDispatchKey(SPHERE_SHAPE, LIST_SHAPE)] = &shapeVsList;
|
||||
|
||||
dispatchTable[getDispatchKey(CAPSULE_SHAPE, SPHERE_SHAPE)] = &capsuleVsSphere;
|
||||
dispatchTable[getDispatchKey(CAPSULE_SHAPE, CAPSULE_SHAPE)] = &capsuleVsCapsule;
|
||||
dispatchTable[getDispatchKey(CAPSULE_SHAPE, PLANE_SHAPE)] = &capsuleVsPlane;
|
||||
dispatchTable[getDispatchKey(CAPSULE_SHAPE, LIST_SHAPE)] = &shapeVsList;
|
||||
|
||||
dispatchTable[getDispatchKey(PLANE_SHAPE, SPHERE_SHAPE)] = &planeVsSphere;
|
||||
dispatchTable[getDispatchKey(PLANE_SHAPE, CAPSULE_SHAPE)] = &planeVsCapsule;
|
||||
dispatchTable[getDispatchKey(PLANE_SHAPE, PLANE_SHAPE)] = &planeVsPlane;
|
||||
dispatchTable[getDispatchKey(PLANE_SHAPE, LIST_SHAPE)] = &shapeVsList;
|
||||
|
||||
dispatchTable[getDispatchKey(LIST_SHAPE, SPHERE_SHAPE)] = &listVsShape;
|
||||
dispatchTable[getDispatchKey(LIST_SHAPE, CAPSULE_SHAPE)] = &listVsShape;
|
||||
dispatchTable[getDispatchKey(LIST_SHAPE, PLANE_SHAPE)] = &listVsShape;
|
||||
dispatchTable[getDispatchKey(LIST_SHAPE, LIST_SHAPE)] = &listVsList;
|
||||
|
||||
// all of the UNKNOWN_SHAPE pairings are notImplemented
|
||||
}
|
||||
|
||||
bool collideShapes(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
return (*dispatchTable[shapeA->getType() + NUM_SHAPE_TYPES * shapeB->getType()])(shapeA, shapeB, collisions);
|
||||
}
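A sketch of how calling code is expected to drive the new table-based dispatch: initialize the table once, then route any shape pair through collideShapes(); the two shape pointers are assumed to come from elsewhere (illustration only):

ShapeCollider::initDispatchTable();       // must run once before any collision query

CollisionList collisions(16);             // room for up to 16 contacts
bool touching = ShapeCollider::collideShapes(shapeA, shapeB, collisions);
// Internally this indexes dispatchTable with
//     shapeA->getType() + NUM_SHAPE_TYPES * shapeB->getType()
// which is exactly getDispatchKey(typeA, typeB).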
|
||||
|
||||
static CollisionList tempCollisions(32);
|
||||
|
||||
bool collideShapesCoarse(const QVector<const Shape*>& shapesA, const QVector<const Shape*>& shapesB, CollisionInfo& collision) {
|
||||
tempCollisions.clear();
|
||||
foreach (const Shape* shapeA, shapesA) {
|
||||
foreach (const Shape* shapeB, shapesB) {
|
||||
collideShapes(shapeA, shapeB, tempCollisions);
|
||||
}
|
||||
}
|
||||
if (tempCollisions.size() > 0) {
|
||||
glm::vec3 totalPenetration(0.0f);
|
||||
glm::vec3 averageContactPoint(0.0f);
|
||||
for (int j = 0; j < tempCollisions.size(); ++j) {
|
||||
CollisionInfo* c = tempCollisions.getCollision(j);
|
||||
totalPenetration = addPenetrations(totalPenetration, c->_penetration);
|
||||
averageContactPoint += c->_contactPoint;
|
||||
}
|
||||
collision._penetration = totalPenetration;
|
||||
collision._contactPoint = averageContactPoint / (float)(tempCollisions.size());
|
||||
// there are no valid shape pointers for this collision so we set them NULL
|
||||
collision._shapeA = NULL;
|
||||
collision._shapeB = NULL;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
bool collideShapeWithShapes(const Shape* shapeA, const QVector<Shape*>& shapes, int startIndex, CollisionList& collisions) {
|
||||
bool collided = false;
|
||||
if (shapeA) {
|
||||
|
@ -133,21 +118,21 @@ bool collideShapesWithShapes(const QVector<Shape*>& shapesA, const QVector<Shape
|
|||
}
|
||||
|
||||
bool collideShapeWithAACube(const Shape* shapeA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) {
|
||||
int typeA = shapeA->getType();
|
||||
if (typeA == Shape::SPHERE_SHAPE) {
|
||||
return sphereAACube(static_cast<const SphereShape*>(shapeA), cubeCenter, cubeSide, collisions);
|
||||
} else if (typeA == Shape::CAPSULE_SHAPE) {
|
||||
return capsuleAACube(static_cast<const CapsuleShape*>(shapeA), cubeCenter, cubeSide, collisions);
|
||||
} else if (typeA == Shape::LIST_SHAPE) {
|
||||
Shape::Type typeA = shapeA->getType();
|
||||
if (typeA == SPHERE_SHAPE) {
|
||||
return sphereVsAACube(static_cast<const SphereShape*>(shapeA), cubeCenter, cubeSide, collisions);
|
||||
} else if (typeA == CAPSULE_SHAPE) {
|
||||
return capsuleVsAACube(static_cast<const CapsuleShape*>(shapeA), cubeCenter, cubeSide, collisions);
|
||||
} else if (typeA == LIST_SHAPE) {
|
||||
const ListShape* listA = static_cast<const ListShape*>(shapeA);
|
||||
bool touching = false;
|
||||
for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listA->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = sphereAACube(static_cast<const SphereShape*>(subShape), cubeCenter, cubeSide, collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = capsuleAACube(static_cast<const CapsuleShape*>(subShape), cubeCenter, cubeSide, collisions) || touching;
|
||||
if (subType == SPHERE_SHAPE) {
|
||||
touching = sphereVsAACube(static_cast<const SphereShape*>(subShape), cubeCenter, cubeSide, collisions) || touching;
|
||||
} else if (subType == CAPSULE_SHAPE) {
|
||||
touching = capsuleVsAACube(static_cast<const CapsuleShape*>(subShape), cubeCenter, cubeSide, collisions) || touching;
|
||||
}
|
||||
}
|
||||
return touching;
|
||||
|
@ -155,7 +140,9 @@ bool collideShapeWithAACube(const Shape* shapeA, const glm::vec3& cubeCenter, fl
|
|||
return false;
|
||||
}
|
||||
|
||||
bool sphereSphere(const SphereShape* sphereA, const SphereShape* sphereB, CollisionList& collisions) {
|
||||
bool sphereVsSphere(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
const SphereShape* sphereA = static_cast<const SphereShape*>(shapeA);
|
||||
const SphereShape* sphereB = static_cast<const SphereShape*>(shapeB);
|
||||
glm::vec3 BA = sphereB->getTranslation() - sphereA->getTranslation();
|
||||
float distanceSquared = glm::dot(BA, BA);
|
||||
float totalRadius = sphereA->getRadius() + sphereB->getRadius();
|
||||
|
@ -183,7 +170,9 @@ bool sphereSphere(const SphereShape* sphereA, const SphereShape* sphereB, Collis
|
|||
return false;
|
||||
}
|
||||
|
||||
bool sphereCapsule(const SphereShape* sphereA, const CapsuleShape* capsuleB, CollisionList& collisions) {
|
||||
bool sphereVsCapsule(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
const SphereShape* sphereA = static_cast<const SphereShape*>(shapeA);
|
||||
const CapsuleShape* capsuleB = static_cast<const CapsuleShape*>(shapeB);
|
||||
// find sphereA's closest approach to axis of capsuleB
|
||||
glm::vec3 BA = capsuleB->getTranslation() - sphereA->getTranslation();
|
||||
glm::vec3 capsuleAxis;
|
||||
|
@ -252,7 +241,9 @@ bool sphereCapsule(const SphereShape* sphereA, const CapsuleShape* capsuleB, Col
|
|||
return false;
|
||||
}
|
||||
|
||||
bool spherePlane(const SphereShape* sphereA, const PlaneShape* planeB, CollisionList& collisions) {
|
||||
bool sphereVsPlane(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
const SphereShape* sphereA = static_cast<const SphereShape*>(shapeA);
|
||||
const PlaneShape* planeB = static_cast<const PlaneShape*>(shapeB);
|
||||
glm::vec3 penetration;
|
||||
if (findSpherePlanePenetration(sphereA->getTranslation(), sphereA->getRadius(), planeB->getCoefficients(), penetration)) {
|
||||
CollisionInfo* collision = collisions.getNewCollision();
|
||||
|
@ -268,79 +259,8 @@ bool spherePlane(const SphereShape* sphereA, const PlaneShape* planeB, Collision
|
|||
return false;
|
||||
}
|
||||
|
||||
bool capsuleSphere(const CapsuleShape* capsuleA, const SphereShape* sphereB, CollisionList& collisions) {
|
||||
// find sphereB's closest approach to axis of capsuleA
|
||||
glm::vec3 AB = capsuleA->getTranslation() - sphereB->getTranslation();
|
||||
glm::vec3 capsuleAxis;
|
||||
capsuleA->computeNormalizedAxis(capsuleAxis);
|
||||
float axialDistance = - glm::dot(AB, capsuleAxis);
|
||||
float absAxialDistance = fabsf(axialDistance);
|
||||
float totalRadius = sphereB->getRadius() + capsuleA->getRadius();
|
||||
if (absAxialDistance < totalRadius + capsuleA->getHalfHeight()) {
|
||||
glm::vec3 radialAxis = AB + axialDistance * capsuleAxis; // from sphereB to axis of capsuleA
|
||||
float radialDistance2 = glm::length2(radialAxis);
|
||||
float totalRadius2 = totalRadius * totalRadius;
|
||||
if (radialDistance2 > totalRadius2) {
|
||||
// sphere is too far from capsule axis
|
||||
return false;
|
||||
}
|
||||
|
||||
// closestApproach = point on capsuleA's axis that is closest to sphereB's center
|
||||
glm::vec3 closestApproach = capsuleA->getTranslation() + axialDistance * capsuleAxis;
|
||||
|
||||
if (absAxialDistance > capsuleA->getHalfHeight()) {
|
||||
// sphere hits capsule on a cap
|
||||
// --> recompute radialAxis and closestApproach
|
||||
float sign = (axialDistance > 0.0f) ? 1.0f : -1.0f;
|
||||
closestApproach = capsuleA->getTranslation() + (sign * capsuleA->getHalfHeight()) * capsuleAxis;
|
||||
radialAxis = closestApproach - sphereB->getTranslation();
|
||||
radialDistance2 = glm::length2(radialAxis);
|
||||
if (radialDistance2 > totalRadius2) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
if (radialDistance2 > EPSILON * EPSILON) {
|
||||
CollisionInfo* collision = collisions.getNewCollision();
|
||||
if (!collision) {
|
||||
// collisions list is full
|
||||
return false;
|
||||
}
|
||||
// normalize the radialAxis
|
||||
float radialDistance = sqrtf(radialDistance2);
|
||||
radialAxis /= radialDistance;
|
||||
// penetration points from A into B
|
||||
collision->_penetration = (radialDistance - totalRadius) * radialAxis; // points from A into B
|
||||
// contactPoint is on surface of capsuleA
|
||||
collision->_contactPoint = closestApproach - capsuleA->getRadius() * radialAxis;
|
||||
collision->_shapeA = capsuleA;
|
||||
collision->_shapeB = sphereB;
|
||||
} else {
|
||||
// A is on B's axis, so the penetration is undefined...
|
||||
if (absAxialDistance > capsuleA->getHalfHeight()) {
|
||||
// ...for the cylinder case (for now we pretend the collision doesn't exist)
|
||||
return false;
|
||||
} else {
|
||||
CollisionInfo* collision = collisions.getNewCollision();
|
||||
if (!collision) {
|
||||
// collisions list is full
|
||||
return false;
|
||||
}
|
||||
// ... but still defined for the cap case
|
||||
if (axialDistance < 0.0f) {
|
||||
// we're hitting the start cap, so we negate the capsuleAxis
|
||||
capsuleAxis *= -1;
|
||||
}
|
||||
float sign = (axialDistance > 0.0f) ? 1.0f : -1.0f;
|
||||
collision->_penetration = (sign * (totalRadius + capsuleA->getHalfHeight() - absAxialDistance)) * capsuleAxis;
|
||||
// contactPoint is on surface of sphereA
|
||||
collision->_contactPoint = closestApproach + (sign * capsuleA->getRadius()) * capsuleAxis;
|
||||
collision->_shapeA = capsuleA;
|
||||
collision->_shapeB = sphereB;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
bool capsuleVsSphere(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
return sphereVsCapsule(shapeB, shapeA, collisions);
|
||||
}
|
||||
|
||||
/// \param lineP point on line
|
||||
|
@ -409,7 +329,9 @@ bool lineCylinder(const glm::vec3& lineP, const glm::vec3& lineDir,
|
|||
return true;
|
||||
}
|
||||
|
||||
bool capsuleCapsule(const CapsuleShape* capsuleA, const CapsuleShape* capsuleB, CollisionList& collisions) {
|
||||
bool capsuleVsCapsule(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
const CapsuleShape* capsuleA = static_cast<const CapsuleShape*>(shapeA);
|
||||
const CapsuleShape* capsuleB = static_cast<const CapsuleShape*>(shapeB);
|
||||
glm::vec3 axisA;
|
||||
capsuleA->computeNormalizedAxis(axisA);
|
||||
glm::vec3 axisB;
|
||||
|
@ -568,7 +490,9 @@ bool capsuleCapsule(const CapsuleShape* capsuleA, const CapsuleShape* capsuleB,
|
|||
return false;
|
||||
}
|
||||
|
||||
bool capsulePlane(const CapsuleShape* capsuleA, const PlaneShape* planeB, CollisionList& collisions) {
|
||||
bool capsuleVsPlane(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
const CapsuleShape* capsuleA = static_cast<const CapsuleShape*>(shapeA);
|
||||
const PlaneShape* planeB = static_cast<const PlaneShape*>(shapeB);
|
||||
glm::vec3 start, end, penetration;
|
||||
capsuleA->getStartPoint(start);
|
||||
capsuleA->getEndPoint(end);
|
||||
|
@ -588,147 +512,44 @@ bool capsulePlane(const CapsuleShape* capsuleA, const PlaneShape* planeB, Collis
|
|||
return false;
|
||||
}
|
||||
|
||||
bool planeSphere(const PlaneShape* planeA, const SphereShape* sphereB, CollisionList& collisions) {
|
||||
glm::vec3 penetration;
|
||||
if (findSpherePlanePenetration(sphereB->getTranslation(), sphereB->getRadius(), planeA->getCoefficients(), penetration)) {
|
||||
CollisionInfo* collision = collisions.getNewCollision();
|
||||
if (!collision) {
|
||||
return false; // collision list is full
|
||||
}
|
||||
collision->_penetration = -penetration;
|
||||
collision->_contactPoint = sphereB->getTranslation() +
|
||||
(sphereB->getRadius() / glm::length(penetration) - 1.0f) * penetration;
|
||||
collision->_shapeA = planeA;
|
||||
collision->_shapeB = sphereB;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
bool planeVsSphere(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
return sphereVsPlane(shapeB, shapeA, collisions);
|
||||
}
|
||||
|
||||
bool planeCapsule(const PlaneShape* planeA, const CapsuleShape* capsuleB, CollisionList& collisions) {
|
||||
glm::vec3 start, end, penetration;
|
||||
capsuleB->getStartPoint(start);
|
||||
capsuleB->getEndPoint(end);
|
||||
glm::vec4 plane = planeA->getCoefficients();
|
||||
if (findCapsulePlanePenetration(start, end, capsuleB->getRadius(), plane, penetration)) {
|
||||
CollisionInfo* collision = collisions.getNewCollision();
|
||||
if (!collision) {
|
||||
return false; // collision list is full
|
||||
}
|
||||
collision->_penetration = -penetration;
|
||||
glm::vec3 deepestEnd = (glm::dot(start, glm::vec3(plane)) < glm::dot(end, glm::vec3(plane))) ? start : end;
|
||||
collision->_contactPoint = deepestEnd + (capsuleB->getRadius() / glm::length(penetration) - 1.0f) * penetration;
|
||||
collision->_shapeA = planeA;
|
||||
collision->_shapeB = capsuleB;
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
bool planeVsCapsule(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
return capsuleVsPlane(shapeB, shapeA, collisions);
|
||||
}
|
||||
|
||||
bool planePlane(const PlaneShape* planeA, const PlaneShape* planeB, CollisionList& collisions) {
|
||||
bool planeVsPlane(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
// technically, planes always collide unless they're parallel and not coincident; however, that's
|
||||
// not going to give us any useful information
|
||||
return false;
|
||||
}
|
||||
|
||||
bool sphereList(const SphereShape* sphereA, const ListShape* listB, CollisionList& collisions) {
|
||||
bool shapeVsList(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
const ListShape* listB = static_cast<const ListShape*>(shapeB);
|
||||
for (int i = 0; i < listB->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listB->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = sphereSphere(sphereA, static_cast<const SphereShape*>(subShape), collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = sphereCapsule(sphereA, static_cast<const CapsuleShape*>(subShape), collisions) || touching;
|
||||
} else if (subType == Shape::PLANE_SHAPE) {
|
||||
touching = spherePlane(sphereA, static_cast<const PlaneShape*>(subShape), collisions) || touching;
|
||||
}
|
||||
touching = collideShapes(shapeA, subShape, collisions) || touching;
|
||||
}
|
||||
return touching;
|
||||
}
|
||||
|
||||
bool capsuleList(const CapsuleShape* capsuleA, const ListShape* listB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
for (int i = 0; i < listB->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listB->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = capsuleSphere(capsuleA, static_cast<const SphereShape*>(subShape), collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = capsuleCapsule(capsuleA, static_cast<const CapsuleShape*>(subShape), collisions) || touching;
|
||||
} else if (subType == Shape::PLANE_SHAPE) {
|
||||
touching = capsulePlane(capsuleA, static_cast<const PlaneShape*>(subShape), collisions) || touching;
|
||||
}
|
||||
}
|
||||
return touching;
|
||||
}
|
||||
|
||||
bool planeList(const PlaneShape* planeA, const ListShape* listB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
for (int i = 0; i < listB->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listB->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = planeSphere(planeA, static_cast<const SphereShape*>(subShape), collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = planeCapsule(planeA, static_cast<const CapsuleShape*>(subShape), collisions) || touching;
|
||||
} else if (subType == Shape::PLANE_SHAPE) {
|
||||
touching = planePlane(planeA, static_cast<const PlaneShape*>(subShape), collisions) || touching;
|
||||
}
|
||||
}
|
||||
return touching;
|
||||
}
|
||||
|
||||
bool listSphere(const ListShape* listA, const SphereShape* sphereB, CollisionList& collisions) {
|
||||
bool listVsShape(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
const ListShape* listA = static_cast<const ListShape*>(shapeA);
|
||||
for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listA->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = sphereSphere(static_cast<const SphereShape*>(subShape), sphereB, collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = capsuleSphere(static_cast<const CapsuleShape*>(subShape), sphereB, collisions) || touching;
|
||||
} else if (subType == Shape::PLANE_SHAPE) {
|
||||
touching = planeSphere(static_cast<const PlaneShape*>(subShape), sphereB, collisions) || touching;
|
||||
}
|
||||
touching = collideShapes(subShape, shapeB, collisions) || touching;
|
||||
}
|
||||
return touching;
|
||||
}
|
||||
|
||||
bool listCapsule(const ListShape* listA, const CapsuleShape* capsuleB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listA->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = sphereCapsule(static_cast<const SphereShape*>(subShape), capsuleB, collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = capsuleCapsule(static_cast<const CapsuleShape*>(subShape), capsuleB, collisions) || touching;
|
||||
} else if (subType == Shape::PLANE_SHAPE) {
|
||||
touching = planeCapsule(static_cast<const PlaneShape*>(subShape), capsuleB, collisions) || touching;
|
||||
}
|
||||
}
|
||||
return touching;
|
||||
}
|
||||
|
||||
bool listPlane(const ListShape* listA, const PlaneShape* planeB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listA->getSubShape(i);
|
||||
int subType = subShape->getType();
|
||||
if (subType == Shape::SPHERE_SHAPE) {
|
||||
touching = spherePlane(static_cast<const SphereShape*>(subShape), planeB, collisions) || touching;
|
||||
} else if (subType == Shape::CAPSULE_SHAPE) {
|
||||
touching = capsulePlane(static_cast<const CapsuleShape*>(subShape), planeB, collisions) || touching;
|
||||
} else if (subType == Shape::PLANE_SHAPE) {
|
||||
touching = planePlane(static_cast<const PlaneShape*>(subShape), planeB, collisions) || touching;
|
||||
}
|
||||
}
|
||||
return touching;
|
||||
}
|
||||
|
||||
bool listList(const ListShape* listA, const ListShape* listB, CollisionList& collisions) {
|
||||
bool listVsList(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions) {
|
||||
bool touching = false;
|
||||
const ListShape* listA = static_cast<const ListShape*>(shapeA);
|
||||
const ListShape* listB = static_cast<const ListShape*>(shapeB);
|
||||
for (int i = 0; i < listA->size() && !collisions.isFull(); ++i) {
|
||||
const Shape* subShape = listA->getSubShape(i);
|
||||
for (int j = 0; j < listB->size() && !collisions.isFull(); ++j) {
|
||||
|
@ -739,7 +560,7 @@ bool listList(const ListShape* listA, const ListShape* listB, CollisionList& col
|
|||
}
|
||||
|
||||
// helper function
|
||||
bool sphereAACube(const glm::vec3& sphereCenter, float sphereRadius, const glm::vec3& cubeCenter,
|
||||
bool sphereVsAACube(const glm::vec3& sphereCenter, float sphereRadius, const glm::vec3& cubeCenter,
|
||||
float cubeSide, CollisionList& collisions) {
|
||||
// sphere is A
|
||||
// cube is B
|
||||
|
@ -887,11 +708,11 @@ bool sphereAACube_StarkAngles(const glm::vec3& sphereCenter, float sphereRadius,
|
|||
}
|
||||
*/
|
||||
|
||||
bool sphereAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) {
|
||||
return sphereAACube(sphereA->getTranslation(), sphereA->getRadius(), cubeCenter, cubeSide, collisions);
|
||||
bool sphereVsAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) {
|
||||
return sphereVsAACube(sphereA->getTranslation(), sphereA->getRadius(), cubeCenter, cubeSide, collisions);
|
||||
}
|
||||
|
||||
bool capsuleAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) {
|
||||
bool capsuleVsAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions) {
|
||||
// find nearest approach of capsule line segment to cube
|
||||
glm::vec3 capsuleAxis;
|
||||
capsuleA->computeNormalizedAxis(capsuleAxis);
|
||||
|
@ -904,7 +725,7 @@ bool capsuleAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, fl
|
|||
}
|
||||
glm::vec3 nearestApproach = capsuleA->getTranslation() + offset * capsuleAxis;
|
||||
// collide nearest approach like a sphere at that point
|
||||
return sphereAACube(nearestApproach, capsuleA->getRadius(), cubeCenter, cubeSide, collisions);
|
||||
return sphereVsAACube(nearestApproach, capsuleA->getRadius(), cubeCenter, cubeSide, collisions);
|
||||
}
|
||||
|
||||
bool findRayIntersectionWithShapes(const QVector<Shape*> shapes, const glm::vec3& rayStart, const glm::vec3& rayDirection, float& minDistance) {
|
||||
@ -14,27 +14,24 @@
|
|||
|
||||
#include <QVector>
|
||||
|
||||
#include "CapsuleShape.h"
|
||||
#include "CollisionInfo.h"
|
||||
#include "ListShape.h"
|
||||
#include "PlaneShape.h"
|
||||
#include "SharedUtil.h"
|
||||
#include "SphereShape.h"
|
||||
|
||||
class Shape;
|
||||
class SphereShape;
|
||||
class CapsuleShape;
|
||||
|
||||
namespace ShapeCollider {
|
||||
|
||||
/// MUST CALL this FIRST before using the ShapeCollider
|
||||
void initDispatchTable();
|
||||
|
||||
/// \param shapeA pointer to first shape (cannot be NULL)
|
||||
/// \param shapeB pointer to second shape (cannot be NULL)
|
||||
/// \param collisions[out] collision details
|
||||
/// \return true if shapes collide
|
||||
bool collideShapes(const Shape* shapeA, const Shape* shapeB, CollisionList& collisions);
|
||||
|
||||
/// \param shapesA list of shapes
|
||||
/// \param shapesB list of shapes
|
||||
/// \param collision[out] average collision details
|
||||
/// \return true if any shapes collide
|
||||
bool collideShapesCoarse(const QVector<const Shape*>& shapesA, const QVector<const Shape*>& shapesB, CollisionInfo& collision);
|
||||
|
||||
bool collideShapeWithShapes(const Shape* shapeA, const QVector<Shape*>& shapes, int startIndex, CollisionList& collisions);
|
||||
bool collideShapesWithShapes(const QVector<Shape*>& shapesA, const QVector<Shape*>& shapesB, CollisionList& collisions);
|
||||
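Because collideShapes() now resolves the correct handler through this table, initDispatchTable() must run once before the first collision query (the tests below call it at the top of runAllTests()). A minimal usage sketch, with arbitrary example radii and list capacity:

ShapeCollider::initDispatchTable();

SphereShape sphereA(1.0f, glm::vec3(0.0f));
SphereShape sphereB(1.0f, glm::vec3(1.5f, 0.0f, 0.0f));
CollisionList collisions(16);

if (ShapeCollider::collideShapes(&sphereA, &sphereB, collisions)) {
    CollisionInfo* collision = collisions.getCollision(0);
    // collision->_penetration points from sphereA into sphereB
}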
|
||||
|
@ -49,111 +46,87 @@ namespace ShapeCollider {
|
|||
/// \param sphereB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool sphereSphere(const SphereShape* sphereA, const SphereShape* sphereB, CollisionList& collisions);
|
||||
bool sphereVsSphere(const Shape* sphereA, const Shape* sphereB, CollisionList& collisions);
|
||||
|
||||
/// \param sphereA pointer to first shape (cannot be NULL)
|
||||
/// \param capsuleB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool sphereCapsule(const SphereShape* sphereA, const CapsuleShape* capsuleB, CollisionList& collisions);
|
||||
bool sphereVsCapsule(const Shape* sphereA, const Shape* capsuleB, CollisionList& collisions);
|
||||
|
||||
/// \param sphereA pointer to first shape (cannot be NULL)
|
||||
/// \param planeB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool spherePlane(const SphereShape* sphereA, const PlaneShape* planeB, CollisionList& collisions);
|
||||
bool sphereVsPlane(const Shape* sphereA, const Shape* planeB, CollisionList& collisions);
|
||||
|
||||
/// \param capsuleA pointer to first shape (cannot be NULL)
|
||||
/// \param sphereB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool capsuleSphere(const CapsuleShape* capsuleA, const SphereShape* sphereB, CollisionList& collisions);
|
||||
bool capsuleVsSphere(const Shape* capsuleA, const Shape* sphereB, CollisionList& collisions);
|
||||
|
||||
/// \param capsuleA pointer to first shape (cannot be NULL)
|
||||
/// \param capsuleB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool capsuleCapsule(const CapsuleShape* capsuleA, const CapsuleShape* capsuleB, CollisionList& collisions);
|
||||
bool capsuleVsCapsule(const Shape* capsuleA, const Shape* capsuleB, CollisionList& collisions);
|
||||
|
||||
/// \param capsuleA pointer to first shape (cannot be NULL)
|
||||
/// \param planeB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool capsulePlane(const CapsuleShape* capsuleA, const PlaneShape* planeB, CollisionList& collisions);
|
||||
bool capsuleVsPlane(const Shape* capsuleA, const Shape* planeB, CollisionList& collisions);
|
||||
|
||||
/// \param planeA pointer to first shape (cannot be NULL)
|
||||
/// \param sphereB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool planeSphere(const PlaneShape* planeA, const SphereShape* sphereB, CollisionList& collisions);
|
||||
bool planeVsSphere(const Shape* planeA, const Shape* sphereB, CollisionList& collisions);
|
||||
|
||||
/// \param planeA pointer to first shape (cannot be NULL)
|
||||
/// \param capsuleB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool planeCapsule(const PlaneShape* planeA, const CapsuleShape* capsuleB, CollisionList& collisions);
|
||||
bool planeVsCapsule(const Shape* planeA, const Shape* capsuleB, CollisionList& collisions);
|
||||
|
||||
/// \param planeA pointer to first shape (cannot be NULL)
|
||||
/// \param planeB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool planePlane(const PlaneShape* planeA, const PlaneShape* planeB, CollisionList& collisions);
|
||||
bool planeVsPlane(const Shape* planeA, const Shape* planeB, CollisionList& collisions);
|
||||
|
||||
/// \param sphereA pointer to first shape (cannot be NULL)
|
||||
/// \param shapeA pointer to first shape (cannot be NULL)
|
||||
/// \param listB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool sphereList(const SphereShape* sphereA, const ListShape* listB, CollisionList& collisions);
|
||||
bool shapeVsList(const Shape* shapeA, const Shape* listB, CollisionList& collisions);
|
||||
|
||||
/// \param capsuleA pointer to first shape (cannot be NULL)
|
||||
/// \param listA pointer to first shape (cannot be NULL)
|
||||
/// \param shapeB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool listVsShape(const Shape* listA, const Shape* shapeB, CollisionList& collisions);
|
||||
|
||||
/// \param listA pointer to first shape (cannot be NULL)
|
||||
/// \param listB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool capsuleList(const CapsuleShape* capsuleA, const ListShape* listB, CollisionList& collisions);
|
||||
|
||||
/// \param planeA pointer to first shape (cannot be NULL)
|
||||
/// \param listB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool planeList(const PlaneShape* planeA, const ListShape* listB, CollisionList& collisions);
|
||||
|
||||
/// \param listA pointer to first shape (cannot be NULL)
|
||||
/// \param sphereB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool listSphere(const ListShape* listA, const SphereShape* sphereB, CollisionList& collisions);
|
||||
|
||||
/// \param listA pointer to first shape (cannot be NULL)
|
||||
/// \param capsuleB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool listCapsule(const ListShape* listA, const CapsuleShape* capsuleB, CollisionList& collisions);
|
||||
|
||||
/// \param listA pointer to first shape (cannot be NULL)
|
||||
/// \param planeB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool listPlane(const ListShape* listA, const PlaneShape* planeB, CollisionList& collisions);
|
||||
|
||||
/// \param listA pointer to first shape (cannot be NULL)
|
||||
/// \param capsuleB pointer to second shape (cannot be NULL)
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if shapes collide
|
||||
bool listList(const ListShape* listA, const ListShape* listB, CollisionList& collisions);
|
||||
bool listVsList(const Shape* listA, const Shape* listB, CollisionList& collisions);
|
||||
|
||||
/// \param sphereA pointer to sphere (cannot be NULL)
|
||||
/// \param cubeCenter center of cube
|
||||
/// \param cubeSide length of side of cube
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if sphereA collides with axis aligned cube
|
||||
bool sphereAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions);
|
||||
bool sphereVsAACube(const SphereShape* sphereA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions);
|
||||
|
||||
/// \param capsuleA pointer to capsule (cannot be NULL)
|
||||
/// \param cubeCenter center of cube
|
||||
/// \param cubeSide length of side of cube
|
||||
/// \param[out] collisions where to append collision details
|
||||
/// \return true if capsuleA collides with axis aligned cube
|
||||
bool capsuleAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions);
|
||||
bool capsuleVsAACube(const CapsuleShape* capsuleA, const glm::vec3& cubeCenter, float cubeSide, CollisionList& collisions);
|
||||
|
||||
/// \param shapes list of pointers to shapes (shape pointers may be NULL)
|
||||
/// \param startPoint beginning of ray
|
||||
@ -18,13 +18,13 @@

class SphereShape : public Shape {
public:
    SphereShape() : Shape(Shape::SPHERE_SHAPE) {}
    SphereShape() : Shape(SPHERE_SHAPE) {}

    SphereShape(float radius) : Shape(Shape::SPHERE_SHAPE) {
    SphereShape(float radius) : Shape(SPHERE_SHAPE) {
        _boundingRadius = radius;
    }

    SphereShape(float radius, const glm::vec3& position) : Shape(Shape::SPHERE_SHAPE, position) {
    SphereShape(float radius, const glm::vec3& position) : Shape(SPHERE_SHAPE, position) {
        _boundingRadius = radius;
    }

@ -27,28 +27,28 @@ void AudioRingBufferTests::runAllTests() {
|
|||
|
||||
int16_t readData[10000];
|
||||
int readIndexAt;
|
||||
|
||||
|
||||
|
||||
AudioRingBuffer ringBuffer(10, false, 10); // makes buffer of 100 int16_t samples
|
||||
for (int T = 0; T < 300; T++) {
|
||||
|
||||
|
||||
writeIndexAt = 0;
|
||||
readIndexAt = 0;
|
||||
|
||||
// write 73 samples, 73 samples in buffer
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73) / sizeof(int16_t);
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73);
|
||||
assertBufferSize(ringBuffer, 73);
|
||||
|
||||
// read 43 samples, 30 samples in buffer
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43) / sizeof(int16_t);
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43);
|
||||
assertBufferSize(ringBuffer, 30);
|
||||
|
||||
// write 70 samples, 100 samples in buffer (full)
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70) / sizeof(int16_t);
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70);
|
||||
assertBufferSize(ringBuffer, 100);
|
||||
|
||||
// read 100 samples, 0 samples in buffer (empty)
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100) / sizeof(int16_t);
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100);
|
||||
assertBufferSize(ringBuffer, 0);
|
||||
|
||||
|
||||
|
@ -65,15 +65,15 @@ void AudioRingBufferTests::runAllTests() {
|
|||
readIndexAt = 0;
|
||||
|
||||
// write 59 samples, 59 samples in buffer
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 59) / sizeof(int16_t);
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 59);
|
||||
assertBufferSize(ringBuffer, 59);
|
||||
|
||||
// write 99 samples, 100 samples in buffer
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 99) / sizeof(int16_t);
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 99);
|
||||
assertBufferSize(ringBuffer, 100);
|
||||
|
||||
// read 100 samples, 0 samples in buffer
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100) / sizeof(int16_t);
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100);
|
||||
assertBufferSize(ringBuffer, 0);
|
||||
|
||||
// verify 100 samples of read data
|
||||
|
@ -88,23 +88,23 @@ void AudioRingBufferTests::runAllTests() {
|
|||
readIndexAt = 0;
|
||||
|
||||
// write 77 samples, 77 samples in buffer
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 77) / sizeof(int16_t);
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 77);
|
||||
assertBufferSize(ringBuffer, 77);
|
||||
|
||||
// write 24 samples, 100 samples in buffer (overwrote one sample: "0")
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 24) / sizeof(int16_t);
|
||||
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 24);
|
||||
assertBufferSize(ringBuffer, 100);
|
||||
|
||||
// write 29 silent samples, 100 samples in buffer, make sure none were added
|
||||
int samplesWritten;
|
||||
if ((samplesWritten = ringBuffer.addSilentFrame(29)) != 0) {
|
||||
qDebug("addSilentFrame(29) incorrect! Expected: 0 Actual: %d", samplesWritten);
|
||||
if ((samplesWritten = ringBuffer.addSilentSamples(29)) != 0) {
|
||||
qDebug("addSilentSamples(29) incorrect! Expected: 0 Actual: %d", samplesWritten);
|
||||
return;
|
||||
}
|
||||
assertBufferSize(ringBuffer, 100);
|
||||
|
||||
// read 3 samples, 97 samples in buffer (expect to read "1", "2", "3")
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3) / sizeof(int16_t);
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3);
|
||||
for (int i = 0; i < 3; i++) {
|
||||
if (readData[i] != i + 1) {
|
||||
qDebug("Second readData[%d] incorrect! Expcted: %d Actual: %d", i, i + 1, readData[i]);
|
||||
|
@ -114,14 +114,14 @@ void AudioRingBufferTests::runAllTests() {
|
|||
assertBufferSize(ringBuffer, 97);
|
||||
|
||||
// write 4 silent samples, 100 samples in buffer
|
||||
if ((samplesWritten = ringBuffer.addSilentFrame(4) / sizeof(int16_t)) != 3) {
|
||||
qDebug("addSilentFrame(4) incorrect! Exptected: 3 Actual: %d", samplesWritten);
|
||||
if ((samplesWritten = ringBuffer.addSilentSamples(4)) != 3) {
|
||||
qDebug("addSilentSamples(4) incorrect! Exptected: 3 Actual: %d", samplesWritten);
|
||||
return;
|
||||
}
|
||||
assertBufferSize(ringBuffer, 100);
|
||||
|
||||
// read back 97 samples (the non-silent samples), 3 samples in buffer (expect to read "4" thru "100")
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 97) / sizeof(int16_t);
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 97);
|
||||
for (int i = 3; i < 100; i++) {
|
||||
if (readData[i] != i + 1) {
|
||||
qDebug("third readData[%d] incorrect! Expcted: %d Actual: %d", i, i + 1, readData[i]);
|
||||
|
@ -131,7 +131,7 @@ void AudioRingBufferTests::runAllTests() {
|
|||
assertBufferSize(ringBuffer, 3);
|
||||
|
||||
// read back 3 silent samples, 0 samples in buffer
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3) / sizeof(int16_t);
|
||||
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3);
|
||||
for (int i = 100; i < 103; i++) {
|
||||
if (readData[i] != 0) {
|
||||
qDebug("Fourth readData[%d] incorrect! Expcted: %d Actual: %d", i, 0, readData[i]);
|
||||
|
@ -143,4 +143,3 @@ void AudioRingBufferTests::runAllTests() {
|
|||
|
||||
qDebug() << "PASSED";
|
||||
}
|
||||
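The edits in this test follow the AudioRingBuffer API change in this commit: the updated calls treat the return value of writeSamples()/readSamples() as a sample count rather than a byte count (the old division by sizeof(int16_t) is dropped), and addSilentFrame() becomes addSilentSamples(). A before/after sketch of the calling convention (indices are just illustrative):

// before: return value was treated as a byte count
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73) / sizeof(int16_t);

// after: return value is already a sample count
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73);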
|
||||
@ -271,9 +271,10 @@ void runReceive(const char* addressOption, int port, int gap, int size, int repo
|
|||
|
||||
quint64 networkStart = usecTimestampNow();
|
||||
n = recvfrom(sockfd, inputBuffer, size, 0, NULL, NULL); // we don't care about where it came from
|
||||
|
||||
quint64 networkEnd = usecTimestampNow();
|
||||
float networkElapsed = (float)(networkEnd - networkStart);
|
||||
|
||||
|
||||
if (n < 0) {
|
||||
std::cout << "Receive error: " << strerror(errno) << "\n";
|
||||
}
|
||||
@ -16,7 +16,9 @@
|
|||
#include <glm/glm.hpp>
|
||||
#include <glm/gtx/quaternion.hpp>
|
||||
|
||||
#include <CapsuleShape.h>
|
||||
#include <CollisionInfo.h>
|
||||
#include <PlaneShape.h>
|
||||
#include <ShapeCollider.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <SphereShape.h>
|
||||
|
@ -71,8 +73,7 @@ void ShapeColliderTests::sphereMissesSphere() {
|
|||
|
||||
if (collisions.size() > 0) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: expected empty collision list but size is " << collisions.size()
|
||||
<< std::endl;
|
||||
<< " ERROR: expected empty collision list but size is " << collisions.size() << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -112,6 +113,7 @@ void ShapeColliderTests::sphereTouchesSphere() {
|
|||
if (!collision) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: null collision" << std::endl;
|
||||
return;
|
||||
}
|
||||
|
||||
// penetration points from sphereA into sphereB
|
||||
|
@ -119,7 +121,7 @@ void ShapeColliderTests::sphereTouchesSphere() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of sphereA
|
||||
|
@ -129,7 +131,7 @@ void ShapeColliderTests::sphereTouchesSphere() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -149,7 +151,7 @@ void ShapeColliderTests::sphereTouchesSphere() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of sphereA
|
||||
|
@ -159,7 +161,7 @@ void ShapeColliderTests::sphereTouchesSphere() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -199,23 +201,20 @@ void ShapeColliderTests::sphereMissesCapsule() {
|
|||
if (ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: sphere and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: sphere and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
|
||||
// capsuleB against sphereA
|
||||
if (ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: sphere and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: sphere and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
if (collisions.size() > 0) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: expected empty collision list but size is " << collisions.size()
|
||||
<< std::endl;
|
||||
<< " ERROR: expected empty collision list but size is " << collisions.size() << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -241,8 +240,7 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: sphere and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: sphere and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -254,7 +252,7 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of sphereA
|
||||
|
@ -263,15 +261,14 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
|
||||
// capsuleB collides with sphereA
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and sphere should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and sphere should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -279,33 +276,41 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
// penetration points from sphereA into capsuleB
|
||||
collision = collisions.getCollision(numCollisions - 1);
|
||||
expectedPenetration = - (radialOffset - totalRadius) * xAxis;
|
||||
if (collision->_shapeA == &sphereA) {
|
||||
// the ShapeCollider swapped the order of the shapes
|
||||
expectedPenetration *= -1.0f;
|
||||
}
|
||||
inaccuracy = glm::length(collision->_penetration - expectedPenetration);
|
||||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of capsuleB
|
||||
glm::vec3 BtoA = sphereA.getTranslation() - capsuleB.getTranslation();
|
||||
glm::vec3 closestApproach = capsuleB.getTranslation() + glm::dot(BtoA, yAxis) * yAxis;
|
||||
expectedContactPoint = closestApproach + radiusB * glm::normalize(BtoA - closestApproach);
|
||||
if (collision->_shapeA == &sphereA) {
|
||||
// the ShapeCollider swapped the order of the shapes
|
||||
closestApproach = sphereA.getTranslation() - glm::dot(BtoA, yAxis) * yAxis;
|
||||
expectedContactPoint = closestApproach - radiusB * glm::normalize(BtoA - closestApproach);
|
||||
}
|
||||
inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint);
|
||||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
{ // sphereA hits end cap at axis
|
||||
glm::vec3 axialOffset = (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis;
|
||||
sphereA.setTranslation(axialOffset * yAxis);
|
||||
sphereA.setTranslation(axialOffset);
|
||||
|
||||
if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: sphere and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: sphere and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -317,7 +322,7 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of sphereA
|
||||
|
@ -326,15 +331,14 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
|
||||
// capsuleB collides with sphereA
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and sphere should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and sphere should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -342,33 +346,40 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
// penetration points from sphereA into capsuleB
|
||||
collision = collisions.getCollision(numCollisions - 1);
|
||||
expectedPenetration = ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis;
|
||||
if (collision->_shapeA == &sphereA) {
|
||||
// the ShapeCollider swapped the order of the shapes
|
||||
expectedPenetration *= -1.0f;
|
||||
}
|
||||
inaccuracy = glm::length(collision->_penetration - expectedPenetration);
|
||||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of capsuleB
|
||||
glm::vec3 endPoint;
|
||||
capsuleB.getEndPoint(endPoint);
|
||||
expectedContactPoint = endPoint + radiusB * yAxis;
|
||||
if (collision->_shapeA == &sphereA) {
|
||||
// the ShapeCollider swapped the order of the shapes
|
||||
expectedContactPoint = axialOffset - radiusA * yAxis;
|
||||
}
|
||||
inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint);
|
||||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
{ // sphereA hits start cap at axis
|
||||
glm::vec3 axialOffset = - (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis;
|
||||
sphereA.setTranslation(axialOffset * yAxis);
|
||||
sphereA.setTranslation(axialOffset);
|
||||
|
||||
if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: sphere and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: sphere and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -380,7 +391,7 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of sphereA
|
||||
|
@ -389,15 +400,14 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
|
||||
// capsuleB collides with sphereA
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and sphere should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and sphere should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -405,22 +415,30 @@ void ShapeColliderTests::sphereTouchesCapsule() {
|
|||
// penetration points from sphereA into capsuleB
|
||||
collision = collisions.getCollision(numCollisions - 1);
|
||||
expectedPenetration = - ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis;
|
||||
if (collision->_shapeA == &sphereA) {
|
||||
// the ShapeCollider swapped the order of the shapes
|
||||
expectedPenetration *= -1.0f;
|
||||
}
|
||||
inaccuracy = glm::length(collision->_penetration - expectedPenetration);
|
||||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
// contactPoint is on surface of capsuleB
|
||||
glm::vec3 startPoint;
|
||||
capsuleB.getStartPoint(startPoint);
|
||||
expectedContactPoint = startPoint - radiusB * yAxis;
|
||||
if (collision->_shapeA == &sphereA) {
|
||||
// the ShapeCollider swapped the order of the shapes
|
||||
expectedContactPoint = axialOffset + radiusA * yAxis;
|
||||
}
|
||||
inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint);
|
||||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
if (collisions.size() != numCollisions) {
|
||||
|
@ -450,14 +468,12 @@ void ShapeColliderTests::capsuleMissesCapsule() {
|
|||
if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
|
||||
// end to end
|
||||
|
@ -465,14 +481,12 @@ void ShapeColliderTests::capsuleMissesCapsule() {
|
|||
if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
|
||||
// rotate B and move it to the side
|
||||
|
@ -482,20 +496,17 @@ void ShapeColliderTests::capsuleMissesCapsule() {
|
|||
if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should NOT touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
|
||||
}
|
||||
|
||||
if (collisions.size() > 0) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: expected empty collision list but size is " << collisions.size()
|
||||
<< std::endl;
|
||||
<< " ERROR: expected empty collision list but size is " << collisions.size() << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -520,16 +531,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -541,16 +550,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -564,16 +571,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -590,8 +595,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -602,7 +606,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * xAxis;
|
||||
|
@ -610,15 +614,14 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
|
||||
// capsuleB vs capsuleA
|
||||
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -629,8 +632,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration
|
||||
<< std::endl;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
expectedContactPoint = capsuleB.getTranslation() - (radiusB + halfHeightB) * xAxis;
|
||||
|
@ -638,8 +640,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint
|
||||
<< std::endl;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -655,8 +656,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
|
||||
{
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: capsule and capsule should touch"
|
||||
<< std::endl;
|
||||
<< " ERROR: capsule and capsule should touch" << std::endl;
|
||||
} else {
|
||||
++numCollisions;
|
||||
}
|
||||
|
@ -667,8 +667,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad penetration: expected = " << expectedPenetration
|
||||
<< " actual = " << collision->_penetration
|
||||
<< std::endl;
|
||||
<< " actual = " << collision->_penetration << std::endl;
|
||||
}
|
||||
|
||||
glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * zAxis + shift * yAxis;
|
||||
|
@ -676,8 +675,7 @@ void ShapeColliderTests::capsuleTouchesCapsule() {
|
|||
if (fabs(inaccuracy) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__
|
||||
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
|
||||
<< " actual = " << collision->_contactPoint
|
||||
<< std::endl;
|
||||
<< " actual = " << collision->_contactPoint << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -710,8 +708,9 @@ void ShapeColliderTests::sphereTouchesAACubeFaces() {
|
|||
sphereCenter = cubeCenter + sphereOffset * axis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
|
||||
if (!ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube. axis = " << axis << std::endl;
|
||||
if (!ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube. axis = " << axis
|
||||
<< std::endl;
|
||||
}
|
||||
CollisionInfo* collision = collisions[0];
|
||||
if (!collision) {
|
||||
|
@ -721,17 +720,13 @@ void ShapeColliderTests::sphereTouchesAACubeFaces() {
|
|||
glm::vec3 expectedPenetration = - overlap * axis;
|
||||
if (glm::distance(expectedPenetration, collision->_penetration) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: penetration = " << collision->_penetration
|
||||
<< " expected " << expectedPenetration
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " expected " << expectedPenetration << " axis = " << axis << std::endl;
|
||||
}
|
||||
|
||||
glm::vec3 expectedContact = sphereCenter - sphereRadius * axis;
|
||||
if (glm::distance(expectedContact, collision->_contactPoint) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: contactPoint = " << collision->_contactPoint
|
||||
<< " expected " << expectedContact
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " expected " << expectedContact << " axis = " << axis << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -743,32 +738,26 @@ void ShapeColliderTests::sphereTouchesAACubeFaces() {
|
|||
sphereCenter = cubeCenter + sphereOffset * axis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
|
||||
if (!ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (!ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube."
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " axis = " << axis << std::endl;
|
||||
}
|
||||
CollisionInfo* collision = collisions[0];
|
||||
if (!collision) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: no CollisionInfo on y-axis."
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " axis = " << axis << std::endl;
|
||||
}
|
||||
|
||||
glm::vec3 expectedPenetration = - overlap * axis;
|
||||
if (glm::distance(expectedPenetration, collision->_penetration) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: penetration = " << collision->_penetration
|
||||
<< " expected " << expectedPenetration
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " expected " << expectedPenetration << " axis = " << axis << std::endl;
|
||||
}
|
||||
|
||||
glm::vec3 expectedContact = sphereCenter - sphereRadius * axis;
|
||||
if (glm::distance(expectedContact, collision->_contactPoint) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: contactPoint = " << collision->_contactPoint
|
||||
<< " expected " << expectedContact
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " expected " << expectedContact << " axis = " << axis << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -817,7 +806,7 @@ void ShapeColliderTests::sphereTouchesAACubeEdges() {
|
|||
sphereCenter = cubeCenter + (lengthAxis * 0.5f * cubeSide + sphereRadius - overlap) * axis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
|
||||
if (!ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (!ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should collide with cube. axis = " << axis << std::endl;
|
||||
}
|
||||
CollisionInfo* collision = collisions[i];
|
||||
|
@ -828,17 +817,13 @@ void ShapeColliderTests::sphereTouchesAACubeEdges() {
|
|||
glm::vec3 expectedPenetration = - overlap * axis;
|
||||
if (glm::distance(expectedPenetration, collision->_penetration) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: penetration = " << collision->_penetration
|
||||
<< " expected " << expectedPenetration
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " expected " << expectedPenetration << " axis = " << axis << std::endl;
|
||||
}
|
||||
|
||||
glm::vec3 expectedContact = sphereCenter - sphereRadius * axis;
|
||||
if (glm::distance(expectedContact, collision->_contactPoint) > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: contactPoint = " << collision->_contactPoint
|
||||
<< " expected " << expectedContact
|
||||
<< " axis = " << axis
|
||||
<< std::endl;
|
||||
<< " expected " << expectedContact << " axis = " << axis << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -858,42 +843,42 @@ void ShapeColliderTests::sphereMissesAACube() {
|
|||
// top
|
||||
sphereCenter = cubeCenter + sphereOffset * yAxis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl;
|
||||
}
|
||||
|
||||
// bottom
|
||||
sphereCenter = cubeCenter - sphereOffset * yAxis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl;
|
||||
}
|
||||
|
||||
// left
|
||||
sphereCenter = cubeCenter + sphereOffset * xAxis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl;
|
||||
}
|
||||
|
||||
// right
|
||||
sphereCenter = cubeCenter - sphereOffset * xAxis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl;
|
||||
}
|
||||
|
||||
// forward
|
||||
sphereCenter = cubeCenter + sphereOffset * zAxis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl;
|
||||
}
|
||||
|
||||
// back
|
||||
sphereCenter = cubeCenter - sphereOffset * zAxis;
|
||||
sphere.setTranslation(sphereCenter);
|
||||
if (ShapeCollider::sphereAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
if (ShapeCollider::sphereVsAACube(&sphere, cubeCenter, cubeSide, collisions)){
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: sphere should NOT collide with cube" << std::endl;
|
||||
}
|
||||
}
|
||||
|
@ -965,7 +950,8 @@ void ShapeColliderTests::rayHitsSphere() {
|
|||
float expectedDistance = startDistance - radius;
|
||||
float relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
if (relativeError > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray sphere intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray sphere intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -1022,7 +1008,8 @@ void ShapeColliderTests::rayBarelyMissesSphere() {
|
|||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should just barely miss sphere" << std::endl;
|
||||
}
|
||||
if (distance != FLT_MAX) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
|
||||
<< std::endl;
|
||||
}
|
||||
|
||||
// translate and rotate the whole system...
|
||||
|
@ -1040,7 +1027,8 @@ void ShapeColliderTests::rayBarelyMissesSphere() {
|
|||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should just barely miss sphere" << std::endl;
|
||||
}
|
||||
if (distance != FLT_MAX) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
|
||||
<< std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1062,7 +1050,8 @@ void ShapeColliderTests::rayHitsCapsule() {
|
|||
float expectedDistance = startDistance - radius;
|
||||
float relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
if (relativeError > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
|
||||
// toward top of cylindrical wall
|
||||
|
@ -1073,7 +1062,8 @@ void ShapeColliderTests::rayHitsCapsule() {
|
|||
}
|
||||
relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
if (relativeError > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
|
||||
// toward top cap
|
||||
|
@ -1085,7 +1075,8 @@ void ShapeColliderTests::rayHitsCapsule() {
|
|||
}
|
||||
relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
if (relativeError > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
|
||||
const float EDGE_CASE_SLOP_FACTOR = 20.0f;
|
||||
|
@ -1100,7 +1091,8 @@ void ShapeColliderTests::rayHitsCapsule() {
|
|||
relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
// for edge cases we allow a LOT of error
|
||||
if (relativeError > EDGE_CASE_SLOP_FACTOR * EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
|
||||
// toward tip of bottom cap
|
||||
|
@ -1113,7 +1105,8 @@ void ShapeColliderTests::rayHitsCapsule() {
|
|||
relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
// for edge cases we allow a LOT of error
|
||||
if (relativeError > EDGE_CASE_SLOP_FACTOR * EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
|
||||
// toward edge of capsule cylindrical face
|
||||
|
@ -1127,7 +1120,8 @@ void ShapeColliderTests::rayHitsCapsule() {
|
|||
relativeError = fabsf(distance - expectedDistance) / startDistance;
|
||||
// for edge cases we allow a LOT of error
|
||||
if (relativeError > EDGE_CASE_SLOP_FACTOR * EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray capsule intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
}
|
||||
// TODO: test at steep angles near cylinder/cap junction
|
||||
|
@ -1154,7 +1148,8 @@ void ShapeColliderTests::rayMissesCapsule() {
|
|||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss capsule" << std::endl;
|
||||
}
|
||||
if (distance != FLT_MAX) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
|
||||
<< std::endl;
|
||||
}
|
||||
|
||||
// below bottom cap
|
||||
|
@ -1164,7 +1159,8 @@ void ShapeColliderTests::rayMissesCapsule() {
|
|||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss capsule" << std::endl;
|
||||
}
|
||||
if (distance != FLT_MAX) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
|
||||
<< std::endl;
|
||||
}
|
||||
|
||||
// past edge of capsule cylindrical face
|
||||
|
@ -1175,7 +1171,8 @@ void ShapeColliderTests::rayMissesCapsule() {
|
|||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss capsule" << std::endl;
|
||||
}
|
||||
if (distance != FLT_MAX) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
|
||||
<< std::endl;
|
||||
}
|
||||
}
|
||||
// TODO: test at steep angles near edge
|
||||
|
@ -1201,7 +1198,8 @@ void ShapeColliderTests::rayHitsPlane() {
|
|||
float expectedDistance = SQUARE_ROOT_OF_3 * planeDistanceFromOrigin;
|
||||
float relativeError = fabsf(distance - expectedDistance) / planeDistanceFromOrigin;
|
||||
if (relativeError > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
|
||||
// rotate the whole system and try again
|
||||
|
@ -1222,7 +1220,8 @@ void ShapeColliderTests::rayHitsPlane() {
|
|||
expectedDistance = SQUARE_ROOT_OF_3 * planeDistanceFromOrigin;
|
||||
relativeError = fabsf(distance - expectedDistance) / planeDistanceFromOrigin;
|
||||
if (relativeError > EPSILON) {
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = " << relativeError << std::endl;
|
||||
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray plane intersection distance error = "
|
||||
<< relativeError << std::endl;
|
||||
}
|
||||
}
@@ -1243,7 +1242,8 @@ void ShapeColliderTests::rayMissesPlane() {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl;
}
if (distance != FLT_MAX) {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
<< std::endl;
}

// rotate the whole system and try again

@@ -1261,7 +1261,8 @@ void ShapeColliderTests::rayMissesPlane() {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl;
}
if (distance != FLT_MAX) {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
<< std::endl;
}
}

@@ -1275,7 +1276,8 @@ void ShapeColliderTests::rayMissesPlane() {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl;
}
if (distance != FLT_MAX) {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
<< std::endl;
}

// rotate the whole system and try again

@@ -1293,12 +1295,47 @@ void ShapeColliderTests::rayMissesPlane() {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: ray should miss plane" << std::endl;
}
if (distance != FLT_MAX) {
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss" << std::endl;
std::cout << __FILE__ << ":" << __LINE__ << " ERROR: distance should be unchanged after intersection miss"
<< std::endl;
}
}
}

void ShapeColliderTests::measureTimeOfCollisionDispatch() {
/* KEEP for future manual testing
// create two non-colliding spheres
float radiusA = 7.0f;
float radiusB = 3.0f;
float alpha = 1.2f;
float beta = 1.3f;
glm::vec3 offsetDirection = glm::normalize(glm::vec3(1.0f, 2.0f, 3.0f));
float offsetDistance = alpha * radiusA + beta * radiusB;

SphereShape sphereA(radiusA, origin);
SphereShape sphereB(radiusB, offsetDistance * offsetDirection);
CollisionList collisions(16);

//int numTests = 1;
quint64 oldTime;
quint64 newTime;
int numTests = 100000000;
{
quint64 startTime = usecTimestampNow();
for (int i = 0; i < numTests; ++i) {
ShapeCollider::collideShapes(&sphereA, &sphereB, collisions);
}
quint64 endTime = usecTimestampNow();
std::cout << numTests << " non-colliding collisions in " << (endTime - startTime) << " usec" << std::endl;
newTime = endTime - startTime;
}
*/
}

void ShapeColliderTests::runAllTests() {
ShapeCollider::initDispatchTable();

//measureTimeOfCollisionDispatch();

sphereMissesSphere();
sphereTouchesSphere();
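The measureTimeOfCollisionDispatch() body above is deliberately kept commented out ("KEEP for future manual testing"). Purely as an illustration of the same measurement pattern, here is a self-contained sketch in which std::chrono stands in for the engine's usecTimestampNow() and a trivial loop body stands in for ShapeCollider::collideShapes(); it is not code from the repository.

```cpp
// Sketch only: the timing pattern from measureTimeOfCollisionDispatch(), made
// self-contained. std::chrono replaces usecTimestampNow(); the loop body is a
// placeholder for ShapeCollider::collideShapes(&sphereA, &sphereB, collisions).
#include <chrono>
#include <iostream>

int main() {
    const int numTests = 100000000;
    long long sink = 0;  // printed at the end so the loop is not optimized away

    auto startTime = std::chrono::steady_clock::now();
    for (int i = 0; i < numTests; ++i) {
        sink += i & 1;  // placeholder for the collision dispatch call
    }
    auto endTime = std::chrono::steady_clock::now();

    auto usec = std::chrono::duration_cast<std::chrono::microseconds>(endTime - startTime).count();
    std::cout << numTests << " iterations in " << usec << " usec (sink = " << sink << ")" << std::endl;
    return 0;
}
```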
@@ -35,6 +35,8 @@ namespace ShapeColliderTests {
void rayHitsPlane();
void rayMissesPlane();

void measureTimeOfCollisionDispatch();

void runAllTests();
}

@@ -102,8 +102,7 @@ void VerletShapeTests::sphereMissesSphere() {

if (collisions.size() > 0) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: expected empty collision list but size is " << collisions.size()
<< std::endl;
<< " ERROR: expected empty collision list but size is " << collisions.size() << std::endl;
}
}

@@ -159,7 +158,7 @@ void VerletShapeTests::sphereTouchesSphere() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of sphereA

@@ -169,7 +168,7 @@ void VerletShapeTests::sphereTouchesSphere() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}
}

@@ -189,7 +188,7 @@ void VerletShapeTests::sphereTouchesSphere() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of sphereA

@@ -199,7 +198,7 @@ void VerletShapeTests::sphereTouchesSphere() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}
}
}

@@ -247,23 +246,20 @@ void VerletShapeTests::sphereMissesCapsule() {
if (ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: sphere and capsule should NOT touch"
<< std::endl;
<< " ERROR: sphere and capsule should NOT touch" << std::endl;
}

// capsuleB against sphereA
if (ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: sphere and capsule should NOT touch"
<< std::endl;
<< " ERROR: sphere and capsule should NOT touch" << std::endl;
}
}

if (collisions.size() > 0) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: expected empty collision list but size is " << collisions.size()
<< std::endl;
<< " ERROR: expected empty collision list but size is " << collisions.size() << std::endl;
}
}
@@ -297,8 +293,7 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: sphere and capsule should touch"
<< std::endl;
<< " ERROR: sphere and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -310,7 +305,7 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of sphereA

@@ -319,15 +314,14 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}

// capsuleB collides with sphereA
if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and sphere should touch"
<< std::endl;
<< " ERROR: capsule and sphere should touch" << std::endl;
} else {
++numCollisions;
}

@@ -335,33 +329,41 @@ void VerletShapeTests::sphereTouchesCapsule() {
// penetration points from sphereA into capsuleB
collision = collisions.getCollision(numCollisions - 1);
expectedPenetration = - (radialOffset - totalRadius) * xAxis;
if (collision->_shapeA == &sphereA) {
// the ShapeCollider swapped the order of the shapes
expectedPenetration *= -1.0f;
}
inaccuracy = glm::length(collision->_penetration - expectedPenetration);
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of capsuleB
glm::vec3 BtoA = sphereA.getTranslation() - capsuleB.getTranslation();
glm::vec3 closestApproach = capsuleB.getTranslation() + glm::dot(BtoA, yAxis) * yAxis;
expectedContactPoint = closestApproach + radiusB * glm::normalize(BtoA - closestApproach);
if (collision->_shapeA == &sphereA) {
// the ShapeCollider swapped the order of the shapes
closestApproach = sphereA.getTranslation() - glm::dot(BtoA, yAxis) * yAxis;
expectedContactPoint = closestApproach - radiusB * glm::normalize(BtoA - closestApproach);
}
inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint);
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}
}
{ // sphereA hits end cap at axis
glm::vec3 axialOffset = (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis;
sphereA.setTranslation(axialOffset * yAxis);
sphereA.setTranslation(axialOffset);

if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: sphere and capsule should touch"
<< std::endl;
<< " ERROR: sphere and capsule should touch" << std::endl;
} else {
++numCollisions;
}
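Several of the checks in the hunk above have to account for ShapeCollider reporting the colliding pair in either order: when collision->_shapeA turns out to be the sphere, the expected penetration simply flips sign before the comparison. A small self-contained sketch of that normalization idiom follows; the structs are illustrative stand-ins, not the repository's Shape or CollisionInfo types.

```cpp
// Sketch only: normalizing an expected penetration when the collider may
// report the shape pair in swapped order. Types are illustrative stand-ins.
#include <cmath>
#include <iostream>

struct Vec3 { float x, y, z; };

static Vec3 negate(const Vec3& v) { return Vec3{ -v.x, -v.y, -v.z }; }
static Vec3 sub(const Vec3& a, const Vec3& b) { return Vec3{ a.x - b.x, a.y - b.y, a.z - b.z }; }
static float length(const Vec3& v) { return std::sqrt(v.x * v.x + v.y * v.y + v.z * v.z); }

struct Collision {
    const void* shapeA;   // whichever shape the collider listed first
    Vec3 penetration;     // points from shapeA into shapeB
};

// Flip the expectation if the collider swapped the pair, then compare.
bool penetrationMatches(const Collision& c, const void* expectedFirstShape,
                        Vec3 expectedPenetration, float epsilon) {
    if (c.shapeA != expectedFirstShape) {
        expectedPenetration = negate(expectedPenetration);  // order was swapped
    }
    return length(sub(c.penetration, expectedPenetration)) <= epsilon;
}

int main() {
    int capsuleB = 0, sphereA = 0;  // dummies standing in for shape objects
    // The collider happened to list the sphere first, so the expectation is flipped.
    Collision c{ &sphereA, Vec3{ -0.5f, 0.0f, 0.0f } };
    bool ok = penetrationMatches(c, &capsuleB, Vec3{ 0.5f, 0.0f, 0.0f }, 1.0e-6f);
    std::cout << (ok ? "match" : "mismatch") << std::endl;
    return ok ? 0 : 1;
}
```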
@@ -373,7 +375,7 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of sphereA

@@ -382,15 +384,14 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}

// capsuleB collides with sphereA
if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and sphere should touch"
<< std::endl;
<< " ERROR: capsule and sphere should touch" << std::endl;
} else {
++numCollisions;
}

@@ -398,33 +399,40 @@ void VerletShapeTests::sphereTouchesCapsule() {
// penetration points from sphereA into capsuleB
collision = collisions.getCollision(numCollisions - 1);
expectedPenetration = ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis;
if (collision->_shapeA == &sphereA) {
// the ShapeCollider swapped the order of the shapes
expectedPenetration *= -1.0f;
}
inaccuracy = glm::length(collision->_penetration - expectedPenetration);
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of capsuleB
glm::vec3 endPoint;
capsuleB.getEndPoint(endPoint);
expectedContactPoint = endPoint + radiusB * yAxis;
if (collision->_shapeA == &sphereA) {
// the ShapeCollider swapped the order of the shapes
expectedContactPoint = axialOffset - radiusA * yAxis;
}
inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint);
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}
}
{ // sphereA hits start cap at axis
glm::vec3 axialOffset = - (halfHeightB + alpha * radiusA + beta * radiusB) * yAxis;
sphereA.setTranslation(axialOffset * yAxis);
sphereA.setTranslation(axialOffset);

if (!ShapeCollider::collideShapes(&sphereA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: sphere and capsule should touch"
<< std::endl;
<< " ERROR: sphere and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -436,7 +444,7 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of sphereA

@@ -445,15 +453,14 @@ void VerletShapeTests::sphereTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}

// capsuleB collides with sphereA
if (!ShapeCollider::collideShapes(&capsuleB, &sphereA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and sphere should touch"
<< std::endl;
<< " ERROR: capsule and sphere should touch" << std::endl;
} else {
++numCollisions;
}
@@ -461,22 +468,30 @@ void VerletShapeTests::sphereTouchesCapsule() {
// penetration points from sphereA into capsuleB
collision = collisions.getCollision(numCollisions - 1);
expectedPenetration = - ((1.0f - alpha) * radiusA + (1.0f - beta) * radiusB) * yAxis;
if (collision->_shapeA == &sphereA) {
// the ShapeCollider swapped the order of the shapes
expectedPenetration *= -1.0f;
}
inaccuracy = glm::length(collision->_penetration - expectedPenetration);
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

// contactPoint is on surface of capsuleB
glm::vec3 startPoint;
capsuleB.getStartPoint(startPoint);
expectedContactPoint = startPoint - radiusB * yAxis;
if (collision->_shapeA == &sphereA) {
// the ShapeCollider swapped the order of the shapes
expectedContactPoint = axialOffset + radiusA * yAxis;
}
inaccuracy = glm::length(collision->_contactPoint - expectedContactPoint);
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}
}
if (collisions.size() != numCollisions) {

@@ -515,14 +530,12 @@ void VerletShapeTests::capsuleMissesCapsule() {
if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should NOT touch"
<< std::endl;
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
}
if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should NOT touch"
<< std::endl;
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
}

// end to end

@@ -530,14 +543,12 @@ void VerletShapeTests::capsuleMissesCapsule() {
if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should NOT touch"
<< std::endl;
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
}
if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should NOT touch"
<< std::endl;
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
}

// rotate B and move it to the side

@@ -547,20 +558,17 @@ void VerletShapeTests::capsuleMissesCapsule() {
if (ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should NOT touch"
<< std::endl;
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
}
if (ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should NOT touch"
<< std::endl;
<< " ERROR: capsule and capsule should NOT touch" << std::endl;
}

if (collisions.size() > 0) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: expected empty collision list but size is " << collisions.size()
<< std::endl;
<< " ERROR: expected empty collision list but size is " << collisions.size() << std::endl;
}
}
@@ -594,16 +602,14 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -615,16 +621,14 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -638,16 +642,14 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -664,8 +666,7 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -676,7 +677,7 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration;
<< " actual = " << collision->_penetration << std::endl;
}

glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * xAxis;

@@ -684,15 +685,14 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint;
<< " actual = " << collision->_contactPoint << std::endl;
}

// capsuleB vs capsuleA
if (!ShapeCollider::collideShapes(&capsuleB, &capsuleA, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -703,8 +703,7 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration
<< std::endl;
<< " actual = " << collision->_penetration << std::endl;
}

expectedContactPoint = capsuleB.getTranslation() - (radiusB + halfHeightB) * xAxis;

@@ -712,8 +711,7 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint
<< std::endl;
<< " actual = " << collision->_contactPoint << std::endl;
}
}
@@ -729,8 +727,7 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (!ShapeCollider::collideShapes(&capsuleA, &capsuleB, collisions))
{
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: capsule and capsule should touch"
<< std::endl;
<< " ERROR: capsule and capsule should touch" << std::endl;
} else {
++numCollisions;
}

@@ -741,8 +738,7 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad penetration: expected = " << expectedPenetration
<< " actual = " << collision->_penetration
<< std::endl;
<< " actual = " << collision->_penetration << std::endl;
}

glm::vec3 expectedContactPoint = capsuleA.getTranslation() + radiusA * zAxis + shift * yAxis;

@@ -750,13 +746,14 @@ void VerletShapeTests::capsuleTouchesCapsule() {
if (fabs(inaccuracy) > EPSILON) {
std::cout << __FILE__ << ":" << __LINE__
<< " ERROR: bad contactPoint: expected = " << expectedContactPoint
<< " actual = " << collision->_contactPoint
<< std::endl;
<< " actual = " << collision->_contactPoint << std::endl;
}
}
}

void VerletShapeTests::runAllTests() {
ShapeCollider::initDispatchTable();

setSpherePosition();
sphereMissesSphere();
sphereTouchesSphere();

@@ -16,6 +16,7 @@ int main(int argc, char** argv) {
MovingMinMaxAvgTests::runAllTests();
MovingPercentileTests::runAllTests();
AngularConstraintTests::runAllTests();
printf("tests complete, press enter to exit\n");
getchar();
return 0;
}
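The final hunk touches the test driver's main(), which simply calls each suite's runAllTests() in sequence. Purely as a sketch of that wiring (with empty stand-in suites so it compiles on its own), adding another suite such as VerletShapeTests would follow the same one-line pattern:

```cpp
// Sketch only: the shape of the test driver's main(). The suite bodies are
// empty stand-ins here; in the repository each namespace lives in its own
// source file and actually runs its tests.
#include <cstdio>

namespace MovingMinMaxAvgTests   { void runAllTests() { /* ... */ } }
namespace MovingPercentileTests  { void runAllTests() { /* ... */ } }
namespace AngularConstraintTests { void runAllTests() { /* ... */ } }
namespace VerletShapeTests       { void runAllTests() { /* ... */ } }  // hypothetical addition

int main(int, char**) {
    MovingMinMaxAvgTests::runAllTests();
    MovingPercentileTests::runAllTests();
    AngularConstraintTests::runAllTests();
    VerletShapeTests::runAllTests();  // mirrors how the hunk above adds a suite
    printf("tests complete, press enter to exit\n");
    getchar();
    return 0;
}
```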