
merge upstream/master into andrew/ragdoll

Andrew Meadows 2014-08-29 15:48:40 -07:00
commit ebcc960ec7
89 changed files with 4597 additions and 1354 deletions
BUILD.md
assignment-client/src
domain-server/resources/web/settings
examples
interface
libraries
tests
audio/src
jitter/src
shared/src

View file

@ -96,7 +96,9 @@ Currently building on Windows has been tested using the following compilers:
#####Windows SDK 7.1
Whichever version of Visual Studio you use, first install [Microsoft Windows SDK for Windows 7 and .NET Framework 4](http://www.microsoft.com/en-us/download/details.aspx?id=8279).
Whichever version of Visual Studio you use, you will need [Microsoft Windows SDK for Windows 7 and .NET Framework 4](http://www.microsoft.com/en-us/download/details.aspx?id=8279).
NOTE: If using Visual Studio C++ 2010 Express, you need to follow a specific install order. See below before installing the Windows SDK.
######Windows 8.1
You may have already downloaded the Windows 8 SDK (e.g. if you have previously installed Visual Studio 2013). If so, change CMAKE_PREFIX_PATH in %HIFI_DIR%\CMakeLists.txt to point to the Windows 8 SDK binaries. The default path is `C:\Program Files (x86)\Windows Kits\8.1\Lib\winv6.3\um\x86`
@ -109,6 +111,14 @@ The following patches/service packs are also required:
* [VS2010 SP1](http://www.microsoft.com/en-us/download/details.aspx?id=23691)
* [VS2010 SP1 Compiler Update](http://www.microsoft.com/en-us/download/details.aspx?id=4422)
IMPORTANT: Use the following install order:
1. Visual Studio C++ 2010 Express
2. Windows SDK 7.1
3. VS2010 SP1
4. VS2010 SP1 Compiler Update
If you get an error while installing the VS2010 SP1 Compiler update saying that you don't have the Windows SDK installed, then uninstall all of the above and start again in the correct order.
Some of the build instructions will ask you to start a Visual Studio Command Prompt. You should have a shortcut in your Start menu called "Open Visual Studio Command Prompt (2010)" which will do so.
#####Visual Studio 2013

View file

@ -33,12 +33,17 @@
#include "Agent.h"
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
Agent::Agent(const QByteArray& packet) :
ThreadedAssignment(packet),
_voxelEditSender(),
_particleEditSender(),
_modelEditSender(),
_receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, 1, false, 1, 0, false),
_receivedAudioStream(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES,
InboundAudioStream::Settings(0, false, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, false,
DEFAULT_WINDOW_STARVE_THRESHOLD, DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES,
DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION, false)),
_avatarHashMap()
{
// be the parent of the script engine so it gets moved when we do
@ -148,7 +153,7 @@ void Agent::readPendingDatagrams() {
_voxelViewer.processDatagram(mutablePacket, sourceNode);
}
} else if (datagramPacketType == PacketTypeMixedAudio) {
} else if (datagramPacketType == PacketTypeMixedAudio || datagramPacketType == PacketTypeSilentAudioFrame) {
_receivedAudioStream.parseData(receivedPacket);
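The new eight-argument Settings value above bundles the jitter-buffer knobs this commit threads through the audio code. A sketch of the aggregate's plausible shape, using the field names that appear later in this diff (the real declaration lives in the audio library, and the constructor's argument order here is an assumption):

```cpp
// Sketch of InboundAudioStream::Settings, inferred from the _streamSettings
// fields referenced elsewhere in this commit; not the authoritative declaration.
class InboundAudioStream {
public:
    class Settings {
    public:
        int _staticDesiredJitterBufferFrames;  // target depth when dynamic sizing is off
        bool _dynamicJitterBuffers;            // re-evaluate the desired depth at runtime
        int _maxFramesOverDesired;             // frames tolerated above desired before dropping
        bool _useStDevForJitterCalc;           // stdev-of-timegaps vs. max-timegap method
        int _windowStarveThreshold;            // starves within window A that force a recalc
        int _windowSecondsForDesiredCalcOnTooManyStarves; // window A length, seconds
        int _windowSecondsForDesiredReduction; // window B length, seconds
        bool _repetitionWithFade;              // repeat-and-fade dropped frames
    };
};
```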

View file

@ -69,12 +69,12 @@ void attachNewNodeDataToNode(Node *newNode) {
}
}
bool AudioMixer::_useDynamicJitterBuffers = false;
int AudioMixer::_staticDesiredJitterBufferFrames = 0;
int AudioMixer::_maxFramesOverDesired = 0;
InboundAudioStream::Settings AudioMixer::_streamSettings;
bool AudioMixer::_printStreamStats = false;
bool AudioMixer::_enableFilter = false;
AudioMixer::AudioMixer(const QByteArray& packet) :
ThreadedAssignment(packet),
_trailingSleepRatio(1.0f),
@ -85,7 +85,12 @@ AudioMixer::AudioMixer(const QByteArray& packet) :
_sumMixes(0),
_sourceUnattenuatedZone(NULL),
_listenerUnattenuatedZone(NULL),
_lastSendAudioStreamStatsTime(usecTimestampNow())
_lastPerSecondCallbackTime(usecTimestampNow()),
_sendAudioStreamStats(false),
_datagramsReadPerCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
_timeSpentPerCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
_timeSpentPerHashMatchCallStats(0, READ_DATAGRAMS_STATS_WINDOW_SECONDS),
_readPendingCallsPerSecondStats(1, READ_DATAGRAMS_STATS_WINDOW_SECONDS)
{
}
@ -99,15 +104,44 @@ const float ATTENUATION_BEGINS_AT_DISTANCE = 1.0f;
const float ATTENUATION_AMOUNT_PER_DOUBLING_IN_DISTANCE = 0.18f;
const float ATTENUATION_EPSILON_DISTANCE = 0.1f;
void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
int AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
AvatarAudioStream* listeningNodeStream) {
// If repetition with fade is enabled:
// If streamToAdd could not provide a frame (it was starved), then we'll mix its previously-mixed frame
// This is preferable to not mixing it at all since that's equivalent to inserting silence.
// Basically, we'll repeat that last frame until it has a frame to mix. Depending on how many times
// we've repeated that frame in a row, we'll gradually fade that repeated frame into silence.
// This improves the perceived quality of the audio slightly.
float repeatedFrameFadeFactor = 1.0f;
if (!streamToAdd->lastPopSucceeded()) {
if (_streamSettings._repetitionWithFade && !streamToAdd->getLastPopOutput().isNull()) {
// repetition with fade is enabled, and we do have a valid previous frame to repeat.
// calculate its fade factor, which depends on how many times it's already been repeated.
repeatedFrameFadeFactor = calculateRepeatedFrameFadeFactor(streamToAdd->getConsecutiveNotMixedCount() - 1);
if (repeatedFrameFadeFactor == 0.0f) {
return 0;
}
} else {
return 0;
}
}
// at this point, we know streamToAdd's last pop output is valid
// if the frame we're about to mix is silent, bail
if (streamToAdd->getLastPopOutputLoudness() == 0.0f) {
return 0;
}
float bearingRelativeAngleToSource = 0.0f;
float attenuationCoefficient = 1.0f;
int numSamplesDelay = 0;
float weakChannelAmplitudeRatio = 1.0f;
bool shouldAttenuate = (streamToAdd != listeningNodeStream);
if (shouldAttenuate) {
// if the two stream pointers do not match then these are different streams
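The repeatedFrameFadeFactor computed above comes from calculateRepeatedFrameFadeFactor(), whose body is not part of this diff. A minimal sketch of the behavior the comment describes, assuming a linear fade and a fixed cutoff:

```cpp
// Hypothetical sketch: the first repeat of a starved frame plays at (or near)
// full volume, each further repeat is quieter, and after MAX_REPEATS repeats
// the factor reaches 0.0 so the caller skips mixing the stream entirely.
// The real curve and cutoff live in the audio library and may differ.
float calculateRepeatedFrameFadeFactor(int indexOfRepeat) {
    const int MAX_REPEATS = 10; // assumed
    if (indexOfRepeat >= MAX_REPEATS) {
        return 0.0f;
    }
    return 1.0f - (float)indexOfRepeat / (float)MAX_REPEATS;
}
```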
@ -122,7 +156,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
if (streamToAdd->getLastPopOutputTrailingLoudness() / distanceBetween <= _minAudibilityThreshold) {
// according to mixer performance we have decided this does not get to be mixed in
// bail out
return;
return 0;
}
++_sumMixes;
@ -222,12 +256,13 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
int delayedChannelIndex = 0;
const int SINGLE_STEREO_OFFSET = 2;
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s += 4) {
// setup the int16_t variables for the two sample sets
correctStreamSample[0] = streamPopOutput[s / 2] * attenuationCoefficient;
correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationCoefficient;
correctStreamSample[0] = streamPopOutput[s / 2] * attenuationAndFade;
correctStreamSample[1] = streamPopOutput[(s / 2) + 1] * attenuationAndFade;
delayedChannelIndex = s + (numSamplesDelay * 2) + delayedChannelOffset;
@ -243,7 +278,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
if (numSamplesDelay > 0) {
// if there was a sample delay for this stream, we need to pull samples prior to the popped output
// to stick at the beginning
float attenuationAndWeakChannelRatio = attenuationCoefficient * weakChannelAmplitudeRatio;
float attenuationAndWeakChannelRatioAndFade = attenuationCoefficient * weakChannelAmplitudeRatio * repeatedFrameFadeFactor;
AudioRingBuffer::ConstIterator delayStreamPopOutput = streamPopOutput - numSamplesDelay;
// TODO: delayStreamPopOutput may be inside the last frame written if the ringbuffer is completely full
@ -251,7 +286,7 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
for (int i = 0; i < numSamplesDelay; i++) {
int parentIndex = i * 2;
_clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatio;
_clientSamples[parentIndex + delayedChannelOffset] += *delayStreamPopOutput * attenuationAndWeakChannelRatioAndFade;
++delayStreamPopOutput;
}
}
@ -262,41 +297,82 @@ void AudioMixer::addStreamToMixForListeningNodeWithStream(PositionalAudioStream*
attenuationCoefficient = 1.0f;
}
float attenuationAndFade = attenuationCoefficient * repeatedFrameFadeFactor;
for (int s = 0; s < NETWORK_BUFFER_LENGTH_SAMPLES_STEREO; s++) {
_clientSamples[s] = glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationCoefficient),
_clientSamples[s] = glm::clamp(_clientSamples[s] + (int)(streamPopOutput[s / stereoDivider] * attenuationAndFade),
MIN_SAMPLE_VALUE, MAX_SAMPLE_VALUE);
}
}
if (_enableFilter && shouldAttenuate) {
glm::vec3 relativePosition = streamToAdd->getPosition() - listeningNodeStream->getPosition();
if (relativePosition.z < 0) { // if the source is behind us
AudioFilterHSF1s& penumbraFilter = streamToAdd->getFilter();
// calculate penumbra angle
float headPenumbraAngle = glm::angle(glm::vec3(0.0f, 0.0f, -1.0f),
glm::normalize(relativePosition));
if (relativePosition.x < 0) {
headPenumbraAngle *= -1.0f; // [-pi/2,+pi/2]
}
const float SQUARE_ROOT_OF_TWO_OVER_TWO = 0.71f; // half power
const float ONE_OVER_TWO_PI = 1.0f / TWO_PI;
const float FILTER_CUTOFF_FREQUENCY_HZ = 4000.0f;
// calculate the updated gain, frequency and slope. this will be tuned over time.
const float penumbraFilterGainL = (-1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO;
const float penumbraFilterGainR = (+1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO;
const float penumbraFilterFrequency = FILTER_CUTOFF_FREQUENCY_HZ; // constant frequency
const float penumbraFilterSlope = SQUARE_ROOT_OF_TWO_OVER_TWO; // constant slope
qDebug() << "penumbra gainL="
<< penumbraFilterGainL
<< "penumbra gainR="
<< penumbraFilterGainR
<< "penumbraAngle="
<< headPenumbraAngle;
// set the gain on both filter channels
penumbraFilter.setParameters(0, 0, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainL, penumbraFilterSlope);
penumbraFilter.setParameters(0, 1, SAMPLE_RATE, penumbraFilterFrequency, penumbraFilterGainR, penumbraFilterSlope);
penumbraFilter.render(_clientSamples, _clientSamples, NETWORK_BUFFER_LENGTH_SAMPLES_STEREO / 2);
}
}
return 1;
}
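To make the gain formulas in the filter block above concrete: headPenumbraAngle is signed into [-pi/2, +pi/2], so a source directly behind the listener (angle 0) leaves both channels at the half-power gain of roughly 0.71, while a source behind and fully to the left (angle -pi/2) raises the left gain to about 0.96 and cuts the right to about 0.46. A restatement of just that computation:

```cpp
// The penumbra gain math from the diff above, isolated. Each channel's gain
// moves linearly away from the half-power point with the bearing angle,
// boosting the ear nearer the source and attenuating the far one.
void penumbraGains(float headPenumbraAngle, float& gainL, float& gainR) {
    const float SQUARE_ROOT_OF_TWO_OVER_TWO = 0.71f; // half power
    const float TWO_PI = 6.2831853f;
    const float ONE_OVER_TWO_PI = 1.0f / TWO_PI;
    gainL = (-1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO;
    gainR = (+1.0f * ONE_OVER_TWO_PI * headPenumbraAngle) + SQUARE_ROOT_OF_TWO_OVER_TWO;
}
```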
void AudioMixer::prepareMixForListeningNode(Node* node) {
int AudioMixer::prepareMixForListeningNode(Node* node) {
AvatarAudioStream* nodeAudioStream = ((AudioMixerClientData*) node->getLinkedData())->getAvatarAudioStream();
// zero out the client mix for this node
memset(_clientSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
// loop through all other nodes that have sufficient audio to mix
int streamsMixed = 0;
foreach (const SharedNodePointer& otherNode, NodeList::getInstance()->getNodeHash()) {
if (otherNode->getLinkedData()) {
AudioMixerClientData* otherNodeClientData = (AudioMixerClientData*) otherNode->getLinkedData();
// enumerate the ARBs attached to the otherNode and add all that should be added to mix
const QHash<QUuid, PositionalAudioStream*>& otherNodeAudioStreams = otherNodeClientData->getAudioStreams();
QHash<QUuid, PositionalAudioStream*>::ConstIterator i;
for (i = otherNodeAudioStreams.begin(); i != otherNodeAudioStreams.constEnd(); i++) {
for (i = otherNodeAudioStreams.constBegin(); i != otherNodeAudioStreams.constEnd(); i++) {
PositionalAudioStream* otherNodeStream = i.value();
if ((*otherNode != *node || otherNodeStream->shouldLoopbackForNode())
&& otherNodeStream->lastPopSucceeded()
&& otherNodeStream->getLastPopOutputTrailingLoudness() > 0.0f) {
addStreamToMixForListeningNodeWithStream(otherNodeStream, nodeAudioStream);
if (*otherNode != *node || otherNodeStream->shouldLoopbackForNode()) {
streamsMixed += addStreamToMixForListeningNodeWithStream(otherNodeStream, nodeAudioStream);
}
}
}
}
return streamsMixed;
}
void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const HifiSockAddr& senderSockAddr) {
@ -332,7 +408,7 @@ void AudioMixer::readPendingDatagram(const QByteArray& receivedPacket, const Hif
void AudioMixer::sendStatsPacket() {
static QJsonObject statsObject;
statsObject["useDynamicJitterBuffers"] = _useDynamicJitterBuffers;
statsObject["useDynamicJitterBuffers"] = _streamSettings._dynamicJitterBuffers;
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
@ -358,9 +434,42 @@ void AudioMixer::sendStatsPacket() {
int sizeOfStats = 0;
int TOO_BIG_FOR_MTU = 1200; // some extra space for JSONification
QString property = "readPendingDatagram_calls_stats";
QString value = getReadPendingDatagramsCallsPerSecondsStatsString();
statsObject2[qPrintable(property)] = value;
somethingToSend = true;
sizeOfStats += property.size() + value.size();
property = "readPendingDatagram_packets_per_call_stats";
value = getReadPendingDatagramsPacketsPerCallStatsString();
statsObject2[qPrintable(property)] = value;
somethingToSend = true;
sizeOfStats += property.size() + value.size();
property = "readPendingDatagram_packets_time_per_call_stats";
value = getReadPendingDatagramsTimeStatsString();
statsObject2[qPrintable(property)] = value;
somethingToSend = true;
sizeOfStats += property.size() + value.size();
property = "readPendingDatagram_hashmatch_time_per_call_stats";
value = getReadPendingDatagramsHashMatchTimeStatsString();
statsObject2[qPrintable(property)] = value;
somethingToSend = true;
sizeOfStats += property.size() + value.size();
NodeList* nodeList = NodeList::getInstance();
int clientNumber = 0;
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
// if we're too large, send the packet
if (sizeOfStats > TOO_BIG_FOR_MTU) {
nodeList->sendStatsToDomainServer(statsObject2);
sizeOfStats = 0;
statsObject2 = QJsonObject(); // clear it
somethingToSend = false;
}
clientNumber++;
AudioMixerClientData* clientData = static_cast<AudioMixerClientData*>(node->getLinkedData());
if (clientData) {
@ -370,14 +479,6 @@ void AudioMixer::sendStatsPacket() {
somethingToSend = true;
sizeOfStats += property.size() + value.size();
}
// if we're too large, send the packet
if (sizeOfStats > TOO_BIG_FOR_MTU) {
nodeList->sendStatsToDomainServer(statsObject2);
sizeOfStats = 0;
statsObject2 = QJsonObject(); // clear it
somethingToSend = false;
}
}
if (somethingToSend) {
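This hunk also moves the oversize check to the top of the per-node loop (the old bottom-of-loop copy is deleted further down), so the accumulated stats object is flushed before the next client's entry is appended. The pattern in isolation, with a stand-in sender (sendFn is hypothetical; the code above calls NodeList::sendStatsToDomainServer directly):

```cpp
#include <QJsonObject>
#include <QString>

// Flush-before-append chunking: send the accumulated JSON once it crosses the
// ~1200-byte MTU budget, then start a fresh object for subsequent entries.
template <typename SendFn>
void appendStat(QJsonObject& stats, int& sizeOfStats, const QString& key,
                const QString& value, int budget, SendFn sendFn) {
    if (sizeOfStats > budget) {
        sendFn(stats);         // ship what we have
        stats = QJsonObject(); // clear it, as above
        sizeOfStats = 0;
    }
    stats[key] = value;
    sizeOfStats += key.size() + value.size();
}
```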
@ -448,41 +549,81 @@ void AudioMixer::run() {
if (settingsObject.contains(AUDIO_GROUP_KEY)) {
QJsonObject audioGroupObject = settingsObject[AUDIO_GROUP_KEY].toObject();
// check the payload to see if we have asked for dynamicJitterBuffer support
const QString DYNAMIC_JITTER_BUFFER_JSON_KEY = "A-dynamic-jitter-buffer";
bool shouldUseDynamicJitterBuffers = audioGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
if (shouldUseDynamicJitterBuffers) {
_streamSettings._dynamicJitterBuffers = audioGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
if (_streamSettings._dynamicJitterBuffers) {
qDebug() << "Enable dynamic jitter buffers.";
_useDynamicJitterBuffers = true;
} else {
qDebug() << "Dynamic jitter buffers disabled.";
_useDynamicJitterBuffers = false;
}
bool ok;
const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "B-desired-jitter-buffer-frames";
_staticDesiredJitterBufferFrames = audioGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok);
const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "B-static-desired-jitter-buffer-frames";
_streamSettings._staticDesiredJitterBufferFrames = audioGroupObject[DESIRED_JITTER_BUFFER_FRAMES_KEY].toString().toInt(&ok);
if (!ok) {
_staticDesiredJitterBufferFrames = DEFAULT_DESIRED_JITTER_BUFFER_FRAMES;
_streamSettings._staticDesiredJitterBufferFrames = DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES;
}
qDebug() << "Static desired jitter buffer frames:" << _staticDesiredJitterBufferFrames;
qDebug() << "Static desired jitter buffer frames:" << _streamSettings._staticDesiredJitterBufferFrames;
const QString MAX_FRAMES_OVER_DESIRED_JSON_KEY = "C-max-frames-over-desired";
_maxFramesOverDesired = audioGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok);
_streamSettings._maxFramesOverDesired = audioGroupObject[MAX_FRAMES_OVER_DESIRED_JSON_KEY].toString().toInt(&ok);
if (!ok) {
_maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED;
_streamSettings._maxFramesOverDesired = DEFAULT_MAX_FRAMES_OVER_DESIRED;
}
qDebug() << "Max frames over desired:" << _streamSettings._maxFramesOverDesired;
const QString USE_STDEV_FOR_DESIRED_CALC_JSON_KEY = "D-use-stdev-for-desired-calc";
_streamSettings._useStDevForJitterCalc = audioGroupObject[USE_STDEV_FOR_DESIRED_CALC_JSON_KEY].toBool();
if (_streamSettings._useStDevForJitterCalc) {
qDebug() << "Using Philip's stdev method for jitter calc if dynamic jitter buffers enabled";
} else {
qDebug() << "Using Fred's max-gap method for jitter calc if dynamic jitter buffers enabled";
}
qDebug() << "Max frames over desired:" << _maxFramesOverDesired;
const QString PRINT_STREAM_STATS_JSON_KEY = "H-print-stream-stats";
const QString WINDOW_STARVE_THRESHOLD_JSON_KEY = "E-window-starve-threshold";
_streamSettings._windowStarveThreshold = audioGroupObject[WINDOW_STARVE_THRESHOLD_JSON_KEY].toString().toInt(&ok);
if (!ok) {
_streamSettings._windowStarveThreshold = DEFAULT_WINDOW_STARVE_THRESHOLD;
}
qDebug() << "Window A starve threshold:" << _streamSettings._windowStarveThreshold;
const QString WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY = "F-window-seconds-for-desired-calc-on-too-many-starves";
_streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = audioGroupObject[WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES_JSON_KEY].toString().toInt(&ok);
if (!ok) {
_streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES;
}
qDebug() << "Window A length:" << _streamSettings._windowSecondsForDesiredCalcOnTooManyStarves << "seconds";
const QString WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY = "G-window-seconds-for-desired-reduction";
_streamSettings._windowSecondsForDesiredReduction = audioGroupObject[WINDOW_SECONDS_FOR_DESIRED_REDUCTION_JSON_KEY].toString().toInt(&ok);
if (!ok) {
_streamSettings._windowSecondsForDesiredReduction = DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION;
}
qDebug() << "Window B length:" << _streamSettings._windowSecondsForDesiredReduction << "seconds";
const QString REPETITION_WITH_FADE_JSON_KEY = "H-repetition-with-fade";
_streamSettings._repetitionWithFade = audioGroupObject[REPETITION_WITH_FADE_JSON_KEY].toBool();
if (_streamSettings._repetitionWithFade) {
qDebug() << "Repetition with fade enabled";
} else {
qDebug() << "Repetition with fade disabled";
}
const QString PRINT_STREAM_STATS_JSON_KEY = "I-print-stream-stats";
_printStreamStats = audioGroupObject[PRINT_STREAM_STATS_JSON_KEY].toBool();
if (_printStreamStats) {
qDebug() << "Stream stats will be printed to stdout";
}
const QString UNATTENUATED_ZONE_KEY = "D-unattenuated-zone";
const QString FILTER_KEY = "J-enable-filter";
_enableFilter = audioGroupObject[FILTER_KEY].toBool();
if (_enableFilter) {
qDebug() << "Filter enabled";
}
const QString UNATTENUATED_ZONE_KEY = "Z-unattenuated-zone";
QString unattenuatedZoneString = audioGroupObject[UNATTENUATED_ZONE_KEY].toString();
if (!unattenuatedZoneString.isEmpty()) {
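Every numeric setting above repeats the same parse: read the JSON value as a string, convert with toInt(&ok), and fall back to a default when conversion fails (domain settings arrive as strings). Factored into a sketch helper (the helper name is hypothetical; the mixer inlines the pattern as shown):

```cpp
#include <QJsonObject>
#include <QString>

// Parse-with-fallback used throughout the settings block above.
static int intSettingOrDefault(const QJsonObject& audioGroupObject,
                               const QString& key, int defaultValue) {
    bool ok;
    int value = audioGroupObject[key].toString().toInt(&ok);
    return ok ? value : defaultValue;
}

// e.g. _streamSettings._maxFramesOverDesired =
//          intSettingOrDefault(audioGroupObject, MAX_FRAMES_OVER_DESIRED_JSON_KEY,
//                              DEFAULT_MAX_FRAMES_OVER_DESIRED);
```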
@ -510,9 +651,8 @@ void AudioMixer::run() {
int nextFrame = 0;
QElapsedTimer timer;
timer.start();
char* clientMixBuffer = new char[NETWORK_BUFFER_LENGTH_BYTES_STEREO + sizeof(quint16)
+ numBytesForPacketHeaderGivenPacketType(PacketTypeMixedAudio)];
char clientMixBuffer[MAX_PACKET_SIZE];
int usecToSleep = BUFFER_SEND_INTERVAL_USECS;
@ -571,15 +711,13 @@ void AudioMixer::run() {
if (!hasRatioChanged) {
++framesSinceCutoffEvent;
}
bool sendAudioStreamStats = false;
quint64 now = usecTimestampNow();
if (now - _lastSendAudioStreamStatsTime > TOO_LONG_SINCE_LAST_SEND_AUDIO_STREAM_STATS) {
_lastSendAudioStreamStatsTime = now;
sendAudioStreamStats = true;
}
bool streamStatsPrinted = false;
quint64 now = usecTimestampNow();
if (now - _lastPerSecondCallbackTime > USECS_PER_SECOND) {
perSecondActions();
_lastPerSecondCallbackTime = now;
}
foreach (const SharedNodePointer& node, nodeList->getNodeHash()) {
if (node->getLinkedData()) {
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
@ -592,43 +730,52 @@ void AudioMixer::run() {
if (node->getType() == NodeType::Agent && node->getActiveSocket()
&& nodeData->getAvatarAudioStream()) {
prepareMixForListeningNode(node.data());
int streamsMixed = prepareMixForListeningNode(node.data());
// pack header
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeMixedAudio);
char* dataAt = clientMixBuffer + numBytesPacketHeader;
char* dataAt;
if (streamsMixed > 0) {
// pack header
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeMixedAudio);
dataAt = clientMixBuffer + numBytesPacketHeader;
// pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber();
memcpy(dataAt, &sequence, sizeof(quint16));
dataAt += sizeof(quint16);
// pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber();
memcpy(dataAt, &sequence, sizeof(quint16));
dataAt += sizeof(quint16);
// pack mixed audio samples
memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
// pack mixed audio samples
memcpy(dataAt, _clientSamples, NETWORK_BUFFER_LENGTH_BYTES_STEREO);
dataAt += NETWORK_BUFFER_LENGTH_BYTES_STEREO;
} else {
// pack header
int numBytesPacketHeader = populatePacketHeader(clientMixBuffer, PacketTypeSilentAudioFrame);
dataAt = clientMixBuffer + numBytesPacketHeader;
// pack sequence number
quint16 sequence = nodeData->getOutgoingSequenceNumber();
memcpy(dataAt, &sequence, sizeof(quint16));
dataAt += sizeof(quint16);
// pack number of silent audio samples
quint16 numSilentSamples = NETWORK_BUFFER_LENGTH_SAMPLES_STEREO;
memcpy(dataAt, &numSilentSamples, sizeof(quint16));
dataAt += sizeof(quint16);
}
// send mixed audio packet
nodeList->writeDatagram(clientMixBuffer, dataAt - clientMixBuffer, node);
nodeData->incrementOutgoingMixedAudioSequenceNumber();
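Both branches above share the same wire prefix and differ only in payload, which is what lets a listener with nothing audible cost a few bytes instead of a full stereo frame. The layouts as packed by the code:

```cpp
// Wire layouts produced above (header length varies by packet type):
//
//   PacketTypeMixedAudio:
//     [packet header][quint16 sequence][NETWORK_BUFFER_LENGTH_BYTES_STEREO sample bytes]
//
//   PacketTypeSilentAudioFrame (streamsMixed == 0):
//     [packet header][quint16 sequence][quint16 numSilentSamples]
```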
// send an audio stream stats packet if it's time
if (sendAudioStreamStats) {
if (_sendAudioStreamStats) {
nodeData->sendAudioStreamStatsPackets(node);
if (_printStreamStats) {
printf("\nStats for agent %s:\n", node->getUUID().toString().toLatin1().data());
nodeData->printUpstreamDownstreamStats();
streamStatsPrinted = true;
}
_sendAudioStreamStats = false;
}
++_sumListeners;
}
}
}
if (streamStatsPrinted) {
printf("\n----------------------------------------------------------------\n");
}
++_numStatFrames;
@ -644,6 +791,90 @@ void AudioMixer::run() {
usleep(usecToSleep);
}
}
delete[] clientMixBuffer;
}
void AudioMixer::perSecondActions() {
_sendAudioStreamStats = true;
int callsLastSecond = _datagramsReadPerCallStats.getCurrentIntervalSamples();
_readPendingCallsPerSecondStats.update(callsLastSecond);
if (_printStreamStats) {
printf("\n================================================================================\n\n");
printf(" readPendingDatagram() calls per second | avg: %.2f, avg_30s: %.2f, last_second: %d\n",
_readPendingCallsPerSecondStats.getAverage(),
_readPendingCallsPerSecondStats.getWindowAverage(),
callsLastSecond);
printf(" Datagrams read per call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n",
_datagramsReadPerCallStats.getAverage(),
_datagramsReadPerCallStats.getWindowAverage(),
_datagramsReadPerCallStats.getCurrentIntervalAverage());
printf(" Usecs spent per readPendingDatagram() call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n",
_timeSpentPerCallStats.getAverage(),
_timeSpentPerCallStats.getWindowAverage(),
_timeSpentPerCallStats.getCurrentIntervalAverage());
printf(" Usecs spent per packetVersionAndHashMatch() call | avg: %.2f, avg_30s: %.2f, last_second: %.2f\n",
_timeSpentPerHashMatchCallStats.getAverage(),
_timeSpentPerHashMatchCallStats.getWindowAverage(),
_timeSpentPerHashMatchCallStats.getCurrentIntervalAverage());
double WINDOW_LENGTH_USECS = READ_DATAGRAMS_STATS_WINDOW_SECONDS * USECS_PER_SECOND;
printf(" %% time spent in readPendingDatagram() calls | avg_30s: %.6f%%, last_second: %.6f%%\n",
_timeSpentPerCallStats.getWindowSum() / WINDOW_LENGTH_USECS * 100.0,
_timeSpentPerCallStats.getCurrentIntervalSum() / USECS_PER_SECOND * 100.0);
printf("%% time spent in packetVersionAndHashMatch() calls: | avg_30s: %.6f%%, last_second: %.6f%%\n",
_timeSpentPerHashMatchCallStats.getWindowSum() / WINDOW_LENGTH_USECS * 100.0,
_timeSpentPerHashMatchCallStats.getCurrentIntervalSum() / USECS_PER_SECOND * 100.0);
foreach(const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) {
if (node->getLinkedData()) {
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
if (node->getType() == NodeType::Agent && node->getActiveSocket()) {
printf("\nStats for agent %s --------------------------------\n",
node->getUUID().toString().toLatin1().data());
nodeData->printUpstreamDownstreamStats();
}
}
}
}
_datagramsReadPerCallStats.currentIntervalComplete();
_timeSpentPerCallStats.currentIntervalComplete();
_timeSpentPerHashMatchCallStats.currentIntervalComplete();
}
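perSecondActions() is what advances all of the MovingMinMaxAvg windows once per second. Collecting the interface implied by the calls in this file into one sketch (MovingMinMaxAvg is the repo's own helper; the constructor's two arguments are assumed to be the interval length in seconds and the number of intervals per rolling window):

```cpp
#include <QtGlobal>
#include "MovingMinMaxAvg.h" // from libraries/shared

void movingStatsUsage(quint64 sampleValue) {
    const int READ_DATAGRAMS_STATS_WINDOW_SECONDS = 30;
    // assumed: 1-second intervals, 30 intervals per rolling window
    static MovingMinMaxAvg<quint64> stats(1, READ_DATAGRAMS_STATS_WINDOW_SECONDS);
    stats.update(sampleValue);                    // record one sample
    double lifetime = stats.getAverage();         // average over every sample seen
    double window = stats.getWindowAverage();     // average over the rolling window
    double lastSec = stats.getCurrentIntervalAverage(); // average in the open interval
    stats.currentIntervalComplete();              // close the interval; it becomes
                                                  // getLastCompleteIntervalStats()
    (void)lifetime; (void)window; (void)lastSec;
}
```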
QString AudioMixer::getReadPendingDatagramsCallsPerSecondsStatsString() const {
QString result = "calls_per_sec_avg_30s: " + QString::number(_readPendingCallsPerSecondStats.getWindowAverage(), 'f', 2)
+ " calls_last_sec: " + QString::number(_readPendingCallsPerSecondStats.getLastCompleteIntervalStats().getSum() + 0.5, 'f', 0);
return result;
}
QString AudioMixer::getReadPendingDatagramsPacketsPerCallStatsString() const {
QString result = "pkts_per_call_avg_30s: " + QString::number(_datagramsReadPerCallStats.getWindowAverage(), 'f', 2)
+ " pkts_per_call_avg_1s: " + QString::number(_datagramsReadPerCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2);
return result;
}
QString AudioMixer::getReadPendingDatagramsTimeStatsString() const {
QString result = "usecs_per_call_avg_30s: " + QString::number(_timeSpentPerCallStats.getWindowAverage(), 'f', 2)
+ " usecs_per_call_avg_1s: " + QString::number(_timeSpentPerCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2)
+ " prct_time_in_call_30s: " + QString::number(_timeSpentPerCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0, 'f', 6) + "%"
+ " prct_time_in_call_1s: " + QString::number(_timeSpentPerCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0, 'f', 6) + "%";
return result;
}
QString AudioMixer::getReadPendingDatagramsHashMatchTimeStatsString() const {
QString result = "usecs_per_hashmatch_avg_30s: " + QString::number(_timeSpentPerHashMatchCallStats.getWindowAverage(), 'f', 2)
+ " usecs_per_hashmatch_avg_1s: " + QString::number(_timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getAverage(), 'f', 2)
+ " prct_time_in_hashmatch_30s: " + QString::number(_timeSpentPerHashMatchCallStats.getWindowSum() / (READ_DATAGRAMS_STATS_WINDOW_SECONDS*USECS_PER_SECOND) * 100.0, 'f', 6) + "%"
+ " prct_time_in_hashmatch_1s: " + QString::number(_timeSpentPerHashMatchCallStats.getLastCompleteIntervalStats().getSum() / USECS_PER_SECOND * 100.0, 'f', 6) + "%";
return result;
}

View file

@ -21,7 +21,8 @@ class AvatarAudioStream;
const int SAMPLE_PHASE_DELAY_AT_90 = 20;
const quint64 TOO_LONG_SINCE_LAST_SEND_AUDIO_STREAM_STATS = 1 * USECS_PER_SECOND;
const int READ_DATAGRAMS_STATS_WINDOW_SECONDS = 30;
/// Handles assignments of type AudioMixer - mixing streams of audio and re-distributing to various clients.
class AudioMixer : public ThreadedAssignment {
@ -38,21 +39,26 @@ public slots:
void sendStatsPacket();
static bool getUseDynamicJitterBuffers() { return _useDynamicJitterBuffers; }
static int getStaticDesiredJitterBufferFrames() { return _staticDesiredJitterBufferFrames; }
static int getMaxFramesOverDesired() { return _maxFramesOverDesired; }
static const InboundAudioStream::Settings& getStreamSettings() { return _streamSettings; }
private:
/// adds one stream to the mix for a listening node
void addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
int addStreamToMixForListeningNodeWithStream(PositionalAudioStream* streamToAdd,
AvatarAudioStream* listeningNodeStream);
/// prepares and sends a mix to one Node
void prepareMixForListeningNode(Node* node);
int prepareMixForListeningNode(Node* node);
// client samples capacity is larger than what will be sent to optimize mixing
// we are MMX adding 4 samples at a time so we need client samples to have an extra 4
int16_t _clientSamples[NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)];
void perSecondActions();
QString getReadPendingDatagramsCallsPerSecondsStatsString() const;
QString getReadPendingDatagramsPacketsPerCallStatsString() const;
QString getReadPendingDatagramsTimeStatsString() const;
QString getReadPendingDatagramsHashMatchTimeStatsString() const;
float _trailingSleepRatio;
float _minAudibilityThreshold;
@ -63,13 +69,21 @@ private:
AABox* _sourceUnattenuatedZone;
AABox* _listenerUnattenuatedZone;
static bool _useDynamicJitterBuffers;
static int _staticDesiredJitterBufferFrames;
static int _maxFramesOverDesired;
static InboundAudioStream::Settings _streamSettings;
static bool _printStreamStats;
static bool _enableFilter;
quint64 _lastPerSecondCallbackTime;
quint64 _lastSendAudioStreamStatsTime;
bool _sendAudioStreamStats;
// stats
MovingMinMaxAvg<int> _datagramsReadPerCallStats; // update with # of datagrams read for each readPendingDatagrams call
MovingMinMaxAvg<quint64> _timeSpentPerCallStats; // update with usecs spent inside each readPendingDatagrams call
MovingMinMaxAvg<quint64> _timeSpentPerHashMatchCallStats; // update with usecs spent inside each packetVersionAndHashMatch call
MovingMinMaxAvg<int> _readPendingCallsPerSecondStats; // update with # of readPendingDatagrams calls in the last second
};
#endif // hifi_AudioMixer_h

View file

@ -74,9 +74,7 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
quint8 channelFlag = *(reinterpret_cast<const quint8*>(channelFlagAt));
bool isStereo = channelFlag == 1;
_audioStreams.insert(nullUUID,
matchingStream = new AvatarAudioStream(isStereo, AudioMixer::getUseDynamicJitterBuffers(),
AudioMixer::getStaticDesiredJitterBufferFrames(), AudioMixer::getMaxFramesOverDesired()));
_audioStreams.insert(nullUUID, matchingStream = new AvatarAudioStream(isStereo, AudioMixer::getStreamSettings()));
} else {
matchingStream = _audioStreams.value(nullUUID);
}
@ -88,9 +86,8 @@ int AudioMixerClientData::parseData(const QByteArray& packet) {
QUuid streamIdentifier = QUuid::fromRfc4122(packet.mid(bytesBeforeStreamIdentifier, NUM_BYTES_RFC4122_UUID));
if (!_audioStreams.contains(streamIdentifier)) {
_audioStreams.insert(streamIdentifier,
matchingStream = new InjectedAudioStream(streamIdentifier, AudioMixer::getUseDynamicJitterBuffers(),
AudioMixer::getStaticDesiredJitterBufferFrames(), AudioMixer::getMaxFramesOverDesired()));
// we don't have this injected stream yet, so add it
_audioStreams.insert(streamIdentifier, matchingStream = new InjectedAudioStream(streamIdentifier, AudioMixer::getStreamSettings()));
} else {
matchingStream = _audioStreams.value(streamIdentifier);
}
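Both insertions in this file now follow the same find-or-create shape, passing the single mixer-wide settings object instead of three separate jitter parameters. The pattern in isolation (types as in the surrounding file):

```cpp
// Find-or-create: one PositionalAudioStream per source UUID, constructed on
// first sight with the shared InboundAudioStream::Settings. QHash::value()
// returns a null pointer for an unknown key.
PositionalAudioStream* matchingStream = _audioStreams.value(streamIdentifier);
if (!matchingStream) {
    matchingStream = new InjectedAudioStream(streamIdentifier,
                                             AudioMixer::getStreamSettings());
    _audioStreams.insert(streamIdentifier, matchingStream);
}
```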
@ -105,18 +102,15 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend(AABox* checkSourceZone, A
QHash<QUuid, PositionalAudioStream*>::ConstIterator i;
for (i = _audioStreams.constBegin(); i != _audioStreams.constEnd(); i++) {
PositionalAudioStream* stream = i.value();
if (stream->popFrames(1, true) > 0) {
// this is a ring buffer that is ready to go
// calculate the trailing avg loudness for the next frame
// that would be mixed in
stream->updateLastPopOutputTrailingLoudness();
if (checkSourceZone && checkSourceZone->contains(stream->getPosition())) {
stream->setListenerUnattenuatedZone(listenerZone);
} else {
stream->setListenerUnattenuatedZone(NULL);
}
stream->updateLastPopOutputLoudnessAndTrailingLoudness();
}
if (checkSourceZone && checkSourceZone->contains(stream->getPosition())) {
stream->setListenerUnattenuatedZone(listenerZone);
} else {
stream->setListenerUnattenuatedZone(NULL);
}
}
}
@ -185,7 +179,9 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
// pack the calculated number of stream stats
for (int i = 0; i < numStreamStatsToPack; i++) {
AudioStreamStats streamStats = audioStreamsIterator.value()->updateSeqHistoryAndGetAudioStreamStats();
PositionalAudioStream* stream = audioStreamsIterator.value();
stream->perSecondCallbackForUpdatingStats();
AudioStreamStats streamStats = stream->getAudioStreamStats();
memcpy(dataAt, &streamStats, sizeof(AudioStreamStats));
dataAt += sizeof(AudioStreamStats);

View file

@ -13,8 +13,8 @@
#include "AvatarAudioStream.h"
AvatarAudioStream::AvatarAudioStream(bool isStereo, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired) :
PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, dynamicJitterBuffer, staticDesiredJitterBufferFrames, maxFramesOverDesired)
AvatarAudioStream::AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings) :
PositionalAudioStream(PositionalAudioStream::Microphone, isStereo, settings)
{
}
@ -38,26 +38,9 @@ int AvatarAudioStream::parseStreamProperties(PacketType type, const QByteArray&
// read the positional data
readBytes += parsePositionalData(packetAfterSeqNum.mid(readBytes));
if (type == PacketTypeSilentAudioFrame) {
int16_t numSilentSamples;
memcpy(&numSilentSamples, packetAfterSeqNum.data() + readBytes, sizeof(int16_t));
readBytes += sizeof(int16_t);
numAudioSamples = numSilentSamples;
} else {
int numAudioBytes = packetAfterSeqNum.size() - readBytes;
numAudioSamples = numAudioBytes / sizeof(int16_t);
}
return readBytes;
}
int AvatarAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
int readBytes = 0;
if (type == PacketTypeSilentAudioFrame) {
writeDroppableSilentSamples(numAudioSamples);
} else {
// there is audio data to read
readBytes += _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t));
}
// calculate how many samples are in this packet
int numAudioBytes = packetAfterSeqNum.size() - readBytes;
numAudioSamples = numAudioBytes / sizeof(int16_t);
return readBytes;
}

View file

@ -18,7 +18,7 @@
class AvatarAudioStream : public PositionalAudioStream {
public:
AvatarAudioStream(bool isStereo, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired);
AvatarAudioStream(bool isStereo, const InboundAudioStream::Settings& settings);
private:
// disallow copying of AvatarAudioStream objects
@ -26,7 +26,6 @@ private:
AvatarAudioStream& operator= (const AvatarAudioStream&);
int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples);
};
#endif // hifi_AvatarAudioStream_h

View file

@ -9,8 +9,8 @@
"help": "Dynamically buffer client audio based on perceived jitter in packet receipt timing",
"default": false
},
"B-desired-jitter-buffer-frames": {
"label": "Desired Jitter Buffer Frames",
"B-static-desired-jitter-buffer-frames": {
"label": "Static Desired Jitter Buffer Frames",
"help": "If dynamic jitter buffers is disabled, this determines the target number of frames maintained by the AudioMixer's jitter buffers",
"placeholder": "1",
"default": "1"
@ -21,18 +21,54 @@
"placeholder": "10",
"default": "10"
},
"H-print-stream-stats": {
"D-use-stdev-for-desired-calc": {
"type": "checkbox",
"label": "Use Stdev for Desired Jitter Frames Calc:",
"help": "If checked, Philip's method (stdev of timegaps) is used to calculate desired jitter frames. Otherwise, Fred's method (max timegap) is used",
"default": false
},
"E-window-starve-threshold": {
"label": "Window Starve Threshold",
"help": "If this many starves occur in an N-second window (N is the number in the next field), then the desired jitter frames will be re-evaluated using Window A.",
"placeholder": "3",
"default": "3"
},
"F-window-seconds-for-desired-calc-on-too-many-starves": {
"label": "Timegaps Window (A) Seconds:",
"help": "Window A contains a history of timegaps. Its max timegap is used to re-evaluate the desired jitter frames when too many starves occur within it.",
"placeholder": "50",
"default": "50"
},
"G-window-seconds-for-desired-reduction": {
"label": "Timegaps Window (B) Seconds:",
"help": "Window B contains a history of timegaps. Its max timegap is used as a ceiling for the desired jitter frames value.",
"placeholder": "10",
"default": "10"
},
"H-repetition-with-fade": {
"type": "checkbox",
"label": "Repetition with Fade:",
"help": "If enabled, dropped frames and mixing during starves will repeat the last frame, eventually fading to silence",
"default": false
},
"I-print-stream-stats": {
"type": "checkbox",
"label": "Print Stream Stats:",
"help": "If enabled, audio upstream and downstream stats of each agent will be printed each second to stdout",
"default": false
},
"D-unattenuated-zone": {
"Z-unattenuated-zone": {
"label": "Unattenuated Zone",
"help": "Boxes for source and listener (corner x, corner y, corner z, size x, size y, size z, corner x, corner y, corner z, size x, size y, size z)",
"placeholder": "no zone",
"default": ""
},
"J-enable-filter": {
"type": "checkbox",
"label": "Enable Positional Filter",
"help": "If enabled, positional audio stream uses lowpass filter",
"default": false
}
}
}
}
}

View file

@ -0,0 +1,47 @@
//
// PlayRecordingOnAC.js
// examples
//
// Created by Clément Brisset on 8/24/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var filename = "http://your.recording.url";
var playFromCurrentLocation = true;
Avatar.faceModelURL = "http://public.highfidelity.io/models/heads/EvilPhilip_v7.fst";
Avatar.skeletonModelURL = "http://public.highfidelity.io/models/skeletons/Philip_Carl_Body_A-Pose.fst";
// Set position here if playFromCurrentLocation is true
Avatar.position = { x:1, y: 1, z: 1 };
Agent.isAvatar = true;
Avatar.loadRecording(filename);
var count = 300; // This is necessary to wait for the audio mixer to connect
function update(event) {
if (count > 0) {
count--;
return;
}
if (count == 0) {
Avatar.startPlaying(playFromCurrentLocation);
Avatar.play();
Vec3.print("Playing from ", Avatar.position);
count--;
}
if (Avatar.isPlaying()) {
Avatar.play();
} else {
Script.update.disconnect(update);
}
}
Script.update.connect(update);

View file

@ -139,7 +139,9 @@ function moveUI() {
function mousePressEvent(event) {
clickedOverlay = Overlays.getOverlayAtPoint({ x: event.x, y: event.y });
if (recordIcon === toolBar.clicked(clickedOverlay)) {
print("Status: isPlaying=" + MyAvatar.isPlaying() + ", isRecording=" + MyAvatar.isRecording());
if (recordIcon === toolBar.clicked(clickedOverlay) && !MyAvatar.isPlaying()) {
if (!MyAvatar.isRecording()) {
MyAvatar.startRecording();
toolBar.setBack(COLOR_ON, ALPHA_ON);
@ -148,14 +150,14 @@ function mousePressEvent(event) {
MyAvatar.loadLastRecording();
toolBar.setBack(COLOR_OFF, ALPHA_OFF);
}
} else if (playIcon === toolBar.clicked(clickedOverlay)) {
if (!MyAvatar.isRecording()) {
if (MyAvatar.isPlaying()) {
MyAvatar.stopPlaying();
} else {
MyAvatar.startPlaying();
}
}
} else if (playIcon === toolBar.clicked(clickedOverlay) && !MyAvatar.isRecording()) {
if (MyAvatar.isPlaying()) {
MyAvatar.stopPlaying();
} else {
MyAvatar.setPlayFromCurrentLocation(true);
MyAvatar.setPlayerLoop(true);
MyAvatar.startPlaying(true);
}
} else if (saveIcon === toolBar.clicked(clickedOverlay)) {
if (!MyAvatar.isRecording()) {
recordingFile = Window.save("Save recording to file", ".", "*.rec");

View file

@ -51,9 +51,6 @@ var lastVoxelScale = 0;
var dragStart = { x: 0, y: 0 };
var wheelPixelsMoved = 0;
var mouseX = 0;
var mouseY = 0;
// Create a table of the different colors you can choose
var colors = new Array();
colors[0] = { red: 120, green: 181, blue: 126 };
@ -1041,8 +1038,6 @@ function mousePressEvent(event) {
// TODO: does any of this stuff need to execute if we're panning or orbiting?
trackMouseEvent(event); // used by preview support
mouseX = event.x;
mouseY = event.y;
var pickRay = Camera.computePickRay(event.x, event.y);
var intersection = Voxels.findRayIntersection(pickRay);
audioOptions.position = Vec3.sum(pickRay.origin, pickRay.direction);
@ -1296,40 +1291,30 @@ function mouseMoveEvent(event) {
}
if (isAdding) {
// Watch the drag direction to tell which way to 'extrude' this voxel
var pickRay = Camera.computePickRay(event.x, event.y);
var distance = Vec3.length(Vec3.subtract(pickRay.origin, lastVoxelPosition));
var mouseSpot = Vec3.sum(Vec3.multiply(pickRay.direction, distance), pickRay.origin);
var delta = Vec3.subtract(mouseSpot, lastVoxelPosition);
if (!isExtruding) {
var pickRay = Camera.computePickRay(event.x, event.y);
var lastVoxelDistance = { x: pickRay.origin.x - lastVoxelPosition.x,
y: pickRay.origin.y - lastVoxelPosition.y,
z: pickRay.origin.z - lastVoxelPosition.z };
var distance = Vec3.length(lastVoxelDistance);
var mouseSpot = { x: pickRay.direction.x * distance, y: pickRay.direction.y * distance, z: pickRay.direction.z * distance };
mouseSpot.x += pickRay.origin.x;
mouseSpot.y += pickRay.origin.y;
mouseSpot.z += pickRay.origin.z;
var dx = mouseSpot.x - lastVoxelPosition.x;
var dy = mouseSpot.y - lastVoxelPosition.y;
var dz = mouseSpot.z - lastVoxelPosition.z;
// Use the drag direction to tell which way to 'extrude' this voxel
extrudeScale = lastVoxelScale;
extrudeDirection = { x: 0, y: 0, z: 0 };
isExtruding = true;
if (dx > lastVoxelScale) extrudeDirection.x = extrudeScale;
else if (dx < -lastVoxelScale) extrudeDirection.x = -extrudeScale;
else if (dy > lastVoxelScale) extrudeDirection.y = extrudeScale;
else if (dy < -lastVoxelScale) extrudeDirection.y = -extrudeScale;
else if (dz > lastVoxelScale) extrudeDirection.z = extrudeScale;
else if (dz < -lastVoxelScale) extrudeDirection.z = -extrudeScale;
if (delta.x > lastVoxelScale) extrudeDirection.x = 1;
else if (delta.x < -lastVoxelScale) extrudeDirection.x = -1;
else if (delta.y > lastVoxelScale) extrudeDirection.y = 1;
else if (delta.y < -lastVoxelScale) extrudeDirection.y = -1;
else if (delta.z > lastVoxelScale) extrudeDirection.z = 1;
else if (delta.z < -lastVoxelScale) extrudeDirection.z = -1;
else isExtruding = false;
} else {
// We have got an extrusion direction, now look for mouse move beyond threshold to add new voxel
var dx = event.x - mouseX;
var dy = event.y - mouseY;
if (Math.sqrt(dx*dx + dy*dy) > PIXELS_PER_EXTRUDE_VOXEL) {
lastVoxelPosition = Vec3.sum(lastVoxelPosition, extrudeDirection);
Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z,
extrudeScale, lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue);
mouseX = event.x;
mouseY = event.y;
// Extrude if mouse has moved by a voxel in the extrude direction
var distanceInDirection = Vec3.dot(delta, extrudeDirection);
if (distanceInDirection > extrudeScale) {
lastVoxelPosition = Vec3.sum(lastVoxelPosition, Vec3.multiply(extrudeDirection, extrudeScale));
Voxels.setVoxel(lastVoxelPosition.x, lastVoxelPosition.y, lastVoxelPosition.z, extrudeScale,
lastVoxelColor.red, lastVoxelColor.green, lastVoxelColor.blue);
}
}
}

View file

@ -0,0 +1,42 @@
//
// playSoundPath.js
// examples
//
// Created by Craig Hansen-Sturm on 05/27/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
var soundClip = new Sound("https://s3-us-west-1.amazonaws.com/highfidelity-public/sounds/Voxels/voxel create 3.raw");
var currentTime = 1.570079; // pi/2
var deltaTime = 0.05;
var distance = 1;
var debug = 0;
function playSound() {
var options = new AudioInjectionOptions();
currentTime += deltaTime;
var s = distance * Math.sin(currentTime);
var c = distance * Math.cos(currentTime);
var soundOffset = { x:s, y:0, z:c };
if (debug) {
print("t=" + currentTime + " offset=" + soundOffset.x + "," + soundOffset.y + "," + soundOffset.z);
}
var avatarPosition = MyAvatar.position;
var soundPosition = Vec3.sum(avatarPosition,soundOffset);
options.position = soundPosition;
options.volume = 1.0;
Audio.playSound(soundClip, options);
}
Script.setInterval(playSound, 250);

View file

@ -269,8 +269,7 @@ function update(deltaTime){
}
var locationChanged = false;
if (location.hostname != oldHost) {
print("Changed domain");
if (location.hostname != oldHost || !location.isConnected) {
for (model in models) {
removeIndicators(models[model]);
}

View file

@ -2,15 +2,15 @@
Instructions for adding the Oculus library (LibOVR) to Interface
Stephen Birarda, March 6, 2014
You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.3.2.
You can download the Oculus SDK from https://developer.oculusvr.com/ (account creation required). Interface has been tested with SDK version 0.4.1.
1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/oculus folder.
1. Copy the Oculus SDK folders from the LibOVR directory (Lib, Include, Src) into the interface/externals/libovr folder.
This readme.txt should be there as well.
You may optionally choose to copy the SDK folders to a location outside the repository (so you can re-use with different checkouts and different projects).
If so, our CMake find module expects you to set the ENV variable 'HIFI_LIB_DIR' to a directory containing a subfolder 'oculus' that contains the three folders mentioned above.
NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \oculus\Lib\Win32\ directory.
NOTE: For Windows users, you should copy libovr.lib and libovrd.lib from the \oculus\Lib\Win32\VS2010 directory to the \libovr\Lib\Win32\ directory.
2. Clear your build directory, run cmake and build, and you should be all set.

View file

@ -0,0 +1,20 @@
#version 120
//
// metavoxel_heightfield_base.frag
// fragment shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the diffuse texture
uniform sampler2D diffuseMap;
void main(void) {
// compute the base color based on OpenGL lighting model
gl_FragColor = gl_Color * texture2D(diffuseMap, gl_TexCoord[0].st);
}

View file

@ -0,0 +1,33 @@
#version 120
//
// metavoxel_heightfield_base.vert
// vertex shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the height texture
uniform sampler2D heightMap;
// the distance between height points in texture space
uniform float heightScale;
// the scale between height and color textures
uniform float colorScale;
void main(void) {
// add the height to the position
float height = texture2D(heightMap, gl_MultiTexCoord0.st).r;
gl_Position = gl_ModelViewProjectionMatrix * (gl_Vertex + vec4(0.0, height, 0.0, 0.0));
// the zero height should be invisible
gl_FrontColor = vec4(1.0, 1.0, 1.0, step(height, 0.0));
// pass along the scaled/offset texture coordinates
gl_TexCoord[0] = (gl_MultiTexCoord0 - vec4(heightScale, heightScale, 0.0, 0.0)) * colorScale;
}

View file

@ -0,0 +1,21 @@
#version 120
//
// metavoxel_heightfield_light.frag
// fragment shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the base color based on OpenGL lighting model
gl_FragColor = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * max(0.0, dot(normalize(normal), gl_LightSource[0].position)));
}

View file

@ -0,0 +1,45 @@
#version 120
//
// metavoxel_heighfield_light.vert
// vertex shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the height texture
uniform sampler2D heightMap;
// the distance between height points in texture space
uniform float heightScale;
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
// transform and store the normal for interpolation
vec2 heightCoord = gl_MultiTexCoord0.st;
float deltaX = texture2D(heightMap, heightCoord - vec2(heightScale, 0.0)).r -
texture2D(heightMap, heightCoord + vec2(heightScale, 0.0)).r;
float deltaZ = texture2D(heightMap, heightCoord - vec2(0.0, heightScale)).r -
texture2D(heightMap, heightCoord + vec2(0.0, heightScale)).r;
normal = normalize(gl_ModelViewMatrix * vec4(deltaX, heightScale, deltaZ, 0.0));
// add the height to the position
float height = texture2D(heightMap, heightCoord).r;
position = gl_ModelViewMatrix * (gl_Vertex + vec4(0.0, height, 0.0, 0.0));
gl_Position = gl_ProjectionMatrix * position;
// the zero height should be invisible
gl_FrontColor = vec4(1.0, 1.0, 1.0, step(height, 0.0));
// and the shadow texture coordinates
gl_TexCoord[1] = vec4(dot(gl_EyePlaneS[0], position), dot(gl_EyePlaneT[0], position), dot(gl_EyePlaneR[0], position), 1.0);
}

View file

@ -0,0 +1,44 @@
#version 120
//
// metavoxel_heightfield_light_cascaded_shadow_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the shadow texture
uniform sampler2DShadow shadowMap;
// the distances to the cascade sections
uniform vec3 shadowDistances;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated position
varying vec4 position;
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the index of the cascade to use and the corresponding texture coordinates
int shadowIndex = int(dot(step(vec3(position.z), shadowDistances), vec3(1.0, 1.0, 1.0)));
vec3 shadowTexCoord = vec3(dot(gl_EyePlaneS[shadowIndex], position), dot(gl_EyePlaneT[shadowIndex], position),
dot(gl_EyePlaneR[shadowIndex], position));
// compute the base color based on OpenGL lighting model
float diffuse = dot(normalize(normal), gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, shadowTexCoord + vec3(shadowScale, shadowScale, 0.0)).r);
gl_FragColor = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
}
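The shadowIndex expression above is a branchless cascade pick: step(vec3(position.z), shadowDistances) produces a 1.0 for each cascade distance that is >= the fragment's eye-space depth, and the dot product with (1,1,1) sums those flags into an index. The same selection written out as a loop on the CPU (three cascades, per the vec3):

```cpp
// Loop form of the shader's dot(step(...)) idiom: the cascade index is the
// count of cascade distances at or beyond the fragment's position.z.
int cascadeIndex(float positionZ, const float shadowDistances[3]) {
    int index = 0;
    for (int i = 0; i < 3; i++) {
        if (shadowDistances[i] >= positionZ) { // step(position.z, shadowDistances[i])
            index++;
        }
    }
    return index;
}
```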

View file

@ -0,0 +1,33 @@
#version 120
//
// metavoxel_heightfield_light_shadow_map.frag
// fragment shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the shadow texture
uniform sampler2DShadow shadowMap;
// the inverse of the size of the shadow map
const float shadowScale = 1.0 / 2048.0;
// the interpolated normal
varying vec4 normal;
void main(void) {
// compute the base color based on OpenGL lighting model
float diffuse = dot(normalize(normal), gl_LightSource[0].position);
float facingLight = step(0.0, diffuse) * 0.25 *
(shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(-shadowScale, shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, -shadowScale, 0.0)).r +
shadow2D(shadowMap, gl_TexCoord[1].stp + vec3(shadowScale, shadowScale, 0.0)).r);
gl_FragColor = gl_Color * (gl_FrontLightModelProduct.sceneColor + gl_FrontLightProduct[0].ambient +
gl_FrontLightProduct[0].diffuse * (diffuse * facingLight));
}

View file

@ -0,0 +1,29 @@
#version 120
//
// metavoxel_heightfield_splat.frag
// fragment shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the number of splats per pass
const int SPLAT_COUNT = 4;
// the splat textures
uniform sampler2D diffuseMaps[SPLAT_COUNT];
// alpha values for the four splat textures
varying vec4 alphaValues;
void main(void) {
// blend the splat textures
gl_FragColor = gl_Color * (texture2D(diffuseMaps[0], gl_TexCoord[0].st) * alphaValues.x +
texture2D(diffuseMaps[1], gl_TexCoord[1].st) * alphaValues.y +
texture2D(diffuseMaps[2], gl_TexCoord[2].st) * alphaValues.z +
texture2D(diffuseMaps[3], gl_TexCoord[3].st) * alphaValues.w);
}

View file

@ -0,0 +1,64 @@
#version 120
//
// metavoxel_heighfield_splat.vert
// vertex shader
//
// Created by Andrzej Kapolka on 8/20/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// the height texture
uniform sampler2D heightMap;
// the texture that contains the texture indices
uniform sampler2D textureMap;
// the distance between height points in texture space
uniform float heightScale;
// the scale between height and texture textures
uniform float textureScale;
// the splat texture offset
uniform vec2 splatTextureOffset;
// the splat textures scales on the S axis
uniform vec4 splatTextureScalesS;
// the splat texture scales on the T axis
uniform vec4 splatTextureScalesT;
// the lower bounds of the values corresponding to the splat textures
uniform vec4 textureValueMinima;
// the upper bounds of the values corresponding to the splat textures
uniform vec4 textureValueMaxima;
// alpha values for the four splat textures
varying vec4 alphaValues;
void main(void) {
// add the height to the position
float height = texture2D(heightMap, gl_MultiTexCoord0.st).r;
vec4 modelSpacePosition = gl_Vertex + vec4(0.0, height, 0.0, 0.0);
gl_Position = gl_ModelViewProjectionMatrix * modelSpacePosition;
// the zero height should be invisible
gl_FrontColor = vec4(1.0, 1.0, 1.0, 1.0 - step(height, 0.0));
// pass along the scaled/offset texture coordinates
vec4 textureSpacePosition = vec4(modelSpacePosition.xz, 0.0, 1.0) + vec4(splatTextureOffset, 0.0, 0.0);
gl_TexCoord[0] = textureSpacePosition * vec4(splatTextureScalesS[0], splatTextureScalesT[0], 0.0, 1.0);
gl_TexCoord[1] = textureSpacePosition * vec4(splatTextureScalesS[1], splatTextureScalesT[1], 0.0, 1.0);
gl_TexCoord[2] = textureSpacePosition * vec4(splatTextureScalesS[2], splatTextureScalesT[2], 0.0, 1.0);
gl_TexCoord[3] = textureSpacePosition * vec4(splatTextureScalesS[3], splatTextureScalesT[3], 0.0, 1.0);
// compute the alpha values for each texture
float value = texture2D(textureMap, (gl_MultiTexCoord0.st - vec2(heightScale, heightScale)) * textureScale).r;
vec4 valueVector = vec4(value, value, value, value);
alphaValues = step(textureValueMinima, valueVector) * step(valueVector, textureValueMaxima);
}
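The alphaValues line above is a band test done four textures at a time: step(textureValueMinima, v) is 1.0 where v >= the minimum, and step(v, textureValueMaxima) is 1.0 where v <= the maximum, so their product is 1.0 exactly when the sampled value falls inside a texture's value band. The same test for a single channel, restated on the CPU:

```cpp
// One channel of the shader's splat selection: alpha is 1.0 exactly when the
// sampled texture-map value lies inside [minimum, maximum], otherwise 0.0.
float splatAlpha(float value, float minimum, float maximum) {
    float atLeastMin = (value >= minimum) ? 1.0f : 0.0f; // step(minimum, value)
    float atMostMax  = (value <= maximum) ? 1.0f : 0.0f; // step(value, maximum)
    return atLeastMin * atMostMax;
}
```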

View file

@ -246,7 +246,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer &startup_time) :
connect(&domainHandler, SIGNAL(connectedToDomain(const QString&)), SLOT(updateWindowTitle()));
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle()));
connect(&domainHandler, &DomainHandler::settingsReceived, this, &Application::domainSettingsReceived);
connect(&domainHandler, &DomainHandler::hostnameChanged, Menu::getInstance(), &Menu::clearLoginDialogDisplayedFlag);
// hookup VoxelEditSender to PaymentManager so we can pay for octree edits
const PaymentManager& paymentManager = PaymentManager::getInstance();
connect(&_voxelEditSender, &VoxelEditPacketSender::octreePaymentRequired,
@ -1772,14 +1773,7 @@ void Application::init() {
_lastTimeUpdated.start();
Menu::getInstance()->loadSettings();
if (Menu::getInstance()->getAudioJitterBufferFrames() != 0) {
_audio.setDynamicJitterBuffers(false);
_audio.setStaticDesiredJitterBufferFrames(Menu::getInstance()->getAudioJitterBufferFrames());
} else {
_audio.setDynamicJitterBuffers(true);
}
_audio.setMaxFramesOverDesired(Menu::getInstance()->getMaxFramesOverDesired());
_audio.setReceivedAudioStreamSettings(Menu::getInstance()->getReceivedAudioStreamSettings());
qDebug("Loaded settings");
@ -3821,6 +3815,10 @@ void Application::stopAllScripts(bool restart) {
it.value()->stop();
qDebug() << "stopping script..." << it.key();
}
// HACK: ATM scripts cannot set/get their animation priorities, so we clear priorities
// whenever a script stops in case it happened to have been setting joint rotations.
// TODO: expose animation priorities and provide a layered animation control system.
_myAvatar->clearJointAnimationPriorities();
}
void Application::stopScript(const QString &scriptName) {
@ -3828,6 +3826,10 @@ void Application::stopScript(const QString &scriptName) {
if (_scriptEnginesHash.contains(scriptURLString)) {
_scriptEnginesHash.value(scriptURLString)->stop();
qDebug() << "stopping script..." << scriptName;
// HACK: ATM scripts cannot set/get their animation priorities, so we clear priorities
// whenever a script stops in case it happened to have been setting joint rotations.
// TODO: expose animation priorities and provide a layered animation control system.
_myAvatar->clearJointAnimationPriorities();
}
}

View file

@ -72,7 +72,7 @@ Audio::Audio(QObject* parent) :
_proceduralAudioOutput(NULL),
_proceduralOutputDevice(NULL),
_inputRingBuffer(0),
_receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, true, 0, 0, true),
_receivedAudioStream(0, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, InboundAudioStream::Settings()),
_isStereoInput(false),
_averagedLatency(0.0),
_lastInputLoudness(0),
@ -105,6 +105,7 @@ Audio::Audio(QObject* parent) :
_scopeInput(0),
_scopeOutputLeft(0),
_scopeOutputRight(0),
_scopeLastFrame(),
_statsEnabled(false),
_statsShowInjectedStreams(false),
_outgoingAvatarAudioSequenceNumber(0),
@ -113,14 +114,17 @@ Audio::Audio(QObject* parent) :
_audioOutputMsecsUnplayedStats(1, FRAMES_AVAILABLE_STATS_WINDOW_SECONDS),
_lastSentAudioPacket(0),
_packetSentTimeGaps(1, APPROXIMATELY_30_SECONDS_OF_AUDIO_PACKETS),
_audioOutputIODevice(*this)
_audioOutputIODevice(_receivedAudioStream)
{
// clear the array of locally injected samples
memset(_localProceduralSamples, 0, NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL);
// Create the noise sample array
_noiseSampleFrames = new float[NUMBER_OF_NOISE_SAMPLE_FRAMES];
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedAudioStreamSamples, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedSilence, this, &Audio::addStereoSilenceToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedLastFrameRepeatedWithFade, this, &Audio::addLastFrameRepeatedWithFadeToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::addedStereoSamples, this, &Audio::addStereoSamplesToScope, Qt::DirectConnection);
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples, this, &Audio::processReceivedSamples, Qt::DirectConnection);
}
void Audio::init(QGLWidget *parent) {
@ -460,9 +464,12 @@ void Audio::handleAudioInput() {
static char audioDataPacket[MAX_PACKET_SIZE];
static int numBytesPacketHeader = numBytesForPacketHeaderGivenPacketType(PacketTypeMicrophoneAudioNoEcho);
static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
static int16_t* networkAudioSamples = (int16_t*) (audioDataPacket + leadingBytes);
// NOTE: we assume PacketTypeMicrophoneAudioWithEcho has the same size header as
// PacketTypeMicrophoneAudioNoEcho. If not, then networkAudioSamples will be pointing to the wrong place for writing
// audio samples with echo.
static int leadingBytes = numBytesPacketHeader + sizeof(quint16) + sizeof(glm::vec3) + sizeof(glm::quat) + sizeof(quint8);
static int16_t* networkAudioSamples = (int16_t*)(audioDataPacket + leadingBytes);
float inputToNetworkInputRatio = calculateDeviceToNetworkInputRatio(_numInputCallbackBytes);
@ -668,9 +675,7 @@ void Audio::handleAudioInput() {
if (!_isStereoInput && _scopeEnabled && !_scopeEnabledPause) {
unsigned int numMonoAudioChannels = 1;
unsigned int monoAudioChannel = 0;
addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, monoAudioChannel, numMonoAudioChannels);
_scopeInputOffset += NETWORK_SAMPLES_PER_FRAME;
_scopeInputOffset %= _samplesPerScope;
_scopeInputOffset = addBufferToScope(_scopeInput, _scopeInputOffset, networkAudioSamples, NETWORK_SAMPLES_PER_FRAME, monoAudioChannel, numMonoAudioChannels);
}
NodeList* nodeList = NodeList::getInstance();
@ -686,19 +691,11 @@ void Audio::handleAudioInput() {
glm::vec3 headPosition = interfaceAvatar->getHead()->getPosition();
glm::quat headOrientation = interfaceAvatar->getHead()->getFinalOrientationInWorldFrame();
quint8 isStereo = _isStereoInput ? 1 : 0;
int numAudioBytes = 0;
PacketType packetType;
if (_lastInputLoudness == 0) {
packetType = PacketTypeSilentAudioFrame;
// we need to indicate how many silent samples this is to the audio mixer
networkAudioSamples[0] = numNetworkSamples;
numAudioBytes = sizeof(int16_t);
} else {
numAudioBytes = numNetworkBytes;
if (Menu::getInstance()->isOptionChecked(MenuOption::EchoServerAudio)) {
packetType = PacketTypeMicrophoneAudioWithEcho;
} else {
@ -707,21 +704,31 @@ void Audio::handleAudioInput() {
}
char* currentPacketPtr = audioDataPacket + populatePacketHeader(audioDataPacket, packetType);
// pack sequence number
memcpy(currentPacketPtr, &_outgoingAvatarAudioSequenceNumber, sizeof(quint16));
currentPacketPtr += sizeof(quint16);
// set the mono/stereo byte
*currentPacketPtr++ = isStereo;
if (packetType == PacketTypeSilentAudioFrame) {
// pack num silent samples
quint16 numSilentSamples = numNetworkSamples;
memcpy(currentPacketPtr, &numSilentSamples, sizeof(quint16));
currentPacketPtr += sizeof(quint16);
} else {
// set the mono/stereo byte
*currentPacketPtr++ = isStereo;
// memcpy the three float positions
memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
currentPacketPtr += (sizeof(headPosition));
// memcpy the three float positions
memcpy(currentPacketPtr, &headPosition, sizeof(headPosition));
currentPacketPtr += (sizeof(headPosition));
// memcpy our orientation
memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
currentPacketPtr += sizeof(headOrientation);
// memcpy our orientation
memcpy(currentPacketPtr, &headOrientation, sizeof(headOrientation));
currentPacketPtr += sizeof(headOrientation);
// audio samples have already been packed (written to networkAudioSamples)
currentPacketPtr += numNetworkBytes;
}
// first time this is 0
if (_lastSentAudioPacket == 0) {
@ -733,18 +740,58 @@ void Audio::handleAudioInput() {
_lastSentAudioPacket = now;
}
nodeList->writeDatagram(audioDataPacket, numAudioBytes + leadingBytes, audioMixer);
int packetBytes = currentPacketPtr - audioDataPacket;
nodeList->writeDatagram(audioDataPacket, packetBytes, audioMixer);
_outgoingAvatarAudioSequenceNumber++;
Application::getInstance()->getBandwidthMeter()->outputStream(BandwidthMeter::AUDIO)
.updateValue(numAudioBytes + leadingBytes);
.updateValue(packetBytes);
}
delete[] inputAudioSamples;
}
}
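The restructured packing above shrinks silent frames considerably: a PacketTypeSilentAudioFrame carries just a sequence number and a 16-bit silent-sample count, while audible packets keep the stereo flag, head position, and orientation ahead of the samples. A hedged sketch of the two wire layouts as this hunk reads (the diff's add/remove markers are lost in this view, so field order is best-effort rather than authoritative):

// Illustrative packet layouts after this change:
// PacketTypeSilentAudioFrame:
//   [packet header][quint16 sequence][quint16 numSilentSamples]
// PacketTypeMicrophoneAudioNoEcho / PacketTypeMicrophoneAudioWithEcho:
//   [packet header][quint16 sequence][quint8 isStereo]
//   [glm::vec3 headPosition][glm::quat headOrientation][int16_t samples...]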
void Audio::processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
const int STEREO_FACTOR = 2;
void Audio::addStereoSilenceToScope(int silentSamplesPerChannel) {
if (!_scopeEnabled || _scopeEnabledPause) {
return;
}
addSilenceToScope(_scopeOutputLeft, _scopeOutputOffset, silentSamplesPerChannel);
_scopeOutputOffset = addSilenceToScope(_scopeOutputRight, _scopeOutputOffset, silentSamplesPerChannel);
}
void Audio::addStereoSamplesToScope(const QByteArray& samples) {
if (!_scopeEnabled || _scopeEnabledPause) {
return;
}
const int16_t* samplesData = reinterpret_cast<const int16_t*>(samples.data());
int samplesPerChannel = samples.size() / sizeof(int16_t) / STEREO_FACTOR;
addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, samplesData, samplesPerChannel, 0, STEREO_FACTOR);
_scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, samplesData, samplesPerChannel, 1, STEREO_FACTOR);
_scopeLastFrame = samples.right(NETWORK_BUFFER_LENGTH_BYTES_STEREO);
}
void Audio::addLastFrameRepeatedWithFadeToScope(int samplesPerChannel) {
const int16_t* lastFrameData = reinterpret_cast<const int16_t*>(_scopeLastFrame.data());
int samplesRemaining = samplesPerChannel;
int indexOfRepeat = 0;
do {
int samplesToWriteThisIteration = std::min(samplesRemaining, (int)NETWORK_SAMPLES_PER_FRAME);
float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
addBufferToScope(_scopeOutputLeft, _scopeOutputOffset, lastFrameData, samplesToWriteThisIteration, 0, STEREO_FACTOR, fade);
_scopeOutputOffset = addBufferToScope(_scopeOutputRight, _scopeOutputOffset, lastFrameData, samplesToWriteThisIteration, 1, STEREO_FACTOR, fade);
samplesRemaining -= samplesToWriteThisIteration;
indexOfRepeat++;
} while (samplesRemaining > 0);
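calculateRepeatedFrameFadeFactor() is defined elsewhere in the tree, so only its role is visible here: each additional repeat of the last frame is attenuated further, so a starved stream fades out instead of buzzing. A plausible sketch of such a factor, assuming a linear fade over a fixed repeat budget (the constant is invented for illustration):

// Sketch only: the real calculateRepeatedFrameFadeFactor() may differ.
static float sketchRepeatedFrameFadeFactor(int indexOfRepeat) {
    const int ASSUMED_REPEATS_BEFORE_SILENCE = 10;  // hypothetical budget
    float fade = 1.0f - (float)indexOfRepeat / (float)ASSUMED_REPEATS_BEFORE_SILENCE;
    return fade < 0.0f ? 0.0f : fade;
}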
}
void Audio::processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer) {
const int numNetworkOutputSamples = inputBuffer.size() / sizeof(int16_t);
const int numDeviceOutputSamples = numNetworkOutputSamples * (_outputFormat.sampleRate() * _outputFormat.channelCount())
@ -789,30 +836,6 @@ void Audio::processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QBy
numNetworkOutputSamples,
numDeviceOutputSamples,
_desiredOutputFormat, _outputFormat);
if (_scopeEnabled && !_scopeEnabledPause) {
unsigned int numAudioChannels = _desiredOutputFormat.channelCount();
const int16_t* samples = receivedSamples;
for (int numSamples = numNetworkOutputSamples / numAudioChannels; numSamples > 0; numSamples -= NETWORK_SAMPLES_PER_FRAME) {
unsigned int audioChannel = 0;
addBufferToScope(
_scopeOutputLeft,
_scopeOutputOffset,
samples, audioChannel, numAudioChannels);
audioChannel = 1;
addBufferToScope(
_scopeOutputRight,
_scopeOutputOffset,
samples, audioChannel, numAudioChannels);
_scopeOutputOffset += NETWORK_SAMPLES_PER_FRAME;
_scopeOutputOffset %= _samplesPerScope;
samples += NETWORK_SAMPLES_PER_FRAME * numAudioChannels;
}
}
}
void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
@ -825,9 +848,6 @@ void Audio::addReceivedAudioToStream(const QByteArray& audioByteArray) {
Application::getInstance()->getBandwidthMeter()->inputStream(BandwidthMeter::AUDIO).updateValue(audioByteArray.size());
}
void Audio::parseAudioStreamStatsPacket(const QByteArray& packet) {
int numBytesPacketHeader = numBytesForPacketHeader(packet);
@ -860,12 +880,13 @@ void Audio::parseAudioStreamStatsPacket(const QByteArray& packet) {
void Audio::sendDownstreamAudioStatsPacket() {
// since this function is called every second, we'll sample some of our stats here
// since this function is called every second, we'll sample for some of our stats here
_inputRingBufferMsecsAvailableStats.update(getInputRingBufferMsecsAvailable());
_audioOutputMsecsUnplayedStats.update(getAudioOutputMsecsUnplayed());
// also, call _receivedAudioStream's per-second callback
_receivedAudioStream.perSecondCallbackForUpdatingStats();
char packet[MAX_PACKET_SIZE];
// pack header
@ -883,7 +904,7 @@ void Audio::sendDownstreamAudioStatsPacket() {
dataAt += sizeof(quint16);
// pack downstream audio stream stats
AudioStreamStats stats = _receivedAudioStream.updateSeqHistoryAndGetAudioStreamStats();
AudioStreamStats stats = _receivedAudioStream.getAudioStreamStats();
memcpy(dataAt, &stats, sizeof(AudioStreamStats));
dataAt += sizeof(AudioStreamStats);
@ -916,7 +937,7 @@ void Audio::addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& s
unsigned int delayCount = delay * _desiredOutputFormat.channelCount();
unsigned int silentCount = (remaining < delayCount) ? remaining : delayCount;
if (silentCount) {
_spatialAudioRingBuffer.addSilentFrame(silentCount);
_spatialAudioRingBuffer.addSilentSamples(silentCount);
}
// Recalculate the number of remaining samples
@ -1220,8 +1241,6 @@ void Audio::selectAudioFilterSmiley() {
void Audio::toggleScope() {
_scopeEnabled = !_scopeEnabled;
if (_scopeEnabled) {
_scopeInputOffset = 0;
_scopeOutputOffset = 0;
allocateScope();
} else {
freeScope();
@ -1259,6 +1278,8 @@ void Audio::selectAudioScopeFiftyFrames() {
}
void Audio::allocateScope() {
_scopeInputOffset = 0;
_scopeOutputOffset = 0;
int num = _samplesPerScope * sizeof(int16_t);
_scopeInput = new QByteArray(num, 0);
_scopeOutputLeft = new QByteArray(num, 0);
@ -1290,12 +1311,18 @@ void Audio::freeScope() {
}
}
void Audio::addBufferToScope(
QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels) {
int Audio::addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamplesPerChannel,
unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade) {
if (!_scopeEnabled || _scopeEnabledPause) {
return 0;
}
// Constant multiplier to map sample value to vertical size of scope
float multiplier = (float)MULTIPLIER_SCOPE_HEIGHT / logf(2.0f);
// Used to scale each sample. (logf(sample) + fadeOffset) is the same as logf(sample * fade).
float fadeOffset = logf(fade);
// Temporary variable receives sample value
float sample;
@ -1306,17 +1333,41 @@ void Audio::addBufferToScope(
// Short int pointer to mapped samples in byte array
int16_t* destination = (int16_t*) byteArray->data();
for (unsigned int i = 0; i < NETWORK_SAMPLES_PER_FRAME; i++) {
for (int i = 0; i < sourceSamplesPerChannel; i++) {
sample = (float)source[i * sourceNumberOfChannels + sourceChannel];
if (sample > 0) {
value = (int16_t)(multiplier * logf(sample));
} else if (sample < 0) {
value = (int16_t)(-multiplier * logf(-sample));
if (sample > 1) {
value = (int16_t)(multiplier * (logf(sample) + fadeOffset));
} else if (sample < -1) {
value = (int16_t)(-multiplier * (logf(-sample) + fadeOffset));
} else {
value = 0;
}
destination[i + frameOffset] = value;
destination[frameOffset] = value;
frameOffset = (frameOffset == _samplesPerScope - 1) ? 0 : frameOffset + 1;
}
return frameOffset;
}
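Because multiplier is MULTIPLIER_SCOPE_HEIGHT / logf(2.0f) and fadeOffset folds the fade into log space, halving the fade shifts every plotted sample down by exactly MULTIPLIER_SCOPE_HEIGHT scope units. A worked example with illustrative numbers:

// Worked example of the log mapping above:
//   multiplier = MULTIPLIER_SCOPE_HEIGHT / ln(2)
//   sample = 1000, fade = 0.5
//   value  = multiplier * (ln(1000) + ln(0.5)) == multiplier * ln(500)
// The fade term contributes multiplier * ln(0.5) = -MULTIPLIER_SCOPE_HEIGHT,
// i.e. one full scope-height step down per halving of the fade.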
int Audio::addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples) {
QMutexLocker lock(&_guard);
// Short int pointer to mapped samples in byte array
int16_t* destination = (int16_t*)byteArray->data();
if (silentSamples >= _samplesPerScope) {
memset(destination, 0, byteArray->size());
return frameOffset;
}
int samplesToBufferEnd = _samplesPerScope - frameOffset;
if (silentSamples > samplesToBufferEnd) {
memset(destination + frameOffset, 0, samplesToBufferEnd * sizeof(int16_t));
memset(destination, 0, (silentSamples - samplesToBufferEnd) * sizeof(int16_t));
} else {
memset(destination + frameOffset, 0, silentSamples * sizeof(int16_t));
}
return (frameOffset + silentSamples) % _samplesPerScope;
}
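addSilenceToScope() treats the scope as a circular buffer: zero the tail from the write offset, wrap, then zero the remainder at the head. A usage sketch with hypothetical numbers:

// Usage sketch: _samplesPerScope = 1000, frameOffset = 900, silentSamples = 250
//   samplesToBufferEnd = 1000 - 900 = 100   -> zero samples [900, 1000)
//   remaining          = 250 - 100  = 150   -> zero samples [0, 150)
//   returned offset    = (900 + 250) % 1000 = 150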
void Audio::renderStats(const float* color, int width, int height) {
@ -1517,17 +1568,17 @@ void Audio::renderScope(int width, int height) {
return;
static const float backgroundColor[4] = { 0.4f, 0.4f, 0.4f, 0.6f };
static const float gridColor[4] = { 0.3f, 0.3f, 0.3f, 0.6f };
static const float gridColor[4] = { 0.7f, 0.7f, 0.7f, 1.0f };
static const float inputColor[4] = { 0.3f, 1.0f, 0.3f, 1.0f };
static const float outputLeftColor[4] = { 1.0f, 0.3f, 0.3f, 1.0f };
static const float outputRightColor[4] = { 0.3f, 0.3f, 1.0f, 1.0f };
static const int gridRows = 2;
int gridCols = _framesPerScope;
int x = (width - SCOPE_WIDTH) / 2;
int y = (height - SCOPE_HEIGHT) / 2;
int w = SCOPE_WIDTH;
int h = SCOPE_HEIGHT;
int x = (width - (int)SCOPE_WIDTH) / 2;
int y = (height - (int)SCOPE_HEIGHT) / 2;
int w = (int)SCOPE_WIDTH;
int h = (int)SCOPE_HEIGHT;
renderBackground(backgroundColor, x, y, w, h);
renderGrid(gridColor, x, y, w, h, gridRows, gridCols);
@ -1717,7 +1768,7 @@ bool Audio::switchOutputToAudioDevice(const QAudioDeviceInfo& outputDeviceInfo)
// setup our general output device for audio-mixer audio
_audioOutput = new QAudioOutput(outputDeviceInfo, _outputFormat, this);
_audioOutput->setBufferSize(AUDIO_OUTPUT_BUFFER_SIZE_FRAMES * _outputFrameSize * sizeof(int16_t));
qDebug() << "Ring Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;
qDebug() << "Output Buffer capacity in frames: " << _audioOutput->bufferSize() / sizeof(int16_t) / (float)_outputFrameSize;
_audioOutputIODevice.start();
_audioOutput->start(&_audioOutputIODevice);
@ -1792,13 +1843,11 @@ float Audio::getInputRingBufferMsecsAvailable() const {
}
qint64 Audio::AudioOutputIODevice::readData(char * data, qint64 maxSize) {
MixedProcessedAudioStream& receivedAUdioStream = _parent._receivedAudioStream;
int samplesRequested = maxSize / sizeof(int16_t);
int samplesPopped;
int bytesWritten;
if ((samplesPopped = receivedAUdioStream.popSamples(samplesRequested, false)) > 0) {
AudioRingBuffer::ConstIterator lastPopOutput = receivedAUdioStream.getLastPopOutput();
if ((samplesPopped = _receivedAudioStream.popSamples(samplesRequested, false)) > 0) {
AudioRingBuffer::ConstIterator lastPopOutput = _receivedAudioStream.getLastPopOutput();
lastPopOutput.readSamples((int16_t*)data, samplesPopped);
bytesWritten = samplesPopped * sizeof(int16_t);
} else {
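The tail of readData() is cut off by this hunk, so the else branch's behavior is not shown. A hedged sketch of the function's overall shape, assuming the unshown branch zero-fills the device buffer on underflow:

// Sketch of the full readData() shape; the underflow branch is assumed.
qint64 Audio::AudioOutputIODevice::readData(char* data, qint64 maxSize) {
    int samplesRequested = maxSize / sizeof(int16_t);
    int samplesPopped = _receivedAudioStream.popSamples(samplesRequested, false);
    if (samplesPopped > 0) {
        _receivedAudioStream.getLastPopOutput().readSamples((int16_t*)data, samplesPopped);
        return samplesPopped * sizeof(int16_t);
    }
    memset(data, 0, maxSize);  // assumed: feed silence rather than stalling output
    return maxSize;
}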

View file

@ -21,6 +21,7 @@
#include "RingBufferHistory.h"
#include "MovingMinMaxAvg.h"
#include "AudioFilter.h"
#include "AudioFilterBank.h"
#include <QAudio>
#include <QAudioInput>
@ -50,14 +51,14 @@ public:
class AudioOutputIODevice : public QIODevice {
public:
AudioOutputIODevice(Audio& parent) : _parent(parent) {};
AudioOutputIODevice(MixedProcessedAudioStream& receivedAudioStream) : _receivedAudioStream(receivedAudioStream) {};
void start() { open(QIODevice::ReadOnly); }
void stop() { close(); }
qint64 readData(char * data, qint64 maxSize);
qint64 writeData(const char * data, qint64 maxSize) { return 0; }
private:
Audio& _parent;
MixedProcessedAudioStream& _receivedAudioStream;
};
@ -73,10 +74,7 @@ public:
virtual void startCollisionSound(float magnitude, float frequency, float noise, float duration, bool flashScreen);
virtual void startDrumSound(float volume, float frequency, float duration, float decay);
void setDynamicJitterBuffers(bool dynamicJitterBuffers) { _receivedAudioStream.setDynamicJitterBuffers(dynamicJitterBuffers); }
void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames) { _receivedAudioStream.setStaticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames); }
void setMaxFramesOverDesired(int maxFramesOverDesired) { _receivedAudioStream.setMaxFramesOverDesired(maxFramesOverDesired); }
void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& settings) { _receivedAudioStream.setSettings(settings); }
int getDesiredJitterBufferFrames() const { return _receivedAudioStream.getDesiredJitterBufferFrames(); }
@ -105,7 +103,6 @@ public:
float getAudioOutputAverageMsecsUnplayed() const { return (float)_audioOutputMsecsUnplayedStats.getWindowAverage(); }
void setRecorder(RecorderPointer recorder) { _recorder = recorder; }
void setPlayer(PlayerPointer player) { _player = player; }
public slots:
void start();
@ -113,7 +110,6 @@ public slots:
void addReceivedAudioToStream(const QByteArray& audioByteArray);
void parseAudioStreamStatsPacket(const QByteArray& packet);
void addSpatialAudioToBuffer(unsigned int sampleTime, const QByteArray& spatialAudio, unsigned int numSamples);
void processReceivedAudioStreamSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
void handleAudioInput();
void reset();
void resetStats();
@ -130,6 +126,10 @@ public slots:
void selectAudioScopeFiveFrames();
void selectAudioScopeTwentyFrames();
void selectAudioScopeFiftyFrames();
void addStereoSilenceToScope(int silentSamplesPerChannel);
void addLastFrameRepeatedWithFadeToScope(int samplesPerChannel);
void addStereoSamplesToScope(const QByteArray& samples);
void processReceivedSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
void toggleAudioFilter();
void selectAudioFilterFlat();
void selectAudioFilterTrebleCut();
@ -256,8 +256,9 @@ private:
void reallocateScope(int frames);
// Audio scope methods for data acquisition
void addBufferToScope(
QByteArray* byteArray, unsigned int frameOffset, const int16_t* source, unsigned int sourceChannel, unsigned int sourceNumberOfChannels);
int addBufferToScope(QByteArray* byteArray, int frameOffset, const int16_t* source, int sourceSamples,
unsigned int sourceChannel, unsigned int sourceNumberOfChannels, float fade = 1.0f);
int addSilenceToScope(QByteArray* byteArray, int frameOffset, int silentSamples);
// Audio scope methods for rendering
void renderBackground(const float* color, int x, int y, int width, int height);
@ -282,13 +283,14 @@ private:
int _samplesPerScope;
// Multi-band parametric EQ
bool _peqEnabled;
AudioFilterPEQ3 _peq;
bool _peqEnabled;
AudioFilterPEQ3m _peq;
QMutex _guard;
QByteArray* _scopeInput;
QByteArray* _scopeOutputLeft;
QByteArray* _scopeOutputRight;
QByteArray _scopeLastFrame;
#ifdef _WIN32
static const unsigned int STATS_WIDTH = 1500;
#else
@ -314,7 +316,6 @@ private:
AudioOutputIODevice _audioOutputIODevice;
WeakRecorderPointer _recorder;
WeakPlayerPointer _player;
};

View file

@ -48,6 +48,7 @@ void DatagramProcessor::processDatagrams() {
// only process this packet if we have a match on the packet version
switch (packetTypeForPacket(incomingPacket)) {
case PacketTypeMixedAudio:
case PacketTypeSilentAudioFrame:
QMetaObject::invokeMethod(&application->_audio, "addReceivedAudioToStream", Qt::QueuedConnection,
Q_ARG(QByteArray, incomingPacket));
break;

View file

@ -82,8 +82,7 @@ const int CONSOLE_HEIGHT = 200;
Menu::Menu() :
_actionHash(),
_audioJitterBufferFrames(0),
_maxFramesOverDesired(0),
_receivedAudioStreamSettings(),
_bandwidthDialog(NULL),
_fieldOfView(DEFAULT_FIELD_OF_VIEW_DEGREES),
_realWorldFieldOfView(DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES),
@ -115,6 +114,7 @@ Menu::Menu() :
_loginAction(NULL),
_preferencesDialog(NULL),
_loginDialog(NULL),
_hasLoginDialogDisplayed(false),
_snapshotsLocation(),
_scriptsLocation(),
_walletPrivateKey()
@ -680,8 +680,15 @@ void Menu::loadSettings(QSettings* settings) {
lockedSettings = true;
}
_audioJitterBufferFrames = loadSetting(settings, "audioJitterBufferFrames", 0);
_maxFramesOverDesired = loadSetting(settings, "maxFramesOverDesired", DEFAULT_MAX_FRAMES_OVER_DESIRED);
_receivedAudioStreamSettings._dynamicJitterBuffers = settings->value("dynamicJitterBuffers", DEFAULT_DYNAMIC_JITTER_BUFFERS).toBool();
_receivedAudioStreamSettings._maxFramesOverDesired = settings->value("maxFramesOverDesired", DEFAULT_MAX_FRAMES_OVER_DESIRED).toInt();
_receivedAudioStreamSettings._staticDesiredJitterBufferFrames = settings->value("staticDesiredJitterBufferFrames", DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES).toInt();
_receivedAudioStreamSettings._useStDevForJitterCalc = settings->value("useStDevForJitterCalc", DEFAULT_USE_STDEV_FOR_JITTER_CALC).toBool();
_receivedAudioStreamSettings._windowStarveThreshold = settings->value("windowStarveThreshold", DEFAULT_WINDOW_STARVE_THRESHOLD).toInt();
_receivedAudioStreamSettings._windowSecondsForDesiredCalcOnTooManyStarves = settings->value("windowSecondsForDesiredCalcOnTooManyStarves", DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES).toInt();
_receivedAudioStreamSettings._windowSecondsForDesiredReduction = settings->value("windowSecondsForDesiredReduction", DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION).toInt();
_receivedAudioStreamSettings._repetitionWithFade = settings->value("repetitionWithFade", DEFAULT_REPETITION_WITH_FADE).toBool();
_fieldOfView = loadSetting(settings, "fieldOfView", DEFAULT_FIELD_OF_VIEW_DEGREES);
_realWorldFieldOfView = loadSetting(settings, "realWorldFieldOfView", DEFAULT_REAL_WORLD_FIELD_OF_VIEW_DEGREES);
_faceshiftEyeDeflection = loadSetting(settings, "faceshiftEyeDeflection", DEFAULT_FACESHIFT_EYE_DEFLECTION);
@ -735,8 +742,15 @@ void Menu::saveSettings(QSettings* settings) {
lockedSettings = true;
}
settings->setValue("audioJitterBufferFrames", _audioJitterBufferFrames);
settings->setValue("maxFramesOverDesired", _maxFramesOverDesired);
settings->setValue("dynamicJitterBuffers", _receivedAudioStreamSettings._dynamicJitterBuffers);
settings->setValue("maxFramesOverDesired", _receivedAudioStreamSettings._maxFramesOverDesired);
settings->setValue("staticDesiredJitterBufferFrames", _receivedAudioStreamSettings._staticDesiredJitterBufferFrames);
settings->setValue("useStDevForJitterCalc", _receivedAudioStreamSettings._useStDevForJitterCalc);
settings->setValue("windowStarveThreshold", _receivedAudioStreamSettings._windowStarveThreshold);
settings->setValue("windowSecondsForDesiredCalcOnTooManyStarves", _receivedAudioStreamSettings._windowSecondsForDesiredCalcOnTooManyStarves);
settings->setValue("windowSecondsForDesiredReduction", _receivedAudioStreamSettings._windowSecondsForDesiredReduction);
settings->setValue("repetitionWithFade", _receivedAudioStreamSettings._repetitionWithFade);
settings->setValue("fieldOfView", _fieldOfView);
settings->setValue("faceshiftEyeDeflection", _faceshiftEyeDeflection);
settings->setValue("maxVoxels", _maxVoxels);
@ -1039,12 +1053,24 @@ void sendFakeEnterEvent() {
const float DIALOG_RATIO_OF_WINDOW = 0.30f;
void Menu::clearLoginDialogDisplayedFlag() {
// Needed for domains that don't require login.
_hasLoginDialogDisplayed = false;
}
void Menu::loginForCurrentDomain() {
if (!_loginDialog) {
if (!_loginDialog && !_hasLoginDialogDisplayed) {
_loginDialog = new LoginDialog(Application::getInstance()->getWindow());
_loginDialog->show();
_loginDialog->resizeAndPosition(false);
}
_hasLoginDialogDisplayed = true;
}
void Menu::showLoginForCurrentDomain() {
_hasLoginDialogDisplayed = false;
loginForCurrentDomain();
}
void Menu::editPreferences() {
@ -1391,7 +1417,7 @@ void Menu::toggleLoginMenuItem() {
// change the menu item to login
_loginAction->setText("Login");
connect(_loginAction, &QAction::triggered, this, &Menu::loginForCurrentDomain);
connect(_loginAction, &QAction::triggered, this, &Menu::showLoginForCurrentDomain);
}
}

View file

@ -89,10 +89,8 @@ public:
void triggerOption(const QString& menuOption);
QAction* getActionForOption(const QString& menuOption);
float getAudioJitterBufferFrames() const { return _audioJitterBufferFrames; }
void setAudioJitterBufferFrames(float audioJitterBufferSamples) { _audioJitterBufferFrames = audioJitterBufferSamples; }
int getMaxFramesOverDesired() const { return _maxFramesOverDesired; }
void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; }
const InboundAudioStream::Settings& getReceivedAudioStreamSettings() const { return _receivedAudioStreamSettings; }
void setReceivedAudioStreamSettings(const InboundAudioStream::Settings& receivedAudioStreamSettings) { _receivedAudioStreamSettings = receivedAudioStreamSettings; }
float getFieldOfView() const { return _fieldOfView; }
void setFieldOfView(float fieldOfView) { _fieldOfView = fieldOfView; }
float getRealWorldFieldOfView() const { return _realWorldFieldOfView; }
@ -177,7 +175,9 @@ signals:
public slots:
void clearLoginDialogDisplayedFlag();
void loginForCurrentDomain();
void showLoginForCurrentDomain();
void bandwidthDetails();
void octreeStatsDetails();
void lodTools();
@ -265,8 +265,7 @@ private:
QHash<QString, QAction*> _actionHash;
int _audioJitterBufferFrames; /// number of extra samples to wait before starting audio playback
int _maxFramesOverDesired;
InboundAudioStream::Settings _receivedAudioStreamSettings;
BandwidthDialog* _bandwidthDialog;
float _fieldOfView; /// in Degrees, doesn't apply to HMD like Oculus
float _realWorldFieldOfView; // The actual FOV set by the user's monitor size and view distance
@ -305,6 +304,7 @@ private:
QPointer<AttachmentsDialog> _attachmentsDialog;
QPointer<AnimationsDialog> _animationsDialog;
QPointer<LoginDialog> _loginDialog;
bool _hasLoginDialogDisplayed;
QAction* _chatAction;
QString _snapshotsLocation;
QString _scriptsLocation;

View file

@ -118,7 +118,7 @@ void MetavoxelSystem::render() {
viewFrustum->getNearBottomLeft(), viewFrustum->getNearBottomRight());
RenderVisitor renderVisitor(getLOD());
guideToAugmented(renderVisitor);
guideToAugmented(renderVisitor, true);
}
class RayHeightfieldIntersectionVisitor : public RayIntersectionVisitor {
@ -449,22 +449,29 @@ void MetavoxelSystem::renderHeightfieldCursor(const glm::vec3& position, float r
glDepthFunc(GL_LESS);
}
void MetavoxelSystem::deleteTextures(int heightID, int colorID) {
void MetavoxelSystem::deleteTextures(int heightID, int colorID, int textureID) {
glDeleteTextures(1, (GLuint*)&heightID);
glDeleteTextures(1, (GLuint*)&colorID);
glDeleteTextures(1, (GLuint*)&textureID);
}
MetavoxelClient* MetavoxelSystem::createClient(const SharedNodePointer& node) {
return new MetavoxelSystemClient(node, _updater);
}
void MetavoxelSystem::guideToAugmented(MetavoxelVisitor& visitor) {
void MetavoxelSystem::guideToAugmented(MetavoxelVisitor& visitor, bool render) {
foreach (const SharedNodePointer& node, NodeList::getInstance()->getNodeHash()) {
if (node->getType() == NodeType::MetavoxelServer) {
QMutexLocker locker(&node->getMutex());
MetavoxelSystemClient* client = static_cast<MetavoxelSystemClient*>(node->getLinkedData());
if (client) {
client->getAugmentedData().guide(visitor);
MetavoxelData data = client->getAugmentedData();
data.guide(visitor);
if (render) {
// save the rendered augmented data so that its cached texture references, etc., don't
// get collected when we replace it with more recent versions
client->setRenderedAugmentedData(data);
}
}
}
}
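The render flag exists so that the copy of the augmented data handed to the visitor is also stashed on the client; holding that copy keeps its reference-counted resources (cached texture references and the like) alive until a newer copy replaces it. A simplified sketch of the keep-alive pattern (types invented for illustration):

// Sketch: keep the last-rendered snapshot alive via shared ownership.
#include <memory>

struct SketchRenderData { /* refcounted textures, buffers, ... */ };

class SketchClient {
public:
    // Replacing _rendered drops the previous snapshot's references only
    // after the new one is in place, so nothing is freed mid-frame.
    void setRendered(std::shared_ptr<SketchRenderData> data) { _rendered = std::move(data); }
private:
    std::shared_ptr<SketchRenderData> _rendered;
};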
@ -601,15 +608,19 @@ const int HeightfieldBuffer::SHARED_EDGE = 1;
const int HeightfieldBuffer::HEIGHT_EXTENSION = 2 * HeightfieldBuffer::HEIGHT_BORDER + HeightfieldBuffer::SHARED_EDGE;
HeightfieldBuffer::HeightfieldBuffer(const glm::vec3& translation, float scale,
const QByteArray& height, const QByteArray& color) :
const QByteArray& height, const QByteArray& color, const QByteArray& texture,
const QVector<SharedObjectPointer>& textures) :
_translation(translation),
_scale(scale),
_heightBounds(translation, translation + glm::vec3(scale, scale, scale)),
_colorBounds(_heightBounds),
_height(height),
_color(color),
_texture(texture),
_textures(textures),
_heightTextureID(0),
_colorTextureID(0),
_textureTextureID(0),
_heightSize(glm::sqrt(height.size())),
_heightIncrement(scale / (_heightSize - HEIGHT_EXTENSION)),
_colorSize(glm::sqrt(color.size() / HeightfieldData::COLOR_BYTES)),
@ -628,10 +639,11 @@ HeightfieldBuffer::~HeightfieldBuffer() {
// the textures have to be deleted on the main thread (for its opengl context)
if (QThread::currentThread() != Application::getInstance()->thread()) {
QMetaObject::invokeMethod(Application::getInstance()->getMetavoxels(), "deleteTextures",
Q_ARG(int, _heightTextureID), Q_ARG(int, _colorTextureID));
Q_ARG(int, _heightTextureID), Q_ARG(int, _colorTextureID), Q_ARG(int, _textureTextureID));
} else {
glDeleteTextures(1, &_heightTextureID);
glDeleteTextures(1, &_colorTextureID);
glDeleteTextures(1, &_textureTextureID);
}
}
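GL objects can only be deleted on the thread that owns the context, which is why the destructor marshals the IDs over with QMetaObject::invokeMethod instead of calling glDeleteTextures directly. A minimal sketch of that pattern (the receiver object and slot name are illustrative):

// Sketch: delete a texture on the thread that owns the GL context.
// Qt::AutoConnection queues the call when invoked from another thread
// and runs it directly when already on the receiver's thread.
#include <QMetaObject>

void deleteTextureSafely(QObject* glOwner, int textureID) {
    QMetaObject::invokeMethod(glOwner, "deleteTexture", Q_ARG(int, textureID));
}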
@ -667,13 +679,17 @@ public:
glm::vec3 vertex;
};
const int SPLAT_COUNT = 4;
const GLint SPLAT_TEXTURE_UNITS[] = { 3, 4, 5, 6 };
void HeightfieldBuffer::render(bool cursor) {
// initialize textures, etc. on first render
if (_heightTextureID == 0) {
glGenTextures(1, &_heightTextureID);
glBindTexture(GL_TEXTURE_2D, _heightTextureID);
glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, _heightSize, _heightSize, 0,
@ -692,6 +708,27 @@ void HeightfieldBuffer::render(bool cursor) {
int colorSize = glm::sqrt(_color.size() / HeightfieldData::COLOR_BYTES);
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, colorSize, colorSize, 0, GL_RGB, GL_UNSIGNED_BYTE, _color.constData());
}
if (!_texture.isEmpty()) {
glGenTextures(1, &_textureTextureID);
glBindTexture(GL_TEXTURE_2D, _textureTextureID);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP_TO_EDGE);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP_TO_EDGE);
int textureSize = glm::sqrt(_texture.size());
glTexImage2D(GL_TEXTURE_2D, 0, GL_LUMINANCE, textureSize, textureSize, 0,
GL_LUMINANCE, GL_UNSIGNED_BYTE, _texture.constData());
_networkTextures.resize(_textures.size());
for (int i = 0; i < _textures.size(); i++) {
const SharedObjectPointer texture = _textures.at(i);
if (texture) {
_networkTextures[i] = Application::getInstance()->getTextureCache()->getTexture(
static_cast<HeightfieldTexture*>(texture.data())->getURL(), SPLAT_TEXTURE);
}
}
}
}
// create the buffer objects lazily
int innerSize = _heightSize - 2 * HeightfieldBuffer::HEIGHT_BORDER;
@ -759,7 +796,115 @@ void HeightfieldBuffer::render(bool cursor) {
glBindTexture(GL_TEXTURE_2D, _heightTextureID);
if (!cursor) {
if (cursor) {
glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0);
} else if (!_textures.isEmpty()) {
DefaultMetavoxelRendererImplementation::getBaseHeightfieldProgram().bind();
DefaultMetavoxelRendererImplementation::getBaseHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getBaseHeightScaleLocation(), 1.0f / _heightSize);
DefaultMetavoxelRendererImplementation::getBaseHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getBaseColorScaleLocation(), (float)_heightSize / innerSize);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _colorTextureID);
glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0);
glDepthFunc(GL_LEQUAL);
glDepthMask(false);
glEnable(GL_BLEND);
glDisable(GL_ALPHA_TEST);
glEnable(GL_POLYGON_OFFSET_FILL);
glPolygonOffset(-1.0f, -1.0f);
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().bind();
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatHeightScaleLocation(), 1.0f / _heightSize);
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatTextureScaleLocation(), (float)_heightSize / innerSize);
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatTextureOffsetLocation(),
_translation.x / _scale, _translation.z / _scale);
glBindTexture(GL_TEXTURE_2D, _textureTextureID);
const int TEXTURES_PER_SPLAT = 4;
for (int i = 0; i < _textures.size(); i += TEXTURES_PER_SPLAT) {
QVector4D scalesS, scalesT;
for (int j = 0; j < SPLAT_COUNT; j++) {
glActiveTexture(GL_TEXTURE0 + SPLAT_TEXTURE_UNITS[j]);
int index = i + j;
if (index < _networkTextures.size()) {
const NetworkTexturePointer& texture = _networkTextures.at(index);
if (texture) {
HeightfieldTexture* heightfieldTexture = static_cast<HeightfieldTexture*>(_textures.at(index).data());
scalesS[j] = _scale / heightfieldTexture->getScaleS();
scalesT[j] = _scale / heightfieldTexture->getScaleT();
glBindTexture(GL_TEXTURE_2D, texture->getID());
} else {
glBindTexture(GL_TEXTURE_2D, 0);
}
} else {
glBindTexture(GL_TEXTURE_2D, 0);
}
}
const float QUARTER_STEP = 0.25f * EIGHT_BIT_MAXIMUM_RECIPROCAL;
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatTextureScalesSLocation(), scalesS);
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatTextureScalesTLocation(), scalesT);
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatTextureValueMinimaLocation(),
(i + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP, (i + 2) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP,
(i + 3) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP, (i + 4) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP);
DefaultMetavoxelRendererImplementation::getSplatHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getSplatTextureValueMaximaLocation(),
(i + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP, (i + 2) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP,
(i + 3) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP, (i + 4) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP);
glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0);
}
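The minima/maxima uniforms carve a half-step band around each layer index: texel values are stored as index * EIGHT_BIT_MAXIMUM_RECIPROCAL (taking EIGHT_BIT_MAXIMUM as 255), so the batch starting at texture i covers indices i + 1 through i + 4, each within a quarter step on either side. A sketch of the bounds computation:

// Sketch: band bounds fed to the splat shader for slot j (0..3) of batch i.
const float EIGHT_BIT_MAXIMUM_RECIPROCAL = 1.0f / 255.0f;
const float QUARTER_STEP = 0.25f * EIGHT_BIT_MAXIMUM_RECIPROCAL;

float bandMinimum(int i, int j) {
    return (i + j + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL - QUARTER_STEP;
}
float bandMaximum(int i, int j) {
    return (i + j + 1) * EIGHT_BIT_MAXIMUM_RECIPROCAL + QUARTER_STEP;
}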
glEnable(GL_ALPHA_TEST);
glBlendFunc(GL_DST_COLOR, GL_ZERO);
for (int i = 0; i < SPLAT_COUNT; i++) {
glActiveTexture(GL_TEXTURE0 + SPLAT_TEXTURE_UNITS[i]);
glBindTexture(GL_TEXTURE_2D, 0);
}
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, 0);
if (Menu::getInstance()->isOptionChecked(MenuOption::SimpleShadows)) {
DefaultMetavoxelRendererImplementation::getShadowLightHeightfieldProgram().bind();
DefaultMetavoxelRendererImplementation::getShadowLightHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getShadowLightHeightScaleLocation(), 1.0f / _heightSize);
} else if (Menu::getInstance()->isOptionChecked(MenuOption::CascadedShadows)) {
DefaultMetavoxelRendererImplementation::getCascadedShadowLightHeightfieldProgram().bind();
DefaultMetavoxelRendererImplementation::getCascadedShadowLightHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getCascadedShadowLightHeightScaleLocation(), 1.0f / _heightSize);
} else {
DefaultMetavoxelRendererImplementation::getLightHeightfieldProgram().bind();
DefaultMetavoxelRendererImplementation::getLightHeightfieldProgram().setUniformValue(
DefaultMetavoxelRendererImplementation::getBaseHeightScaleLocation(), 1.0f / _heightSize);
}
glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0);
DefaultMetavoxelRendererImplementation::getHeightfieldProgram().bind();
glDisable(GL_POLYGON_OFFSET_FILL);
glDisable(GL_BLEND);
glBlendFuncSeparate(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA, GL_CONSTANT_ALPHA, GL_ONE);
glDepthFunc(GL_LESS);
glDepthMask(true);
glActiveTexture(GL_TEXTURE0);
} else {
int heightScaleLocation = DefaultMetavoxelRendererImplementation::getHeightScaleLocation();
int colorScaleLocation = DefaultMetavoxelRendererImplementation::getColorScaleLocation();
ProgramObject* program = &DefaultMetavoxelRendererImplementation::getHeightfieldProgram();
@ -777,11 +922,9 @@ void HeightfieldBuffer::render(bool cursor) {
program->setUniformValue(colorScaleLocation, (float)_heightSize / innerSize);
glActiveTexture(GL_TEXTURE1);
glBindTexture(GL_TEXTURE_2D, _colorTextureID);
}
glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0);
if (!cursor) {
glDrawRangeElements(GL_TRIANGLES, 0, vertexCount - 1, indexCount, GL_UNSIGNED_INT, 0);
glBindTexture(GL_TEXTURE_2D, 0);
glActiveTexture(GL_TEXTURE0);
}
@ -898,6 +1041,74 @@ void DefaultMetavoxelRendererImplementation::init() {
_shadowDistancesLocation = _cascadedShadowMapHeightfieldProgram.uniformLocation("shadowDistances");
_cascadedShadowMapHeightfieldProgram.release();
_baseHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_heightfield_base.vert");
_baseHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/metavoxel_heightfield_base.frag");
_baseHeightfieldProgram.link();
_baseHeightfieldProgram.bind();
_baseHeightfieldProgram.setUniformValue("heightMap", 0);
_baseHeightfieldProgram.setUniformValue("diffuseMap", 1);
_baseHeightScaleLocation = _heightfieldProgram.uniformLocation("heightScale");
_baseColorScaleLocation = _heightfieldProgram.uniformLocation("colorScale");
_baseHeightfieldProgram.release();
_splatHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_heightfield_splat.vert");
_splatHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/metavoxel_heightfield_splat.frag");
_splatHeightfieldProgram.link();
_splatHeightfieldProgram.bind();
_splatHeightfieldProgram.setUniformValue("heightMap", 0);
_splatHeightfieldProgram.setUniformValue("textureMap", 1);
_splatHeightfieldProgram.setUniformValueArray("diffuseMaps", SPLAT_TEXTURE_UNITS, SPLAT_COUNT);
_splatHeightScaleLocation = _splatHeightfieldProgram.uniformLocation("heightScale");
_splatTextureScaleLocation = _splatHeightfieldProgram.uniformLocation("textureScale");
_splatTextureOffsetLocation = _splatHeightfieldProgram.uniformLocation("splatTextureOffset");
_splatTextureScalesSLocation = _splatHeightfieldProgram.uniformLocation("splatTextureScalesS");
_splatTextureScalesTLocation = _splatHeightfieldProgram.uniformLocation("splatTextureScalesT");
_splatTextureValueMinimaLocation = _splatHeightfieldProgram.uniformLocation("textureValueMinima");
_splatTextureValueMaximaLocation = _splatHeightfieldProgram.uniformLocation("textureValueMaxima");
_splatHeightfieldProgram.release();
_lightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_heightfield_light.vert");
_lightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/metavoxel_heightfield_light.frag");
_lightHeightfieldProgram.link();
_lightHeightfieldProgram.bind();
_lightHeightfieldProgram.setUniformValue("heightMap", 0);
_lightHeightScaleLocation = _lightHeightfieldProgram.uniformLocation("heightScale");
_lightHeightfieldProgram.release();
_shadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_heightfield_light.vert");
_shadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/metavoxel_heightfield_light_shadow_map.frag");
_shadowLightHeightfieldProgram.link();
_shadowLightHeightfieldProgram.bind();
_shadowLightHeightfieldProgram.setUniformValue("heightMap", 0);
_shadowLightHeightfieldProgram.setUniformValue("shadowMap", 2);
_shadowLightHeightScaleLocation = _shadowLightHeightfieldProgram.uniformLocation("heightScale");
_shadowLightHeightfieldProgram.release();
_cascadedShadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_heightfield_light.vert");
_cascadedShadowLightHeightfieldProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
"shaders/metavoxel_heightfield_light_cascaded_shadow_map.frag");
_cascadedShadowLightHeightfieldProgram.link();
_cascadedShadowLightHeightfieldProgram.bind();
_cascadedShadowLightHeightfieldProgram.setUniformValue("heightMap", 0);
_cascadedShadowLightHeightfieldProgram.setUniformValue("shadowMap", 2);
_cascadedShadowLightHeightScaleLocation = _cascadedShadowLightHeightfieldProgram.uniformLocation("heightScale");
_shadowLightDistancesLocation = _cascadedShadowLightHeightfieldProgram.uniformLocation("shadowDistances");
_cascadedShadowLightHeightfieldProgram.release();
_heightfieldCursorProgram.addShaderFromSourceFile(QGLShader::Vertex, Application::resourcesPath() +
"shaders/metavoxel_heightfield_cursor.vert");
_heightfieldCursorProgram.addShaderFromSourceFile(QGLShader::Fragment, Application::resourcesPath() +
@ -1011,7 +1222,7 @@ int HeightfieldFetchVisitor::visit(MetavoxelInfo& info) {
if (!info.isLeaf && info.size > _buffer->getScale()) {
return DEFAULT_ORDER;
}
HeightfieldDataPointer height = info.inputValues.at(0).getInlineValue<HeightfieldDataPointer>();
HeightfieldHeightDataPointer height = info.inputValues.at(0).getInlineValue<HeightfieldHeightDataPointer>();
if (!height) {
return STOP_RECURSION;
}
@ -1065,11 +1276,11 @@ int HeightfieldFetchVisitor::visit(MetavoxelInfo& info) {
int colorSize = _buffer->getColorSize();
if (colorSize == 0) {
return STOP_RECURSION;
continue;
}
HeightfieldDataPointer color = info.inputValues.at(1).getInlineValue<HeightfieldDataPointer>();
HeightfieldColorDataPointer color = info.inputValues.at(1).getInlineValue<HeightfieldColorDataPointer>();
if (!color) {
return STOP_RECURSION;
continue;
}
const Box& colorBounds = _buffer->getColorBounds();
overlap = colorBounds.getIntersection(overlap);
@ -1138,6 +1349,7 @@ private:
HeightfieldRegionVisitor::HeightfieldRegionVisitor(const MetavoxelLOD& lod) :
MetavoxelVisitor(QVector<AttributePointer>() << AttributeRegistry::getInstance()->getHeightfieldAttribute() <<
AttributeRegistry::getInstance()->getHeightfieldColorAttribute() <<
AttributeRegistry::getInstance()->getHeightfieldTextureAttribute() <<
Application::getInstance()->getMetavoxels()->getHeightfieldBufferAttribute(), QVector<AttributePointer>() <<
Application::getInstance()->getMetavoxels()->getHeightfieldBufferAttribute(), lod),
regionBounds(glm::vec3(FLT_MAX, FLT_MAX, FLT_MAX), glm::vec3(-FLT_MAX, -FLT_MAX, -FLT_MAX)),
@ -1149,14 +1361,14 @@ int HeightfieldRegionVisitor::visit(MetavoxelInfo& info) {
return DEFAULT_ORDER;
}
HeightfieldBuffer* buffer = NULL;
HeightfieldDataPointer height = info.inputValues.at(0).getInlineValue<HeightfieldDataPointer>();
HeightfieldHeightDataPointer height = info.inputValues.at(0).getInlineValue<HeightfieldHeightDataPointer>();
if (height) {
const QByteArray& heightContents = height->getContents();
int size = glm::sqrt(heightContents.size());
int extendedSize = size + HeightfieldBuffer::HEIGHT_EXTENSION;
int heightContentsSize = extendedSize * extendedSize;
HeightfieldDataPointer color = info.inputValues.at(1).getInlineValue<HeightfieldDataPointer>();
HeightfieldColorDataPointer color = info.inputValues.at(1).getInlineValue<HeightfieldColorDataPointer>();
int colorContentsSize = 0;
if (color) {
const QByteArray& colorContents = color->getContents();
@ -1165,33 +1377,44 @@ int HeightfieldRegionVisitor::visit(MetavoxelInfo& info) {
colorContentsSize = extendedColorSize * extendedColorSize * HeightfieldData::COLOR_BYTES;
}
HeightfieldTextureDataPointer texture = info.inputValues.at(2).getInlineValue<HeightfieldTextureDataPointer>();
QByteArray textureContents;
QVector<SharedObjectPointer> textures;
if (texture) {
textureContents = texture->getContents();
textures = texture->getTextures();
}
const HeightfieldBuffer* existingBuffer = static_cast<const HeightfieldBuffer*>(
info.inputValues.at(2).getInlineValue<BufferDataPointer>().data());
info.inputValues.at(3).getInlineValue<BufferDataPointer>().data());
Box bounds = info.getBounds();
if (existingBuffer && existingBuffer->getHeight().size() == heightContentsSize &&
existingBuffer->getColor().size() == colorContentsSize) {
// we already have a buffer of the correct resolution
addRegion(bounds, existingBuffer->getHeightBounds());
return STOP_RECURSION;
buffer = new HeightfieldBuffer(info.minimum, info.size, existingBuffer->getHeight(),
existingBuffer->getColor(), textureContents, textures);
} else {
// we must create a new buffer and update its borders
buffer = new HeightfieldBuffer(info.minimum, info.size, QByteArray(heightContentsSize, 0),
QByteArray(colorContentsSize, 0), textureContents, textures);
const Box& heightBounds = buffer->getHeightBounds();
addRegion(bounds, heightBounds);
_intersections.clear();
_intersections.append(Box(heightBounds.minimum,
glm::vec3(bounds.maximum.x, heightBounds.maximum.y, bounds.minimum.z)));
_intersections.append(Box(glm::vec3(bounds.maximum.x, heightBounds.minimum.y, heightBounds.minimum.z),
glm::vec3(heightBounds.maximum.x, heightBounds.maximum.y, bounds.maximum.z)));
_intersections.append(Box(glm::vec3(bounds.minimum.x, heightBounds.minimum.y, bounds.maximum.z),
heightBounds.maximum));
_intersections.append(Box(glm::vec3(heightBounds.minimum.x, heightBounds.minimum.y, bounds.minimum.z),
glm::vec3(bounds.minimum.x, heightBounds.maximum.y, heightBounds.maximum.z)));
_fetchVisitor.init(buffer);
_data->guide(_fetchVisitor);
}
// we must create a new buffer and update its borders
buffer = new HeightfieldBuffer(info.minimum, info.size, QByteArray(heightContentsSize, 0),
QByteArray(colorContentsSize, 0));
const Box& heightBounds = buffer->getHeightBounds();
addRegion(bounds, heightBounds);
_intersections.clear();
_intersections.append(Box(heightBounds.minimum,
glm::vec3(bounds.maximum.x, heightBounds.maximum.y, bounds.minimum.z)));
_intersections.append(Box(glm::vec3(bounds.maximum.x, heightBounds.minimum.y, heightBounds.minimum.z),
glm::vec3(heightBounds.maximum.x, heightBounds.maximum.y, bounds.maximum.z)));
_intersections.append(Box(glm::vec3(bounds.minimum.x, heightBounds.minimum.y, bounds.maximum.z),
heightBounds.maximum));
_intersections.append(Box(glm::vec3(heightBounds.minimum.x, heightBounds.minimum.y, bounds.minimum.z),
glm::vec3(bounds.minimum.x, heightBounds.maximum.y, heightBounds.maximum.z)));
_fetchVisitor.init(buffer);
_data->guide(_fetchVisitor);
}
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(BufferDataPointer(buffer)));
return STOP_RECURSION;
@ -1249,7 +1472,7 @@ int HeightfieldUpdateVisitor::visit(MetavoxelInfo& info) {
return STOP_RECURSION;
}
HeightfieldBuffer* newBuffer = new HeightfieldBuffer(info.minimum, info.size,
buffer->getHeight(), buffer->getColor());
buffer->getHeight(), buffer->getColor(), buffer->getTexture(), buffer->getTextures());
_fetchVisitor.init(newBuffer);
_data->guide(_fetchVisitor);
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline(BufferDataPointer(newBuffer)));
@ -1437,6 +1660,9 @@ void DefaultMetavoxelRendererImplementation::render(MetavoxelData& data, Metavox
ProgramObject* program = &_heightfieldProgram;
if (Menu::getInstance()->getShadowsEnabled()) {
if (Menu::getInstance()->isOptionChecked(MenuOption::CascadedShadows)) {
_cascadedShadowLightHeightfieldProgram.bind();
_cascadedShadowLightHeightfieldProgram.setUniform(_shadowLightDistancesLocation,
Application::getInstance()->getShadowDistances());
program = &_cascadedShadowMapHeightfieldProgram;
program->bind();
program->setUniform(_shadowDistancesLocation, Application::getInstance()->getShadowDistances());
@ -1482,6 +1708,24 @@ ProgramObject DefaultMetavoxelRendererImplementation::_cascadedShadowMapHeightfi
int DefaultMetavoxelRendererImplementation::_cascadedShadowMapHeightScaleLocation;
int DefaultMetavoxelRendererImplementation::_cascadedShadowMapColorScaleLocation;
int DefaultMetavoxelRendererImplementation::_shadowDistancesLocation;
ProgramObject DefaultMetavoxelRendererImplementation::_baseHeightfieldProgram;
int DefaultMetavoxelRendererImplementation::_baseHeightScaleLocation;
int DefaultMetavoxelRendererImplementation::_baseColorScaleLocation;
ProgramObject DefaultMetavoxelRendererImplementation::_splatHeightfieldProgram;
int DefaultMetavoxelRendererImplementation::_splatHeightScaleLocation;
int DefaultMetavoxelRendererImplementation::_splatTextureScaleLocation;
int DefaultMetavoxelRendererImplementation::_splatTextureOffsetLocation;
int DefaultMetavoxelRendererImplementation::_splatTextureScalesSLocation;
int DefaultMetavoxelRendererImplementation::_splatTextureScalesTLocation;
int DefaultMetavoxelRendererImplementation::_splatTextureValueMinimaLocation;
int DefaultMetavoxelRendererImplementation::_splatTextureValueMaximaLocation;
ProgramObject DefaultMetavoxelRendererImplementation::_lightHeightfieldProgram;
int DefaultMetavoxelRendererImplementation::_lightHeightScaleLocation;
ProgramObject DefaultMetavoxelRendererImplementation::_shadowLightHeightfieldProgram;
int DefaultMetavoxelRendererImplementation::_shadowLightHeightScaleLocation;
ProgramObject DefaultMetavoxelRendererImplementation::_cascadedShadowLightHeightfieldProgram;
int DefaultMetavoxelRendererImplementation::_cascadedShadowLightHeightScaleLocation;
int DefaultMetavoxelRendererImplementation::_shadowLightDistancesLocation;
ProgramObject DefaultMetavoxelRendererImplementation::_heightfieldCursorProgram;
static void enableClipPlane(GLenum plane, float x, float y, float z, float w) {

View file

@ -49,7 +49,7 @@ public:
Q_INVOKABLE float getHeightfieldHeight(const glm::vec3& location);
Q_INVOKABLE void deleteTextures(int heightID, int colorID);
Q_INVOKABLE void deleteTextures(int heightID, int colorID, int textureID);
protected:
@ -57,7 +57,7 @@ protected:
private:
void guideToAugmented(MetavoxelVisitor& visitor);
void guideToAugmented(MetavoxelVisitor& visitor, bool render = false);
AttributePointer _pointBufferAttribute;
AttributePointer _heightfieldBufferAttribute;
@ -92,6 +92,8 @@ public:
/// Returns a copy of the augmented data. This function is thread-safe.
MetavoxelData getAugmentedData();
void setRenderedAugmentedData(const MetavoxelData& data) { _renderedAugmentedData = data; }
virtual int parseData(const QByteArray& packet);
protected:
@ -102,6 +104,7 @@ protected:
private:
MetavoxelData _augmentedData;
MetavoxelData _renderedAugmentedData;
QReadWriteLock _augmentedDataLock;
};
@ -139,7 +142,9 @@ public:
static const int SHARED_EDGE;
static const int HEIGHT_EXTENSION;
HeightfieldBuffer(const glm::vec3& translation, float scale, const QByteArray& height, const QByteArray& color);
HeightfieldBuffer(const glm::vec3& translation, float scale, const QByteArray& height,
const QByteArray& color, const QByteArray& texture = QByteArray(),
const QVector<SharedObjectPointer>& textures = QVector<SharedObjectPointer>());
~HeightfieldBuffer();
const glm::vec3& getTranslation() const { return _translation; }
@ -154,6 +159,11 @@ public:
QByteArray& getColor() { return _color; }
const QByteArray& getColor() const { return _color; }
QByteArray& getTexture() { return _texture; }
const QByteArray& getTexture() const { return _texture; }
const QVector<SharedObjectPointer>& getTextures() const { return _textures; }
QByteArray getUnextendedHeight() const;
QByteArray getUnextendedColor() const;
@ -173,13 +183,17 @@ private:
Box _colorBounds;
QByteArray _height;
QByteArray _color;
QByteArray _texture;
QVector<SharedObjectPointer> _textures;
GLuint _heightTextureID;
GLuint _colorTextureID;
GLuint _textureTextureID;
QVector<NetworkTexturePointer> _networkTextures;
int _heightSize;
float _heightIncrement;
int _colorSize;
float _colorIncrement;
typedef QPair<QOpenGLBuffer, QOpenGLBuffer> BufferPair;
static QHash<int, BufferPair> _bufferPairs;
};
@ -231,6 +245,28 @@ public:
static int getCascadedShadowMapHeightScaleLocation() { return _cascadedShadowMapHeightScaleLocation; }
static int getCascadedShadowMapColorScaleLocation() { return _cascadedShadowMapColorScaleLocation; }
static ProgramObject& getBaseHeightfieldProgram() { return _baseHeightfieldProgram; }
static int getBaseHeightScaleLocation() { return _baseHeightScaleLocation; }
static int getBaseColorScaleLocation() { return _baseColorScaleLocation; }
static ProgramObject& getSplatHeightfieldProgram() { return _splatHeightfieldProgram; }
static int getSplatHeightScaleLocation() { return _splatHeightScaleLocation; }
static int getSplatTextureScaleLocation() { return _splatTextureScaleLocation; }
static int getSplatTextureOffsetLocation() { return _splatTextureOffsetLocation; }
static int getSplatTextureScalesSLocation() { return _splatTextureScalesSLocation; }
static int getSplatTextureScalesTLocation() { return _splatTextureScalesTLocation; }
static int getSplatTextureValueMinimaLocation() { return _splatTextureValueMinimaLocation; }
static int getSplatTextureValueMaximaLocation() { return _splatTextureValueMaximaLocation; }
static ProgramObject& getLightHeightfieldProgram() { return _lightHeightfieldProgram; }
static int getLightHeightScaleLocation() { return _lightHeightScaleLocation; }
static ProgramObject& getShadowLightHeightfieldProgram() { return _shadowLightHeightfieldProgram; }
static int getShadowLightHeightScaleLocation() { return _shadowLightHeightScaleLocation; }
static ProgramObject& getCascadedShadowLightHeightfieldProgram() { return _cascadedShadowLightHeightfieldProgram; }
static int getCascadedShadowLightHeightScaleLocation() { return _cascadedShadowLightHeightScaleLocation; }
static ProgramObject& getHeightfieldCursorProgram() { return _heightfieldCursorProgram; }
Q_INVOKABLE DefaultMetavoxelRendererImplementation();
@ -257,6 +293,29 @@ private:
static int _cascadedShadowMapColorScaleLocation;
static int _shadowDistancesLocation;
static ProgramObject _baseHeightfieldProgram;
static int _baseHeightScaleLocation;
static int _baseColorScaleLocation;
static ProgramObject _splatHeightfieldProgram;
static int _splatHeightScaleLocation;
static int _splatTextureScaleLocation;
static int _splatTextureOffsetLocation;
static int _splatTextureScalesSLocation;
static int _splatTextureScalesTLocation;
static int _splatTextureValueMinimaLocation;
static int _splatTextureValueMaximaLocation;
static ProgramObject _lightHeightfieldProgram;
static int _lightHeightScaleLocation;
static ProgramObject _shadowLightHeightfieldProgram;
static int _shadowLightHeightScaleLocation;
static ProgramObject _cascadedShadowLightHeightfieldProgram;
static int _cascadedShadowLightHeightScaleLocation;
static int _shadowLightDistancesLocation;
static ProgramObject _heightfieldCursorProgram;
};

View file

@ -54,7 +54,7 @@ void FaceModel::maybeUpdateNeckRotation(const JointState& parentState, const FBX
state.setRotationInConstrainedFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalRoll(), glm::normalize(inverse * axes[2]))
* glm::angleAxis(RADIANS_PER_DEGREE * _owningHead->getFinalYaw(), glm::normalize(inverse * axes[1]))
* glm::angleAxis(- RADIANS_PER_DEGREE * _owningHead->getFinalPitch(), glm::normalize(inverse * axes[0]))
* joint.rotation);
* joint.rotation, DEFAULT_PRIORITY);
}
void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
@ -69,7 +69,7 @@ void FaceModel::maybeUpdateEyeRotation(const JointState& parentState, const FBXJ
glm::quat between = rotationBetween(front, lookAt);
const float MAX_ANGLE = 30.0f * RADIANS_PER_DEGREE;
state.setRotationInConstrainedFrame(glm::angleAxis(glm::clamp(glm::angle(between), -MAX_ANGLE, MAX_ANGLE), glm::axis(between)) *
joint.rotation);
joint.rotation, DEFAULT_PRIORITY);
}
void FaceModel::updateJointState(int index) {

View file

@ -222,6 +222,18 @@ glm::vec3 Head::getScalePivot() const {
return _faceModel.isActive() ? _faceModel.getTranslation() : _position;
}
void Head::setFinalPitch(float finalPitch) {
_deltaPitch = glm::clamp(finalPitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH) - _basePitch;
}
void Head::setFinalYaw(float finalYaw) {
_deltaYaw = glm::clamp(finalYaw, MIN_HEAD_YAW, MAX_HEAD_YAW) - _baseYaw;
}
void Head::setFinalRoll(float finalRoll) {
_deltaRoll = glm::clamp(finalRoll, MIN_HEAD_ROLL, MAX_HEAD_ROLL) - _baseRoll;
}
float Head::getFinalYaw() const {
return glm::clamp(_baseYaw + _deltaYaw, MIN_HEAD_YAW, MAX_HEAD_YAW);
}
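
These setters store the clamped target as a delta from the base angle, and the getters clamp the sum again on the way out, so neither trackers nor scripts can push the head past its limits even if the base angle moves later. A minimal sketch of that round trip for pitch (limit values hypothetical; the real MIN_HEAD_PITCH / MAX_HEAD_PITCH are defined elsewhere in the codebase):

#include <glm/glm.hpp>

// Hypothetical limits; stand-ins for MIN_HEAD_PITCH / MAX_HEAD_PITCH.
const float MIN_PITCH = -60.0f;
const float MAX_PITCH = 60.0f;

float basePitch = 10.0f;   // driven by the avatar's own orientation
float deltaPitch = 0.0f;   // driven by trackers or scripts

void setFinalPitch(float finalPitch) {
    // clamp the target first, then store only the offset from the base
    deltaPitch = glm::clamp(finalPitch, MIN_PITCH, MAX_PITCH) - basePitch;
}

float getFinalPitch() {
    // clamp again: a later change to basePitch cannot push the sum out of range
    return glm::clamp(basePitch + deltaPitch, MIN_PITCH, MAX_PITCH);
}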

View file

@ -95,6 +95,9 @@ public:
void setDeltaRoll(float roll) { _deltaRoll = roll; }
float getDeltaRoll() const { return _deltaRoll; }
virtual void setFinalYaw(float finalYaw);
virtual void setFinalPitch(float finalPitch);
virtual void setFinalRoll(float finalRoll);
virtual float getFinalPitch() const;
virtual float getFinalYaw() const;
virtual float getFinalRoll() const;

View file

@ -272,10 +272,12 @@ void MyAvatar::simulate(float deltaTime) {
// Update avatar head rotation with sensor data
void MyAvatar::updateFromTrackers(float deltaTime) {
glm::vec3 estimatedPosition, estimatedRotation;
if (isPlaying()) {
estimatedRotation = glm::degrees(safeEulerAngles(_player->getHeadRotation()));
} else if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
if (isPlaying() && !OculusManager::isConnected()) {
return;
}
if (Application::getInstance()->getPrioVR()->hasHeadRotation()) {
estimatedRotation = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getHeadRotation()));
estimatedRotation.x *= -1.0f;
estimatedRotation.z *= -1.0f;
@ -327,11 +329,6 @@ void MyAvatar::updateFromTrackers(float deltaTime) {
}
head->setDeltaRoll(estimatedRotation.z);
if (isPlaying()) {
head->setLeanSideways(_player->getLeanSideways());
head->setLeanForward(_player->getLeanForward());
return;
}
// the PrioVR can give us exact lean
if (Application::getInstance()->getPrioVR()->isActive()) {
glm::vec3 eulers = glm::degrees(safeEulerAngles(Application::getInstance()->getPrioVR()->getTorsoRotation()));
@ -576,58 +573,6 @@ void MyAvatar::saveRecording(QString filename) {
}
}
bool MyAvatar::isPlaying() {
if (!_player) {
return false;
}
if (QThread::currentThread() != thread()) {
bool result;
QMetaObject::invokeMethod(this, "isPlaying", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result));
return result;
}
return _player && _player->isPlaying();
}
qint64 MyAvatar::playerElapsed() {
if (!_player) {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
return result;
}
return _player->elapsed();
}
qint64 MyAvatar::playerLength() {
if (!_player) {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
return result;
}
return _player->getRecording()->getLength();
}
void MyAvatar::loadRecording(QString filename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
Q_ARG(QString, filename));
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
_player->loadFromFile(filename);
}
void MyAvatar::loadLastRecording() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadLastRecording", Qt::BlockingQueuedConnection);
@ -644,32 +589,6 @@ void MyAvatar::loadLastRecording() {
_player->loadRecording(_recorder->getRecording());
}
void MyAvatar::startPlaying() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
Application::getInstance()->getAudio()->setPlayer(_player);
_player->startPlaying();
}
void MyAvatar::stopPlaying() {
if (!_player) {
return;
}
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopPlaying", Qt::BlockingQueuedConnection);
return;
}
if (_player) {
_player->stopPlaying();
}
}
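
The removed player accessors all used the same Qt idiom that survives in loadLastRecording() above: if the caller is on the wrong thread, re-issue the call through the object's event loop with Qt::BlockingQueuedConnection and marshal the return value back. A generic sketch of that pattern, with hypothetical names:

#include <QtCore/QObject>
#include <QtCore/QThread>

class Example : public QObject {
    Q_OBJECT
public slots:
    bool isBusy() {
        if (QThread::currentThread() != thread()) {
            bool result;
            // block the calling thread until the slot has run on the owning thread
            QMetaObject::invokeMethod(this, "isBusy", Qt::BlockingQueuedConnection,
                                      Q_RETURN_ARG(bool, result));
            return result;
        }
        return _busy;   // now guaranteed to run on the owning thread
    }
private:
    bool _busy = false;
};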
void MyAvatar::setLocalGravity(glm::vec3 gravity) {
_motionBehaviors |= AVATAR_MOTION_OBEY_LOCAL_GRAVITY;
// Environmental and Local gravities are incompatible. Since Local is being set here
@ -1050,36 +969,39 @@ glm::vec3 MyAvatar::getUprightHeadPosition() const {
return _position + getWorldAlignedOrientation() * glm::vec3(0.0f, getPelvisToHeadLength(), 0.0f);
}
const float JOINT_PRIORITY = 2.0f;
const float SCRIPT_PRIORITY = DEFAULT_PRIORITY + 1.0f;
const float RECORDER_PRIORITY = SCRIPT_PRIORITY + 1.0f;
void MyAvatar::setJointRotations(QVector<glm::quat> jointRotations) {
for (int i = 0; i < jointRotations.size(); ++i) {
if (i < _jointData.size()) {
_skeletonModel.setJointState(i, true, jointRotations[i], JOINT_PRIORITY + 1.0f);
}
int numStates = glm::min(_skeletonModel.getJointStateCount(), jointRotations.size());
for (int i = 0; i < numStates; ++i) {
// HACK: ATM only Recorder calls setJointRotations() so we hardcode its priority here
_skeletonModel.setJointState(i, true, jointRotations[i], RECORDER_PRIORITY);
}
}
void MyAvatar::setJointData(int index, const glm::quat& rotation) {
Avatar::setJointData(index, rotation);
if (QThread::currentThread() == thread()) {
_skeletonModel.setJointState(index, true, rotation, JOINT_PRIORITY);
// HACK: ATM only JS scripts call setJointData() on MyAvatar so we hardcode the priority
_skeletonModel.setJointState(index, true, rotation, SCRIPT_PRIORITY);
}
}
void MyAvatar::clearJointData(int index) {
Avatar::clearJointData(index);
if (QThread::currentThread() == thread()) {
_skeletonModel.setJointState(index, false, glm::quat(), JOINT_PRIORITY);
// HACK: ATM only JS scripts call clearJointData() on MyAvatar so we hardcode the priority
_skeletonModel.setJointState(index, false, glm::quat(), 0.0f);
}
}
void MyAvatar::clearJointsData() {
for (int i = 0; i < _jointData.size(); ++i) {
Avatar::clearJointData(i);
if (QThread::currentThread() == thread()) {
_skeletonModel.clearJointAnimationPriority(i);
}
clearJointAnimationPriorities();
}
void MyAvatar::clearJointAnimationPriorities() {
int numStates = _skeletonModel.getJointStateCount();
for (int i = 0; i < numStates; ++i) {
_skeletonModel.clearJointAnimationPriority(i);
}
}
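
With DEFAULT_PRIORITY = 3.0f (see the JointState header hunk further down), the constants above form a strict ladder: animations and built-in behaviors at 3, scripts at 4, the recorder at 5. Since a joint write only lands when its priority is at least the joint's current one, recorded data overrides script-set joints, which in turn override animations. Spelled out:

#include <cassert>

int main() {
    const float DEFAULT_PRIORITY  = 3.0f;                     // animations, palms, lean
    const float SCRIPT_PRIORITY   = DEFAULT_PRIORITY + 1.0f;  // 4.0f
    const float RECORDER_PRIORITY = SCRIPT_PRIORITY + 1.0f;   // 5.0f
    // higher value wins while it holds the joint; 0.0f means "unclaimed"
    assert(RECORDER_PRIORITY > SCRIPT_PRIORITY && SCRIPT_PRIORITY > DEFAULT_PRIORITY);
    return 0;
}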
@ -1976,12 +1898,8 @@ void MyAvatar::resetSize() {
}
void MyAvatar::goToLocationFromResponse(const QJsonObject& jsonObject) {
if (jsonObject["status"].toString() == "success") {
QJsonObject locationObject = jsonObject["data"].toObject()["address"].toObject();
goToLocationFromAddress(locationObject);
} else {
QMessageBox::warning(Application::getInstance()->getWindow(), "", "That user or location could not be found.");
}
QJsonObject locationObject = jsonObject["data"].toObject()["address"].toObject();
goToLocationFromAddress(locationObject);
}
void MyAvatar::goToLocationFromAddress(const QJsonObject& locationObject) {

View file

@ -128,6 +128,8 @@ public:
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL);
virtual void setAttachmentData(const QVector<AttachmentData>& attachmentData);
void clearJointAnimationPriorities();
virtual void attach(const QString& modelURL, const QString& jointName = QString(),
const glm::vec3& translation = glm::vec3(), const glm::quat& rotation = glm::quat(), float scale = 1.0f,
bool allowDuplicates = false, bool useSaved = true);
@ -174,15 +176,7 @@ public slots:
void startRecording();
void stopRecording();
void saveRecording(QString filename);
bool isPlaying();
qint64 playerElapsed();
qint64 playerLength();
void loadRecording(QString filename);
void loadLastRecording();
void startPlaying();
void stopPlaying();
signals:
void transformChanged();
@ -222,7 +216,6 @@ private:
PhysicsSimulation _physicsSimulation;
RecorderPointer _recorder;
PlayerPointer _player;
// private methods
float computeDistanceToFloor(const glm::vec3& startPoint);

View file

@ -51,7 +51,8 @@ void SkeletonModel::setJointStates(QVector<JointState> states) {
}
}
const float PALM_PRIORITY = 3.0f;
const float PALM_PRIORITY = DEFAULT_PRIORITY;
const float LEAN_PRIORITY = DEFAULT_PRIORITY;
void SkeletonModel::simulate(float deltaTime, bool fullUpdate) {
setTranslation(_owningAvatar->getPosition());
@ -230,7 +231,7 @@ void SkeletonModel::applyPalmData(int jointIndex, PalmData& palm) {
JointState& parentState = _jointStates[parentJointIndex];
parentState.setRotationInBindFrame(palmRotation, PALM_PRIORITY);
// lock hand to forearm by slamming its rotation (in parent-frame) to identity
_jointStates[jointIndex].setRotationInConstrainedFrame(glm::quat());
_jointStates[jointIndex].setRotationInConstrainedFrame(glm::quat(), PALM_PRIORITY);
} else {
inverseKinematics(jointIndex, palmPosition, palmRotation, PALM_PRIORITY);
}
@ -243,7 +244,7 @@ void SkeletonModel::updateJointState(int index) {
const JointState& parentState = _jointStates.at(joint.parentIndex);
const FBXGeometry& geometry = _geometry->getFBXGeometry();
if (index == geometry.leanJointIndex) {
maybeUpdateLeanRotation(parentState, joint, state);
maybeUpdateLeanRotation(parentState, state);
} else if (index == geometry.neckJointIndex) {
maybeUpdateNeckRotation(parentState, joint, state);
@ -260,17 +261,18 @@ void SkeletonModel::updateJointState(int index) {
}
}
void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {
void SkeletonModel::maybeUpdateLeanRotation(const JointState& parentState, JointState& state) {
if (!_owningAvatar->isMyAvatar() || Application::getInstance()->getPrioVR()->isActive()) {
return;
}
// get the rotation axes in joint space and use them to adjust the rotation
glm::mat3 axes = glm::mat3_cast(glm::quat());
glm::mat3 inverse = glm::mat3(glm::inverse(parentState.getTransform() * glm::translate(state.getDefaultTranslationInConstrainedFrame()) *
joint.preTransform * glm::mat4_cast(joint.preRotation * joint.rotation)));
state.setRotationInConstrainedFrame(glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(),
glm::normalize(inverse * axes[2])) * glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(),
glm::normalize(inverse * axes[0])) * joint.rotation);
glm::vec3 xAxis(1.0f, 0.0f, 0.0f);
glm::vec3 zAxis(0.0f, 0.0f, 1.0f);
glm::quat inverse = glm::inverse(parentState.getRotation() * state.getDefaultRotationInParentFrame());
state.setRotationInConstrainedFrame(
glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanSideways(), inverse * zAxis)
* glm::angleAxis(- RADIANS_PER_DEGREE * _owningAvatar->getHead()->getFinalLeanForward(), inverse * xAxis)
* state.getFBXJoint().rotation, LEAN_PRIORITY);
}
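
The rewritten lean code drops the old matrix inverse in favor of a pure quaternion expression: the world-aligned x and z axes are rotated by the inverse of (parent rotation times default rotation in parent frame), so the lean angles are always applied about axes that stay level no matter how the spine is currently posed. The core transform in isolation, as a glm sketch:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Express a world-space axis in a joint's constrained (local) frame.
glm::vec3 worldAxisInJointFrame(const glm::quat& parentRotation,
                                const glm::quat& defaultRotationInParentFrame,
                                const glm::vec3& worldAxis) {
    glm::quat toLocal = glm::inverse(parentRotation * defaultRotationInParentFrame);
    return toLocal * worldAxis;
}

// Usage, mirroring the hunk above: lean sideways about world z, forward about world x:
//   glm::quat lean =
//       glm::angleAxis(-sidewaysRadians, worldAxisInJointFrame(p, d, glm::vec3(0, 0, 1)))
//     * glm::angleAxis(-forwardRadians,  worldAxisInJointFrame(p, d, glm::vec3(1, 0, 0)))
//     * restRotation;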
void SkeletonModel::maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state) {

View file

@ -127,7 +127,7 @@ protected:
/// Updates the state of the joint at the specified index.
virtual void updateJointState(int index);
void maybeUpdateLeanRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void maybeUpdateLeanRotation(const JointState& parentState, JointState& state);
void maybeUpdateNeckRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);
void maybeUpdateEyeRotation(const JointState& parentState, const FBXJoint& joint, JointState& state);

View file

@ -85,9 +85,13 @@ void LocationManager::goTo(QString destination) {
if (!goToDestination(destination)) {
destination = QString(QUrl::toPercentEncoding(destination));
UserActivityLogger::getInstance().wentTo(OTHER_DESTINATION_TYPE, destination);
JSONCallbackParameters callbackParams;
callbackParams.jsonCallbackReceiver = this;
callbackParams.jsonCallbackMethod = "goToAddressFromResponse";
callbackParams.errorCallbackReceiver = this;
callbackParams.errorCallbackMethod = "handleAddressLookupError";
AccountManager::getInstance().authenticatedRequest(GET_ADDRESSES.arg(destination),
QNetworkAccessManager::GetOperation,
callbackParams);
@ -96,21 +100,17 @@ void LocationManager::goTo(QString destination) {
void LocationManager::goToAddressFromResponse(const QJsonObject& responseData) {
QJsonValue status = responseData["status"];
qDebug() << responseData;
if (!status.isUndefined() && status.toString() == "success") {
const QJsonObject& data = responseData["data"].toObject();
const QJsonValue& userObject = data["user"];
const QJsonValue& placeObject = data["place"];
if (!placeObject.isUndefined() && !userObject.isUndefined()) {
emit multipleDestinationsFound(userObject.toObject(), placeObject.toObject());
} else if (placeObject.isUndefined()) {
Application::getInstance()->getAvatar()->goToLocationFromAddress(userObject.toObject()["address"].toObject());
} else {
Application::getInstance()->getAvatar()->goToLocationFromAddress(placeObject.toObject()["address"].toObject());
}
const QJsonObject& data = responseData["data"].toObject();
const QJsonValue& userObject = data["user"];
const QJsonValue& placeObject = data["place"];
if (!placeObject.isUndefined() && !userObject.isUndefined()) {
emit multipleDestinationsFound(userObject.toObject(), placeObject.toObject());
} else if (placeObject.isUndefined()) {
Application::getInstance()->getAvatar()->goToLocationFromAddress(userObject.toObject()["address"].toObject());
} else {
QMessageBox::warning(Application::getInstance()->getWindow(), "", "That user or location could not be found.");
Application::getInstance()->getAvatar()->goToLocationFromAddress(placeObject.toObject()["address"].toObject());
}
}
@ -118,6 +118,8 @@ void LocationManager::goToUser(QString userName) {
JSONCallbackParameters callbackParams;
callbackParams.jsonCallbackReceiver = Application::getInstance()->getAvatar();
callbackParams.jsonCallbackMethod = "goToLocationFromResponse";
callbackParams.errorCallbackReceiver = this;
callbackParams.errorCallbackMethod = "handleAddressLookupError";
userName = QString(QUrl::toPercentEncoding(userName));
AccountManager::getInstance().authenticatedRequest(GET_USER_ADDRESS.arg(userName),
@ -129,6 +131,8 @@ void LocationManager::goToPlace(QString placeName) {
JSONCallbackParameters callbackParams;
callbackParams.jsonCallbackReceiver = Application::getInstance()->getAvatar();
callbackParams.jsonCallbackMethod = "goToLocationFromResponse";
callbackParams.errorCallbackReceiver = this;
callbackParams.errorCallbackMethod = "handleAddressLookupError";
placeName = QString(QUrl::toPercentEncoding(placeName));
AccountManager::getInstance().authenticatedRequest(GET_PLACE_ADDRESS.arg(placeName),
@ -212,6 +216,19 @@ bool LocationManager::goToDestination(QString destination) {
return false;
}
void LocationManager::handleAddressLookupError(QNetworkReply::NetworkError networkError,
const QString& errorString) {
QString messageBoxString;
if (networkError == QNetworkReply::ContentNotFoundError) {
messageBoxString = "That address could not be found.";
} else {
messageBoxString = errorString;
}
QMessageBox::warning(Application::getInstance()->getWindow(), "", messageBoxString);
}
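
All three lookup paths now register an error route alongside the success callback, which is what finally surfaces lookup failures as a dialog instead of silence. The wiring is symmetrical; condensed (receiver and method names as in the hunks above, endpoint string hypothetical):

JSONCallbackParameters callbackParams;
callbackParams.jsonCallbackReceiver  = this;                        // success path
callbackParams.jsonCallbackMethod    = "goToAddressFromResponse";
callbackParams.errorCallbackReceiver = this;                        // failure path
callbackParams.errorCallbackMethod   = "handleAddressLookupError";  // must be an invokable slot

AccountManager::getInstance().authenticatedRequest("/api/v1/addresses/somewhere",  // hypothetical
                                                   QNetworkAccessManager::GetOperation,
                                                   callbackParams);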
void LocationManager::replaceLastOccurrence(const QChar search, const QChar replace, QString& string) {
int lastIndex;
lastIndex = string.lastIndexOf(search);

View file

@ -37,6 +37,9 @@ public:
void goToPlace(QString placeName);
void goToOrientation(QString orientation);
bool goToDestination(QString destination);
public slots:
void handleAddressLookupError(QNetworkReply::NetworkError networkError, const QString& errorString);
private:
void replaceLastOccurrence(const QChar search, const QChar replace, QString& string);

View file

@ -593,17 +593,20 @@ void NetworkGeometry::setGeometry(const FBXGeometry& geometry) {
NetworkMeshPart networkPart;
if (!part.diffuseTexture.filename.isEmpty()) {
networkPart.diffuseTexture = Application::getInstance()->getTextureCache()->getTexture(
_textureBase.resolved(QUrl(part.diffuseTexture.filename)), false, mesh.isEye, part.diffuseTexture.content);
_textureBase.resolved(QUrl(part.diffuseTexture.filename)), DEFAULT_TEXTURE,
mesh.isEye, part.diffuseTexture.content);
networkPart.diffuseTexture->setLoadPriorities(_loadPriorities);
}
if (!part.normalTexture.filename.isEmpty()) {
networkPart.normalTexture = Application::getInstance()->getTextureCache()->getTexture(
_textureBase.resolved(QUrl(part.normalTexture.filename)), true, false, part.normalTexture.content);
_textureBase.resolved(QUrl(part.normalTexture.filename)), NORMAL_TEXTURE,
false, part.normalTexture.content);
networkPart.normalTexture->setLoadPriorities(_loadPriorities);
}
if (!part.specularTexture.filename.isEmpty()) {
networkPart.specularTexture = Application::getInstance()->getTextureCache()->getTexture(
_textureBase.resolved(QUrl(part.specularTexture.filename)), true, false, part.specularTexture.content);
_textureBase.resolved(QUrl(part.specularTexture.filename)), SPECULAR_TEXTURE,
false, part.specularTexture.content);
networkPart.specularTexture->setLoadPriorities(_loadPriorities);
}
networkMesh.parts.append(networkPart);

View file

@ -145,7 +145,7 @@ glm::quat JointState::getVisibleRotationInParentFrame() const {
void JointState::restoreRotation(float fraction, float priority) {
assert(_fbxJoint != NULL);
if (priority == _animationPriority || _animationPriority == 0.0f) {
setRotationInConstrainedFrame(safeMix(_rotationInConstrainedFrame, _fbxJoint->rotation, fraction));
setRotationInConstrainedFrameInternal(safeMix(_rotationInConstrainedFrame, _fbxJoint->rotation, fraction));
_animationPriority = 0.0f;
}
}
@ -158,7 +158,7 @@ void JointState::setRotationInBindFrame(const glm::quat& rotation, float priorit
if (constrain && _constraint) {
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
setRotationInConstrainedFrame(targetRotation);
setRotationInConstrainedFrameInternal(targetRotation);
_animationPriority = priority;
}
}
@ -189,7 +189,7 @@ void JointState::applyRotationDelta(const glm::quat& delta, bool constrain, floa
_rotation = delta * getRotation();
return;
}
setRotationInConstrainedFrame(targetRotation);
setRotationInConstrainedFrameInternal(targetRotation);
}
/// Applies delta rotation to joint but mixes a little bit of the default pose as well.
@ -208,7 +208,7 @@ void JointState::mixRotationDelta(const glm::quat& delta, float mixFactor, float
if (_constraint) {
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
setRotationInConstrainedFrame(targetRotation);
setRotationInConstrainedFrameInternal(targetRotation);
}
void JointState::mixVisibleRotationDelta(const glm::quat& delta, float mixFactor) {
@ -232,7 +232,17 @@ glm::quat JointState::computeVisibleParentRotation() const {
return _visibleRotation * glm::inverse(_fbxJoint->preRotation * _visibleRotationInConstrainedFrame * _fbxJoint->postRotation);
}
void JointState::setRotationInConstrainedFrame(const glm::quat& targetRotation) {
void JointState::setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain) {
if (priority >= _animationPriority || _animationPriority == 0.0f) {
if (constrain && _constraint) {
_constraint->softClamp(targetRotation, _rotationInConstrainedFrame, 0.5f);
}
setRotationInConstrainedFrameInternal(targetRotation);
_animationPriority = priority;
}
}
void JointState::setRotationInConstrainedFrameInternal(const glm::quat& targetRotation) {
glm::quat parentRotation = computeParentRotation();
_rotationInConstrainedFrame = targetRotation;
_transformChanged = true;
@ -254,6 +264,11 @@ const bool JointState::rotationIsDefault(const glm::quat& rotation, float tolera
glm::abs(rotation.w - defaultRotation.w) < tolerance;
}
glm::quat JointState::getDefaultRotationInParentFrame() const {
// NOTE: the result is constant and could be cached in the FBXJoint
return _fbxJoint->preRotation * _fbxJoint->rotation * _fbxJoint->postRotation;
}
const glm::vec3& JointState::getDefaultTranslationInConstrainedFrame() const {
assert(_fbxJoint != NULL);
return _fbxJoint->translation;
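
The new setRotationInConstrainedFrame overload moves the priority check into the setter itself, so callers no longer have to compare priorities before writing. A minimal model of the accept rule, assuming just a rotation and a priority field:

#include <glm/gtc/quaternion.hpp>

struct JointStateModel {
    glm::quat rotationInConstrainedFrame;
    float animationPriority = 0.0f;   // 0.0f means no one currently owns the joint

    // Mirrors the gate in the hunk above: the write lands only if its priority
    // is at least the current one, or the joint is unclaimed.
    void setRotation(const glm::quat& target, float priority) {
        if (priority >= animationPriority || animationPriority == 0.0f) {
            rotationInConstrainedFrame = target;
            animationPriority = priority;
        }
    }
};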

View file

@ -19,6 +19,8 @@
#include <GLMHelpers.h>
#include <FBXReader.h>
const float DEFAULT_PRIORITY = 3.0f;
class AngularConstraint;
class JointState {
@ -81,13 +83,14 @@ public:
/// NOTE: the JointState's model-frame transform/rotation are NOT updated!
void setRotationInBindFrame(const glm::quat& rotation, float priority, bool constrain = false);
void setRotationInConstrainedFrame(const glm::quat& targetRotation);
void setRotationInConstrainedFrame(glm::quat targetRotation, float priority, bool constrain = false);
void setVisibleRotationInConstrainedFrame(const glm::quat& targetRotation);
const glm::quat& getRotationInConstrainedFrame() const { return _rotationInConstrainedFrame; }
const glm::quat& getVisibleRotationInConstrainedFrame() const { return _visibleRotationInConstrainedFrame; }
const bool rotationIsDefault(const glm::quat& rotation, float tolerance = EPSILON) const;
glm::quat getDefaultRotationInParentFrame() const;
const glm::vec3& getDefaultTranslationInConstrainedFrame() const;
@ -103,6 +106,7 @@ public:
glm::quat computeVisibleParentRotation() const;
private:
void setRotationInConstrainedFrameInternal(const glm::quat& targetRotation);
/// debug helper function
void loadBindRotation();

View file

@ -438,7 +438,7 @@ void Model::reset() {
}
const FBXGeometry& geometry = _geometry->getFBXGeometry();
for (int i = 0; i < _jointStates.size(); i++) {
_jointStates[i].setRotationInConstrainedFrame(geometry.joints.at(i).rotation);
_jointStates[i].setRotationInConstrainedFrame(geometry.joints.at(i).rotation, 0.0f);
}
}
@ -695,8 +695,7 @@ bool Model::getVisibleJointState(int index, glm::quat& rotation) const {
void Model::clearJointState(int index) {
if (index != -1 && index < _jointStates.size()) {
JointState& state = _jointStates[index];
state.setRotationInConstrainedFrame(glm::quat());
state._animationPriority = 0.0f;
state.setRotationInConstrainedFrame(glm::quat(), 0.0f);
}
}
@ -709,13 +708,10 @@ void Model::clearJointAnimationPriority(int index) {
void Model::setJointState(int index, bool valid, const glm::quat& rotation, float priority) {
if (index != -1 && index < _jointStates.size()) {
JointState& state = _jointStates[index];
if (priority >= state._animationPriority) {
if (valid) {
state.setRotationInConstrainedFrame(rotation);
state._animationPriority = priority;
} else {
state.restoreRotation(1.0f, priority);
}
if (valid) {
state.setRotationInConstrainedFrame(rotation, priority);
} else {
state.restoreRotation(1.0f, priority);
}
}
}
@ -1745,10 +1741,7 @@ void AnimationHandle::applyFrame(float frameIndex) {
int mapping = _jointMappings.at(i);
if (mapping != -1) {
JointState& state = _model->_jointStates[mapping];
if (_priority >= state._animationPriority) {
state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction));
state._animationPriority = _priority;
}
state.setRotationInConstrainedFrame(safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction), _priority);
}
}
}
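
applyFrame() now delegates the priority check to the setter and simply blends the two keyframes that bracket frameIndex. safeMix (defined elsewhere in the codebase) blends two rotations by a fraction; a generic stand-in using glm's slerp:

#include <glm/gtc/quaternion.hpp>

// Interpolate between the keyframes bracketing a fractional frame index.
glm::quat blendKeyframes(const glm::quat& floorRotation,
                         const glm::quat& ceilRotation,
                         float frameFraction) {   // 0.0 at the floor frame, 1.0 at the ceil frame
    return glm::slerp(floorRotation, ceilRotation, frameFraction);
}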

View file

@ -145,6 +145,8 @@ GLuint TextureCache::getPermutationNormalTextureID() {
}
const unsigned char OPAQUE_WHITE[] = { 0xFF, 0xFF, 0xFF, 0xFF };
const unsigned char TRANSPARENT_WHITE[] = { 0xFF, 0xFF, 0xFF, 0x0 };
const unsigned char OPAQUE_BLACK[] = { 0x0, 0x0, 0x0, 0xFF };
const unsigned char OPAQUE_BLUE[] = { 0x80, 0x80, 0xFF, 0xFF };
static void loadSingleColorTexture(const unsigned char* color) {
@ -175,19 +177,18 @@ GLuint TextureCache::getBlueTextureID() {
/// Extra data for creating textures.
class TextureExtra {
public:
bool normalMap;
TextureType type;
const QByteArray& content;
};
QSharedPointer<NetworkTexture> TextureCache::getTexture(const QUrl& url, bool normalMap,
bool dilatable, const QByteArray& content) {
NetworkTexturePointer TextureCache::getTexture(const QUrl& url, TextureType type, bool dilatable, const QByteArray& content) {
if (!dilatable) {
TextureExtra extra = { normalMap, content };
TextureExtra extra = { type, content };
return ResourceCache::getResource(url, QUrl(), false, &extra).staticCast<NetworkTexture>();
}
QSharedPointer<NetworkTexture> texture = _dilatableNetworkTextures.value(url);
NetworkTexturePointer texture = _dilatableNetworkTextures.value(url);
if (texture.isNull()) {
texture = QSharedPointer<NetworkTexture>(new DilatableNetworkTexture(url, content), &Resource::allReferencesCleared);
texture = NetworkTexturePointer(new DilatableNetworkTexture(url, content), &Resource::allReferencesCleared);
texture->setSelf(texture);
texture->setCache(this);
_dilatableNetworkTextures.insert(url, texture);
@ -293,7 +294,7 @@ bool TextureCache::eventFilter(QObject* watched, QEvent* event) {
QSharedPointer<Resource> TextureCache::createResource(const QUrl& url,
const QSharedPointer<Resource>& fallback, bool delayLoad, const void* extra) {
const TextureExtra* textureExtra = static_cast<const TextureExtra*>(extra);
return QSharedPointer<Resource>(new NetworkTexture(url, textureExtra->normalMap, textureExtra->content),
return QSharedPointer<Resource>(new NetworkTexture(url, textureExtra->type, textureExtra->content),
&Resource::allReferencesCleared);
}
@ -317,17 +318,34 @@ Texture::~Texture() {
glDeleteTextures(1, &_id);
}
NetworkTexture::NetworkTexture(const QUrl& url, bool normalMap, const QByteArray& content) :
NetworkTexture::NetworkTexture(const QUrl& url, TextureType type, const QByteArray& content) :
Resource(url, !content.isEmpty()),
_type(type),
_translucent(false) {
if (!url.isValid()) {
_loaded = true;
}
// default to white/blue
// default to white/blue/black
glBindTexture(GL_TEXTURE_2D, getID());
loadSingleColorTexture(normalMap ? OPAQUE_BLUE : OPAQUE_WHITE);
switch (type) {
case NORMAL_TEXTURE:
loadSingleColorTexture(OPAQUE_BLUE);
break;
case SPECULAR_TEXTURE:
loadSingleColorTexture(OPAQUE_BLACK);
break;
case SPLAT_TEXTURE:
loadSingleColorTexture(TRANSPARENT_WHITE);
break;
default:
loadSingleColorTexture(OPAQUE_WHITE);
break;
}
glBindTexture(GL_TEXTURE_2D, 0);
// if we have content, load it after we have our self pointer
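
Each texture type now gets a sensible placeholder while its download is pending: normal maps default to 0x8080FF (a flat "straight up" normal), speculars to black (no highlight), splats to transparent white. A sketch of what uploading such a 1x1 placeholder presumably looks like inside loadSingleColorTexture, assuming a bound GL_TEXTURE_2D and the platform's GL header:

// A 1x1 RGBA texel is enough: sampling anywhere in the texture returns the constant color.
void loadSingleColorPlaceholder(const unsigned char color[4]) {
    glTexImage2D(GL_TEXTURE_2D, 0, GL_RGBA, 1, 1, 0, GL_RGBA, GL_UNSIGNED_BYTE, color);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
}

// e.g. loadSingleColorPlaceholder(OPAQUE_BLUE);  // unperturbed tangent-space normal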
@ -382,12 +400,28 @@ void ImageReader::run() {
qDebug() << "Image greater than maximum size:" << _url << image.width() << image.height();
image = image.scaled(MAXIMUM_SIZE, MAXIMUM_SIZE, Qt::KeepAspectRatio);
}
int imageArea = image.width() * image.height();
const int EIGHT_BIT_MAXIMUM = 255;
if (!image.hasAlphaChannel()) {
if (image.format() != QImage::Format_RGB888) {
image = image.convertToFormat(QImage::Format_RGB888);
}
QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), Q_ARG(bool, false));
int redTotal = 0, greenTotal = 0, blueTotal = 0;
for (int y = 0; y < image.height(); y++) {
for (int x = 0; x < image.width(); x++) {
QRgb rgb = image.pixel(x, y);
redTotal += qRed(rgb);
greenTotal += qGreen(rgb);
blueTotal += qBlue(rgb);
}
}
QColor averageColor(EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM);
if (imageArea > 0) {
averageColor.setRgb(redTotal / imageArea, greenTotal / imageArea, blueTotal / imageArea);
}
QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image), Q_ARG(bool, false),
Q_ARG(const QColor&, averageColor));
return;
}
if (image.format() != QImage::Format_ARGB32) {
@ -397,11 +431,15 @@ void ImageReader::run() {
// check for translucency/false transparency
int opaquePixels = 0;
int translucentPixels = 0;
const int EIGHT_BIT_MAXIMUM = 255;
const int RGB_BITS = 24;
int redTotal = 0, greenTotal = 0, blueTotal = 0, alphaTotal = 0;
for (int y = 0; y < image.height(); y++) {
for (int x = 0; x < image.width(); x++) {
int alpha = image.pixel(x, y) >> RGB_BITS;
QRgb rgb = image.pixel(x, y);
redTotal += qRed(rgb);
greenTotal += qGreen(rgb);
blueTotal += qBlue(rgb);
int alpha = qAlpha(rgb);
alphaTotal += alpha;
if (alpha == EIGHT_BIT_MAXIMUM) {
opaquePixels++;
} else if (alpha != 0) {
@ -409,13 +447,13 @@ void ImageReader::run() {
}
}
}
int imageArea = image.width() * image.height();
if (opaquePixels == imageArea) {
qDebug() << "Image with alpha channel is completely opaque:" << _url;
image = image.convertToFormat(QImage::Format_RGB888);
}
QMetaObject::invokeMethod(texture.data(), "setImage", Q_ARG(const QImage&, image),
Q_ARG(bool, translucentPixels >= imageArea / 2));
Q_ARG(bool, translucentPixels >= imageArea / 2), Q_ARG(const QColor&, QColor(redTotal / imageArea,
greenTotal / imageArea, blueTotal / imageArea, alphaTotal / imageArea)));
}
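
Both branches of the reader now accumulate per-channel totals in a single pass and divide by the pixel count, falling back to opaque white for an empty image. The same computation as a self-contained Qt function (with wider accumulators, an added assumption worth making since int totals can overflow on very large images):

#include <QtGui/QImage>
#include <QtGui/QColor>

QColor computeAverageColor(const QImage& image) {
    const int EIGHT_BIT_MAXIMUM = 255;
    qint64 redTotal = 0, greenTotal = 0, blueTotal = 0;
    for (int y = 0; y < image.height(); y++) {
        for (int x = 0; x < image.width(); x++) {
            QRgb rgb = image.pixel(x, y);
            redTotal += qRed(rgb);
            greenTotal += qGreen(rgb);
            blueTotal += qBlue(rgb);
        }
    }
    qint64 imageArea = qint64(image.width()) * image.height();
    if (imageArea == 0) {
        return QColor(EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM, EIGHT_BIT_MAXIMUM);  // same fallback as above
    }
    return QColor(int(redTotal / imageArea), int(greenTotal / imageArea), int(blueTotal / imageArea));
}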
void NetworkTexture::downloadFinished(QNetworkReply* reply) {
@ -427,8 +465,9 @@ void NetworkTexture::loadContent(const QByteArray& content) {
QThreadPool::globalInstance()->start(new ImageReader(_self, NULL, _url, content));
}
void NetworkTexture::setImage(const QImage& image, bool translucent) {
void NetworkTexture::setImage(const QImage& image, bool translucent, const QColor& averageColor) {
_translucent = translucent;
_averageColor = averageColor;
finishedLoading(true);
imageLoaded(image);
@ -440,7 +479,13 @@ void NetworkTexture::setImage(const QImage& image, bool translucent) {
glTexImage2D(GL_TEXTURE_2D, 0, GL_RGB, image.width(), image.height(), 0,
GL_RGB, GL_UNSIGNED_BYTE, image.constBits());
}
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
if (_type == SPLAT_TEXTURE) {
// generate mipmaps for splat textures
glGenerateMipmap(GL_TEXTURE_2D);
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR_MIPMAP_LINEAR);
} else {
glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
}
glBindTexture(GL_TEXTURE_2D, 0);
}
@ -449,7 +494,7 @@ void NetworkTexture::imageLoaded(const QImage& image) {
}
DilatableNetworkTexture::DilatableNetworkTexture(const QUrl& url, const QByteArray& content) :
NetworkTexture(url, false, content),
NetworkTexture(url, DEFAULT_TEXTURE, content),
_innerRadius(0),
_outerRadius(0)
{

View file

@ -23,6 +23,10 @@ class QOpenGLFramebufferObject;
class NetworkTexture;
typedef QSharedPointer<NetworkTexture> NetworkTexturePointer;
enum TextureType { DEFAULT_TEXTURE, NORMAL_TEXTURE, SPECULAR_TEXTURE, SPLAT_TEXTURE };
/// Stores cached textures, including render-to-texture targets.
class TextureCache : public ResourceCache {
Q_OBJECT
@ -47,7 +51,7 @@ public:
GLuint getBlueTextureID();
/// Loads a texture from the specified URL.
QSharedPointer<NetworkTexture> getTexture(const QUrl& url, bool normalMap = false, bool dilatable = false,
NetworkTexturePointer getTexture(const QUrl& url, TextureType type = DEFAULT_TEXTURE, bool dilatable = false,
const QByteArray& content = QByteArray());
/// Returns a pointer to the primary framebuffer object. This render target includes a depth component, and is
@ -121,24 +125,29 @@ class NetworkTexture : public Resource, public Texture {
public:
NetworkTexture(const QUrl& url, bool normalMap, const QByteArray& content);
NetworkTexture(const QUrl& url, TextureType type, const QByteArray& content);
/// Checks whether it "looks like" this texture is translucent
/// (majority of pixels neither fully opaque nor fully transparent).
bool isTranslucent() const { return _translucent; }
/// Returns the lazily-computed average texture color.
const QColor& getAverageColor() const { return _averageColor; }
protected:
virtual void downloadFinished(QNetworkReply* reply);
Q_INVOKABLE void loadContent(const QByteArray& content);
Q_INVOKABLE void setImage(const QImage& image, bool translucent);
Q_INVOKABLE void setImage(const QImage& image, bool translucent, const QColor& averageColor);
virtual void imageLoaded(const QImage& image);
private:
TextureType _type;
bool _translucent;
QColor _averageColor;
};
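
Replacing the old booleans with a TextureType enum makes the call sites self-describing; the NetworkGeometry hunks above are the canonical callers. A condensed usage sketch (cache pointer and URLs hypothetical):

NetworkTexturePointer diffuse = textureCache->getTexture(QUrl("http://example.com/brick.png"));  // DEFAULT_TEXTURE
NetworkTexturePointer normals = textureCache->getTexture(QUrl("http://example.com/brick_n.png"), NORMAL_TEXTURE);

// once loaded, the average color computed by the image reader is available, e.g. for tinting:
QColor tint = diffuse->getAverageColor();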
/// Caches derived, dilated textures.

View file

@ -20,6 +20,10 @@ LocationScriptingInterface* LocationScriptingInterface::getInstance() {
return &sharedInstance;
}
bool LocationScriptingInterface::isConnected() {
return NodeList::getInstance()->getDomainHandler().isConnected();
}
QString LocationScriptingInterface::getHref() {
return getProtocol() + "//" + getHostname() + getPathname();
}

View file

@ -22,6 +22,7 @@
class LocationScriptingInterface : public QObject {
Q_OBJECT
Q_PROPERTY(bool isConnected READ isConnected)
Q_PROPERTY(QString href READ getHref)
Q_PROPERTY(QString protocol READ getProtocol)
Q_PROPERTY(QString hostname READ getHostname)
@ -30,6 +31,7 @@ class LocationScriptingInterface : public QObject {
public:
static LocationScriptingInterface* getInstance();
bool isConnected();
QString getHref();
QString getProtocol() { return CUSTOM_URL_SCHEME; };
QString getPathname();

View file

@ -120,6 +120,7 @@ MetavoxelEditor::MetavoxelEditor() :
addTool(new EraseHeightfieldTool(this));
addTool(new HeightfieldHeightBrushTool(this));
addTool(new HeightfieldColorBrushTool(this));
addTool(new HeightfieldTextureBrushTool(this));
updateAttributes();
@ -956,14 +957,29 @@ void ImportHeightfieldTool::apply() {
HeightfieldBuffer* buffer = static_cast<HeightfieldBuffer*>(bufferData.data());
MetavoxelData data;
data.setSize(scale);
HeightfieldDataPointer heightPointer(new HeightfieldData(buffer->getUnextendedHeight()));
QByteArray height = buffer->getUnextendedHeight();
HeightfieldHeightDataPointer heightPointer(new HeightfieldHeightData(height));
data.setRoot(AttributeRegistry::getInstance()->getHeightfieldAttribute(), new MetavoxelNode(AttributeValue(
AttributeRegistry::getInstance()->getHeightfieldAttribute(), encodeInline(heightPointer))));
if (!buffer->getColor().isEmpty()) {
HeightfieldDataPointer colorPointer(new HeightfieldData(buffer->getUnextendedColor()));
data.setRoot(AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), new MetavoxelNode(AttributeValue(
AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), encodeInline(colorPointer))));
QByteArray color;
if (buffer->getColor().isEmpty()) {
const int WHITE_VALUE = 0xFF;
color = QByteArray(height.size() * HeightfieldData::COLOR_BYTES, WHITE_VALUE);
} else {
color = buffer->getUnextendedColor();
}
HeightfieldColorDataPointer colorPointer(new HeightfieldColorData(color));
data.setRoot(AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), new MetavoxelNode(AttributeValue(
AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), encodeInline(colorPointer))));
int size = glm::sqrt(height.size()) + HeightfieldBuffer::SHARED_EDGE;
QByteArray texture(size * size, 0);
HeightfieldTextureDataPointer texturePointer(new HeightfieldTextureData(texture));
data.setRoot(AttributeRegistry::getInstance()->getHeightfieldTextureAttribute(), new MetavoxelNode(AttributeValue(
AttributeRegistry::getInstance()->getHeightfieldTextureAttribute(), encodeInline(texturePointer))));
MetavoxelEditMessage message = { QVariant::fromValue(SetDataEdit(
_translation->getValue() + buffer->getTranslation() * scale, data)) };
Application::getInstance()->getMetavoxels()->applyEdit(message, true);
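
The sizes in apply() encode the shared-edge convention: the unextended height block holds n*n one-byte samples, the color block carries COLOR_BYTES per sample, and the texture block covers (n + SHARED_EDGE) squared texels. Worked through for a 64x64 block, assuming SHARED_EDGE is 1 and COLOR_BYTES is 3 (both defined elsewhere in the codebase):

#include <cmath>

int heightSamples = 64 * 64;                                   // unextended height block: 4096 bytes
int colorBytes    = heightSamples * 3;                         // 12288 bytes, one RGB triple per sample
int size          = int(std::sqrt(float(heightSamples))) + 1;  // 64 + SHARED_EDGE = 65
int textureBytes  = size * size;                               // 4225 bytes, one texture index per texel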
@ -1100,6 +1116,10 @@ HeightfieldBrushTool::HeightfieldBrushTool(MetavoxelEditor* editor, const QStrin
_radius->setValue(1.0);
}
bool HeightfieldBrushTool::appliesTo(const AttributePointer& attribute) const {
return attribute->inherits("HeightfieldAttribute");
}
void HeightfieldBrushTool::render() {
if (Application::getInstance()->isMouseHidden()) {
return;
@ -1153,5 +1173,29 @@ HeightfieldColorBrushTool::HeightfieldColorBrushTool(MetavoxelEditor* editor) :
}
QVariant HeightfieldColorBrushTool::createEdit(bool alternate) {
return QVariant::fromValue(PaintHeightfieldColorEdit(_position, _radius->value(), _color->getColor()));
return QVariant::fromValue(PaintHeightfieldColorEdit(_position, _radius->value(),
alternate ? QColor() : _color->getColor()));
}
HeightfieldTextureBrushTool::HeightfieldTextureBrushTool(MetavoxelEditor* editor) :
HeightfieldBrushTool(editor, "Texture Brush") {
_form->addRow(_textureEditor = new SharedObjectEditor(&HeightfieldTexture::staticMetaObject, false));
connect(_textureEditor, &SharedObjectEditor::objectChanged, this, &HeightfieldTextureBrushTool::updateTexture);
}
QVariant HeightfieldTextureBrushTool::createEdit(bool alternate) {
if (alternate) {
return QVariant::fromValue(PaintHeightfieldTextureEdit(_position, _radius->value(), SharedObjectPointer(), QColor()));
} else {
SharedObjectPointer texture = _textureEditor->getObject();
_textureEditor->detachObject();
return QVariant::fromValue(PaintHeightfieldTextureEdit(_position, _radius->value(), texture,
_texture ? _texture->getAverageColor() : QColor()));
}
}
void HeightfieldTextureBrushTool::updateTexture() {
HeightfieldTexture* texture = static_cast<HeightfieldTexture*>(_textureEditor->getObject().data());
_texture = Application::getInstance()->getTextureCache()->getTexture(texture->getURL());
}

View file

@ -28,6 +28,7 @@ class QScrollArea;
class QSpinBox;
class MetavoxelTool;
class SharedObjectEditor;
class Vec3Editor;
/// Allows editing metavoxels.
@ -311,6 +312,8 @@ public:
HeightfieldBrushTool(MetavoxelEditor* editor, const QString& name);
virtual bool appliesTo(const AttributePointer& attribute) const;
virtual void render();
virtual bool eventFilter(QObject* watched, QEvent* event);
@ -359,4 +362,26 @@ private:
QColorEditor* _color;
};
/// Allows texturing parts of the heightfield.
class HeightfieldTextureBrushTool : public HeightfieldBrushTool {
Q_OBJECT
public:
HeightfieldTextureBrushTool(MetavoxelEditor* editor);
protected:
virtual QVariant createEdit(bool alternate);
private slots:
void updateTexture();
private:
SharedObjectEditor* _textureEditor;
QSharedPointer<NetworkTexture> _texture;
};
#endif // hifi_MetavoxelEditor_h

View file

@ -149,9 +149,16 @@ void PreferencesDialog::loadPreferences() {
ui.faceshiftEyeDeflectionSider->setValue(menuInstance->getFaceshiftEyeDeflection() *
ui.faceshiftEyeDeflectionSider->maximum());
ui.audioJitterSpin->setValue(menuInstance->getAudioJitterBufferFrames());
const InboundAudioStream::Settings& streamSettings = menuInstance->getReceivedAudioStreamSettings();
ui.maxFramesOverDesiredSpin->setValue(menuInstance->getMaxFramesOverDesired());
ui.dynamicJitterBuffersCheckBox->setChecked(streamSettings._dynamicJitterBuffers);
ui.staticDesiredJitterBufferFramesSpin->setValue(streamSettings._staticDesiredJitterBufferFrames);
ui.maxFramesOverDesiredSpin->setValue(streamSettings._maxFramesOverDesired);
ui.useStdevForJitterCalcCheckBox->setChecked(streamSettings._useStDevForJitterCalc);
ui.windowStarveThresholdSpin->setValue(streamSettings._windowStarveThreshold);
ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->setValue(streamSettings._windowSecondsForDesiredCalcOnTooManyStarves);
ui.windowSecondsForDesiredReductionSpin->setValue(streamSettings._windowSecondsForDesiredReduction);
ui.repetitionWithFadeCheckBox->setChecked(streamSettings._repetitionWithFade);
ui.realWorldFieldOfViewSpin->setValue(menuInstance->getRealWorldFieldOfView());
@ -241,16 +248,18 @@ void PreferencesDialog::savePreferences() {
Menu::getInstance()->setInvertSixenseButtons(ui.invertSixenseButtonsCheckBox->isChecked());
Menu::getInstance()->setAudioJitterBufferFrames(ui.audioJitterSpin->value());
if (Menu::getInstance()->getAudioJitterBufferFrames() != 0) {
Application::getInstance()->getAudio()->setDynamicJitterBuffers(false);
Application::getInstance()->getAudio()->setStaticDesiredJitterBufferFrames(Menu::getInstance()->getAudioJitterBufferFrames());
} else {
Application::getInstance()->getAudio()->setDynamicJitterBuffers(true);
}
InboundAudioStream::Settings streamSettings;
streamSettings._dynamicJitterBuffers = ui.dynamicJitterBuffersCheckBox->isChecked();
streamSettings._staticDesiredJitterBufferFrames = ui.staticDesiredJitterBufferFramesSpin->value();
streamSettings._maxFramesOverDesired = ui.maxFramesOverDesiredSpin->value();
streamSettings._useStDevForJitterCalc = ui.useStdevForJitterCalcCheckBox->isChecked();
streamSettings._windowStarveThreshold = ui.windowStarveThresholdSpin->value();
streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = ui.windowSecondsForDesiredCalcOnTooManyStarvesSpin->value();
streamSettings._windowSecondsForDesiredReduction = ui.windowSecondsForDesiredReductionSpin->value();
streamSettings._repetitionWithFade = ui.repetitionWithFadeCheckBox->isChecked();
Menu::getInstance()->setMaxFramesOverDesired(ui.maxFramesOverDesiredSpin->value());
Application::getInstance()->getAudio()->setMaxFramesOverDesired(Menu::getInstance()->getMaxFramesOverDesired());
Menu::getInstance()->setReceivedAudioStreamSettings(streamSettings);
Application::getInstance()->getAudio()->setReceivedAudioStreamSettings(streamSettings);
Application::getInstance()->resizeGL(Application::getInstance()->getGLWidget()->width(),
Application::getInstance()->getGLWidget()->height());
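
The eight controls map one-to-one onto InboundAudioStream::Settings, which then travels as a unit to both the menu and the audio client instead of the old piecemeal setter calls. Populating it directly looks like this (field names from the hunk above, values hypothetical):

InboundAudioStream::Settings streamSettings;
streamSettings._dynamicJitterBuffers = true;           // adapt the desired frame count at runtime
streamSettings._staticDesiredJitterBufferFrames = 1;   // used only when dynamic buffers are off
streamSettings._maxFramesOverDesired = 10;             // tolerated overshoot before dropping frames
streamSettings._useStDevForJitterCalc = false;         // alternative jitter estimator
streamSettings._windowStarveThreshold = 3;             // starves within window A that trigger a raise
streamSettings._windowSecondsForDesiredCalcOnTooManyStarves = 50;  // window A length
streamSettings._windowSecondsForDesiredReduction = 10; // window B length
streamSettings._repetitionWithFade = true;             // mask starves by repeating faded frames

Menu::getInstance()->setReceivedAudioStreamSettings(streamSettings);
Application::getInstance()->getAudio()->setReceivedAudioStreamSettings(streamSettings);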

View file

@ -1464,6 +1464,97 @@ padding: 10px;margin-top:10px</string>
</item>
</layout>
</item>
<!-- dynamic jitter buffers ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_23">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>10</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>10</number>
</property>
<item>
<widget class="QLabel" name="label_20">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Enable Dynamic Jitter Buffers</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>dynamicJitterBuffersCheckBox</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_17">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QCheckBox" name="dynamicJitterBuffersCheckBox">
<property name="sizePolicy">
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>32</width>
<height>0</height>
</size>
</property>
<property name="baseSize">
<size>
<width>0</width>
<height>0</height>
</size>
</property>
<property name="text">
<string/>
</property>
<property name="iconSize">
<size>
<width>32</width>
<height>32</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
<!-- static desired jitter frames____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_8">
<property name="spacing">
@ -1489,13 +1580,13 @@ padding: 10px;margin-top:10px</string>
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Audio Jitter Buffer Frames (0 for automatic)</string>
<string>Static Jitter Buffer Frames</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>audioJitterSpin</cstring>
<cstring>staticDesiredJitterBufferFramesSpin</cstring>
</property>
</widget>
</item>
@ -1518,7 +1609,7 @@ padding: 10px;margin-top:10px</string>
</spacer>
</item>
<item>
<widget class="QSpinBox" name="audioJitterSpin">
<widget class="QSpinBox" name="staticDesiredJitterBufferFramesSpin">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>
@ -1555,6 +1646,7 @@ padding: 10px;margin-top:10px</string>
</item>
</layout>
</item>
<!-- max frames over desired ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_13">
<property name="spacing">
@ -1591,7 +1683,7 @@ padding: 10px;margin-top:10px</string>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_12">
<spacer name="horizontalSpacer_20">
<property name="font">
<font>
<family>Arial</family>
@ -1646,7 +1738,467 @@ padding: 10px;margin-top:10px</string>
</item>
</layout>
</item>
<!-- use stdev for jitter calc ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_19">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>10</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>10</number>
</property>
<item>
<widget class="QLabel" name="label_16">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Use Stdev for Dynamic Jitter Calc</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>useStdevForJitterCalcCheckBox</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_21">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QCheckBox" name="useStdevForJitterCalcCheckBox">
<property name="sizePolicy">
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>32</width>
<height>0</height>
</size>
</property>
<property name="baseSize">
<size>
<width>0</width>
<height>0</height>
</size>
</property>
<property name="text">
<string/>
</property>
<property name="iconSize">
<size>
<width>32</width>
<height>32</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
<!-- window starve threshold ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_20">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>10</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>10</number>
</property>
<item alignment="Qt::AlignLeft">
<widget class="QLabel" name="label_17">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Window A Starve Threshold</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>windowStarveThresholdSpin</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_22">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QSpinBox" name="windowStarveThresholdSpin">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>95</width>
<height>36</height>
</size>
</property>
<property name="maximumSize">
<size>
<width>70</width>
<height>16777215</height>
</size>
</property>
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="minimum">
<number>0</number>
</property>
<property name="maximum">
<number>10000</number>
</property>
<property name="value">
<number>1</number>
</property>
</widget>
</item>
</layout>
</item>
<!-- window A seconds ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_21">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>10</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>10</number>
</property>
<item alignment="Qt::AlignLeft">
<widget class="QLabel" name="label_18">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Window A (raise desired on N starves) Seconds</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>windowSecondsForDesiredCalcOnTooManyStarvesSpin</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_23">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QSpinBox" name="windowSecondsForDesiredCalcOnTooManyStarvesSpin">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>95</width>
<height>36</height>
</size>
</property>
<property name="maximumSize">
<size>
<width>70</width>
<height>16777215</height>
</size>
</property>
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="minimum">
<number>0</number>
</property>
<property name="maximum">
<number>10000</number>
</property>
<property name="value">
<number>1</number>
</property>
</widget>
</item>
</layout>
</item>
<!-- window B seconds ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_22">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>10</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>10</number>
</property>
<item alignment="Qt::AlignLeft">
<widget class="QLabel" name="label_19">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Window B (desired ceiling) Seconds</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>windowSecondsForDesiredReductionSpin</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_24">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QSpinBox" name="windowSecondsForDesiredReductionSpin">
<property name="sizePolicy">
<sizepolicy hsizetype="Fixed" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>95</width>
<height>36</height>
</size>
</property>
<property name="maximumSize">
<size>
<width>70</width>
<height>16777215</height>
</size>
</property>
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="minimum">
<number>0</number>
</property>
<property name="maximum">
<number>10000</number>
</property>
<property name="value">
<number>1</number>
</property>
</widget>
</item>
</layout>
</item>
<!-- repetition with fade ____________________________________________________________________________ -->
<item>
<layout class="QHBoxLayout" name="horizontalLayout_24">
<property name="spacing">
<number>0</number>
</property>
<property name="topMargin">
<number>10</number>
</property>
<property name="rightMargin">
<number>0</number>
</property>
<property name="bottomMargin">
<number>10</number>
</property>
<item>
<widget class="QLabel" name="label_21">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="styleSheet">
<string notr="true">color: rgb(51, 51, 51)</string>
</property>
<property name="text">
<string>Repetition with Fade</string>
</property>
<property name="indent">
<number>15</number>
</property>
<property name="buddy">
<cstring>repetitionWithFadeCheckBox</cstring>
</property>
</widget>
</item>
<item>
<spacer name="horizontalSpacer_25">
<property name="font">
<font>
<family>Arial</family>
</font>
</property>
<property name="orientation">
<enum>Qt::Horizontal</enum>
</property>
<property name="sizeHint" stdset="0">
<size>
<width>40</width>
<height>20</height>
</size>
</property>
</spacer>
</item>
<item>
<widget class="QCheckBox" name="repetitionWithFadeCheckBox">
<property name="sizePolicy">
<sizepolicy hsizetype="Minimum" vsizetype="Fixed">
<horstretch>0</horstretch>
<verstretch>0</verstretch>
</sizepolicy>
</property>
<property name="minimumSize">
<size>
<width>32</width>
<height>0</height>
</size>
</property>
<property name="baseSize">
<size>
<width>0</width>
<height>0</height>
</size>
</property>
<property name="text">
<string/>
</property>
<property name="iconSize">
<size>
<width>32</width>
<height>32</height>
</size>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_6">

View file

@ -1,26 +0,0 @@
//
// AudioFilter.cpp
// hifi
//
// Created by Craig Hansen-Sturm on 8/10/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <math.h>
#include <vector>
#include <SharedUtil.h>
#include "AudioRingBuffer.h"
#include "AudioFilter.h"
template<>
AudioFilterPEQ3::FilterParameter AudioFilterPEQ3::_profiles[ AudioFilterPEQ3::_profileCount ][ AudioFilterPEQ3::_filterCount ] = {
// Freq Gain Q Freq Gain Q Freq Gain Q
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // flat response (default)
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 0.1f, 1.0f } }, // treble cut
{ { 300.0f, 0.1f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // bass cut
{ { 300.0f, 1.5f, 0.71f }, { 1000.0f, 0.5f, 1.0f }, { 4000.0f, 1.50f, 0.71f } } // smiley curve
};

View file

@ -12,7 +12,7 @@
#ifndef hifi_AudioFilter_h
#define hifi_AudioFilter_h
////////////////////////////////////////////////////////////////////////////////////////////
//
// Implements a standard biquad filter in "Direct Form 1"
// Reference http://www.musicdsp.org/files/Audio-EQ-Cookbook.txt
//
@ -51,15 +51,15 @@ public:
//
// public interface
//
void setParameters( const float a0, const float a1, const float a2, const float b1, const float b2 ) {
void setParameters(const float a0, const float a1, const float a2, const float b1, const float b2) {
_a0 = a0; _a1 = a1; _a2 = a2; _b1 = b1; _b2 = b2;
}
void getParameters( float& a0, float& a1, float& a2, float& b1, float& b2 ) {
void getParameters(float& a0, float& a1, float& a2, float& b1, float& b2) {
a0 = _a0; a1 = _a1; a2 = _a2; b1 = _b1; b2 = _b2;
}
void render( const float* in, float* out, const int frames) {
void render(const float* in, float* out, const int frames) {
float x;
float y;
@@ -90,209 +90,223 @@ public:
}
};
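For reference, a minimal sketch (not part of this commit) of the Direct Form 1 recurrence the kernel above evaluates per sample, using the same coefficient naming as setParameters(): a0..a2 feed forward, b1..b2 feed back.
// One Direct Form 1 step: y[n] = a0*x[n] + a1*x[n-1] + a2*x[n-2]
//                                 - b1*y[n-1] - b2*y[n-2]
static inline float biquadStep(float x, float a0, float a1, float a2, float b1, float b2,
                               float& xm1, float& xm2, float& ym1, float& ym2) {
    float y = a0 * x + a1 * xm1 + a2 * xm2 - b1 * ym1 - b2 * ym2;
    xm2 = xm1; xm1 = x; // shift the input history
    ym2 = ym1; ym1 = y; // shift the output history
    return y;
}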
////////////////////////////////////////////////////////////////////////////////////////////
// Implements a single-band parametric EQ using a biquad "peaking EQ" configuration
//
// gain > 1.0 boosts the center frequency
// gain < 1.0 cuts the center frequency
//
class AudioParametricEQ {
//
// Implements common base class interface for all Audio Filter Objects
//
template< class T >
class AudioFilter {
protected:
//
// private data
// data
//
AudioBiquad _kernel;
float _sampleRate;
float _frequency;
float _gain;
float _slope;
//
// helpers
//
void updateKernel() {
/*
a0 = 1 + alpha*A
a1 = -2*cos(w0)
a2 = 1 - alpha*A
b1 = -2*cos(w0)
b2 = 1 - alpha/A
*/
const float a = _gain;
const float omega = TWO_PI * _frequency / _sampleRate;
const float alpha = 0.5f * sinf(omega) / _slope;
const float gamma = 1.0f / ( 1.0f + (alpha/a) );
const float a0 = 1.0f + (alpha*a);
const float a1 = -2.0f * cosf(omega);
const float a2 = 1.0f - (alpha*a);
const float b1 = a1;
const float b2 = 1.0f - (alpha/a);
_kernel.setParameters( a0*gamma,a1*gamma,a2*gamma,b1*gamma,b2*gamma );
static_cast<T*>(this)->updateKernel();
}
public:
//
// ctor/dtor
//
AudioParametricEQ() {
AudioFilter() {
setParameters(0.,0.,0.,0.);
updateKernel();
}
~AudioParametricEQ() {
~AudioFilter() {
}
//
// public interface
//
void setParameters( const float sampleRate, const float frequency, const float gain, const float slope ) {
_sampleRate = std::max(sampleRate,1.0f);
_frequency = std::max(frequency,2.0f);
_gain = std::max(gain,0.0f);
_slope = std::max(slope,0.00001f);
void setParameters(const float sampleRate, const float frequency, const float gain, const float slope) {
_sampleRate = std::max(sampleRate, 1.0f);
_frequency = std::max(frequency, 2.0f);
_gain = std::max(gain, 0.0f);
_slope = std::max(slope, 0.00001f);
updateKernel();
}
void getParameters( float& sampleRate, float& frequency, float& gain, float& slope ) {
void getParameters(float& sampleRate, float& frequency, float& gain, float& slope) {
sampleRate = _sampleRate; frequency = _frequency; gain = _gain; slope = _slope;
}
void render(const float* in, float* out, const int frames ) {
void render(const float* in, float* out, const int frames) {
_kernel.render(in,out,frames);
}
void reset() {
_kernel.reset();
}
};
////////////////////////////////////////////////////////////////////////////////////////////
// Helper/convenience class that implements a bank of EQ objects
//
template< typename T, const int N>
class AudioFilterBank {
//
// types
//
struct FilterParameter {
float _p1;
float _p2;
float _p3;
};
//
// private static data
//
static const int _filterCount = N;
static const int _profileCount = 4;
static FilterParameter _profiles[_profileCount][_filterCount];
//
// private data
//
T _filters[ _filterCount ];
float* _buffer;
float _sampleRate;
uint16_t _frameCount;
// Implements a low-shelf filter using a biquad
//
class AudioFilterLSF : public AudioFilter< AudioFilterLSF >
{
public:
//
// ctor/dtor
// helpers
//
AudioFilterBank()
: _buffer(NULL)
, _sampleRate(0.)
, _frameCount(0) {
}
~AudioFilterBank() {
finalize();
}
//
// public interface
//
void initialize( const float sampleRate, const int frameCount ) {
finalize();
void updateKernel() {
_buffer = (float*)malloc( frameCount * sizeof(float) );
if(!_buffer) {
return;
}
const float a = _gain;
const float aAdd1 = a + 1.0f;
const float aSub1 = a - 1.0f;
const float omega = TWO_PI * _frequency / _sampleRate;
const float aAdd1TimesCosOmega = aAdd1 * cosf(omega);
const float aSub1TimesCosOmega = aSub1 * cosf(omega);
const float alpha = 0.5f * sinf(omega) / _slope;
const float zeta = 2.0f * sqrtf(a) * alpha;
/*
b0 = A*( (A+1) - (A-1)*cos(w0) + 2*sqrt(A)*alpha )
b1 = 2*A*( (A-1) - (A+1)*cos(w0) )
b2 = A*( (A+1) - (A-1)*cos(w0) - 2*sqrt(A)*alpha )
a0 = (A+1) + (A-1)*cos(w0) + 2*sqrt(A)*alpha
a1 = -2*( (A-1) + (A+1)*cos(w0) )
a2 = (A+1) + (A-1)*cos(w0) - 2*sqrt(A)*alpha
*/
const float b0 = +1.0f * (aAdd1 - aSub1TimesCosOmega + zeta) * a;
const float b1 = +2.0f * (aSub1 - aAdd1TimesCosOmega + ZERO) * a;
const float b2 = +1.0f * (aAdd1 - aSub1TimesCosOmega - zeta) * a;
const float a0 = +1.0f * (aAdd1 + aSub1TimesCosOmega + zeta);
const float a1 = -2.0f * (aSub1 + aAdd1TimesCosOmega + ZERO);
const float a2 = +1.0f * (aAdd1 + aSub1TimesCosOmega - zeta);
_sampleRate = sampleRate;
_frameCount = frameCount;
reset();
loadProfile(0); // load default profile "flat response" into the bank (see AudioFilter.cpp)
const float normA0 = 1.0f / a0;
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
}
void finalize() {
if (_buffer ) {
free (_buffer);
_buffer = NULL;
}
}
void loadProfile( int profileIndex ) {
if (profileIndex >= 0 && profileIndex < _profileCount) {
for (int i = 0; i < _filterCount; ++i) {
FilterParameter p = _profiles[profileIndex][i];
_filters[i].setParameters(_sampleRate,p._p1,p._p2,p._p3);
}
}
}
void render( const float* in, float* out, const int frameCount ) {
for (int i = 0; i < _filterCount; ++i) {
_filters[i].render( in, out, frameCount );
}
}
void render( const int16_t* in, int16_t* out, const int frameCount ) {
if (!_buffer || ( frameCount > _frameCount ))
return;
const int scale = (2 << ((8*sizeof(int16_t))-1));
// convert int16_t to float32 (normalized to -1. ... 1.)
for (int i = 0; i < frameCount; ++i) {
_buffer[i] = ((float)(*in++)) / scale;
}
// for this filter, we share input/output buffers at each stage, but our design does not mandate this
render( _buffer, _buffer, frameCount );
// convert float32 to int16_t
for (int i = 0; i < frameCount; ++i) {
*out++ = (int16_t)(_buffer[i] * scale);
}
}
void reset() {
for (int i = 0; i < _filterCount; ++i ) {
_filters[i].reset();
}
}
};
////////////////////////////////////////////////////////////////////////////////////////////
// Specializations of AudioFilterBank
//
typedef AudioFilterBank< AudioParametricEQ, 1> AudioFilterPEQ1; // bank with one band of PEQ
typedef AudioFilterBank< AudioParametricEQ, 2> AudioFilterPEQ2; // bank with two bands of PEQ
typedef AudioFilterBank< AudioParametricEQ, 3> AudioFilterPEQ3; // bank with three bands of PEQ
// etc....
// Implements a high-shelf filter using a biquad
//
class AudioFilterHSF : public AudioFilter< AudioFilterHSF >
{
public:
//
// helpers
//
void updateKernel() {
const float a = _gain;
const float aAdd1 = a + 1.0f;
const float aSub1 = a - 1.0f;
const float omega = TWO_PI * _frequency / _sampleRate;
const float aAdd1TimesCosOmega = aAdd1 * cosf(omega);
const float aSub1TimesCosOmega = aSub1 * cosf(omega);
const float alpha = 0.5f * sinf(omega) / _slope;
const float zeta = 2.0f * sqrtf(a) * alpha;
/*
b0 = A*( (A+1) + (A-1)*cos(w0) + 2*sqrt(A)*alpha )
b1 = -2*A*( (A-1) + (A+1)*cos(w0) )
b2 = A*( (A+1) + (A-1)*cos(w0) - 2*sqrt(A)*alpha )
a0 = (A+1) - (A-1)*cos(w0) + 2*sqrt(A)*alpha
a1 = 2*( (A-1) - (A+1)*cos(w0) )
a2 = (A+1) - (A-1)*cos(w0) - 2*sqrt(A)*alpha
*/
const float b0 = +1.0f * (aAdd1 + aSub1TimesCosOmega + zeta) * a;
const float b1 = -2.0f * (aSub1 + aAdd1TimesCosOmega + ZERO) * a;
const float b2 = +1.0f * (aAdd1 + aSub1TimesCosOmega - zeta) * a;
const float a0 = +1.0f * (aAdd1 - aSub1TimesCosOmega + zeta);
const float a1 = +2.0f * (aSub1 - aAdd1TimesCosOmega + ZERO);
const float a2 = +1.0f * (aAdd1 - aSub1TimesCosOmega - zeta);
const float normA0 = 1.0f / a0;
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
}
};
//
// Implements an all-pass filter using a biquad
//
class AudioFilterALL : public AudioFilter< AudioFilterALL >
{
public:
//
// helpers
//
void updateKernel() {
const float omega = TWO_PI * _frequency / _sampleRate;
const float cosOmega = cosf(omega);
const float alpha = 0.5f * sinf(omega) / _slope;
/*
b0 = 1 - alpha
b1 = -2*cos(w0)
b2 = 1 + alpha
a0 = 1 + alpha
a1 = -2*cos(w0)
a2 = 1 - alpha
*/
const float b0 = +1.0f - alpha;
const float b1 = -2.0f * cosOmega;
const float b2 = +1.0f + alpha;
const float a0 = +1.0f + alpha;
const float a1 = -2.0f * cosOmega;
const float a2 = +1.0f - alpha;
const float normA0 = 1.0f / a0;
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
}
};
//
// Implements a single-band parametric EQ using a biquad "peaking EQ" configuration
//
class AudioFilterPEQ : public AudioFilter< AudioFilterPEQ >
{
public:
//
// helpers
//
void updateKernel() {
const float a = _gain;
const float omega = TWO_PI * _frequency / _sampleRate;
const float cosOmega = cosf(omega);
const float alpha = 0.5f * sinf(omega) / _slope;
const float alphaMulA = alpha * a;
const float alphaDivA = alpha / a;
/*
b0 = 1 + alpha*A
b1 = -2*cos(w0)
b2 = 1 - alpha*A
a0 = 1 + alpha/A
a1 = -2*cos(w0)
a2 = 1 - alpha/A
*/
const float b0 = +1.0f + alphaMulA;
const float b1 = -2.0f * cosOmega;
const float b2 = +1.0f - alphaMulA;
const float a0 = +1.0f + alphaDivA;
const float a1 = -2.0f * cosOmega;
const float a2 = +1.0f - alphaDivA;
const float normA0 = 1.0f / a0;
_kernel.setParameters(b0 * normA0, b1 * normA0 , b2 * normA0, a1 * normA0, a2 * normA0);
}
};
#endif // hifi_AudioFilter_h

View file: AudioFilterBank.cpp

@@ -0,0 +1,48 @@
//
// AudioFilterBank.cpp
// hifi
//
// Created by Craig Hansen-Sturm on 8/10/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include <math.h>
#include <SharedUtil.h>
#include "AudioRingBuffer.h"
#include "AudioFilter.h"
#include "AudioFilterBank.h"
template<>
AudioFilterLSF1s::FilterParameter
AudioFilterLSF1s::_profiles[ AudioFilterLSF1s::_profileCount ][ AudioFilterLSF1s::_filterCount ] = {
// Freq Gain Slope
{ { 1000.0f, 1.0f, 1.0f } } // flat response (default)
};
template<>
AudioFilterHSF1s::FilterParameter
AudioFilterHSF1s::_profiles[ AudioFilterHSF1s::_profileCount ][ AudioFilterHSF1s::_filterCount ] = {
// Freq Gain Slope
{ { 1000.0f, 1.0f, 1.0f } } // flat response (default)
};
template<>
AudioFilterPEQ1s::FilterParameter
AudioFilterPEQ1s::_profiles[ AudioFilterPEQ1s::_profileCount ][ AudioFilterPEQ1s::_filterCount ] = {
// Freq Gain Q
{ { 1000.0f, 1.0f, 1.0f } } // flat response (default)
};
template<>
AudioFilterPEQ3m::FilterParameter
AudioFilterPEQ3m::_profiles[ AudioFilterPEQ3m::_profileCount ][ AudioFilterPEQ3m::_filterCount ] = {
// Freq Gain Q Freq Gain Q Freq Gain Q
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // flat response (default)
{ { 300.0f, 1.0f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 0.1f, 1.0f } }, // treble cut
{ { 300.0f, 0.1f, 1.0f }, { 1000.0f, 1.0f, 1.0f }, { 4000.0f, 1.0f, 1.0f } }, // bass cut
{ { 300.0f, 1.5f, 0.71f }, { 1000.0f, 0.5f, 1.0f }, { 4000.0f, 1.50f, 0.71f } } // smiley curve
};
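Row order matters here: the index passed to AudioFilterBank::loadProfile() selects one row of _profiles, so for the three-band mono bank above:
// loadProfile(0) -> flat response (the default loaded by initialize())
// loadProfile(1) -> treble cut
// loadProfile(2) -> bass cut
// loadProfile(3) -> smiley curve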

View file: AudioFilterBank.h

@@ -0,0 +1,170 @@
//
// AudioFilterBank.h
// hifi
//
// Created by Craig Hansen-Sturm on 8/23/14.
// Copyright 2014 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#ifndef hifi_AudioFilterBank_h
#define hifi_AudioFilterBank_h
//
// Helper/convenience class that implements a bank of Filter objects
//
template< typename T, const int N, const int C >
class AudioFilterBank {
//
// types
//
struct FilterParameter {
float _p1;
float _p2;
float _p3;
};
//
// private static data
//
static const int _filterCount = N;
static const int _channelCount = C;
static const int _profileCount = 4;
static FilterParameter _profiles[ _profileCount ][ _filterCount ];
//
// private data
//
T _filters[ _filterCount ][ _channelCount ];
float* _buffer[ _channelCount ];
float _sampleRate;
uint16_t _frameCount;
public:
//
// ctor/dtor
//
AudioFilterBank()
: _sampleRate(0.)
, _frameCount(0) {
for (int i = 0; i < _channelCount; ++i) {
_buffer[ i ] = NULL;
}
}
~AudioFilterBank() {
finalize();
}
//
// public interface
//
void initialize(const float sampleRate, const int frameCount) {
finalize();
for (int i = 0; i < _channelCount; ++i) {
_buffer[i] = (float*)malloc(frameCount * sizeof(float));
}
_sampleRate = sampleRate;
_frameCount = frameCount;
reset();
loadProfile(0); // load default profile "flat response" into the bank (see AudioFilterBank.cpp)
}
void finalize() {
for (int i = 0; i < _channelCount; ++i) {
if (_buffer[i]) {
free (_buffer[i]);
_buffer[i] = NULL;
}
}
}
void loadProfile(int profileIndex) {
if (profileIndex >= 0 && profileIndex < _profileCount) {
for (int i = 0; i < _filterCount; ++i) {
FilterParameter p = _profiles[profileIndex][i];
for (int j = 0; j < _channelCount; ++j) {
_filters[i][j].setParameters(_sampleRate,p._p1,p._p2,p._p3);
}
}
}
}
void setParameters(int filterStage, int filterChannel, const float sampleRate, const float frequency, const float gain,
const float slope) {
if (filterStage >= 0 && filterStage < _filterCount && filterChannel >= 0 && filterChannel < _channelCount) {
_filters[filterStage][filterChannel].setParameters(sampleRate,frequency,gain,slope);
}
}
void getParameters(int filterStage, int filterChannel, float& sampleRate, float& frequency, float& gain, float& slope) {
if (filterStage >= 0 && filterStage < _filterCount && filterChannel >= 0 && filterChannel < _channelCount) {
_filters[filterStage][filterChannel].getParameters(sampleRate,frequency,gain,slope);
}
}
void render(const int16_t* in, int16_t* out, const int frameCount) {
if (!_buffer[0] || (frameCount > _frameCount))
return;
const int scale = (2 << ((8 * sizeof(int16_t)) - 1));
// de-interleave and convert int16_t to float32 (normalized to -1. ... 1.)
for (int i = 0; i < frameCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
_buffer[j][i] = ((float)(*in++)) / scale;
}
}
// now step through each filter
for (int i = 0; i < _channelCount; ++i) {
for (int j = 0; j < _filterCount; ++j) {
_filters[j][i].render( &_buffer[i][0], &_buffer[i][0], frameCount );
}
}
// convert float32 to int16_t and interleave
for (int i = 0; i < frameCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
*out++ = (int16_t)(_buffer[j][i] * scale);
}
}
}
void reset() {
for (int i = 0; i < _filterCount; ++i) {
for (int j = 0; j < _channelCount; ++j) {
_filters[i][j].reset();
}
}
}
};
//
// Specializations of AudioFilterBank
//
typedef AudioFilterBank< AudioFilterLSF, 1, 1> AudioFilterLSF1m; // mono bank with one band of LSF
typedef AudioFilterBank< AudioFilterLSF, 1, 2> AudioFilterLSF1s; // stereo bank with one band of LSF
typedef AudioFilterBank< AudioFilterHSF, 1, 1> AudioFilterHSF1m; // mono bank with one band of HSF
typedef AudioFilterBank< AudioFilterHSF, 1, 2> AudioFilterHSF1s; // stereo bank with one band of HSF
typedef AudioFilterBank< AudioFilterPEQ, 1, 1> AudioFilterPEQ1m; // mono bank with one band of PEQ
typedef AudioFilterBank< AudioFilterPEQ, 2, 1> AudioFilterPEQ2m; // mono bank with two bands of PEQ
typedef AudioFilterBank< AudioFilterPEQ, 3, 1> AudioFilterPEQ3m; // mono bank with three bands of PEQ
typedef AudioFilterBank< AudioFilterPEQ, 1, 2> AudioFilterPEQ1s; // stereo bank with one band of PEQ
typedef AudioFilterBank< AudioFilterPEQ, 2, 2> AudioFilterPEQ2s; // stereo bank with two bands of PEQ
typedef AudioFilterBank< AudioFilterPEQ, 3, 2> AudioFilterPEQ3s; // stereo bank with three bands of PEQ
// etc....
#endif // hifi_AudioFilterBank_h
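A hypothetical usage sketch (not from this commit) of one of these banks; the 24 kHz sample rate is an assumption, and frameCount must not exceed the value passed to initialize():
// Run a mono three-band parametric EQ over one frame of int16_t samples, in place.
void equalizeFrame(int16_t* samples, int frameCount) {
    static AudioFilterPEQ3m eq;
    static bool initialized = false;
    if (!initialized) {
        eq.initialize(24000.0f, frameCount); // assumed sample rate
        eq.loadProfile(1);                   // "treble cut" row of _profiles
        initialized = true;
    }
    eq.render(samples, samples, frameCount); // convert to float, filter, convert back
}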

View file: AudioInjector.cpp

@@ -57,8 +57,6 @@ void AudioInjector::injectAudio() {
}
NodeList* nodeList = NodeList::getInstance();
// setup the packet for injected audio
QByteArray injectAudioPacket = byteArrayWithPopulatedHeader(PacketTypeInjectAudio);
QDataStream packetStream(&injectAudioPacket, QIODevice::Append);
@@ -122,6 +120,7 @@ void AudioInjector::injectAudio() {
memcpy(injectAudioPacket.data() + numPreAudioDataBytes, soundByteArray.data() + currentSendPosition, bytesToCopy);
// grab our audio mixer from the NodeList, if it exists
NodeList* nodeList = NodeList::getInstance();
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
// send off this audio packet

View file: AudioRingBuffer.cpp

@@ -20,18 +20,16 @@
#include "AudioRingBuffer.h"
AudioRingBuffer::AudioRingBuffer(int numFrameSamples, bool randomAccessMode, int numFramesCapacity) :
_frameCapacity(numFramesCapacity),
_sampleCapacity(numFrameSamples * numFramesCapacity),
_isFull(false),
_numFrameSamples(numFrameSamples),
_randomAccessMode(randomAccessMode),
_overflowCount(0)
_frameCapacity(numFramesCapacity),
_sampleCapacity(numFrameSamples * numFramesCapacity),
_bufferLength(numFrameSamples * (numFramesCapacity + 1)),
_numFrameSamples(numFrameSamples),
_randomAccessMode(randomAccessMode),
_overflowCount(0)
{
if (numFrameSamples) {
_buffer = new int16_t[_sampleCapacity];
if (_randomAccessMode) {
memset(_buffer, 0, _sampleCapacity * sizeof(int16_t));
}
_buffer = new int16_t[_bufferLength];
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
_nextOutput = _buffer;
_endOfLastWrite = _buffer;
} else {
@@ -53,28 +51,29 @@ void AudioRingBuffer::reset() {
void AudioRingBuffer::resizeForFrameSize(int numFrameSamples) {
delete[] _buffer;
_sampleCapacity = numFrameSamples * _frameCapacity;
_bufferLength = numFrameSamples * (_frameCapacity + 1);
_numFrameSamples = numFrameSamples;
_buffer = new int16_t[_sampleCapacity];
_buffer = new int16_t[_bufferLength];
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
if (_randomAccessMode) {
memset(_buffer, 0, _sampleCapacity * sizeof(int16_t));
memset(_buffer, 0, _bufferLength * sizeof(int16_t));
}
reset();
}
void AudioRingBuffer::clear() {
_isFull = false;
_endOfLastWrite = _buffer;
_nextOutput = _buffer;
}
int AudioRingBuffer::readSamples(int16_t* destination, int maxSamples) {
return readData((char*) destination, maxSamples * sizeof(int16_t));
return readData((char*)destination, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
}
int AudioRingBuffer::readData(char *data, int maxSize) {
// only copy up to the number of samples we have available
int numReadSamples = std::min((int) (maxSize / sizeof(int16_t)), samplesAvailable());
int numReadSamples = std::min((int)(maxSize / sizeof(int16_t)), samplesAvailable());
// If we're in random access mode, then we consider our number of available read samples slightly
// differently. Namely, if anything has been written, we say we have as many samples as they ask for
@@ -83,16 +82,16 @@ int AudioRingBuffer::readData(char *data, int maxSize) {
numReadSamples = _endOfLastWrite ? (maxSize / sizeof(int16_t)) : 0;
}
if (_nextOutput + numReadSamples > _buffer + _sampleCapacity) {
if (_nextOutput + numReadSamples > _buffer + _bufferLength) {
// we're going to need to do two reads to get this data, it wraps around the edge
// read to the end of the buffer
int numSamplesToEnd = (_buffer + _sampleCapacity) - _nextOutput;
int numSamplesToEnd = (_buffer + _bufferLength) - _nextOutput;
memcpy(data, _nextOutput, numSamplesToEnd * sizeof(int16_t));
if (_randomAccessMode) {
memset(_nextOutput, 0, numSamplesToEnd * sizeof(int16_t)); // clear it
}
// read the rest from the beginning of the buffer
memcpy(data + (numSamplesToEnd * sizeof(int16_t)), _buffer, (numReadSamples - numSamplesToEnd) * sizeof(int16_t));
if (_randomAccessMode) {
@@ -108,22 +107,19 @@ int AudioRingBuffer::readData(char *data, int maxSize) {
// push the position of _nextOutput by the number of samples read
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numReadSamples);
if (numReadSamples > 0) {
_isFull = false;
}
return numReadSamples * sizeof(int16_t);
}
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
return writeData((const char*) source, maxSamples * sizeof(int16_t));
int AudioRingBuffer::writeSamples(const int16_t* source, int maxSamples) {
return writeData((const char*)source, maxSamples * sizeof(int16_t)) / sizeof(int16_t);
}
int AudioRingBuffer::writeData(const char* data, int maxSize) {
// make sure we have enough bytes left for this to be the right amount of audio
// otherwise we should not copy that data, and leave the buffer pointers where they are
int samplesToCopy = std::min((int)(maxSize / sizeof(int16_t)), _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (samplesToCopy > samplesRoomFor) {
// there's not enough room for this write. erase old data to make room for this new data
@@ -132,19 +128,16 @@ int AudioRingBuffer::writeData(const char* data, int maxSize) {
_overflowCount++;
qDebug() << "Overflowed ring buffer! Overwriting old data";
}
if (_endOfLastWrite + samplesToCopy <= _buffer + _sampleCapacity) {
if (_endOfLastWrite + samplesToCopy <= _buffer + _bufferLength) {
memcpy(_endOfLastWrite, data, samplesToCopy * sizeof(int16_t));
} else {
int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite;
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
memcpy(_endOfLastWrite, data, numSamplesToEnd * sizeof(int16_t));
memcpy(_buffer, data + (numSamplesToEnd * sizeof(int16_t)), (samplesToCopy - numSamplesToEnd) * sizeof(int16_t));
}
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, samplesToCopy);
if (samplesToCopy > 0 && _endOfLastWrite == _nextOutput) {
_isFull = true;
}
return samplesToCopy * sizeof(int16_t);
}
@@ -158,61 +151,52 @@ const int16_t& AudioRingBuffer::operator[] (const int index) const {
}
void AudioRingBuffer::shiftReadPosition(unsigned int numSamples) {
if (numSamples > 0) {
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples);
_isFull = false;
}
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, numSamples);
}
int AudioRingBuffer::samplesAvailable() const {
if (!_endOfLastWrite) {
return 0;
}
if (_isFull) {
return _sampleCapacity;
}
int sampleDifference = _endOfLastWrite - _nextOutput;
if (sampleDifference < 0) {
sampleDifference += _sampleCapacity;
sampleDifference += _bufferLength;
}
return sampleDifference;
}
int AudioRingBuffer::addSilentFrame(int numSilentSamples) {
int AudioRingBuffer::addSilentSamples(int silentSamples) {
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (numSilentSamples > samplesRoomFor) {
if (silentSamples > samplesRoomFor) {
// there's not enough room for this write. write as many silent samples as we have room for
numSilentSamples = samplesRoomFor;
silentSamples = samplesRoomFor;
qDebug() << "Dropping some silent samples to prevent ring buffer overflow";
}
// memset zeroes into the buffer, accommodating a wrap around the end
// push the _endOfLastWrite to the correct spot
if (_endOfLastWrite + numSilentSamples <= _buffer + _sampleCapacity) {
memset(_endOfLastWrite, 0, numSilentSamples * sizeof(int16_t));
if (_endOfLastWrite + silentSamples <= _buffer + _bufferLength) {
memset(_endOfLastWrite, 0, silentSamples * sizeof(int16_t));
} else {
int numSamplesToEnd = (_buffer + _sampleCapacity) - _endOfLastWrite;
int numSamplesToEnd = (_buffer + _bufferLength) - _endOfLastWrite;
memset(_endOfLastWrite, 0, numSamplesToEnd * sizeof(int16_t));
memset(_buffer, 0, (numSilentSamples - numSamplesToEnd) * sizeof(int16_t));
}
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, numSilentSamples);
if (numSilentSamples > 0 && _nextOutput == _endOfLastWrite) {
_isFull = true;
memset(_buffer, 0, (silentSamples - numSamplesToEnd) * sizeof(int16_t));
}
_endOfLastWrite = shiftedPositionAccomodatingWrap(_endOfLastWrite, silentSamples);
return numSilentSamples * sizeof(int16_t);
return silentSamples;
}
int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const {
if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _sampleCapacity) {
if (numSamplesShift > 0 && position + numSamplesShift >= _buffer + _bufferLength) {
// this shift will wrap the position around to the beginning of the ring
return position + numSamplesShift - _sampleCapacity;
return position + numSamplesShift - _bufferLength;
} else if (numSamplesShift < 0 && position + numSamplesShift < _buffer) {
// this shift will go around to the end of the ring
return position + numSamplesShift + _sampleCapacity;
return position + numSamplesShift + _bufferLength;
} else {
return position + numSamplesShift;
}
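A quick worked example of that wrap arithmetic, purely illustrative:
// With _bufferLength == 1024: shifting _buffer + 1000 by +50 lands on
// _buffer + 26 (1000 + 50 - 1024), and shifting _buffer + 10 by -30 lands on
// _buffer + 1004 (10 - 30 + 1024).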
@@ -221,7 +205,7 @@ int16_t* AudioRingBuffer::shiftedPositionAccomodatingWrap(int16_t* position, int
float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
float loudness = 0.0f;
const int16_t* sampleAt = frameStart;
const int16_t* _bufferLastAt = _buffer + _sampleCapacity - 1;
const int16_t* _bufferLastAt = _buffer + _bufferLength - 1;
for (int i = 0; i < _numFrameSamples; ++i) {
loudness += fabsf(*sampleAt);
@@ -229,11 +213,14 @@ float AudioRingBuffer::getFrameLoudness(const int16_t* frameStart) const {
}
loudness /= _numFrameSamples;
loudness /= MAX_SAMPLE_VALUE;
return loudness;
}
float AudioRingBuffer::getFrameLoudness(ConstIterator frameStart) const {
if (frameStart.isNull()) {
return 0.0f;
}
return getFrameLoudness(&(*frameStart));
}
@@ -241,3 +228,44 @@ float AudioRingBuffer::getNextOutputFrameLoudness() const {
return getFrameLoudness(_nextOutput);
}
int AudioRingBuffer::writeSamples(ConstIterator source, int maxSamples) {
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (samplesToCopy > samplesRoomFor) {
// there's not enough room for this write. erase old data to make room for this new data
int samplesToDelete = samplesToCopy - samplesRoomFor;
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete);
_overflowCount++;
qDebug() << "Overflowed ring buffer! Overwriting old data";
}
int16_t* bufferLast = _buffer + _bufferLength - 1;
for (int i = 0; i < samplesToCopy; i++) {
*_endOfLastWrite = *source;
_endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1;
++source;
}
return samplesToCopy;
}
int AudioRingBuffer::writeSamplesWithFade(ConstIterator source, int maxSamples, float fade) {
int samplesToCopy = std::min(maxSamples, _sampleCapacity);
int samplesRoomFor = _sampleCapacity - samplesAvailable();
if (samplesToCopy > samplesRoomFor) {
// there's not enough room for this write. erase old data to make room for this new data
int samplesToDelete = samplesToCopy - samplesRoomFor;
_nextOutput = shiftedPositionAccomodatingWrap(_nextOutput, samplesToDelete);
_overflowCount++;
qDebug() << "Overflowed ring buffer! Overwriting old data";
}
int16_t* bufferLast = _buffer + _bufferLength - 1;
for (int i = 0; i < samplesToCopy; i++) {
*_endOfLastWrite = (int16_t)((float)(*source) * fade);
_endOfLastWrite = (_endOfLastWrite == bufferLast) ? _buffer : _endOfLastWrite + 1;
++source;
}
return samplesToCopy;
}

View file: AudioRingBuffer.h

@@ -28,7 +28,7 @@ const int NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL = 512;
const int NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL = NETWORK_BUFFER_LENGTH_BYTES_PER_CHANNEL / sizeof(int16_t);
const unsigned int BUFFER_SEND_INTERVAL_USECS = floorf((NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL
/ (float) SAMPLE_RATE) * USECS_PER_SECOND);
/ (float)SAMPLE_RATE) * USECS_PER_SECOND);
const int MAX_SAMPLE_VALUE = std::numeric_limits<int16_t>::max();
const int MIN_SAMPLE_VALUE = std::numeric_limits<int16_t>::min();
@@ -42,33 +42,33 @@ public:
void reset();
void resizeForFrameSize(int numFrameSamples);
void clear();
int getSampleCapacity() const { return _sampleCapacity; }
int getFrameCapacity() const { return _frameCapacity; }
int readSamples(int16_t* destination, int maxSamples);
int writeSamples(const int16_t* source, int maxSamples);
int readData(char* data, int maxSize);
int writeData(const char* data, int maxSize);
int16_t& operator[](const int index);
const int16_t& operator[] (const int index) const;
void shiftReadPosition(unsigned int numSamples);
float getNextOutputFrameLoudness() const;
int samplesAvailable() const;
int framesAvailable() const { return samplesAvailable() / _numFrameSamples; }
int getNumFrameSamples() const { return _numFrameSamples; }
int getOverflowCount() const { return _overflowCount; } /// how many times the ring buffer has overwritten old data
int addSilentFrame(int numSilentSamples);
int addSilentSamples(int samples);
private:
float getFrameLoudness(const int16_t* frameStart) const;
@@ -77,12 +77,12 @@ protected:
// disallow copying of AudioRingBuffer objects
AudioRingBuffer(const AudioRingBuffer&);
AudioRingBuffer& operator= (const AudioRingBuffer&);
int16_t* shiftedPositionAccomodatingWrap(int16_t* position, int numSamplesShift) const;
int _frameCapacity;
int _sampleCapacity;
bool _isFull;
int _bufferLength; // actual length of _buffer: will be one frame larger than _sampleCapacity
int _numFrameSamples;
int16_t* _nextOutput;
int16_t* _endOfLastWrite;
@@ -95,23 +95,25 @@ public:
class ConstIterator { //public std::iterator < std::forward_iterator_tag, int16_t > {
public:
ConstIterator()
: _capacity(0),
: _bufferLength(0),
_bufferFirst(NULL),
_bufferLast(NULL),
_at(NULL) {}
ConstIterator(int16_t* bufferFirst, int capacity, int16_t* at)
: _capacity(capacity),
: _bufferLength(capacity),
_bufferFirst(bufferFirst),
_bufferLast(bufferFirst + capacity - 1),
_at(at) {}
bool isNull() const { return _at == NULL; }
bool operator==(const ConstIterator& rhs) { return _at == rhs._at; }
bool operator!=(const ConstIterator& rhs) { return _at != rhs._at; }
const int16_t& operator*() { return *_at; }
ConstIterator& operator=(const ConstIterator& rhs) {
_capacity = rhs._capacity;
_bufferLength = rhs._bufferLength;
_bufferFirst = rhs._bufferFirst;
_bufferLast = rhs._bufferLast;
_at = rhs._at;
@@ -145,40 +147,54 @@ public:
}
ConstIterator operator+(int i) {
return ConstIterator(_bufferFirst, _capacity, atShiftedBy(i));
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(i));
}
ConstIterator operator-(int i) {
return ConstIterator(_bufferFirst, _capacity, atShiftedBy(-i));
return ConstIterator(_bufferFirst, _bufferLength, atShiftedBy(-i));
}
void readSamples(int16_t* dest, int numSamples) {
int16_t* at = _at;
for (int i = 0; i < numSamples; i++) {
*dest = *(*this);
*dest = *at;
++dest;
++(*this);
at = (at == _bufferLast) ? _bufferFirst : at + 1;
}
}
void readSamplesWithFade(int16_t* dest, int numSamples, float fade) {
int16_t* at = _at;
for (int i = 0; i < numSamples; i++) {
*dest = (float)*at * fade;
++dest;
at = (at == _bufferLast) ? _bufferFirst : at + 1;
}
}
private:
int16_t* atShiftedBy(int i) {
i = (_at - _bufferFirst + i) % _capacity;
i = (_at - _bufferFirst + i) % _bufferLength;
if (i < 0) {
i += _capacity;
i += _bufferLength;
}
return _bufferFirst + i;
}
private:
int _capacity;
int _bufferLength;
int16_t* _bufferFirst;
int16_t* _bufferLast;
int16_t* _at;
};
ConstIterator nextOutput() const { return ConstIterator(_buffer, _sampleCapacity, _nextOutput); }
ConstIterator nextOutput() const { return ConstIterator(_buffer, _bufferLength, _nextOutput); }
ConstIterator lastFrameWritten() const { return ConstIterator(_buffer, _bufferLength, _endOfLastWrite) - _numFrameSamples; }
float getFrameLoudness(ConstIterator frameStart) const;
int writeSamples(ConstIterator source, int maxSamples);
int writeSamplesWithFade(ConstIterator source, int maxSamples, float fade);
};
#endif // hifi_AudioRingBuffer_h
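A hypothetical sketch of the iterator API above: peek at the next frame without consuming it, assuming an existing AudioRingBuffer named ringBuffer and the 256-sample network frame size used elsewhere in the library.
int16_t frame[256];
AudioRingBuffer::ConstIterator cursor = ringBuffer.nextOutput();
cursor.readSamples(frame, 256);                       // copies samples; nothing is popped
float loudness = ringBuffer.getFrameLoudness(cursor); // loudness of that same frame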

View file: InboundAudioStream.cpp

@@ -14,30 +14,37 @@
#include "InboundAudioStream.h"
#include "PacketHeaders.h"
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity,
bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc) :
const int STARVE_HISTORY_CAPACITY = 50;
InboundAudioStream::InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings) :
_ringBuffer(numFrameSamples, false, numFramesCapacity),
_lastPopSucceeded(false),
_lastPopOutput(),
_dynamicJitterBuffers(dynamicJitterBuffers),
_staticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames),
_useStDevForJitterCalc(useStDevForJitterCalc),
_calculatedJitterBufferFramesUsingMaxGap(0),
_calculatedJitterBufferFramesUsingStDev(0),
_desiredJitterBufferFrames(dynamicJitterBuffers ? 1 : staticDesiredJitterBufferFrames),
_maxFramesOverDesired(maxFramesOverDesired),
_dynamicJitterBuffers(settings._dynamicJitterBuffers),
_staticDesiredJitterBufferFrames(settings._staticDesiredJitterBufferFrames),
_useStDevForJitterCalc(settings._useStDevForJitterCalc),
_desiredJitterBufferFrames(settings._dynamicJitterBuffers ? 1 : settings._staticDesiredJitterBufferFrames),
_maxFramesOverDesired(settings._maxFramesOverDesired),
_isStarved(true),
_hasStarted(false),
_consecutiveNotMixedCount(0),
_starveCount(0),
_silentFramesDropped(0),
_oldFramesDropped(0),
_incomingSequenceNumberStats(INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS),
_lastFrameReceivedTime(0),
_interframeTimeGapStatsForJitterCalc(TIME_GAPS_FOR_JITTER_CALC_INTERVAL_SAMPLES, TIME_GAPS_FOR_JITTER_CALC_WINDOW_INTERVALS),
_interframeTimeGapStatsForStatsPacket(TIME_GAPS_FOR_STATS_PACKET_INTERVAL_SAMPLES, TIME_GAPS_FOR_STATS_PACKET_WINDOW_INTERVALS),
_incomingSequenceNumberStats(STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
_lastPacketReceivedTime(0),
_timeGapStatsForDesiredCalcOnTooManyStarves(0, settings._windowSecondsForDesiredCalcOnTooManyStarves),
_calculatedJitterBufferFramesUsingMaxGap(0),
_stdevStatsForDesiredCalcOnTooManyStarves(),
_calculatedJitterBufferFramesUsingStDev(0),
_timeGapStatsForDesiredReduction(0, settings._windowSecondsForDesiredReduction),
_starveHistoryWindowSeconds(settings._windowSecondsForDesiredCalcOnTooManyStarves),
_starveHistory(STARVE_HISTORY_CAPACITY),
_starveThreshold(settings._windowStarveThreshold),
_framesAvailableStat(),
_currentJitterBufferFrames(0)
_currentJitterBufferFrames(0),
_timeGapStatsForStatsPacket(0, STATS_FOR_STATS_PACKET_WINDOW_SECONDS),
_repetitionWithFade(settings._repetitionWithFade)
{
}
@@ -59,11 +66,14 @@ void InboundAudioStream::resetStats() {
_silentFramesDropped = 0;
_oldFramesDropped = 0;
_incomingSequenceNumberStats.reset();
_lastFrameReceivedTime = 0;
_interframeTimeGapStatsForJitterCalc.reset();
_interframeTimeGapStatsForStatsPacket.reset();
_lastPacketReceivedTime = 0;
_timeGapStatsForDesiredCalcOnTooManyStarves.reset();
_stdevStatsForDesiredCalcOnTooManyStarves = StDev();
_timeGapStatsForDesiredReduction.reset();
_starveHistory.clear();
_framesAvailableStat.reset();
_currentJitterBufferFrames = 0;
_timeGapStatsForStatsPacket.reset();
}
void InboundAudioStream::clearBuffer() {
@@ -72,8 +82,11 @@ void InboundAudioStream::clearBuffer() {
_currentJitterBufferFrames = 0;
}
int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
return _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t));
void InboundAudioStream::perSecondCallbackForUpdatingStats() {
_incomingSequenceNumberStats.pushStatsToHistory();
_timeGapStatsForDesiredCalcOnTooManyStarves.currentIntervalComplete();
_timeGapStatsForDesiredReduction.currentIntervalComplete();
_timeGapStatsForStatsPacket.currentIntervalComplete();
}
int InboundAudioStream::parseData(const QByteArray& packet) {
@@ -83,36 +96,51 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
// parse header
int numBytesHeader = numBytesForPacketHeader(packet);
const char* sequenceAt = packet.constData() + numBytesHeader;
const char* dataAt = packet.constData() + numBytesHeader;
int readBytes = numBytesHeader;
// parse sequence number and track it
quint16 sequence = *(reinterpret_cast<const quint16*>(sequenceAt));
quint16 sequence = *(reinterpret_cast<const quint16*>(dataAt));
dataAt += sizeof(quint16);
readBytes += sizeof(quint16);
SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence, senderUUID);
frameReceivedUpdateTimingStats();
packetReceivedUpdateTimingStats();
// TODO: handle generalized silent packet here?????
int networkSamples;
// parse the info after the seq number and before the audio data.(the stream properties)
int numAudioSamples;
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), numAudioSamples);
if (packetType == PacketTypeSilentAudioFrame) {
quint16 numSilentSamples = *(reinterpret_cast<const quint16*>(dataAt));
readBytes += sizeof(quint16);
networkSamples = (int)numSilentSamples;
} else {
// parse the info after the seq number and before the audio data (the stream properties)
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), networkSamples);
}
// handle this packet based on its arrival status.
// For now, late packets are ignored. It may be good in the future to insert the late audio frame
// into the ring buffer to fill in the missing frame if it hasn't been mixed yet.
switch (arrivalInfo._status) {
case SequenceNumberStats::Early: {
// Packet is early; write droppable silent samples for each of the skipped packets.
// NOTE: we assume that each dropped packet contains the same number of samples
// as the packet we just received.
int packetsDropped = arrivalInfo._seqDiffFromExpected;
writeSamplesForDroppedPackets(packetsDropped * numAudioSamples);
writeSamplesForDroppedPackets(packetsDropped * networkSamples);
// fall through to OnTime case
}
case SequenceNumberStats::OnTime: {
readBytes += parseAudioData(packetType, packet.mid(readBytes), numAudioSamples);
// Packet is on time; parse its data to the ringbuffer
if (packetType == PacketTypeSilentAudioFrame) {
writeDroppableSilentSamples(networkSamples);
} else {
readBytes += parseAudioData(packetType, packet.mid(readBytes), networkSamples);
}
break;
}
default: {
// For now, late packets are ignored. It may be good in the future to insert the late audio packet data
// into the ring buffer to fill in the missing frame if it hasn't been mixed yet.
break;
}
}
@@ -139,6 +167,43 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
return readBytes;
}
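Summarizing the wire format parseData() walks above, as a sketch:
// [packet header][quint16 sequence number]
// [stream properties -- or, for PacketTypeSilentAudioFrame, a quint16 silent-sample count]
// [int16_t audio samples ... (absent for silent frames)]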
int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
// mixed audio packets do not have any info between the seq num and the audio data.
numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t);
return 0;
}
int InboundAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
return _ringBuffer.writeData(packetAfterStreamProperties.data(), numAudioSamples * sizeof(int16_t));
}
int InboundAudioStream::writeDroppableSilentSamples(int silentSamples) {
// calculate how many silent frames we should drop.
int samplesPerFrame = _ringBuffer.getNumFrameSamples();
int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
int numSilentFramesToDrop = 0;
if (silentSamples >= samplesPerFrame && _currentJitterBufferFrames > desiredJitterBufferFramesPlusPadding) {
// our avg jitter buffer size exceeds its desired value, so ignore some silent
// frames to get that size as close to desired as possible
int numSilentFramesToDropDesired = _currentJitterBufferFrames - desiredJitterBufferFramesPlusPadding;
int numSilentFramesReceived = silentSamples / samplesPerFrame;
numSilentFramesToDrop = std::min(numSilentFramesToDropDesired, numSilentFramesReceived);
// don't reset _currentJitterBufferFrames here; we want to be able to drop further silent frames
// without waiting for _framesAvailableStat to fill up with 10 seconds of samples.
_currentJitterBufferFrames -= numSilentFramesToDrop;
_silentFramesDropped += numSilentFramesToDrop;
_framesAvailableStat.reset();
}
int ret = _ringBuffer.addSilentSamples(silentSamples - numSilentFramesToDrop * samplesPerFrame);
return ret;
}
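A worked example of the dropping logic, illustrative only: with _desiredJitterBufferFrames = 3 (so desired-plus-padding = 4) and _currentJitterBufferFrames = 8, a silent packet carrying three frames' worth of samples would ideally shed four frames; only three were received, so all three silent frames are dropped, nothing is written, and _currentJitterBufferFrames falls to 5.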
int InboundAudioStream::popSamples(int maxSamples, bool allOrNothing, bool starveIfNoSamplesPopped) {
int samplesPopped = 0;
int samplesAvailable = _ringBuffer.samplesAvailable();
@@ -216,12 +281,61 @@ void InboundAudioStream::framesAvailableChanged() {
}
void InboundAudioStream::setToStarved() {
_isStarved = true;
_consecutiveNotMixedCount = 0;
_starveCount++;
// if we have more than the desired frames when setToStarved() is called, then we'll immediately
// be considered refilled. in that case, there's no need to set _isStarved to true.
_isStarved = (_ringBuffer.framesAvailable() < _desiredJitterBufferFrames);
// record the time of this starve in the starve history
quint64 now = usecTimestampNow();
_starveHistory.insert(now);
if (_dynamicJitterBuffers) {
// dynamic jitter buffers are enabled. check if this starve put us over the window
// starve threshold
quint64 windowEnd = now - _starveHistoryWindowSeconds * USECS_PER_SECOND;
RingBufferHistory<quint64>::Iterator starvesIterator = _starveHistory.begin();
RingBufferHistory<quint64>::Iterator end = _starveHistory.end();
int starvesInWindow = 1;
do {
++starvesIterator;
if (*starvesIterator < windowEnd) {
break;
}
starvesInWindow++;
} while (starvesIterator != end);
// this starve put us over the starve threshold. update _desiredJitterBufferFrames to
// value determined by window A.
if (starvesInWindow >= _starveThreshold) {
int calculatedJitterBufferFrames;
if (_useStDevForJitterCalc) {
calculatedJitterBufferFrames = _calculatedJitterBufferFramesUsingStDev;
} else {
// we don't know when the next packet will arrive, so it's possible the gap between the last packet and the
// next packet will exceed the max time gap in the window. If the time since the last packet has already exceeded
// the window max gap, then we should use that value to calculate desired frames.
int framesSinceLastPacket = ceilf((float)(now - _lastPacketReceivedTime) / (float)BUFFER_SEND_INTERVAL_USECS);
calculatedJitterBufferFrames = std::max(_calculatedJitterBufferFramesUsingMaxGap, framesSinceLastPacket);
}
// make sure _desiredJitterBufferFrames does not become lower here
if (calculatedJitterBufferFrames >= _desiredJitterBufferFrames) {
_desiredJitterBufferFrames = calculatedJitterBufferFrames;
}
}
}
}
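An illustrative stand-in (not the committed code) for the trailing-window count the loop above performs over _starveHistory, written against a plain std::deque of timestamps:
#include <deque>
int starvesInTrailingWindow(const std::deque<quint64>& starveTimes, quint64 now, int windowSeconds) {
    quint64 windowStart = now - (quint64)windowSeconds * USECS_PER_SECOND;
    int count = 0;
    for (size_t i = 0; i < starveTimes.size(); i++) {
        if (starveTimes[i] >= windowStart) { // this starve falls inside the window
            count++;
        }
    }
    return count;
}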
void InboundAudioStream::setSettings(const Settings& settings) {
setMaxFramesOverDesired(settings._maxFramesOverDesired);
setDynamicJitterBuffers(settings._dynamicJitterBuffers);
setStaticDesiredJitterBufferFrames(settings._staticDesiredJitterBufferFrames);
setUseStDevForJitterCalc(settings._useStDevForJitterCalc);
setWindowStarveThreshold(settings._windowStarveThreshold);
setWindowSecondsForDesiredCalcOnTooManyStarves(settings._windowSecondsForDesiredCalcOnTooManyStarves);
setWindowSecondsForDesiredReduction(settings._windowSecondsForDesiredReduction);
setRepetitionWithFade(settings._repetitionWithFade);
}
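A hypothetical caller-side sketch of setSettings(); the values shown just restate the DEFAULT_* constants from InboundAudioStream.h:
InboundAudioStream::Settings streamSettings;           // starts from the defaults
streamSettings._dynamicJitterBuffers = true;
streamSettings._windowStarveThreshold = 3;             // starves tolerated per window A
streamSettings._windowSecondsForDesiredReduction = 10; // length of window B in seconds
stream->setSettings(streamSettings);                   // stream: any InboundAudioStream*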
void InboundAudioStream::setDynamicJitterBuffers(bool dynamicJitterBuffers) {
@@ -229,6 +343,7 @@ void InboundAudioStream::setDynamicJitterBuffers(bool dynamicJitterBuffers) {
_desiredJitterBufferFrames = _staticDesiredJitterBufferFrames;
} else {
if (!_dynamicJitterBuffers) {
// if we're enabling dynamic jitter buffer frames, start desired frames at 1
_desiredJitterBufferFrames = 1;
}
}
@@ -242,90 +357,102 @@ void InboundAudioStream::setStaticDesiredJitterBufferFrames(int staticDesiredJit
}
}
void InboundAudioStream::setWindowSecondsForDesiredCalcOnTooManyStarves(int windowSecondsForDesiredCalcOnTooManyStarves) {
_timeGapStatsForDesiredCalcOnTooManyStarves.setWindowIntervals(windowSecondsForDesiredCalcOnTooManyStarves);
_starveHistoryWindowSeconds = windowSecondsForDesiredCalcOnTooManyStarves;
}
void InboundAudioStream::setWindowSecondsForDesiredReduction(int windowSecondsForDesiredReduction) {
_timeGapStatsForDesiredReduction.setWindowIntervals(windowSecondsForDesiredReduction);
}
int InboundAudioStream::clampDesiredJitterBufferFramesValue(int desired) const {
const int MIN_FRAMES_DESIRED = 0;
const int MAX_FRAMES_DESIRED = _ringBuffer.getFrameCapacity();
return glm::clamp(desired, MIN_FRAMES_DESIRED, MAX_FRAMES_DESIRED);
}
void InboundAudioStream::frameReceivedUpdateTimingStats() {
void InboundAudioStream::packetReceivedUpdateTimingStats() {
// update our timegap stats and desired jitter buffer frames if necessary
// discard the first few packets we receive since they usually have gaps that aren't representative of normal jitter
const int NUM_INITIAL_PACKETS_DISCARD = 3;
quint64 now = usecTimestampNow();
if (_incomingSequenceNumberStats.getReceived() > NUM_INITIAL_PACKETS_DISCARD) {
quint64 gap = now - _lastFrameReceivedTime;
_interframeTimeGapStatsForStatsPacket.update(gap);
quint64 gap = now - _lastPacketReceivedTime;
_timeGapStatsForStatsPacket.update(gap);
const float USECS_PER_FRAME = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * USECS_PER_SECOND / (float)SAMPLE_RATE;
// update all stats used for desired frames calculations under dynamic jitter buffer mode
_timeGapStatsForDesiredCalcOnTooManyStarves.update(gap);
_stdevStatsForDesiredCalcOnTooManyStarves.addValue(gap);
_timeGapStatsForDesiredReduction.update(gap);
// update stats for Freddy's method of jitter calc
_interframeTimeGapStatsForJitterCalc.update(gap);
if (_interframeTimeGapStatsForJitterCalc.getNewStatsAvailableFlag()) {
_calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_interframeTimeGapStatsForJitterCalc.getWindowMax() / USECS_PER_FRAME);
_interframeTimeGapStatsForJitterCalc.clearNewStatsAvailableFlag();
if (_dynamicJitterBuffers && !_useStDevForJitterCalc) {
_desiredJitterBufferFrames = clampDesiredJitterBufferFramesValue(_calculatedJitterBufferFramesUsingMaxGap);
}
if (_timeGapStatsForDesiredCalcOnTooManyStarves.getNewStatsAvailableFlag()) {
_calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_timeGapStatsForDesiredCalcOnTooManyStarves.getWindowMax()
/ (float)BUFFER_SEND_INTERVAL_USECS);
_timeGapStatsForDesiredCalcOnTooManyStarves.clearNewStatsAvailableFlag();
}
// update stats for Philip's method of jitter calc
_stdev.addValue(gap);
const int STANDARD_DEVIATION_SAMPLE_COUNT = 500;
if (_stdev.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) {
if (_stdevStatsForDesiredCalcOnTooManyStarves.getSamples() > STANDARD_DEVIATION_SAMPLE_COUNT) {
const float NUM_STANDARD_DEVIATIONS = 3.0f;
_calculatedJitterBufferFramesUsingStDev = (int)ceilf(NUM_STANDARD_DEVIATIONS * _stdev.getStDev() / USECS_PER_FRAME);
_stdev.reset();
_calculatedJitterBufferFramesUsingStDev = ceilf(NUM_STANDARD_DEVIATIONS * _stdevStatsForDesiredCalcOnTooManyStarves.getStDev()
/ (float)BUFFER_SEND_INTERVAL_USECS);
_stdevStatsForDesiredCalcOnTooManyStarves.reset();
}
if (_dynamicJitterBuffers && _useStDevForJitterCalc) {
_desiredJitterBufferFrames = clampDesiredJitterBufferFramesValue(_calculatedJitterBufferFramesUsingStDev);
if (_dynamicJitterBuffers) {
// if the max gap in window B (_timeGapStatsForDesiredReduction) corresponds to a smaller number of frames than _desiredJitterBufferFrames,
// then reduce _desiredJitterBufferFrames to that number of frames.
if (_timeGapStatsForDesiredReduction.getNewStatsAvailableFlag() && _timeGapStatsForDesiredReduction.isWindowFilled()) {
int calculatedJitterBufferFrames = ceilf((float)_timeGapStatsForDesiredReduction.getWindowMax() / (float)BUFFER_SEND_INTERVAL_USECS);
if (calculatedJitterBufferFrames < _desiredJitterBufferFrames) {
_desiredJitterBufferFrames = calculatedJitterBufferFrames;
}
_timeGapStatsForDesiredReduction.clearNewStatsAvailableFlag();
}
}
}
_lastFrameReceivedTime = now;
_lastPacketReceivedTime = now;
}
int InboundAudioStream::writeDroppableSilentSamples(int numSilentSamples) {
// calculate how many silent frames we should drop.
int samplesPerFrame = _ringBuffer.getNumFrameSamples();
int desiredJitterBufferFramesPlusPadding = _desiredJitterBufferFrames + DESIRED_JITTER_BUFFER_FRAMES_PADDING;
int numSilentFramesToDrop = 0;
if (numSilentSamples >= samplesPerFrame && _currentJitterBufferFrames > desiredJitterBufferFramesPlusPadding) {
// our avg jitter buffer size exceeds its desired value, so ignore some silent
// frames to get that size as close to desired as possible
int numSilentFramesToDropDesired = _currentJitterBufferFrames - desiredJitterBufferFramesPlusPadding;
int numSilentFramesReceived = numSilentSamples / samplesPerFrame;
numSilentFramesToDrop = std::min(numSilentFramesToDropDesired, numSilentFramesReceived);
// don't reset _currentJitterBufferFrames here; we want to be able to drop further silent frames
// without waiting for _framesAvailableStat to fill up with 10 seconds of samples.
_currentJitterBufferFrames -= numSilentFramesToDrop;
_silentFramesDropped += numSilentFramesToDrop;
_framesAvailableStat.reset();
int InboundAudioStream::writeSamplesForDroppedPackets(int networkSamples) {
if (_repetitionWithFade) {
return writeLastFrameRepeatedWithFade(networkSamples);
}
return _ringBuffer.addSilentFrame(numSilentSamples - numSilentFramesToDrop * samplesPerFrame);
return writeDroppableSilentSamples(networkSamples);
}
int InboundAudioStream::writeSamplesForDroppedPackets(int numSamples) {
return writeDroppableSilentSamples(numSamples);
int InboundAudioStream::writeLastFrameRepeatedWithFade(int samples) {
AudioRingBuffer::ConstIterator frameToRepeat = _ringBuffer.lastFrameWritten();
int frameSize = _ringBuffer.getNumFrameSamples();
int samplesToWrite = samples;
int indexOfRepeat = 0;
do {
int samplesToWriteThisIteration = std::min(samplesToWrite, frameSize);
float fade = calculateRepeatedFrameFadeFactor(indexOfRepeat);
if (fade == 1.0f) {
samplesToWrite -= _ringBuffer.writeSamples(frameToRepeat, samplesToWriteThisIteration);
} else {
samplesToWrite -= _ringBuffer.writeSamplesWithFade(frameToRepeat, samplesToWriteThisIteration, fade);
}
indexOfRepeat++;
} while (samplesToWrite > 0);
return samples;
}
AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
AudioStreamStats streamStats;
streamStats._timeGapMin = _interframeTimeGapStatsForStatsPacket.getMin();
streamStats._timeGapMax = _interframeTimeGapStatsForStatsPacket.getMax();
streamStats._timeGapAverage = _interframeTimeGapStatsForStatsPacket.getAverage();
streamStats._timeGapWindowMin = _interframeTimeGapStatsForStatsPacket.getWindowMin();
streamStats._timeGapWindowMax = _interframeTimeGapStatsForStatsPacket.getWindowMax();
streamStats._timeGapWindowAverage = _interframeTimeGapStatsForStatsPacket.getWindowAverage();
streamStats._timeGapMin = _timeGapStatsForStatsPacket.getMin();
streamStats._timeGapMax = _timeGapStatsForStatsPacket.getMax();
streamStats._timeGapAverage = _timeGapStatsForStatsPacket.getAverage();
streamStats._timeGapWindowMin = _timeGapStatsForStatsPacket.getWindowMin();
streamStats._timeGapWindowMax = _timeGapStatsForStatsPacket.getWindowMax();
streamStats._timeGapWindowAverage = _timeGapStatsForStatsPacket.getWindowAverage();
streamStats._framesAvailable = _ringBuffer.framesAvailable();
streamStats._framesAvailableAverage = _framesAvailableStat.getAverage();
@@ -341,7 +468,24 @@ AudioStreamStats InboundAudioStream::getAudioStreamStats() const {
return streamStats;
}
AudioStreamStats InboundAudioStream::updateSeqHistoryAndGetAudioStreamStats() {
_incomingSequenceNumberStats.pushStatsToHistory();
return getAudioStreamStats();
float calculateRepeatedFrameFadeFactor(int indexOfRepeat) {
// fade factor scheme is from this paper:
// http://inst.eecs.berkeley.edu/~ee290t/sp04/lectures/packet_loss_recov_paper11.pdf
const float INITIAL_MSECS_NO_FADE = 20.0f;
const float MSECS_FADE_TO_ZERO = 320.0f;
const float INITIAL_FRAMES_NO_FADE = INITIAL_MSECS_NO_FADE * (float)USECS_PER_MSEC / (float)BUFFER_SEND_INTERVAL_USECS;
const float FRAMES_FADE_TO_ZERO = MSECS_FADE_TO_ZERO * (float)USECS_PER_MSEC / (float)BUFFER_SEND_INTERVAL_USECS;
const float SAMPLE_RANGE = std::numeric_limits<int16_t>::max();
if (indexOfRepeat <= INITIAL_FRAMES_NO_FADE) {
return 1.0f;
} else if (indexOfRepeat <= INITIAL_FRAMES_NO_FADE + FRAMES_FADE_TO_ZERO) {
return pow(SAMPLE_RANGE, -(indexOfRepeat - INITIAL_FRAMES_NO_FADE) / FRAMES_FADE_TO_ZERO);
//return 1.0f - ((indexOfRepeat - INITIAL_FRAMES_NO_FADE) / FRAMES_FADE_TO_ZERO);
}
return 0.0f;
}
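Putting illustrative numbers on the scheme above (assuming the library's 256-sample network frames at 24 kHz, i.e. a ~10.7 ms send interval):
// INITIAL_FRAMES_NO_FADE ~= 20 / 10.7  ~= 1.9 frames repeated at full volume
// FRAMES_FADE_TO_ZERO    ~= 320 / 10.7 ~= 30 frames of exponential decay
// so repeats 0 and 1 are unfaded, and by roughly repeat 32 the factor reaches
// 1/32767 -- below one int16_t quantization step, i.e. effectively silence.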

View file: InboundAudioStream.h

@@ -22,43 +22,84 @@
#include "TimeWeightedAvg.h"
// This adds some number of frames to the desired jitter buffer frames target we use when we're dropping frames.
// The larger this value is, the less aggressive we are about reducing the jitter buffer length.
// Setting this to 0 will try to get the jitter buffer to be exactly _desiredJitterBufferFrames long when dropping frames,
// The larger this value is, the fewer frames we drop when attempting to reduce the jitter buffer length.
// Setting this to 0 will try to get the jitter buffer to be exactly _desiredJitterBufferFrames when dropping frames,
// which could lead to a starve soon after.
const int DESIRED_JITTER_BUFFER_FRAMES_PADDING = 1;
// the time gaps stats for _desiredJitterBufferFrames calculation
// will recalculate the max for the past 5000 samples every 500 samples
const int TIME_GAPS_FOR_JITTER_CALC_INTERVAL_SAMPLES = 500;
const int TIME_GAPS_FOR_JITTER_CALC_WINDOW_INTERVALS = 10;
// the time gap stats for constructing AudioStreamStats will
// recalculate min/max/avg every ~1 second for the past ~30 seconds of time gap data
const int TIME_GAPS_FOR_STATS_PACKET_INTERVAL_SAMPLES = USECS_PER_SECOND / BUFFER_SEND_INTERVAL_USECS;
const int TIME_GAPS_FOR_STATS_PACKET_WINDOW_INTERVALS = 30;
// this controls the length of the window for stats used in the stats packet (not the stats used in
// _desiredJitterBufferFrames calculation)
const int STATS_FOR_STATS_PACKET_WINDOW_SECONDS = 30;
// this controls the window size of the time-weighted avg of frames available. Every time the window fills up,
// _currentJitterBufferFrames is updated with the time-weighted avg and the running time-weighted avg is reset.
const int FRAMES_AVAILABLE_STAT_WINDOW_USECS = 2 * USECS_PER_SECOND;
// the internal history buffer of the incoming seq stats will cover 30s to calculate
// packet loss % over last 30s
const int INCOMING_SEQ_STATS_HISTORY_LENGTH_SECONDS = 30;
const int INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
const int FRAMES_AVAILABLE_STAT_WINDOW_USECS = 10 * USECS_PER_SECOND;
// default values for members of the Settings struct
const int DEFAULT_MAX_FRAMES_OVER_DESIRED = 10;
const int DEFAULT_DESIRED_JITTER_BUFFER_FRAMES = 1;
const bool DEFAULT_DYNAMIC_JITTER_BUFFERS = true;
const int DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES = 1;
const bool DEFAULT_USE_STDEV_FOR_JITTER_CALC = false;
const int DEFAULT_WINDOW_STARVE_THRESHOLD = 3;
const int DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES = 50;
const int DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION = 10;
const bool DEFAULT_REPETITION_WITH_FADE = true;
class InboundAudioStream : public NodeData {
Q_OBJECT
public:
InboundAudioStream(int numFrameSamples, int numFramesCapacity,
bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired,
bool useStDevForJitterCalc = false);
class Settings {
public:
Settings()
: _maxFramesOverDesired(DEFAULT_MAX_FRAMES_OVER_DESIRED),
_dynamicJitterBuffers(DEFAULT_DYNAMIC_JITTER_BUFFERS),
_staticDesiredJitterBufferFrames(DEFAULT_STATIC_DESIRED_JITTER_BUFFER_FRAMES),
_useStDevForJitterCalc(DEFAULT_USE_STDEV_FOR_JITTER_CALC),
_windowStarveThreshold(DEFAULT_WINDOW_STARVE_THRESHOLD),
_windowSecondsForDesiredCalcOnTooManyStarves(DEFAULT_WINDOW_SECONDS_FOR_DESIRED_CALC_ON_TOO_MANY_STARVES),
_windowSecondsForDesiredReduction(DEFAULT_WINDOW_SECONDS_FOR_DESIRED_REDUCTION),
_repetitionWithFade(DEFAULT_REPETITION_WITH_FADE)
{}
Settings(int maxFramesOverDesired, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames,
bool useStDevForJitterCalc, int windowStarveThreshold, int windowSecondsForDesiredCalcOnTooManyStarves,
int windowSecondsForDesiredReduction, bool repetitionWithFade)
: _maxFramesOverDesired(maxFramesOverDesired),
_dynamicJitterBuffers(dynamicJitterBuffers),
_staticDesiredJitterBufferFrames(staticDesiredJitterBufferFrames),
_useStDevForJitterCalc(useStDevForJitterCalc),
_windowStarveThreshold(windowStarveThreshold),
_windowSecondsForDesiredCalcOnTooManyStarves(windowSecondsForDesiredCalcOnTooManyStarves),
_windowSecondsForDesiredReduction(windowSecondsForDesiredReduction),
_repetitionWithFade(repetitionWithFade)
{}
// max number of frames over desired in the ringbuffer.
int _maxFramesOverDesired;
// if false, _desiredJitterBufferFrames will always be _staticDesiredJitterBufferFrames. Otherwise,
// either Fred's or Philip's method will be used to calculate _desiredJitterBufferFrames based on packet timegaps.
bool _dynamicJitterBuffers;
// settings for static jitter buffer mode
int _staticDesiredJitterBufferFrames;
// settings for dynamic jitter buffer mode
bool _useStDevForJitterCalc; // if true, Philip's method is used; otherwise, Fred's method is used.
int _windowStarveThreshold;
int _windowSecondsForDesiredCalcOnTooManyStarves;
int _windowSecondsForDesiredReduction;
// if true, the prev frame will be repeated (fading to silence) for dropped frames.
// otherwise, silence will be inserted.
bool _repetitionWithFade;
};
public:
InboundAudioStream(int numFrameSamples, int numFramesCapacity, const Settings& settings);
void reset();
void resetStats();
virtual void resetStats();
void clearBuffer();
virtual int parseData(const QByteArray& packet);
@@ -72,14 +113,18 @@ public:
void setToStarved();
void setDynamicJitterBuffers(bool dynamicJitterBuffers);
void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames);
/// this function should be called once per second to ensure the seq num stats history spans ~30 seconds
AudioStreamStats updateSeqHistoryAndGetAudioStreamStats();
void setSettings(const Settings& settings);
void setMaxFramesOverDesired(int maxFramesOverDesired) { _maxFramesOverDesired = maxFramesOverDesired; }
void setDynamicJitterBuffers(bool dynamicJitterBuffers);
void setStaticDesiredJitterBufferFrames(int staticDesiredJitterBufferFrames);
void setUseStDevForJitterCalc(bool useStDevForJitterCalc) { _useStDevForJitterCalc = useStDevForJitterCalc; }
void setWindowStarveThreshold(int windowStarveThreshold) { _starveThreshold = windowStarveThreshold; }
void setWindowSecondsForDesiredCalcOnTooManyStarves(int windowSecondsForDesiredCalcOnTooManyStarves);
void setWindowSecondsForDesiredReduction(int windowSecondsForDesiredReduction);
void setRepetitionWithFade(bool repetitionWithFade) { _repetitionWithFade = repetitionWithFade; }
virtual AudioStreamStats getAudioStreamStats() const;
@ -110,11 +155,17 @@ public:
int getPacketsReceived() const { return _incomingSequenceNumberStats.getReceived(); }
public slots:
/// This function should be called once per second for all the stats to function properly. If dynamic
/// jitter buffers are enabled, those stats are used to calculate _desiredJitterBufferFrames.
/// If the stats are not otherwise used and dynamic jitter buffers are disabled, calling this function is not necessary.
void perSecondCallbackForUpdatingStats();
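A sketch of one way a caller might honor the once-per-second contract, assuming a QTimer owned by the stream's consumer (the 'owner' and 'stream' names are illustrative, not from this diff):

    QTimer* statsTimer = new QTimer(owner);            // 'owner' is any QObject parent
    QObject::connect(statsTimer, &QTimer::timeout,
                     stream, &InboundAudioStream::perSecondCallbackForUpdatingStats);
    statsTimer->start(1000);                           // once per second, in milliseconds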
private:
void frameReceivedUpdateTimingStats();
void packetReceivedUpdateTimingStats();
int clampDesiredJitterBufferFramesValue(int desired) const;
int writeSamplesForDroppedPackets(int numSamples);
int writeSamplesForDroppedPackets(int networkSamples);
void popSamplesNoCheck(int samples);
void framesAvailableChanged();
@ -126,13 +177,19 @@ protected:
/// parses the info between the seq num and the audio data in the network packet and calculates
/// how many audio samples this packet contains (used when filling in samples for dropped packets).
virtual int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) = 0;
/// default implementation assumes no stream properties and raw audio samples after stream properties
virtual int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& networkSamples);
/// parses the audio data in the network packet.
/// default implementation assumes packet contains raw audio samples after stream properties
virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples);
virtual int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples);
int writeDroppableSilentSamples(int numSilentSamples);
/// writes silent samples to the buffer that may be dropped to reduce latency caused by the buffer
virtual int writeDroppableSilentSamples(int silentSamples);
/// writes the last written frame repeatedly, gradually fading to silence.
/// used for writing samples for dropped packets.
virtual int writeLastFrameRepeatedWithFade(int samples);
protected:
@ -147,8 +204,6 @@ protected:
// if the jitter buffer is dynamic, this determines which method is used to calculate _desiredJitterBufferFrames:
// if true, Philip's timegap std dev calculation is used. Otherwise, Freddy's max timegap calculation is used
bool _useStDevForJitterCalc;
int _calculatedJitterBufferFramesUsingMaxGap;
int _calculatedJitterBufferFramesUsingStDev;
int _desiredJitterBufferFrames;
@ -168,16 +223,28 @@ protected:
SequenceNumberStats _incomingSequenceNumberStats;
quint64 _lastFrameReceivedTime;
MovingMinMaxAvg<quint64> _interframeTimeGapStatsForJitterCalc;
StDev _stdev;
MovingMinMaxAvg<quint64> _interframeTimeGapStatsForStatsPacket;
quint64 _lastPacketReceivedTime;
MovingMinMaxAvg<quint64> _timeGapStatsForDesiredCalcOnTooManyStarves; // for Freddy's method
int _calculatedJitterBufferFramesUsingMaxGap;
StDev _stdevStatsForDesiredCalcOnTooManyStarves; // for Philip's method
int _calculatedJitterBufferFramesUsingStDev; // the most recent desired frames calculated by Philip's method
MovingMinMaxAvg<quint64> _timeGapStatsForDesiredReduction;
int _starveHistoryWindowSeconds;
RingBufferHistory<quint64> _starveHistory;
int _starveThreshold;
TimeWeightedAvg<int> _framesAvailableStat;
// this value is based on the time-weighted avg from _framesAvailableStat. it is only used for
// this value is periodically updated with the time-weighted avg from _framesAvailableStat. it is only used for
// dropping silent frames right now.
int _currentJitterBufferFrames;
MovingMinMaxAvg<quint64> _timeGapStatsForStatsPacket;
bool _repetitionWithFade;
};
float calculateRepeatedFrameFadeFactor(int indexOfRepeat);
#endif // hifi_InboundAudioStream_h
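calculateRepeatedFrameFadeFactor() is declared above but defined in the .cpp; as a rough sketch of how a caller could apply it when repeating the last frame for dropped packets (all names below are illustrative, not from this diff):

    for (int repeat = 0; repeat < droppedFrames; repeat++) {
        float fade = calculateRepeatedFrameFadeFactor(repeat);  // presumably decays toward 0.0f
        for (int i = 0; i < samplesPerFrame; i++) {
            fadedFrame[i] = (int16_t)(lastFrame[i] * fade);     // scale the repeated frame down
        }
        // write fadedFrame into the ring buffer...
    }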

View file

@ -19,8 +19,8 @@
#include "InjectedAudioStream.h"
InjectedAudioStream::InjectedAudioStream(const QUuid& streamIdentifier, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired) :
PositionalAudioStream(PositionalAudioStream::Injector, false, dynamicJitterBuffer, staticDesiredJitterBufferFrames, maxFramesOverDesired),
InjectedAudioStream::InjectedAudioStream(const QUuid& streamIdentifier, const InboundAudioStream::Settings& settings) :
PositionalAudioStream(PositionalAudioStream::Injector, false, settings),
_streamIdentifier(streamIdentifier),
_radius(0.0f),
_attenuationRatio(0)

View file

@ -18,7 +18,7 @@
class InjectedAudioStream : public PositionalAudioStream {
public:
InjectedAudioStream(const QUuid& streamIdentifier, bool dynamicJitterBuffer, int staticDesiredJitterBufferFrames, int maxFramesOverDesired);
InjectedAudioStream(const QUuid& streamIdentifier, const InboundAudioStream::Settings& settings);
float getRadius() const { return _radius; }
float getAttenuationRatio() const { return _attenuationRatio; }

View file

@ -11,13 +11,7 @@
#include "MixedAudioStream.h"
MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc)
: InboundAudioStream(numFrameSamples, numFramesCapacity, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired, useStDevForJitterCalc)
MixedAudioStream::MixedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings)
: InboundAudioStream(numFrameSamples, numFramesCapacity, settings)
{
}
int MixedAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
// mixed audio packets do not have any info between the seq num and the audio data.
numAudioSamples = packetAfterSeqNum.size() / sizeof(int16_t);
return 0;
}

View file

@ -17,12 +17,9 @@
class MixedAudioStream : public InboundAudioStream {
public:
MixedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc);
MixedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings);
float getNextOutputFrameLoudness() const { return _ringBuffer.getNextOutputFrameLoudness(); }
protected:
int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
};
#endif // hifi_MixedAudioStream_h

View file

@ -11,35 +11,53 @@
#include "MixedProcessedAudioStream.h"
MixedProcessedAudioStream::MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc)
: InboundAudioStream(numFrameSamples, numFramesCapacity, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired, useStDevForJitterCalc)
static const int STEREO_FACTOR = 2;
MixedProcessedAudioStream::MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings)
: InboundAudioStream(numFrameSamples, numFramesCapacity, settings)
{
}
void MixedProcessedAudioStream::outputFormatChanged(int outputFormatChannelCountTimesSampleRate) {
_outputFormatChannelsTimesSampleRate = outputFormatChannelCountTimesSampleRate;
int deviceOutputFrameSize = NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL * _outputFormatChannelsTimesSampleRate / SAMPLE_RATE;
int deviceOutputFrameSize = networkToDeviceSamples(NETWORK_BUFFER_LENGTH_SAMPLES_STEREO);
_ringBuffer.resizeForFrameSize(deviceOutputFrameSize);
}
int MixedProcessedAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
// mixed audio packets do not have any info between the seq num and the audio data.
int numNetworkSamples = packetAfterSeqNum.size() / sizeof(int16_t);
int MixedProcessedAudioStream::writeDroppableSilentSamples(int silentSamples) {
int deviceSilentSamplesWritten = InboundAudioStream::writeDroppableSilentSamples(networkToDeviceSamples(silentSamples));
emit addedSilence(deviceToNetworkSamples(deviceSilentSamplesWritten) / STEREO_FACTOR);
// since numAudioSamples is used to know how many samples to add for each dropped packet before this one,
// we want to set it to the number of device audio samples since this stream contains device audio samples, not network samples.
const int STEREO_DIVIDER = 2;
numAudioSamples = numNetworkSamples * _outputFormatChannelsTimesSampleRate / (STEREO_DIVIDER * SAMPLE_RATE);
return 0;
return deviceSilentSamplesWritten;
}
int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples) {
int MixedProcessedAudioStream::writeLastFrameRepeatedWithFade(int samples) {
int deviceSamplesWritten = InboundAudioStream::writeLastFrameRepeatedWithFade(networkToDeviceSamples(samples));
emit addedLastFrameRepeatedWithFade(deviceToNetworkSamples(deviceSamplesWritten) / STEREO_FACTOR);
return deviceSamplesWritten;
}
int MixedProcessedAudioStream::parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples) {
emit addedStereoSamples(packetAfterStreamProperties);
QByteArray outputBuffer;
emit processSamples(packetAfterStreamProperties, outputBuffer);
_ringBuffer.writeData(outputBuffer.data(), outputBuffer.size());
return packetAfterStreamProperties.size();
}
int MixedProcessedAudioStream::networkToDeviceSamples(int networkSamples) {
return (quint64)networkSamples * (quint64)_outputFormatChannelsTimesSampleRate / (quint64)(STEREO_FACTOR * SAMPLE_RATE);
}
int MixedProcessedAudioStream::deviceToNetworkSamples(int deviceSamples) {
return (quint64)deviceSamples * (quint64)(STEREO_FACTOR * SAMPLE_RATE) / (quint64)_outputFormatChannelsTimesSampleRate;
}
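A worked example of the two conversions above, assuming (for illustration only) a 24 kHz network sample rate and a stereo 48 kHz output format, so _outputFormatChannelsTimesSampleRate = 2 * 48000 = 96000:

    // networkToDeviceSamples(240) = 240 * 96000 / (2 * 24000) = 480 device samples
    // deviceToNetworkSamples(480) = 480 * (2 * 24000) / 96000 = 240 network samples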

View file

@ -14,21 +14,32 @@
#include "InboundAudioStream.h"
class Audio;
class MixedProcessedAudioStream : public InboundAudioStream {
Q_OBJECT
public:
MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames, int maxFramesOverDesired, bool useStDevForJitterCalc);
MixedProcessedAudioStream(int numFrameSamples, int numFramesCapacity, const InboundAudioStream::Settings& settings);
signals:
void addedSilence(int silentSamplesPerChannel);
void addedLastFrameRepeatedWithFade(int samplesPerChannel);
void addedStereoSamples(const QByteArray& samples);
void processSamples(const QByteArray& inputBuffer, QByteArray& outputBuffer);
public:
void outputFormatChanged(int outputFormatChannelCountTimesSampleRate);
protected:
int parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples);
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int numAudioSamples);
int writeDroppableSilentSamples(int silentSamples);
int writeLastFrameRepeatedWithFade(int samples);
int parseAudioData(PacketType type, const QByteArray& packetAfterStreamProperties, int networkSamples);
private:
int networkToDeviceSamples(int networkSamples);
int deviceToNetworkSamples(int deviceSamples);
private:
int _outputFormatChannelsTimesSampleRate;
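Because parseAudioData() emits processSamples() and immediately reads outputBuffer, a consumer must fill the buffer synchronously; a direct connection is one way to guarantee that. A wiring sketch, assuming the forward-declared Audio class exposes a compatible slot (processReceivedSamples is an assumed name, not from this diff):

    QObject::connect(stream, &MixedProcessedAudioStream::processSamples,
                     audio, &Audio::processReceivedSamples,  // hypothetical slot on the Audio class
                     Qt::DirectConnection);                  // runs synchronously in the emitting thread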

View file

@ -21,32 +21,41 @@
#include <PacketHeaders.h>
#include <UUID.h>
PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, bool dynamicJitterBuffers,
int staticDesiredJitterBufferFrames, int maxFramesOverDesired) :
PositionalAudioStream::PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, const InboundAudioStream::Settings& settings) :
InboundAudioStream(isStereo ? NETWORK_BUFFER_LENGTH_SAMPLES_STEREO : NETWORK_BUFFER_LENGTH_SAMPLES_PER_CHANNEL,
AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, dynamicJitterBuffers, staticDesiredJitterBufferFrames, maxFramesOverDesired),
AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY, settings),
_type(type),
_position(0.0f, 0.0f, 0.0f),
_orientation(0.0f, 0.0f, 0.0f, 0.0f),
_shouldLoopbackForNode(false),
_isStereo(isStereo),
_lastPopOutputTrailingLoudness(0.0f),
_lastPopOutputLoudness(0.0f),
_listenerUnattenuatedZone(NULL)
{
// this constant is defined in AudioMixer.h, but we don't want to include that here;
// we will soon find a better common home for these audio-related constants
const int SAMPLE_PHASE_DELAY_AT_90 = 20;
_filter.initialize(SAMPLE_RATE, (NETWORK_BUFFER_LENGTH_SAMPLES_STEREO + (SAMPLE_PHASE_DELAY_AT_90 * 2)) / 2);
}
void PositionalAudioStream::updateLastPopOutputTrailingLoudness() {
float lastPopLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput);
void PositionalAudioStream::resetStats() {
_lastPopOutputTrailingLoudness = 0.0f;
_lastPopOutputLoudness = 0.0f;
}
void PositionalAudioStream::updateLastPopOutputLoudnessAndTrailingLoudness() {
_lastPopOutputLoudness = _ringBuffer.getFrameLoudness(_lastPopOutput);
const int TRAILING_AVERAGE_FRAMES = 100;
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
const float LOUDNESS_EPSILON = 0.000001f;
if (lastPopLoudness >= _lastPopOutputTrailingLoudness) {
_lastPopOutputTrailingLoudness = lastPopLoudness;
if (_lastPopOutputLoudness >= _lastPopOutputTrailingLoudness) {
_lastPopOutputTrailingLoudness = _lastPopOutputLoudness;
} else {
_lastPopOutputTrailingLoudness = (_lastPopOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * lastPopLoudness);
_lastPopOutputTrailingLoudness = (_lastPopOutputTrailingLoudness * PREVIOUS_FRAMES_RATIO) + (CURRENT_FRAME_RATIO * _lastPopOutputLoudness);
if (_lastPopOutputTrailingLoudness < LOUDNESS_EPSILON) {
_lastPopOutputTrailingLoudness = 0;

View file

@ -16,6 +16,8 @@
#include <AABox.h>
#include "InboundAudioStream.h"
#include "AudioFilter.h"
#include "AudioFilterBank.h"
const int AUDIOMIXER_INBOUND_RING_BUFFER_FRAME_CAPACITY = 100;
@ -27,13 +29,15 @@ public:
Injector
};
PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, bool dynamicJitterBuffers, int staticDesiredJitterBufferFrames,
int maxFramesOverDesired);
PositionalAudioStream(PositionalAudioStream::Type type, bool isStereo, const InboundAudioStream::Settings& settings);
virtual void resetStats();
virtual AudioStreamStats getAudioStreamStats() const;
void updateLastPopOutputTrailingLoudness();
void updateLastPopOutputLoudnessAndTrailingLoudness();
float getLastPopOutputTrailingLoudness() const { return _lastPopOutputTrailingLoudness; }
float getLastPopOutputLoudness() const { return _lastPopOutputLoudness; }
bool shouldLoopbackForNode() const { return _shouldLoopbackForNode; }
bool isStereo() const { return _isStereo; }
@ -44,6 +48,8 @@ public:
void setListenerUnattenuatedZone(AABox* listenerUnattenuatedZone) { _listenerUnattenuatedZone = listenerUnattenuatedZone; }
AudioFilterHSF1s& getFilter() { return _filter; }
protected:
// disallow copying of PositionalAudioStream objects
PositionalAudioStream(const PositionalAudioStream&);
@ -60,7 +66,10 @@ protected:
bool _isStereo;
float _lastPopOutputTrailingLoudness;
float _lastPopOutputLoudness;
AABox* _listenerUnattenuatedZone;
AudioFilterHSF1s _filter;
};
#endif // hifi_PositionalAudioStream_h

View file

@ -135,9 +135,9 @@ QByteArray AvatarData::toByteArray() {
// lazily allocate memory for HeadData in case we're not an Avatar instance
if (!_headData) {
_headData = new HeadData(this);
if (_forceFaceshiftConnected) {
_headData->_isFaceshiftConnected = true;
}
}
if (_forceFaceshiftConnected) {
_headData->_isFaceshiftConnected = true;
}
QByteArray avatarDataByteArray;
@ -153,7 +153,7 @@ QByteArray AvatarData::toByteArray() {
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyYaw);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyPitch);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _bodyRoll);
// Body scale
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _targetScale);
@ -585,6 +585,101 @@ bool AvatarData::hasReferential() {
return _referential != NULL;
}
bool AvatarData::isPlaying() {
if (!_player) {
return false;
}
if (QThread::currentThread() != thread()) {
bool result;
QMetaObject::invokeMethod(this, "isPlaying", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, result));
return result;
}
return _player && _player->isPlaying();
}
qint64 AvatarData::playerElapsed() {
if (!_player) {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
QMetaObject::invokeMethod(this, "playerElapsed", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
return result;
}
return _player->elapsed();
}
qint64 AvatarData::playerLength() {
if (!_player) {
return 0;
}
if (QThread::currentThread() != thread()) {
qint64 result;
QMetaObject::invokeMethod(this, "playerLength", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(qint64, result));
return result;
}
return _player->getRecording()->getLength();
}
void AvatarData::loadRecording(QString filename) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "loadRecording", Qt::BlockingQueuedConnection,
Q_ARG(QString, filename));
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
_player->loadFromFile(filename);
}
void AvatarData::startPlaying() {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "startPlaying", Qt::BlockingQueuedConnection);
return;
}
if (!_player) {
_player = PlayerPointer(new Player(this));
}
_player->startPlaying();
}
void AvatarData::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
    if (_player) {
        _player->setPlayFromCurrentLocation(playFromCurrentLocation);
    }
}
void AvatarData::setPlayerLoop(bool loop) {
    if (_player) {
        _player->setLoop(loop);
    }
}
void AvatarData::play() {
if (isPlaying()) {
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "play", Qt::BlockingQueuedConnection);
return;
}
_player->play();
}
}
void AvatarData::stopPlaying() {
if (!_player) {
return;
}
if (QThread::currentThread() != thread()) {
QMetaObject::invokeMethod(this, "stopPlaying", Qt::BlockingQueuedConnection);
return;
}
if (_player) {
_player->stopPlaying();
}
}
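A usage sketch for the player slots added above; the file name is illustrative, and a recording must be loaded before the loop and location setters have any effect (see the guards above):

    avatar->loadRecording("walk.rec");       // illustrative path; creates the player if needed
    avatar->setPlayerLoop(true);
    avatar->setPlayFromCurrentLocation(true);
    avatar->startPlaying();
    // ... later ...
    avatar->stopPlaying();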
void AvatarData::changeReferential(Referential *ref) {
delete _referential;
_referential = ref;
@ -705,6 +800,9 @@ void AvatarData::setJointRotations(QVector<glm::quat> jointRotations) {
"setJointRotations", Qt::BlockingQueuedConnection,
Q_ARG(QVector<glm::quat>, jointRotations));
}
if (_jointData.size() < jointRotations.size()) {
_jointData.resize(jointRotations.size());
}
for (int i = 0; i < jointRotations.size(); ++i) {
if (i < _jointData.size()) {
setJointData(i, jointRotations[i]);

View file

@ -49,6 +49,7 @@ typedef unsigned long long quint64;
#include <Node.h>
#include "Recorder.h"
#include "Referential.h"
#include "HeadData.h"
#include "HandData.h"
@ -298,6 +299,16 @@ public slots:
void setSessionUUID(const QUuid& sessionUUID) { _sessionUUID = sessionUUID; }
bool hasReferential();
bool isPlaying();
qint64 playerElapsed();
qint64 playerLength();
void loadRecording(QString filename);
void startPlaying();
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
void setPlayerLoop(bool loop);
void play();
void stopPlaying();
protected:
QUuid _sessionUUID;
glm::vec3 _position;
@ -351,6 +362,8 @@ protected:
QWeakPointer<Node> _owningAvatarMixer;
QElapsedTimer _lastUpdateTimer;
PlayerPointer _player;
/// Loads the joint indices, names from the FST file (if any)
virtual void updateJointMappings();
void changeReferential(Referential* ref);

View file

@ -41,6 +41,10 @@ public:
void setBasePitch(float pitch) { _basePitch = glm::clamp(pitch, MIN_HEAD_PITCH, MAX_HEAD_PITCH); }
float getBaseRoll() const { return _baseRoll; }
void setBaseRoll(float roll) { _baseRoll = glm::clamp(roll, MIN_HEAD_ROLL, MAX_HEAD_ROLL); }
virtual void setFinalYaw(float finalYaw) { _baseYaw = finalYaw; }
virtual void setFinalPitch(float finalPitch) { _basePitch = finalPitch; }
virtual void setFinalRoll(float finalRoll) { _baseRoll = finalRoll; }
virtual float getFinalYaw() const { return _baseYaw; }
virtual float getFinalPitch() const { return _basePitch; }
virtual float getFinalRoll() const { return _baseRoll; }

View file

@ -15,6 +15,7 @@
#include <QMetaObject>
#include <QObject>
#include "AvatarData.h"
#include "Recorder.h"
void RecordingFrame::setBlendshapeCoefficients(QVector<float> blendshapeCoefficients) {
@ -164,7 +165,10 @@ void Recorder::record(char* samples, int size) {
Player::Player(AvatarData* avatar) :
_recording(new Recording()),
_avatar(avatar),
_audioThread(NULL)
_audioThread(NULL),
_startingScale(1.0f),
_playFromCurrentPosition(true),
_loop(false)
{
_timer.invalidate();
_options.setLoop(false);
@ -228,6 +232,19 @@ void Player::startPlaying() {
_audioThread->start();
QMetaObject::invokeMethod(_injector.data(), "injectAudio", Qt::QueuedConnection);
// Fake faceshift connection
_avatar->setForceFaceshiftConnected(true);
if (_playFromCurrentPosition) {
_startingPosition = _avatar->getPosition();
_startingRotation = _avatar->getOrientation();
_startingScale = _avatar->getTargetScale();
} else {
_startingPosition = _recording->getFrame(0).getTranslation();
_startingRotation = _recording->getFrame(0).getRotation();
_startingScale = _recording->getFrame(0).getScale();
}
_timer.start();
}
}
@ -251,6 +268,10 @@ void Player::stopPlaying() {
_audioThread, &QThread::deleteLater);
_injector.clear();
_audioThread = NULL;
// Turn off fake faceshift connection
_avatar->setForceFaceshiftConnected(false);
qDebug() << "Recorder::stopPlaying()";
}
@ -269,29 +290,40 @@ void Player::loadRecording(RecordingPointer recording) {
void Player::play() {
computeCurrentFrame();
if (_currentFrame < 0 || _currentFrame >= _recording->getFrameNumber() - 1) {
if (_currentFrame < 0 || (_currentFrame >= _recording->getFrameNumber() - 1)) {
// If it's the end of the recording, stop playing
stopPlaying();
if (_loop) {
startPlaying();
}
return;
}
if (_currentFrame == 0) {
_avatar->setPosition(_recording->getFrame(_currentFrame).getTranslation());
_avatar->setOrientation(_recording->getFrame(_currentFrame).getRotation());
_avatar->setTargetScale(_recording->getFrame(_currentFrame).getScale());
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
} else {
_avatar->setPosition(_recording->getFrame(0).getTranslation() +
_recording->getFrame(_currentFrame).getTranslation());
_avatar->setOrientation(_recording->getFrame(0).getRotation() *
_recording->getFrame(_currentFrame).getRotation());
_avatar->setTargetScale(_recording->getFrame(0).getScale() *
_recording->getFrame(_currentFrame).getScale());
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
// Don't play frame 0; it's only meant to store absolute values
return;
}
_avatar->setPosition(_startingPosition +
glm::inverse(_recording->getFrame(0).getRotation()) * _startingRotation *
_recording->getFrame(_currentFrame).getTranslation());
_avatar->setOrientation(_startingRotation *
_recording->getFrame(_currentFrame).getRotation());
_avatar->setTargetScale(_startingScale *
_recording->getFrame(_currentFrame).getScale());
_avatar->setJointRotations(_recording->getFrame(_currentFrame).getJointRotations());
HeadData* head = const_cast<HeadData*>(_avatar->getHeadData());
if (head) {
head->setBlendshapeCoefficients(_recording->getFrame(_currentFrame).getBlendshapeCoefficients());
head->setLeanSideways(_recording->getFrame(_currentFrame).getLeanSideways());
head->setLeanForward(_recording->getFrame(_currentFrame).getLeanForward());
glm::vec3 eulers = glm::degrees(safeEulerAngles(_recording->getFrame(_currentFrame).getHeadRotation()));
head->setFinalPitch(eulers.x);
head->setFinalYaw(eulers.y);
head->setFinalRoll(eulers.z);
}
_options.setPosition(_avatar->getPosition());
@ -299,6 +331,14 @@ void Player::play() {
_injector->setOptions(_options);
}
void Player::setPlayFromCurrentLocation(bool playFromCurrentLocation) {
_playFromCurrentPosition = playFromCurrentLocation;
}
void Player::setLoop(bool loop) {
_loop = loop;
}
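A note on the re-basing math in play() above: with R0 the recording's frame-0 rotation, Rs the avatar's rotation when playback started, and t the current frame's recorded translation, the avatar is placed at _startingPosition + inverse(R0) * Rs * t, and its orientation becomes Rs * (frame rotation). In other words, recorded motion is replayed relative to wherever, and however oriented, the avatar was when playback began.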
bool Player::computeCurrentFrame() {
if (!isPlaying()) {
_currentFrame = -1;

View file

@ -23,10 +23,10 @@
#include <glm/gtx/quaternion.hpp>
#include <AudioInjector.h>
#include <AvatarData.h>
#include <SharedUtil.h>
#include <Sound.h>
class AvatarData;
class Recorder;
class Recording;
class Player;
@ -97,6 +97,7 @@ private:
QVector<qint32> _timestamps;
QVector<RecordingFrame> _frames;
bool _stereo;
Sound* _audio;
friend class Recorder;
@ -152,6 +153,9 @@ public slots:
void loadRecording(RecordingPointer recording);
void play();
void setPlayFromCurrentLocation(bool playFromCurrentLocation);
void setLoop(bool loop);
private:
bool computeCurrentFrame();
@ -164,6 +168,13 @@ private:
AvatarData* _avatar;
QThread* _audioThread;
glm::vec3 _startingPosition;
glm::quat _startingRotation;
float _startingScale;
bool _playFromCurrentPosition;
bool _loop;
};
void writeRecordingToFile(RecordingPointer recording, QString file);

View file

@ -107,7 +107,7 @@ int Referential::packExtraData(unsigned char *destinationBuffer) const {
int Referential::unpackExtraData(const unsigned char* sourceBuffer, int size) {
_extraDataBuffer.clear();
_extraDataBuffer.setRawData(reinterpret_cast<const char*>(sourceBuffer), size);
_extraDataBuffer.append(reinterpret_cast<const char*>(sourceBuffer), size);
return size;
}

View file

@ -23,8 +23,10 @@ REGISTER_META_OBJECT(QRgbAttribute)
REGISTER_META_OBJECT(PackedNormalAttribute)
REGISTER_META_OBJECT(SpannerQRgbAttribute)
REGISTER_META_OBJECT(SpannerPackedNormalAttribute)
REGISTER_META_OBJECT(HeightfieldTexture)
REGISTER_META_OBJECT(HeightfieldAttribute)
REGISTER_META_OBJECT(HeightfieldColorAttribute)
REGISTER_META_OBJECT(HeightfieldTextureAttribute)
REGISTER_META_OBJECT(SharedObjectAttribute)
REGISTER_META_OBJECT(SharedObjectSetAttribute)
REGISTER_META_OBJECT(SpannerSetAttribute)
@ -49,7 +51,8 @@ AttributeRegistry::AttributeRegistry() :
_spannerNormalAttribute(registerAttribute(new SpannerPackedNormalAttribute("spannerNormal"))),
_spannerMaskAttribute(registerAttribute(new FloatAttribute("spannerMask"))),
_heightfieldAttribute(registerAttribute(new HeightfieldAttribute("heightfield"))),
_heightfieldColorAttribute(registerAttribute(new HeightfieldColorAttribute("heightfieldColor"))) {
_heightfieldColorAttribute(registerAttribute(new HeightfieldColorAttribute("heightfieldColor"))),
_heightfieldTextureAttribute(registerAttribute(new HeightfieldTextureAttribute("heightfieldTexture"))) {
// our baseline LOD threshold is for voxels; spanners and heightfields are a different story
const float SPANNER_LOD_THRESHOLD_MULTIPLIER = 8.0f;
@ -58,6 +61,7 @@ AttributeRegistry::AttributeRegistry() :
const float HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER = 32.0f;
_heightfieldAttribute->setLODThresholdMultiplier(HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER);
_heightfieldColorAttribute->setLODThresholdMultiplier(HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER);
_heightfieldTextureAttribute->setLODThresholdMultiplier(HEIGHTFIELD_LOD_THRESHOLD_MULTIPLIER);
}
static QScriptValue qDebugFunction(QScriptContext* context, QScriptEngine* engine) {
@ -204,6 +208,16 @@ Attribute::Attribute(const QString& name) :
Attribute::~Attribute() {
}
void Attribute::readSubdivided(MetavoxelStreamState& state, void*& value,
const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const {
read(state.base.stream, value, isLeaf);
}
void Attribute::writeSubdivided(MetavoxelStreamState& state, void* value,
const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const {
write(state.base.stream, value, isLeaf);
}
MetavoxelNode* Attribute::createMetavoxelNode(const AttributeValue& value, const MetavoxelNode* original) const {
return new MetavoxelNode(value);
}
@ -260,9 +274,7 @@ MetavoxelNode* Attribute::expandMetavoxelRoot(const MetavoxelNode& root) {
MetavoxelNode* newGrandchild = new MetavoxelNode(attribute);
newChild->setChild((index + j) % MetavoxelNode::CHILD_COUNT, newGrandchild);
}
newChild->mergeChildren(attribute);
}
newParent->mergeChildren(attribute);
return newParent;
}
@ -489,20 +501,19 @@ HeightfieldData::HeightfieldData(const QByteArray& contents) :
_contents(contents) {
}
HeightfieldData::HeightfieldData(Bitstream& in, int bytes, bool color) {
read(in, bytes, color);
HeightfieldData::~HeightfieldData() {
}
enum HeightfieldImage { NULL_HEIGHTFIELD_IMAGE, NORMAL_HEIGHTFIELD_IMAGE, DEFLATED_HEIGHTFIELD_IMAGE };
static QByteArray encodeHeightfieldImage(const QImage& image) {
static QByteArray encodeHeightfieldImage(const QImage& image, bool lossless = false) {
if (image.isNull()) {
return QByteArray(1, NULL_HEIGHTFIELD_IMAGE);
}
QBuffer buffer;
buffer.open(QIODevice::WriteOnly);
const int JPEG_ENCODE_THRESHOLD = 16;
if (image.width() >= JPEG_ENCODE_THRESHOLD && image.height() >= JPEG_ENCODE_THRESHOLD) {
if (image.width() >= JPEG_ENCODE_THRESHOLD && image.height() >= JPEG_ENCODE_THRESHOLD && !lossless) {
qint32 offsetX = image.offset().x(), offsetY = image.offset().y();
buffer.write((char*)&offsetX, sizeof(qint32));
buffer.write((char*)&offsetY, sizeof(qint32));
@ -536,65 +547,93 @@ const QImage decodeHeightfieldImage(const QByteArray& data) {
}
}
HeightfieldData::HeightfieldData(Bitstream& in, int bytes, const HeightfieldDataPointer& reference, bool color) {
HeightfieldHeightData::HeightfieldHeightData(const QByteArray& contents) :
HeightfieldData(contents) {
}
HeightfieldHeightData::HeightfieldHeightData(Bitstream& in, int bytes) {
read(in, bytes);
}
HeightfieldHeightData::HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& reference) {
if (!reference) {
read(in, bytes, color);
read(in, bytes);
return;
}
QMutexLocker locker(&reference->_encodedDeltaMutex);
reference->_encodedDelta = in.readAligned(bytes);
reference->_deltaData = this;
_contents = reference->_contents;
QImage image = decodeHeightfieldImage(reference->_encodedDelta);
QMutexLocker locker(&reference->getEncodedDeltaMutex());
reference->setEncodedDelta(in.readAligned(bytes));
reference->setDeltaData(HeightfieldDataPointer(this));
_contents = reference->getContents();
QImage image = decodeHeightfieldImage(reference->getEncodedDelta());
if (image.isNull()) {
return;
}
QPoint offset = image.offset();
image = image.convertToFormat(QImage::Format_RGB888);
if (offset.x() == 0) {
set(image, color);
set(image);
return;
}
int minX = offset.x() - 1;
int minY = offset.y() - 1;
if (color) {
int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
char* dest = _contents.data() + (minY * size + minX) * COLOR_BYTES;
int destStride = size * COLOR_BYTES;
int srcStride = image.width() * COLOR_BYTES;
for (int y = 0; y < image.height(); y++) {
memcpy(dest, image.constScanLine(y), srcStride);
dest += destStride;
int size = glm::sqrt((float)_contents.size());
char* lineDest = _contents.data() + minY * size + minX;
for (int y = 0; y < image.height(); y++) {
const uchar* src = image.constScanLine(y);
for (char* dest = lineDest, *end = dest + image.width(); dest != end; dest++, src += COLOR_BYTES) {
*dest = *src;
}
} else {
int size = glm::sqrt((float)_contents.size());
char* lineDest = _contents.data() + minY * size + minX;
for (int y = 0; y < image.height(); y++) {
const uchar* src = image.constScanLine(y);
for (char* dest = lineDest, *end = dest + image.width(); dest != end; dest++, src += COLOR_BYTES) {
*dest = *src;
}
lineDest += size;
}
}
HeightfieldHeightData::HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& ancestor,
const glm::vec3& minimum, float size) {
QMutexLocker locker(&_encodedSubdivisionsMutex);
int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1;
if (_encodedSubdivisions.size() <= index) {
_encodedSubdivisions.resize(index + 1);
}
EncodedSubdivision& subdivision = _encodedSubdivisions[index];
subdivision.data = in.readAligned(bytes);
subdivision.ancestor = ancestor;
QImage image = decodeHeightfieldImage(subdivision.data);
if (image.isNull()) {
return;
}
image = image.convertToFormat(QImage::Format_RGB888);
int destSize = image.width();
const uchar* src = image.constBits();
const QByteArray& ancestorContents = ancestor->getContents();
int ancestorSize = glm::sqrt((float)ancestorContents.size());
float ancestorY = minimum.z * ancestorSize;
float ancestorIncrement = size * ancestorSize / destSize;
_contents = QByteArray(destSize * destSize, 0);
char* dest = _contents.data();
for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) {
const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorSize;
float ancestorX = minimum.x * ancestorSize;
for (char* end = dest + destSize; dest != end; src += COLOR_BYTES, ancestorX += ancestorIncrement) {
const uchar* ref = lineRef + (int)ancestorX;
*dest++ = *ref++ + *src;
}
}
}
void HeightfieldData::write(Bitstream& out, bool color) {
void HeightfieldHeightData::write(Bitstream& out) {
QMutexLocker locker(&_encodedMutex);
if (_encoded.isEmpty()) {
QImage image;
if (color) {
int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
image = QImage((uchar*)_contents.data(), size, size, QImage::Format_RGB888);
} else {
int size = glm::sqrt((float)_contents.size());
image = QImage(size, size, QImage::Format_RGB888);
uchar* dest = image.bits();
for (const char* src = _contents.constData(), *end = src + _contents.size(); src != end; src++) {
*dest++ = *src;
*dest++ = *src;
*dest++ = *src;
}
int size = glm::sqrt((float)_contents.size());
image = QImage(size, size, QImage::Format_RGB888);
uchar* dest = image.bits();
for (const char* src = _contents.constData(), *end = src + _contents.size(); src != end; src++) {
*dest++ = *src;
*dest++ = *src;
*dest++ = *src;
}
_encoded = encodeHeightfieldImage(image);
}
@ -602,114 +641,427 @@ void HeightfieldData::write(Bitstream& out, bool color) {
out.writeAligned(_encoded);
}
void HeightfieldData::writeDelta(Bitstream& out, const HeightfieldDataPointer& reference, bool color) {
void HeightfieldHeightData::writeDelta(Bitstream& out, const HeightfieldHeightDataPointer& reference) {
if (!reference || reference->getContents().size() != _contents.size()) {
write(out, color);
write(out);
return;
}
QMutexLocker locker(&reference->_encodedDeltaMutex);
if (reference->_encodedDelta.isEmpty() || reference->_deltaData != this) {
QMutexLocker locker(&reference->getEncodedDeltaMutex());
if (reference->getEncodedDelta().isEmpty() || reference->getDeltaData() != this) {
QImage image;
int minX, minY;
if (color) {
int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
minX = size;
minY = size;
int maxX = -1, maxY = -1;
const char* src = _contents.constData();
const char* ref = reference->_contents.constData();
for (int y = 0; y < size; y++) {
bool difference = false;
for (int x = 0; x < size; x++, src += COLOR_BYTES, ref += COLOR_BYTES) {
if (src[0] != ref[0] || src[1] != ref[1] || src[2] != ref[2]) {
minX = qMin(minX, x);
maxX = qMax(maxX, x);
difference = true;
}
}
if (difference) {
minY = qMin(minY, y);
maxY = qMax(maxY, y);
int size = glm::sqrt((float)_contents.size());
int minX = size, minY = size;
int maxX = -1, maxY = -1;
const char* src = _contents.constData();
const char* ref = reference->getContents().constData();
for (int y = 0; y < size; y++) {
bool difference = false;
for (int x = 0; x < size; x++) {
if (*src++ != *ref++) {
minX = qMin(minX, x);
maxX = qMax(maxX, x);
difference = true;
}
}
if (maxX >= minX) {
int width = maxX - minX + 1;
int height = maxY - minY + 1;
image = QImage(width, height, QImage::Format_RGB888);
src = _contents.constData() + (minY * size + minX) * COLOR_BYTES;
int srcStride = size * COLOR_BYTES;
int destStride = width * COLOR_BYTES;
for (int y = 0; y < height; y++) {
memcpy(image.scanLine(y), src, destStride);
src += srcStride;
}
if (difference) {
minY = qMin(minY, y);
maxY = qMax(maxY, y);
}
} else {
int size = glm::sqrt((float)_contents.size());
minX = size;
minY = size;
int maxX = -1, maxY = -1;
const char* src = _contents.constData();
const char* ref = reference->_contents.constData();
for (int y = 0; y < size; y++) {
bool difference = false;
for (int x = 0; x < size; x++) {
if (*src++ != *ref++) {
minX = qMin(minX, x);
maxX = qMax(maxX, x);
difference = true;
}
}
if (difference) {
minY = qMin(minY, y);
maxY = qMax(maxY, y);
}
}
if (maxX >= minX) {
int width = qMax(maxX - minX + 1, 0);
int height = qMax(maxY - minY + 1, 0);
image = QImage(width, height, QImage::Format_RGB888);
const uchar* lineSrc = (const uchar*)_contents.constData() + minY * size + minX;
for (int y = 0; y < height; y++) {
uchar* dest = image.scanLine(y);
for (const uchar* src = lineSrc, *end = src + width; src != end; src++) {
*dest++ = *src;
*dest++ = *src;
*dest++ = *src;
}
lineSrc += size;
}
}
image.setOffset(QPoint(minX + 1, minY + 1));
reference->_encodedDelta = encodeHeightfieldImage(image);
reference->_deltaData = this;
reference->setEncodedDelta(encodeHeightfieldImage(image));
reference->setDeltaData(HeightfieldDataPointer(this));
}
out << reference->_encodedDelta.size();
out.writeAligned(reference->_encodedDelta);
out << reference->getEncodedDelta().size();
out.writeAligned(reference->getEncodedDelta());
}
void HeightfieldData::read(Bitstream& in, int bytes, bool color) {
set(decodeHeightfieldImage(_encoded = in.readAligned(bytes)).convertToFormat(QImage::Format_RGB888), color);
}
void HeightfieldData::set(const QImage& image, bool color) {
if (color) {
_contents.resize(image.width() * image.height() * COLOR_BYTES);
memcpy(_contents.data(), image.constBits(), _contents.size());
void HeightfieldHeightData::writeSubdivided(Bitstream& out, const HeightfieldHeightDataPointer& ancestor,
const glm::vec3& minimum, float size) {
QMutexLocker locker(&_encodedSubdivisionsMutex);
int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1;
if (_encodedSubdivisions.size() <= index) {
_encodedSubdivisions.resize(index + 1);
}
EncodedSubdivision& subdivision = _encodedSubdivisions[index];
if (subdivision.data.isEmpty() || subdivision.ancestor != ancestor) {
QImage image;
const QByteArray& ancestorContents = ancestor->getContents();
const uchar* src = (const uchar*)_contents.constData();
} else {
_contents.resize(image.width() * image.height());
char* dest = _contents.data();
for (const uchar* src = image.constBits(), *end = src + _contents.size() * COLOR_BYTES;
src != end; src += COLOR_BYTES) {
*dest++ = *src;
int destSize = glm::sqrt((float)_contents.size());
image = QImage(destSize, destSize, QImage::Format_RGB888);
uchar* dest = image.bits();
int ancestorSize = glm::sqrt((float)ancestorContents.size());
float ancestorY = minimum.z * ancestorSize;
float ancestorIncrement = size * ancestorSize / destSize;
for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) {
const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorSize;
float ancestorX = minimum.x * ancestorSize;
for (const uchar* end = src + destSize; src != end; ancestorX += ancestorIncrement) {
const uchar* ref = lineRef + (int)ancestorX;
uchar difference = *src++ - *ref;
*dest++ = difference;
*dest++ = difference;
*dest++ = difference;
}
}
subdivision.data = encodeHeightfieldImage(image, true);
subdivision.ancestor = ancestor;
}
out << subdivision.data.size();
out.writeAligned(subdivision.data);
}
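A worked example of the subdivision index used above: a relative cell size of 0.5 gives round(log(0.5)/log(0.5)) - 1 = 0, size 0.25 gives 1, size 0.125 gives 2, and so on; each halving of a cell thus gets its own cached slot in _encodedSubdivisions.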
void HeightfieldHeightData::read(Bitstream& in, int bytes) {
set(decodeHeightfieldImage(_encoded = in.readAligned(bytes)).convertToFormat(QImage::Format_RGB888));
}
void HeightfieldHeightData::set(const QImage& image) {
_contents.resize(image.width() * image.height());
char* dest = _contents.data();
for (const uchar* src = image.constBits(), *end = src + _contents.size() * COLOR_BYTES;
src != end; src += COLOR_BYTES) {
*dest++ = *src;
}
}
HeightfieldColorData::HeightfieldColorData(const QByteArray& contents) :
HeightfieldData(contents) {
}
HeightfieldColorData::HeightfieldColorData(Bitstream& in, int bytes) {
read(in, bytes);
}
HeightfieldColorData::HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& reference) {
if (!reference) {
read(in, bytes);
return;
}
QMutexLocker locker(&reference->getEncodedDeltaMutex());
reference->setEncodedDelta(in.readAligned(bytes));
reference->setDeltaData(HeightfieldDataPointer(this));
_contents = reference->getContents();
QImage image = decodeHeightfieldImage(reference->getEncodedDelta());
if (image.isNull()) {
return;
}
QPoint offset = image.offset();
image = image.convertToFormat(QImage::Format_RGB888);
if (offset.x() == 0) {
set(image);
return;
}
int minX = offset.x() - 1;
int minY = offset.y() - 1;
int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
char* dest = _contents.data() + (minY * size + minX) * COLOR_BYTES;
int destStride = size * COLOR_BYTES;
int srcStride = image.width() * COLOR_BYTES;
for (int y = 0; y < image.height(); y++) {
memcpy(dest, image.constScanLine(y), srcStride);
dest += destStride;
}
}
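Note the offset convention shared by these delta codecs: a stored image offset of 0 marks a full-image replacement, so genuine delta rectangles are written with their offsets biased by +1 and the reader subtracts 1 to recover minX/minY, as in the constructor above.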
HeightfieldColorData::HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& ancestor,
const glm::vec3& minimum, float size) {
QMutexLocker locker(&_encodedSubdivisionsMutex);
int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1;
if (_encodedSubdivisions.size() <= index) {
_encodedSubdivisions.resize(index + 1);
}
EncodedSubdivision& subdivision = _encodedSubdivisions[index];
subdivision.data = in.readAligned(bytes);
subdivision.ancestor = ancestor;
QImage image = decodeHeightfieldImage(subdivision.data);
if (image.isNull()) {
return;
}
image = image.convertToFormat(QImage::Format_RGB888);
int destSize = image.width();
const uchar* src = image.constBits();
const QByteArray& ancestorContents = ancestor->getContents();
int ancestorSize = glm::sqrt(ancestorContents.size() / (float)COLOR_BYTES);
float ancestorY = minimum.z * ancestorSize;
float ancestorIncrement = size * ancestorSize / destSize;
int ancestorStride = ancestorSize * COLOR_BYTES;
_contents = QByteArray(destSize * destSize * COLOR_BYTES, 0);
char* dest = _contents.data();
int stride = image.width() * COLOR_BYTES;
for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) {
const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorStride;
float ancestorX = minimum.x * ancestorSize;
for (char* end = dest + stride; dest != end; ancestorX += ancestorIncrement) {
const uchar* ref = lineRef + (int)ancestorX * COLOR_BYTES;
*dest++ = *ref++ + *src++;
*dest++ = *ref++ + *src++;
*dest++ = *ref++ + *src++;
}
}
}
void HeightfieldColorData::write(Bitstream& out) {
QMutexLocker locker(&_encodedMutex);
if (_encoded.isEmpty()) {
QImage image;
int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
image = QImage((uchar*)_contents.data(), size, size, QImage::Format_RGB888);
_encoded = encodeHeightfieldImage(image);
}
out << _encoded.size();
out.writeAligned(_encoded);
}
void HeightfieldColorData::writeDelta(Bitstream& out, const HeightfieldColorDataPointer& reference) {
if (!reference || reference->getContents().size() != _contents.size()) {
write(out);
return;
}
QMutexLocker locker(&reference->getEncodedDeltaMutex());
if (reference->getEncodedDelta().isEmpty() || reference->getDeltaData() != this) {
QImage image;
int size = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
int minX = size, minY = size;
int maxX = -1, maxY = -1;
const char* src = _contents.constData();
const char* ref = reference->getContents().constData();
for (int y = 0; y < size; y++) {
bool difference = false;
for (int x = 0; x < size; x++, src += COLOR_BYTES, ref += COLOR_BYTES) {
if (src[0] != ref[0] || src[1] != ref[1] || src[2] != ref[2]) {
minX = qMin(minX, x);
maxX = qMax(maxX, x);
difference = true;
}
}
if (difference) {
minY = qMin(minY, y);
maxY = qMax(maxY, y);
}
}
if (maxX >= minX) {
int width = maxX - minX + 1;
int height = maxY - minY + 1;
image = QImage(width, height, QImage::Format_RGB888);
src = _contents.constData() + (minY * size + minX) * COLOR_BYTES;
int srcStride = size * COLOR_BYTES;
int destStride = width * COLOR_BYTES;
for (int y = 0; y < height; y++) {
memcpy(image.scanLine(y), src, destStride);
src += srcStride;
}
}
image.setOffset(QPoint(minX + 1, minY + 1));
reference->setEncodedDelta(encodeHeightfieldImage(image));
reference->setDeltaData(HeightfieldDataPointer(this));
}
out << reference->getEncodedDelta().size();
out.writeAligned(reference->getEncodedDelta());
}
void HeightfieldColorData::writeSubdivided(Bitstream& out, const HeightfieldColorDataPointer& ancestor,
const glm::vec3& minimum, float size) {
QMutexLocker locker(&_encodedSubdivisionsMutex);
int index = (int)glm::round(glm::log(size) / glm::log(0.5f)) - 1;
if (_encodedSubdivisions.size() <= index) {
_encodedSubdivisions.resize(index + 1);
}
EncodedSubdivision& subdivision = _encodedSubdivisions[index];
if (subdivision.data.isEmpty() || subdivision.ancestor != ancestor) {
QImage image;
const QByteArray& ancestorContents = ancestor->getContents();
const uchar* src = (const uchar*)_contents.constData();
int destSize = glm::sqrt(_contents.size() / (float)COLOR_BYTES);
image = QImage(destSize, destSize, QImage::Format_RGB888);
uchar* dest = image.bits();
int stride = destSize * COLOR_BYTES;
int ancestorSize = glm::sqrt(ancestorContents.size() / (float)COLOR_BYTES);
float ancestorY = minimum.z * ancestorSize;
float ancestorIncrement = size * ancestorSize / destSize;
int ancestorStride = ancestorSize * COLOR_BYTES;
for (int y = 0; y < destSize; y++, ancestorY += ancestorIncrement) {
const uchar* lineRef = (const uchar*)ancestorContents.constData() + (int)ancestorY * ancestorStride;
float ancestorX = minimum.x * ancestorSize;
for (const uchar* end = src + stride; src != end; ancestorX += ancestorIncrement) {
const uchar* ref = lineRef + (int)ancestorX * COLOR_BYTES;
*dest++ = *src++ - *ref++;
*dest++ = *src++ - *ref++;
*dest++ = *src++ - *ref++;
}
}
subdivision.data = encodeHeightfieldImage(image, true);
subdivision.ancestor = ancestor;
}
out << subdivision.data.size();
out.writeAligned(subdivision.data);
}
void HeightfieldColorData::read(Bitstream& in, int bytes) {
set(decodeHeightfieldImage(_encoded = in.readAligned(bytes)).convertToFormat(QImage::Format_RGB888));
}
void HeightfieldColorData::set(const QImage& image) {
_contents.resize(image.width() * image.height() * COLOR_BYTES);
memcpy(_contents.data(), image.constBits(), _contents.size());
}
const int TEXTURE_HEADER_SIZE = sizeof(qint32) * 4;
static QByteArray encodeTexture(int offsetX, int offsetY, int width, int height, const QByteArray& contents) {
QByteArray inflated(TEXTURE_HEADER_SIZE, 0);
qint32* header = (qint32*)inflated.data();
*header++ = offsetX;
*header++ = offsetY;
*header++ = width;
*header++ = height;
inflated.append(contents);
return qCompress(inflated);
}
static QByteArray decodeTexture(const QByteArray& encoded, int& offsetX, int& offsetY, int& width, int& height) {
QByteArray inflated = qUncompress(encoded);
const qint32* header = (const qint32*)inflated.constData();
offsetX = *header++;
offsetY = *header++;
width = *header++;
height = *header++;
return inflated.mid(TEXTURE_HEADER_SIZE);
}
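A round-trip sketch for the texture codec above (sizes are illustrative):

    QByteArray texels(16 * 16, 0);                             // 16x16 one-byte texture indices
    QByteArray encoded = encodeTexture(0, 0, 16, 16, texels);  // qCompress with a 4-int header
    int offsetX, offsetY, width, height;
    QByteArray decoded = decodeTexture(encoded, offsetX, offsetY, width, height);
    // decoded now equals texels, with offsetX == offsetY == 0 and width == height == 16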
HeightfieldTextureData::HeightfieldTextureData(const QByteArray& contents, const QVector<SharedObjectPointer>& textures) :
HeightfieldData(contents),
_textures(textures) {
}
HeightfieldTextureData::HeightfieldTextureData(Bitstream& in, int bytes) {
read(in, bytes);
}
HeightfieldTextureData::HeightfieldTextureData(Bitstream& in, int bytes, const HeightfieldTextureDataPointer& reference) {
if (!reference) {
read(in, bytes);
return;
}
QMutexLocker locker(&reference->getEncodedDeltaMutex());
reference->setEncodedDelta(in.readAligned(bytes));
in.readDelta(_textures, reference->getTextures());
reference->setDeltaData(HeightfieldDataPointer(this));
_contents = reference->getContents();
int offsetX, offsetY, width, height;
QByteArray delta = decodeTexture(reference->getEncodedDelta(), offsetX, offsetY, width, height);
if (delta.isEmpty()) {
return;
}
if (offsetX == 0) {
_contents = delta;
return;
}
int minX = offsetX - 1;
int minY = offsetY - 1;
int size = glm::sqrt((float)_contents.size());
const char* src = delta.constData();
char* dest = _contents.data() + minY * size + minX;
for (int y = 0; y < height; y++, src += width, dest += size) {
memcpy(dest, src, width);
}
}
void HeightfieldTextureData::write(Bitstream& out) {
QMutexLocker locker(&_encodedMutex);
if (_encoded.isEmpty()) {
int size = glm::sqrt((float)_contents.size());
_encoded = encodeTexture(0, 0, size, size, _contents);
}
out << _encoded.size();
out.writeAligned(_encoded);
out << _textures;
}
void HeightfieldTextureData::writeDelta(Bitstream& out, const HeightfieldTextureDataPointer& reference) {
if (!reference || reference->getContents().size() != _contents.size()) {
write(out);
return;
}
QMutexLocker locker(&reference->getEncodedDeltaMutex());
if (reference->getEncodedDelta().isEmpty() || reference->getDeltaData() != this) {
int size = glm::sqrt((float)_contents.size());
int minX = size, minY = size;
int maxX = -1, maxY = -1;
const char* src = _contents.constData();
const char* ref = reference->getContents().constData();
for (int y = 0; y < size; y++) {
bool difference = false;
for (int x = 0; x < size; x++) {
if (*src++ != *ref++) {
minX = qMin(minX, x);
maxX = qMax(maxX, x);
difference = true;
}
}
if (difference) {
minY = qMin(minY, y);
maxY = qMax(maxY, y);
}
}
QByteArray delta;
int width = 0, height = 0;
if (maxX >= minX) {
width = maxX - minX + 1;
height = maxY - minY + 1;
delta = QByteArray(width * height, 0);
char* dest = delta.data();
src = _contents.constData() + minY * size + minX;
for (int y = 0; y < height; y++, src += size, dest += width) {
memcpy(dest, src, width);
}
}
reference->setEncodedDelta(encodeTexture(minX + 1, minY + 1, width, height, delta));
reference->setDeltaData(HeightfieldDataPointer(this));
}
out << reference->getEncodedDelta().size();
out.writeAligned(reference->getEncodedDelta());
out.writeDelta(_textures, reference->getTextures());
}
void HeightfieldTextureData::read(Bitstream& in, int bytes) {
int offsetX, offsetY, width, height;
_contents = decodeTexture(_encoded = in.readAligned(bytes), offsetX, offsetY, width, height);
in >> _textures;
}
HeightfieldTexture::HeightfieldTexture() :
_scaleS(1.0f),
_scaleT(1.0f) {
}
HeightfieldAttribute::HeightfieldAttribute(const QString& name) :
InlineAttribute<HeightfieldDataPointer>(name) {
InlineAttribute<HeightfieldHeightDataPointer>(name) {
}
void HeightfieldAttribute::read(Bitstream& in, void*& value, bool isLeaf) const {
@ -719,9 +1071,9 @@ void HeightfieldAttribute::read(Bitstream& in, void*& value, bool isLeaf) const
int size;
in >> size;
if (size == 0) {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer();
*(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer();
} else {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData(in, size, false));
*(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer(new HeightfieldHeightData(in, size));
}
}
@ -729,9 +1081,9 @@ void HeightfieldAttribute::write(Bitstream& out, void* value, bool isLeaf) const
if (!isLeaf) {
return;
}
HeightfieldDataPointer data = decodeInline<HeightfieldDataPointer>(value);
HeightfieldHeightDataPointer data = decodeInline<HeightfieldHeightDataPointer>(value);
if (data) {
data->write(out, false);
data->write(out);
} else {
out << 0;
}
@ -744,10 +1096,10 @@ void HeightfieldAttribute::readDelta(Bitstream& in, void*& value, void* referenc
int size;
in >> size;
if (size == 0) {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer();
*(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer();
} else {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData(
in, size, decodeInline<HeightfieldDataPointer>(reference), false));
*(HeightfieldHeightDataPointer*)&value = HeightfieldHeightDataPointer(new HeightfieldHeightData(
in, size, decodeInline<HeightfieldHeightDataPointer>(reference)));
}
}
@ -755,9 +1107,9 @@ void HeightfieldAttribute::writeDelta(Bitstream& out, void* value, void* referen
if (!isLeaf) {
return;
}
HeightfieldDataPointer data = decodeInline<HeightfieldDataPointer>(value);
HeightfieldHeightDataPointer data = decodeInline<HeightfieldHeightDataPointer>(value);
if (data) {
data->writeDelta(out, decodeInline<HeightfieldDataPointer>(reference), false);
data->writeDelta(out, decodeInline<HeightfieldHeightDataPointer>(reference));
} else {
out << 0;
}
@ -766,20 +1118,20 @@ void HeightfieldAttribute::writeDelta(Bitstream& out, void* value, void* referen
bool HeightfieldAttribute::merge(void*& parent, void* children[], bool postRead) const {
int maxSize = 0;
for (int i = 0; i < MERGE_COUNT; i++) {
HeightfieldDataPointer pointer = decodeInline<HeightfieldDataPointer>(children[i]);
HeightfieldHeightDataPointer pointer = decodeInline<HeightfieldHeightDataPointer>(children[i]);
if (pointer) {
maxSize = qMax(maxSize, pointer->getContents().size());
}
}
if (maxSize == 0) {
*(HeightfieldDataPointer*)&parent = HeightfieldDataPointer();
*(HeightfieldHeightDataPointer*)&parent = HeightfieldHeightDataPointer();
return true;
}
int size = glm::sqrt((float)maxSize);
QByteArray contents(size * size, 0);
int halfSize = size / 2;
for (int i = 0; i < MERGE_COUNT; i++) {
HeightfieldDataPointer child = decodeInline<HeightfieldDataPointer>(children[i]);
HeightfieldHeightDataPointer child = decodeInline<HeightfieldHeightDataPointer>(children[i]);
if (!child) {
continue;
}
@ -789,7 +1141,7 @@ bool HeightfieldAttribute::merge(void*& parent, void* children[], bool postRead)
int xIndex = i & INDEX_MASK;
const int Y_SHIFT = 1;
int yIndex = (i >> Y_SHIFT) & INDEX_MASK;
if (yIndex == 0 && decodeInline<HeightfieldDataPointer>(children[i | (1 << Y_SHIFT)])) {
if (yIndex == 0 && decodeInline<HeightfieldHeightDataPointer>(children[i | (1 << Y_SHIFT)])) {
continue; // bottom is overridden by top
}
const int HALF_RANGE = 128;
@ -828,12 +1180,12 @@ bool HeightfieldAttribute::merge(void*& parent, void* children[], bool postRead)
}
}
}
*(HeightfieldDataPointer*)&parent = HeightfieldDataPointer(new HeightfieldData(contents));
*(HeightfieldHeightDataPointer*)&parent = HeightfieldHeightDataPointer(new HeightfieldHeightData(contents));
return false;
}
HeightfieldColorAttribute::HeightfieldColorAttribute(const QString& name) :
InlineAttribute<HeightfieldDataPointer>(name) {
InlineAttribute<HeightfieldColorDataPointer>(name) {
}
void HeightfieldColorAttribute::read(Bitstream& in, void*& value, bool isLeaf) const {
@ -843,9 +1195,9 @@ void HeightfieldColorAttribute::read(Bitstream& in, void*& value, bool isLeaf) c
int size;
in >> size;
if (size == 0) {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer();
*(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer();
} else {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData(in, size, true));
*(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer(new HeightfieldColorData(in, size));
}
}
@ -853,9 +1205,9 @@ void HeightfieldColorAttribute::write(Bitstream& out, void* value, bool isLeaf)
if (!isLeaf) {
return;
}
HeightfieldDataPointer data = decodeInline<HeightfieldDataPointer>(value);
HeightfieldColorDataPointer data = decodeInline<HeightfieldColorDataPointer>(value);
if (data) {
data->write(out, true);
data->write(out);
} else {
out << 0;
}
@ -868,10 +1220,10 @@ void HeightfieldColorAttribute::readDelta(Bitstream& in, void*& value, void* ref
int size;
in >> size;
if (size == 0) {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer();
*(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer();
} else {
*(HeightfieldDataPointer*)&value = HeightfieldDataPointer(new HeightfieldData(
in, size, decodeInline<HeightfieldDataPointer>(reference), true));
*(HeightfieldColorDataPointer*)&value = HeightfieldColorDataPointer(new HeightfieldColorData(
in, size, decodeInline<HeightfieldColorDataPointer>(reference)));
}
}
@ -879,9 +1231,9 @@ void HeightfieldColorAttribute::writeDelta(Bitstream& out, void* value, void* re
if (!isLeaf) {
return;
}
HeightfieldDataPointer data = decodeInline<HeightfieldDataPointer>(value);
HeightfieldColorDataPointer data = decodeInline<HeightfieldColorDataPointer>(value);
if (data) {
data->writeDelta(out, decodeInline<HeightfieldDataPointer>(reference), true);
data->writeDelta(out, decodeInline<HeightfieldColorDataPointer>(reference));
} else {
out << 0;
}
@ -890,20 +1242,20 @@ void HeightfieldColorAttribute::writeDelta(Bitstream& out, void* value, void* re
bool HeightfieldColorAttribute::merge(void*& parent, void* children[], bool postRead) const {
int maxSize = 0;
for (int i = 0; i < MERGE_COUNT; i++) {
HeightfieldDataPointer pointer = decodeInline<HeightfieldDataPointer>(children[i]);
HeightfieldColorDataPointer pointer = decodeInline<HeightfieldColorDataPointer>(children[i]);
if (pointer) {
maxSize = qMax(maxSize, pointer->getContents().size());
}
}
if (maxSize == 0) {
*(HeightfieldDataPointer*)&parent = HeightfieldDataPointer();
*(HeightfieldColorDataPointer*)&parent = HeightfieldColorDataPointer();
return true;
}
int size = glm::sqrt(maxSize / (float)HeightfieldData::COLOR_BYTES);
QByteArray contents(size * size * HeightfieldData::COLOR_BYTES, 0);
int halfSize = size / 2;
for (int i = 0; i < MERGE_COUNT; i++) {
HeightfieldDataPointer child = decodeInline<HeightfieldDataPointer>(children[i]);
HeightfieldColorDataPointer child = decodeInline<HeightfieldColorDataPointer>(children[i]);
if (!child) {
continue;
}
@ -913,7 +1265,7 @@ bool HeightfieldColorAttribute::merge(void*& parent, void* children[], bool post
int xIndex = i & INDEX_MASK;
const int Y_SHIFT = 1;
int yIndex = (i >> Y_SHIFT) & INDEX_MASK;
if (yIndex == 0 && decodeInline<HeightfieldDataPointer>(children[i | (1 << Y_SHIFT)])) {
if (yIndex == 0 && decodeInline<HeightfieldColorDataPointer>(children[i | (1 << Y_SHIFT)])) {
continue; // bottom is overridden by top
}
int Z_SHIFT = 2;
@ -967,10 +1319,77 @@ bool HeightfieldColorAttribute::merge(void*& parent, void* children[], bool post
}
}
}
*(HeightfieldDataPointer*)&parent = HeightfieldDataPointer(new HeightfieldData(contents));
*(HeightfieldColorDataPointer*)&parent = HeightfieldColorDataPointer(new HeightfieldColorData(contents));
return false;
}
HeightfieldTextureAttribute::HeightfieldTextureAttribute(const QString& name) :
InlineAttribute<HeightfieldTextureDataPointer>(name) {
}
void HeightfieldTextureAttribute::read(Bitstream& in, void*& value, bool isLeaf) const {
if (!isLeaf) {
return;
}
int size;
in >> size;
if (size == 0) {
*(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer();
} else {
*(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer(new HeightfieldTextureData(in, size));
}
}
void HeightfieldTextureAttribute::write(Bitstream& out, void* value, bool isLeaf) const {
if (!isLeaf) {
return;
}
HeightfieldTextureDataPointer data = decodeInline<HeightfieldTextureDataPointer>(value);
if (data) {
data->write(out);
} else {
out << 0;
}
}
void HeightfieldTextureAttribute::readDelta(Bitstream& in, void*& value, void* reference, bool isLeaf) const {
if (!isLeaf) {
return;
}
int size;
in >> size;
if (size == 0) {
*(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer();
} else {
*(HeightfieldTextureDataPointer*)&value = HeightfieldTextureDataPointer(new HeightfieldTextureData(
in, size, decodeInline<HeightfieldTextureDataPointer>(reference)));
}
}
void HeightfieldTextureAttribute::writeDelta(Bitstream& out, void* value, void* reference, bool isLeaf) const {
if (!isLeaf) {
return;
}
HeightfieldTextureDataPointer data = decodeInline<HeightfieldTextureDataPointer>(value);
if (data) {
data->writeDelta(out, decodeInline<HeightfieldTextureDataPointer>(reference));
} else {
out << 0;
}
}
bool HeightfieldTextureAttribute::merge(void*& parent, void* children[], bool postRead) const {
int maxSize = 0;
for (int i = 0; i < MERGE_COUNT; i++) {
HeightfieldTextureDataPointer pointer = decodeInline<HeightfieldTextureDataPointer>(children[i]);
if (pointer) {
maxSize = qMax(maxSize, pointer->getContents().size());
}
}
*(HeightfieldTextureDataPointer*)&parent = HeightfieldTextureDataPointer();
return maxSize == 0;
}
SharedObjectAttribute::SharedObjectAttribute(const QString& name, const QMetaObject* metaObject,
const SharedObjectPointer& defaultValue) :
InlineAttribute<SharedObjectPointer>(name, defaultValue),
@ -1082,9 +1501,7 @@ MetavoxelNode* SharedObjectSetAttribute::expandMetavoxelRoot(const MetavoxelNode
MetavoxelNode* newGrandchild = new MetavoxelNode(attribute);
newChild->setChild((index + j) % MetavoxelNode::CHILD_COUNT, newGrandchild);
}
newChild->mergeChildren(attribute);
}
newParent->mergeChildren(attribute);
return newParent;
}

View file

@ -18,6 +18,7 @@
#include <QReadWriteLock>
#include <QSharedPointer>
#include <QString>
#include <QUrl>
#include <QWidget>
#include "Bitstream.h"
@ -28,7 +29,10 @@ class QScriptEngine;
class QScriptValue;
class Attribute;
class HeightfieldColorData;
class HeightfieldData;
class HeightfieldHeightData;
class HeightfieldTextureData;
class MetavoxelData;
class MetavoxelLOD;
class MetavoxelNode;
@ -96,12 +100,15 @@ public:
/// Returns a reference to the standard "spannerMask" attribute.
const AttributePointer& getSpannerMaskAttribute() const { return _spannerMaskAttribute; }
/// Returns a reference to the standard HeightfieldPointer "heightfield" attribute.
/// Returns a reference to the standard HeightfieldDataPointer "heightfield" attribute.
const AttributePointer& getHeightfieldAttribute() const { return _heightfieldAttribute; }
/// Returns a reference to the standard HeightfieldColorPointer "heightfieldColor" attribute.
/// Returns a reference to the standard HeightfieldDataPointer "heightfieldColor" attribute.
const AttributePointer& getHeightfieldColorAttribute() const { return _heightfieldColorAttribute; }
/// Returns a reference to the standard HeightfieldDataPointer "heightfieldTexture" attribute.
const AttributePointer& getHeightfieldTextureAttribute() const { return _heightfieldTextureAttribute; }
private:
static QScriptValue getAttribute(QScriptContext* context, QScriptEngine* engine);
@ -119,6 +126,7 @@ private:
AttributePointer _spannerMaskAttribute;
AttributePointer _heightfieldAttribute;
AttributePointer _heightfieldColorAttribute;
AttributePointer _heightfieldTextureAttribute;
};
/// Converts a value to a void pointer.
@ -221,6 +229,11 @@ public:
virtual void readDelta(Bitstream& in, void*& value, void* reference, bool isLeaf) const { read(in, value, isLeaf); }
virtual void writeDelta(Bitstream& out, void* value, void* reference, bool isLeaf) const { write(out, value, isLeaf); }
virtual void readSubdivided(MetavoxelStreamState& state, void*& value,
const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const;
virtual void writeSubdivided(MetavoxelStreamState& state, void* value,
const MetavoxelStreamState& ancestorState, void* ancestorValue, bool isLeaf) const;
virtual MetavoxelNode* createMetavoxelNode(const AttributeValue& value, const MetavoxelNode* original) const;
virtual void readMetavoxelRoot(MetavoxelData& data, MetavoxelStreamState& state);
@ -430,19 +443,20 @@ public:
static const int COLOR_BYTES = 3;
HeightfieldData(const QByteArray& contents);
HeightfieldData(Bitstream& in, int bytes, bool color);
HeightfieldData(Bitstream& in, int bytes, const HeightfieldDataPointer& reference, bool color);
HeightfieldData(const QByteArray& contents = QByteArray());
virtual ~HeightfieldData();
const QByteArray& getContents() const { return _contents; }
void write(Bitstream& out, bool color);
void writeDelta(Bitstream& out, const HeightfieldDataPointer& reference, bool color);
private:
void setDeltaData(const HeightfieldDataPointer& deltaData) { _deltaData = deltaData; }
const HeightfieldDataPointer& getDeltaData() const { return _deltaData; }
void read(Bitstream& in, int bytes, bool color);
void set(const QImage& image, bool color);
void setEncodedDelta(const QByteArray& encodedDelta) { _encodedDelta = encodedDelta; }
const QByteArray& getEncodedDelta() const { return _encodedDelta; }
QMutex& getEncodedDeltaMutex() { return _encodedDeltaMutex; }
protected:
QByteArray _contents;
QByteArray _encoded;
@ -451,10 +465,110 @@ private:
HeightfieldDataPointer _deltaData;
QByteArray _encodedDelta;
QMutex _encodedDeltaMutex;
class EncodedSubdivision {
public:
HeightfieldDataPointer ancestor;
QByteArray data;
};
QVector<EncodedSubdivision> _encodedSubdivisions;
QMutex _encodedSubdivisionsMutex;
};
typedef QExplicitlySharedDataPointer<HeightfieldHeightData> HeightfieldHeightDataPointer;
/// Contains a block of heightfield height data.
class HeightfieldHeightData : public HeightfieldData {
public:
HeightfieldHeightData(const QByteArray& contents);
HeightfieldHeightData(Bitstream& in, int bytes);
HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& reference);
HeightfieldHeightData(Bitstream& in, int bytes, const HeightfieldHeightDataPointer& ancestor,
const glm::vec3& minimum, float size);
void write(Bitstream& out);
void writeDelta(Bitstream& out, const HeightfieldHeightDataPointer& reference);
void writeSubdivided(Bitstream& out, const HeightfieldHeightDataPointer& ancestor,
const glm::vec3& minimum, float size);
private:
void read(Bitstream& in, int bytes);
void set(const QImage& image);
};
typedef QExplicitlySharedDataPointer<HeightfieldColorData> HeightfieldColorDataPointer;
/// Contains a block of heightfield color data.
class HeightfieldColorData : public HeightfieldData {
public:
HeightfieldColorData(const QByteArray& contents);
HeightfieldColorData(Bitstream& in, int bytes);
HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& reference);
HeightfieldColorData(Bitstream& in, int bytes, const HeightfieldColorDataPointer& ancestor,
const glm::vec3& minimum, float size);
void write(Bitstream& out);
void writeDelta(Bitstream& out, const HeightfieldColorDataPointer& reference);
void writeSubdivided(Bitstream& out, const HeightfieldColorDataPointer& ancestor,
const glm::vec3& minimum, float size);
private:
void read(Bitstream& in, int bytes);
void set(const QImage& image);
};
typedef QExplicitlySharedDataPointer<HeightfieldTextureData> HeightfieldTextureDataPointer;
/// Contains a block of heightfield texture data.
class HeightfieldTextureData : public HeightfieldData {
public:
HeightfieldTextureData(const QByteArray& contents,
const QVector<SharedObjectPointer>& textures = QVector<SharedObjectPointer>());
HeightfieldTextureData(Bitstream& in, int bytes);
HeightfieldTextureData(Bitstream& in, int bytes, const HeightfieldTextureDataPointer& reference);
const QVector<SharedObjectPointer>& getTextures() const { return _textures; }
void write(Bitstream& out);
void writeDelta(Bitstream& out, const HeightfieldTextureDataPointer& reference);
private:
void read(Bitstream& in, int bytes);
QVector<SharedObjectPointer> _textures;
};
/// Contains the description of a heightfield texture.
class HeightfieldTexture : public SharedObject {
Q_OBJECT
Q_PROPERTY(QUrl url MEMBER _url)
Q_PROPERTY(float scaleS MEMBER _scaleS)
Q_PROPERTY(float scaleT MEMBER _scaleT)
public:
Q_INVOKABLE HeightfieldTexture();
const QUrl& getURL() const { return _url; }
float getScaleS() const { return _scaleS; }
float getScaleT() const { return _scaleT; }
private:
QUrl _url;
float _scaleS;
float _scaleT;
};
/// An attribute that stores heightfield data.
class HeightfieldAttribute : public InlineAttribute<HeightfieldDataPointer> {
class HeightfieldAttribute : public InlineAttribute<HeightfieldHeightDataPointer> {
Q_OBJECT
public:
@ -471,7 +585,7 @@ public:
};
/// An attribute that stores heightfield colors.
class HeightfieldColorAttribute : public InlineAttribute<HeightfieldDataPointer> {
class HeightfieldColorAttribute : public InlineAttribute<HeightfieldColorDataPointer> {
Q_OBJECT
public:
@ -487,6 +601,23 @@ public:
virtual bool merge(void*& parent, void* children[], bool postRead = false) const;
};
/// An attribute that stores heightfield textures.
class HeightfieldTextureAttribute : public InlineAttribute<HeightfieldTextureDataPointer> {
Q_OBJECT
public:
Q_INVOKABLE HeightfieldTextureAttribute(const QString& name = QString());
virtual void read(Bitstream& in, void*& value, bool isLeaf) const;
virtual void write(Bitstream& out, void* value, bool isLeaf) const;
virtual void readDelta(Bitstream& in, void*& value, void* reference, bool isLeaf) const;
virtual void writeDelta(Bitstream& out, void* value, void* reference, bool isLeaf) const;
virtual bool merge(void*& parent, void* children[], bool postRead = false) const;
};
/// An attribute that takes the form of QObjects of a given meta-type (a subclass of SharedObject).
class SharedObjectAttribute : public InlineAttribute<SharedObjectPointer> {
Q_OBJECT

View file

@ -998,7 +998,7 @@ MetavoxelNode* MetavoxelNode::readSubdivision(MetavoxelStreamState& state) {
for (int i = 0; i < CHILD_COUNT; i++) {
nextState.setMinimum(state.minimum, i);
newNode->_children[i] = new MetavoxelNode(state.base.attribute);
newNode->_children[i]->read(nextState);
newNode->_children[i]->readSubdivided(nextState, state, _attributeValue);
}
return newNode;
}
@ -1037,7 +1037,7 @@ void MetavoxelNode::writeSubdivision(MetavoxelStreamState& state) const {
MetavoxelStreamState nextState = { state.base, glm::vec3(), state.size * 0.5f };
for (int i = 0; i < CHILD_COUNT; i++) {
nextState.setMinimum(state.minimum, i);
_children[i]->write(nextState);
_children[i]->writeSubdivided(nextState, state, _attributeValue);
}
}
} else if (!leaf) {
@ -1051,6 +1051,46 @@ void MetavoxelNode::writeSubdivision(MetavoxelStreamState& state) const {
}
}
void MetavoxelNode::readSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState,
void* ancestorValue) {
clearChildren(state.base.attribute);
if (!state.shouldSubdivide()) {
state.base.attribute->readSubdivided(state, _attributeValue, ancestorState, ancestorValue, true);
return;
}
bool leaf;
state.base.stream >> leaf;
state.base.attribute->readSubdivided(state, _attributeValue, ancestorState, ancestorValue, leaf);
if (!leaf) {
MetavoxelStreamState nextState = { state.base, glm::vec3(), state.size * 0.5f };
for (int i = 0; i < CHILD_COUNT; i++) {
nextState.setMinimum(state.minimum, i);
_children[i] = new MetavoxelNode(state.base.attribute);
_children[i]->readSubdivided(nextState, ancestorState, ancestorValue);
}
mergeChildren(state.base.attribute, true);
}
}
void MetavoxelNode::writeSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState,
void* ancestorValue) const {
if (!state.shouldSubdivide()) {
state.base.attribute->writeSubdivided(state, _attributeValue, ancestorState, ancestorValue, true);
return;
}
bool leaf = isLeaf();
state.base.stream << leaf;
state.base.attribute->writeSubdivided(state, _attributeValue, ancestorState, ancestorValue, leaf);
if (!leaf) {
MetavoxelStreamState nextState = { state.base, glm::vec3(), state.size * 0.5f };
for (int i = 0; i < CHILD_COUNT; i++) {
nextState.setMinimum(state.minimum, i);
_children[i]->writeSubdivided(nextState, ancestorState, ancestorValue);
}
}
}
void MetavoxelNode::writeSpanners(MetavoxelStreamState& state) const {
foreach (const SharedObjectPointer& object, decodeInline<SharedObjectSet>(_attributeValue)) {
if (static_cast<Spanner*>(object.data())->testAndSetVisited(state.base.visit)) {

View file

@ -225,6 +225,9 @@ public:
MetavoxelNode* readSubdivision(MetavoxelStreamState& state);
void writeSubdivision(MetavoxelStreamState& state) const;
void readSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState, void* ancestorValue);
void writeSubdivided(MetavoxelStreamState& state, const MetavoxelStreamState& ancestorState, void* ancestorValue) const;
void writeSpanners(MetavoxelStreamState& state) const;
void writeSpannerDelta(const MetavoxelNode& reference, MetavoxelStreamState& state) const;
void writeSpannerSubdivision(MetavoxelStreamState& state) const;

View file

@ -347,6 +347,8 @@ PaintHeightfieldHeightEditVisitor::PaintHeightfieldHeightEditVisitor(const Paint
_bounds = Box(_edit.position - extents, _edit.position + extents);
}
const int EIGHT_BIT_MAXIMUM = 255;
int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) {
if (!info.getBounds().intersects(_bounds)) {
return STOP_RECURSION;
@ -354,7 +356,7 @@ int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) {
if (!info.isLeaf) {
return DEFAULT_ORDER;
}
HeightfieldDataPointer pointer = info.inputValues.at(0).getInlineValue<HeightfieldDataPointer>();
HeightfieldHeightDataPointer pointer = info.inputValues.at(0).getInlineValue<HeightfieldHeightDataPointer>();
if (!pointer) {
return STOP_RECURSION;
}
@ -375,8 +377,7 @@ int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) {
float startX = qMax(start.x, 0.0f), endX = qMin(end.x, (float)highest);
uchar* lineDest = (uchar*)contents.data() + (int)z * size + (int)startX;
float squaredRadius = scaledRadius * scaledRadius;
float squaredRadiusReciprocal = 1.0f / squaredRadius;
const int EIGHT_BIT_MAXIMUM = 255;
float squaredRadiusReciprocal = 1.0f / squaredRadius;
float scaledHeight = _edit.height * EIGHT_BIT_MAXIMUM / info.size;
bool changed = false;
for (float endZ = qMin(end.z, (float)highest); z <= endZ; z += 1.0f) {
@ -396,8 +397,8 @@ int PaintHeightfieldHeightEditVisitor::visit(MetavoxelInfo& info) {
lineDest += size;
}
if (changed) {
HeightfieldDataPointer newPointer(new HeightfieldData(contents));
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline<HeightfieldDataPointer>(newPointer));
HeightfieldHeightDataPointer newPointer(new HeightfieldHeightData(contents));
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline<HeightfieldHeightDataPointer>(newPointer));
}
return STOP_RECURSION;
}
@ -435,25 +436,18 @@ PaintHeightfieldColorEditVisitor::PaintHeightfieldColorEditVisitor(const PaintHe
_bounds = Box(_edit.position - extents, _edit.position + extents);
}
int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) {
if (!info.getBounds().intersects(_bounds)) {
return STOP_RECURSION;
}
if (!info.isLeaf) {
return DEFAULT_ORDER;
}
HeightfieldDataPointer pointer = info.inputValues.at(0).getInlineValue<HeightfieldDataPointer>();
static void paintColor(MetavoxelInfo& info, int index, const glm::vec3& position, float radius, const QColor& color) {
HeightfieldColorDataPointer pointer = info.inputValues.at(index).getInlineValue<HeightfieldColorDataPointer>();
if (!pointer) {
return STOP_RECURSION;
return;
}
QByteArray contents(pointer->getContents());
const int BYTES_PER_PIXEL = 3;
int size = glm::sqrt((float)contents.size() / BYTES_PER_PIXEL);
int size = glm::sqrt((float)contents.size() / HeightfieldData::COLOR_BYTES);
int highest = size - 1;
float heightScale = size / info.size;
glm::vec3 center = (_edit.position - info.minimum) * heightScale;
float scaledRadius = _edit.radius * heightScale;
glm::vec3 center = (position - info.minimum) * heightScale;
float scaledRadius = radius * heightScale;
glm::vec3 extents(scaledRadius, scaledRadius, scaledRadius);
glm::vec3 start = glm::floor(center - extents);
@ -462,14 +456,14 @@ int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) {
// paint all points within the radius
float z = qMax(start.z, 0.0f);
float startX = qMax(start.x, 0.0f), endX = qMin(end.x, (float)highest);
int stride = size * BYTES_PER_PIXEL;
char* lineDest = contents.data() + (int)z * stride + (int)startX * BYTES_PER_PIXEL;
int stride = size * HeightfieldData::COLOR_BYTES;
char* lineDest = contents.data() + (int)z * stride + (int)startX * HeightfieldData::COLOR_BYTES;
float squaredRadius = scaledRadius * scaledRadius;
char red = _edit.color.red(), green = _edit.color.green(), blue = _edit.color.blue();
char red = color.red(), green = color.green(), blue = color.blue();
bool changed = false;
for (float endZ = qMin(end.z, (float)highest); z <= endZ; z += 1.0f) {
char* dest = lineDest;
for (float x = startX; x <= endX; x += 1.0f, dest += BYTES_PER_PIXEL) {
for (float x = startX; x <= endX; x += 1.0f, dest += HeightfieldData::COLOR_BYTES) {
float dx = x - center.x, dz = z - center.z;
if (dx * dx + dz * dz <= squaredRadius) {
dest[0] = red;
@ -481,9 +475,20 @@ int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) {
lineDest += stride;
}
if (changed) {
HeightfieldDataPointer newPointer(new HeightfieldData(contents));
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline<HeightfieldDataPointer>(newPointer));
HeightfieldColorDataPointer newPointer(new HeightfieldColorData(contents));
info.outputValues[index] = AttributeValue(info.inputValues.at(index).getAttribute(),
encodeInline<HeightfieldColorDataPointer>(newPointer));
}
}
int PaintHeightfieldColorEditVisitor::visit(MetavoxelInfo& info) {
if (!info.getBounds().intersects(_bounds)) {
return STOP_RECURSION;
}
if (!info.isLeaf) {
return DEFAULT_ORDER;
}
paintColor(info, 0, _edit.position, _edit.radius, _edit.color);
return STOP_RECURSION;
}
@ -492,3 +497,148 @@ void PaintHeightfieldColorEdit::apply(MetavoxelData& data, const WeakSharedObjec
data.guide(visitor);
}
PaintHeightfieldTextureEdit::PaintHeightfieldTextureEdit(const glm::vec3& position, float radius,
const SharedObjectPointer& texture, const QColor& averageColor) :
position(position),
radius(radius),
texture(texture),
averageColor(averageColor) {
}
class PaintHeightfieldTextureEditVisitor : public MetavoxelVisitor {
public:
PaintHeightfieldTextureEditVisitor(const PaintHeightfieldTextureEdit& edit);
virtual int visit(MetavoxelInfo& info);
private:
PaintHeightfieldTextureEdit _edit;
Box _bounds;
};
PaintHeightfieldTextureEditVisitor::PaintHeightfieldTextureEditVisitor(const PaintHeightfieldTextureEdit& edit) :
MetavoxelVisitor(QVector<AttributePointer>() << AttributeRegistry::getInstance()->getHeightfieldTextureAttribute() <<
AttributeRegistry::getInstance()->getHeightfieldColorAttribute(), QVector<AttributePointer>() <<
AttributeRegistry::getInstance()->getHeightfieldTextureAttribute() <<
AttributeRegistry::getInstance()->getHeightfieldColorAttribute()),
_edit(edit) {
glm::vec3 extents(_edit.radius, _edit.radius, _edit.radius);
_bounds = Box(_edit.position - extents, _edit.position + extents);
}
static QHash<uchar, int> countIndices(const QByteArray& contents) {
QHash<uchar, int> counts;
for (const uchar* src = (const uchar*)contents.constData(), *end = src + contents.size(); src != end; src++) {
if (*src != 0) {
counts[*src]++;
}
}
return counts;
}
int PaintHeightfieldTextureEditVisitor::visit(MetavoxelInfo& info) {
if (!info.getBounds().intersects(_bounds)) {
return STOP_RECURSION;
}
if (!info.isLeaf) {
return DEFAULT_ORDER;
}
HeightfieldTextureDataPointer pointer = info.inputValues.at(0).getInlineValue<HeightfieldTextureDataPointer>();
if (!pointer) {
return STOP_RECURSION;
}
QVector<SharedObjectPointer> textures = pointer->getTextures();
QByteArray contents(pointer->getContents());
uchar textureIndex = 0;
if (_edit.texture && static_cast<HeightfieldTexture*>(_edit.texture.data())->getURL().isValid()) {
// first look for a matching existing texture, noting the first reusable slot
int firstEmptyIndex = -1;
for (int i = 0; i < textures.size(); i++) {
const SharedObjectPointer& texture = textures.at(i);
if (texture) {
if (texture->equals(_edit.texture.data())) {
textureIndex = i + 1;
break;
}
} else if (firstEmptyIndex == -1) {
firstEmptyIndex = i;
}
}
// if nothing found, use the first empty slot or append
if (textureIndex == 0) {
if (firstEmptyIndex != -1) {
textures[firstEmptyIndex] = _edit.texture;
textureIndex = firstEmptyIndex + 1;
} else if (textures.size() < EIGHT_BIT_MAXIMUM) {
textures.append(_edit.texture);
textureIndex = textures.size();
} else {
// last resort: find the least-used texture and remove it
QHash<uchar, int> counts = countIndices(contents);
int lowestCount = INT_MAX;
for (QHash<uchar, int>::const_iterator it = counts.constBegin(); it != counts.constEnd(); it++) {
if (it.value() < lowestCount) {
textureIndex = it.key();
lowestCount = it.value();
}
}
contents.replace((char)textureIndex, (char)0);
}
}
}
int size = glm::sqrt((float)contents.size());
int highest = size - 1;
float heightScale = highest / info.size;
glm::vec3 center = (_edit.position - info.minimum) * heightScale;
float scaledRadius = _edit.radius * heightScale;
glm::vec3 extents(scaledRadius, scaledRadius, scaledRadius);
glm::vec3 start = glm::floor(center - extents);
glm::vec3 end = glm::ceil(center + extents);
// paint all points within the radius
float z = qMax(start.z, 0.0f);
float startX = qMax(start.x, 0.0f), endX = qMin(end.x, (float)highest);
uchar* lineDest = (uchar*)contents.data() + (int)z * size + (int)startX;
float squaredRadius = scaledRadius * scaledRadius;
bool changed = false;
for (float endZ = qMin(end.z, (float)highest); z <= endZ; z += 1.0f) {
uchar* dest = lineDest;
for (float x = startX; x <= endX; x += 1.0f, dest++) {
float dx = x - center.x, dz = z - center.z;
if (dx * dx + dz * dz <= squaredRadius) {
*dest = textureIndex;
changed = true;
}
}
lineDest += size;
}
if (changed) {
// clear any unused textures
QHash<uchar, int> counts = countIndices(contents);
for (int i = 0; i < textures.size(); i++) {
if (counts.value(i + 1) == 0) {
textures[i] = SharedObjectPointer();
}
}
while (!(textures.isEmpty() || textures.last())) {
textures.removeLast();
}
HeightfieldTextureDataPointer newPointer(new HeightfieldTextureData(contents, textures));
info.outputValues[0] = AttributeValue(_outputs.at(0), encodeInline<HeightfieldTextureDataPointer>(newPointer));
}
paintColor(info, 1, _edit.position, _edit.radius, _edit.averageColor);
return STOP_RECURSION;
}
void PaintHeightfieldTextureEdit::apply(MetavoxelData& data, const WeakSharedObjectHash& objects) const {
PaintHeightfieldTextureEditVisitor visitor(*this);
data.guide(visitor);
}

View file

@ -241,4 +241,23 @@ public:
DECLARE_STREAMABLE_METATYPE(PaintHeightfieldColorEdit)
/// An edit that sets a region of a heightfield texture.
class PaintHeightfieldTextureEdit : public MetavoxelEdit {
STREAMABLE
public:
STREAM glm::vec3 position;
STREAM float radius;
STREAM SharedObjectPointer texture;
STREAM QColor averageColor;
PaintHeightfieldTextureEdit(const glm::vec3& position = glm::vec3(), float radius = 0.0f,
const SharedObjectPointer& texture = SharedObjectPointer(), const QColor& averageColor = QColor());
virtual void apply(MetavoxelData& data, const WeakSharedObjectHash& objects) const;
};
DECLARE_STREAMABLE_METATYPE(PaintHeightfieldTextureEdit)
#endif // hifi_MetavoxelMessages_h

View file

@ -158,7 +158,7 @@ SharedObjectEditor::SharedObjectEditor(const QMetaObject* metaObject, bool nulla
_type->addItem("(none)");
}
foreach (const QMetaObject* metaObject, Bitstream::getMetaObjectSubClasses(metaObject)) {
// add add constructable subclasses
// add constructable subclasses
if (metaObject->constructorCount() > 0) {
_type->addItem(metaObject->className(), QVariant::fromValue(metaObject));
}
@ -193,7 +193,9 @@ void SharedObjectEditor::detachObject() {
for (int i = 0; i < form->rowCount(); i++) {
QWidget* widget = form->itemAt(i, QFormLayout::FieldRole)->widget();
QMetaProperty property = metaObject->property(widget->property("propertyIndex").toInt());
connect(_object.data(), signal(property.notifySignal().methodSignature()), SLOT(updateProperty()));
if (property.hasNotifySignal()) {
connect(_object.data(), signal(property.notifySignal().methodSignature()), SLOT(updateProperty()));
}
}
}
@ -226,6 +228,7 @@ void SharedObjectEditor::updateType() {
const QMetaObject* metaObject = _type->itemData(_type->currentIndex()).value<const QMetaObject*>();
if (!metaObject) {
_object.reset();
emit objectChanged(_object);
return;
}
QObject* newObject = metaObject->newInstance();
@ -259,7 +262,7 @@ void SharedObjectEditor::updateType() {
}
}
}
_object = static_cast<SharedObject*>(newObject);
emit objectChanged(_object = static_cast<SharedObject*>(newObject));
}
void SharedObjectEditor::propertyChanged() {
@ -275,6 +278,7 @@ void SharedObjectEditor::propertyChanged() {
QByteArray valuePropertyName = QItemEditorFactory::defaultFactory()->valuePropertyName(property.userType());
property.write(object, widget->property(valuePropertyName));
}
emit objectChanged(_object);
}
void SharedObjectEditor::updateProperty() {

View file

@ -211,7 +211,7 @@ Q_DECLARE_METATYPE(SharedObjectSet)
/// Allows editing shared object instances.
class SharedObjectEditor : public QWidget {
Q_OBJECT
Q_PROPERTY(SharedObjectPointer object READ getObject WRITE setObject USER true)
Q_PROPERTY(SharedObjectPointer object READ getObject WRITE setObject NOTIFY objectChanged USER true)
public:
@ -222,6 +222,10 @@ public:
/// "Detaches" the object pointer, copying it if anyone else is holding a reference.
void detachObject();
signals:
void objectChanged(const SharedObjectPointer& object);
public slots:
void setObject(const SharedObjectPointer& object);
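
With the NOTIFY signal added above, other widgets can observe edits as they happen. A minimal sketch of wiring it up, using the Qt4-style connect syntax this codebase already uses; someReceiver and handleObjectChanged are hypothetical names:

// hypothetical receiver: any QObject with a matching slot works
SharedObjectEditor* editor = new SharedObjectEditor(&HeightfieldTexture::staticMetaObject);
QObject::connect(editor, SIGNAL(objectChanged(const SharedObjectPointer&)),
    someReceiver, SLOT(handleObjectChanged(const SharedObjectPointer&)));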

View file

@ -49,8 +49,9 @@ PacketVersion versionForPacketType(PacketType type) {
switch (type) {
case PacketTypeMicrophoneAudioNoEcho:
case PacketTypeMicrophoneAudioWithEcho:
case PacketTypeSilentAudioFrame:
return 2;
case PacketTypeSilentAudioFrame:
return 3;
case PacketTypeMixedAudio:
return 1;
case PacketTypeAvatarData:
@ -81,7 +82,7 @@ PacketVersion versionForPacketType(PacketType type) {
case PacketTypeAudioStreamStats:
return 1;
case PacketTypeMetavoxelData:
return 1;
return 3;
default:
return 0;
}

View file

@ -486,14 +486,6 @@ void ScriptEngine::run() {
// pack a placeholder value for sequence number for now, will be packed when destination node is known
int numPreSequenceNumberBytes = audioPacket.size();
packetStream << (quint16) 0;
// assume scripted avatar audio is mono and set channel flag to zero
packetStream << (quint8) 0;
// use the orientation and position of this avatar for the source of this audio
packetStream.writeRawData(reinterpret_cast<const char*>(&_avatarData->getPosition()), sizeof(glm::vec3));
glm::quat headOrientation = _avatarData->getHeadOrientation();
packetStream.writeRawData(reinterpret_cast<const char*>(&headOrientation), sizeof(glm::quat));
if (silentFrame) {
if (!_isListeningToAudioStream) {
@ -503,12 +495,20 @@ void ScriptEngine::run() {
// write the number of silent samples so the audio-mixer can uphold timing
packetStream.writeRawData(reinterpret_cast<const char*>(&SCRIPT_AUDIO_BUFFER_SAMPLES), sizeof(int16_t));
} else if (nextSoundOutput) {
// write the raw audio data
packetStream.writeRawData(reinterpret_cast<const char*>(nextSoundOutput),
numAvailableSamples * sizeof(int16_t));
}
} else if (nextSoundOutput) {
// assume scripted avatar audio is mono and set channel flag to zero
packetStream << (quint8)0;
// use the orientation and position of this avatar for the source of this audio
packetStream.writeRawData(reinterpret_cast<const char*>(&_avatarData->getPosition()), sizeof(glm::vec3));
glm::quat headOrientation = _avatarData->getHeadOrientation();
packetStream.writeRawData(reinterpret_cast<const char*>(&headOrientation), sizeof(glm::quat));
// write the raw audio data
packetStream.writeRawData(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples * sizeof(int16_t));
}
// write audio packet to AudioMixer nodes
NodeList* nodeList = NodeList::getInstance();
foreach(const SharedNodePointer& node, nodeList->getNodeHash()) {

View file

@ -18,45 +18,63 @@
#include "RingBufferHistory.h"
template <typename T>
class MovingMinMaxAvg {
class MinMaxAvg {
public:
MinMaxAvg()
: _min(std::numeric_limits<T>::max()),
_max(std::numeric_limits<T>::min()),
_average(0.0),
_samples(0)
{}
void reset() {
_min = std::numeric_limits<T>::max();
_max = std::numeric_limits<T>::min();
_average = 0.0;
_samples = 0;
}
void update(T sample) {
if (sample < _min) {
_min = sample;
}
if (sample > _max) {
_max = sample;
}
double totalSamples = _samples + 1;
_average = _average * ((double)_samples / totalSamples)
+ (double)sample / totalSamples;
_samples++;
}
void update(const MinMaxAvg<T>& other) {
if (other._min < _min) {
_min = other._min;
}
if (other._max > _max) {
_max = other._max;
}
double totalSamples = _samples + other._samples;
_average = _average * ((double)_samples / totalSamples)
+ other._average * ((double)other._samples / totalSamples);
_samples += other._samples;
}
T getMin() const { return _min; }
T getMax() const { return _max; }
double getAverage() const { return _average; }
int getSamples() const { return _samples; }
double getSum() const { return _samples * _average; }
private:
class Stats {
public:
Stats()
: _min(std::numeric_limits<T>::max()),
_max(std::numeric_limits<T>::min()),
_average(0.0) {}
void updateWithSample(T sample, int& numSamplesInAverage) {
if (sample < _min) {
_min = sample;
}
if (sample > _max) {
_max = sample;
}
_average = _average * ((double)numSamplesInAverage / (numSamplesInAverage + 1))
+ (double)sample / (numSamplesInAverage + 1);
numSamplesInAverage++;
}
void updateWithOtherStats(const Stats& other, int& numStatsInAverage) {
if (other._min < _min) {
_min = other._min;
}
if (other._max > _max) {
_max = other._max;
}
_average = _average * ((double)numStatsInAverage / (numStatsInAverage + 1))
+ other._average / (numStatsInAverage + 1);
numStatsInAverage++;
}
T _min;
T _max;
double _average;
};
T _min;
T _max;
double _average;
int _samples;
};
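
The MinMaxAvg accumulator factored out above keeps min, max, and a running average incrementally: each update computes average' = average * (n / (n + 1)) + sample / (n + 1), and update(const MinMaxAvg&) merges two accumulators with a sample-count-weighted average. A minimal usage sketch (hypothetical test code, assuming this header is available as "MovingMinMaxAvg.h"):

#include "MovingMinMaxAvg.h"
#include <cassert>
#include <cmath>

void minMaxAvgSketch() {
    MinMaxAvg<int> stats;
    stats.update(2);
    stats.update(4);
    stats.update(9);
    assert(stats.getMin() == 2 && stats.getMax() == 9);
    assert(std::abs(stats.getAverage() - 5.0) < 1e-9); // (2 + 4 + 9) / 3

    // merging another accumulator weights both averages by sample count
    MinMaxAvg<int> other;
    other.update(1);
    stats.update(other); // min becomes 1; average becomes (3 * 5 + 1 * 1) / 4 = 4
    assert(stats.getMin() == 1 && stats.getSamples() == 4);
}
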
template <typename T>
class MovingMinMaxAvg {
public:
// This class collects 3 stats (min, max, avg) over a moving window of samples.
// The moving window contains _windowIntervals * _intervalLength samples.
@ -66,66 +84,98 @@ public:
// this class with MovingMinMaxAvg(100, 50). If you want a moving min of the past 100 samples updated on every
// new sample, instantiate this class with MovingMinMaxAvg(1, 100).
/// use intervalLength = 0 to use in manual mode, where the currentIntervalComplete() function must
/// be called to complete an interval
MovingMinMaxAvg(int intervalLength, int windowIntervals)
: _intervalLength(intervalLength),
_windowIntervals(windowIntervals),
_overallStats(),
_samplesCollected(0),
_windowStats(),
_existingSamplesInCurrentInterval(0),
_currentIntervalStats(),
_intervalStats(windowIntervals),
_newStatsAvailable(false)
{}
void reset() {
_overallStats = Stats();
_samplesCollected = 0;
_windowStats = Stats();
_existingSamplesInCurrentInterval = 0;
_currentIntervalStats = Stats();
_overallStats.reset();
_windowStats.reset();
_currentIntervalStats.reset();
_intervalStats.clear();
_newStatsAvailable = false;
}
void setWindowIntervals(int windowIntervals) {
_windowIntervals = windowIntervals;
_overallStats.reset();
_windowStats.reset();
_currentIntervalStats.reset();
_intervalStats.setCapacity(_windowIntervals);
_newStatsAvailable = false;
}
void update(T newSample) {
// update overall stats
_overallStats.updateWithSample(newSample, _samplesCollected);
_overallStats.update(newSample);
// update the current interval stats
_currentIntervalStats.updateWithSample(newSample, _existingSamplesInCurrentInterval);
_currentIntervalStats.update(newSample);
// if the current interval of samples is now full, record its stats into our past intervals' stats
if (_existingSamplesInCurrentInterval == _intervalLength) {
// record current interval's stats, then reset them
_intervalStats.insert(_currentIntervalStats);
_currentIntervalStats = Stats();
_existingSamplesInCurrentInterval = 0;
// update the window's stats by combining the intervals' stats
typename RingBufferHistory<Stats>::Iterator i = _intervalStats.begin();
typename RingBufferHistory<Stats>::Iterator end = _intervalStats.end();
_windowStats = Stats();
int intervalsIncludedInWindowStats = 0;
while (i != end) {
_windowStats.updateWithOtherStats(*i, intervalsIncludedInWindowStats);
i++;
}
_newStatsAvailable = true;
// NOTE: if _intervalLength is 0 (manual mode), currentIntervalComplete() will not be called here.
if (_currentIntervalStats.getSamples() == _intervalLength) {
currentIntervalComplete();
}
}
/// This function can be called to manually control when each interval ends. For example, if each interval
/// needs to last T seconds as opposed to N samples, this function should be called every T seconds.
void currentIntervalComplete() {
// record current interval's stats, then reset them
_intervalStats.insert(_currentIntervalStats);
_currentIntervalStats.reset();
// update the window's stats by combining the intervals' stats
typename RingBufferHistory< MinMaxAvg<T> >::Iterator i = _intervalStats.begin();
typename RingBufferHistory< MinMaxAvg<T> >::Iterator end = _intervalStats.end();
_windowStats.reset();
while (i != end) {
_windowStats.update(*i);
++i;
}
_newStatsAvailable = true;
}
bool getNewStatsAvailableFlag() const { return _newStatsAvailable; }
void clearNewStatsAvailableFlag() { _newStatsAvailable = false; }
T getMin() const { return _overallStats._min; }
T getMax() const { return _overallStats._max; }
double getAverage() const { return _overallStats._average; }
T getWindowMin() const { return _windowStats._min; }
T getWindowMax() const { return _windowStats._max; }
double getWindowAverage() const { return _windowStats._average; }
T getMin() const { return _overallStats.getMin(); }
T getMax() const { return _overallStats.getMax(); }
double getAverage() const { return _overallStats.getAverage(); }
int getSamples() const { return _overallStats.getSamples(); }
double getSum() const { return _overallStats.getSum(); }
T getWindowMin() const { return _windowStats.getMin(); }
T getWindowMax() const { return _windowStats.getMax(); }
double getWindowAverage() const { return _windowStats.getAverage(); }
int getWindowSamples() const { return _windowStats.getSamples(); }
double getWindowSum() const { return _windowStats.getSum(); }
T getCurrentIntervalMin() const { return _currentIntervalStats.getMin(); }
T getCurrentIntervalMax() const { return _currentIntervalStats.getMax(); }
double getCurrentIntervalAverage() const { return _currentIntervalStats.getAverage(); }
int getCurrentIntervalSamples() const { return _currentIntervalStats.getSamples(); }
double getCurrentIntervalSum() const { return _currentIntervalStats.getSum(); }
const MinMaxAvg<T>& getOverallStats() const { return _overallStats; }
const MinMaxAvg<T>& getWindowStats() const { return _windowStats; }
const MinMaxAvg<T>& getCurrentIntervalStats() const { return _currentIntervalStats; }
MinMaxAvg<T> getLastCompleteIntervalStats() const {
const MinMaxAvg<T>* stats = _intervalStats.getNewestEntry();
return stats == NULL ? MinMaxAvg<T>() : *stats;
}
bool isWindowFilled() const { return _intervalStats.isFilled(); }
@ -134,18 +184,16 @@ private:
int _windowIntervals;
// these are min/max/avg stats for all samples collected.
Stats _overallStats;
int _samplesCollected;
MinMaxAvg<T> _overallStats;
// these are the min/max/avg stats for the samples in the moving window
Stats _windowStats;
int _existingSamplesInCurrentInterval;
MinMaxAvg<T> _windowStats;
// these are the min/max/avg stats for the current interval
Stats _currentIntervalStats;
// these are the min/max/avg stats for the samples in the current interval
MinMaxAvg<T> _currentIntervalStats;
// these are stored stats for the past intervals in the window
RingBufferHistory<Stats> _intervalStats;
RingBufferHistory< MinMaxAvg<T> > _intervalStats;
bool _newStatsAvailable;
};
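
The two interval modes in the comments above can be exercised as follows; a minimal sketch assuming the same header, with perSample and perSecond as hypothetical names:

#include "MovingMinMaxAvg.h"

void movingStatsSketch() {
    // a window of the last 100 samples, with window stats refreshed on every sample
    MovingMinMaxAvg<float> perSample(1, 100);
    for (int i = 0; i < 250; i++) {
        perSample.update((float)i);
        if (perSample.getNewStatsAvailableFlag()) {
            float windowMin = perSample.getWindowMin(); // min over at most the last 100 samples
            (void)windowMin;
            perSample.clearNewStatsAvailableFlag();
        }
    }

    // manual mode: intervalLength of 0, so intervals end only when we say so,
    // for example from a once-per-second timer
    MovingMinMaxAvg<int> perSecond(0, 30); // a window of 30 manually closed intervals
    perSecond.update(12);
    perSecond.update(7);
    perSecond.currentIntervalComplete(); // rolls the current interval into the window
}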

View file

@ -35,6 +35,14 @@ public:
_numEntries = 0;
}
void setCapacity(int capacity) {
_size = capacity + 1;
_capacity = capacity;
_newestEntryAtIndex = 0;
_numEntries = 0;
_buffer.resize(_size);
}
void insert(const T& entry) {
// increment newest entry index cyclically
_newestEntryAtIndex = (_newestEntryAtIndex == _size - 1) ? 0 : _newestEntryAtIndex + 1;
@ -83,9 +91,14 @@ private:
QVector<T> _buffer;
public:
class Iterator : public std::iterator < std::forward_iterator_tag, T > {
class Iterator : public std::iterator < std::random_access_iterator_tag, T > {
public:
Iterator(T* bufferFirst, T* bufferLast, T* at) : _bufferFirst(bufferFirst), _bufferLast(bufferLast), _at(at) {}
Iterator(T* bufferFirst, T* bufferLast, T* newestAt, T* at)
: _bufferFirst(bufferFirst),
_bufferLast(bufferLast),
_bufferLength(bufferLast - bufferFirst + 1),
_newestAt(newestAt),
_at(at) {}
bool operator==(const Iterator& rhs) { return _at == rhs._at; }
bool operator!=(const Iterator& rhs) { return _at != rhs._at; }
@ -103,20 +116,95 @@ public:
return tmp;
}
Iterator& operator--() {
_at = (_at == _bufferLast) ? _bufferFirst : _at + 1;
return *this;
}
Iterator operator--(int) {
Iterator tmp(*this);
--(*this);
return tmp;
}
Iterator operator+(int add) {
Iterator sum(*this);
sum._at = atShiftedBy(add);
return sum;
}
Iterator operator-(int sub) {
Iterator sum(*this);
sum._at = atShiftedBy(-sub);
return sum;
}
Iterator& operator+=(int add) {
_at = atShiftedBy(add);
return *this;
}
Iterator& operator-=(int sub) {
_at = atShiftedBy(-sub);
return *this;
}
T& operator[](int i) {
return *(atShiftedBy(i));
}
bool operator<(const Iterator& rhs) {
return age() < rhs.age();
}
bool operator>(const Iterator& rhs) {
return age() > rhs.age();
}
bool operator<=(const Iterator& rhs) {
return age() <= rhs.age();
}
bool operator>=(const Iterator& rhs) {
return age() >= rhs.age();
}
int operator-(const Iterator& rhs) {
return age() - rhs.age();
}
private:
T* const _bufferFirst;
T* const _bufferLast;
T* atShiftedBy(int i) { // shifts i places towards _bufferFirst (towards older entries)
i = (_at - _bufferFirst - i) % _bufferLength;
if (i < 0) {
i += _bufferLength;
}
return _bufferFirst + i;
}
int age() {
int age = _newestAt - _at;
if (age < 0) {
age += _bufferLength;
}
return age;
}
T* _bufferFirst;
T* _bufferLast;
int _bufferLength;
T* _newestAt;
T* _at;
};
Iterator begin() { return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex]); }
Iterator begin() { return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex], &_buffer[_newestEntryAtIndex]); }
Iterator end() {
int endAtIndex = _newestEntryAtIndex - _numEntries;
if (endAtIndex < 0) {
endAtIndex += _size;
}
return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[endAtIndex]);
return Iterator(&_buffer.first(), &_buffer.last(), &_buffer[_newestEntryAtIndex], &_buffer[endAtIndex]);
}
};
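
Because begin() points at the newest entry and operator++ walks toward older ones, the new random-access operators index by age: it[0] is the newest entry, it[2] the entry inserted two insertions earlier, and subtracting iterators yields an age difference. A minimal sketch, assuming a default-constructible RingBufferHistory plus the setCapacity() added above:

#include "RingBufferHistory.h"

void historySketch() {
    RingBufferHistory<int> history;
    history.setCapacity(3); // also resets any existing entries

    for (int i = 1; i <= 5; i++) {
        history.insert(i); // capacity is 3, so entries 1 and 2 fall out
    }
    RingBufferHistory<int>::Iterator it = history.begin();
    int newest = *it;                         // 5
    int twoInsertionsAgo = it[2];             // 3
    int entriesInWindow = history.end() - it; // 3, via the age-based operator-
    (void)newest; (void)twoInsertionsAgo; (void)entriesInWindow;
}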

View file

@ -27,28 +27,28 @@ void AudioRingBufferTests::runAllTests() {
int16_t readData[10000];
int readIndexAt;
AudioRingBuffer ringBuffer(10, false, 10); // makes buffer of 100 int16_t samples
for (int T = 0; T < 300; T++) {
writeIndexAt = 0;
readIndexAt = 0;
// write 73 samples, 73 samples in buffer
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73) / sizeof(int16_t);
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 73);
assertBufferSize(ringBuffer, 73);
// read 43 samples, 30 samples in buffer
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43) / sizeof(int16_t);
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 43);
assertBufferSize(ringBuffer, 30);
// write 70 samples, 100 samples in buffer (full)
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70) / sizeof(int16_t);
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 70);
assertBufferSize(ringBuffer, 100);
// read 100 samples, 0 samples in buffer (empty)
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100) / sizeof(int16_t);
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100);
assertBufferSize(ringBuffer, 0);
@ -65,15 +65,15 @@ void AudioRingBufferTests::runAllTests() {
readIndexAt = 0;
// write 59 samples, 59 samples in buffer
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 59) / sizeof(int16_t);
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 59);
assertBufferSize(ringBuffer, 59);
// write 99 samples, 100 samples in buffer
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 99) / sizeof(int16_t);
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 99);
assertBufferSize(ringBuffer, 100);
// read 100 samples, 0 samples in buffer
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100) / sizeof(int16_t);
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 100);
assertBufferSize(ringBuffer, 0);
// verify 100 samples of read data
@ -88,23 +88,23 @@ void AudioRingBufferTests::runAllTests() {
readIndexAt = 0;
// write 77 samples, 77 samples in buffer
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 77) / sizeof(int16_t);
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 77);
assertBufferSize(ringBuffer, 77);
// write 24 samples, 100 samples in buffer (overwrote one sample: "0")
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 24) / sizeof(int16_t);
writeIndexAt += ringBuffer.writeSamples(&writeData[writeIndexAt], 24);
assertBufferSize(ringBuffer, 100);
// write 29 silent samples, 100 samples in buffer, make sure none were added
int samplesWritten;
if ((samplesWritten = ringBuffer.addSilentFrame(29)) != 0) {
qDebug("addSilentFrame(29) incorrect! Expected: 0 Actual: %d", samplesWritten);
if ((samplesWritten = ringBuffer.addSilentSamples(29)) != 0) {
qDebug("addSilentSamples(29) incorrect! Expected: 0 Actual: %d", samplesWritten);
return;
}
assertBufferSize(ringBuffer, 100);
// read 3 samples, 97 samples in buffer (expect to read "1", "2", "3")
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3) / sizeof(int16_t);
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3);
for (int i = 0; i < 3; i++) {
if (readData[i] != i + 1) {
qDebug("Second readData[%d] incorrect! Expcted: %d Actual: %d", i, i + 1, readData[i]);
@ -114,14 +114,14 @@ void AudioRingBufferTests::runAllTests() {
assertBufferSize(ringBuffer, 97);
// write 4 silent samples, 100 samples in buffer
if ((samplesWritten = ringBuffer.addSilentFrame(4) / sizeof(int16_t)) != 3) {
qDebug("addSilentFrame(4) incorrect! Exptected: 3 Actual: %d", samplesWritten);
if ((samplesWritten = ringBuffer.addSilentSamples(4)) != 3) {
qDebug("addSilentSamples(4) incorrect! Exptected: 3 Actual: %d", samplesWritten);
return;
}
assertBufferSize(ringBuffer, 100);
// read back 97 samples (the non-silent samples), 3 samples in buffer (expect to read "4" thru "100")
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 97) / sizeof(int16_t);
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 97);
for (int i = 3; i < 100; i++) {
if (readData[i] != i + 1) {
qDebug("third readData[%d] incorrect! Expcted: %d Actual: %d", i, i + 1, readData[i]);
@ -131,7 +131,7 @@ void AudioRingBufferTests::runAllTests() {
assertBufferSize(ringBuffer, 3);
// read back 3 silent samples, 0 samples in buffer
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3) / sizeof(int16_t);
readIndexAt += ringBuffer.readSamples(&readData[readIndexAt], 3);
for (int i = 100; i < 103; i++) {
if (readData[i] != 0) {
qDebug("Fourth readData[%d] incorrect! Expcted: %d Actual: %d", i, 0, readData[i]);
@ -143,4 +143,3 @@ void AudioRingBufferTests::runAllTests() {
qDebug() << "PASSED";
}
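
The test updates above track an API change in AudioRingBuffer: writeSamples() and readSamples() now return sample counts rather than byte counts (hence the removal of the / sizeof(int16_t) corrections), and addSilentFrame() has become addSilentSamples(). A hypothetical minimal sketch of the new contract:

#include <stdint.h>
#include "AudioRingBuffer.h"

void ringBufferSketch() {
    int16_t samples[100] = { 0 };
    AudioRingBuffer ringBuffer(10, false, 10); // 100 int16_t samples, as in the test above
    int written = ringBuffer.writeSamples(samples, 73); // returns 73 (samples), not 146 (bytes)
    int read = ringBuffer.readSamples(samples, 43);     // returns 43
    int silent = ringBuffer.addSilentSamples(29);       // returns the samples actually added
    (void)written; (void)read; (void)silent;
}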

View file

@ -271,9 +271,10 @@ void runReceive(const char* addressOption, int port, int gap, int size, int repo
quint64 networkStart = usecTimestampNow();
n = recvfrom(sockfd, inputBuffer, size, 0, NULL, NULL); // we don't care about where it came from
quint64 networkEnd = usecTimestampNow();
float networkElapsed = (float)(networkEnd - networkStart);
if (n < 0) {
std::cout << "Receive error: " << strerror(errno) << "\n";
}

View file

@ -16,6 +16,7 @@ int main(int argc, char** argv) {
MovingMinMaxAvgTests::runAllTests();
MovingPercentileTests::runAllTests();
AngularConstraintTests::runAllTests();
printf("tests complete, press enter to exit\n");
getchar();
return 0;
}