From d602c713466feea6a0374c32e8c786b1a29f7e4b Mon Sep 17 00:00:00 2001
From: Stephen Birarda
Date: Fri, 1 May 2015 16:12:46 -0700
Subject: [PATCH] cleanup audio-mixer stats, add username interpolation

---
 assignment-client/src/audio/AudioMixer.cpp    |  23 +++-
 .../src/audio/AudioMixerClientData.cpp        | 115 ++++++++++--------
 2 files changed, 83 insertions(+), 55 deletions(-)

diff --git a/assignment-client/src/audio/AudioMixer.cpp b/assignment-client/src/audio/AudioMixer.cpp
index ef3e58c246..94439cb18b 100644
--- a/assignment-client/src/audio/AudioMixer.cpp
+++ b/assignment-client/src/audio/AudioMixer.cpp
@@ -622,19 +622,30 @@ void AudioMixer::sendStatsPacket() {
     readPendingDatagramStats["hashmatch_time_per_call"] = hashMatchTimePerCallStats;
 
     statsObject["read_pending_datagrams"] = readPendingDatagramStats;
-
-    auto nodeList = DependencyManager::get<NodeList>();
-    int clientNumber = 0;
 
     // add stats for each listerner
+    auto nodeList = DependencyManager::get<NodeList>();
+    QJsonObject listenerStats;
+
     nodeList->eachNode([&](const SharedNodePointer& node) {
-        clientNumber++;
         AudioMixerClientData* clientData = static_cast<AudioMixerClientData*>(node->getLinkedData());
         if (clientData) {
-            statsObject["jitterStats." + node->getUUID().toString()] = clientData->getAudioStreamStats();
+            QJsonObject nodeStats;
+            QString uuidString = uuidStringWithoutCurlyBraces(node->getUUID());
+
+            nodeStats["outbound_kbps"] = node->getOutboundBandwidth();
+            nodeStats[USERNAME_UUID_REPLACEMENT_STATS_KEY] = uuidString;
+
+            nodeStats["jitter"] = clientData->getAudioStreamStats();
+
+            listenerStats[uuidString] = nodeStats;
         }
     });
-
+
+    // add the listeners object to the root object
+    statsObject["listeners"] = listenerStats;
+
+    // send off the stats packets
     ThreadedAssignment::addPacketStatsAndSendStatsPacket(statsObject);
 }
 
diff --git a/assignment-client/src/audio/AudioMixerClientData.cpp b/assignment-client/src/audio/AudioMixerClientData.cpp
index 365ee6a8f7..4db5611bb5 100644
--- a/assignment-client/src/audio/AudioMixerClientData.cpp
+++ b/assignment-client/src/audio/AudioMixerClientData.cpp
@@ -9,7 +9,8 @@
 //  See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //
 
-#include 
+#include 
+#include 
 
 #include 
 #include 
@@ -198,68 +199,84 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
 
 QJsonObject AudioMixerClientData::getAudioStreamStats() const {
     QJsonObject result;
+
+    QJsonObject downstreamStats;
     AudioStreamStats streamStats = _downstreamAudioStreamStats;
-    result["DOWNSTREAM.desired"] = streamStats._desiredJitterBufferFrames;
-    result["DOWNSTREAM.available_avg_10s"] = streamStats._framesAvailableAverage;
-    result["DOWNSTREAM.available"] = (double) streamStats._framesAvailable;
-    result["DOWNSTREAM.starves"] = (double) streamStats._starveCount;
-    result["DOWNSTREAM.not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
-    result["DOWNSTREAM.overflows"] = (double) streamStats._overflowCount;
-    result["DOWNSTREAM.lost%"] = streamStats._packetStreamStats.getLostRate() * 100.0f;
-    result["DOWNSTREAM.lost%_30s"] = streamStats._packetStreamWindowStats.getLostRate() * 100.0f;
-    result["DOWNSTREAM.min_gap"] = formatUsecTime(streamStats._timeGapMin);
-    result["DOWNSTREAM.max_gap"] = formatUsecTime(streamStats._timeGapMax);
-    result["DOWNSTREAM.avg_gap"] = formatUsecTime(streamStats._timeGapAverage);
-    result["DOWNSTREAM.min_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMin);
-    result["DOWNSTREAM.max_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMax);
-    result["DOWNSTREAM.avg_gap_30s"] = formatUsecTime(streamStats._timeGapWindowAverage);
+    downstreamStats["desired"] = streamStats._desiredJitterBufferFrames;
+    downstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
+    downstreamStats["available"] = (double) streamStats._framesAvailable;
+    downstreamStats["starves"] = (double) streamStats._starveCount;
+    downstreamStats["not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
+    downstreamStats["overflows"] = (double) streamStats._overflowCount;
+    downstreamStats["lost%"] = streamStats._packetStreamStats.getLostRate() * 100.0f;
+    downstreamStats["lost%_30s"] = streamStats._packetStreamWindowStats.getLostRate() * 100.0f;
+    downstreamStats["min_gap"] = formatUsecTime(streamStats._timeGapMin);
+    downstreamStats["max_gap"] = formatUsecTime(streamStats._timeGapMax);
+    downstreamStats["avg_gap"] = formatUsecTime(streamStats._timeGapAverage);
+    downstreamStats["min_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMin);
+    downstreamStats["max_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMax);
+    downstreamStats["avg_gap_30s"] = formatUsecTime(streamStats._timeGapWindowAverage);
+
+    result["downstream"] = downstreamStats;
 
     AvatarAudioStream* avatarAudioStream = getAvatarAudioStream();
+
     if (avatarAudioStream) {
+        QJsonObject upstreamStats;
+
         AudioStreamStats streamStats = avatarAudioStream->getAudioStreamStats();
-        result["UPSTREAM.mic.desired"] = streamStats._desiredJitterBufferFrames;
-        result["UPSTREAM.desired_calc"] = avatarAudioStream->getCalculatedJitterBufferFrames();
-        result["UPSTREAM.available_avg_10s"] = streamStats._framesAvailableAverage;
-        result["UPSTREAM.available"] = (double) streamStats._framesAvailable;
-        result["UPSTREAM.starves"] = (double) streamStats._starveCount;
-        result["UPSTREAM.not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
-        result["UPSTREAM.overflows"] = (double) streamStats._overflowCount;
-        result["UPSTREAM.silents_dropped"] = (double) streamStats._framesDropped;
-        result["UPSTREAM.lost%"] = streamStats._packetStreamStats.getLostRate() * 100.0f;
-        result["UPSTREAM.lost%_30s"] = streamStats._packetStreamWindowStats.getLostRate() * 100.0f;
-        result["UPSTREAM.min_gap"] = formatUsecTime(streamStats._timeGapMin);
-        result["UPSTREAM.max_gap"] = formatUsecTime(streamStats._timeGapMax);
-        result["UPSTREAM.avg_gap"] = formatUsecTime(streamStats._timeGapAverage);
-        result["UPSTREAM.min_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMin);
-        result["UPSTREAM.max_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMax);
-        result["UPSTREAM.avg_gap_30s"] = formatUsecTime(streamStats._timeGapWindowAverage);
+        upstreamStats["mic.desired"] = streamStats._desiredJitterBufferFrames;
+        upstreamStats["desired_calc"] = avatarAudioStream->getCalculatedJitterBufferFrames();
+        upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
+        upstreamStats["available"] = (double) streamStats._framesAvailable;
+        upstreamStats["starves"] = (double) streamStats._starveCount;
+        upstreamStats["not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
+        upstreamStats["overflows"] = (double) streamStats._overflowCount;
+        upstreamStats["silents_dropped"] = (double) streamStats._framesDropped;
+        upstreamStats["lost%"] = streamStats._packetStreamStats.getLostRate() * 100.0f;
+        upstreamStats["lost%_30s"] = streamStats._packetStreamWindowStats.getLostRate() * 100.0f;
+        upstreamStats["min_gap"] = formatUsecTime(streamStats._timeGapMin);
+        upstreamStats["max_gap"] = formatUsecTime(streamStats._timeGapMax);
+        upstreamStats["avg_gap"] = formatUsecTime(streamStats._timeGapAverage);
+        upstreamStats["min_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMin);
+        upstreamStats["max_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMax);
+        upstreamStats["avg_gap_30s"] = formatUsecTime(streamStats._timeGapWindowAverage);
+
+        result["upstream"] = upstreamStats;
     } else {
-        // TOOD: How should we handle this case?
-        // result = "mic unknown";
+        result["upstream"] = "mic unknown";
    }
 
     QHash<QUuid, PositionalAudioStream*>::ConstIterator i;
+    QJsonArray injectorArray;
     for (i = _audioStreams.constBegin(); i != _audioStreams.constEnd(); i++) {
         if (i.value()->getType() == PositionalAudioStream::Injector) {
+            QJsonObject upstreamStats;
+
             AudioStreamStats streamStats = i.value()->getAudioStreamStats();
-            result["UPSTREAM.inj.desired"] = streamStats._desiredJitterBufferFrames;
-            result["UPSTREAM.desired_calc"] = i.value()->getCalculatedJitterBufferFrames();
-            result["UPSTREAM.available_avg_10s"] = streamStats._framesAvailableAverage;
-            result["UPSTREAM.available"] = (double) streamStats._framesAvailable;
-            result["UPSTREAM.starves"] = (double) streamStats._starveCount;
-            result["UPSTREAM.not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
-            result["UPSTREAM.overflows"] = (double) streamStats._overflowCount;
-            result["UPSTREAM.silents_dropped"] = (double) streamStats._framesDropped;
-            result["UPSTREAM.lost%"] = streamStats._packetStreamStats.getLostRate() * 100.0f;
-            result["UPSTREAM.lost%_30s"] = streamStats._packetStreamWindowStats.getLostRate() * 100.0f;
-            result["UPSTREAM.min_gap"] = formatUsecTime(streamStats._timeGapMin);
-            result["UPSTREAM.max_gap"] = formatUsecTime(streamStats._timeGapMax);
-            result["UPSTREAM.avg_gap"] = formatUsecTime(streamStats._timeGapAverage);
-            result["UPSTREAM.min_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMin);
-            result["max_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMax);
-            result["avg_gap_30s"] = formatUsecTime(streamStats._timeGapWindowAverage);
+            upstreamStats["inj.desired"] = streamStats._desiredJitterBufferFrames;
+            upstreamStats["desired_calc"] = i.value()->getCalculatedJitterBufferFrames();
+            upstreamStats["available_avg_10s"] = streamStats._framesAvailableAverage;
+            upstreamStats["available"] = (double) streamStats._framesAvailable;
+            upstreamStats["starves"] = (double) streamStats._starveCount;
+            upstreamStats["not_mixed"] = (double) streamStats._consecutiveNotMixedCount;
+            upstreamStats["overflows"] = (double) streamStats._overflowCount;
+            upstreamStats["silents_dropped"] = (double) streamStats._framesDropped;
+            upstreamStats["lost%"] = streamStats._packetStreamStats.getLostRate() * 100.0f;
+            upstreamStats["lost%_30s"] = streamStats._packetStreamWindowStats.getLostRate() * 100.0f;
+            upstreamStats["min_gap"] = formatUsecTime(streamStats._timeGapMin);
+            upstreamStats["max_gap"] = formatUsecTime(streamStats._timeGapMax);
+            upstreamStats["avg_gap"] = formatUsecTime(streamStats._timeGapAverage);
+            upstreamStats["min_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMin);
+            upstreamStats["max_gap_30s"] = formatUsecTime(streamStats._timeGapWindowMax);
+            upstreamStats["avg_gap_30s"] = formatUsecTime(streamStats._timeGapWindowAverage);
+
+            injectorArray.push_back(upstreamStats);
         }
     }
+
+    result["injectors"] = injectorArray;
+
     return result;
 }
 
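
Note (illustrative, not part of the patch): after this change each listener gets its own nested object under a top-level "listeners" key on the mixer's stats object, instead of flat dot-prefixed keys on the root. The short Qt sketch below hand-builds an object of the same shape with placeholder values ("listener-uuid" stands in for the real node UUID and for the value keyed by USERNAME_UUID_REPLACEMENT_STATS_KEY, and the numbers are made up) and prints it, just to make the new layout visible.

    // Illustrative sketch only -- not part of the patch. All values and the
    // "listener-uuid"/"username" strings are placeholders; the real code keys
    // listeners by uuidStringWithoutCurlyBraces(node->getUUID()) and uses the
    // USERNAME_UUID_REPLACEMENT_STATS_KEY constant.
    #include <cstdio>

    #include <QJsonDocument>
    #include <QJsonObject>
    #include <QTextStream>

    int main() {
        // a trimmed-down "jitter" payload, shaped like getAudioStreamStats()
        QJsonObject downstreamStats;
        downstreamStats["desired"] = 3;
        downstreamStats["available"] = 2.0;
        downstreamStats["lost%"] = 0.5;

        QJsonObject jitter;
        jitter["downstream"] = downstreamStats;   // real output also carries "upstream" and "injectors"

        // per-listener object, shaped like the nodeStats built in sendStatsPacket()
        QJsonObject nodeStats;
        nodeStats["outbound_kbps"] = 96.0;
        nodeStats["username"] = "listener-uuid";
        nodeStats["jitter"] = jitter;

        // every listener hangs off a single "listeners" key on the root stats object
        QJsonObject listenerStats;
        listenerStats["listener-uuid"] = nodeStats;

        QJsonObject statsObject;
        statsObject["listeners"] = listenerStats;

        QTextStream out(stdout);
        out << QJsonDocument(statsObject).toJson(QJsonDocument::Indented);
        return 0;
    }

Printed with QJsonDocument::Indented this yields listeners -> uuid -> { outbound_kbps, username, jitter }, with the "jitter" value nesting the "downstream", "upstream", and "injectors" sections that getAudioStreamStats() now returns.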