Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-07-24 21:27:40 +02:00)
Merge pull request #9726 from ZappoMan/avatarBandwidthBudgetImprovements
Avatar bandwidth budget improvements

Commit e723385df2 (20 changed files with 534 additions and 393 deletions)
@@ -47,7 +47,7 @@ AvatarMixer::AvatarMixer(ReceivedMessage& message) :
 
     auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
     packetReceiver.registerListener(PacketType::AvatarData, this, "queueIncomingPacket");
+    packetReceiver.registerListener(PacketType::AdjustAvatarSorting, this, "handleAdjustAvatarSorting");
     packetReceiver.registerListener(PacketType::ViewFrustum, this, "handleViewFrustumPacket");
     packetReceiver.registerListener(PacketType::AvatarIdentity, this, "handleAvatarIdentityPacket");
     packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");

@@ -309,7 +309,7 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
         },
         [&](const SharedNodePointer& node) {
             QMetaObject::invokeMethod(node->getLinkedData(),
-                                      "removeLastBroadcastSequenceNumber",
+                                      "cleanupKilledNode",
                                       Qt::AutoConnection,
                                       Q_ARG(const QUuid&, QUuid(killedNode->getUUID())));
         }

@@ -317,6 +317,27 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
     }
 }
 
+void AvatarMixer::handleAdjustAvatarSorting(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
+    auto start = usecTimestampNow();
+
+    // only allow admins with kick rights to change this value...
+    if (senderNode->getCanKick()) {
+        message->readPrimitive(&AvatarData::_avatarSortCoefficientSize);
+        message->readPrimitive(&AvatarData::_avatarSortCoefficientCenter);
+        message->readPrimitive(&AvatarData::_avatarSortCoefficientAge);
+
+        qCDebug(avatars) << "New avatar sorting... "
+            << "size:" << AvatarData::_avatarSortCoefficientSize
+            << "center:" << AvatarData::_avatarSortCoefficientCenter
+            << "age:" << AvatarData::_avatarSortCoefficientAge;
+    }
+
+    auto end = usecTimestampNow();
+    _handleAdjustAvatarSortingElapsedTime += (end - start);
+}
+
+
 void AvatarMixer::handleViewFrustumPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
     auto start = usecTimestampNow();
     getOrCreateClientData(senderNode);
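The new handler above reads three floats in a fixed order (size, center, age) and only applies them when the sender has kick rights. As a rough, hypothetical sketch of the matching sender side (the helper name sendSortCoefficients and the use of writePrimitive here are assumptions for illustration, not part of this pull request):

    // Hypothetical client-side sketch: pack the coefficients in the order the mixer reads them.
    void sendSortCoefficients(float size, float center, float age, const SharedNodePointer& avatarMixer) {
        auto packet = NLPacket::create(PacketType::AdjustAvatarSorting, 3 * sizeof(float));
        packet->writePrimitive(size);    // read into AvatarData::_avatarSortCoefficientSize
        packet->writePrimitive(center);  // read into AvatarData::_avatarSortCoefficientCenter
        packet->writePrimitive(age);     // read into AvatarData::_avatarSortCoefficientAge
        DependencyManager::get<NodeList>()->sendPacket(std::move(packet), *avatarMixer);
    }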
@@ -485,6 +506,9 @@ void AvatarMixer::sendStatsPacket() {
         float averageOthersIncluded = averageNodes ? stats.numOthersIncluded / averageNodes : 0.0f;
         slaveObject["sent_6_averageOthersIncluded"] = TIGHT_LOOP_STAT(averageOthersIncluded);
 
+        float averageOverBudgetAvatars = averageNodes ? stats.overBudgetAvatars / averageNodes : 0.0f;
+        slaveObject["sent_7_averageOverBudgetAvatars"] = TIGHT_LOOP_STAT(averageOverBudgetAvatars);
+
         slaveObject["timing_1_processIncomingPackets"] = TIGHT_LOOP_STAT_UINT64(stats.processIncomingPacketsElapsedTime);
         slaveObject["timing_2_ignoreCalculation"] = TIGHT_LOOP_STAT_UINT64(stats.ignoreCalculationElapsedTime);
         slaveObject["timing_3_toByteArray"] = TIGHT_LOOP_STAT_UINT64(stats.toByteArrayElapsedTime);

@@ -514,7 +538,10 @@ void AvatarMixer::sendStatsPacket() {
 
     float averageOthersIncluded = averageNodes ? aggregateStats.numOthersIncluded / averageNodes : 0.0f;
     slavesAggregatObject["sent_6_averageOthersIncluded"] = TIGHT_LOOP_STAT(averageOthersIncluded);
 
+    float averageOverBudgetAvatars = averageNodes ? aggregateStats.overBudgetAvatars / averageNodes : 0.0f;
+    slavesAggregatObject["sent_7_averageOverBudgetAvatars"] = TIGHT_LOOP_STAT(averageOverBudgetAvatars);
+
     slavesAggregatObject["timing_1_processIncomingPackets"] = TIGHT_LOOP_STAT_UINT64(aggregateStats.processIncomingPacketsElapsedTime);
     slavesAggregatObject["timing_2_ignoreCalculation"] = TIGHT_LOOP_STAT_UINT64(aggregateStats.ignoreCalculationElapsedTime);
     slavesAggregatObject["timing_3_toByteArray"] = TIGHT_LOOP_STAT_UINT64(aggregateStats.toByteArrayElapsedTime);

@@ -39,6 +39,7 @@ public slots:
 
 private slots:
     void queueIncomingPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node);
+    void handleAdjustAvatarSorting(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
     void handleViewFrustumPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
     void handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
     void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message);

@@ -92,6 +93,7 @@ private:
     quint64 _broadcastAvatarDataNodeTransform { 0 };
     quint64 _broadcastAvatarDataNodeFunctor { 0 };
 
+    quint64 _handleAdjustAvatarSortingElapsedTime { 0 };
     quint64 _handleViewFrustumPacketElapsedTime { 0 };
     quint64 _handleAvatarIdentityPacketElapsedTime { 0 };
     quint64 _handleKillAvatarPacketElapsedTime { 0 };

@@ -74,14 +74,22 @@ bool AvatarMixerClientData::checkAndSetHasReceivedFirstPacketsFrom(const QUuid&
     return true;
 }
 
+uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
+    // return the matching PacketSequenceNumber, or the default if we don't have it
+    auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);
+    if (nodeMatch != _lastBroadcastTimes.end()) {
+        return nodeMatch->second;
+    }
+    return 0;
+}
+
 uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const {
     // return the matching PacketSequenceNumber, or the default if we don't have it
     auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeUUID);
     if (nodeMatch != _lastBroadcastSequenceNumbers.end()) {
         return nodeMatch->second;
-    } else {
-        return 0;
     }
+    return 0;
 }
 
 void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointer other) {

@@ -118,8 +126,6 @@ bool AvatarMixerClientData::otherAvatarInView(const AABox& otherAvatarBox) {
 
 void AvatarMixerClientData::loadJSONStats(QJsonObject& jsonObject) const {
     jsonObject["display_name"] = _avatar->getDisplayName();
-    jsonObject["full_rate_distance"] = _fullRateDistance;
-    jsonObject["max_av_distance"] = _maxAvatarDistance;
     jsonObject["num_avs_sent_last_frame"] = _numAvatarsSentLastFrame;
     jsonObject["avg_other_av_starves_per_second"] = getAvgNumOtherAvatarStarvesPerSecond();
     jsonObject["avg_other_av_skips_per_second"] = getAvgNumOtherAvatarSkipsPerSecond();

@@ -43,6 +43,7 @@ public:
     int parseData(ReceivedMessage& message) override;
     AvatarData& getAvatar() { return *_avatar; }
     const AvatarData* getConstAvatarData() const { return _avatar.get(); }
+    AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }
 
     bool checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid);
 

@@ -51,6 +52,15 @@ public:
         { _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
     Q_INVOKABLE void removeLastBroadcastSequenceNumber(const QUuid& nodeUUID) { _lastBroadcastSequenceNumbers.erase(nodeUUID); }
 
+    uint64_t getLastBroadcastTime(const QUuid& nodeUUID) const;
+    void setLastBroadcastTime(const QUuid& nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
+    Q_INVOKABLE void removeLastBroadcastTime(const QUuid& nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }
+
+    Q_INVOKABLE void cleanupKilledNode(const QUuid& nodeUUID) {
+        removeLastBroadcastSequenceNumber(nodeUUID);
+        removeLastBroadcastTime(nodeUUID);
+    }
+
     uint16_t getLastReceivedSequenceNumber() const { return _lastReceivedSequenceNumber; }
 
     HRCTime getIdentityChangeTimestamp() const { return _identityChangeTimestamp; }

@@ -58,12 +68,6 @@ public:
     bool getAvatarSessionDisplayNameMustChange() const { return _avatarSessionDisplayNameMustChange; }
     void setAvatarSessionDisplayNameMustChange(bool set = true) { _avatarSessionDisplayNameMustChange = set; }
 
-    void setFullRateDistance(float fullRateDistance) { _fullRateDistance = fullRateDistance; }
-    float getFullRateDistance() const { return _fullRateDistance; }
-
-    void setMaxAvatarDistance(float maxAvatarDistance) { _maxAvatarDistance = maxAvatarDistance; }
-    float getMaxAvatarDistance() const { return _maxAvatarDistance; }
-
     void resetNumAvatarsSentLastFrame() { _numAvatarsSentLastFrame = 0; }
     void incrementNumAvatarsSentLastFrame() { ++_numAvatarsSentLastFrame; }
     int getNumAvatarsSentLastFrame() const { return _numAvatarsSentLastFrame; }

@@ -106,6 +110,8 @@ public:
     bool getRequestsDomainListData() { return _requestsDomainListData; }
     void setRequestsDomainListData(bool requesting) { _requestsDomainListData = requesting; }
 
+    ViewFrustum getViewFrustom() const { return _currentViewFrustum; }
+
     quint64 getLastOtherAvatarEncodeTime(QUuid otherAvatar) {
         quint64 result = 0;
         if (_lastOtherAvatarEncodeTime.find(otherAvatar) != _lastOtherAvatarEncodeTime.end()) {

@@ -134,6 +140,7 @@ private:
     uint16_t _lastReceivedSequenceNumber { 0 };
     std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
     std::unordered_set<QUuid> _hasReceivedFirstPacketsFrom;
+    std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
 
     // this is a map of the last time we encoded an "other" avatar for
     // sending to "this" node

@@ -143,9 +150,6 @@ private:
     HRCTime _identityChangeTimestamp;
     bool _avatarSessionDisplayNameMustChange{ false };
 
-    float _fullRateDistance = FLT_MAX;
-    float _maxAvatarDistance = FLT_MAX;
-
     int _numAvatarsSentLastFrame = 0;
     int _numFramesSinceAdjustment = 0;
 

@@ -66,24 +66,20 @@ void AvatarMixerSlave::processIncomingPackets(const SharedNodePointer& node) {
 }
 
 
-void AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode) {
+int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode) {
+    int bytesSent = 0;
     QByteArray individualData = nodeData->getConstAvatarData()->identityByteArray();
     auto identityPacket = NLPacket::create(PacketType::AvatarIdentity, individualData.size());
-    individualData.replace(0, NUM_BYTES_RFC4122_UUID, nodeData->getNodeID().toRfc4122());
+    individualData.replace(0, NUM_BYTES_RFC4122_UUID, nodeData->getNodeID().toRfc4122()); // FIXME, this looks suspicious
+    bytesSent += individualData.size();
     identityPacket->write(individualData);
     DependencyManager::get<NodeList>()->sendPacket(std::move(identityPacket), *destinationNode);
     _stats.numIdentityPackets++;
+    return bytesSent;
 }
 
 static const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;
 
-// only send extra avatar data (avatars out of view, ignored) every Nth AvatarData frame
-// Extra avatar data will be sent (AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND/EXTRA_AVATAR_DATA_FRAME_RATIO) times
-// per second.
-// This value should be a power of two for performance purposes, as the mixer performs a modulo operation every frame
-// to determine whether the extra data should be sent.
-static const int EXTRA_AVATAR_DATA_FRAME_RATIO = 16;
-
 // FIXME - There is some old logic (unchanged as of 2/17/17) that randomly decides to send an identity
 // packet. That logic had the following comment about the constants it uses...
 //

@@ -117,9 +113,6 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
     // reset the internal state for correct random number distribution
     distribution.reset();
 
-    // reset the max distance for this frame
-    float maxAvatarDistanceThisFrame = 0.0f;
-
     // reset the number of sent avatars
     nodeData->resetNumAvatarsSentLastFrame();
 

@@ -128,6 +121,15 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
 
     // keep track of outbound data rate specifically for avatar data
     int numAvatarDataBytes = 0;
+    int identityBytesSent = 0;
+
+    // max number of avatarBytes per frame
+    auto maxAvatarBytesPerFrame = (_maxKbpsPerNode * BYTES_PER_KILOBIT) / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;
+
+    // FIXME - find a way to not send the sessionID for every avatar
+    int minimumBytesPerAvatar = AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID;
+
+    int overBudgetAvatars = 0;
 
     // keep track of the number of other avatars held back in this frame
     int numAvatarsHeldBack = 0;
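These additions replace the old full-rate-distance throttle with an explicit per-frame byte budget for each client. As a rough worked example of the arithmetic introduced above (the numbers below are illustrative assumptions, not values from this diff: _maxKbpsPerNode comes from the mixer's settings, and BYTES_PER_KILOBIT is taken here as 1000 / 8 = 125):

    // Illustrative arithmetic only, with assumed values.
    int maxKbpsPerNode = 5000;                                  // hypothetical per-client cap from settings
    int maxAvatarBytesPerFrame = (maxKbpsPerNode * 125) / 45;   // 625000 / 45, roughly 13,888 bytes per broadcast frame
    // Each avatar costs at least AVATAR_HAS_FLAGS_SIZE + 16 bytes (the RFC 4122 UUID),
    // so the mixer can reserve that minimum for every avatar still waiting in the queue
    // and tell ahead of time whether the remaining avatars still fit in the frame.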
@@ -135,9 +137,6 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
     // keep track of the number of other avatar frames skipped
     int numAvatarsWithSkippedFrames = 0;
 
-    // use the data rate specifically for avatar data for FRD adjustment checks
-    float avatarDataRateLastSecond = nodeData->getOutboundAvatarDataKbps();
-
     // When this is true, the AvatarMixer will send Avatar data to a client about avatars that are not in the view frustrum
     bool getsOutOfView = nodeData->getRequestsDomainListData();
 

@@ -147,267 +146,264 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
     // When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
     bool getsAnyIgnored = getsIgnoredByMe && node->getCanKick();
 
-    // Check if it is time to adjust what we send this client based on the observed
-    // bandwidth to this node. We do this once a second, which is also the window for
-    // the bandwidth reported by node->getOutboundBandwidth();
-    if (nodeData->getNumFramesSinceFRDAdjustment() > AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND) {
-
-        const float FRD_ADJUSTMENT_ACCEPTABLE_RATIO = 0.8f;
-        const float HYSTERISIS_GAP = (1 - FRD_ADJUSTMENT_ACCEPTABLE_RATIO);
-        const float HYSTERISIS_MIDDLE_PERCENTAGE = (1 - (HYSTERISIS_GAP * 0.5f));
-
-        // get the current full rate distance so we can work with it
-        float currentFullRateDistance = nodeData->getFullRateDistance();
-
-        if (avatarDataRateLastSecond > _maxKbpsPerNode) {
-
-            // is the FRD greater than the farthest avatar?
-            // if so, before we calculate anything, set it to that distance
-            currentFullRateDistance = std::min(currentFullRateDistance, nodeData->getMaxAvatarDistance());
-
-            // we're adjusting the full rate distance to target a bandwidth in the middle
-            // of the hysterisis gap
-            currentFullRateDistance *= (_maxKbpsPerNode * HYSTERISIS_MIDDLE_PERCENTAGE) / avatarDataRateLastSecond;
-
-            nodeData->setFullRateDistance(currentFullRateDistance);
-            nodeData->resetNumFramesSinceFRDAdjustment();
-        } else if (currentFullRateDistance < nodeData->getMaxAvatarDistance()
-                   && avatarDataRateLastSecond < _maxKbpsPerNode * FRD_ADJUSTMENT_ACCEPTABLE_RATIO) {
-            // we are constrained AND we've recovered to below the acceptable ratio
-            // lets adjust the full rate distance to target a bandwidth in the middle of the hyterisis gap
-            currentFullRateDistance *= (_maxKbpsPerNode * HYSTERISIS_MIDDLE_PERCENTAGE) / avatarDataRateLastSecond;
-
-            nodeData->setFullRateDistance(currentFullRateDistance);
-            nodeData->resetNumFramesSinceFRDAdjustment();
-        }
-    } else {
-        nodeData->incrementNumFramesSinceFRDAdjustment();
-    }
-
     // setup a PacketList for the avatarPackets
     auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
 
-    // this is an AGENT we have received head data from
-    // send back a packet with other active node data to this node
+    // Define the minimum bubble size
+    static const glm::vec3 minBubbleSize = glm::vec3(0.3f, 1.3f, 0.3f);
+    // Define the scale of the box for the current node
+    glm::vec3 nodeBoxScale = (nodeData->getPosition() - nodeData->getGlobalBoundingBoxCorner()) * 2.0f;
+    // Set up the bounding box for the current node
+    AABox nodeBox(nodeData->getGlobalBoundingBoxCorner(), nodeBoxScale);
+    // Clamp the size of the bounding box to a minimum scale
+    if (glm::any(glm::lessThan(nodeBoxScale, minBubbleSize))) {
+        nodeBox.setScaleStayCentered(minBubbleSize);
+    }
+    // Quadruple the scale of both bounding boxes
+    nodeBox.embiggen(4.0f);
+
+    // setup list of AvatarData as well as maps to map betweeen the AvatarData and the original nodes
+    // for calling the AvatarData::sortAvatars() function and getting our sorted list of client nodes
+    QList<AvatarSharedPointer> avatarList;
+    std::unordered_map<AvatarSharedPointer, SharedNodePointer> avatarDataToNodes;
+
+    int listItem = 0;
     std::for_each(_begin, _end, [&](const SharedNodePointer& otherNode) {
-        bool shouldConsider = false;
-        quint64 startIgnoreCalculation = usecTimestampNow();
-
-        // make sure we have data for this avatar, that it isn't the same node,
-        // and isn't an avatar that the viewing node has ignored
-        // or that has ignored the viewing node
-        if (!otherNode->getLinkedData()
-            || otherNode->getUUID() == node->getUUID()
-            || (node->isIgnoringNodeWithID(otherNode->getUUID()) && !getsIgnoredByMe)
-            || (otherNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
-
-            shouldConsider = false;
-
-        } else {
-            const AvatarMixerClientData* otherData = reinterpret_cast<AvatarMixerClientData*>(otherNode->getLinkedData());
-
-            shouldConsider = true; // assume we will consider...
-
-            // Check to see if the space bubble is enabled
-            if (node->isIgnoreRadiusEnabled() || otherNode->isIgnoreRadiusEnabled()) {
-                // Define the minimum bubble size
-                static const glm::vec3 minBubbleSize = glm::vec3(0.3f, 1.3f, 0.3f);
-                // Define the scale of the box for the current node
-                glm::vec3 nodeBoxScale = (nodeData->getPosition() - nodeData->getGlobalBoundingBoxCorner()) * 2.0f;
-                // Define the scale of the box for the current other node
-                glm::vec3 otherNodeBoxScale = (otherData->getPosition() - otherData->getGlobalBoundingBoxCorner()) * 2.0f;
-
-                // Set up the bounding box for the current node
-                AABox nodeBox(nodeData->getGlobalBoundingBoxCorner(), nodeBoxScale);
-                // Clamp the size of the bounding box to a minimum scale
-                if (glm::any(glm::lessThan(nodeBoxScale, minBubbleSize))) {
-                    nodeBox.setScaleStayCentered(minBubbleSize);
-                }
-                // Set up the bounding box for the current other node
-                AABox otherNodeBox(otherData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
-                // Clamp the size of the bounding box to a minimum scale
-                if (glm::any(glm::lessThan(otherNodeBoxScale, minBubbleSize))) {
-                    otherNodeBox.setScaleStayCentered(minBubbleSize);
-                }
-                // Quadruple the scale of both bounding boxes
-                nodeBox.embiggen(4.0f);
-                otherNodeBox.embiggen(4.0f);
-
-                // Perform the collision check between the two bounding boxes
-                if (nodeBox.touches(otherNodeBox)) {
-                    nodeData->ignoreOther(node, otherNode);
-                    shouldConsider = getsAnyIgnored;
-                }
-            }
-            // Not close enough to ignore
-            if (shouldConsider) {
-                nodeData->removeFromRadiusIgnoringSet(node, otherNode->getUUID());
-            }
-
-            quint64 endIgnoreCalculation = usecTimestampNow();
-            _stats.ignoreCalculationElapsedTime += (endIgnoreCalculation - startIgnoreCalculation);
-        }
-
-        if (shouldConsider) {
-            quint64 startAvatarDataPacking = usecTimestampNow();
-
-            ++numOtherAvatars;
-
-            const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
-
-            // make sure we send out identity packets to and from new arrivals.
-            bool forceSend = !nodeData->checkAndSetHasReceivedFirstPacketsFrom(otherNode->getUUID());
-
-            // FIXME - this clause seems suspicious "... || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp ..."
-            if (otherNodeData->getIdentityChangeTimestamp().time_since_epoch().count() > 0
-                && (forceSend
-                    || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp
-                    || distribution(generator) < IDENTITY_SEND_PROBABILITY)) {
-
-                sendIdentityPacket(otherNodeData, node);
-            }
-
-            const AvatarData* otherAvatar = otherNodeData->getConstAvatarData();
-            // Decide whether to send this avatar's data based on it's distance from us
-
-            // The full rate distance is the distance at which EVERY update will be sent for this avatar
-            // at twice the full rate distance, there will be a 50% chance of sending this avatar's update
-            glm::vec3 otherPosition = otherAvatar->getClientGlobalPosition();
-            float distanceToAvatar = glm::length(myPosition - otherPosition);
-
-            // potentially update the max full rate distance for this frame
-            maxAvatarDistanceThisFrame = std::max(maxAvatarDistanceThisFrame, distanceToAvatar);
-
-            // This code handles the random dropping of avatar data based on the ratio of
-            // "getFullRateDistance" to actual distance.
-            //
-            // NOTE: If the recieving node is in "PAL mode" then it's asked to get things even that
-            // are out of view, this also appears to disable this random distribution.
-            if (distanceToAvatar != 0.0f
-                && !getsOutOfView
-                && distribution(generator) > (nodeData->getFullRateDistance() / distanceToAvatar)) {
-
-                quint64 endAvatarDataPacking = usecTimestampNow();
-                _stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
-                shouldConsider = false;
-            }
-
-            if (shouldConsider) {
-                AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(otherNode->getUUID());
-                AvatarDataSequenceNumber lastSeqFromSender = otherNodeData->getLastReceivedSequenceNumber();
-
-                // FIXME - This code does appear to be working. But it seems brittle.
-                //         It supports determining if the frame of data for this "other"
-                //         avatar has already been sent to the reciever. This has been
-                //         verified to work on a desktop display that renders at 60hz and
-                //         therefore sends to mixer at 30hz. Each second you'd expect to
-                //         have 15 (45hz-30hz) duplicate frames. In this case, the stat
-                //         avg_other_av_skips_per_second does report 15.
-                //
-                // make sure we haven't already sent this data from this sender to this receiver
-                // or that somehow we haven't sent
-                if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
-                    ++numAvatarsHeldBack;
-
-                    quint64 endAvatarDataPacking = usecTimestampNow();
-                    _stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
-                    shouldConsider = false;
-                } else if (lastSeqFromSender - lastSeqToReceiver > 1) {
-                    // this is a skip - we still send the packet but capture the presence of the skip so we see it happening
-                    ++numAvatarsWithSkippedFrames;
-                }
-
-                // we're going to send this avatar
-                if (shouldConsider) {
-
-                    // determine if avatar is in view, to determine how much data to include...
-                    glm::vec3 otherNodeBoxScale = (otherPosition - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
-                    AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
-                    bool isInView = nodeData->otherAvatarInView(otherNodeBox);
-
-                    // this throttles the extra data to only be sent every Nth message
-                    if (!isInView && !getsOutOfView && (lastSeqToReceiver % EXTRA_AVATAR_DATA_FRAME_RATIO > 0)) {
-                        quint64 endAvatarDataPacking = usecTimestampNow();
-
-                        _stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
-                        shouldConsider = false;
-                    }
-
-                    if (shouldConsider) {
-                        // start a new segment in the PacketList for this avatar
-                        avatarPacketList->startSegment();
-
-                        AvatarData::AvatarDataDetail detail;
-                        if (!isInView && !getsOutOfView) {
-                            detail = AvatarData::MinimumData;
-                            nodeData->incrementAvatarOutOfView();
-                        } else {
-                            detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
-                                ? AvatarData::SendAllData : AvatarData::CullSmallData;
-                            nodeData->incrementAvatarInView();
-                        }
-
-                        {
-                            bool includeThisAvatar = true;
-                            auto lastEncodeForOther = nodeData->getLastOtherAvatarEncodeTime(otherNode->getUUID());
-                            QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());
-                            bool distanceAdjust = true;
-                            glm::vec3 viewerPosition = myPosition;
-                            AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
-                            bool dropFaceTracking = false;
-
-                            quint64 start = usecTimestampNow();
-                            QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
-                                hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
-                            quint64 end = usecTimestampNow();
-                            _stats.toByteArrayElapsedTime += (end - start);
-
-                            static const int MAX_ALLOWED_AVATAR_DATA = (1400 - NUM_BYTES_RFC4122_UUID);
-                            if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
-                                qCWarning(avatars) << "otherAvatar.toByteArray() resulted in very large buffer:" << bytes.size() << "... attempt to drop facial data";
-
-                                dropFaceTracking = true; // first try dropping the facial data
-                                bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
-                                    hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
-
-                                if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
-                                    qCWarning(avatars) << "otherAvatar.toByteArray() without facial data resulted in very large buffer:" << bytes.size() << "... reduce to MinimumData";
-                                    bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
-                                        hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
-                                }
-
-                                if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
-                                    qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
-                                    includeThisAvatar = false;
-                                }
-                            }
-
-                            if (includeThisAvatar) {
-                                numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
-                                numAvatarDataBytes += avatarPacketList->write(bytes);
-                                _stats.numOthersIncluded++;
-
-                                // increment the number of avatars sent to this reciever
-                                nodeData->incrementNumAvatarsSentLastFrame();
-
-                                // set the last sent sequence number for this sender on the receiver
-                                nodeData->setLastBroadcastSequenceNumber(otherNode->getUUID(),
-                                    otherNodeData->getLastReceivedSequenceNumber());
-                            }
-                        }
-
-                        avatarPacketList->endSegment();
-
-                        quint64 endAvatarDataPacking = usecTimestampNow();
-                        _stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
-                    }
-                }
-            }
+        const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
+
+        // theoretically it's possible for a Node to be in the NodeList (and therefore end up here),
+        // but not have yet sent data that's linked to the node. Check for that case and don't
+        // consider those nodes.
+        if (otherNodeData) {
+            listItem++;
+            AvatarSharedPointer otherAvatar = otherNodeData->getAvatarSharedPointer();
+            avatarList << otherAvatar;
+            avatarDataToNodes[otherAvatar] = otherNode;
         }
     });
 
+    AvatarSharedPointer thisAvatar = nodeData->getAvatarSharedPointer();
+    ViewFrustum cameraView = nodeData->getViewFrustom();
+    std::priority_queue<AvatarPriority> sortedAvatars = AvatarData::sortAvatars(
+        avatarList, cameraView,
+
+        [&](AvatarSharedPointer avatar)->uint64_t{
+            auto avatarNode = avatarDataToNodes[avatar];
+            assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map
+            return nodeData->getLastBroadcastTime(avatarNode->getUUID());
+        },
+
+        [&](AvatarSharedPointer avatar)->float{
+            glm::vec3 nodeBoxHalfScale = (avatar->getPosition() - avatar->getGlobalBoundingBoxCorner());
+            return glm::max(nodeBoxHalfScale.x, glm::max(nodeBoxHalfScale.y, nodeBoxHalfScale.z));
+        },
+
+        [&](AvatarSharedPointer avatar)->bool{
+            if (avatar == thisAvatar) {
+                return true; // ignore ourselves...
+            }
+
+            bool shouldIgnore = false;
+
+            // We will also ignore other nodes for a couple of different reasons:
+            //   1) ignore bubbles and ignore specific node
+            //   2) the node hasn't really updated it's frame data recently, this can
+            //      happen if for example the avatar is connected on a desktop and sending
+            //      updates at ~30hz. So every 3 frames we skip a frame.
+            auto avatarNode = avatarDataToNodes[avatar];
+
+            assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map
+
+            const AvatarMixerClientData* avatarNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
+            assert(avatarNodeData); // we can't have gotten here without avatarNode having valid data
+            quint64 startIgnoreCalculation = usecTimestampNow();
+
+            // make sure we have data for this avatar, that it isn't the same node,
+            // and isn't an avatar that the viewing node has ignored
+            // or that has ignored the viewing node
+            if (!avatarNode->getLinkedData()
+                || avatarNode->getUUID() == node->getUUID()
+                || (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !getsIgnoredByMe)
+                || (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
+                shouldIgnore = true;
+            } else {
+
+                // Check to see if the space bubble is enabled
+                if (node->isIgnoreRadiusEnabled() || avatarNode->isIgnoreRadiusEnabled()) {
+
+                    // Define the scale of the box for the current other node
+                    glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
+                    // Set up the bounding box for the current other node
+                    AABox otherNodeBox(avatarNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
+                    // Clamp the size of the bounding box to a minimum scale
+                    if (glm::any(glm::lessThan(otherNodeBoxScale, minBubbleSize))) {
+                        otherNodeBox.setScaleStayCentered(minBubbleSize);
+                    }
+                    // Quadruple the scale of both bounding boxes
+                    otherNodeBox.embiggen(4.0f);
+
+                    // Perform the collision check between the two bounding boxes
+                    if (nodeBox.touches(otherNodeBox)) {
+                        nodeData->ignoreOther(node, avatarNode);
+                        shouldIgnore = !getsAnyIgnored;
+                    }
+                }
+                // Not close enough to ignore
+                if (!shouldIgnore) {
+                    nodeData->removeFromRadiusIgnoringSet(node, avatarNode->getUUID());
+                }
+            }
+            quint64 endIgnoreCalculation = usecTimestampNow();
+            _stats.ignoreCalculationElapsedTime += (endIgnoreCalculation - startIgnoreCalculation);
+
+            if (!shouldIgnore) {
+                AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
+                AvatarDataSequenceNumber lastSeqFromSender = avatarNodeData->getLastReceivedSequenceNumber();
+
+                // FIXME - This code does appear to be working. But it seems brittle.
+                //         It supports determining if the frame of data for this "other"
+                //         avatar has already been sent to the reciever. This has been
+                //         verified to work on a desktop display that renders at 60hz and
+                //         therefore sends to mixer at 30hz. Each second you'd expect to
+                //         have 15 (45hz-30hz) duplicate frames. In this case, the stat
+                //         avg_other_av_skips_per_second does report 15.
+                //
+                // make sure we haven't already sent this data from this sender to this receiver
+                // or that somehow we haven't sent
+                if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
+                    ++numAvatarsHeldBack;
+                    shouldIgnore = true;
+                } else if (lastSeqFromSender - lastSeqToReceiver > 1) {
+                    // this is a skip - we still send the packet but capture the presence of the skip so we see it happening
+                    ++numAvatarsWithSkippedFrames;
+                }
+            }
+            return shouldIgnore;
+        });
+
+    // loop through our sorted avatars and allocate our bandwidth to them accordingly
+    int avatarRank = 0;
+
+    // this is overly conservative, because it includes some avatars we might not consider
+    int remainingAvatars = (int)sortedAvatars.size();
+
+    while (!sortedAvatars.empty()) {
+        AvatarPriority sortData = sortedAvatars.top();
+        sortedAvatars.pop();
+        const auto& avatarData = sortData.avatar;
+        avatarRank++;
+        remainingAvatars--;
+
+        auto otherNode = avatarDataToNodes[avatarData];
+        assert(otherNode); // we can't have gotten here without the avatarData being a valid key in the map
+
+        // NOTE: Here's where we determine if we are over budget and drop to bare minimum data
+        int minimRemainingAvatarBytes = minimumBytesPerAvatar * remainingAvatars;
+        bool overBudget = (identityBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes) > maxAvatarBytesPerFrame;
+
+        quint64 startAvatarDataPacking = usecTimestampNow();
+
+        ++numOtherAvatars;
+
+        const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
+
+        // make sure we send out identity packets to and from new arrivals.
+        bool forceSend = !nodeData->checkAndSetHasReceivedFirstPacketsFrom(otherNode->getUUID());
+
+        // FIXME - this clause seems suspicious "... || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp ..."
+        if (!overBudget
+            && otherNodeData->getIdentityChangeTimestamp().time_since_epoch().count() > 0
+            && (forceSend
+                || otherNodeData->getIdentityChangeTimestamp() > _lastFrameTimestamp
+                || distribution(generator) < IDENTITY_SEND_PROBABILITY)) {
+
+            identityBytesSent += sendIdentityPacket(otherNodeData, node);
+        }
+
+        const AvatarData* otherAvatar = otherNodeData->getConstAvatarData();
+        glm::vec3 otherPosition = otherAvatar->getClientGlobalPosition();
+
+        // determine if avatar is in view, to determine how much data to include...
+        glm::vec3 otherNodeBoxScale = (otherPosition - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
+        AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
+        bool isInView = nodeData->otherAvatarInView(otherNodeBox);
+
+        // start a new segment in the PacketList for this avatar
+        avatarPacketList->startSegment();
+
+        AvatarData::AvatarDataDetail detail;
+
+        if (overBudget) {
+            overBudgetAvatars++;
+            _stats.overBudgetAvatars++;
+            detail = AvatarData::NoData;
+        } else if (!isInView && !getsOutOfView) {
+            detail = AvatarData::NoData;
+            nodeData->incrementAvatarOutOfView();
+        } else {
+            detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
+                ? AvatarData::SendAllData : AvatarData::CullSmallData;
+            nodeData->incrementAvatarInView();
+        }
+
+        bool includeThisAvatar = true;
+        auto lastEncodeForOther = nodeData->getLastOtherAvatarEncodeTime(otherNode->getUUID());
+        QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());
+        bool distanceAdjust = true;
+        glm::vec3 viewerPosition = myPosition;
+        AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
+        bool dropFaceTracking = false;
+
+        quint64 start = usecTimestampNow();
+        QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
+            hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
+        quint64 end = usecTimestampNow();
+        _stats.toByteArrayElapsedTime += (end - start);
+
+        static const int MAX_ALLOWED_AVATAR_DATA = (1400 - NUM_BYTES_RFC4122_UUID);
+        if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
+            qCWarning(avatars) << "otherAvatar.toByteArray() resulted in very large buffer:" << bytes.size() << "... attempt to drop facial data";
+
+            dropFaceTracking = true; // first try dropping the facial data
+            bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
+                hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
+
+            if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
+                qCWarning(avatars) << "otherAvatar.toByteArray() without facial data resulted in very large buffer:" << bytes.size() << "... reduce to MinimumData";
+                bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
+                    hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
+            }
+
+            if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
+                qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
+                includeThisAvatar = false;
+            }
+        }
+
+        if (includeThisAvatar) {
+            numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
+            numAvatarDataBytes += avatarPacketList->write(bytes);
+            _stats.numOthersIncluded++;
+
+            // increment the number of avatars sent to this reciever
+            nodeData->incrementNumAvatarsSentLastFrame();
+
+            // set the last sent sequence number for this sender on the receiver
+            nodeData->setLastBroadcastSequenceNumber(otherNode->getUUID(),
+                otherNodeData->getLastReceivedSequenceNumber());
+
+            // remember the last time we sent details about this other node to the receiver
+            nodeData->setLastBroadcastTime(otherNode->getUUID(), start);
+        }
+
+        avatarPacketList->endSegment();
+
+        quint64 endAvatarDataPacking = usecTimestampNow();
+        _stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
+    };
+
     quint64 startPacketSending = usecTimestampNow();
 
     // close the current packet so that we're always sending something
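The rewritten loop above sorts the other avatars once per frame (via the three callbacks passed to AvatarData::sortAvatars: last broadcast time, bounding radius, and an "ignore" predicate) and then walks them in priority order, charging identity and avatar bytes against the frame budget. A condensed restatement of the budget gate introduced in this hunk (no new behavior, just the same check pulled out as a helper for readability):

    // Returns true when the bytes already spent this frame, plus the minimum cost reserved
    // for every avatar still waiting in the queue, exceed the per-frame byte budget.
    static bool frameIsOverBudget(int identityBytesSent, int numAvatarDataBytes,
                                  int minimumBytesPerAvatar, int remainingAvatars,
                                  int maxAvatarBytesPerFrame) {
        int reservedBytes = minimumBytesPerAvatar * remainingAvatars;
        return (identityBytesSent + numAvatarDataBytes + reservedBytes) > maxAvatarBytesPerFrame;
    }

Over-budget avatars are not skipped outright: they are still written to the packet list, but with AvatarData::NoData, and both the per-frame overBudgetAvatars counter and the _stats.overBudgetAvatars total are incremented so the condition shows up in the mixer stats added earlier in this change.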
@@ -426,13 +422,6 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
     nodeData->recordNumOtherAvatarStarves(numAvatarsHeldBack);
     nodeData->recordNumOtherAvatarSkips(numAvatarsWithSkippedFrames);
 
-    if (numOtherAvatars == 0) {
-        // update the full rate distance to FLOAT_MAX since we didn't have any other avatars to send
-        nodeData->setMaxAvatarDistance(FLT_MAX);
-    } else {
-        nodeData->setMaxAvatarDistance(maxAvatarDistanceThisFrame);
-    }
-
     quint64 endPacketSending = usecTimestampNow();
     _stats.packetSendingElapsedTime += (endPacketSending - startPacketSending);
 }

@@ -25,6 +25,8 @@ public:
     int numBytesSent { 0 };
     int numIdentityPackets { 0 };
     int numOthersIncluded { 0 };
+    int overBudgetAvatars { 0 };
+
     quint64 ignoreCalculationElapsedTime { 0 };
     quint64 avatarDataPackingElapsedTime { 0 };
     quint64 packetSendingElapsedTime { 0 };

@@ -43,6 +45,8 @@ public:
         numBytesSent = 0;
         numIdentityPackets = 0;
         numOthersIncluded = 0;
+        overBudgetAvatars = 0;
+
         ignoreCalculationElapsedTime = 0;
         avatarDataPackingElapsedTime = 0;
         packetSendingElapsedTime = 0;

@@ -60,6 +64,8 @@ public:
         numBytesSent += rhs.numBytesSent;
         numIdentityPackets += rhs.numIdentityPackets;
         numOthersIncluded += rhs.numOthersIncluded;
+        overBudgetAvatars += rhs.overBudgetAvatars;
+
         ignoreCalculationElapsedTime += rhs.ignoreCalculationElapsedTime;
         avatarDataPackingElapsedTime += rhs.avatarDataPackingElapsedTime;
         packetSendingElapsedTime += rhs.packetSendingElapsedTime;

@@ -85,7 +91,7 @@ public:
     void harvestStats(AvatarMixerSlaveStats& stats);
 
 private:
-    void sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode);
+    int sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode);
 
     // frame state
     ConstIter _begin;

@@ -107,11 +107,11 @@ Item {
         }
         StatText {
             visible: root.expanded
-            text: "Fully Simulated Avatars: " + root.fullySimulatedAvatarCount
+            text: "Avatars Updated: " + root.updatedAvatarCount
         }
         StatText {
             visible: root.expanded
-            text: "Partially Simulated Avatars: " + root.partiallySimulatedAvatarCount
+            text: "Avatars NOT Updated: " + root.notUpdatedAvatarCount
         }
     }
 }

@@ -334,11 +334,6 @@ void Avatar::updateAvatarEntities() {
     setAvatarEntityDataChanged(false);
 }
 
-bool Avatar::shouldDie() const {
-    const qint64 AVATAR_SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND;
-    return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS;
-}
-
 void Avatar::simulate(float deltaTime, bool inView) {
     PROFILE_RANGE(simulation, "simulate");
 

@@ -178,12 +178,13 @@ public:
     uint64_t getLastRenderUpdateTime() const { return _lastRenderUpdateTime; }
     void setLastRenderUpdateTime(uint64_t time) { _lastRenderUpdateTime = time; }
 
-    bool shouldDie() const;
     void animateScaleChanges(float deltaTime);
     void setTargetScale(float targetScale) override;
 
     Q_INVOKABLE float getSimulationRate(const QString& rateName = QString("")) const;
 
+    bool hasNewJointData() const { return _hasNewJointData; }
+
 public slots:
 
     // FIXME - these should be migrated to use Pose data instead

@@ -148,15 +148,6 @@ float AvatarManager::getAvatarSimulationRate(const QUuid& sessionID, const QStri
 }
 
 
-class AvatarPriority {
-public:
-    AvatarPriority(AvatarSharedPointer a, float p) : avatar(a), priority(p) {}
-    AvatarSharedPointer avatar;
-    float priority;
-    bool operator<(const AvatarPriority& other) const { return priority < other.priority; }
-};
-
 void AvatarManager::updateOtherAvatars(float deltaTime) {
     // lock the hash for read to check the size
     QReadLocker lock(&_hashLock);
@ -172,59 +163,35 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
     QList<AvatarSharedPointer> avatarList = avatarMap.values();
     ViewFrustum cameraView;
     qApp->copyDisplayViewFrustum(cameraView);
-    glm::vec3 frustumCenter = cameraView.getPosition();
 
-    const float OUT_OF_VIEW_PENALTY = -10.0;
+    std::priority_queue<AvatarPriority> sortedAvatars = AvatarData::sortAvatars(
+        avatarList, cameraView,
 
-    std::priority_queue<AvatarPriority> sortedAvatars;
-    {
-        PROFILE_RANGE(simulation, "sort");
-        for (int32_t i = 0; i < avatarList.size(); ++i) {
-            const auto& avatar = std::static_pointer_cast<Avatar>(avatarList.at(i));
-            if (avatar == _myAvatar || !avatar->isInitialized()) {
+        [](AvatarSharedPointer avatar)->uint64_t{
+            return std::static_pointer_cast<Avatar>(avatar)->getLastRenderUpdateTime();
+        },
+
+        [](AvatarSharedPointer avatar)->float{
+            return std::static_pointer_cast<Avatar>(avatar)->getBoundingRadius();
+        },
+
+        [this](AvatarSharedPointer avatar)->bool{
+            const auto& castedAvatar = std::static_pointer_cast<Avatar>(avatar);
+            if (castedAvatar == _myAvatar || !castedAvatar->isInitialized()) {
                 // DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
                 // DO NOT update or fade out uninitialized Avatars
-                continue;
+                return true; // ignore it
             }
             if (avatar->shouldDie()) {
                 removeAvatar(avatar->getID());
-                continue;
+                return true; // ignore it
             }
             if (avatar->isDead()) {
-                continue;
+                return true; // ignore it
            }
 
-            // priority = weighted linear combination of:
-            // (a) apparentSize
-            // (b) proximity to center of view
-            // (c) time since last update
-            // (d) TIME_PENALTY to help recently updated entries sort toward back
-            glm::vec3 avatarPosition = avatar->getPosition();
-            glm::vec3 offset = avatarPosition - frustumCenter;
-            float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
-            float radius = avatar->getBoundingRadius();
-            const glm::vec3& forward = cameraView.getDirection();
-            float apparentSize = 2.0f * radius / distance;
-            float cosineAngle = glm::length(glm::dot(offset, forward) * forward) / distance;
-            float age = (float)(startTime - avatar->getLastRenderUpdateTime()) / (float)(USECS_PER_SECOND);
-
-            // NOTE: we are adding values of different units to get a single measure of "priority".
-            // Thus we multiply each component by a conversion "weight" that scales its units relative to the others.
-            // These weights are pure magic tuning and should be hard coded in the relation below,
-            // but are currently exposed for anyone who would like to explore fine tuning:
-            float priority = _avatarSortCoefficientSize * apparentSize
-                + _avatarSortCoefficientCenter * cosineAngle
-                + _avatarSortCoefficientAge * age;
-
-            // decrement priority of avatars outside keyhole
-            if (distance > cameraView.getCenterRadius()) {
-                if (!cameraView.sphereIntersectsFrustum(avatarPosition, radius)) {
-                    priority += OUT_OF_VIEW_PENALTY;
-                }
-            }
-            sortedAvatars.push(AvatarPriority(avatar, priority));
-        }
-    }
+
+            return false;
+        });
 
     render::PendingChanges pendingChanges;
     const uint64_t RENDER_UPDATE_BUDGET = 1500; // usec

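The call above hands AvatarData::sortAvatars three callbacks so the shared sorting code never has to know the concrete Avatar type: one reports when a candidate was last updated, one its bounding radius, and one whether it should be skipped entirely. Below is a minimal, self-contained sketch of that callback-driven pattern; the Candidate/Prioritized/sortCandidates names and the toy weights are illustrative assumptions, not part of this change.

    #include <cstdint>
    #include <functional>
    #include <iostream>
    #include <memory>
    #include <queue>
    #include <vector>

    // Illustrative stand-in for an avatar: just the fields the hooks below read.
    struct Candidate {
        int id;
        uint64_t lastUpdateUsecs;
        float radius;
    };
    using CandidatePointer = std::shared_ptr<Candidate>;

    // Same shape as AvatarPriority: a pointer plus a float key, ordered by the key.
    struct Prioritized {
        CandidatePointer candidate;
        float priority;
        bool operator<(const Prioritized& other) const { return priority < other.priority; }
    };

    // The sorter only touches candidates through the three std::function hooks,
    // mirroring how sortAvatars() takes getLastUpdated / getBoundingRadius / shouldIgnore.
    std::priority_queue<Prioritized> sortCandidates(
            const std::vector<CandidatePointer>& candidates,
            uint64_t nowUsecs,
            std::function<uint64_t(CandidatePointer)> getLastUpdated,
            std::function<float(CandidatePointer)> getBoundingRadius,
            std::function<bool(CandidatePointer)> shouldIgnore) {
        std::priority_queue<Prioritized> sorted;
        for (const auto& candidate : candidates) {
            if (shouldIgnore(candidate)) {
                continue;
            }
            // toy weights: favor large candidates that have waited the longest
            float age = (float)(nowUsecs - getLastUpdated(candidate)) * 1.0e-6f;
            float priority = 0.5f * getBoundingRadius(candidate) + 1.0f * age;
            sorted.push(Prioritized{ candidate, priority });
        }
        return sorted;
    }

    int main() {
        std::vector<CandidatePointer> candidates = {
            std::make_shared<Candidate>(Candidate{ 1, 900000, 0.5f }),
            std::make_shared<Candidate>(Candidate{ 2, 100000, 0.5f }),   // stale
            std::make_shared<Candidate>(Candidate{ 3, 900000, 4.0f }),   // large
        };
        auto sorted = sortCandidates(candidates, 1000000,
            [](CandidatePointer c) { return c->lastUpdateUsecs; },
            [](CandidatePointer c) { return c->radius; },
            [](CandidatePointer) { return false; });
        // pops candidate 3 (large), then 2 (stale), then 1
        while (!sorted.empty()) {
            std::cout << "candidate " << sorted.top().candidate->id
                      << " priority " << sorted.top().priority << "\n";
            sorted.pop();
        }
        return 0;
    }
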
@ -232,8 +199,8 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
     uint64_t renderExpiry = startTime + RENDER_UPDATE_BUDGET;
     uint64_t maxExpiry = startTime + MAX_UPDATE_BUDGET;
 
-    int fullySimulatedAvatars = 0;
-    int partiallySimulatedAvatars = 0;
+    int numAvatarsUpdated = 0;
+    int numAVatarsNotUpdated = 0;
     while (!sortedAvatars.empty()) {
         const AvatarPriority& sortData = sortedAvatars.top();
         const auto& avatar = std::static_pointer_cast<Avatar>(sortData.avatar);

@ -254,33 +221,57 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
         }
         avatar->animateScaleChanges(deltaTime);
 
+        const float OUT_OF_VIEW_THRESHOLD = 0.5f * AvatarData::OUT_OF_VIEW_PENALTY;
         uint64_t now = usecTimestampNow();
         if (now < renderExpiry) {
             // we're within budget
-            const float OUT_OF_VIEW_THRESHOLD = 0.5f * OUT_OF_VIEW_PENALTY;
             bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
+            if (inView && avatar->hasNewJointData()) {
+                numAvatarsUpdated++;
+            }
             avatar->simulate(deltaTime, inView);
             avatar->updateRenderItem(pendingChanges);
             avatar->setLastRenderUpdateTime(startTime);
-            fullySimulatedAvatars++;
         } else if (now < maxExpiry) {
             // we've spent most of our time budget, but we still simulate() the avatar as it if were out of view
             // --> some avatars may freeze until their priority trickles up
-            const bool inView = false;
-            avatar->simulate(deltaTime, inView);
-            partiallySimulatedAvatars++;
+            bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
+            if (inView && avatar->hasNewJointData()) {
+                numAVatarsNotUpdated++;
+            }
+            avatar->simulate(deltaTime, false);
         } else {
             // we've spent ALL of our time budget --> bail on the rest of the avatar updates
+            // --> more avatars may freeze until their priority trickles up
             // --> some scale or fade animations may glitch
             // --> some avatar velocity measurements may be a little off
+
+            // HACK: no time simulate, but we will take the time to count how many were tragically missed
+            bool inView = sortData.priority > OUT_OF_VIEW_THRESHOLD;
+            if (!inView) {
+                break;
+            }
+            if (inView && avatar->hasNewJointData()) {
+                numAVatarsNotUpdated++;
+            }
+            sortedAvatars.pop();
+            while (inView && !sortedAvatars.empty()) {
+                const AvatarPriority& newSortData = sortedAvatars.top();
+                const auto& newAvatar = std::static_pointer_cast<Avatar>(newSortData.avatar);
+                inView = newSortData.priority > OUT_OF_VIEW_THRESHOLD;
+                if (inView && newAvatar->hasNewJointData()) {
+                    numAVatarsNotUpdated++;
+                }
+                sortedAvatars.pop();
+            }
             break;
         }
         sortedAvatars.pop();
     }
 
     _avatarSimulationTime = (float)(usecTimestampNow() - startTime) / (float)USECS_PER_MSEC;
-    _fullySimulatedAvatars = fullySimulatedAvatars;
-    _partiallySimulatedAvatars = partiallySimulatedAvatars;
+    _numAvatarsUpdated = numAvatarsUpdated;
+    _numAvatarsNotUpdated = numAVatarsNotUpdated;
     qApp->getMain3DScene()->enqueuePendingChanges(pendingChanges);
 
     simulateAvatarFades(deltaTime);

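The loop above drains the priority queue against two deadlines: full simulate/render updates while the render budget holds, degraded out-of-view updates until the maximum budget is spent, then an early bail that only counts what was missed. A compact sketch of the same two-tier budget pattern follows; the Job type, the drainWithBudget helper, and the 2000 usec figure are illustrative assumptions (the engine's MAX_UPDATE_BUDGET is defined elsewhere), not the actual implementation.

    #include <chrono>
    #include <iostream>
    #include <queue>

    // Hypothetical work item: larger priority means "service me first".
    struct Job {
        int id;
        float priority;
        bool operator<(const Job& other) const { return priority < other.priority; }
    };

    // Drain the queue highest-priority first: full work while the first budget holds,
    // cheap work until the second budget expires, then bail out entirely.
    void drainWithBudget(std::priority_queue<Job>& jobs) {
        using Clock = std::chrono::steady_clock;
        const auto start = Clock::now();
        const auto fullBudget = std::chrono::microseconds(1500);  // mirrors RENDER_UPDATE_BUDGET
        const auto maxBudget = std::chrono::microseconds(2000);   // stand-in for MAX_UPDATE_BUDGET

        while (!jobs.empty()) {
            Job job = jobs.top();
            jobs.pop();
            auto elapsed = Clock::now() - start;
            if (elapsed < fullBudget) {
                std::cout << "full update for job " << job.id << "\n";
            } else if (elapsed < maxBudget) {
                std::cout << "cheap update for job " << job.id << "\n";
            } else {
                std::cout << "out of time, skipping job " << job.id << " and the rest\n";
                break;
            }
        }
    }

    int main() {
        std::priority_queue<Job> jobs;
        for (int i = 0; i < 5; ++i) {
            jobs.push({ i, (float)i });
        }
        drainWithBudget(jobs);  // services jobs in order 4, 3, 2, 1, 0 while time remains
        return 0;
    }
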
@ -598,25 +589,40 @@ RayToAvatarIntersectionResult AvatarManager::findRayIntersection(const PickRay&
 // HACK
 float AvatarManager::getAvatarSortCoefficient(const QString& name) {
     if (name == "size") {
-        return _avatarSortCoefficientSize;
+        return AvatarData::_avatarSortCoefficientSize;
     } else if (name == "center") {
-        return _avatarSortCoefficientCenter;
+        return AvatarData::_avatarSortCoefficientCenter;
     } else if (name == "age") {
-        return _avatarSortCoefficientAge;
+        return AvatarData::_avatarSortCoefficientAge;
     }
     return 0.0f;
 }
 
 // HACK
 void AvatarManager::setAvatarSortCoefficient(const QString& name, const QScriptValue& value) {
+    bool somethingChanged = false;
     if (value.isNumber()) {
         float numericalValue = (float)value.toNumber();
         if (name == "size") {
-            _avatarSortCoefficientSize = numericalValue;
+            AvatarData::_avatarSortCoefficientSize = numericalValue;
+            somethingChanged = true;
         } else if (name == "center") {
-            _avatarSortCoefficientCenter = numericalValue;
+            AvatarData::_avatarSortCoefficientCenter = numericalValue;
+            somethingChanged = true;
         } else if (name == "age") {
-            _avatarSortCoefficientAge = numericalValue;
+            AvatarData::_avatarSortCoefficientAge = numericalValue;
+            somethingChanged = true;
         }
     }
+    if (somethingChanged) {
+        size_t packetSize = sizeof(AvatarData::_avatarSortCoefficientSize) +
+                            sizeof(AvatarData::_avatarSortCoefficientCenter) +
+                            sizeof(AvatarData::_avatarSortCoefficientAge);
+
+        auto packet = NLPacket::create(PacketType::AdjustAvatarSorting, packetSize);
+        packet->writePrimitive(AvatarData::_avatarSortCoefficientSize);
+        packet->writePrimitive(AvatarData::_avatarSortCoefficientCenter);
+        packet->writePrimitive(AvatarData::_avatarSortCoefficientAge);
+        DependencyManager::get<NodeList>()->broadcastToNodes(std::move(packet), NodeSet() << NodeType::AvatarMixer);
+    }
 }

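When any coefficient actually changes, the new values are written into an AdjustAvatarSorting packet as three raw floats, size then center then age, and broadcast to avatar mixers; whoever reads the packet has to consume the floats in that same order. The sketch below shows just that three-float body layout with a plain byte buffer; packCoefficients/unpackCoefficients are hypothetical helpers, not the NLPacket API.

    #include <cstdint>
    #include <cstring>
    #include <iostream>
    #include <vector>

    // Pack the three tuning coefficients the way the packet body is laid out:
    // size, then center, then age, each as a raw 4-byte float.
    std::vector<uint8_t> packCoefficients(float size, float center, float age) {
        std::vector<uint8_t> body(3 * sizeof(float));
        uint8_t* cursor = body.data();
        std::memcpy(cursor, &size, sizeof(size));     cursor += sizeof(size);
        std::memcpy(cursor, &center, sizeof(center)); cursor += sizeof(center);
        std::memcpy(cursor, &age, sizeof(age));
        return body;
    }

    // The receiver must read in exactly the order the sender wrote.
    void unpackCoefficients(const std::vector<uint8_t>& body, float& size, float& center, float& age) {
        const uint8_t* cursor = body.data();
        std::memcpy(&size, cursor, sizeof(size));     cursor += sizeof(size);
        std::memcpy(&center, cursor, sizeof(center)); cursor += sizeof(center);
        std::memcpy(&age, cursor, sizeof(age));
    }

    int main() {
        auto body = packCoefficients(0.5f, 0.25f, 1.0f);
        float size, center, age;
        unpackCoefficients(body, size, center, age);
        std::cout << size << " " << center << " " << age << "\n";  // 0.5 0.25 1
        return 0;
    }
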
@ -43,8 +43,8 @@ public:
     std::shared_ptr<MyAvatar> getMyAvatar() { return _myAvatar; }
     AvatarSharedPointer getAvatarBySessionID(const QUuid& sessionID) const override;
 
-    int getFullySimulatedAvatars() const { return _fullySimulatedAvatars; }
-    int getPartiallySimulatedAvatars() const { return _partiallySimulatedAvatars; }
+    int getNumAvatarsUpdated() const { return _numAvatarsUpdated; }
+    int getNumAvatarsNotUpdated() const { return _numAvatarsNotUpdated; }
     float getAvatarSimulationTime() const { return _avatarSimulationTime; }
 
     void updateMyAvatar(float deltaTime);

@ -120,15 +120,9 @@ private:
     VectorOfMotionStates _motionStatesToRemoveFromPhysics;
 
     RateCounter<> _myAvatarSendRate;
-    int _fullySimulatedAvatars { 0 };
-    int _partiallySimulatedAvatars { 0 };
+    int _numAvatarsUpdated { 0 };
+    int _numAvatarsNotUpdated { 0 };
     float _avatarSimulationTime { 0.0f };
 
-    // TODO: remove this HACK once we settle on optimal sort coefficients
-    // These coefficients exposed for fine tuning the sort priority for transfering new _jointData to the render pipeline.
-    float _avatarSortCoefficientSize { 0.5f };
-    float _avatarSortCoefficientCenter { 0.25 };
-    float _avatarSortCoefficientAge { 1.0f };
-
 };
 
 Q_DECLARE_METATYPE(AvatarManager::LocalLight)

@ -121,8 +121,8 @@ void Stats::updateStats(bool force) {
     auto avatarManager = DependencyManager::get<AvatarManager>();
     // we need to take one avatar out so we don't include ourselves
     STAT_UPDATE(avatarCount, avatarManager->size() - 1);
-    STAT_UPDATE(fullySimulatedAvatarCount, avatarManager->getFullySimulatedAvatars());
-    STAT_UPDATE(partiallySimulatedAvatarCount, avatarManager->getPartiallySimulatedAvatars());
+    STAT_UPDATE(updatedAvatarCount, avatarManager->getNumAvatarsUpdated());
+    STAT_UPDATE(notUpdatedAvatarCount, avatarManager->getNumAvatarsNotUpdated());
     STAT_UPDATE(serverCount, (int)nodeList->size());
     STAT_UPDATE(framerate, qApp->getFps());
     if (qApp->getActiveDisplayPlugin()) {

@ -49,8 +49,8 @@ class Stats : public QQuickItem {
     STATS_PROPERTY(int, simrate, 0)
     STATS_PROPERTY(int, avatarSimrate, 0)
     STATS_PROPERTY(int, avatarCount, 0)
-    STATS_PROPERTY(int, fullySimulatedAvatarCount, 0)
-    STATS_PROPERTY(int, partiallySimulatedAvatarCount, 0)
+    STATS_PROPERTY(int, updatedAvatarCount, 0)
+    STATS_PROPERTY(int, notUpdatedAvatarCount, 0)
     STATS_PROPERTY(int, packetInCount, 0)
     STATS_PROPERTY(int, packetOutCount, 0)
     STATS_PROPERTY(float, mbpsIn, 0)

@ -159,8 +159,8 @@ signals:
     void simrateChanged();
     void avatarSimrateChanged();
     void avatarCountChanged();
-    void fullySimulatedAvatarCountChanged();
-    void partiallySimulatedAvatarCountChanged();
+    void updatedAvatarCountChanged();
+    void notUpdatedAvatarCountChanged();
     void packetInCountChanged();
     void packetOutCountChanged();
     void mbpsInChanged();

@ -36,6 +36,7 @@
 #include <shared/JSONHelpers.h>
 #include <ShapeInfo.h>
 #include <AudioHelpers.h>
+#include <Profile.h>
 #include <VariantMapToScriptValue.h>
 
 #include "AvatarLogging.h"

@ -69,8 +70,7 @@ AvatarData::AvatarData() :
     _displayNameAlpha(1.0f),
     _errorLogExpiry(0),
     _owningAvatarMixer(),
-    _targetVelocity(0.0f),
-    _localAABox(DEFAULT_LOCAL_AABOX_CORNER, DEFAULT_LOCAL_AABOX_SCALE)
+    _targetVelocity(0.0f)
 {
     setBodyPitch(0.0f);
     setBodyYaw(-90.0f);

@ -193,6 +193,13 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
     unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(avatarDataByteArray.data());
     unsigned char* startPosition = destinationBuffer;
 
+    // special case, if we were asked for no data, then just include the flags all set to nothing
+    if (dataDetail == NoData) {
+        AvatarDataPacket::HasFlags packetStateFlags = 0;
+        memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
+        return avatarDataByteArray.left(sizeof(packetStateFlags));
+    }
+
     // FIXME -
     //
     // BUG -- if you enter a space bubble, and then back away, the avatar has wrong orientation until "send all" happens...

|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const float AvatarData::OUT_OF_VIEW_PENALTY = -10.0f;
|
||||||
|
|
||||||
|
float AvatarData::_avatarSortCoefficientSize { 0.5f };
|
||||||
|
float AvatarData::_avatarSortCoefficientCenter { 0.25 };
|
||||||
|
float AvatarData::_avatarSortCoefficientAge { 1.0f };
|
||||||
|
|
||||||
|
std::priority_queue<AvatarPriority> AvatarData::sortAvatars(
|
||||||
|
QList<AvatarSharedPointer> avatarList,
|
||||||
|
const ViewFrustum& cameraView,
|
||||||
|
std::function<uint64_t(AvatarSharedPointer)> getLastUpdated,
|
||||||
|
std::function<float(AvatarSharedPointer)> getBoundingRadius,
|
||||||
|
std::function<bool(AvatarSharedPointer)> shouldIgnore) {
|
||||||
|
|
||||||
|
uint64_t startTime = usecTimestampNow();
|
||||||
|
|
||||||
|
glm::vec3 frustumCenter = cameraView.getPosition();
|
||||||
|
|
||||||
|
std::priority_queue<AvatarPriority> sortedAvatars;
|
||||||
|
{
|
||||||
|
PROFILE_RANGE(simulation, "sort");
|
||||||
|
for (int32_t i = 0; i < avatarList.size(); ++i) {
|
||||||
|
const auto& avatar = avatarList.at(i);
|
||||||
|
|
||||||
|
if (shouldIgnore(avatar)) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// priority = weighted linear combination of:
|
||||||
|
// (a) apparentSize
|
||||||
|
// (b) proximity to center of view
|
||||||
|
// (c) time since last update
|
||||||
|
glm::vec3 avatarPosition = avatar->getPosition();
|
||||||
|
glm::vec3 offset = avatarPosition - frustumCenter;
|
||||||
|
float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
|
||||||
|
|
||||||
|
// FIXME - AvatarData has something equivolent to this
|
||||||
|
float radius = getBoundingRadius(avatar);
|
||||||
|
|
||||||
|
const glm::vec3& forward = cameraView.getDirection();
|
||||||
|
float apparentSize = 2.0f * radius / distance;
|
||||||
|
float cosineAngle = glm::length(glm::dot(offset, forward) * forward) / distance;
|
||||||
|
float age = (float)(startTime - getLastUpdated(avatar)) / (float)(USECS_PER_SECOND);
|
||||||
|
|
||||||
|
// NOTE: we are adding values of different units to get a single measure of "priority".
|
||||||
|
// Thus we multiply each component by a conversion "weight" that scales its units relative to the others.
|
||||||
|
// These weights are pure magic tuning and should be hard coded in the relation below,
|
||||||
|
// but are currently exposed for anyone who would like to explore fine tuning:
|
||||||
|
float priority = _avatarSortCoefficientSize * apparentSize
|
||||||
|
+ _avatarSortCoefficientCenter * cosineAngle
|
||||||
|
+ _avatarSortCoefficientAge * age;
|
||||||
|
|
||||||
|
// decrement priority of avatars outside keyhole
|
||||||
|
if (distance > cameraView.getCenterRadius()) {
|
||||||
|
if (!cameraView.sphereIntersectsFrustum(avatarPosition, radius)) {
|
||||||
|
priority += OUT_OF_VIEW_PENALTY;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
sortedAvatars.push(AvatarPriority(avatar, priority));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return sortedAvatars;
|
||||||
|
}
|
||||||
|
|
||||||
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
|
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
|
||||||
QScriptValue obj = engine->newObject();
|
QScriptValue obj = engine->newObject();
|
||||||
for (auto entityID : value.keys()) {
|
for (auto entityID : value.keys()) {
|
||||||
|
|
|
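With the default coefficients above (size 0.5, center 0.25, age 1.0), a quick worked example: an avatar with a 1 m bounding radius, 10 m away and dead center in view, last updated 0.25 s ago, gets apparentSize 0.2, cosineAngle 1.0 and age 0.25, so priority = 0.5*0.2 + 0.25*1.0 + 1.0*0.25 = 0.6; failing both the keyhole and the frustum test would add the -10 penalty and drop it to -9.4. The snippet below only reproduces that arithmetic, with the default weights hard coded for illustration.

    #include <iostream>

    int main() {
        // default tuning weights introduced by this change
        const float coefficientSize = 0.5f;
        const float coefficientCenter = 0.25f;
        const float coefficientAge = 1.0f;
        const float outOfViewPenalty = -10.0f;

        // example avatar: 1 m radius, 10 m away, centered in view, 0.25 s stale
        float radius = 1.0f;
        float distance = 10.0f;
        float apparentSize = 2.0f * radius / distance;  // 0.2
        float cosineAngle = 1.0f;                       // looking straight at it
        float age = 0.25f;                              // seconds since last update

        float priority = coefficientSize * apparentSize
            + coefficientCenter * cosineAngle
            + coefficientAge * age;                     // 0.1 + 0.25 + 0.25 = 0.6
        std::cout << "in view: " << priority << "\n";
        std::cout << "outside keyhole and frustum: " << priority + outOfViewPenalty << "\n";  // -9.4
        return 0;
    }
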
@ -14,6 +14,8 @@
 
 #include <string>
 #include <memory>
+#include <queue>
 
 /* VS2010 defines stdint.h, but not inttypes.h */
 #if defined(_MSC_VER)
     typedef signed char int8_t;

@ -58,6 +60,7 @@ typedef unsigned long long quint64;
 #include <ThreadSafeValueCache.h>
 #include <SharedUtil.h>
 #include <shared/RateCounter.h>
+#include <ViewFrustum.h>
 
 #include "AABox.h"
 #include "HeadData.h"

@ -134,6 +137,7 @@ namespace AvatarDataPacket {
     const HasFlags PACKET_HAS_AVATAR_LOCAL_POSITION = 1U << 9;
     const HasFlags PACKET_HAS_FACE_TRACKER_INFO = 1U << 10;
     const HasFlags PACKET_HAS_JOINT_DATA = 1U << 11;
+    const size_t AVATAR_HAS_FLAGS_SIZE = 2;
 
     // NOTE: AvatarDataPackets start with a uint16_t sequence number that is not reflected in the Header structure.

@ -305,6 +309,14 @@ public:
     RateCounter<> jointDataRate;
 };
 
+class AvatarPriority {
+public:
+    AvatarPriority(AvatarSharedPointer a, float p) : avatar(a), priority(p) {}
+    AvatarSharedPointer avatar;
+    float priority;
+    // NOTE: we invert the less-than operator to sort high priorities to front
+    bool operator<(const AvatarPriority& other) const { return priority < other.priority; }
+};
+
 class AvatarData : public QObject, public SpatiallyNestable {
     Q_OBJECT

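Worth noting for readers of the new AvatarPriority class: std::priority_queue is a max-heap by default, so with a plain less-than on priority the entry returned by top() is always the highest-priority one; no extra comparator is needed for that behavior. The tiny check below demonstrates the ordering (Item is an illustrative stand-in).

    #include <iostream>
    #include <queue>

    struct Item {
        float priority;
        bool operator<(const Item& other) const { return priority < other.priority; }
    };

    int main() {
        std::priority_queue<Item> queue;
        queue.push({ 0.2f });
        queue.push({ 1.5f });
        queue.push({ -10.0f });
        std::cout << queue.top().priority << "\n";  // 1.5 -- highest priority pops first
        return 0;
    }
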
@ -363,6 +375,7 @@ public:
     void setHandPosition(const glm::vec3& handPosition);
 
     typedef enum {
+        NoData,
         MinimumData,
         CullSmallData,
         IncludeSmallData,

@ -540,8 +553,6 @@ public:
 
     void setOwningAvatarMixer(const QWeakPointer<Node>& owningAvatarMixer) { _owningAvatarMixer = owningAvatarMixer; }
 
-    const AABox& getLocalAABox() const { return _localAABox; }
-
     int getUsecsSinceLastUpdate() const { return _averageBytesReceived.getUsecsSinceLastEvent(); }
     int getAverageBytesReceivedPerSecond() const;
     int getReceiveRate() const;

@ -579,6 +590,28 @@ public:
     }
 
+    bool shouldDie() const {
+        const qint64 AVATAR_SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND;
+        return _owningAvatarMixer.isNull() || getUsecsSinceLastUpdate() > AVATAR_SILENCE_THRESHOLD_USECS;
+    }
+
+    static const float OUT_OF_VIEW_PENALTY;
+
+    static std::priority_queue<AvatarPriority> sortAvatars(
+        QList<AvatarSharedPointer> avatarList,
+        const ViewFrustum& cameraView,
+        std::function<uint64_t(AvatarSharedPointer)> getLastUpdated,
+        std::function<float(AvatarSharedPointer)> getBoundingRadius,
+        std::function<bool(AvatarSharedPointer)> shouldIgnore);
+
+    // TODO: remove this HACK once we settle on optimal sort coefficients
+    // These coefficients exposed for fine tuning the sort priority for transfering new _jointData to the render pipeline.
+    static float _avatarSortCoefficientSize;
+    static float _avatarSortCoefficientCenter;
+    static float _avatarSortCoefficientAge;
+
 public slots:
     void sendAvatarDataPacket();
     void sendIdentityPacket();

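shouldDie() moves down to AvatarData as an inline helper: an avatar becomes eligible for removal once its owning mixer pointer is gone or it has been silent for more than five seconds. The sketch below isolates just the timeout half of that test; peerExpired and USECS_PER_SECOND_SKETCH are stand-in names, not engine symbols.

    #include <cstdint>
    #include <iostream>

    static const uint64_t USECS_PER_SECOND_SKETCH = 1000000ULL;  // stand-in constant

    // Returns true when a peer has not been heard from within the silence threshold.
    bool peerExpired(uint64_t nowUsecs, uint64_t lastHeardUsecs) {
        const uint64_t SILENCE_THRESHOLD_USECS = 5 * USECS_PER_SECOND_SKETCH;
        return (nowUsecs - lastHeardUsecs) > SILENCE_THRESHOLD_USECS;
    }

    int main() {
        std::cout << peerExpired(10 * USECS_PER_SECOND_SKETCH, 2 * USECS_PER_SECOND_SKETCH) << "\n";  // 1 (expired)
        std::cout << peerExpired(10 * USECS_PER_SECOND_SKETCH, 7 * USECS_PER_SECOND_SKETCH) << "\n";  // 0 (still alive)
        return 0;
    }
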
@ -660,8 +693,6 @@ protected:
 
     glm::vec3 _targetVelocity;
 
-    AABox _localAABox;
-
     SimpleMovingAverage _averageBytesReceived;
 
     // During recording, this holds the starting position, orientation & scale of the recorded avatar

@ -190,3 +190,4 @@ void AvatarHashMap::sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& ol
     _lastOwnerSessionUUID = oldUUID;
     emit avatarSessionChangedEvent(sessionUUID, oldUUID);
 }

@ -27,7 +27,6 @@
 
 #include "AvatarData.h"
 
-
 class AvatarHashMap : public QObject, public Dependency {
     Q_OBJECT
     SINGLETON_DEPENDENCY

@ -112,7 +112,8 @@ public:
         ReloadEntityServerScript,
         EntityPhysics,
         EntityServerScriptLog,
-        LAST_PACKET_TYPE = EntityServerScriptLog
+        AdjustAvatarSorting,
+        LAST_PACKET_TYPE = AdjustAvatarSorting
     };
 };

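The new packet type is appended immediately before LAST_PACKET_TYPE, which keeps every existing enumerator's numeric value stable on the wire and only moves the sentinel. A small sketch of that append-only convention with made-up names:

    #include <iostream>

    // A made-up protocol enum following the same append-only rule.
    enum class DemoPacketType : int {
        Ping,
        Pong,
        EntityData,
        AdjustSorting,                       // newly appended value
        LAST_PACKET_TYPE = AdjustSorting     // sentinel tracks the newest entry
    };

    int main() {
        // existing values are unchanged; only the sentinel advanced
        std::cout << static_cast<int>(DemoPacketType::EntityData) << " "
                  << static_cast<int>(DemoPacketType::LAST_PACKET_TYPE) << "\n";  // 2 3
        return 0;
    }
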
@ -56,6 +56,11 @@ bool RecordingScriptingInterface::loadRecording(const QString& url) {
     using namespace recording;
 
     auto loader = ClipCache::instance().getClipLoader(url);
+    if (!loader) {
+        qWarning() << "Clip failed to load from " << url;
+        return false;
+    }
+
     if (!loader->isLoaded()) {
         QEventLoop loop;
         QObject::connect(loader.data(), &Resource::loaded, &loop, &QEventLoop::quit);

@ -58,11 +58,14 @@ function updateOverlays() {
 
         // setup a position for the overlay that is just above this avatar's head
         var overlayPosition = avatar.getJointPosition("Head");
-        overlayPosition.y += 1.05;
+        overlayPosition.y += 1.15;
+
+        var rows = 8;
 
         var text = avatarID + "\n"
                 +"--- Data from Mixer ---\n"
                 +"All: " + AvatarManager.getAvatarDataRate(avatarID).toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID).toFixed(2) + "hz)" + "\n"
+                /*
                 +" GP: " + AvatarManager.getAvatarDataRate(avatarID,"globalPosition").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"globalPosition").toFixed(2) + "hz)" + "\n"
                 +" LP: " + AvatarManager.getAvatarDataRate(avatarID,"localPosition").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"localPosition").toFixed(2) + "hz)" + "\n"
                 +" BB: " + AvatarManager.getAvatarDataRate(avatarID,"avatarBoundingBox").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"avatarBoundingBox").toFixed(2) + "hz)" + "\n"

@ -74,11 +77,12 @@ function updateOverlays() {
                 +" AF: " + AvatarManager.getAvatarDataRate(avatarID,"additionalFlags").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"additionalFlags").toFixed(2) + "hz)" + "\n"
                 +" PI: " + AvatarManager.getAvatarDataRate(avatarID,"parentInfo").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"parentInfo").toFixed(2) + "hz)" + "\n"
                 +" FT: " + AvatarManager.getAvatarDataRate(avatarID,"faceTracker").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"faceTracker").toFixed(2) + "hz)" + "\n"
+                */
                 +" JD: " + AvatarManager.getAvatarDataRate(avatarID,"jointData").toFixed(2) + "kbps (" + AvatarManager.getAvatarUpdateRate(avatarID,"jointData").toFixed(2) + "hz)" + "\n"
                 +"--- Simulation ---\n"
                 +"All: " + AvatarManager.getAvatarSimulationRate(avatarID,"avatar").toFixed(2) + "hz \n"
                 +" inView: " + AvatarManager.getAvatarSimulationRate(avatarID,"avatarInView").toFixed(2) + "hz \n"
-                +" SM: " + AvatarManager.getAvatarSimulationRate(avatarID,"skeletonModel").toFixed(2) + "hz \n"
+                //+" SM: " + AvatarManager.getAvatarSimulationRate(avatarID,"skeletonModel").toFixed(2) + "hz \n"
                 +" JD: " + AvatarManager.getAvatarSimulationRate(avatarID,"jointData").toFixed(2) + "hz \n"
 
         if (avatarID in debugOverlays) {

@ -93,7 +97,7 @@ function updateOverlays() {
             position: overlayPosition,
             dimensions: {
                 x: 1.25,
-                y: 19 * 0.13
+                y: rows * 0.13
             },
             lineHeight: 0.1,
             font:{size:0.1},