Merge pull request #13894 from SimonWalton-HiFi/avatar-mixer-scaling

Performance improvements for Avatar Mixer

Commit b675e6cc6b
9 changed files with 299 additions and 360 deletions
AvatarMixerClientData.cpp

@@ -11,6 +11,7 @@
#include "AvatarMixerClientData.h"

#include <algorithm>
#include <udt/PacketHeaders.h>

#include <DependencyManager.h>

@@ -218,6 +219,10 @@ uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& node
}

void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointer other) {
ignoreOther(self.data(), other.data());
}

void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
if (!isRadiusIgnoring(other->getUUID())) {
addToRadiusIgnoringSet(other->getUUID());
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);

@@ -235,9 +240,20 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
}
}

void AvatarMixerClientData::removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other) {
if (isRadiusIgnoring(other)) {
_radiusIgnoredOthers.erase(other);
bool AvatarMixerClientData::isRadiusIgnoring(const QUuid& other) const {
return std::find(_radiusIgnoredOthers.cbegin(), _radiusIgnoredOthers.cend(), other) != _radiusIgnoredOthers.cend();
}

void AvatarMixerClientData::addToRadiusIgnoringSet(const QUuid& other) {
if (!isRadiusIgnoring(other)) {
_radiusIgnoredOthers.push_back(other);
}
}

void AvatarMixerClientData::removeFromRadiusIgnoringSet(const QUuid& other) {
auto ignoredOtherIter = std::find(_radiusIgnoredOthers.cbegin(), _radiusIgnoredOthers.cend(), other);
if (ignoredOtherIter != _radiusIgnoredOthers.cend()) {
_radiusIgnoredOthers.erase(ignoredOtherIter);
}
}
AvatarMixerClientData.h

@@ -15,7 +15,7 @@
#include <algorithm>
#include <cfloat>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include <queue>

#include <QtCore/QJsonObject>

@@ -45,6 +45,7 @@ public:

int parseData(ReceivedMessage& message) override;
AvatarData& getAvatar() { return *_avatar; }
const AvatarData& getAvatar() const { return *_avatar; }
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }

@@ -90,11 +91,11 @@ public:
void loadJSONStats(QJsonObject& jsonObject) const;

glm::vec3 getPosition() const { return _avatar ? _avatar->getWorldPosition() : glm::vec3(0); }
glm::vec3 getGlobalBoundingBoxCorner() const { return _avatar ? _avatar->getGlobalBoundingBoxCorner() : glm::vec3(0); }
bool isRadiusIgnoring(const QUuid& other) const { return _radiusIgnoredOthers.find(other) != _radiusIgnoredOthers.end(); }
void addToRadiusIgnoringSet(const QUuid& other) { _radiusIgnoredOthers.insert(other); }
void removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other);
bool isRadiusIgnoring(const QUuid& other) const;
void addToRadiusIgnoringSet(const QUuid& other);
void removeFromRadiusIgnoringSet(const QUuid& other);
void ignoreOther(SharedNodePointer self, SharedNodePointer other);
void ignoreOther(const Node* self, const Node* other);

void readViewFrustumPacket(const QByteArray& message);

@@ -166,7 +167,7 @@ private:
int _numOutOfOrderSends = 0;

SimpleMovingAverage _avgOtherAvatarDataRate;
std::unordered_set<QUuid> _radiusIgnoredOthers;
std::vector<QUuid> _radiusIgnoredOthers;
ConicalViewFrustums _currentViewFrustums;

int _recentOtherAvatarsInView { 0 };
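The header change above swaps _radiusIgnoredOthers from std::unordered_set<QUuid> to std::vector<QUuid>, and the .cpp now does membership tests with std::find. For the handful of radius-ignored avatars a client typically accumulates, a linear scan over a small contiguous vector avoids the hashing and allocation overhead of a set. A minimal sketch of the same pattern, with illustrative names rather than the project's API:

    #include <algorithm>
    #include <vector>

    template <typename T>
    bool containsValue(const std::vector<T>& values, const T& value) {
        // linear membership test; cheap while the vector stays small
        return std::find(values.cbegin(), values.cend(), value) != values.cend();
    }

    template <typename T>
    void insertUnique(std::vector<T>& values, const T& value) {
        if (!containsValue(values, value)) {
            values.push_back(value);
        }
    }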
AvatarMixerSlave.cpp

@@ -13,6 +13,7 @@

#include <algorithm>
#include <random>
#include <chrono>

#include <glm/glm.hpp>
#include <glm/gtx/norm.hpp>

@@ -33,6 +34,8 @@
#include "AvatarMixer.h"
#include "AvatarMixerClientData.h"

namespace chrono = std::chrono;

void AvatarMixerSlave::configure(ConstIter begin, ConstIter end) {
_begin = begin;
_end = end;

@@ -209,7 +212,18 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
_stats.jobElapsedTime += (end - start);
}

AABox computeBubbleBox(const AvatarData& avatar, float bubbleExpansionFactor) {
AABox box = avatar.getGlobalBoundingBox();
glm::vec3 scale = box.getScale();
scale *= bubbleExpansionFactor;
const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3);
scale = glm::max(scale, MIN_BUBBLE_SCALE);
box.setScaleStayCentered(glm::max(scale, MIN_BUBBLE_SCALE));
return box;
}

void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node) {
const Node* destinationNode = node.data();

auto nodeList = DependencyManager::get<NodeList>();

@@ -220,7 +234,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

_stats.nodesBroadcastedTo++;

AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(node->getLinkedData());
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(destinationNode->getLinkedData());

nodeData->resetInViewStats();

@@ -242,12 +256,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
int traitBytesSent = 0;

// max number of avatarBytes per frame
auto maxAvatarBytesPerFrame = (_maxKbpsPerNode * BYTES_PER_KILOBIT) / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;
int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);

// FIXME - find a way to not send the sessionID for every avatar
int minimumBytesPerAvatar = AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID;

int overBudgetAvatars = 0;

// keep track of the number of other avatars held back in this frame
int numAvatarsHeldBack = 0;

@@ -260,66 +270,38 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
bool PALIsOpen = nodeData->getRequestsDomainListData();

// When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
bool getsAnyIgnored = PALIsOpen && node->getCanKick();
bool getsAnyIgnored = PALIsOpen && destinationNode->getCanKick();

if (PALIsOpen) {
// Increase minimumBytesPerAvatar if the PAL is open
minimumBytesPerAvatar += sizeof(AvatarDataPacket::AvatarGlobalPosition) +
sizeof(AvatarDataPacket::AudioLoudness);
}
// Bandwidth allowance for data that must be sent.
int minimumBytesPerAvatar = PALIsOpen ? AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID +
sizeof(AvatarDataPacket::AvatarGlobalPosition) + sizeof(AvatarDataPacket::AudioLoudness) : 0;

// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;

// Define the minimum bubble size
static const glm::vec3 minBubbleSize = avatar.getSensorToWorldScale() * glm::vec3(0.3f, 1.3f, 0.3f);
// Define the scale of the box for the current node
glm::vec3 nodeBoxScale = (nodeData->getPosition() - nodeData->getGlobalBoundingBoxCorner()) * 2.0f * avatar.getSensorToWorldScale();
// Set up the bounding box for the current node
AABox nodeBox(nodeData->getGlobalBoundingBoxCorner(), nodeBoxScale);
// Clamp the size of the bounding box to a minimum scale
if (glm::any(glm::lessThan(nodeBoxScale, minBubbleSize))) {
nodeBox.setScaleStayCentered(minBubbleSize);
}
// Quadruple the scale of both bounding boxes
nodeBox.embiggen(4.0f);

// setup list of AvatarData as well as maps to map between the AvatarData and the original nodes
std::vector<AvatarSharedPointer> avatarsToSort;
std::unordered_map<AvatarSharedPointer, SharedNodePointer> avatarDataToNodes;
std::unordered_map<QUuid, uint64_t> avatarEncodeTimes;
std::for_each(_begin, _end, [&](const SharedNodePointer& otherNode) {
// make sure this is an agent that we have avatar data for before considering it for inclusion
if (otherNode->getType() == NodeType::Agent
&& otherNode->getLinkedData()) {
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());

AvatarSharedPointer otherAvatar = otherNodeData->getAvatarSharedPointer();
avatarsToSort.push_back(otherAvatar);
avatarDataToNodes[otherAvatar] = otherNode;
QUuid id = otherAvatar->getSessionUUID();
avatarEncodeTimes[id] = nodeData->getLastOtherAvatarEncodeTime(id);
}
});
// compute node bounding box
const float MY_AVATAR_BUBBLE_EXPANSION_FACTOR = 4.0f; // magic number determined empirically
AABox nodeBox = computeBubbleBox(avatar, MY_AVATAR_BUBBLE_EXPANSION_FACTOR);

class SortableAvatar: public PrioritySortUtil::Sortable {
public:
SortableAvatar() = delete;
SortableAvatar(const AvatarSharedPointer& avatar, uint64_t lastEncodeTime)
: _avatar(avatar), _lastEncodeTime(lastEncodeTime) {}
glm::vec3 getPosition() const override { return _avatar->getWorldPosition(); }
SortableAvatar(const AvatarData* avatar, const Node* avatarNode, uint64_t lastEncodeTime)
: _avatar(avatar), _node(avatarNode), _lastEncodeTime(lastEncodeTime) {}
glm::vec3 getPosition() const override { return _avatar->getClientGlobalPosition(); }
float getRadius() const override {
glm::vec3 nodeBoxHalfScale = (_avatar->getWorldPosition() - _avatar->getGlobalBoundingBoxCorner() * _avatar->getSensorToWorldScale());
return glm::max(nodeBoxHalfScale.x, glm::max(nodeBoxHalfScale.y, nodeBoxHalfScale.z));
glm::vec3 nodeBoxScale = _avatar->getGlobalBoundingBox().getScale();
return 0.5f * glm::max(nodeBoxScale.x, glm::max(nodeBoxScale.y, nodeBoxScale.z));
}
uint64_t getTimestamp() const override {
return _lastEncodeTime;
}
AvatarSharedPointer getAvatar() const { return _avatar; }
const Node* getNode() const { return _node; }

private:
AvatarSharedPointer _avatar;
const AvatarData* _avatar;
const Node* _node;
uint64_t _lastEncodeTime;
};

@@ -329,16 +311,18 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
AvatarData::_avatarSortCoefficientSize,
AvatarData::_avatarSortCoefficientCenter,
AvatarData::_avatarSortCoefficientAge);
sortedAvatars.reserve(avatarsToSort.size());
sortedAvatars.reserve(_end - _begin);

// ignore or sort
const AvatarSharedPointer& thisAvatar = nodeData->getAvatarSharedPointer();
for (const auto& avatar : avatarsToSort) {
if (avatar == thisAvatar) {
// don't echo updates to self
for (auto listedNode = _begin; listedNode != _end; ++listedNode) {
Node* otherNodeRaw = (*listedNode).data();
if (otherNodeRaw->getType() != NodeType::Agent
|| !otherNodeRaw->getLinkedData()
|| otherNodeRaw == destinationNode) {
continue;
}

auto avatarNode = otherNodeRaw;

bool shouldIgnore = false;
// We ignore other nodes for a couple of reasons:
// 1) ignore bubbles and ignore specific node

@@ -346,53 +330,39 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// happen if for example the avatar is connected on a desktop and sending
// updates at ~30hz. So every 3 frames we skip a frame.

auto avatarNode = avatarDataToNodes[avatar];
assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map

const AvatarMixerClientData* avatarNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
assert(avatarNodeData); // we can't have gotten here without avatarNode having valid data
const AvatarMixerClientData* avatarClientNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
assert(avatarClientNodeData); // we can't have gotten here without avatarNode having valid data
quint64 startIgnoreCalculation = usecTimestampNow();

// make sure we have data for this avatar, that it isn't the same node,
// and isn't an avatar that the viewing node has ignored
// or that has ignored the viewing node
if (!avatarNode->getLinkedData()
|| avatarNode->getUUID() == node->getUUID()
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
if ((destinationNode->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(destinationNode->getUUID()) && !getsAnyIgnored)) {
shouldIgnore = true;
} else {
// Check to see if the space bubble is enabled
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
float sensorToWorldScale = avatarNodeData->getAvatarSharedPointer()->getSensorToWorldScale();
// Define the scale of the box for the current other node
glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f * sensorToWorldScale;
// Set up the bounding box for the current other node
AABox otherNodeBox(avatarNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
// Clamp the size of the bounding box to a minimum scale
if (glm::any(glm::lessThan(otherNodeBoxScale, minBubbleSize))) {
otherNodeBox.setScaleStayCentered(minBubbleSize);
}
// Change the scale of both bounding boxes
// (This is an arbitrary number determined empirically)
otherNodeBox.embiggen(2.4f);

if (destinationNode->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Perform the collision check between the two bounding boxes
const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);
if (nodeBox.touches(otherNodeBox)) {
nodeData->ignoreOther(node, avatarNode);
nodeData->ignoreOther(destinationNode, avatarNode);
shouldIgnore = !getsAnyIgnored;
}
}
// Not close enough to ignore
if (!shouldIgnore) {
nodeData->removeFromRadiusIgnoringSet(node, avatarNode->getUUID());
nodeData->removeFromRadiusIgnoringSet(avatarNode->getUUID());
}
}

if (!shouldIgnore) {
AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
AvatarDataSequenceNumber lastSeqFromSender = avatarNodeData->getLastReceivedSequenceNumber();
AvatarDataSequenceNumber lastSeqFromSender = avatarClientNodeData->getLastReceivedSequenceNumber();

// FIXME - This code does appear to be working. But it seems brittle.
// It supports determining if the frame of data for this "other"

@@ -417,12 +387,10 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

if (!shouldIgnore) {
// sort this one for later
uint64_t lastEncodeTime = 0;
std::unordered_map<QUuid, uint64_t>::const_iterator itr = avatarEncodeTimes.find(avatar->getSessionUUID());
if (itr != avatarEncodeTimes.end()) {
lastEncodeTime = itr->second;
}
sortedAvatars.push(SortableAvatar(avatar, lastEncodeTime));
const AvatarData* avatarNodeData = avatarClientNodeData->getConstAvatarData();
auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNodeData->getSessionUUID());

sortedAvatars.push(SortableAvatar(avatarNodeData, avatarNode, lastEncodeTime));
}
}

@@ -430,19 +398,31 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

int remainingAvatars = (int)sortedAvatars.size();
auto traitsPacketList = NLPacketList::create(PacketType::BulkAvatarTraits, QByteArray(), true, true);

const auto& sortedAvatarVector = sortedAvatars.getSortedVector();
for (const auto& sortedAvatar : sortedAvatarVector) {
const auto& avatarData = sortedAvatar.getAvatar();
remainingAvatars--;
const Node* otherNode = sortedAvatar.getNode();
auto lastEncodeForOther = sortedAvatar.getTimestamp();

auto otherNode = avatarDataToNodes[avatarData];
assert(otherNode); // we can't have gotten here without the avatarData being a valid key in the map

// NOTE: Here's where we determine if we are over budget and drop to bare minimum data
AvatarData::AvatarDataDetail detail = AvatarData::NoData;

// NOTE: Here's where we determine if we are over budget and drop remaining avatars,
// or send minimal avatar data in uncommon case of PALIsOpen.
int minimRemainingAvatarBytes = minimumBytesPerAvatar * remainingAvatars;
bool overBudget = (identityBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes) > maxAvatarBytesPerFrame;
if (overBudget) {
if (PALIsOpen) {
_stats.overBudgetAvatars++;
detail = AvatarData::PALMinimum;
} else {
_stats.overBudgetAvatars += remainingAvatars;
break;
}
}

quint64 startAvatarDataPacking = usecTimestampNow();
auto startAvatarDataPacking = chrono::high_resolution_clock::now();

++numOtherAvatars;

@@ -459,32 +439,18 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
nodeData->setLastBroadcastTime(otherNode->getUUID(), usecTimestampNow());
}

// determine if avatar is in view which determines how much data to send
glm::vec3 otherPosition = otherAvatar->getClientGlobalPosition();
glm::vec3 otherNodeBoxScale = (otherPosition - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f * otherAvatar->getSensorToWorldScale();
AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
bool isInView = nodeData->otherAvatarInView(otherNodeBox);
// Typically all out-of-view avatars but such avatars' priorities will rise with time:
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;

// start a new segment in the PacketList for this avatar
avatarPacketList->startSegment();

AvatarData::AvatarDataDetail detail;

if (overBudget) {
overBudgetAvatars++;
_stats.overBudgetAvatars++;
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
} else if (!isInView) {
if (isLowerPriority) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
nodeData->incrementAvatarOutOfView();
} else {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
? AvatarData::SendAllData : AvatarData::CullSmallData;
} else if (!overBudget) {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
nodeData->incrementAvatarInView();
}

bool includeThisAvatar = true;
auto lastEncodeForOther = nodeData->getLastOtherAvatarEncodeTime(otherNode->getUUID());
QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());

lastSentJointsForOther.resize(otherAvatar->getJointCount());

@@ -494,14 +460,14 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
bool dropFaceTracking = false;

quint64 start = usecTimestampNow();
auto startSerialize = chrono::high_resolution_clock::now();
QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition,
&lastSentJointsForOther);
quint64 end = usecTimestampNow();
_stats.toByteArrayElapsedTime += (end - start);
auto endSerialize = chrono::high_resolution_clock::now();
_stats.toByteArrayElapsedTime +=
(quint64) chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();

static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;
if (bytes.size() > maxAvatarDataBytes) {
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
<< "resulted in very large buffer of" << bytes.size() << "bytes - dropping facial data";

@@ -527,8 +493,11 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
}

if (includeThisAvatar) {
// start a new segment in the PacketList for this avatar
avatarPacketList->startSegment();
numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
numAvatarDataBytes += avatarPacketList->write(bytes);
avatarPacketList->endSegment();

if (detail != AvatarData::NoData) {
_stats.numOthersIncluded++;

@@ -546,15 +515,13 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// It would be nice if we could tweak its future sort priority to put it at the back of the list.
}

avatarPacketList->endSegment();

quint64 endAvatarDataPacking = usecTimestampNow();
_stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
auto endAvatarDataPacking = chrono::high_resolution_clock::now();
_stats.avatarDataPackingElapsedTime +=
(quint64) chrono::duration_cast<chrono::microseconds>(endAvatarDataPacking - startAvatarDataPacking).count();

// use helper to add any changed traits to our packet list
traitBytesSent += addChangedTraitsToBulkPacket(nodeData, otherNodeData, *traitsPacketList);

traitsPacketList->getDataSize();
remainingAvatars--;
}

quint64 startPacketSending = usecTimestampNow();

@@ -566,7 +533,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
_stats.numBytesSent += numAvatarDataBytes;

// send the avatar data PacketList
nodeList->sendPacketList(std::move(avatarPacketList), *node);
nodeList->sendPacketList(std::move(avatarPacketList), *destinationNode);

// record the bytes sent for other avatar data in the AvatarMixerClientData
nodeData->recordSentAvatarData(numAvatarDataBytes);

@@ -576,7 +543,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

if (traitsPacketList->getNumPackets() >= 1) {
// send the traits packet list
nodeList->sendPacketList(std::move(traitsPacketList), *node);
nodeList->sendPacketList(std::move(traitsPacketList), *destinationNode);
}

// record the number of avatars held back this frame
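The new computeBubbleBox() above centralizes the bubble construction that the per-destination loop previously did inline: take the avatar's global bounding box, scale it by an expansion factor (4.0 for the receiving avatar, 2.4 for each other avatar), clamp it to a minimum size, and radius-ignore the other avatar when the two boxes touch. A self-contained sketch of that test using plain glm types instead of the project's AABox (names here are illustrative):

    #include <glm/glm.hpp>

    struct Box { glm::vec3 minCorner; glm::vec3 scale; };

    Box makeBubble(glm::vec3 corner, glm::vec3 scale, float expansion, glm::vec3 minScale) {
        glm::vec3 center = corner + 0.5f * scale;
        glm::vec3 grown = glm::max(scale * expansion, minScale);  // expand, then clamp to a floor
        return { center - 0.5f * grown, grown };                   // keep the box centered
    }

    bool touches(const Box& a, const Box& b) {
        // axis-aligned overlap test, the same idea as AABox::touches()
        glm::vec3 aMax = a.minCorner + a.scale;
        glm::vec3 bMax = b.minCorner + b.scale;
        return glm::all(glm::lessThanEqual(a.minCorner, bMax)) &&
               glm::all(glm::lessThanEqual(b.minCorner, aMax));
    }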
AvatarManager.cpp

@@ -176,29 +176,29 @@ float AvatarManager::getAvatarSimulationRate(const QUuid& sessionID, const QStri
}

void AvatarManager::updateOtherAvatars(float deltaTime) {
// lock the hash for read to check the size
QReadLocker lock(&_hashLock);
if (_avatarHash.size() < 2 && _avatarsToFade.isEmpty()) {
return;
{
// lock the hash for read to check the size
QReadLocker lock(&_hashLock);
if (_avatarHash.size() < 2 && _avatarsToFade.isEmpty()) {
return;
}
}
lock.unlock();

PerformanceTimer perfTimer("otherAvatars");

class SortableAvatar: public PrioritySortUtil::Sortable {
public:
SortableAvatar() = delete;
SortableAvatar(const AvatarSharedPointer& avatar) : _avatar(avatar) {}
SortableAvatar(const std::shared_ptr<Avatar>& avatar) : _avatar(avatar) {}
glm::vec3 getPosition() const override { return _avatar->getWorldPosition(); }
float getRadius() const override { return std::static_pointer_cast<Avatar>(_avatar)->getBoundingRadius(); }
uint64_t getTimestamp() const override { return std::static_pointer_cast<Avatar>(_avatar)->getLastRenderUpdateTime(); }
AvatarSharedPointer getAvatar() const { return _avatar; }
float getRadius() const override { return _avatar->getBoundingRadius(); }
uint64_t getTimestamp() const override { return _avatar->getLastRenderUpdateTime(); }
std::shared_ptr<Avatar> getAvatar() const { return _avatar; }
private:
AvatarSharedPointer _avatar;
std::shared_ptr<Avatar> _avatar;
};

auto avatarMap = getHashCopy();
AvatarHash::iterator itr = avatarMap.begin();

const auto& views = qApp->getConicalViews();
PrioritySortUtil::PriorityQueue<SortableAvatar> sortedAvatars(views,

@@ -207,22 +207,24 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
AvatarData::_avatarSortCoefficientAge);
sortedAvatars.reserve(avatarMap.size() - 1); // don't include MyAvatar

// sort
// Build vector and compute priorities
auto nodeList = DependencyManager::get<NodeList>();
AvatarHash::iterator itr = avatarMap.begin();
while (itr != avatarMap.end()) {
const auto& avatar = std::static_pointer_cast<Avatar>(*itr);
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
// DO NOT update or fade out uninitialized Avatars
if (avatar != _myAvatar && avatar->isInitialized()) {
if (avatar != _myAvatar && avatar->isInitialized() && !nodeList->isPersonalMutingNode(avatar->getID())) {
sortedAvatars.push(SortableAvatar(avatar));
}
++itr;
}
// Sort
const auto& sortedAvatarVector = sortedAvatars.getSortedVector();

// process in sorted order
uint64_t startTime = usecTimestampNow();
const uint64_t UPDATE_BUDGET = 2000; // usec
uint64_t updateExpiry = startTime + UPDATE_BUDGET;
uint64_t updateExpiry = startTime + MAX_UPDATE_AVATARS_TIME_BUDGET;
int numAvatarsUpdated = 0;
int numAVatarsNotUpdated = 0;

@@ -241,18 +243,12 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
avatar->updateOrbPosition();
}

bool ignoring = DependencyManager::get<NodeList>()->isPersonalMutingNode(avatar->getID());
if (ignoring) {
continue;
}

// for ALL avatars...
if (_shouldRender) {
avatar->ensureInScene(avatar, qApp->getMain3DScene());
}
avatar->animateScaleChanges(deltaTime);

const float OUT_OF_VIEW_THRESHOLD = 0.5f * AvatarData::OUT_OF_VIEW_PENALTY;
uint64_t now = usecTimestampNow();
if (now < updateExpiry) {
// we're within budget

@@ -273,7 +269,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
// no time to simulate, but we take the time to count how many were tragically missed
while (it != sortedAvatarVector.end()) {
const SortableAvatar& newSortData = *it;
const auto newAvatar = std::static_pointer_cast<Avatar>(newSortData.getAvatar());
const auto& newAvatar = newSortData.getAvatar();
bool inView = newSortData.getPriority() > OUT_OF_VIEW_THRESHOLD;
// Once we reach an avatar that's not in view, all avatars after it will also be out of view
if (!inView) {
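updateOtherAvatars() now filters personally muted avatars while building the queue, sorts once, and then simulates in priority order until a fixed time budget (MAX_UPDATE_AVATARS_TIME_BUDGET, 2000 usec) runs out; whatever is left only has its miss counted, and its growing age pushes it earlier in the next frame's sort. A minimal sketch of that time-boxed pattern, with hypothetical names:

    #include <chrono>
    #include <vector>

    template <typename Item, typename UpdateFn>
    int updateWithinBudget(const std::vector<Item>& sorted, std::chrono::microseconds budget, UpdateFn update) {
        auto deadline = std::chrono::steady_clock::now() + budget;
        int updated = 0;
        for (const auto& item : sorted) {
            if (std::chrono::steady_clock::now() >= deadline) {
                break;  // out of budget; remaining items age and sort forward next frame
            }
            update(item);
            ++updated;
        }
        return updated;
    }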
AvatarData.cpp

@@ -363,13 +363,13 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
destinationBuffer += sizeof(packetStateFlags);

#define AVATAR_MEMCPY(src) \
memcpy(destinationBuffer, &(src), sizeof(src)); \
destinationBuffer += sizeof(src);

if (hasAvatarGlobalPosition) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarGlobalPosition*>(destinationBuffer);
data->globalPosition[0] = _globalPosition.x;
data->globalPosition[1] = _globalPosition.y;
data->globalPosition[2] = _globalPosition.z;
destinationBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
AVATAR_MEMCPY(_globalPosition);

int numBytes = destinationBuffer - startSection;

@@ -380,17 +380,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent

if (hasAvatarBoundingBox) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarBoundingBox*>(destinationBuffer);

data->avatarDimensions[0] = _globalBoundingBoxDimensions.x;
data->avatarDimensions[1] = _globalBoundingBoxDimensions.y;
data->avatarDimensions[2] = _globalBoundingBoxDimensions.z;

data->boundOriginOffset[0] = _globalBoundingBoxOffset.x;
data->boundOriginOffset[1] = _globalBoundingBoxOffset.y;
data->boundOriginOffset[2] = _globalBoundingBoxOffset.z;

destinationBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);
AVATAR_MEMCPY(_globalBoundingBoxDimensions);
AVATAR_MEMCPY(_globalBoundingBoxOffset);

int numBytes = destinationBuffer - startSection;
if (outboundDataRateOut) {

@@ -424,13 +415,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent

if (hasLookAtPosition) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::LookAtPosition*>(destinationBuffer);
auto lookAt = _headData->getLookAtPosition();
data->lookAtPosition[0] = lookAt.x;
data->lookAtPosition[1] = lookAt.y;
data->lookAtPosition[2] = lookAt.z;
destinationBuffer += sizeof(AvatarDataPacket::LookAtPosition);

AVATAR_MEMCPY(_headData->getLookAtPosition());
int numBytes = destinationBuffer - startSection;
if (outboundDataRateOut) {
outboundDataRateOut->lookAtPositionRate.increment(numBytes);

@@ -531,12 +516,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent

if (hasAvatarLocalPosition) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarLocalPosition*>(destinationBuffer);
auto localPosition = getLocalPosition();
data->localPosition[0] = localPosition.x;
data->localPosition[1] = localPosition.y;
data->localPosition[2] = localPosition.z;
destinationBuffer += sizeof(AvatarDataPacket::AvatarLocalPosition);
const auto localPosition = getLocalPosition();
AVATAR_MEMCPY(localPosition);

int numBytes = destinationBuffer - startSection;
if (outboundDataRateOut) {

@@ -567,19 +548,24 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

QVector<JointData> jointData;
if (hasJointData || hasJointDefaultPoseFlags) {
QReadLocker readLock(&_jointDataLock);
jointData = _jointData;
}

// If it is connected, pack up the data
if (hasJointData) {
auto startSection = destinationBuffer;
QReadLocker readLock(&_jointDataLock);

// joint rotation data
int numJoints = _jointData.size();
int numJoints = jointData.size();
*destinationBuffer++ = (uint8_t)numJoints;

unsigned char* validityPosition = destinationBuffer;
unsigned char validity = 0;
int validityBit = 0;
int numValidityBytes = (int)std::ceil(numJoints / (float)BITS_IN_BYTE);
int numValidityBytes = calcBitVectorSize(numJoints);

#ifdef WANT_DEBUG
int rotationSentCount = 0;

@@ -589,43 +575,37 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
destinationBuffer += numValidityBytes; // Move pointer past the validity bytes

// sentJointDataOut and lastSentJointData might be the same vector
// build sentJointDataOut locally and then swap it at the end.
QVector<JointData> localSentJointDataOut;
if (sentJointDataOut) {
localSentJointDataOut.resize(numJoints); // Make sure the destination is resized before using it
sentJointDataOut->resize(numJoints); // Make sure the destination is resized before using it
}

float minRotationDOT = !distanceAdjust ? AVATAR_MIN_ROTATION_DOT : getDistanceBasedMinRotationDOT(viewerPosition);
float minRotationDOT = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinRotationDOT(viewerPosition) : AVATAR_MIN_ROTATION_DOT;

for (int i = 0; i < _jointData.size(); i++) {
const JointData& data = _jointData[i];
for (int i = 0; i < jointData.size(); i++) {
const JointData& data = jointData[i];
const JointData& last = lastSentJointData[i];

if (!data.rotationIsDefaultPose) {
bool mustSend = sendAll || last.rotationIsDefaultPose;
if (mustSend || last.rotation != data.rotation) {

bool largeEnoughRotation = true;
if (cullSmallChanges) {
// The dot product for smaller rotations is a smaller number.
// So if the dot() is less than the value, then the rotation is a larger angle of rotation
largeEnoughRotation = fabsf(glm::dot(last.rotation, data.rotation)) < minRotationDOT;
}

if (mustSend || !cullSmallChanges || largeEnoughRotation) {
validity |= (1 << validityBit);
// The dot product for larger rotations is a lower number.
// So if the dot() is less than the value, then the rotation is a larger angle of rotation
if (sendAll || last.rotationIsDefaultPose || (!cullSmallChanges && last.rotation != data.rotation)
|| (cullSmallChanges && glm::dot(last.rotation, data.rotation) < minRotationDOT) ) {
validity |= (1 << validityBit);
#ifdef WANT_DEBUG
rotationSentCount++;
rotationSentCount++;
#endif
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);

if (sentJointDataOut) {
localSentJointDataOut[i].rotation = data.rotation;
localSentJointDataOut[i].rotationIsDefaultPose = false;
}
if (sentJointDataOut) {
(*sentJointDataOut)[i].rotation = data.rotation;
}
}
}

if (sentJointDataOut) {
(*sentJointDataOut)[i].rotationIsDefaultPose = data.rotationIsDefaultPose;
}

if (++validityBit == BITS_IN_BYTE) {
*validityPosition++ = validity;
validityBit = validity = 0;

@@ -647,35 +627,38 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent

destinationBuffer += numValidityBytes; // Move pointer past the validity bytes

float minTranslation = !distanceAdjust ? AVATAR_MIN_TRANSLATION : getDistanceBasedMinTranslationDistance(viewerPosition);
float minTranslation = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinTranslationDistance(viewerPosition) : AVATAR_MIN_TRANSLATION;

float maxTranslationDimension = 0.0;
for (int i = 0; i < _jointData.size(); i++) {
const JointData& data = _jointData[i];
for (int i = 0; i < jointData.size(); i++) {
const JointData& data = jointData[i];
const JointData& last = lastSentJointData[i];

if (!data.translationIsDefaultPose) {
bool mustSend = sendAll || last.translationIsDefaultPose;
if (mustSend || last.translation != data.translation) {
if (mustSend || !cullSmallChanges || glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation) {
validity |= (1 << validityBit);
if (sendAll || last.translationIsDefaultPose || (!cullSmallChanges && last.translation != data.translation)
|| (cullSmallChanges && glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation)) {

validity |= (1 << validityBit);
#ifdef WANT_DEBUG
translationSentCount++;
translationSentCount++;
#endif
maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);

destinationBuffer +=
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
destinationBuffer +=
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);

if (sentJointDataOut) {
localSentJointDataOut[i].translation = data.translation;
localSentJointDataOut[i].translationIsDefaultPose = false;
}
if (sentJointDataOut) {
(*sentJointDataOut)[i].translation = data.translation;
}
}
}

if (sentJointDataOut) {
(*sentJointDataOut)[i].translationIsDefaultPose = data.translationIsDefaultPose;
}

if (++validityBit == BITS_IN_BYTE) {
*validityPosition++ = validity;
validityBit = validity = 0;

@@ -691,6 +674,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, controllerLeftHandTransform.getRotation());
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerLeftHandTransform.getTranslation(),
TRANSLATION_COMPRESSION_RADIX);

Transform controllerRightHandTransform = Transform(getControllerRightHandMatrix());
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, controllerRightHandTransform.getRotation());
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerRightHandTransform.getTranslation(),

@@ -707,34 +691,27 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
glm::vec3 mouseFarGrabPosition = extractTranslation(mouseFarGrabMatrix);
glm::quat mouseFarGrabRotation = extractRotation(mouseFarGrabMatrix);

data->leftFarGrabPosition[0] = leftFarGrabPosition.x;
data->leftFarGrabPosition[1] = leftFarGrabPosition.y;
data->leftFarGrabPosition[2] = leftFarGrabPosition.z;

AVATAR_MEMCPY(leftFarGrabPosition);
// Can't do block copy as struct order is x, y, z, w.
data->leftFarGrabRotation[0] = leftFarGrabRotation.w;
data->leftFarGrabRotation[1] = leftFarGrabRotation.x;
data->leftFarGrabRotation[2] = leftFarGrabRotation.y;
data->leftFarGrabRotation[3] = leftFarGrabRotation.z;
destinationBuffer += sizeof(data->leftFarGrabPosition);

data->rightFarGrabPosition[0] = rightFarGrabPosition.x;
data->rightFarGrabPosition[1] = rightFarGrabPosition.y;
data->rightFarGrabPosition[2] = rightFarGrabPosition.z;

AVATAR_MEMCPY(rightFarGrabPosition);
data->rightFarGrabRotation[0] = rightFarGrabRotation.w;
data->rightFarGrabRotation[1] = rightFarGrabRotation.x;
data->rightFarGrabRotation[2] = rightFarGrabRotation.y;
data->rightFarGrabRotation[3] = rightFarGrabRotation.z;
destinationBuffer += sizeof(data->rightFarGrabRotation);

data->mouseFarGrabPosition[0] = mouseFarGrabPosition.x;
data->mouseFarGrabPosition[1] = mouseFarGrabPosition.y;
data->mouseFarGrabPosition[2] = mouseFarGrabPosition.z;

AVATAR_MEMCPY(mouseFarGrabPosition);
data->mouseFarGrabRotation[0] = mouseFarGrabRotation.w;
data->mouseFarGrabRotation[1] = mouseFarGrabRotation.x;
data->mouseFarGrabRotation[2] = mouseFarGrabRotation.y;
data->mouseFarGrabRotation[3] = mouseFarGrabRotation.z;

destinationBuffer += sizeof(AvatarDataPacket::FarGrabJoints);
destinationBuffer += sizeof(data->mouseFarGrabRotation);

int numBytes = destinationBuffer - startSection;

@@ -761,41 +738,23 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
outboundDataRateOut->jointDataRate.increment(numBytes);
}

if (sentJointDataOut) {

// Mark default poses in lastSentJointData, so when they become non-default we send them.
for (int i = 0; i < _jointData.size(); i++) {
const JointData& data = _jointData[i];
JointData& local = localSentJointDataOut[i];
if (data.rotationIsDefaultPose) {
local.rotationIsDefaultPose = true;
}
if (data.translationIsDefaultPose) {
local.translationIsDefaultPose = true;
}
}

// Push new sent joint data to sentJointDataOut
sentJointDataOut->swap(localSentJointDataOut);
}
}

if (hasJointDefaultPoseFlags) {
auto startSection = destinationBuffer;
QReadLocker readLock(&_jointDataLock);

// write numJoints
int numJoints = _jointData.size();
int numJoints = jointData.size();
*destinationBuffer++ = (uint8_t)numJoints;

// write rotationIsDefaultPose bits
destinationBuffer += writeBitVector(destinationBuffer, numJoints, [&](int i) {
return _jointData[i].rotationIsDefaultPose;
return jointData[i].rotationIsDefaultPose;
});

// write translationIsDefaultPose bits
destinationBuffer += writeBitVector(destinationBuffer, numJoints, [&](int i) {
return _jointData[i].translationIsDefaultPose;
return jointData[i].translationIsDefaultPose;
});

if (outboundDataRateOut) {

@@ -880,7 +839,6 @@ const unsigned char* unpackFauxJoint(const unsigned char* sourceBuffer, ThreadSa

// read data in packet starting at byte offset and return number of bytes parsed
int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {

// lazily allocate memory for HeadData in case we're not an Avatar instance
lazyInitHeadData();

@@ -932,7 +890,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
auto newValue = glm::vec3(data->globalPosition[0], data->globalPosition[1], data->globalPosition[2]) + offset;
if (_globalPosition != newValue) {
_globalPosition = newValue;
_globalPositionChanged = usecTimestampNow();
_globalPositionChanged = now;
}
sourceBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
int numBytesRead = sourceBuffer - startSection;

@@ -956,11 +914,11 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {

if (_globalBoundingBoxDimensions != newDimensions) {
_globalBoundingBoxDimensions = newDimensions;
_avatarBoundingBoxChanged = usecTimestampNow();
_avatarBoundingBoxChanged = now;
}
if (_globalBoundingBoxOffset != newOffset) {
_globalBoundingBoxOffset = newOffset;
_avatarBoundingBoxChanged = usecTimestampNow();
_avatarBoundingBoxChanged = now;
}

sourceBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);

@@ -1061,7 +1019,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
glm::mat4 sensorToWorldMatrix = createMatFromScaleQuatAndPos(glm::vec3(sensorToWorldScale), sensorToWorldQuat, sensorToWorldTrans);
if (_sensorToWorldMatrixCache.get() != sensorToWorldMatrix) {
_sensorToWorldMatrixCache.set(sensorToWorldMatrix);
_sensorToWorldMatrixChanged = usecTimestampNow();
_sensorToWorldMatrixChanged = now;
}
sourceBuffer += sizeof(AvatarDataPacket::SensorToWorldMatrix);
int numBytesRead = sourceBuffer - startSection;

@@ -1118,7 +1076,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
sourceBuffer += sizeof(AvatarDataPacket::AdditionalFlags);

if (somethingChanged) {
_additionalFlagsChanged = usecTimestampNow();
_additionalFlagsChanged = now;
}
int numBytesRead = sourceBuffer - startSection;
_additionalFlagsRate.increment(numBytesRead);

@@ -1138,7 +1096,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
if ((getParentID() != newParentID) || (getParentJointIndex() != parentInfo->parentJointIndex)) {
SpatiallyNestable::setParentID(newParentID);
SpatiallyNestable::setParentJointIndex(parentInfo->parentJointIndex);
_parentChanged = usecTimestampNow();
_parentChanged = now;
}

int numBytesRead = sourceBuffer - startSection;

@@ -1187,8 +1145,6 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
int numBytesRead = sourceBuffer - startSection;
_faceTrackerRate.increment(numBytesRead);
_faceTrackerUpdateRate.increment();
} else {
_headData->_blendshapeCoefficients.fill(0, _headData->_blendshapeCoefficients.size());
}

if (hasJointData) {

@@ -2873,10 +2829,8 @@ void RayToAvatarIntersectionResultFromScriptValue(const QScriptValue& object, Ra
value.extraInfo = object.property("extraInfo").toVariant().toMap();
}

const float AvatarData::OUT_OF_VIEW_PENALTY = -10.0f;

float AvatarData::_avatarSortCoefficientSize { 1.0f };
float AvatarData::_avatarSortCoefficientCenter { 0.25 };
float AvatarData::_avatarSortCoefficientSize { 8.0f };
float AvatarData::_avatarSortCoefficientCenter { 4.0f };
float AvatarData::_avatarSortCoefficientAge { 1.0f };

QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
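Much of the toByteArray() change above replaces per-component copies with AVATAR_MEMCPY, which block-copies a trivially-copyable field into the packet and advances the write cursor; the far-grab rotations keep explicit component copies because the packet stores w first while the quaternion stores it last. A standalone sketch of the same shorthand, with hypothetical names:

    #include <cstring>
    #include <cstdint>

    #define WRITE_FIELD(cursor, src)                    \
        do {                                            \
            std::memcpy((cursor), &(src), sizeof(src)); \
            (cursor) += sizeof(src);                    \
        } while (0)

    struct Vec3 { float x, y, z; };

    size_t packPosition(uint8_t* buffer, const Vec3& position) {
        uint8_t* cursor = buffer;
        WRITE_FIELD(cursor, position);  // one block copy instead of three assignments
        return cursor - buffer;         // bytes written
    }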
AvatarData.h

@@ -1097,7 +1097,7 @@ public:
void fromJson(const QJsonObject& json, bool useFrameSkeleton = true);

glm::vec3 getClientGlobalPosition() const { return _globalPosition; }
glm::vec3 getGlobalBoundingBoxCorner() const { return _globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions; }
AABox getGlobalBoundingBox() const { return AABox(_globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions, _globalBoundingBoxDimensions); }

/**jsdoc
* @function MyAvatar.getAvatarEntityData

@@ -1169,8 +1169,6 @@ public:
// A method intended to be overridden by MyAvatar for polling orientation for network transmission.
virtual glm::quat getOrientationOutbound() const;

static const float OUT_OF_VIEW_PENALTY;

// TODO: remove this HACK once we settle on optimal sort coefficients
// These coefficients exposed for fine tuning the sort priority for transferring new _jointData to the render pipeline.
static float _avatarSortCoefficientSize;
Socket.cpp

@@ -37,6 +37,7 @@ Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
_shouldChangeSocketOptions(shouldChangeSocketOptions)
{
connect(&_udpSocket, &QUdpSocket::readyRead, this, &Socket::readPendingDatagrams);
connect(this, &Socket::pendingDatagrams, this, &Socket::processPendingDatagrams, Qt::QueuedConnection);

// make sure we hear about errors and state changes from the underlying socket
connect(&_udpSocket, SIGNAL(error(QAbstractSocket::SocketError)),

@@ -315,55 +316,85 @@ void Socket::checkForReadyReadBackup() {
}

void Socket::readPendingDatagrams() {
int packetsRead = 0;

int packetSizeWithHeader = -1;

while (_udpSocket.hasPendingDatagrams() && (packetSizeWithHeader = _udpSocket.pendingDatagramSize()) != -1) {

// we're reading a packet so re-start the readyRead backup timer
_readyReadBackupTimer->start();

// Max datagrams to read before processing:
static const int MAX_DATAGRAMS_CONSECUTIVELY = 10000;
while (_udpSocket.hasPendingDatagrams()
&& (packetSizeWithHeader = _udpSocket.pendingDatagramSize()) != -1
&& packetsRead <= MAX_DATAGRAMS_CONSECUTIVELY) {
// grab a time point we can mark as the receive time of this packet
auto receiveTime = p_high_resolution_clock::now();

// setup a HifiSockAddr to read into
HifiSockAddr senderSockAddr;

// setup a buffer to read the packet into
auto buffer = std::unique_ptr<char[]>(new char[packetSizeWithHeader]);

QHostAddress senderAddress;
quint16 senderPort;

// pull the datagram
auto sizeRead = _udpSocket.readDatagram(buffer.get(), packetSizeWithHeader,
senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());
&senderAddress, &senderPort);

// save information for this packet, in case it is the one that sticks readyRead
_lastPacketSizeRead = sizeRead;
_lastPacketSockAddr = senderSockAddr;

if (sizeRead <= 0) {
// we either didn't pull anything for this packet or there was an error reading (this seems to trigger
// on windows even if there's not a packet available)
// we either didn't pull anything for this packet or there was an error reading (this seems to trigger
// on windows even if there's not a packet available)
if (sizeRead < 0) {
continue;
}

auto it = _unfilteredHandlers.find(senderSockAddr);
_incomingDatagrams.push_back({ senderAddress, senderPort, packetSizeWithHeader,
std::move(buffer), receiveTime });
++packetsRead;

}

if (packetsRead > _maxDatagramsRead) {
_maxDatagramsRead = packetsRead;
qCDebug(networking) << "readPendingDatagrams: Datagrams read:" << packetsRead;
}
emit pendingDatagrams(packetsRead);
}

void Socket::processPendingDatagrams(int) {
// setup a HifiSockAddr to read into
HifiSockAddr senderSockAddr;

while (!_incomingDatagrams.empty()) {
auto& datagram = _incomingDatagrams.front();
senderSockAddr.setAddress(datagram._senderAddress);
senderSockAddr.setPort(datagram._senderPort);
int datagramSize = datagram._datagramLength;
auto receiveTime = datagram._receiveTime;

// we're reading a packet so re-start the readyRead backup timer
_readyReadBackupTimer->start();

// save information for this packet, in case it is the one that sticks readyRead
_lastPacketSizeRead = datagramSize;
_lastPacketSockAddr = senderSockAddr;

// Process unfiltered packets first.
auto it = _unfilteredHandlers.find(senderSockAddr);
if (it != _unfilteredHandlers.end()) {
// we have a registered unfiltered handler for this HifiSockAddr - call that and return
// we have a registered unfiltered handler for this HifiSockAddr (eg. STUN packet) - call that and return
if (it->second) {
auto basePacket = BasePacket::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
auto basePacket = BasePacket::fromReceivedPacket(std::move(datagram._datagram),
datagramSize, senderSockAddr);
basePacket->setReceiveTime(receiveTime);
it->second(std::move(basePacket));
}

_incomingDatagrams.pop_front();
continue;
}

// check if this was a control packet or a data packet
bool isControlPacket = *reinterpret_cast<uint32_t*>(buffer.get()) & CONTROL_BIT_MASK;
bool isControlPacket = *reinterpret_cast<uint32_t*>(datagram._datagram.get()) & CONTROL_BIT_MASK;

if (isControlPacket) {
// setup a control packet from the data we just read
auto controlPacket = ControlPacket::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
auto controlPacket = ControlPacket::fromReceivedPacket(std::move(datagram._datagram), datagramSize, senderSockAddr);
controlPacket->setReceiveTime(receiveTime);

// move this control packet to the matching connection, if there is one

@@ -375,13 +406,13 @@ void Socket::readPendingDatagrams() {

} else {
// setup a Packet from the data we just read
auto packet = Packet::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
auto packet = Packet::fromReceivedPacket(std::move(datagram._datagram), datagramSize, senderSockAddr);
packet->setReceiveTime(receiveTime);

// save the sequence number in case this is the packet that sticks readyRead
_lastReceivedSequenceNumber = packet->getSequenceNumber();

// call our verification operator to see if this packet is verified
// call our hash verification operator to see if this packet is verified
if (!_packetFilterOperator || _packetFilterOperator(*packet)) {
if (packet->isReliable()) {
// if this was a reliable packet then signal the matching connection with the sequence number

@@ -395,6 +426,7 @@ void Socket::readPendingDatagrams() {
qCDebug(networking) << "Can't process packet: version" << (unsigned int)NLPacket::versionInHeader(*packet)
<< ", type" << NLPacket::typeInHeader(*packet);
#endif
_incomingDatagrams.pop_front();
continue;
}
}

@@ -410,6 +442,8 @@ void Socket::readPendingDatagrams() {
}
}
}

_incomingDatagrams.pop_front();
}
}
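The Socket change splits datagram handling in two: readPendingDatagrams() now only drains the OS buffer (up to MAX_DATAGRAMS_CONSECUTIVELY) into _incomingDatagrams and emits pendingDatagrams, and the queued processPendingDatagrams() slot does the parsing on a later event-loop pass, so the UDP receive buffer empties quickly under load. A minimal sketch of that two-stage pattern without the Qt plumbing (names are illustrative):

    #include <list>
    #include <memory>
    #include <utility>

    struct RawDatagram {
        std::unique_ptr<char[]> bytes;
        int length;
    };

    class DatagramQueue {
    public:
        // Called from the socket's readyRead handler: cheap copy-out only.
        void enqueue(std::unique_ptr<char[]> bytes, int length) {
            _pending.push_back({ std::move(bytes), length });
        }
        // Called later (e.g. via a queued signal): the expensive parsing happens here.
        template <typename ParseFn>
        void process(ParseFn parse) {
            while (!_pending.empty()) {
                parse(_pending.front());
                _pending.pop_front();
            }
        }
    private:
        std::list<RawDatagram> _pending;
    };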
Socket.h

@@ -17,6 +17,7 @@
#include <functional>
#include <unordered_map>
#include <mutex>
#include <list>

#include <QtCore/QObject>
#include <QtCore/QTimer>

@@ -94,6 +95,7 @@ public:

signals:
void clientHandshakeRequestComplete(const HifiSockAddr& sockAddr);
void pendingDatagrams(int datagramCount);

public slots:
void cleanupConnection(HifiSockAddr sockAddr);

@@ -101,6 +103,7 @@ public slots:

private slots:
void readPendingDatagrams();
void processPendingDatagrams(int datagramCount);
void checkForReadyReadBackup();

void handleSocketError(QAbstractSocket::SocketError socketError);

@@ -144,6 +147,17 @@ private:
int _lastPacketSizeRead { 0 };
SequenceNumber _lastReceivedSequenceNumber;
HifiSockAddr _lastPacketSockAddr;

struct Datagram {
QHostAddress _senderAddress;
int _senderPort;
int _datagramLength;
std::unique_ptr<char[]> _datagram;
p_high_resolution_clock::time_point _receiveTime;
};

std::list<Datagram> _incomingDatagrams;
int _maxDatagramsRead { 0 };

friend UDTTest;
};
PrioritySortUtil.h

@@ -16,55 +16,16 @@
#include "NumericalConstants.h"
#include "shared/ConicalViewFrustum.h"

/* PrioritySortUtil is a helper for sorting 3D things relative to a ViewFrustum. To use:
// PrioritySortUtil is a helper for sorting 3D things relative to a ViewFrustum.

(1) Derive a class from pure-virtual PrioritySortUtil::Sortable that wraps a copy of
the Thing you want to prioritize and sort:

class SortableWrapper: public PrioritySortUtil::Sortable {
public:
SortableWrapper(const Thing& thing) : _thing(thing) { }
glm::vec3 getPosition() const override { return _thing->getPosition(); }
float getRadius() const override { return 0.5f * _thing->getBoundingRadius(); }
uint64_t getTimestamp() const override { return _thing->getLastTime(); }
Thing getThing() const { return _thing; }
private:
Thing _thing;
};

(2) Make a PrioritySortUtil::PriorityQueue<Thing> and add them to the queue:

PrioritySortUtil::PriorityQueue<SortableWrapper> sortedThings(viewFrustum);
std::priority_queue< PrioritySortUtil::Sortable<Thing> > sortedThings;
for (thing in things) {
sortedThings.push(SortableWrapper(thing));
}

(3) Loop over your priority queue and do timeboxed work:

NOTE: Be careful using references to members of instances of T from std::priority_queue<T>.
Under the hood std::priority_queue<T> may re-use instances of T.
For example, after a pop() or a push() the top T may have the same memory address
as the top T before the pop() or push() (but point to a swapped instance of T).
This causes a reference to member variable of T to point to a different value
when operations taken on std::priority_queue<T> shuffle around the instances of T.

uint64_t cutoffTime = usecTimestampNow() + TIME_BUDGET;
while (!sortedThings.empty()) {
const Thing& thing = sortedThings.top();
// ...do work on thing...
sortedThings.pop();
if (usecTimestampNow() > cutoffTime) {
break;
}
}
*/
const float OUT_OF_VIEW_PENALTY = -10.0f;
const float OUT_OF_VIEW_THRESHOLD = 0.5f * OUT_OF_VIEW_PENALTY;

namespace PrioritySortUtil {

constexpr float DEFAULT_ANGULAR_COEF { 1.0f };
constexpr float DEFAULT_CENTER_COEF { 0.5f };
constexpr float DEFAULT_AGE_COEF { 0.25f / (float)(USECS_PER_SECOND) };
constexpr float DEFAULT_AGE_COEF { 0.25f };

class Sortable {
public:

@@ -84,8 +45,9 @@ namespace PrioritySortUtil {
PriorityQueue() = delete;
PriorityQueue(const ConicalViewFrustums& views) : _views(views) { }
PriorityQueue(const ConicalViewFrustums& views, float angularWeight, float centerWeight, float ageWeight)
: _views(views), _angularWeight(angularWeight), _centerWeight(centerWeight), _ageWeight(ageWeight)
{ }
: _views(views), _angularWeight(angularWeight), _centerWeight(centerWeight), _ageWeight(ageWeight)
, _usecCurrentTime(usecTimestampNow()) {
}

void setViews(const ConicalViewFrustums& views) { _views = views; }

@@ -93,6 +55,7 @@ namespace PrioritySortUtil {
_angularWeight = angularWeight;
_centerWeight = centerWeight;
_ageWeight = ageWeight;
_usecCurrentTime = usecTimestampNow();
}

size_t size() const { return _vector.size(); }

@@ -131,23 +94,18 @@ namespace PrioritySortUtil {
glm::vec3 offset = position - view.getPosition();
float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
const float MIN_RADIUS = 0.1f; // WORKAROUND for zero size objects (we still want them to sort by distance)
float radius = glm::min(thing.getRadius(), MIN_RADIUS);
float cosineAngle = (glm::dot(offset, view.getDirection()) / distance);
float age = (float)(usecTimestampNow() - thing.getTimestamp());
float radius = glm::max(thing.getRadius(), MIN_RADIUS);
// Other item's angle from view centre:
float cosineAngle = glm::dot(offset, view.getDirection()) / distance;
float age = float((_usecCurrentTime - thing.getTimestamp()) / USECS_PER_SECOND);

// we modulate "age" drift rate by the cosineAngle term to make peripheral objects sort forward
// at a reduced rate but we don't want the "age" term to go zero or negative so we clamp it
const float MIN_COSINE_ANGLE_FACTOR = 0.1f;
float cosineAngleFactor = glm::max(cosineAngle, MIN_COSINE_ANGLE_FACTOR);

float priority = _angularWeight * glm::max(radius, MIN_RADIUS) / distance
+ _centerWeight * cosineAngle
+ _ageWeight * cosineAngleFactor * age;
// the "age" term accumulates at the sum of all weights
float angularSize = radius / distance;
float priority = (_angularWeight * angularSize + _centerWeight * cosineAngle) * (age + 1.0f) + _ageWeight * age;

// decrement priority of things outside keyhole
if (distance - radius > view.getRadius()) {
if (!view.intersects(offset, distance, radius)) {
constexpr float OUT_OF_VIEW_PENALTY = -10.0f;
priority += OUT_OF_VIEW_PENALTY;
}
}

@@ -159,12 +117,13 @@ namespace PrioritySortUtil {
float _angularWeight { DEFAULT_ANGULAR_COEF };
float _centerWeight { DEFAULT_CENTER_COEF };
float _ageWeight { DEFAULT_AGE_COEF };
quint64 _usecCurrentTime { 0 };
};
} // namespace PrioritySortUtil

// for now we're keeping hard-coded sorted time budgets in one spot
// for now we're keeping hard-coded sorted time budgets in one spot
const uint64_t MAX_UPDATE_RENDERABLES_TIME_BUDGET = 2000; // usec
const uint64_t MIN_SORTED_UPDATE_RENDERABLES_TIME_BUDGET = 1000; // usec
const uint64_t MAX_UPDATE_AVATARS_TIME_BUDGET = 2000; // usec

#endif // hifi_PrioritySortUtil_h
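The sort priority changes from a weighted sum of angular size, centring and raw microsecond age to (angularWeight * angularSize + centerWeight * cosineAngle) * (age + 1) + ageWeight * age, with age now measured in seconds against a time captured once per queue, and an OUT_OF_VIEW_PENALTY applied when the conical-view intersection test fails. A free-function sketch of that computation (parameter names are illustrative, not the project's API; default weights mirror the header's defaults):

    #include <glm/glm.hpp>

    float sortPriority(glm::vec3 viewPosition, glm::vec3 viewDirection,
                       glm::vec3 itemPosition, float itemRadius,
                       float ageSeconds, bool inView,
                       float angularWeight = 1.0f, float centerWeight = 0.5f, float ageWeight = 0.25f) {
        glm::vec3 offset = itemPosition - viewPosition;
        float distance = glm::length(offset) + 0.001f;                   // avoid divide by zero
        float radius = glm::max(itemRadius, 0.1f);                        // floor for zero-size items
        float angularSize = radius / distance;                            // bigger or closer sorts earlier
        float cosineAngle = glm::dot(offset, viewDirection) / distance;   // 1.0 at the view centre
        float priority = (angularWeight * angularSize + centerWeight * cosineAngle) * (ageSeconds + 1.0f)
                         + ageWeight * ageSeconds;                        // age guarantees eventual service
        if (!inView) {
            priority += -10.0f;                                           // OUT_OF_VIEW_PENALTY
        }
        return priority;
    }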