Mirror of https://github.com/lubosz/overte.git (synced 2025-08-07 19:21:16 +02:00)
Merge pull request #14035 from SimonWalton-HiFi/avatar-mixer-improvements
Further avatar-mixer improvements
Commit 2eb801bdc6
9 changed files with 430 additions and 345 deletions
@@ -541,7 +541,7 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointer<ReceivedMess
             // ...For those nodes, reset the lastBroadcastTime to 0
             // so that the AvatarMixer will send Identity data to us
             [&](const SharedNodePointer& node) {
-                nodeData->setLastBroadcastTime(node->getUUID(), 0);
+                nodeData->setLastBroadcastTime(node->getLocalID(), 0);
                 nodeData->resetSentTraitData(node->getLocalID());
             }
         );
@@ -565,7 +565,8 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
    // parse the identity packet and update the change timestamp if appropriate
    bool identityChanged = false;
    bool displayNameChanged = false;
-    avatar.processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
+    QDataStream avatarIdentityStream(message->getMessage());
+    avatar.processAvatarIdentity(avatarIdentityStream, identityChanged, displayNameChanged);

    if (identityChanged) {
        QMutexLocker nodeDataLocker(&nodeData->getMutex());
@@ -637,7 +638,7 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
                // Reset the lastBroadcastTime for the ignored avatar to 0
                // so the AvatarMixer knows it'll have to send identity data about the ignored avatar
                // to the ignorer if the ignorer unignores.
-                nodeData->setLastBroadcastTime(ignoredUUID, 0);
+                nodeData->setLastBroadcastTime(ignoredNode->getLocalID(), 0);
                nodeData->resetSentTraitData(ignoredNode->getLocalID());
            }

@@ -647,7 +648,7 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
            // to the ignored if the ignorer unignores.
            AvatarMixerClientData* ignoredNodeData = reinterpret_cast<AvatarMixerClientData*>(ignoredNode->getLinkedData());
            if (ignoredNodeData) {
-                ignoredNodeData->setLastBroadcastTime(senderNode->getUUID(), 0);
+                ignoredNodeData->setLastBroadcastTime(senderNode->getLocalID(), 0);
                ignoredNodeData->resetSentTraitData(senderNode->getLocalID());
            }
        }

@@ -26,20 +26,20 @@ AvatarMixerClientData::AvatarMixerClientData(const QUuid& nodeID, Node::LocalID
    _avatar->setID(nodeID);
}

-uint64_t AvatarMixerClientData::getLastOtherAvatarEncodeTime(QUuid otherAvatar) const {
-    std::unordered_map<QUuid, uint64_t>::const_iterator itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
+uint64_t AvatarMixerClientData::getLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar) const {
+    const auto itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
    if (itr != _lastOtherAvatarEncodeTime.end()) {
        return itr->second;
    }
    return 0;
}

-void AvatarMixerClientData::setLastOtherAvatarEncodeTime(const QUuid& otherAvatar, uint64_t time) {
-    std::unordered_map<QUuid, uint64_t>::iterator itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
+void AvatarMixerClientData::setLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar, uint64_t time) {
+    auto itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
    if (itr != _lastOtherAvatarEncodeTime.end()) {
        itr->second = time;
    } else {
-        _lastOtherAvatarEncodeTime.emplace(std::pair<QUuid, uint64_t>(otherAvatar, time));
+        _lastOtherAvatarEncodeTime.emplace(std::pair<NLPacket::LocalID, uint64_t>(otherAvatar, time));
    }
}

@@ -220,7 +220,7 @@ void AvatarMixerClientData::checkSkeletonURLAgainstWhitelist(const SlaveSharedDa
    }
}

-uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
+uint64_t AvatarMixerClientData::getLastBroadcastTime(NLPacket::LocalID nodeUUID) const {
    // return the matching PacketSequenceNumber, or the default if we don't have it
    auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);
    if (nodeMatch != _lastBroadcastTimes.end()) {
@@ -229,9 +229,9 @@ uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) cons
    return 0;
}

-uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const {
+uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) const {
    // return the matching PacketSequenceNumber, or the default if we don't have it
-    auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeUUID);
+    auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeID);
    if (nodeMatch != _lastBroadcastSequenceNumbers.end()) {
        return nodeMatch->second;
    }
@@ -252,7 +252,7 @@ void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
        } else {
            killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
        }
-        setLastBroadcastTime(other->getUUID(), 0);
+        setLastBroadcastTime(other->getLocalID(), 0);

        resetSentTraitData(other->getLocalID());

@@ -331,9 +331,9 @@ AvatarMixerClientData::TraitsCheckTimestamp AvatarMixerClientData::getLastOtherA
    }
}

-void AvatarMixerClientData::cleanupKilledNode(const QUuid& nodeUUID, Node::LocalID nodeLocalID) {
-    removeLastBroadcastSequenceNumber(nodeUUID);
-    removeLastBroadcastTime(nodeUUID);
+void AvatarMixerClientData::cleanupKilledNode(const QUuid&, Node::LocalID nodeLocalID) {
+    removeLastBroadcastSequenceNumber(nodeLocalID);
+    removeLastBroadcastTime(nodeLocalID);
    _lastSentTraitsTimestamps.erase(nodeLocalID);
    _sentTraitVersions.erase(nodeLocalID);
}

@@ -49,17 +49,16 @@ public:
    const AvatarData* getConstAvatarData() const { return _avatar.get(); }
    AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }

+    uint16_t getLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) const;
+    void setLastBroadcastSequenceNumber(NLPacket::LocalID nodeID, uint16_t sequenceNumber)
+        { _lastBroadcastSequenceNumbers[nodeID] = sequenceNumber; }
+    Q_INVOKABLE void removeLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) { _lastBroadcastSequenceNumbers.erase(nodeID); }
    bool isIgnoreRadiusEnabled() const { return _isIgnoreRadiusEnabled; }
    void setIsIgnoreRadiusEnabled(bool enabled) { _isIgnoreRadiusEnabled = enabled; }

-    uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
-    void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
-        { _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
-    Q_INVOKABLE void removeLastBroadcastSequenceNumber(const QUuid& nodeUUID) { _lastBroadcastSequenceNumbers.erase(nodeUUID); }
-
-    uint64_t getLastBroadcastTime(const QUuid& nodeUUID) const;
-    void setLastBroadcastTime(const QUuid& nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
-    Q_INVOKABLE void removeLastBroadcastTime(const QUuid& nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }
+    uint64_t getLastBroadcastTime(NLPacket::LocalID nodeUUID) const;
+    void setLastBroadcastTime(NLPacket::LocalID nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
+    Q_INVOKABLE void removeLastBroadcastTime(NLPacket::LocalID nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }

    Q_INVOKABLE void cleanupKilledNode(const QUuid& nodeUUID, Node::LocalID nodeLocalID);

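Note on this header change: the per-receiver bookkeeping maps are now keyed by the small local node ID instead of the session QUuid. A minimal sketch of the pattern, assuming NLPacket::LocalID is a 16-bit integer typedef (the names below are illustrative, not the real class):

    #include <cstdint>
    #include <unordered_map>

    using LocalID = uint16_t;  // assumption: matches NLPacket::LocalID / Node::LocalID
    std::unordered_map<LocalID, uint64_t> lastBroadcastTimes;

    void setLastBroadcastTime(LocalID node, uint64_t time) { lastBroadcastTimes[node] = time; }

    uint64_t getLastBroadcastTime(LocalID node) {
        auto it = lastBroadcastTimes.find(node);
        return it != lastBroadcastTimes.end() ? it->second : 0;
    }

A small integral key hashes trivially with std::hash and avoids storing a 16-byte UUID per tracked avatar in every per-receiver map.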
@@ -93,7 +92,7 @@ public:

    void loadJSONStats(QJsonObject& jsonObject) const;

-    glm::vec3 getPosition() const { return _avatar ? _avatar->getWorldPosition() : glm::vec3(0); }
+    glm::vec3 getPosition() const { return _avatar ? _avatar->getClientGlobalPosition() : glm::vec3(0); }
    bool isRadiusIgnoring(const QUuid& other) const;
    void addToRadiusIgnoringSet(const QUuid& other);
    void removeFromRadiusIgnoringSet(const QUuid& other);
@@ -114,10 +113,10 @@ public:

    const ConicalViewFrustums& getViewFrustums() const { return _currentViewFrustums; }

-    uint64_t getLastOtherAvatarEncodeTime(QUuid otherAvatar) const;
-    void setLastOtherAvatarEncodeTime(const QUuid& otherAvatar, uint64_t time);
+    uint64_t getLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar) const;
+    void setLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar, uint64_t time);

-    QVector<JointData>& getLastOtherAvatarSentJoints(QUuid otherAvatar) { return _lastOtherAvatarSentJoints[otherAvatar]; }
+    QVector<JointData>& getLastOtherAvatarSentJoints(NLPacket::LocalID otherAvatar) { return _lastOtherAvatarSentJoints[otherAvatar]; }

    void queuePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node);
    int processPackets(const SlaveSharedData& slaveSharedData); // returns number of packets processed
@@ -150,13 +149,13 @@ private:
    AvatarSharedPointer _avatar { new AvatarData() };

    uint16_t _lastReceivedSequenceNumber { 0 };
-    std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
-    std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
+    std::unordered_map<NLPacket::LocalID, uint16_t> _lastBroadcastSequenceNumbers;
+    std::unordered_map<NLPacket::LocalID, uint64_t> _lastBroadcastTimes;

    // this is a map of the last time we encoded an "other" avatar for
    // sending to "this" node
-    std::unordered_map<QUuid, uint64_t> _lastOtherAvatarEncodeTime;
-    std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
+    std::unordered_map<NLPacket::LocalID, uint64_t> _lastOtherAvatarEncodeTime;
+    std::unordered_map<NLPacket::LocalID, QVector<JointData>> _lastOtherAvatarSentJoints;

    uint64_t _identityChangeTimestamp;
    bool _avatarSessionDisplayNameMustChange{ true };
@@ -68,13 +68,11 @@ void AvatarMixerSlave::processIncomingPackets(const SharedNodePointer& node) {
    _stats.processIncomingPacketsElapsedTime += (end - start);
}

-int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode) {
-    if (destinationNode->getType() == NodeType::Agent && !destinationNode->isUpstream()) {
+int AvatarMixerSlave::sendIdentityPacket(NLPacketList& packetList, const AvatarMixerClientData* nodeData, const Node& destinationNode) {
+    if (destinationNode.getType() == NodeType::Agent && !destinationNode.isUpstream()) {
        QByteArray individualData = nodeData->getConstAvatarData()->identityByteArray();
        individualData.replace(0, NUM_BYTES_RFC4122_UUID, nodeData->getNodeID().toRfc4122()); // FIXME, this looks suspicious
-        auto identityPackets = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
-        identityPackets->write(individualData);
-        DependencyManager::get<NodeList>()->sendPacketList(std::move(identityPackets), *destinationNode);
+        packetList.write(individualData);
        _stats.numIdentityPackets++;
        return individualData.size();
    } else {
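With this change sendIdentityPacket() only appends the identity blob to a caller-supplied NLPacketList; the caller decides when that list is flushed, so each receiver gets at most one AvatarIdentity packet list per broadcast pass instead of one per changed avatar. The calling pattern, pieced together from the other hunks of this commit (the loop itself is elided):

    auto identityPacketList = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
    // ... for each other avatar whose identity is stale for this receiver ...
    identityBytesSent += sendIdentityPacket(*identityPacketList, otherNodeData, *destinationNode);
    // ... after the loop ...
    identityPacketList->closeCurrentPacket();
    if (identityBytesSent > 0) {
        nodeList->sendPacketList(std::move(identityPacketList), *destinationNode);
    }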
@@ -247,12 +245,12 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
    // reset the internal state for correct random number distribution
    distribution.reset();

+    // Estimate number to sort on number sent last frame (with min. of 20).
+    const int numToSendEst = std::max(int(nodeData->getNumAvatarsSentLastFrame() * 2.5f), 20);
+
    // reset the number of sent avatars
    nodeData->resetNumAvatarsSentLastFrame();

-    // keep a counter of the number of considered avatars
-    int numOtherAvatars = 0;
-
    // keep track of outbound data rate specifically for avatar data
    int numAvatarDataBytes = 0;
    int identityBytesSent = 0;
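The estimate feeds the priority sort later in this function (getSortedVector(numToSendEst)): for a receiver that got 30 avatars last frame the budget is max(int(30 * 2.5f), 20) = 75, while a receiver that got only 4 is clamped to the floor of 20.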
@@ -261,7 +259,6 @@
    // max number of avatarBytes per frame
    int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);
-

    // keep track of the number of other avatars held back in this frame
    int numAvatarsHeldBack = 0;

@@ -279,10 +276,6 @@
    int minimumBytesPerAvatar = PALIsOpen ? AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID +
        sizeof(AvatarDataPacket::AvatarGlobalPosition) + sizeof(AvatarDataPacket::AudioLoudness) : 0;

-    // setup a PacketList for the avatarPackets
-    auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
-    static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;
-
    // compute node bounding box
    const float MY_AVATAR_BUBBLE_EXPANSION_FACTOR = 4.0f; // magic number determined emperically
    AABox nodeBox = computeBubbleBox(avatar, MY_AVATAR_BUBBLE_EXPANSION_FACTOR);
@@ -350,8 +343,7 @@
        // Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
        if (nodeData->isIgnoreRadiusEnabled() || (avatarClientNodeData->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
            // Perform the collision check between the two bounding boxes
-            const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
-            AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);
+            AABox otherNodeBox = avatarClientNodeData->getAvatar().getDefaultBubbleBox();
            if (nodeBox.touches(otherNodeBox)) {
                nodeData->ignoreOther(destinationNode, avatarNode);
                shouldIgnore = !getsAnyIgnored;
@@ -364,7 +356,7 @@
        }

        if (!shouldIgnore) {
-            AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
+            AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getLocalID());
            AvatarDataSequenceNumber lastSeqFromSender = avatarClientNodeData->getLastReceivedSequenceNumber();

            // FIXME - This code does appear to be working. But it seems brittle.
@@ -396,7 +388,7 @@
        if (!shouldIgnore) {
            // sort this one for later
            const AvatarData* avatarNodeData = avatarClientNodeData->getConstAvatarData();
-            auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNodeData->getSessionUUID());
+            auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNode->getLocalID());

            sortedAvatars.push(SortableAvatar(avatarNodeData, avatarNode, lastEncodeTime));
        }
@@ -406,8 +398,13 @@

    int remainingAvatars = (int)sortedAvatars.size();
    auto traitsPacketList = NLPacketList::create(PacketType::BulkAvatarTraits, QByteArray(), true, true);
+    auto avatarPacket = NLPacket::create(PacketType::BulkAvatarData);
+    const int avatarPacketCapacity = avatarPacket->getPayloadCapacity();
+    int avatarSpaceAvailable = avatarPacketCapacity;
+    int numPacketsSent = 0;
+    auto identityPacketList = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);

-    const auto& sortedAvatarVector = sortedAvatars.getSortedVector();
+    const auto& sortedAvatarVector = sortedAvatars.getSortedVector(numToSendEst);
    for (const auto& sortedAvatar : sortedAvatarVector) {
        const Node* otherNode = sortedAvatar.getNode();
        auto lastEncodeForOther = sortedAvatar.getTimestamp();
@@ -432,21 +429,9 @@

        auto startAvatarDataPacking = chrono::high_resolution_clock::now();

-        ++numOtherAvatars;
-
        const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
        const AvatarData* otherAvatar = otherNodeData->getConstAvatarData();

-        // If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
-        // the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
-        if (otherAvatar->hasProcessedFirstIdentity()
-            && nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
-            identityBytesSent += sendIdentityPacket(otherNodeData, node);
-
-            // remember the last time we sent identity details about this other node to the receiver
-            nodeData->setLastBroadcastTime(otherNode->getUUID(), usecTimestampNow());
-        }
-
        // Typically all out-of-view avatars but such avatars' priorities will rise with time:
        bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;

@@ -456,71 +441,56 @@
            } else if (!overBudget) {
                detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
                nodeData->incrementAvatarInView();
-            }
-
-        bool includeThisAvatar = true;
-        QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());
-        lastSentJointsForOther.resize(otherAvatar->getJointCount());
-        bool distanceAdjust = true;
-        glm::vec3 viewerPosition = myPosition;
-        AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
-        bool dropFaceTracking = false;
-
-        auto startSerialize = chrono::high_resolution_clock::now();
-        QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
-            hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition,
-            &lastSentJointsForOther);
-        auto endSerialize = chrono::high_resolution_clock::now();
-        _stats.toByteArrayElapsedTime +=
-            (quint64) chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();
-
-        if (bytes.size() > maxAvatarDataBytes) {
-            qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
-                << "resulted in very large buffer of" << bytes.size() << "bytes - dropping facial data";
-
-            dropFaceTracking = true; // first try dropping the facial data
-            bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
-                hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
-
-            if (bytes.size() > maxAvatarDataBytes) {
-                qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
-                    << "without facial data resulted in very large buffer of" << bytes.size()
-                    << "bytes - reducing to MinimumData";
-                bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
-                    hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
-
-                if (bytes.size() > maxAvatarDataBytes) {
-                    qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
-                        << "MinimumData resulted in very large buffer of" << bytes.size()
-                        << "bytes - refusing to send avatar";
-                    includeThisAvatar = false;
-                }
-            }
+
+                // If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
+                // the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
+                if (otherAvatar->hasProcessedFirstIdentity()
+                    && nodeData->getLastBroadcastTime(otherNode->getLocalID()) <= otherNodeData->getIdentityChangeTimestamp()) {
+                    identityBytesSent += sendIdentityPacket(*identityPacketList, otherNodeData, *destinationNode);
+
+                    // remember the last time we sent identity details about this other node to the receiver
+                    nodeData->setLastBroadcastTime(otherNode->getLocalID(), usecTimestampNow());
+                }
            }
        }

-        if (includeThisAvatar) {
-            // start a new segment in the PacketList for this avatar
-            avatarPacketList->startSegment();
-            numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
-            numAvatarDataBytes += avatarPacketList->write(bytes);
-            avatarPacketList->endSegment();
-
-            if (detail != AvatarData::NoData) {
-                _stats.numOthersIncluded++;
-
-                // increment the number of avatars sent to this reciever
-                nodeData->incrementNumAvatarsSentLastFrame();
-
-                // set the last sent sequence number for this sender on the receiver
-                nodeData->setLastBroadcastSequenceNumber(otherNode->getUUID(),
-                    otherNodeData->getLastReceivedSequenceNumber());
-                nodeData->setLastOtherAvatarEncodeTime(otherNode->getUUID(), usecTimestampNow());
+        QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getLocalID());
+
+        const bool distanceAdjust = true;
+        const bool dropFaceTracking = false;
+        AvatarDataPacket::SendStatus sendStatus;
+        sendStatus.sendUUID = true;
+
+        do {
+            auto startSerialize = chrono::high_resolution_clock::now();
+            QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
+                sendStatus, dropFaceTracking, distanceAdjust, myPosition,
+                &lastSentJointsForOther, avatarSpaceAvailable);
+            auto endSerialize = chrono::high_resolution_clock::now();
+            _stats.toByteArrayElapsedTime +=
+                (quint64)chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();
+
+            avatarPacket->write(bytes);
+            avatarSpaceAvailable -= bytes.size();
+            numAvatarDataBytes += bytes.size();
+            if (!sendStatus || avatarSpaceAvailable < (int)AvatarDataPacket::MIN_BULK_PACKET_SIZE) {
+                // Weren't able to fit everything.
+                nodeList->sendPacket(std::move(avatarPacket), *destinationNode);
+                ++numPacketsSent;
+                avatarPacket = NLPacket::create(PacketType::BulkAvatarData);
+                avatarSpaceAvailable = avatarPacketCapacity;
            }
-        } else {
-            // TODO? this avatar is not included now, and will probably not be included next frame.
-            // It would be nice if we could tweak its future sort priority to put it at the back of the list.
+        } while (!sendStatus);
+
+        if (detail != AvatarData::NoData) {
+            _stats.numOthersIncluded++;
+
+            // increment the number of avatars sent to this receiver
+            nodeData->incrementNumAvatarsSentLastFrame();
+
+            // set the last sent sequence number for this sender on the receiver
+            nodeData->setLastBroadcastSequenceNumber(otherNode->getLocalID(),
+                otherNodeData->getLastReceivedSequenceNumber());
+            nodeData->setLastOtherAvatarEncodeTime(otherNode->getLocalID(), usecTimestampNow());
        }

        auto endAvatarDataPacking = chrono::high_resolution_clock::now();
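The replacement loop above serializes each avatar directly into a fixed-capacity NLPacket and flushes whenever a pass could not finish (sendStatus is false) or the remaining space falls below the minimum segment size. A stripped-down sketch of that control flow, using hypothetical stand-ins (serializeSome(), flushAndReset()) for the real AvatarData::toByteArray() and NLPacket handling:

    int spaceAvailable = packetCapacity;
    bool done = false;
    do {
        // Hypothetical helper: writes as much of the avatar as fits in spaceAvailable,
        // returns the bytes produced and sets done once the whole avatar has been written.
        QByteArray bytes = serializeSome(avatar, spaceAvailable, done);
        packet->write(bytes);
        spaceAvailable -= bytes.size();
        if (!done || spaceAvailable < minSegmentSize) {
            // Hypothetical helper: send the full packet and start a fresh one.
            flushAndReset(packet, spaceAvailable, packetCapacity);
        }
    } while (!done);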
@@ -532,17 +502,21 @@
        remainingAvatars--;
    }

+    if (nodeData->getNumAvatarsSentLastFrame() > numToSendEst) {
+        qCWarning(avatars) << "More avatars sent than upper estimate" << nodeData->getNumAvatarsSentLastFrame()
+            << " / " << numToSendEst;
+    }
+
    quint64 startPacketSending = usecTimestampNow();

-    // close the current packet so that we're always sending something
-    avatarPacketList->closeCurrentPacket(true);
+    if (avatarPacket->getPayloadSize() != 0) {
+        nodeList->sendPacket(std::move(avatarPacket), *destinationNode);
+        ++numPacketsSent;
+    }

-    _stats.numPacketsSent += (int)avatarPacketList->getNumPackets();
+    _stats.numPacketsSent += numPacketsSent;
    _stats.numBytesSent += numAvatarDataBytes;

-    // send the avatar data PacketList
-    nodeList->sendPacketList(std::move(avatarPacketList), *destinationNode);
-
    // record the bytes sent for other avatar data in the AvatarMixerClientData
    nodeData->recordSentAvatarData(numAvatarDataBytes);

@@ -554,6 +528,12 @@
        nodeList->sendPacketList(std::move(traitsPacketList), *destinationNode);
    }

+    // Send any AvatarIdentity packets:
+    identityPacketList->closeCurrentPacket();
+    if (identityBytesSent > 0) {
+        nodeList->sendPacketList(std::move(identityPacketList), *destinationNode);
+    }
+
    // record the number of avatars held back this frame
    nodeData->recordNumOtherAvatarStarves(numAvatarsHeldBack);
    nodeData->recordNumOtherAvatarSkips(numAvatarsWithSkippedFrames);
@@ -599,20 +579,20 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
            // so we always send a full update for this avatar

            quint64 start = usecTimestampNow();
-            AvatarDataPacket::HasFlags flagsOut;
+            AvatarDataPacket::SendStatus sendStatus;

            QVector<JointData> emptyLastJointSendData { otherAvatar->getJointCount() };

            QByteArray avatarByteArray = otherAvatar->toByteArray(AvatarData::SendAllData, 0, emptyLastJointSendData,
-                flagsOut, false, false, glm::vec3(0), nullptr);
+                sendStatus, false, false, glm::vec3(0), nullptr, 0);
            quint64 end = usecTimestampNow();
            _stats.toByteArrayElapsedTime += (end - start);

-            auto lastBroadcastTime = nodeData->getLastBroadcastTime(agentNode->getUUID());
+            auto lastBroadcastTime = nodeData->getLastBroadcastTime(agentNode->getLocalID());
            if (lastBroadcastTime <= agentNodeData->getIdentityChangeTimestamp()
                || (start - lastBroadcastTime) >= REBROADCAST_IDENTITY_TO_DOWNSTREAM_EVERY_US) {
                sendReplicatedIdentityPacket(*agentNode, agentNodeData, *node);
-                nodeData->setLastBroadcastTime(agentNode->getUUID(), start);
+                nodeData->setLastBroadcastTime(agentNode->getLocalID(), start);
            }

            // figure out how large our avatar byte array can be to fit in the packet list
@@ -630,14 +610,14 @@
                    << "-" << avatarByteArray.size() << "bytes";

                avatarByteArray = otherAvatar->toByteArray(AvatarData::SendAllData, 0, emptyLastJointSendData,
-                    flagsOut, true, false, glm::vec3(0), nullptr);
+                    sendStatus, true, false, glm::vec3(0), nullptr, 0);

                if (avatarByteArray.size() > maxAvatarByteArraySize) {
                    qCWarning(avatars) << "Replicated avatar data without facial data still too large for"
                        << otherAvatar->getSessionUUID() << "-" << avatarByteArray.size() << "bytes";

                    avatarByteArray = otherAvatar->toByteArray(AvatarData::MinimumData, 0, emptyLastJointSendData,
-                        flagsOut, true, false, glm::vec3(0), nullptr);
+                        sendStatus, true, false, glm::vec3(0), nullptr, 0);
                }
            }

@@ -646,7 +626,7 @@
            nodeData->incrementNumAvatarsSentLastFrame();

            // set the last sent sequence number for this sender on the receiver
-            nodeData->setLastBroadcastSequenceNumber(agentNode->getUUID(),
+            nodeData->setLastBroadcastSequenceNumber(agentNode->getLocalID(),
                agentNodeData->getLastReceivedSequenceNumber());

            // increment the number of avatars sent to this reciever
@@ -101,7 +101,7 @@
    void harvestStats(AvatarMixerSlaveStats& stats);

private:
-    int sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode);
+    int sendIdentityPacket(NLPacketList& packet, const AvatarMixerClientData* nodeData, const Node& destinationNode);
    int sendReplicatedIdentityPacket(const Node& agentNode, const AvatarMixerClientData* nodeData, const Node& destinationNode);

    qint64 addChangedTraitsToBulkPacket(AvatarMixerClientData* listeningNodeData,
@@ -66,7 +66,7 @@ size_t AvatarDataPacket::maxFaceTrackerInfoSize(size_t numBlendshapeCoefficients
}

size_t AvatarDataPacket::maxJointDataSize(size_t numJoints, bool hasGrabJoints) {
-    const size_t validityBitsSize = (size_t)std::ceil(numJoints / (float)BITS_IN_BYTE);
+    const size_t validityBitsSize = calcBitVectorSize((int)numJoints);

    size_t totalSize = sizeof(uint8_t); // numJoints

@@ -228,18 +228,18 @@ float AvatarData::getDistanceBasedMinTranslationDistance(glm::vec3 viewerPositio

// we want to track outbound data in this case...
QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
-    AvatarDataPacket::HasFlags hasFlagsOut;
    auto lastSentTime = _lastToByteArray;
    _lastToByteArray = usecTimestampNow();
-    return AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
-        hasFlagsOut, dropFaceTracking, false, glm::vec3(0), nullptr,
-        &_outboundDataRate);
+    AvatarDataPacket::SendStatus sendStatus;
+    auto avatarByteArray = AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
+        sendStatus, dropFaceTracking, false, glm::vec3(0), nullptr, 0, &_outboundDataRate);
+    return avatarByteArray;
}

QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime,
    const QVector<JointData>& lastSentJointData,
-    AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust,
-    glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut) const {
+    AvatarDataPacket::SendStatus& sendStatus, bool dropFaceTracking, bool distanceAdjust,
+    glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, int maxDataSize, AvatarDataRate* outboundDataRateOut) const {

    bool cullSmallChanges = (dataDetail == CullSmallData);
    bool sendAll = (dataDetail == SendAllData);
@@ -247,11 +247,23 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
    bool sendPALMinimum = (dataDetail == PALMinimum);

    lazyInitHeadData();
+    ASSERT(maxDataSize == 0 || (size_t)maxDataSize >= AvatarDataPacket::MIN_BULK_PACKET_SIZE);
+
+    // Leading flags, to indicate how much data is actually included in the packet...
+    AvatarDataPacket::HasFlags wantedFlags = 0;
+    AvatarDataPacket::HasFlags includedFlags = 0;
+    AvatarDataPacket::HasFlags extraReturnedFlags = 0; // For partial joint data.

    // special case, if we were asked for no data, then just include the flags all set to nothing
    if (dataDetail == NoData) {
-        AvatarDataPacket::HasFlags packetStateFlags = 0;
-        QByteArray avatarDataByteArray(reinterpret_cast<char*>(&packetStateFlags), sizeof(packetStateFlags));
+        sendStatus.itemFlags = wantedFlags;
+
+        QByteArray avatarDataByteArray;
+        if (sendStatus.sendUUID) {
+            avatarDataByteArray.append(getSessionUUID().toRfc4122().data(), NUM_BYTES_RFC4122_UUID);
+        }
+
+        avatarDataByteArray.append((char*) &wantedFlags, sizeof wantedFlags);
        return avatarDataByteArray;
    }

@@ -274,109 +286,141 @@
    // 3 translations * 6 bytes = 6.48kbps
    //

-    auto parentID = getParentID();
+    QUuid parentID;

-    bool hasAvatarGlobalPosition = true; // always include global position
-    bool hasAvatarOrientation = false;
-    bool hasAvatarBoundingBox = false;
-    bool hasAvatarScale = false;
-    bool hasLookAtPosition = false;
-    bool hasAudioLoudness = false;
-    bool hasSensorToWorldMatrix = false;
-    bool hasAdditionalFlags = false;
-
-    // local position, and parent info only apply to avatars that are parented. The local position
-    // and the parent info can change independently though, so we track their "changed since"
-    // separately
-    bool hasParentInfo = false;
-    bool hasAvatarLocalPosition = false;
-
-    bool hasFaceTrackerInfo = false;
-    bool hasJointData = false;
-    bool hasJointDefaultPoseFlags = false;
-    bool hasGrabJoints = false;
-
    glm::mat4 leftFarGrabMatrix;
    glm::mat4 rightFarGrabMatrix;
    glm::mat4 mouseFarGrabMatrix;

-    if (sendPALMinimum) {
-        hasAudioLoudness = true;
-    } else {
-        hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
-        hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
-        hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
-        hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
-        hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
-        hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
-        hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
-        hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
-        hasAvatarLocalPosition = hasParent() && (sendAll ||
-            tranlationChangedSince(lastSentTime) ||
-            parentInfoChangedSince(lastSentTime));
-
-        hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
-            (sendAll || faceTrackerInfoChangedSince(lastSentTime));
-        hasJointData = sendAll || !sendMinimum;
-        hasJointDefaultPoseFlags = hasJointData;
-        if (hasJointData) {
-            bool leftValid;
-            leftFarGrabMatrix = _farGrabLeftMatrixCache.get(leftValid);
-            if (!leftValid) {
-                leftFarGrabMatrix = glm::mat4();
-            }
-            bool rightValid;
-            rightFarGrabMatrix = _farGrabRightMatrixCache.get(rightValid);
-            if (!rightValid) {
-                rightFarGrabMatrix = glm::mat4();
-            }
-            bool mouseValid;
-            mouseFarGrabMatrix = _farGrabMouseMatrixCache.get(mouseValid);
-            if (!mouseValid) {
-                mouseFarGrabMatrix = glm::mat4();
-            }
-            hasGrabJoints = (leftValid || rightValid || mouseValid);
+    if (sendStatus.itemFlags == 0) {
+        // New avatar ...
+        bool hasAvatarGlobalPosition = true; // always include global position
+        bool hasAvatarOrientation = false;
+        bool hasAvatarBoundingBox = false;
+        bool hasAvatarScale = false;
+        bool hasLookAtPosition = false;
+        bool hasAudioLoudness = false;
+        bool hasSensorToWorldMatrix = false;
+        bool hasJointData = false;
+        bool hasJointDefaultPoseFlags = false;
+        bool hasAdditionalFlags = false;
+
+        // local position, and parent info only apply to avatars that are parented. The local position
+        // and the parent info can change independently though, so we track their "changed since"
+        // separately
+        bool hasParentInfo = false;
+        bool hasAvatarLocalPosition = false;
+
+        bool hasFaceTrackerInfo = false;
+
+        if (sendPALMinimum) {
+            hasAudioLoudness = true;
+        } else {
+            hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
+            hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
+            hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
+            hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
+            hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
+            hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
+            hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
+            hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
+            hasAvatarLocalPosition = hasParent() && (sendAll ||
+                tranlationChangedSince(lastSentTime) ||
+                parentInfoChangedSince(lastSentTime));
+
+            hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
+                (sendAll || faceTrackerInfoChangedSince(lastSentTime));
+            hasJointData = !sendMinimum;
+            hasJointDefaultPoseFlags = hasJointData;
+        }
+
+        wantedFlags =
+            (hasAvatarGlobalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_GLOBAL_POSITION : 0)
+            | (hasAvatarBoundingBox ? AvatarDataPacket::PACKET_HAS_AVATAR_BOUNDING_BOX : 0)
+            | (hasAvatarOrientation ? AvatarDataPacket::PACKET_HAS_AVATAR_ORIENTATION : 0)
+            | (hasAvatarScale ? AvatarDataPacket::PACKET_HAS_AVATAR_SCALE : 0)
+            | (hasLookAtPosition ? AvatarDataPacket::PACKET_HAS_LOOK_AT_POSITION : 0)
+            | (hasAudioLoudness ? AvatarDataPacket::PACKET_HAS_AUDIO_LOUDNESS : 0)
+            | (hasSensorToWorldMatrix ? AvatarDataPacket::PACKET_HAS_SENSOR_TO_WORLD_MATRIX : 0)
+            | (hasAdditionalFlags ? AvatarDataPacket::PACKET_HAS_ADDITIONAL_FLAGS : 0)
+            | (hasParentInfo ? AvatarDataPacket::PACKET_HAS_PARENT_INFO : 0)
+            | (hasAvatarLocalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_LOCAL_POSITION : 0)
+            | (hasFaceTrackerInfo ? AvatarDataPacket::PACKET_HAS_FACE_TRACKER_INFO : 0)
+            | (hasJointData ? AvatarDataPacket::PACKET_HAS_JOINT_DATA : 0)
+            | (hasJointDefaultPoseFlags ? AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS : 0)
+            | (hasJointData ? AvatarDataPacket::PACKET_HAS_GRAB_JOINTS : 0);
+
+        sendStatus.itemFlags = wantedFlags;
+        sendStatus.rotationsSent = 0;
+        sendStatus.translationsSent = 0;
+    } else { // Continuing avatar ...
+        wantedFlags = sendStatus.itemFlags;
+        if (wantedFlags & AvatarDataPacket::PACKET_HAS_GRAB_JOINTS) {
+            // Must send joints for grab joints -
+            wantedFlags |= AvatarDataPacket::PACKET_HAS_JOINT_DATA;
        }
    }

+    if (wantedFlags & AvatarDataPacket::PACKET_HAS_GRAB_JOINTS) {
+        bool leftValid;
+        leftFarGrabMatrix = _farGrabLeftMatrixCache.get(leftValid);
+        if (!leftValid) {
+            leftFarGrabMatrix = glm::mat4();
+        }
+        bool rightValid;
+        rightFarGrabMatrix = _farGrabRightMatrixCache.get(rightValid);
+        if (!rightValid) {
+            rightFarGrabMatrix = glm::mat4();
+        }
+        bool mouseValid;
+        mouseFarGrabMatrix = _farGrabMouseMatrixCache.get(mouseValid);
+        if (!mouseValid) {
+            mouseFarGrabMatrix = glm::mat4();
+        }
+        if (!(leftValid || rightValid || mouseValid)) {
+            wantedFlags &= ~AvatarDataPacket::PACKET_HAS_GRAB_JOINTS;
+        }
+    }
+    if (wantedFlags & (AvatarDataPacket::PACKET_HAS_ADDITIONAL_FLAGS | AvatarDataPacket::PACKET_HAS_PARENT_INFO)) {
+        parentID = getParentID();
+    }

-    const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
-        (hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) : 0) +
-        (hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size(), hasGrabJoints) : 0) +
-        (hasJointDefaultPoseFlags ? AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size()) : 0);
+    const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE + NUM_BYTES_RFC4122_UUID +
+        AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) +
+        AvatarDataPacket::maxJointDataSize(_jointData.size(), true) +
+        AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size());
+
+    if (maxDataSize == 0) {
+        maxDataSize = (int)byteArraySize;
+    }

    QByteArray avatarDataByteArray((int)byteArraySize, 0);
    unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(avatarDataByteArray.data());
-    unsigned char* startPosition = destinationBuffer;
+    const unsigned char* const startPosition = destinationBuffer;
+    const unsigned char* const packetEnd = destinationBuffer + maxDataSize;

-    // Leading flags, to indicate how much data is actually included in the packet...
-    AvatarDataPacket::HasFlags packetStateFlags =
-        (hasAvatarGlobalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_GLOBAL_POSITION : 0)
-        | (hasAvatarBoundingBox ? AvatarDataPacket::PACKET_HAS_AVATAR_BOUNDING_BOX : 0)
-        | (hasAvatarOrientation ? AvatarDataPacket::PACKET_HAS_AVATAR_ORIENTATION : 0)
-        | (hasAvatarScale ? AvatarDataPacket::PACKET_HAS_AVATAR_SCALE : 0)
-        | (hasLookAtPosition ? AvatarDataPacket::PACKET_HAS_LOOK_AT_POSITION : 0)
-        | (hasAudioLoudness ? AvatarDataPacket::PACKET_HAS_AUDIO_LOUDNESS : 0)
-        | (hasSensorToWorldMatrix ? AvatarDataPacket::PACKET_HAS_SENSOR_TO_WORLD_MATRIX : 0)
-        | (hasAdditionalFlags ? AvatarDataPacket::PACKET_HAS_ADDITIONAL_FLAGS : 0)
-        | (hasParentInfo ? AvatarDataPacket::PACKET_HAS_PARENT_INFO : 0)
-        | (hasAvatarLocalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_LOCAL_POSITION : 0)
-        | (hasFaceTrackerInfo ? AvatarDataPacket::PACKET_HAS_FACE_TRACKER_INFO : 0)
-        | (hasJointData ? AvatarDataPacket::PACKET_HAS_JOINT_DATA : 0)
-        | (hasJointDefaultPoseFlags ? AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS : 0)
-        | (hasGrabJoints ? AvatarDataPacket::PACKET_HAS_GRAB_JOINTS : 0);
-
-    memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
-    destinationBuffer += sizeof(packetStateFlags);

#define AVATAR_MEMCPY(src) \
    memcpy(destinationBuffer, &(src), sizeof(src)); \
    destinationBuffer += sizeof(src);

-    if (hasAvatarGlobalPosition) {
-        auto startSection = destinationBuffer;
-        AVATAR_MEMCPY(_globalPosition);
+// If we want an item and there's sufficient space:
+#define IF_AVATAR_SPACE(flag, space) \
+    if ((wantedFlags & AvatarDataPacket::flag) \
+        && (packetEnd - destinationBuffer) >= (ptrdiff_t)(space) \
+        && (includedFlags |= AvatarDataPacket::flag))
+
+    if (sendStatus.sendUUID) {
+        memcpy(destinationBuffer, getSessionUUID().toRfc4122(), NUM_BYTES_RFC4122_UUID);
+        destinationBuffer += NUM_BYTES_RFC4122_UUID;
+    }
+
+    unsigned char * packetFlagsLocation = destinationBuffer;
+    destinationBuffer += sizeof(wantedFlags);
+
+    IF_AVATAR_SPACE(PACKET_HAS_AVATAR_GLOBAL_POSITION, sizeof _globalPosition) {
+        auto startSection = destinationBuffer;
+        AVATAR_MEMCPY(_globalPosition);

        int numBytes = destinationBuffer - startSection;

        if (outboundDataRateOut) {
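IF_AVATAR_SPACE packages the new per-item gating: an item is written only if it is still wanted and fits in the space left before packetEnd, and the macro records it in includedFlags as a side effect of the &&-chain. A stripped-down illustration of the same idiom (the flag value, payload and function names are placeholders, not the real AvatarDataPacket constants):

    #include <cstddef>
    #include <cstdint>
    #include <cstring>

    using HasFlags = uint16_t;
    constexpr HasFlags HAS_POSITION = 1 << 0;  // placeholder flag

    // Enter the block only if the item is wanted AND it fits; record it as included.
    #define IF_SPACE(flag, space) \
        if ((wantedFlags & (flag)) && (end - cursor) >= (ptrdiff_t)(space) && (includedFlags |= (flag)))

    size_t packPosition(unsigned char* buffer, size_t capacity, HasFlags wantedFlags, HasFlags& includedFlags) {
        unsigned char* cursor = buffer;
        const unsigned char* end = buffer + capacity;
        const float position[3] = { 1.0f, 2.0f, 3.0f };  // stand-in payload
        IF_SPACE(HAS_POSITION, sizeof position) {
            std::memcpy(cursor, position, sizeof position);
            cursor += sizeof position;
        }
        return cursor - buffer;  // bytes actually written
    }

Because the flag bits are non-zero, `includedFlags |= flag` evaluates to true whenever the branch is taken, so the condition doubles as the record of what actually made it into the packet.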
@@ -384,7 +428,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        }
    }

-    if (hasAvatarBoundingBox) {
+    IF_AVATAR_SPACE(PACKET_HAS_AVATAR_BOUNDING_BOX, sizeof _globalBoundingBoxDimensions + sizeof _globalBoundingBoxOffset) {
        auto startSection = destinationBuffer;
        AVATAR_MEMCPY(_globalBoundingBoxDimensions);
        AVATAR_MEMCPY(_globalBoundingBoxOffset);
@@ -395,7 +439,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        }
    }

-    if (hasAvatarOrientation) {
+    IF_AVATAR_SPACE(PACKET_HAS_AVATAR_ORIENTATION, sizeof(AvatarDataPacket::SixByteQuat)) {
        auto startSection = destinationBuffer;
        auto localOrientation = getOrientationOutbound();
        destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, localOrientation);
@@ -406,7 +450,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        }
    }

-    if (hasAvatarScale) {
+    IF_AVATAR_SPACE(PACKET_HAS_AVATAR_SCALE, sizeof(AvatarDataPacket::AvatarScale)) {
        auto startSection = destinationBuffer;
        auto data = reinterpret_cast<AvatarDataPacket::AvatarScale*>(destinationBuffer);
        auto scale = getDomainLimitedScale();
@@ -419,7 +463,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        }
    }

-    if (hasLookAtPosition) {
+    IF_AVATAR_SPACE(PACKET_HAS_LOOK_AT_POSITION, sizeof(_headData->getLookAtPosition()) ) {
        auto startSection = destinationBuffer;
        AVATAR_MEMCPY(_headData->getLookAtPosition());
        int numBytes = destinationBuffer - startSection;
@ -428,7 +472,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasAudioLoudness) {
|
IF_AVATAR_SPACE(PACKET_HAS_AUDIO_LOUDNESS, sizeof(AvatarDataPacket::AudioLoudness)) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto data = reinterpret_cast<AvatarDataPacket::AudioLoudness*>(destinationBuffer);
|
auto data = reinterpret_cast<AvatarDataPacket::AudioLoudness*>(destinationBuffer);
|
||||||
data->audioLoudness = packFloatGainToByte(getAudioLoudness() / AUDIO_LOUDNESS_SCALE);
|
data->audioLoudness = packFloatGainToByte(getAudioLoudness() / AUDIO_LOUDNESS_SCALE);
|
||||||
|
@ -440,7 +484,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasSensorToWorldMatrix) {
|
IF_AVATAR_SPACE(PACKET_HAS_SENSOR_TO_WORLD_MATRIX, sizeof(AvatarDataPacket::SensorToWorldMatrix)) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto data = reinterpret_cast<AvatarDataPacket::SensorToWorldMatrix*>(destinationBuffer);
|
auto data = reinterpret_cast<AvatarDataPacket::SensorToWorldMatrix*>(destinationBuffer);
|
||||||
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
|
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();
|
||||||
|
@ -458,7 +502,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasAdditionalFlags) {
|
IF_AVATAR_SPACE(PACKET_HAS_ADDITIONAL_FLAGS, sizeof (uint16_t)) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);
|
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);
|
||||||
|
|
||||||
|
@ -506,7 +550,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasParentInfo) {
|
IF_AVATAR_SPACE(PACKET_HAS_PARENT_INFO, sizeof(AvatarDataPacket::ParentInfo)) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto parentInfo = reinterpret_cast<AvatarDataPacket::ParentInfo*>(destinationBuffer);
|
auto parentInfo = reinterpret_cast<AvatarDataPacket::ParentInfo*>(destinationBuffer);
|
||||||
QByteArray referentialAsBytes = parentID.toRfc4122();
|
QByteArray referentialAsBytes = parentID.toRfc4122();
|
||||||
|
@ -520,7 +564,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if (hasAvatarLocalPosition) {
|
IF_AVATAR_SPACE(PACKET_HAS_AVATAR_LOCAL_POSITION, sizeof(getLocalPosition()) ) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
const auto localPosition = getLocalPosition();
|
const auto localPosition = getLocalPosition();
|
||||||
AVATAR_MEMCPY(localPosition);
|
AVATAR_MEMCPY(localPosition);
|
||||||
|
@ -531,11 +575,11 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
const auto& blendshapeCoefficients = _headData->getBlendshapeCoefficients();
|
||||||
// If it is connected, pack up the data
|
// If it is connected, pack up the data
|
||||||
if (hasFaceTrackerInfo) {
|
IF_AVATAR_SPACE(PACKET_HAS_FACE_TRACKER_INFO, sizeof(AvatarDataPacket::FaceTrackerInfo) + (size_t)blendshapeCoefficients.size() * sizeof(float)) {
|
||||||
auto startSection = destinationBuffer;
|
auto startSection = destinationBuffer;
|
||||||
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
|
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
|
||||||
const auto& blendshapeCoefficients = _headData->getBlendshapeCoefficients();
|
|
||||||
// note: we don't use the blink and average loudness, we just use the numBlendShapes and
|
// note: we don't use the blink and average loudness, we just use the numBlendShapes and
|
||||||
// compute the procedural info on the client side.
|
// compute the procedural info on the client side.
|
||||||
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
|
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
|
||||||
|
@@ -555,125 +599,125 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
    }

    QVector<JointData> jointData;
-    if (hasJointData || hasJointDefaultPoseFlags) {
+    if (wantedFlags & (AvatarDataPacket::PACKET_HAS_JOINT_DATA | AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS)) {
        QReadLocker readLock(&_jointDataLock);
        jointData = _jointData;
    }
+    const int numJoints = jointData.size();
+    assert(numJoints <= 255);
+    const int jointBitVectorSize = calcBitVectorSize(numJoints);

-    // If it is connected, pack up the data
-    if (hasJointData) {
+    // Start joints if room for at least the faux joints.
+    IF_AVATAR_SPACE(PACKET_HAS_JOINT_DATA, 1 + 2 * jointBitVectorSize + AvatarDataPacket::FAUX_JOINTS_SIZE) {
+        // Allow for faux joints + translation bit-vector:
+        const ptrdiff_t minSizeForJoint = sizeof(AvatarDataPacket::SixByteQuat)
+            + jointBitVectorSize + AvatarDataPacket::FAUX_JOINTS_SIZE;
        auto startSection = destinationBuffer;

        // joint rotation data
-        int numJoints = jointData.size();
        *destinationBuffer++ = (uint8_t)numJoints;

        unsigned char* validityPosition = destinationBuffer;
-        unsigned char validity = 0;
-        int validityBit = 0;
-        int numValidityBytes = calcBitVectorSize(numJoints);
+        memset(validityPosition, 0, jointBitVectorSize);

#ifdef WANT_DEBUG
        int rotationSentCount = 0;
        unsigned char* beforeRotations = destinationBuffer;
#endif

-        destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
+        destinationBuffer += jointBitVectorSize; // Move pointer past the validity bytes

        // sentJointDataOut and lastSentJointData might be the same vector
        if (sentJointDataOut) {
            sentJointDataOut->resize(numJoints); // Make sure the destination is resized before using it
        }
+        const JointData *const joints = jointData.data();
+        JointData *const sentJoints = sentJointDataOut ? sentJointDataOut->data() : nullptr;

        float minRotationDOT = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinRotationDOT(viewerPosition) : AVATAR_MIN_ROTATION_DOT;

-        for (int i = 0; i < jointData.size(); i++) {
-            const JointData& data = jointData[i];
+        int i = sendStatus.rotationsSent;
+        for (; i < numJoints; ++i) {
+            const JointData& data = joints[i];
            const JointData& last = lastSentJointData[i];

-            if (!data.rotationIsDefaultPose) {
-                // The dot product for larger rotations is a lower number.
-                // So if the dot() is less than the value, then the rotation is a larger angle of rotation
-                if (sendAll || last.rotationIsDefaultPose || (!cullSmallChanges && last.rotation != data.rotation)
-                    || (cullSmallChanges && fabsf(glm::dot(last.rotation, data.rotation)) < minRotationDOT) ) {
-                    validity |= (1 << validityBit);
+            if (packetEnd - destinationBuffer >= minSizeForJoint) {
+                if (!data.rotationIsDefaultPose) {
+                    // The dot product for larger rotations is a lower number,
+                    // so if the dot() is less than the value, then the rotation is a larger angle of rotation
+                    if (sendAll || last.rotationIsDefaultPose || (!cullSmallChanges && last.rotation != data.rotation)
+                        || (cullSmallChanges && fabsf(glm::dot(last.rotation, data.rotation)) < minRotationDOT)) {
+                        validityPosition[i / BITS_IN_BYTE] |= 1 << (i % BITS_IN_BYTE);
#ifdef WANT_DEBUG
                        rotationSentCount++;
#endif
                        destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);

-                    if (sentJointDataOut) {
-                        (*sentJointDataOut)[i].rotation = data.rotation;
+                        if (sentJoints) {
+                            sentJoints[i].rotation = data.rotation;
+                        }
                    }
                }
+            } else {
+                break;
            }

-            if (sentJointDataOut) {
-                (*sentJointDataOut)[i].rotationIsDefaultPose = data.rotationIsDefaultPose;
+            if (sentJoints) {
+                sentJoints[i].rotationIsDefaultPose = data.rotationIsDefaultPose;
            }
-
-            if (++validityBit == BITS_IN_BYTE) {
-                *validityPosition++ = validity;
-                validityBit = validity = 0;
-            }
-        }
-        if (validityBit != 0) {
-            *validityPosition++ = validity;
        }
+        sendStatus.rotationsSent = i;

        // joint translation data
        validityPosition = destinationBuffer;
-        validity = 0;
-        validityBit = 0;

#ifdef WANT_DEBUG
        int translationSentCount = 0;
        unsigned char* beforeTranslations = destinationBuffer;
#endif

-        destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
+        memset(destinationBuffer, 0, jointBitVectorSize);
+        destinationBuffer += jointBitVectorSize; // Move pointer past the validity bytes

        float minTranslation = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinTranslationDistance(viewerPosition) : AVATAR_MIN_TRANSLATION;

        float maxTranslationDimension = 0.0;
-        for (int i = 0; i < jointData.size(); i++) {
-            const JointData& data = jointData[i];
+        i = sendStatus.translationsSent;
+        for (; i < numJoints; ++i) {
+            const JointData& data = joints[i];
            const JointData& last = lastSentJointData[i];

-            if (!data.translationIsDefaultPose) {
-                if (sendAll || last.translationIsDefaultPose || (!cullSmallChanges && last.translation != data.translation)
-                    || (cullSmallChanges && glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation)) {
-                    validity |= (1 << validityBit);
+            if (packetEnd - destinationBuffer >= minSizeForJoint) {
+                if (!data.translationIsDefaultPose) {
+                    if (sendAll || last.translationIsDefaultPose || (!cullSmallChanges && last.translation != data.translation)
+                        || (cullSmallChanges && glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation)) {
+                        validityPosition[i / BITS_IN_BYTE] |= 1 << (i % BITS_IN_BYTE);
#ifdef WANT_DEBUG
                        translationSentCount++;
#endif
                        maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
                        maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
                        maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);

                        destinationBuffer +=
                            packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);

-                    if (sentJointDataOut) {
-                        (*sentJointDataOut)[i].translation = data.translation;
+                        if (sentJoints) {
+                            sentJoints[i].translation = data.translation;
+                        }
                    }
                }
+            } else {
+                break;
            }

-            if (sentJointDataOut) {
-                (*sentJointDataOut)[i].translationIsDefaultPose = data.translationIsDefaultPose;
+            if (sentJoints) {
+                sentJoints[i].translationIsDefaultPose = data.translationIsDefaultPose;
            }
-
-            if (++validityBit == BITS_IN_BYTE) {
-                *validityPosition++ = validity;
-                validityBit = validity = 0;
-            }
-        }
-
-        if (validityBit != 0) {
-            *validityPosition++ = validity;
        }
+        sendStatus.translationsSent = i;

        // faux joints
        Transform controllerLeftHandTransform = Transform(getControllerLeftHandMatrix());
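The rotation and translation loops above stop accumulating a validity byte bit-by-bit; instead the bit vector is zeroed up front and each sent joint sets its own bit directly, which is what lets the loops resume at sendStatus.rotationsSent / sendStatus.translationsSent in a later packet. A minimal sketch of that indexing (BITS_IN_BYTE is assumed to be 8; the helper name is hypothetical):

#include <vector>

constexpr int BITS_IN_BYTE = 8;  // assumption: same constant used by the packing code

// Set the "joint i was sent" bit in a pre-zeroed validity bit vector.
inline void markJointSent(unsigned char* validityPosition, int i) {
    validityPosition[i / BITS_IN_BYTE] |= 1 << (i % BITS_IN_BYTE);
}

// Because each index addresses its own bit, a resumed pass can start at any joint
// and still produce a correct bit vector, e.g.:
//   std::vector<unsigned char> bits((numJoints + 7) / 8, 0);
//   for (int i = resumeIndex; i < numJoints; ++i) { markJointSent(bits.data(), i); }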
@@ -686,7 +730,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerRightHandTransform.getTranslation(),
                                                               TRANSLATION_COMPRESSION_RADIX);

-    if (hasGrabJoints) {
+    IF_AVATAR_SPACE(PACKET_HAS_GRAB_JOINTS, sizeof (AvatarDataPacket::FarGrabJoints)) {
        // the far-grab joints may range further than 3 meters, so we can't use packFloatVec3ToSignedTwoByteFixed etc
        auto startSection = destinationBuffer;

@@ -728,18 +772,20 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        }
#endif

+        if (sendStatus.rotationsSent != numJoints || sendStatus.translationsSent != numJoints) {
+            extraReturnedFlags |= AvatarDataPacket::PACKET_HAS_JOINT_DATA;
+        }

        int numBytes = destinationBuffer - startSection;
        if (outboundDataRateOut) {
            outboundDataRateOut->jointDataRate.increment(numBytes);
        }
    }

-    if (hasJointDefaultPoseFlags) {
+    IF_AVATAR_SPACE(PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS, 1 + 2 * jointBitVectorSize) {
        auto startSection = destinationBuffer;

        // write numJoints
-        int numJoints = jointData.size();
        *destinationBuffer++ = (uint8_t)numJoints;

        // write rotationIsDefaultPose bits
@@ -758,6 +804,10 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
        }
    }

+    memcpy(packetFlagsLocation, &includedFlags, sizeof(includedFlags));
+    // Return dropped items.
+    sendStatus.itemFlags = (wantedFlags & ~includedFlags) | extraReturnedFlags;

    int avatarDataSize = destinationBuffer - startPosition;

    if (avatarDataSize > (int)byteArraySize) {
@@ -766,6 +816,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
    }

    return avatarDataByteArray.left(avatarDataSize);
+
+#undef AVATAR_MEMCPY
+#undef IF_AVATAR_SPACE
}

// NOTE: This is never used in a "distanceAdjust" mode, so it's ok that it doesn't use a variable minimum rotation/translation
@@ -918,6 +971,8 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
            _avatarBoundingBoxChanged = now;
        }

+        _defaultBubbleBox = computeBubbleBox();

        sourceBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);
        int numBytesRead = sourceBuffer - startSection;
        _avatarBoundingBoxRate.increment(numBytesRead);
@@ -1727,11 +1782,9 @@ glm::quat AvatarData::getOrientationOutbound() const {
    return (getLocalOrientation());
}

-void AvatarData::processAvatarIdentity(const QByteArray& identityData, bool& identityChanged,
+void AvatarData::processAvatarIdentity(QDataStream& packetStream, bool& identityChanged,
                                       bool& displayNameChanged) {

-    QDataStream packetStream(identityData);
-
    QUuid avatarSessionID;

    // peek the sequence number, this will tell us if we should be processing this identity packet at all
@@ -1746,17 +1799,18 @@ void AvatarData::processAvatarIdentity(const QByteArray& identityData, bool& ide
            << (udt::SequenceNumber::Type) incomingSequenceNumber;
    }

-    if (incomingSequenceNumber > _identitySequenceNumber) {
-        Identity identity;
+    Identity identity;

    packetStream
        >> identity.attachmentData
        >> identity.displayName
        >> identity.sessionDisplayName
        >> identity.isReplicated
        >> identity.lookAtSnappingEnabled
    ;

+    if (incomingSequenceNumber > _identitySequenceNumber) {

        // set the store identity sequence number to match the incoming identity
        _identitySequenceNumber = incomingSequenceNumber;
@@ -2902,3 +2956,21 @@ void AvatarEntityMapFromScriptValue(const QScriptValue& object, AvatarEntityMap&
        value[EntityID] = binaryEntityProperties;
    }
}
+
+const float AvatarData::DEFAULT_BUBBLE_SCALE = 2.4f; // magic number determined empirically
+
+AABox AvatarData::computeBubbleBox(float bubbleScale) const {
+    AABox box = AABox(_globalBoundingBoxOffset - _globalBoundingBoxDimensions, _globalBoundingBoxDimensions);
+    glm::vec3 size = box.getScale();
+    size *= bubbleScale;
+    const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3);
+    size= glm::max(size, MIN_BUBBLE_SCALE);
+    box.setScaleStayCentered(size);
+    return box;
+}
+
+AABox AvatarData::getDefaultBubbleBox() const {
+    AABox bubbleBox(_defaultBubbleBox);
+    bubbleBox.translate(_globalPosition);
+    return bubbleBox;
+}
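getDefaultBubbleBox() above only translates a box that was already computed: computeBubbleBox() runs once per bounding-box update (see the parseDataFromBuffer hunk) and scales the avatar's bounding dimensions by DEFAULT_BUBBLE_SCALE with a per-axis floor. A rough illustration of that sizing step, with made-up dimensions:

#include <glm/glm.hpp>

// Illustration only: bounding dimensions (0.5, 1.8, 0.4) * 2.4 -> (1.2, 4.32, 0.96),
// then clamped per axis so no side of the bubble drops below the minimum.
glm::vec3 bubbleSize(const glm::vec3& boundingBoxDimensions, float bubbleScale = 2.4f) {
    const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3f);
    return glm::max(boundingBoxDimensions * bubbleScale, MIN_BUBBLE_SCALE);
}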
@@ -296,6 +296,17 @@ namespace AvatarDataPacket {
    } PACKED_END;
    const size_t FAR_GRAB_JOINTS_SIZE = 84;
    static_assert(sizeof(FarGrabJoints) == FAR_GRAB_JOINTS_SIZE, "AvatarDataPacket::FarGrabJoints size doesn't match.");

+    static const size_t MIN_BULK_PACKET_SIZE = NUM_BYTES_RFC4122_UUID + HEADER_SIZE;
+    static const size_t FAUX_JOINTS_SIZE = 2 * (sizeof(SixByteQuat) + sizeof(SixByteTrans));
+
+    struct SendStatus {
+        HasFlags itemFlags { 0 };
+        bool sendUUID { false };
+        int rotationsSent { 0 };  // ie: index of next unsent joint
+        int translationsSent { 0 };
+        operator bool() { return itemFlags == 0; }
+    };
}

const float MAX_AUDIO_LOUDNESS = 1000.0f; // close enough for mouth animation
@@ -463,8 +474,8 @@ public:
    virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false);

    virtual QByteArray toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime, const QVector<JointData>& lastSentJointData,
-        AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
-        QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut = nullptr) const;
+        AvatarDataPacket::SendStatus& sendStatus, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
+        QVector<JointData>* sentJointDataOut, int maxDataSize = 0, AvatarDataRate* outboundDataRateOut = nullptr) const;

    virtual void doneEncoding(bool cullSmallChanges);
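SendStatus is what makes toByteArray resumable: itemFlags carries the items still owed to this receiver, rotationsSent / translationsSent record where the joint loops stopped, and the bool conversion reports completion. A hedged sketch of how a caller could drain one avatar across several size-limited chunks; appendToPacket is a placeholder, and starting from a default-constructed SendStatus is assumed to mean "let the first call pick the items from dataDetail":

// Sketch only; assumes AvatarData.h is included. Not the avatar-mixer's actual send loop.
void appendToPacket(const QByteArray& chunk);  // hypothetical helper provided by the caller

void sendWholeAvatar(AvatarData& avatar, AvatarDataDetail dataDetail,
                     const QVector<JointData>& lastSentJointData,
                     glm::vec3 viewerPosition, int maxDataSize) {
    AvatarDataPacket::SendStatus sendStatus;  // default state: nothing carried over yet
    QVector<JointData> sentJointData;
    do {
        QByteArray chunk = avatar.toByteArray(dataDetail, 0 /* lastSentTime */, lastSentJointData,
                                              sendStatus, false /* dropFaceTracking */,
                                              false /* distanceAdjust */, viewerPosition,
                                              &sentJointData, maxDataSize);
        appendToPacket(chunk);                // write this chunk into the outgoing packet
    } while (!sendStatus);                    // operator bool() is true once nothing remains
}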
@@ -971,7 +982,7 @@ public:

    // identityChanged returns true if identity has changed, false otherwise.
    // identityChanged returns true if identity has changed, false otherwise. Similarly for displayNameChanged and skeletonModelUrlChange.
-    void processAvatarIdentity(const QByteArray& identityData, bool& identityChanged, bool& displayNameChanged);
+    void processAvatarIdentity(QDataStream& packetStream, bool& identityChanged, bool& displayNameChanged);

    qint64 packTrait(AvatarTraits::TraitType traitType, ExtendedIODevice& destination,
                     AvatarTraits::TraitVersion traitVersion = AvatarTraits::NULL_TRAIT_VERSION);
@@ -1112,6 +1123,7 @@ public:

    glm::vec3 getClientGlobalPosition() const { return _globalPosition; }
    AABox getGlobalBoundingBox() const { return AABox(_globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions, _globalBoundingBoxDimensions); }
+    AABox getDefaultBubbleBox() const;

    /**jsdoc
     * @function MyAvatar.getAvatarEntityData
@@ -1204,6 +1216,9 @@ public:
    void setReplicaIndex(int replicaIndex) { _replicaIndex = replicaIndex; }
    int getReplicaIndex() { return _replicaIndex; }

+    static const float DEFAULT_BUBBLE_SCALE; /* = 2.4 */
+    AABox computeBubbleBox(float bubbleScale = DEFAULT_BUBBLE_SCALE) const;

    void setIsNewAvatar(bool isNewAvatar) { _isNewAvatar = isNewAvatar; }
    bool getIsNewAvatar() { return _isNewAvatar; }
@@ -1440,6 +1455,8 @@ protected:
    glm::vec3 _globalBoundingBoxDimensions;
    glm::vec3 _globalBoundingBoxOffset;

+    AABox _defaultBubbleBox;

    mutable ReadWriteLockable _avatarEntitiesLock;
    AvatarEntityIDs _avatarEntityDetached; // recently detached from this avatar
    AvatarEntityIDs _avatarEntityForRecording; // create new entities id for avatar recording
@@ -85,8 +85,9 @@ std::vector<AvatarSharedPointer> AvatarReplicas::takeReplicas(const QUuid& paren
void AvatarReplicas::processAvatarIdentity(const QUuid& parentID, const QByteArray& identityData, bool& identityChanged, bool& displayNameChanged) {
    if (_replicasMap.find(parentID) != _replicasMap.end()) {
        auto &replicas = _replicasMap[parentID];
+        QDataStream identityDataStream(identityData);
        for (auto avatar : replicas) {
-            avatar->processAvatarIdentity(identityData, identityChanged, displayNameChanged);
+            avatar->processAvatarIdentity(identityDataStream, identityChanged, displayNameChanged);
        }
    }
}
@@ -284,39 +285,45 @@ AvatarSharedPointer AvatarHashMap::parseAvatarData(QSharedPointer<ReceivedMessag
}

void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
+    QDataStream avatarIdentityStream(message->getMessage());

-    // peek the avatar UUID from the incoming packet
-    QUuid identityUUID = QUuid::fromRfc4122(message->peek(NUM_BYTES_RFC4122_UUID));
+    while (!avatarIdentityStream.atEnd()) {
+        // peek the avatar UUID from the incoming packet
+        avatarIdentityStream.startTransaction();
+        QUuid identityUUID;
+        avatarIdentityStream >> identityUUID;
+        avatarIdentityStream.rollbackTransaction();

        if (identityUUID.isNull()) {
            qCDebug(avatars) << "Refusing to process identity packet for null avatar ID";
            return;
        }

        // make sure this isn't for an ignored avatar
        auto nodeList = DependencyManager::get<NodeList>();
        static auto EMPTY = QUuid();

        {
            QReadLocker locker(&_hashLock);
            auto me = _avatarHash.find(EMPTY);
            if ((me != _avatarHash.end()) && (identityUUID == me.value()->getSessionUUID())) {
                // We add MyAvatar to _avatarHash with an empty UUID. Code relies on this. In order to correctly handle an
                // identity packet for ourself (such as when we are assigned a sessionDisplayName by the mixer upon joining),
                // we make things match here.
                identityUUID = EMPTY;
+            }
+        }
+
+        if (!nodeList->isIgnoringNode(identityUUID) || nodeList->getRequestsDomainListData()) {
+            // mesh URL for a UUID, find avatar in our list
+            bool isNewAvatar;
+            auto avatar = newOrExistingAvatar(identityUUID, sendingNode, isNewAvatar);
+            bool identityChanged = false;
+            bool displayNameChanged = false;
+            // In this case, the "sendingNode" is the Avatar Mixer.
+            avatar->processAvatarIdentity(avatarIdentityStream, identityChanged, displayNameChanged);
+            _replicas.processAvatarIdentity(identityUUID, message->getMessage(), identityChanged, displayNameChanged);
        }
    }
-
-    if (!nodeList->isIgnoringNode(identityUUID) || nodeList->getRequestsDomainListData()) {
-        // mesh URL for a UUID, find avatar in our list
-        bool isNewAvatar;
-        auto avatar = newOrExistingAvatar(identityUUID, sendingNode, isNewAvatar);
-        bool identityChanged = false;
-        bool displayNameChanged = false;
-        // In this case, the "sendingNode" is the Avatar Mixer.
-        avatar->processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
-        _replicas.processAvatarIdentity(identityUUID, message->getMessage(), identityChanged, displayNameChanged);
-    }
}
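The new loop peeks the leading UUID of each identity record by wrapping the read in a QDataStream transaction and rolling it back, so the same bytes are re-read by processAvatarIdentity afterwards. A stand-alone sketch of that peek pattern (the helper name is illustrative only):

#include <QDataStream>
#include <QUuid>

// Peek the next QUuid from a stream without consuming it, using Qt's
// read-transaction support (startTransaction/rollbackTransaction).
QUuid peekUuid(QDataStream& stream) {
    stream.startTransaction();
    QUuid uuid;
    stream >> uuid;
    stream.rollbackTransaction();  // rewind so the next reader sees the UUID again
    return uuid;
}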
@@ -68,8 +68,14 @@ namespace PrioritySortUtil {
        void reserve(size_t num) {
            _vector.reserve(num);
        }
-        const std::vector<T>& getSortedVector() {
-            std::sort(_vector.begin(), _vector.end(), [](const T& left, const T& right) { return left.getPriority() > right.getPriority(); });
+        const std::vector<T>& getSortedVector(int numToSort = 0) {
+            if (numToSort == 0 || numToSort >= (int)_vector.size()) {
+                std::sort(_vector.begin(), _vector.end(),
+                    [](const T& left, const T& right) { return left.getPriority() > right.getPriority(); });
+            } else {
+                std::partial_sort(_vector.begin(), _vector.begin() + numToSort, _vector.end(),
+                    [](const T& left, const T& right) { return left.getPriority() > right.getPriority(); });
+            }
            return _vector;
        }
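With numToSort set, only the highest-priority numToSort entries are guaranteed to be the true top entries in order; the tail is left in unspecified order, which is cheaper than a full sort when the caller only consumes the front of the vector. A small illustration with plain integers (not the avatar priority type):

#include <algorithm>
#include <cstdio>
#include <vector>

int main() {
    std::vector<int> priorities { 3, 9, 1, 7, 5, 8, 2 };
    const int numToSort = 3;
    // Descending partial sort: afterwards the first 3 elements are 9, 8, 7;
    // the remaining elements are in an unspecified order.
    std::partial_sort(priorities.begin(), priorities.begin() + numToSort, priorities.end(),
                      [](int left, int right) { return left > right; });
    for (int i = 0; i < numToSort; ++i) {
        std::printf("%d ", priorities[i]);  // prints: 9 8 7
    }
    return 0;
}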
@@ -99,6 +105,9 @@ namespace PrioritySortUtil {
            float radius = glm::max(thing.getRadius(), MIN_RADIUS);
            // Other item's angle from view centre:
            float cosineAngle = glm::dot(offset, view.getDirection()) / distance;
+            if (cosineAngle > 0.0f) {
+                cosineAngle = std::sqrt(cosineAngle);
+            }
            float age = float((_usecCurrentTime - thing.getTimestamp()) / USECS_PER_SECOND);

            // the "age" term accumulates at the sum of all weights