Mirror of https://github.com/HifiExperiments/overte.git (synced 2025-04-14 16:27:41 +02:00)

Merge pull request #14035 from SimonWalton-HiFi/avatar-mixer-improvements

Further avatar-mixer improvements

Commit 2eb801bdc6: 9 changed files with 430 additions and 345 deletions
@@ -541,7 +541,7 @@ void AvatarMixer::handleRequestsDomainListDataPacket(QSharedPointer<ReceivedMess
// ...For those nodes, reset the lastBroadcastTime to 0
// so that the AvatarMixer will send Identity data to us
[&](const SharedNodePointer& node) {
nodeData->setLastBroadcastTime(node->getUUID(), 0);
nodeData->setLastBroadcastTime(node->getLocalID(), 0);
nodeData->resetSentTraitData(node->getLocalID());
}
);

@@ -565,7 +565,8 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> mes
// parse the identity packet and update the change timestamp if appropriate
bool identityChanged = false;
bool displayNameChanged = false;
avatar.processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
QDataStream avatarIdentityStream(message->getMessage());
avatar.processAvatarIdentity(avatarIdentityStream, identityChanged, displayNameChanged);

if (identityChanged) {
QMutexLocker nodeDataLocker(&nodeData->getMutex());

@@ -637,7 +638,7 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
// Reset the lastBroadcastTime for the ignored avatar to 0
// so the AvatarMixer knows it'll have to send identity data about the ignored avatar
// to the ignorer if the ignorer unignores.
nodeData->setLastBroadcastTime(ignoredUUID, 0);
nodeData->setLastBroadcastTime(ignoredNode->getLocalID(), 0);
nodeData->resetSentTraitData(ignoredNode->getLocalID());
}

@@ -647,7 +648,7 @@ void AvatarMixer::handleNodeIgnoreRequestPacket(QSharedPointer<ReceivedMessage>
// to the ignored if the ignorer unignores.
AvatarMixerClientData* ignoredNodeData = reinterpret_cast<AvatarMixerClientData*>(ignoredNode->getLinkedData());
if (ignoredNodeData) {
ignoredNodeData->setLastBroadcastTime(senderNode->getUUID(), 0);
ignoredNodeData->setLastBroadcastTime(senderNode->getLocalID(), 0);
ignoredNodeData->resetSentTraitData(senderNode->getLocalID());
}
}
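The handleAvatarIdentityPacket change above wraps the packet payload in a QDataStream before handing it to processAvatarIdentity, so the parser reads from a stream instead of a raw QByteArray. A minimal standalone sketch of reading identity-style records through one QDataStream; the Identity struct and its fields here are invented for illustration and are not the project's actual wire format:

// Sketch only: several length-delimited records read from one QByteArray
// through a single QDataStream.
#include <QByteArray>
#include <QDataStream>
#include <QString>
#include <QUuid>
#include <QVector>

struct Identity {
    QUuid sessionID;
    QString displayName;
};

QVector<Identity> parseIdentities(const QByteArray& payload) {
    QDataStream stream(payload);          // read-only stream over the buffer
    QVector<Identity> result;
    while (!stream.atEnd()) {
        Identity identity;
        stream >> identity.sessionID >> identity.displayName;
        if (stream.status() != QDataStream::Ok) {
            break;                        // stop on a short or corrupt record
        }
        result.append(identity);
    }
    return result;
}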
@@ -26,20 +26,20 @@ AvatarMixerClientData::AvatarMixerClientData(const QUuid& nodeID, Node::LocalID
_avatar->setID(nodeID);
}

uint64_t AvatarMixerClientData::getLastOtherAvatarEncodeTime(QUuid otherAvatar) const {
std::unordered_map<QUuid, uint64_t>::const_iterator itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
uint64_t AvatarMixerClientData::getLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar) const {
const auto itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
if (itr != _lastOtherAvatarEncodeTime.end()) {
return itr->second;
}
return 0;
}

void AvatarMixerClientData::setLastOtherAvatarEncodeTime(const QUuid& otherAvatar, uint64_t time) {
std::unordered_map<QUuid, uint64_t>::iterator itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
void AvatarMixerClientData::setLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar, uint64_t time) {
auto itr = _lastOtherAvatarEncodeTime.find(otherAvatar);
if (itr != _lastOtherAvatarEncodeTime.end()) {
itr->second = time;
} else {
_lastOtherAvatarEncodeTime.emplace(std::pair<QUuid, uint64_t>(otherAvatar, time));
_lastOtherAvatarEncodeTime.emplace(std::pair<NLPacket::LocalID, uint64_t>(otherAvatar, time));
}
}

@@ -220,7 +220,7 @@ void AvatarMixerClientData::checkSkeletonURLAgainstWhitelist(const SlaveSharedDa
}
}

uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) const {
uint64_t AvatarMixerClientData::getLastBroadcastTime(NLPacket::LocalID nodeUUID) const {
// return the matching PacketSequenceNumber, or the default if we don't have it
auto nodeMatch = _lastBroadcastTimes.find(nodeUUID);
if (nodeMatch != _lastBroadcastTimes.end()) {

@@ -229,9 +229,9 @@ uint64_t AvatarMixerClientData::getLastBroadcastTime(const QUuid& nodeUUID) cons
return 0;
}

uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const {
uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) const {
// return the matching PacketSequenceNumber, or the default if we don't have it
auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeUUID);
auto nodeMatch = _lastBroadcastSequenceNumbers.find(nodeID);
if (nodeMatch != _lastBroadcastSequenceNumbers.end()) {
return nodeMatch->second;
}

@@ -252,7 +252,7 @@ void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
} else {
killPacket->writePrimitive(KillAvatarReason::YourAvatarEnteredTheirBubble);
}
setLastBroadcastTime(other->getUUID(), 0);
setLastBroadcastTime(other->getLocalID(), 0);

resetSentTraitData(other->getLocalID());

@@ -331,9 +331,9 @@ AvatarMixerClientData::TraitsCheckTimestamp AvatarMixerClientData::getLastOtherA
}
}

void AvatarMixerClientData::cleanupKilledNode(const QUuid& nodeUUID, Node::LocalID nodeLocalID) {
removeLastBroadcastSequenceNumber(nodeUUID);
removeLastBroadcastTime(nodeUUID);
void AvatarMixerClientData::cleanupKilledNode(const QUuid&, Node::LocalID nodeLocalID) {
removeLastBroadcastSequenceNumber(nodeLocalID);
removeLastBroadcastTime(nodeLocalID);
_lastSentTraitsTimestamps.erase(nodeLocalID);
_sentTraitVersions.erase(nodeLocalID);
}
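The AvatarMixerClientData changes above re-key the per-avatar bookkeeping maps from QUuid to NLPacket::LocalID. Keying by a small integer avoids hashing and comparing 128-bit UUIDs on every lookup, and the standard integer hash works out of the box. A minimal sketch of that shape, with LocalID, the class and the member names invented for illustration rather than taken from the project:

// Sketch only: per-peer bookkeeping keyed by a small integer "local ID"
// instead of a UUID.
#include <cstdint>
#include <unordered_map>

using LocalID = uint16_t;

class PeerBookkeeping {
public:
    uint64_t lastEncodeTime(LocalID peer) const {
        const auto itr = _lastEncodeTimes.find(peer);
        return itr != _lastEncodeTimes.end() ? itr->second : 0;
    }
    void setLastEncodeTime(LocalID peer, uint64_t time) {
        _lastEncodeTimes[peer] = time;   // inserts or overwrites in one step
    }
    void forgetPeer(LocalID peer) {
        _lastEncodeTimes.erase(peer);    // cheap integer hash, no UUID compare
    }

private:
    std::unordered_map<LocalID, uint64_t> _lastEncodeTimes;
};

A side benefit of the integer key, under the assumption above, is that std::unordered_map needs no custom hasher, whereas a QUuid key does.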
@@ -49,17 +49,16 @@ public:
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }

uint16_t getLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) const;
void setLastBroadcastSequenceNumber(NLPacket::LocalID nodeID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeID] = sequenceNumber; }
Q_INVOKABLE void removeLastBroadcastSequenceNumber(NLPacket::LocalID nodeID) { _lastBroadcastSequenceNumbers.erase(nodeID); }
bool isIgnoreRadiusEnabled() const { return _isIgnoreRadiusEnabled; }
void setIsIgnoreRadiusEnabled(bool enabled) { _isIgnoreRadiusEnabled = enabled; }

uint16_t getLastBroadcastSequenceNumber(const QUuid& nodeUUID) const;
void setLastBroadcastSequenceNumber(const QUuid& nodeUUID, uint16_t sequenceNumber)
{ _lastBroadcastSequenceNumbers[nodeUUID] = sequenceNumber; }
Q_INVOKABLE void removeLastBroadcastSequenceNumber(const QUuid& nodeUUID) { _lastBroadcastSequenceNumbers.erase(nodeUUID); }

uint64_t getLastBroadcastTime(const QUuid& nodeUUID) const;
void setLastBroadcastTime(const QUuid& nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
Q_INVOKABLE void removeLastBroadcastTime(const QUuid& nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }
uint64_t getLastBroadcastTime(NLPacket::LocalID nodeUUID) const;
void setLastBroadcastTime(NLPacket::LocalID nodeUUID, uint64_t broadcastTime) { _lastBroadcastTimes[nodeUUID] = broadcastTime; }
Q_INVOKABLE void removeLastBroadcastTime(NLPacket::LocalID nodeUUID) { _lastBroadcastTimes.erase(nodeUUID); }

Q_INVOKABLE void cleanupKilledNode(const QUuid& nodeUUID, Node::LocalID nodeLocalID);

@@ -93,7 +92,7 @@ public:

void loadJSONStats(QJsonObject& jsonObject) const;

glm::vec3 getPosition() const { return _avatar ? _avatar->getWorldPosition() : glm::vec3(0); }
glm::vec3 getPosition() const { return _avatar ? _avatar->getClientGlobalPosition() : glm::vec3(0); }
bool isRadiusIgnoring(const QUuid& other) const;
void addToRadiusIgnoringSet(const QUuid& other);
void removeFromRadiusIgnoringSet(const QUuid& other);

@@ -114,10 +113,10 @@ public:

const ConicalViewFrustums& getViewFrustums() const { return _currentViewFrustums; }

uint64_t getLastOtherAvatarEncodeTime(QUuid otherAvatar) const;
void setLastOtherAvatarEncodeTime(const QUuid& otherAvatar, uint64_t time);
uint64_t getLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar) const;
void setLastOtherAvatarEncodeTime(NLPacket::LocalID otherAvatar, uint64_t time);

QVector<JointData>& getLastOtherAvatarSentJoints(QUuid otherAvatar) { return _lastOtherAvatarSentJoints[otherAvatar]; }
QVector<JointData>& getLastOtherAvatarSentJoints(NLPacket::LocalID otherAvatar) { return _lastOtherAvatarSentJoints[otherAvatar]; }

void queuePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer node);
int processPackets(const SlaveSharedData& slaveSharedData); // returns number of packets processed

@@ -150,13 +149,13 @@ private:
AvatarSharedPointer _avatar { new AvatarData() };

uint16_t _lastReceivedSequenceNumber { 0 };
std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;
std::unordered_map<QUuid, uint64_t> _lastBroadcastTimes;
std::unordered_map<NLPacket::LocalID, uint16_t> _lastBroadcastSequenceNumbers;
std::unordered_map<NLPacket::LocalID, uint64_t> _lastBroadcastTimes;

// this is a map of the last time we encoded an "other" avatar for
// sending to "this" node
std::unordered_map<QUuid, uint64_t> _lastOtherAvatarEncodeTime;
std::unordered_map<QUuid, QVector<JointData>> _lastOtherAvatarSentJoints;
std::unordered_map<NLPacket::LocalID, uint64_t> _lastOtherAvatarEncodeTime;
std::unordered_map<NLPacket::LocalID, QVector<JointData>> _lastOtherAvatarSentJoints;

uint64_t _identityChangeTimestamp;
bool _avatarSessionDisplayNameMustChange{ true };
@@ -68,13 +68,11 @@ void AvatarMixerSlave::processIncomingPackets(const SharedNodePointer& node) {
_stats.processIncomingPacketsElapsedTime += (end - start);
}

int AvatarMixerSlave::sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode) {
if (destinationNode->getType() == NodeType::Agent && !destinationNode->isUpstream()) {
int AvatarMixerSlave::sendIdentityPacket(NLPacketList& packetList, const AvatarMixerClientData* nodeData, const Node& destinationNode) {
if (destinationNode.getType() == NodeType::Agent && !destinationNode.isUpstream()) {
QByteArray individualData = nodeData->getConstAvatarData()->identityByteArray();
individualData.replace(0, NUM_BYTES_RFC4122_UUID, nodeData->getNodeID().toRfc4122()); // FIXME, this looks suspicious
auto identityPackets = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
identityPackets->write(individualData);
DependencyManager::get<NodeList>()->sendPacketList(std::move(identityPackets), *destinationNode);
packetList.write(individualData);
_stats.numIdentityPackets++;
return individualData.size();
} else {

@@ -247,12 +245,12 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// reset the internal state for correct random number distribution
distribution.reset();

// Estimate number to sort on number sent last frame (with min. of 20).
const int numToSendEst = std::max(int(nodeData->getNumAvatarsSentLastFrame() * 2.5f), 20);

// reset the number of sent avatars
nodeData->resetNumAvatarsSentLastFrame();

// keep a counter of the number of considered avatars
int numOtherAvatars = 0;

// keep track of outbound data rate specifically for avatar data
int numAvatarDataBytes = 0;
int identityBytesSent = 0;

@@ -261,7 +259,6 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// max number of avatarBytes per frame
int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);

// keep track of the number of other avatars held back in this frame
int numAvatarsHeldBack = 0;

@@ -279,10 +276,6 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
int minimumBytesPerAvatar = PALIsOpen ? AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID +
sizeof(AvatarDataPacket::AvatarGlobalPosition) + sizeof(AvatarDataPacket::AudioLoudness) : 0;

// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;

// compute node bounding box
const float MY_AVATAR_BUBBLE_EXPANSION_FACTOR = 4.0f; // magic number determined emperically
AABox nodeBox = computeBubbleBox(avatar, MY_AVATAR_BUBBLE_EXPANSION_FACTOR);

@@ -350,8 +343,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
if (nodeData->isIgnoreRadiusEnabled() || (avatarClientNodeData->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Perform the collision check between the two bounding boxes
const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);
AABox otherNodeBox = avatarClientNodeData->getAvatar().getDefaultBubbleBox();
if (nodeBox.touches(otherNodeBox)) {
nodeData->ignoreOther(destinationNode, avatarNode);
shouldIgnore = !getsAnyIgnored;

@@ -364,7 +356,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
}

if (!shouldIgnore) {
AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getLocalID());
AvatarDataSequenceNumber lastSeqFromSender = avatarClientNodeData->getLastReceivedSequenceNumber();

// FIXME - This code does appear to be working. But it seems brittle.

@@ -396,7 +388,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
if (!shouldIgnore) {
// sort this one for later
const AvatarData* avatarNodeData = avatarClientNodeData->getConstAvatarData();
auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNodeData->getSessionUUID());
auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNode->getLocalID());

sortedAvatars.push(SortableAvatar(avatarNodeData, avatarNode, lastEncodeTime));
}

@@ -406,8 +398,13 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

int remainingAvatars = (int)sortedAvatars.size();
auto traitsPacketList = NLPacketList::create(PacketType::BulkAvatarTraits, QByteArray(), true, true);
auto avatarPacket = NLPacket::create(PacketType::BulkAvatarData);
const int avatarPacketCapacity = avatarPacket->getPayloadCapacity();
int avatarSpaceAvailable = avatarPacketCapacity;
int numPacketsSent = 0;
auto identityPacketList = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);

const auto& sortedAvatarVector = sortedAvatars.getSortedVector();
const auto& sortedAvatarVector = sortedAvatars.getSortedVector(numToSendEst);
for (const auto& sortedAvatar : sortedAvatarVector) {
const Node* otherNode = sortedAvatar.getNode();
auto lastEncodeForOther = sortedAvatar.getTimestamp();
@@ -432,21 +429,9 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

auto startAvatarDataPacking = chrono::high_resolution_clock::now();

++numOtherAvatars;

const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());
const AvatarData* otherAvatar = otherNodeData->getConstAvatarData();

// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
if (otherAvatar->hasProcessedFirstIdentity()
&& nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(otherNodeData, node);

// remember the last time we sent identity details about this other node to the receiver
nodeData->setLastBroadcastTime(otherNode->getUUID(), usecTimestampNow());
}

// Typically all out-of-view avatars but such avatars' priorities will rise with time:
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;

@@ -456,71 +441,56 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
} else if (!overBudget) {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
nodeData->incrementAvatarInView();
}

bool includeThisAvatar = true;
QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());
// If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
// the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
if (otherAvatar->hasProcessedFirstIdentity()
&& nodeData->getLastBroadcastTime(otherNode->getLocalID()) <= otherNodeData->getIdentityChangeTimestamp()) {
identityBytesSent += sendIdentityPacket(*identityPacketList, otherNodeData, *destinationNode);

lastSentJointsForOther.resize(otherAvatar->getJointCount());

bool distanceAdjust = true;
glm::vec3 viewerPosition = myPosition;
AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
bool dropFaceTracking = false;

auto startSerialize = chrono::high_resolution_clock::now();
QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition,
&lastSentJointsForOther);
auto endSerialize = chrono::high_resolution_clock::now();
_stats.toByteArrayElapsedTime +=
(quint64) chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();

if (bytes.size() > maxAvatarDataBytes) {
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
<< "resulted in very large buffer of" << bytes.size() << "bytes - dropping facial data";

dropFaceTracking = true; // first try dropping the facial data
bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);

if (bytes.size() > maxAvatarDataBytes) {
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
<< "without facial data resulted in very large buffer of" << bytes.size()
<< "bytes - reducing to MinimumData";
bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);

if (bytes.size() > maxAvatarDataBytes) {
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
<< "MinimumData resulted in very large buffer of" << bytes.size()
<< "bytes - refusing to send avatar";
includeThisAvatar = false;
}
// remember the last time we sent identity details about this other node to the receiver
nodeData->setLastBroadcastTime(otherNode->getLocalID(), usecTimestampNow());
}
}

if (includeThisAvatar) {
// start a new segment in the PacketList for this avatar
avatarPacketList->startSegment();
numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
numAvatarDataBytes += avatarPacketList->write(bytes);
avatarPacketList->endSegment();
QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getLocalID());

if (detail != AvatarData::NoData) {
_stats.numOthersIncluded++;
const bool distanceAdjust = true;
const bool dropFaceTracking = false;
AvatarDataPacket::SendStatus sendStatus;
sendStatus.sendUUID = true;

// increment the number of avatars sent to this reciever
nodeData->incrementNumAvatarsSentLastFrame();
do {
auto startSerialize = chrono::high_resolution_clock::now();
QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
sendStatus, dropFaceTracking, distanceAdjust, myPosition,
&lastSentJointsForOther, avatarSpaceAvailable);
auto endSerialize = chrono::high_resolution_clock::now();
_stats.toByteArrayElapsedTime +=
(quint64)chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();

// set the last sent sequence number for this sender on the receiver
nodeData->setLastBroadcastSequenceNumber(otherNode->getUUID(),
otherNodeData->getLastReceivedSequenceNumber());
nodeData->setLastOtherAvatarEncodeTime(otherNode->getUUID(), usecTimestampNow());
avatarPacket->write(bytes);
avatarSpaceAvailable -= bytes.size();
numAvatarDataBytes += bytes.size();
if (!sendStatus || avatarSpaceAvailable < (int)AvatarDataPacket::MIN_BULK_PACKET_SIZE) {
// Weren't able to fit everything.
nodeList->sendPacket(std::move(avatarPacket), *destinationNode);
++numPacketsSent;
avatarPacket = NLPacket::create(PacketType::BulkAvatarData);
avatarSpaceAvailable = avatarPacketCapacity;
}
} else {
// TODO? this avatar is not included now, and will probably not be included next frame.
// It would be nice if we could tweak its future sort priority to put it at the back of the list.
} while (!sendStatus);

if (detail != AvatarData::NoData) {
_stats.numOthersIncluded++;

// increment the number of avatars sent to this receiver
nodeData->incrementNumAvatarsSentLastFrame();

// set the last sent sequence number for this sender on the receiver
nodeData->setLastBroadcastSequenceNumber(otherNode->getLocalID(),
otherNodeData->getLastReceivedSequenceNumber());
nodeData->setLastOtherAvatarEncodeTime(otherNode->getLocalID(), usecTimestampNow());
}

auto endAvatarDataPacking = chrono::high_resolution_clock::now();
@@ -532,17 +502,21 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
remainingAvatars--;
}

if (nodeData->getNumAvatarsSentLastFrame() > numToSendEst) {
qCWarning(avatars) << "More avatars sent than upper estimate" << nodeData->getNumAvatarsSentLastFrame()
<< " / " << numToSendEst;
}

quint64 startPacketSending = usecTimestampNow();

// close the current packet so that we're always sending something
avatarPacketList->closeCurrentPacket(true);
if (avatarPacket->getPayloadSize() != 0) {
nodeList->sendPacket(std::move(avatarPacket), *destinationNode);
++numPacketsSent;
}

_stats.numPacketsSent += (int)avatarPacketList->getNumPackets();
_stats.numPacketsSent += numPacketsSent;
_stats.numBytesSent += numAvatarDataBytes;

// send the avatar data PacketList
nodeList->sendPacketList(std::move(avatarPacketList), *destinationNode);

// record the bytes sent for other avatar data in the AvatarMixerClientData
nodeData->recordSentAvatarData(numAvatarDataBytes);

@@ -554,6 +528,12 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
nodeList->sendPacketList(std::move(traitsPacketList), *destinationNode);
}

// Send any AvatarIdentity packets:
identityPacketList->closeCurrentPacket();
if (identityBytesSent > 0) {
nodeList->sendPacketList(std::move(identityPacketList), *destinationNode);
}

// record the number of avatars held back this frame
nodeData->recordNumOtherAvatarStarves(numAvatarsHeldBack);
nodeData->recordNumOtherAvatarSkips(numAvatarsWithSkippedFrames);

@@ -599,20 +579,20 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
// so we always send a full update for this avatar

quint64 start = usecTimestampNow();
AvatarDataPacket::HasFlags flagsOut;
AvatarDataPacket::SendStatus sendStatus;

QVector<JointData> emptyLastJointSendData { otherAvatar->getJointCount() };

QByteArray avatarByteArray = otherAvatar->toByteArray(AvatarData::SendAllData, 0, emptyLastJointSendData,
flagsOut, false, false, glm::vec3(0), nullptr);
sendStatus, false, false, glm::vec3(0), nullptr, 0);
quint64 end = usecTimestampNow();
_stats.toByteArrayElapsedTime += (end - start);

auto lastBroadcastTime = nodeData->getLastBroadcastTime(agentNode->getUUID());
auto lastBroadcastTime = nodeData->getLastBroadcastTime(agentNode->getLocalID());
if (lastBroadcastTime <= agentNodeData->getIdentityChangeTimestamp()
|| (start - lastBroadcastTime) >= REBROADCAST_IDENTITY_TO_DOWNSTREAM_EVERY_US) {
sendReplicatedIdentityPacket(*agentNode, agentNodeData, *node);
nodeData->setLastBroadcastTime(agentNode->getUUID(), start);
nodeData->setLastBroadcastTime(agentNode->getLocalID(), start);
}

// figure out how large our avatar byte array can be to fit in the packet list

@@ -630,14 +610,14 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
<< "-" << avatarByteArray.size() << "bytes";

avatarByteArray = otherAvatar->toByteArray(AvatarData::SendAllData, 0, emptyLastJointSendData,
flagsOut, true, false, glm::vec3(0), nullptr);
sendStatus, true, false, glm::vec3(0), nullptr, 0);

if (avatarByteArray.size() > maxAvatarByteArraySize) {
qCWarning(avatars) << "Replicated avatar data without facial data still too large for"
<< otherAvatar->getSessionUUID() << "-" << avatarByteArray.size() << "bytes";

avatarByteArray = otherAvatar->toByteArray(AvatarData::MinimumData, 0, emptyLastJointSendData,
flagsOut, true, false, glm::vec3(0), nullptr);
sendStatus, true, false, glm::vec3(0), nullptr, 0);
}
}

@@ -646,7 +626,7 @@ void AvatarMixerSlave::broadcastAvatarDataToDownstreamMixer(const SharedNodePoin
nodeData->incrementNumAvatarsSentLastFrame();

// set the last sent sequence number for this sender on the receiver
nodeData->setLastBroadcastSequenceNumber(agentNode->getUUID(),
nodeData->setLastBroadcastSequenceNumber(agentNode->getLocalID(),
agentNodeData->getLastReceivedSequenceNumber());

// increment the number of avatars sent to this reciever
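The broadcastAvatarDataToAgent changes above replace the per-avatar NLPacketList segments with a single BulkAvatarData NLPacket that is filled until space runs low, flushed, and then refilled, with toByteArray resuming through SendStatus wherever it stopped. A reduced, self-contained sketch of that fill, flush and resume loop; Packet, SendStatus, serializeSome and the 100-byte minimum are stand-ins for illustration, not the project's API:

// Sketch only: serialize as much of each item as fits, send the packet when
// it is full (or nearly full), then resume the same item in a fresh packet.
#include <cstddef>
#include <string>
#include <vector>

struct Packet {
    size_t capacity = 1200;          // pretend MTU-sized payload
    std::string payload;
    size_t spaceLeft() const { return capacity - payload.size(); }
};

struct SendStatus {
    bool complete = false;           // everything for this item was written?
    size_t cursor = 0;               // where to resume on the next call
};

// Writes at most maxBytes of `item`, advancing status.cursor; marks complete
// once the whole item has been emitted.
std::string serializeSome(const std::string& item, size_t maxBytes, SendStatus& status) {
    std::string chunk = item.substr(status.cursor, maxBytes);
    status.cursor += chunk.size();
    status.complete = (status.cursor == item.size());
    return chunk;
}

size_t sendAll(const std::vector<std::string>& items) {
    size_t packetsSent = 0;
    Packet packet;
    const size_t minUseful = 100;    // don't resume into a nearly-full packet
    for (const auto& item : items) {
        SendStatus status;
        do {
            packet.payload += serializeSome(item, packet.spaceLeft(), status);
            if (!status.complete || packet.spaceLeft() < minUseful) {
                ++packetsSent;       // stand-in for nodeList->sendPacket(...)
                packet = Packet{};   // start a fresh packet and keep going
            }
        } while (!status.complete);
    }
    if (!packet.payload.empty()) {
        ++packetsSent;               // flush whatever is left at the end
    }
    return packetsSent;
}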
@@ -101,7 +101,7 @@ public:
void harvestStats(AvatarMixerSlaveStats& stats);

private:
int sendIdentityPacket(const AvatarMixerClientData* nodeData, const SharedNodePointer& destinationNode);
int sendIdentityPacket(NLPacketList& packet, const AvatarMixerClientData* nodeData, const Node& destinationNode);
int sendReplicatedIdentityPacket(const Node& agentNode, const AvatarMixerClientData* nodeData, const Node& destinationNode);

qint64 addChangedTraitsToBulkPacket(AvatarMixerClientData* listeningNodeData,
@@ -66,7 +66,7 @@ size_t AvatarDataPacket::maxFaceTrackerInfoSize(size_t numBlendshapeCoefficients
}

size_t AvatarDataPacket::maxJointDataSize(size_t numJoints, bool hasGrabJoints) {
const size_t validityBitsSize = (size_t)std::ceil(numJoints / (float)BITS_IN_BYTE);
const size_t validityBitsSize = calcBitVectorSize((int)numJoints);

size_t totalSize = sizeof(uint8_t); // numJoints

@@ -228,18 +228,18 @@ float AvatarData::getDistanceBasedMinTranslationDistance(glm::vec3 viewerPositio

// we want to track outbound data in this case...
QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
AvatarDataPacket::HasFlags hasFlagsOut;
auto lastSentTime = _lastToByteArray;
_lastToByteArray = usecTimestampNow();
return AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
hasFlagsOut, dropFaceTracking, false, glm::vec3(0), nullptr,
&_outboundDataRate);
AvatarDataPacket::SendStatus sendStatus;
auto avatarByteArray = AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
sendStatus, dropFaceTracking, false, glm::vec3(0), nullptr, 0, &_outboundDataRate);
return avatarByteArray;
}

QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime,
const QVector<JointData>& lastSentJointData,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust,
glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut) const {
AvatarDataPacket::SendStatus& sendStatus, bool dropFaceTracking, bool distanceAdjust,
glm::vec3 viewerPosition, QVector<JointData>* sentJointDataOut, int maxDataSize, AvatarDataRate* outboundDataRateOut) const {

bool cullSmallChanges = (dataDetail == CullSmallData);
bool sendAll = (dataDetail == SendAllData);

@@ -247,11 +247,23 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
bool sendPALMinimum = (dataDetail == PALMinimum);

lazyInitHeadData();
ASSERT(maxDataSize == 0 || (size_t)maxDataSize >= AvatarDataPacket::MIN_BULK_PACKET_SIZE);

// Leading flags, to indicate how much data is actually included in the packet...
AvatarDataPacket::HasFlags wantedFlags = 0;
AvatarDataPacket::HasFlags includedFlags = 0;
AvatarDataPacket::HasFlags extraReturnedFlags = 0; // For partial joint data.

// special case, if we were asked for no data, then just include the flags all set to nothing
if (dataDetail == NoData) {
AvatarDataPacket::HasFlags packetStateFlags = 0;
QByteArray avatarDataByteArray(reinterpret_cast<char*>(&packetStateFlags), sizeof(packetStateFlags));
sendStatus.itemFlags = wantedFlags;

QByteArray avatarDataByteArray;
if (sendStatus.sendUUID) {
avatarDataByteArray.append(getSessionUUID().toRfc4122().data(), NUM_BYTES_RFC4122_UUID);
}

avatarDataByteArray.append((char*) &wantedFlags, sizeof wantedFlags);
return avatarDataByteArray;
}
@@ -274,109 +286,141 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
// 3 translations * 6 bytes = 6.48kbps
//

auto parentID = getParentID();

bool hasAvatarGlobalPosition = true; // always include global position
bool hasAvatarOrientation = false;
bool hasAvatarBoundingBox = false;
bool hasAvatarScale = false;
bool hasLookAtPosition = false;
bool hasAudioLoudness = false;
bool hasSensorToWorldMatrix = false;
bool hasAdditionalFlags = false;

// local position, and parent info only apply to avatars that are parented. The local position
// and the parent info can change independently though, so we track their "changed since"
// separately
bool hasParentInfo = false;
bool hasAvatarLocalPosition = false;

bool hasFaceTrackerInfo = false;
bool hasJointData = false;
bool hasJointDefaultPoseFlags = false;
bool hasGrabJoints = false;
QUuid parentID;

glm::mat4 leftFarGrabMatrix;
glm::mat4 rightFarGrabMatrix;
glm::mat4 mouseFarGrabMatrix;

if (sendPALMinimum) {
hasAudioLoudness = true;
} else {
hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
hasAvatarLocalPosition = hasParent() && (sendAll ||
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));
if (sendStatus.itemFlags == 0) {
// New avatar ...
bool hasAvatarGlobalPosition = true; // always include global position
bool hasAvatarOrientation = false;
bool hasAvatarBoundingBox = false;
bool hasAvatarScale = false;
bool hasLookAtPosition = false;
bool hasAudioLoudness = false;
bool hasSensorToWorldMatrix = false;
bool hasJointData = false;
bool hasJointDefaultPoseFlags = false;
bool hasAdditionalFlags = false;

hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
(sendAll || faceTrackerInfoChangedSince(lastSentTime));
hasJointData = sendAll || !sendMinimum;
hasJointDefaultPoseFlags = hasJointData;
if (hasJointData) {
bool leftValid;
leftFarGrabMatrix = _farGrabLeftMatrixCache.get(leftValid);
if (!leftValid) {
leftFarGrabMatrix = glm::mat4();
}
bool rightValid;
rightFarGrabMatrix = _farGrabRightMatrixCache.get(rightValid);
if (!rightValid) {
rightFarGrabMatrix = glm::mat4();
}
bool mouseValid;
mouseFarGrabMatrix = _farGrabMouseMatrixCache.get(mouseValid);
if (!mouseValid) {
mouseFarGrabMatrix = glm::mat4();
}
hasGrabJoints = (leftValid || rightValid || mouseValid);
// local position, and parent info only apply to avatars that are parented. The local position
// and the parent info can change independently though, so we track their "changed since"
// separately
bool hasParentInfo = false;
bool hasAvatarLocalPosition = false;

bool hasFaceTrackerInfo = false;

if (sendPALMinimum) {
hasAudioLoudness = true;
} else {
hasAvatarOrientation = sendAll || rotationChangedSince(lastSentTime);
hasAvatarBoundingBox = sendAll || avatarBoundingBoxChangedSince(lastSentTime);
hasAvatarScale = sendAll || avatarScaleChangedSince(lastSentTime);
hasLookAtPosition = sendAll || lookAtPositionChangedSince(lastSentTime);
hasAudioLoudness = sendAll || audioLoudnessChangedSince(lastSentTime);
hasSensorToWorldMatrix = sendAll || sensorToWorldMatrixChangedSince(lastSentTime);
hasAdditionalFlags = sendAll || additionalFlagsChangedSince(lastSentTime);
hasParentInfo = sendAll || parentInfoChangedSince(lastSentTime);
hasAvatarLocalPosition = hasParent() && (sendAll ||
tranlationChangedSince(lastSentTime) ||
parentInfoChangedSince(lastSentTime));

hasFaceTrackerInfo = !dropFaceTracking && (hasFaceTracker() || getHasScriptedBlendshapes()) &&
(sendAll || faceTrackerInfoChangedSince(lastSentTime));
hasJointData = !sendMinimum;
hasJointDefaultPoseFlags = hasJointData;
}

wantedFlags =
(hasAvatarGlobalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_GLOBAL_POSITION : 0)
| (hasAvatarBoundingBox ? AvatarDataPacket::PACKET_HAS_AVATAR_BOUNDING_BOX : 0)
| (hasAvatarOrientation ? AvatarDataPacket::PACKET_HAS_AVATAR_ORIENTATION : 0)
| (hasAvatarScale ? AvatarDataPacket::PACKET_HAS_AVATAR_SCALE : 0)
| (hasLookAtPosition ? AvatarDataPacket::PACKET_HAS_LOOK_AT_POSITION : 0)
| (hasAudioLoudness ? AvatarDataPacket::PACKET_HAS_AUDIO_LOUDNESS : 0)
| (hasSensorToWorldMatrix ? AvatarDataPacket::PACKET_HAS_SENSOR_TO_WORLD_MATRIX : 0)
| (hasAdditionalFlags ? AvatarDataPacket::PACKET_HAS_ADDITIONAL_FLAGS : 0)
| (hasParentInfo ? AvatarDataPacket::PACKET_HAS_PARENT_INFO : 0)
| (hasAvatarLocalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_LOCAL_POSITION : 0)
| (hasFaceTrackerInfo ? AvatarDataPacket::PACKET_HAS_FACE_TRACKER_INFO : 0)
| (hasJointData ? AvatarDataPacket::PACKET_HAS_JOINT_DATA : 0)
| (hasJointDefaultPoseFlags ? AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS : 0)
| (hasJointData ? AvatarDataPacket::PACKET_HAS_GRAB_JOINTS : 0);

sendStatus.itemFlags = wantedFlags;
sendStatus.rotationsSent = 0;
sendStatus.translationsSent = 0;
} else { // Continuing avatar ...
wantedFlags = sendStatus.itemFlags;
if (wantedFlags & AvatarDataPacket::PACKET_HAS_GRAB_JOINTS) {
// Must send joints for grab joints -
wantedFlags |= AvatarDataPacket::PACKET_HAS_JOINT_DATA;
}
}

if (wantedFlags & AvatarDataPacket::PACKET_HAS_GRAB_JOINTS) {
bool leftValid;
leftFarGrabMatrix = _farGrabLeftMatrixCache.get(leftValid);
if (!leftValid) {
leftFarGrabMatrix = glm::mat4();
}
bool rightValid;
rightFarGrabMatrix = _farGrabRightMatrixCache.get(rightValid);
if (!rightValid) {
rightFarGrabMatrix = glm::mat4();
}
bool mouseValid;
mouseFarGrabMatrix = _farGrabMouseMatrixCache.get(mouseValid);
if (!mouseValid) {
mouseFarGrabMatrix = glm::mat4();
}
if (!(leftValid || rightValid || mouseValid)) {
wantedFlags &= ~AvatarDataPacket::PACKET_HAS_GRAB_JOINTS;
}
}
if (wantedFlags & (AvatarDataPacket::PACKET_HAS_ADDITIONAL_FLAGS | AvatarDataPacket::PACKET_HAS_PARENT_INFO)) {
parentID = getParentID();
}

const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
(hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) : 0) +
(hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size(), hasGrabJoints) : 0) +
(hasJointDefaultPoseFlags ? AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size()) : 0);
const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE + NUM_BYTES_RFC4122_UUID +
AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getBlendshapeCoefficients().size()) +
AvatarDataPacket::maxJointDataSize(_jointData.size(), true) +
AvatarDataPacket::maxJointDefaultPoseFlagsSize(_jointData.size());

if (maxDataSize == 0) {
maxDataSize = (int)byteArraySize;
}

QByteArray avatarDataByteArray((int)byteArraySize, 0);
unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(avatarDataByteArray.data());
unsigned char* startPosition = destinationBuffer;

// Leading flags, to indicate how much data is actually included in the packet...
AvatarDataPacket::HasFlags packetStateFlags =
(hasAvatarGlobalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_GLOBAL_POSITION : 0)
| (hasAvatarBoundingBox ? AvatarDataPacket::PACKET_HAS_AVATAR_BOUNDING_BOX : 0)
| (hasAvatarOrientation ? AvatarDataPacket::PACKET_HAS_AVATAR_ORIENTATION : 0)
| (hasAvatarScale ? AvatarDataPacket::PACKET_HAS_AVATAR_SCALE : 0)
| (hasLookAtPosition ? AvatarDataPacket::PACKET_HAS_LOOK_AT_POSITION : 0)
| (hasAudioLoudness ? AvatarDataPacket::PACKET_HAS_AUDIO_LOUDNESS : 0)
| (hasSensorToWorldMatrix ? AvatarDataPacket::PACKET_HAS_SENSOR_TO_WORLD_MATRIX : 0)
| (hasAdditionalFlags ? AvatarDataPacket::PACKET_HAS_ADDITIONAL_FLAGS : 0)
| (hasParentInfo ? AvatarDataPacket::PACKET_HAS_PARENT_INFO : 0)
| (hasAvatarLocalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_LOCAL_POSITION : 0)
| (hasFaceTrackerInfo ? AvatarDataPacket::PACKET_HAS_FACE_TRACKER_INFO : 0)
| (hasJointData ? AvatarDataPacket::PACKET_HAS_JOINT_DATA : 0)
| (hasJointDefaultPoseFlags ? AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS : 0)
| (hasGrabJoints ? AvatarDataPacket::PACKET_HAS_GRAB_JOINTS : 0);

memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
destinationBuffer += sizeof(packetStateFlags);
const unsigned char* const startPosition = destinationBuffer;
const unsigned char* const packetEnd = destinationBuffer + maxDataSize;

#define AVATAR_MEMCPY(src) \
memcpy(destinationBuffer, &(src), sizeof(src)); \
destinationBuffer += sizeof(src);

if (hasAvatarGlobalPosition) {
auto startSection = destinationBuffer;
AVATAR_MEMCPY(_globalPosition);
// If we want an item and there's sufficient space:
#define IF_AVATAR_SPACE(flag, space) \
if ((wantedFlags & AvatarDataPacket::flag) \
&& (packetEnd - destinationBuffer) >= (ptrdiff_t)(space) \
&& (includedFlags |= AvatarDataPacket::flag))

if (sendStatus.sendUUID) {
memcpy(destinationBuffer, getSessionUUID().toRfc4122(), NUM_BYTES_RFC4122_UUID);
destinationBuffer += NUM_BYTES_RFC4122_UUID;
}

unsigned char * packetFlagsLocation = destinationBuffer;
destinationBuffer += sizeof(wantedFlags);

IF_AVATAR_SPACE(PACKET_HAS_AVATAR_GLOBAL_POSITION, sizeof _globalPosition) {
auto startSection = destinationBuffer;
AVATAR_MEMCPY(_globalPosition);

int numBytes = destinationBuffer - startSection;

if (outboundDataRateOut) {
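The IF_AVATAR_SPACE macro introduced above only packs an optional section when it is both wanted and still fits in the remaining buffer, and it records in includedFlags what actually made it so that the rest can be retried in the next packet. A macro-free sketch of the same guard pattern; the item set, flag values and Writer type are invented for illustration:

// Sketch only: conditionally include optional items based on remaining space,
// tracking which items were written via a flags bitmask.
#include <cstddef>
#include <cstdint>
#include <cstring>

using Flags = uint16_t;
constexpr Flags HAS_POSITION    = 1 << 0;
constexpr Flags HAS_ORIENTATION = 1 << 1;

struct Writer {
    uint8_t* cursor;
    const uint8_t* end;
    Flags wanted = 0;      // what the caller asked for
    Flags included = 0;    // what actually fit in the buffer

    // Returns true (and claims the flag) only if the item is wanted and there
    // are at least `size` bytes of space left; otherwise the item is skipped
    // and can be retried in the next packet.
    bool claim(Flags flag, size_t size) {
        if ((wanted & flag) && size_t(end - cursor) >= size) {
            included |= flag;
            return true;
        }
        return false;
    }
};

void writeAvatarish(Writer& w, const float position[3], const float orientation[4]) {
    if (w.claim(HAS_POSITION, 3 * sizeof(float))) {
        std::memcpy(w.cursor, position, 3 * sizeof(float));
        w.cursor += 3 * sizeof(float);
    }
    if (w.claim(HAS_ORIENTATION, 4 * sizeof(float))) {
        std::memcpy(w.cursor, orientation, 4 * sizeof(float));
        w.cursor += 4 * sizeof(float);
    }
    // (w.wanted & ~w.included) is what still needs to go into a later packet.
}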
@@ -384,7 +428,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasAvatarBoundingBox) {
IF_AVATAR_SPACE(PACKET_HAS_AVATAR_BOUNDING_BOX, sizeof _globalBoundingBoxDimensions + sizeof _globalBoundingBoxOffset) {
auto startSection = destinationBuffer;
AVATAR_MEMCPY(_globalBoundingBoxDimensions);
AVATAR_MEMCPY(_globalBoundingBoxOffset);

@@ -395,7 +439,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasAvatarOrientation) {
IF_AVATAR_SPACE(PACKET_HAS_AVATAR_ORIENTATION, sizeof(AvatarDataPacket::SixByteQuat)) {
auto startSection = destinationBuffer;
auto localOrientation = getOrientationOutbound();
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, localOrientation);

@@ -406,7 +450,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasAvatarScale) {
IF_AVATAR_SPACE(PACKET_HAS_AVATAR_SCALE, sizeof(AvatarDataPacket::AvatarScale)) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AvatarScale*>(destinationBuffer);
auto scale = getDomainLimitedScale();

@@ -419,7 +463,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasLookAtPosition) {
IF_AVATAR_SPACE(PACKET_HAS_LOOK_AT_POSITION, sizeof(_headData->getLookAtPosition()) ) {
auto startSection = destinationBuffer;
AVATAR_MEMCPY(_headData->getLookAtPosition());
int numBytes = destinationBuffer - startSection;

@@ -428,7 +472,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasAudioLoudness) {
IF_AVATAR_SPACE(PACKET_HAS_AUDIO_LOUDNESS, sizeof(AvatarDataPacket::AudioLoudness)) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AudioLoudness*>(destinationBuffer);
data->audioLoudness = packFloatGainToByte(getAudioLoudness() / AUDIO_LOUDNESS_SCALE);

@@ -440,7 +484,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasSensorToWorldMatrix) {
IF_AVATAR_SPACE(PACKET_HAS_SENSOR_TO_WORLD_MATRIX, sizeof(AvatarDataPacket::SensorToWorldMatrix)) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::SensorToWorldMatrix*>(destinationBuffer);
glm::mat4 sensorToWorldMatrix = getSensorToWorldMatrix();

@@ -458,7 +502,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasAdditionalFlags) {
IF_AVATAR_SPACE(PACKET_HAS_ADDITIONAL_FLAGS, sizeof (uint16_t)) {
auto startSection = destinationBuffer;
auto data = reinterpret_cast<AvatarDataPacket::AdditionalFlags*>(destinationBuffer);

@@ -506,7 +550,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasParentInfo) {
IF_AVATAR_SPACE(PACKET_HAS_PARENT_INFO, sizeof(AvatarDataPacket::ParentInfo)) {
auto startSection = destinationBuffer;
auto parentInfo = reinterpret_cast<AvatarDataPacket::ParentInfo*>(destinationBuffer);
QByteArray referentialAsBytes = parentID.toRfc4122();

@@ -520,7 +564,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

if (hasAvatarLocalPosition) {
IF_AVATAR_SPACE(PACKET_HAS_AVATAR_LOCAL_POSITION, sizeof(getLocalPosition()) ) {
auto startSection = destinationBuffer;
const auto localPosition = getLocalPosition();
AVATAR_MEMCPY(localPosition);

@@ -531,11 +575,11 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

const auto& blendshapeCoefficients = _headData->getBlendshapeCoefficients();
// If it is connected, pack up the data
if (hasFaceTrackerInfo) {
IF_AVATAR_SPACE(PACKET_HAS_FACE_TRACKER_INFO, sizeof(AvatarDataPacket::FaceTrackerInfo) + (size_t)blendshapeCoefficients.size() * sizeof(float)) {
auto startSection = destinationBuffer;
auto faceTrackerInfo = reinterpret_cast<AvatarDataPacket::FaceTrackerInfo*>(destinationBuffer);
const auto& blendshapeCoefficients = _headData->getBlendshapeCoefficients();
// note: we don't use the blink and average loudness, we just use the numBlendShapes and
// compute the procedural info on the client side.
faceTrackerInfo->leftEyeBlink = _headData->_leftEyeBlink;
@@ -555,125 +599,125 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}

QVector<JointData> jointData;
if (hasJointData || hasJointDefaultPoseFlags) {
if (wantedFlags & (AvatarDataPacket::PACKET_HAS_JOINT_DATA | AvatarDataPacket::PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS)) {
QReadLocker readLock(&_jointDataLock);
jointData = _jointData;
}
const int numJoints = jointData.size();
assert(numJoints <= 255);
const int jointBitVectorSize = calcBitVectorSize(numJoints);

// If it is connected, pack up the data
if (hasJointData) {
// Start joints if room for at least the faux joints.
IF_AVATAR_SPACE(PACKET_HAS_JOINT_DATA, 1 + 2 * jointBitVectorSize + AvatarDataPacket::FAUX_JOINTS_SIZE) {
// Allow for faux joints + translation bit-vector:
const ptrdiff_t minSizeForJoint = sizeof(AvatarDataPacket::SixByteQuat)
+ jointBitVectorSize + AvatarDataPacket::FAUX_JOINTS_SIZE;
auto startSection = destinationBuffer;

// joint rotation data
int numJoints = jointData.size();
*destinationBuffer++ = (uint8_t)numJoints;

unsigned char* validityPosition = destinationBuffer;
unsigned char validity = 0;
int validityBit = 0;
int numValidityBytes = calcBitVectorSize(numJoints);
memset(validityPosition, 0, jointBitVectorSize);

#ifdef WANT_DEBUG
int rotationSentCount = 0;
unsigned char* beforeRotations = destinationBuffer;
#endif

destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
destinationBuffer += jointBitVectorSize; // Move pointer past the validity bytes

// sentJointDataOut and lastSentJointData might be the same vector
if (sentJointDataOut) {
sentJointDataOut->resize(numJoints); // Make sure the destination is resized before using it
}
const JointData *const joints = jointData.data();
JointData *const sentJoints = sentJointDataOut ? sentJointDataOut->data() : nullptr;

float minRotationDOT = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinRotationDOT(viewerPosition) : AVATAR_MIN_ROTATION_DOT;

for (int i = 0; i < jointData.size(); i++) {
const JointData& data = jointData[i];
int i = sendStatus.rotationsSent;
for (; i < numJoints; ++i) {
const JointData& data = joints[i];
const JointData& last = lastSentJointData[i];

if (!data.rotationIsDefaultPose) {
// The dot product for larger rotations is a lower number.
// So if the dot() is less than the value, then the rotation is a larger angle of rotation
if (sendAll || last.rotationIsDefaultPose || (!cullSmallChanges && last.rotation != data.rotation)
|| (cullSmallChanges && fabsf(glm::dot(last.rotation, data.rotation)) < minRotationDOT) ) {
validity |= (1 << validityBit);
if (packetEnd - destinationBuffer >= minSizeForJoint) {
if (!data.rotationIsDefaultPose) {
// The dot product for larger rotations is a lower number,
// so if the dot() is less than the value, then the rotation is a larger angle of rotation
if (sendAll || last.rotationIsDefaultPose || (!cullSmallChanges && last.rotation != data.rotation)
|| (cullSmallChanges && fabsf(glm::dot(last.rotation, data.rotation)) < minRotationDOT)) {
validityPosition[i / BITS_IN_BYTE] |= 1 << (i % BITS_IN_BYTE);
#ifdef WANT_DEBUG
rotationSentCount++;
rotationSentCount++;
#endif
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);

if (sentJointDataOut) {
(*sentJointDataOut)[i].rotation = data.rotation;
if (sentJoints) {
sentJoints[i].rotation = data.rotation;
}
}
}
} else {
break;
}

if (sentJointDataOut) {
(*sentJointDataOut)[i].rotationIsDefaultPose = data.rotationIsDefaultPose;
if (sentJoints) {
sentJoints[i].rotationIsDefaultPose = data.rotationIsDefaultPose;
}

if (++validityBit == BITS_IN_BYTE) {
*validityPosition++ = validity;
validityBit = validity = 0;
}
}
if (validityBit != 0) {
*validityPosition++ = validity;
}
sendStatus.rotationsSent = i;

// joint translation data
validityPosition = destinationBuffer;
validity = 0;
validityBit = 0;

#ifdef WANT_DEBUG
int translationSentCount = 0;
unsigned char* beforeTranslations = destinationBuffer;
#endif

destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
memset(destinationBuffer, 0, jointBitVectorSize);
destinationBuffer += jointBitVectorSize; // Move pointer past the validity bytes

float minTranslation = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinTranslationDistance(viewerPosition) : AVATAR_MIN_TRANSLATION;

float maxTranslationDimension = 0.0;
for (int i = 0; i < jointData.size(); i++) {
const JointData& data = jointData[i];
i = sendStatus.translationsSent;
for (; i < numJoints; ++i) {
const JointData& data = joints[i];
const JointData& last = lastSentJointData[i];

if (!data.translationIsDefaultPose) {
if (sendAll || last.translationIsDefaultPose || (!cullSmallChanges && last.translation != data.translation)
|| (cullSmallChanges && glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation)) {

validity |= (1 << validityBit);
if (packetEnd - destinationBuffer >= minSizeForJoint) {
if (!data.translationIsDefaultPose) {
if (sendAll || last.translationIsDefaultPose || (!cullSmallChanges && last.translation != data.translation)
|| (cullSmallChanges && glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation)) {
validityPosition[i / BITS_IN_BYTE] |= 1 << (i % BITS_IN_BYTE);
#ifdef WANT_DEBUG
translationSentCount++;
translationSentCount++;
#endif
maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);

destinationBuffer +=
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
destinationBuffer +=
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);

if (sentJointDataOut) {
(*sentJointDataOut)[i].translation = data.translation;
if (sentJoints) {
sentJoints[i].translation = data.translation;
}
}
}
} else {
break;
}

if (sentJointDataOut) {
(*sentJointDataOut)[i].translationIsDefaultPose = data.translationIsDefaultPose;
if (sentJoints) {
sentJoints[i].translationIsDefaultPose = data.translationIsDefaultPose;
}

if (++validityBit == BITS_IN_BYTE) {
*validityPosition++ = validity;
validityBit = validity = 0;
}
}

if (validityBit != 0) {
*validityPosition++ = validity;
}
sendStatus.translationsSent = i;

// faux joints
Transform controllerLeftHandTransform = Transform(getControllerLeftHandMatrix());
@ -686,7 +730,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerRightHandTransform.getTranslation(),
TRANSLATION_COMPRESSION_RADIX);

if (hasGrabJoints) {
IF_AVATAR_SPACE(PACKET_HAS_GRAB_JOINTS, sizeof (AvatarDataPacket::FarGrabJoints)) {
// the far-grab joints may range further than 3 meters, so we can't use packFloatVec3ToSignedTwoByteFixed etc
auto startSection = destinationBuffer;
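The comment above about far-grab joints comes down to range: a signed two-byte fixed-point value with a compression radix can only represent roughly +/- 32768 / 2^radix metres. A scalar sketch of that encoding (illustrative only; not the repository's packFloatVec3ToSignedTwoByteFixed implementation):

    #include <algorithm>
    #include <cmath>
    #include <cstdint>

    // Scale by 2^radix, clamp to the signed 16-bit range, and round.
    int16_t packFloatToSignedTwoByteFixed(float value, int radix) {
        float scaled = value * float(1 << radix);
        scaled = std::min(32767.0f, std::max(-32768.0f, scaled));
        return static_cast<int16_t>(std::lround(scaled));
    }

    // Inverse mapping used on the receiving side.
    float unpackSignedTwoByteFixed(int16_t fixedValue, int radix) {
        return float(fixedValue) / float(1 << radix);
    }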
@ -728,18 +772,20 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
#endif

if (sendStatus.rotationsSent != numJoints || sendStatus.translationsSent != numJoints) {
extraReturnedFlags |= AvatarDataPacket::PACKET_HAS_JOINT_DATA;
}

int numBytes = destinationBuffer - startSection;
if (outboundDataRateOut) {
outboundDataRateOut->jointDataRate.increment(numBytes);
}

}

if (hasJointDefaultPoseFlags) {
IF_AVATAR_SPACE(PACKET_HAS_JOINT_DEFAULT_POSE_FLAGS, 1 + 2 * jointBitVectorSize) {
auto startSection = destinationBuffer;

// write numJoints
int numJoints = jointData.size();
*destinationBuffer++ = (uint8_t)numJoints;

// write rotationIsDefaultPose bits

@ -758,6 +804,10 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}

memcpy(packetFlagsLocation, &includedFlags, sizeof(includedFlags));
// Return dropped items.
sendStatus.itemFlags = (wantedFlags & ~includedFlags) | extraReturnedFlags;

int avatarDataSize = destinationBuffer - startPosition;

if (avatarDataSize > (int)byteArraySize) {

@ -766,6 +816,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}

return avatarDataByteArray.left(avatarDataSize);

#undef AVATAR_MEMCPY
#undef IF_AVATAR_SPACE
}

// NOTE: This is never used in a "distanceAdjust" mode, so it's ok that it doesn't use a variable minimum rotation/translation
@ -918,6 +971,8 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
_avatarBoundingBoxChanged = now;
}

_defaultBubbleBox = computeBubbleBox();

sourceBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);
int numBytesRead = sourceBuffer - startSection;
_avatarBoundingBoxRate.increment(numBytesRead);
@ -1727,11 +1782,9 @@ glm::quat AvatarData::getOrientationOutbound() const {
return (getLocalOrientation());
}

void AvatarData::processAvatarIdentity(const QByteArray& identityData, bool& identityChanged,
void AvatarData::processAvatarIdentity(QDataStream& packetStream, bool& identityChanged,
bool& displayNameChanged) {

QDataStream packetStream(identityData);

QUuid avatarSessionID;

// peek the sequence number, this will tell us if we should be processing this identity packet at all

@ -1746,17 +1799,18 @@ void AvatarData::processAvatarIdentity(const QByteArray& identityData, bool& ide
<< (udt::SequenceNumber::Type) incomingSequenceNumber;
}

if (incomingSequenceNumber > _identitySequenceNumber) {
Identity identity;

packetStream
>> identity.attachmentData
>> identity.displayName
>> identity.sessionDisplayName
>> identity.isReplicated
>> identity.lookAtSnappingEnabled
;

if (incomingSequenceNumber > _identitySequenceNumber) {

// set the store identity sequence number to match the incoming identity
_identitySequenceNumber = incomingSequenceNumber;
@ -2902,3 +2956,21 @@ void AvatarEntityMapFromScriptValue(const QScriptValue& object, AvatarEntityMap&
value[EntityID] = binaryEntityProperties;
}
}

const float AvatarData::DEFAULT_BUBBLE_SCALE = 2.4f; // magic number determined empirically

AABox AvatarData::computeBubbleBox(float bubbleScale) const {
AABox box = AABox(_globalBoundingBoxOffset - _globalBoundingBoxDimensions, _globalBoundingBoxDimensions);
glm::vec3 size = box.getScale();
size *= bubbleScale;
const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3f);
size = glm::max(size, MIN_BUBBLE_SCALE);
box.setScaleStayCentered(size);
return box;
}

AABox AvatarData::getDefaultBubbleBox() const {
AABox bubbleBox(_defaultBubbleBox);
bubbleBox.translate(_globalPosition);
return bubbleBox;
}
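For reference, the geometric step inside computeBubbleBox() above is "scale the box about its centre, but never below a minimum size". A self-contained sketch of that operation (plain glm; SimpleBox is an illustrative stand-in for AABox, not the engine type):

    #include <glm/glm.hpp>

    struct SimpleBox {
        glm::vec3 corner;      // minimum corner
        glm::vec3 dimensions;  // extents along each axis
    };

    // Grow the box to bubbleScale times its size, clamp to minSize, keep the centre fixed.
    SimpleBox scaleStayCentered(const SimpleBox& box, float bubbleScale, const glm::vec3& minSize) {
        glm::vec3 newSize = glm::max(box.dimensions * bubbleScale, minSize);
        glm::vec3 centre = box.corner + 0.5f * box.dimensions;
        return { centre - 0.5f * newSize, newSize };
    }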
@ -296,6 +296,17 @@ namespace AvatarDataPacket {
} PACKED_END;
const size_t FAR_GRAB_JOINTS_SIZE = 84;
static_assert(sizeof(FarGrabJoints) == FAR_GRAB_JOINTS_SIZE, "AvatarDataPacket::FarGrabJoints size doesn't match.");

static const size_t MIN_BULK_PACKET_SIZE = NUM_BYTES_RFC4122_UUID + HEADER_SIZE;
static const size_t FAUX_JOINTS_SIZE = 2 * (sizeof(SixByteQuat) + sizeof(SixByteTrans));

struct SendStatus {
HasFlags itemFlags { 0 };
bool sendUUID { false };
int rotationsSent { 0 }; // ie: index of next unsent joint
int translationsSent { 0 };
operator bool() { return itemFlags == 0; }
};
}

const float MAX_AUDIO_LOUDNESS = 1000.0f; // close enough for mouth animation
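A stand-alone illustration of the SendStatus contract added above (the HasFlags alias here is an assumption for the example, not taken from the real header): itemFlags keeps a bit for every item category still waiting to be written, so converting the status to bool asks "has everything been sent?".

    #include <cstdint>

    using HasFlags = uint16_t;  // assumption for this sketch

    struct SendStatusSketch {
        HasFlags itemFlags { 0 };
        bool sendUUID { false };
        int rotationsSent { 0 };     // index of the next unsent joint rotation
        int translationsSent { 0 };  // index of the next unsent joint translation
        explicit operator bool() const { return itemFlags == 0; }
    };

    int main() {
        SendStatusSketch status;
        status.itemFlags = 0x3;              // two item categories still wanted
        status.itemFlags &= HasFlags(~0x1);  // one category has now been packed
        return bool(status) ? 0 : 1;         // returns 1: one category is still unsent
    }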
@ -463,8 +474,8 @@ public:
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false);

virtual QByteArray toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime, const QVector<JointData>& lastSentJointData,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
QVector<JointData>* sentJointDataOut, AvatarDataRate* outboundDataRateOut = nullptr) const;
AvatarDataPacket::SendStatus& sendStatus, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
QVector<JointData>* sentJointDataOut, int maxDataSize = 0, AvatarDataRate* outboundDataRateOut = nullptr) const;

virtual void doneEncoding(bool cullSmallChanges);
@ -971,7 +982,7 @@ public:

// identityChanged returns true if identity has changed, false otherwise.
// identityChanged returns true if identity has changed, false otherwise. Similarly for displayNameChanged and skeletonModelUrlChange.
void processAvatarIdentity(const QByteArray& identityData, bool& identityChanged, bool& displayNameChanged);
void processAvatarIdentity(QDataStream& packetStream, bool& identityChanged, bool& displayNameChanged);

qint64 packTrait(AvatarTraits::TraitType traitType, ExtendedIODevice& destination,
AvatarTraits::TraitVersion traitVersion = AvatarTraits::NULL_TRAIT_VERSION);
@ -1112,6 +1123,7 @@ public:

glm::vec3 getClientGlobalPosition() const { return _globalPosition; }
AABox getGlobalBoundingBox() const { return AABox(_globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions, _globalBoundingBoxDimensions); }
AABox getDefaultBubbleBox() const;

/**jsdoc
* @function MyAvatar.getAvatarEntityData
@ -1204,6 +1216,9 @@ public:
void setReplicaIndex(int replicaIndex) { _replicaIndex = replicaIndex; }
int getReplicaIndex() { return _replicaIndex; }

static const float DEFAULT_BUBBLE_SCALE; /* = 2.4 */
AABox computeBubbleBox(float bubbleScale = DEFAULT_BUBBLE_SCALE) const;

void setIsNewAvatar(bool isNewAvatar) { _isNewAvatar = isNewAvatar; }
bool getIsNewAvatar() { return _isNewAvatar; }
@ -1440,6 +1455,8 @@ protected:
glm::vec3 _globalBoundingBoxDimensions;
glm::vec3 _globalBoundingBoxOffset;

AABox _defaultBubbleBox;

mutable ReadWriteLockable _avatarEntitiesLock;
AvatarEntityIDs _avatarEntityDetached; // recently detached from this avatar
AvatarEntityIDs _avatarEntityForRecording; // create new entities id for avatar recording
@ -85,8 +85,9 @@ std::vector<AvatarSharedPointer> AvatarReplicas::takeReplicas(const QUuid& paren
void AvatarReplicas::processAvatarIdentity(const QUuid& parentID, const QByteArray& identityData, bool& identityChanged, bool& displayNameChanged) {
if (_replicasMap.find(parentID) != _replicasMap.end()) {
auto &replicas = _replicasMap[parentID];
QDataStream identityDataStream(identityData);
for (auto avatar : replicas) {
avatar->processAvatarIdentity(identityData, identityChanged, displayNameChanged);
avatar->processAvatarIdentity(identityDataStream, identityChanged, displayNameChanged);
}
}
}
@ -284,39 +285,45 @@ AvatarSharedPointer AvatarHashMap::parseAvatarData(QSharedPointer<ReceivedMessag
}

void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
QDataStream avatarIdentityStream(message->getMessage());

// peek the avatar UUID from the incoming packet
QUuid identityUUID = QUuid::fromRfc4122(message->peek(NUM_BYTES_RFC4122_UUID));
while (!avatarIdentityStream.atEnd()) {
// peek the avatar UUID from the incoming packet
avatarIdentityStream.startTransaction();
QUuid identityUUID;
avatarIdentityStream >> identityUUID;
avatarIdentityStream.rollbackTransaction();

if (identityUUID.isNull()) {
qCDebug(avatars) << "Refusing to process identity packet for null avatar ID";
return;
}

// make sure this isn't for an ignored avatar
auto nodeList = DependencyManager::get<NodeList>();
static auto EMPTY = QUuid();

{
QReadLocker locker(&_hashLock);
auto me = _avatarHash.find(EMPTY);
if ((me != _avatarHash.end()) && (identityUUID == me.value()->getSessionUUID())) {
// We add MyAvatar to _avatarHash with an empty UUID. Code relies on this. In order to correctly handle an
// identity packet for ourself (such as when we are assigned a sessionDisplayName by the mixer upon joining),
// we make things match here.
identityUUID = EMPTY;
}
}

if (!nodeList->isIgnoringNode(identityUUID) || nodeList->getRequestsDomainListData()) {
// mesh URL for a UUID, find avatar in our list
bool isNewAvatar;
auto avatar = newOrExistingAvatar(identityUUID, sendingNode, isNewAvatar);
bool identityChanged = false;
bool displayNameChanged = false;
// In this case, the "sendingNode" is the Avatar Mixer.
avatar->processAvatarIdentity(message->getMessage(), identityChanged, displayNameChanged);
avatar->processAvatarIdentity(avatarIdentityStream, identityChanged, displayNameChanged);
_replicas.processAvatarIdentity(identityUUID, message->getMessage(), identityChanged, displayNameChanged);
}
}
}
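The transaction dance above is how Qt lets a reader peek at data without consuming it: start a read transaction, extract, then roll back so the next reader sees the same bytes. A minimal sketch of that pattern (the helper name is illustrative):

    #include <QDataStream>
    #include <QUuid>

    // Read the next UUID from the stream, then rewind so the full identity
    // record can still be parsed from the same position.
    QUuid peekUuid(QDataStream& stream) {
        stream.startTransaction();
        QUuid id;
        stream >> id;
        stream.rollbackTransaction();
        return id;
    }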
@ -68,8 +68,14 @@ namespace PrioritySortUtil {
void reserve(size_t num) {
_vector.reserve(num);
}
const std::vector<T>& getSortedVector() {
std::sort(_vector.begin(), _vector.end(), [](const T& left, const T& right) { return left.getPriority() > right.getPriority(); });
const std::vector<T>& getSortedVector(int numToSort = 0) {
if (numToSort == 0 || numToSort >= (int)_vector.size()) {
std::sort(_vector.begin(), _vector.end(),
[](const T& left, const T& right) { return left.getPriority() > right.getPriority(); });
} else {
std::partial_sort(_vector.begin(), _vector.begin() + numToSort, _vector.end(),
[](const T& left, const T& right) { return left.getPriority() > right.getPriority(); });
}
return _vector;
}
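Usage sketch for the new numToSort path (types and names here are illustrative): when only the first numToSort entries need to be in priority order, std::partial_sort leaves the rest unordered and does less work than a full std::sort.

    #include <algorithm>
    #include <vector>

    struct Item { float priority; };

    void sortTopByPriority(std::vector<Item>& items, int numToSort) {
        auto byPriority = [](const Item& a, const Item& b) { return a.priority > b.priority; };
        if (numToSort <= 0 || numToSort >= (int)items.size()) {
            std::sort(items.begin(), items.end(), byPriority);
        } else {
            // Only the first numToSort elements end up fully ordered.
            std::partial_sort(items.begin(), items.begin() + numToSort, items.end(), byPriority);
        }
    }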
@ -99,6 +105,9 @@ namespace PrioritySortUtil {
float radius = glm::max(thing.getRadius(), MIN_RADIUS);
// Other item's angle from view centre:
float cosineAngle = glm::dot(offset, view.getDirection()) / distance;
if (cosineAngle > 0.0f) {
cosineAngle = std::sqrt(cosineAngle);
}
float age = float((_usecCurrentTime - thing.getTimestamp()) / USECS_PER_SECOND);

// the "age" term accumulates at the sum of all weights
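A self-contained sketch of the view-centre term shown above (inputs are assumptions for the example): the dot product of the offset with the view direction, divided by the distance, is the cosine of the item's angle off-centre; taking the square root softens the falloff for items slightly off-axis.

    #include <glm/glm.hpp>
    #include <cmath>

    float viewCentreWeight(const glm::vec3& itemPosition, const glm::vec3& viewPosition,
                           const glm::vec3& viewDirection /* assumed unit length */) {
        glm::vec3 offset = itemPosition - viewPosition;
        float distance = glm::length(offset) + 0.001f;  // avoid division by zero
        float cosineAngle = glm::dot(offset, viewDirection) / distance;
        return cosineAngle > 0.0f ? std::sqrt(cosineAngle) : cosineAngle;
    }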