fix indentation in AvatarMixerSlave/AvatarMixerSlavePool

Stephen Birarda 2017-06-14 15:53:24 -07:00
parent afc9978841
commit 4c2fa4be64
2 changed files with 178 additions and 182 deletions

AvatarMixerSlave.cpp

@@ -204,216 +204,212 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
    ViewFrustum cameraView = nodeData->getViewFrustom();

    std::priority_queue<AvatarPriority> sortedAvatars;
    AvatarData::sortAvatars(avatarList, cameraView, sortedAvatars,

        [&](AvatarSharedPointer avatar)->uint64_t {
            auto avatarNode = avatarDataToNodes[avatar];
            assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map
            return nodeData->getLastBroadcastTime(avatarNode->getUUID());
        },

        [&](AvatarSharedPointer avatar)->float {
            glm::vec3 nodeBoxHalfScale = (avatar->getPosition() - avatar->getGlobalBoundingBoxCorner());
            return glm::max(nodeBoxHalfScale.x, glm::max(nodeBoxHalfScale.y, nodeBoxHalfScale.z));
        },

        [&](AvatarSharedPointer avatar)->bool {
            if (avatar == thisAvatar) {
                return true; // ignore ourselves...
            }

            bool shouldIgnore = false;

            // We will also ignore other nodes for a couple of different reasons:
            //   1) ignore bubbles and ignore specific node
            //   2) the node hasn't really updated its frame data recently, this can
            //      happen if for example the avatar is connected on a desktop and sending
            //      updates at ~30hz. So every 3 frames we skip a frame.
            auto avatarNode = avatarDataToNodes[avatar];
            assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map

            const AvatarMixerClientData* avatarNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
            assert(avatarNodeData); // we can't have gotten here without avatarNode having valid data
            quint64 startIgnoreCalculation = usecTimestampNow();

            // make sure we have data for this avatar, that it isn't the same node,
            // and isn't an avatar that the viewing node has ignored
            // or that has ignored the viewing node
            if (!avatarNode->getLinkedData()
                || avatarNode->getUUID() == node->getUUID()
                || (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
                || (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
                shouldIgnore = true;
            } else {
                // Check to see if the space bubble is enabled
                // Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
                if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
                    // Define the scale of the box for the current other node
                    glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f;

                    // Set up the bounding box for the current other node
                    AABox otherNodeBox(avatarNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);

                    // Clamp the size of the bounding box to a minimum scale
                    if (glm::any(glm::lessThan(otherNodeBoxScale, minBubbleSize))) {
                        otherNodeBox.setScaleStayCentered(minBubbleSize);
                    }

                    // Quadruple the scale of both bounding boxes
                    otherNodeBox.embiggen(4.0f);

                    // Perform the collision check between the two bounding boxes
                    if (nodeBox.touches(otherNodeBox)) {
                        nodeData->ignoreOther(node, avatarNode);
                        shouldIgnore = !getsAnyIgnored;
                    }
                }

                // Not close enough to ignore
                if (!shouldIgnore) {
                    nodeData->removeFromRadiusIgnoringSet(node, avatarNode->getUUID());
                }
            }
            quint64 endIgnoreCalculation = usecTimestampNow();
            _stats.ignoreCalculationElapsedTime += (endIgnoreCalculation - startIgnoreCalculation);

            if (!shouldIgnore) {
                AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
                AvatarDataSequenceNumber lastSeqFromSender = avatarNodeData->getLastReceivedSequenceNumber();

                // FIXME - This code does appear to be working. But it seems brittle.
                //         It supports determining if the frame of data for this "other"
                //         avatar has already been sent to the receiver. This has been
                //         verified to work on a desktop display that renders at 60hz and
                //         therefore sends to mixer at 30hz. Each second you'd expect to
                //         have 15 (45hz-30hz) duplicate frames. In this case, the stat
                //         avg_other_av_skips_per_second does report 15.
                //
                // make sure we haven't already sent this data from this sender to this receiver
                // or that somehow we haven't sent
                if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
                    ++numAvatarsHeldBack;
                    shouldIgnore = true;
                } else if (lastSeqFromSender - lastSeqToReceiver > 1) {
                    // this is a skip - we still send the packet but capture the presence of the skip so we see it happening
                    ++numAvatarsWithSkippedFrames;
                }
            }
            return shouldIgnore;
        });
    // loop through our sorted avatars and allocate our bandwidth to them accordingly
    int avatarRank = 0;

    // this is overly conservative, because it includes some avatars we might not consider
    int remainingAvatars = (int)sortedAvatars.size();
    while (!sortedAvatars.empty()) {
        AvatarPriority sortData = sortedAvatars.top();
        sortedAvatars.pop();
        const auto& avatarData = sortData.avatar;
        avatarRank++;
        remainingAvatars--;

        auto otherNode = avatarDataToNodes[avatarData];
        assert(otherNode); // we can't have gotten here without the avatarData being a valid key in the map

        // NOTE: Here's where we determine if we are over budget and drop to bare minimum data
        int minimRemainingAvatarBytes = minimumBytesPerAvatar * remainingAvatars;
        bool overBudget = (identityBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes) > maxAvatarBytesPerFrame;

        quint64 startAvatarDataPacking = usecTimestampNow();

        ++numOtherAvatars;

        const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());

        // If the time that the mixer sent AVATAR DATA about Avatar B to Avatar A is BEFORE OR EQUAL TO
        // the time that Avatar B flagged an IDENTITY DATA change, send IDENTITY DATA about Avatar B to Avatar A.
        if (nodeData->getLastBroadcastTime(otherNode->getUUID()) <= otherNodeData->getIdentityChangeTimestamp()) {
            identityBytesSent += sendIdentityPacket(otherNodeData, node);
        }

        const AvatarData* otherAvatar = otherNodeData->getConstAvatarData();
        glm::vec3 otherPosition = otherAvatar->getClientGlobalPosition();

        // determine if avatar is in view, to determine how much data to include...
        glm::vec3 otherNodeBoxScale = (otherPosition - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
        AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
        bool isInView = nodeData->otherAvatarInView(otherNodeBox);

        // start a new segment in the PacketList for this avatar
        avatarPacketList->startSegment();

        AvatarData::AvatarDataDetail detail;

        if (overBudget) {
            overBudgetAvatars++;
            _stats.overBudgetAvatars++;
            detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
        } else if (!isInView) {
            detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
            nodeData->incrementAvatarOutOfView();
        } else {
            detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
                ? AvatarData::SendAllData : AvatarData::CullSmallData;
            nodeData->incrementAvatarInView();
        }

        bool includeThisAvatar = true;
        auto lastEncodeForOther = nodeData->getLastOtherAvatarEncodeTime(otherNode->getUUID());
        QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());
        bool distanceAdjust = true;
        glm::vec3 viewerPosition = myPosition;
        AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
        bool dropFaceTracking = false;

        quint64 start = usecTimestampNow();
        QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
            hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
        quint64 end = usecTimestampNow();
        _stats.toByteArrayElapsedTime += (end - start);
        static const int MAX_ALLOWED_AVATAR_DATA = (1400 - NUM_BYTES_RFC4122_UUID);
        if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
            qCWarning(avatars) << "otherAvatar.toByteArray() resulted in very large buffer:" << bytes.size() << "... attempt to drop facial data";

            dropFaceTracking = true; // first try dropping the facial data
            bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
                hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);

            if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
                qCWarning(avatars) << "otherAvatar.toByteArray() without facial data resulted in very large buffer:" << bytes.size() << "... reduce to MinimumData";
                bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
                    hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
            }

            if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
                qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
                includeThisAvatar = false;
            }
        }

        if (includeThisAvatar) {
            numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
            numAvatarDataBytes += avatarPacketList->write(bytes);

            if (detail != AvatarData::NoData) {
                _stats.numOthersIncluded++;

                // increment the number of avatars sent to this receiver
                nodeData->incrementNumAvatarsSentLastFrame();

                // set the last sent sequence number for this sender on the receiver
                nodeData->setLastBroadcastSequenceNumber(otherNode->getUUID(),
                    otherNodeData->getLastReceivedSequenceNumber());

                // remember the last time we sent details about this other node to the receiver
                nodeData->setLastBroadcastTime(otherNode->getUUID(), start);
            }
        }

        avatarPacketList->endSegment();

        quint64 endAvatarDataPacking = usecTimestampNow();
        _stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
    };

    quint64 startPacketSending = usecTimestampNow();
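
Two details of this hunk reward a closer look. First, the held-back/skipped-frame test in the shouldIgnore lambda. The sketch below restates just that comparison outside the mixer; classifyFrame and FrameDecision are hypothetical names, and the 16-bit sequence type is an assumption, but the two comparisons mirror the hunk.

// Minimal sketch of the held-back / skipped-frame test above. With a sender
// updating at ~30hz and the mixer broadcasting at ~45hz, roughly 15 frames
// per second arrive with an unchanged sequence number and are held back
// rather than re-sent; a gap larger than 1 means the receiver missed frames.
#include <cstdint>
#include <iostream>

using AvatarDataSequenceNumber = uint16_t; // assumed 16-bit, as in AvatarData

struct FrameDecision {
    bool holdBack = false; // this frame was already forwarded to the receiver
    bool skipped = false;  // the receiver missed at least one frame in between
};

FrameDecision classifyFrame(AvatarDataSequenceNumber lastSeqToReceiver,
                            AvatarDataSequenceNumber lastSeqFromSender) {
    FrameDecision decision;
    if (lastSeqToReceiver == lastSeqFromSender && lastSeqToReceiver != 0) {
        decision.holdBack = true; // duplicate frame: suppress the re-send
    } else if (lastSeqFromSender - lastSeqToReceiver > 1) {
        decision.skipped = true;  // gap: still send, but record the skip
    }
    return decision;
}

int main() {
    std::cout << classifyFrame(41, 41).holdBack << "\n"; // 1 -- held back
    std::cout << classifyFrame(40, 42).skipped << "\n";  // 1 -- one frame skipped
}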

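Second, the packet-size ladder. MAX_ALLOWED_AVATAR_DATA is 1400 bytes, presumably a conservative single-packet budget below the typical 1500-byte MTU, minus NUM_BYTES_RFC4122_UUID for the 16-byte node UUID written ahead of each payload. The sketch below restates the ladder with a hypothetical encode() stand-in for AvatarData::toByteArray(); the three fallback steps match the hunk, the scaffolding around them does not.

// Sketch of the payload-degradation ladder above. encode() is a hypothetical
// stand-in for the real serializer; the fallback order -- full detail, then
// drop face tracking, then MinimumData, then exclude the avatar -- is the
// same as in the hunk.
#include <QByteArray>
#include <functional>

static const int NUM_BYTES_RFC4122_UUID = 16;
static const int MAX_ALLOWED_AVATAR_DATA = 1400 - NUM_BYTES_RFC4122_UUID;

enum class Detail { Minimum, Full };
using Encoder = std::function<QByteArray(Detail, bool dropFaceTracking)>;

QByteArray packAvatar(const Encoder& encode, Detail detail, bool& includeThisAvatar) {
    QByteArray bytes = encode(detail, false);
    if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
        bytes = encode(detail, true);          // 1) retry without facial data
    }
    if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
        bytes = encode(Detail::Minimum, true); // 2) retry at minimum detail
    }
    if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
        includeThisAvatar = false;             // 3) skip this avatar this frame
    }
    return bytes;
}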
AvatarMixerSlavePool.cpp

@@ -76,8 +76,8 @@ void AvatarMixerSlavePool::processIncomingPackets(ConstIter begin, ConstIter end
}

void AvatarMixerSlavePool::broadcastAvatarData(ConstIter begin, ConstIter end,
                                               p_high_resolution_clock::time_point lastFrameTimestamp,
                                               float maxKbpsPerNode, float throttlingRatio) {
    _function = &AvatarMixerSlave::broadcastAvatarData;
    _configure = [&](AvatarMixerSlave& slave) {
        slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio);
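
For context on this second file: broadcastAvatarData only records what each worker should do for the frame. _function points at the slave member function to run, and _configure is the closure that primes each slave with this frame's parameters before the workers run. A simplified sketch of that dispatch pattern follows; Slave and SlavePool are hypothetical stand-ins, and the real AvatarMixerSlavePool additionally hands slaves to worker threads and synchronizes on frame boundaries.

// Simplified sketch of the slave-pool dispatch pattern: store a
// pointer-to-member (_function) plus a per-frame configuration closure
// (_configure), then apply both to every slave.
#include <functional>
#include <vector>

struct Slave {
    int begin = 0, end = 0;
    void configureBroadcast(int b, int e) { begin = b; end = e; }
    void broadcastAvatarData() { /* per-node broadcast work happens here */ }
};

class SlavePool {
public:
    explicit SlavePool(int numSlaves) : _slaves(numSlaves) {}

    void broadcastAvatarData(int begin, int end) {
        _function = &Slave::broadcastAvatarData;
        _configure = [=](Slave& slave) { slave.configureBroadcast(begin, end); };
        run();
    }

private:
    void run() {
        for (auto& slave : _slaves) {
            _configure(slave);    // prime the slave with this frame's parameters
            (slave.*_function)(); // dispatch the stored member function
        }
    }

    std::vector<Slave> _slaves;
    void (Slave::*_function)() = nullptr;
    std::function<void(Slave&)> _configure;
};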