Merge pull request #9208 from ZappoMan/addViewFrustumToAvatarMixer

Optimize avatar-mixer bandwidth for out-of-view avatars
Brad Hefta-Gaub 2016-12-21 10:22:56 -08:00 committed by GitHub
commit 08cfd8a40e
14 changed files with 394 additions and 195 deletions
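In short: each interested client now sends its view frustum to the avatar mixer, and the mixer drops avatars whose bounding box misses that frustum's keyhole down to a position-only "minimum" update. Below is a small illustrative sketch (stand-in types, not the hifi classes) of the per-avatar detail selection this PR adds to AvatarMixer::broadcastAvatarData(); the 2% figure is taken from the existing comment in AvatarData::sendAvatarDataPacket(), and the real constant lives in the avatar library.

// Sketch of the detail-selection logic added in broadcastAvatarData(), with stand-in types.
// Out-of-view avatars get MinimumData (global position only); in-view avatars get the
// usual mix of occasional full updates and delta updates.
#include <random>

enum AvatarDataDetail { MinimumData, CullSmallData, IncludeSmallData, SendAllData };

// ~2% of in-view updates are full updates, per the comment in sendAvatarDataPacket();
// the actual constant is defined elsewhere in the codebase.
constexpr float AVATAR_SEND_FULL_UPDATE_RATIO = 0.02f;

AvatarDataDetail chooseDetail(bool otherAvatarInView, std::mt19937& generator) {
    if (!otherAvatarInView) {
        return MinimumData;                     // a handful of bytes instead of full joint data
    }
    std::uniform_real_distribution<float> distribution(0.0f, 1.0f);
    return distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
        ? SendAllData                           // occasional full refresh guards against lost packets
        : IncludeSmallData;                     // normal update, small changes included
}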

View file: Agent.cpp

@ -499,7 +499,8 @@ void Agent::processAgentAvatar() {
if (!_scriptEngine->isFinished() && _isAvatar) {
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
QByteArray avatarByteArray = scriptedAvatar->toByteArray(true, randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
QByteArray avatarByteArray = scriptedAvatar->toByteArray((randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO)
? AvatarData::SendAllData : AvatarData::CullSmallData);
scriptedAvatar->doneEncoding(true);
static AvatarDataSequenceNumber sequenceNumber = 0;

View file: AvatarMixer.cpp

@ -44,6 +44,7 @@ AvatarMixer::AvatarMixer(ReceivedMessage& message) :
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &AvatarMixer::nodeKilled);
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListener(PacketType::ViewFrustum, this, "handleViewFrustumPacket");
packetReceiver.registerListener(PacketType::AvatarData, this, "handleAvatarDataPacket");
packetReceiver.registerListener(PacketType::AvatarIdentity, this, "handleAvatarIdentityPacket");
packetReceiver.registerListener(PacketType::KillAvatar, this, "handleKillAvatarPacket");
@ -85,6 +86,8 @@ void AvatarMixer::sendIdentityPacket(AvatarMixerClientData* nodeData, const Shar
// 1) use the view frustum to cull those avatars that are out of view, since avatar data doesn't need to be present
// if the avatar is not in view or in the keyhole.
void AvatarMixer::broadcastAvatarData() {
_broadcastRate.increment();
int idleTime = AVATAR_DATA_SEND_INTERVAL_MSECS;
if (_lastFrameTimestamp.time_since_epoch().count() > 0) {
@ -173,6 +176,7 @@ void AvatarMixer::broadcastAvatarData() {
return;
}
++_sumListeners;
nodeData->resetInViewStats();
AvatarData& avatar = nodeData->getAvatar();
glm::vec3 myPosition = avatar.getClientGlobalPosition();
@ -379,9 +383,22 @@ void AvatarMixer::broadcastAvatarData() {
// start a new segment in the PacketList for this avatar
avatarPacketList->startSegment();
// determine if avatar is in view, to determine how much data to include...
glm::vec3 otherNodeBoxScale = (otherNodeData->getPosition() - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f;
AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
AvatarData::AvatarDataDetail detail;
if (!nodeData->otherAvatarInView(otherNodeBox)) {
detail = AvatarData::MinimumData;
nodeData->incrementAvatarOutOfView();
} else {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
? AvatarData::SendAllData : AvatarData::IncludeSmallData;
nodeData->incrementAvatarInView();
}
numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
numAvatarDataBytes +=
avatarPacketList->write(otherAvatar.toByteArray(false, distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO));
numAvatarDataBytes += avatarPacketList->write(otherAvatar.toByteArray(detail));
avatarPacketList->endSegment();
});
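The box fed to the in-view test is rebuilt from two values the avatar already reports: its global position and the minimum corner of its global bounding box. Treating the position as the box center, the extent is twice the corner-to-position offset, which is exactly the math above. A glm-only sketch of that reconstruction (AABox itself is a hifi class, so a stand-in struct is used here):

// Rebuild the other avatar's bounding box from its reported position and
// minimum bounding-box corner, mirroring the math in broadcastAvatarData().
#include <glm/glm.hpp>

struct SimpleBox {
    glm::vec3 corner;   // minimum corner
    glm::vec3 scale;    // extent along each axis
};

SimpleBox makeAvatarBox(const glm::vec3& globalPosition, const glm::vec3& globalBoundingBoxCorner) {
    SimpleBox box;
    box.corner = globalBoundingBoxCorner;
    // the position is treated as the box center, so the full extent is twice the corner-to-center offset
    box.scale = (globalPosition - globalBoundingBoxCorner) * 2.0f;
    return box;
}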
@ -410,6 +427,9 @@ void AvatarMixer::broadcastAvatarData() {
// We're done encoding this version of the otherAvatars. Update their "lastSent" joint-states so
// that we can notice differences, next time around.
//
// FIXME - this seems suspicious, the code seems to consider all avatars, but not all avatars will
// have had their joints sent, so actually we should consider the time since they actually were sent????
nodeList->eachMatchingNode(
[&](const SharedNodePointer& otherNode)->bool {
if (!otherNode->getLinkedData()) {
@ -434,6 +454,18 @@ void AvatarMixer::broadcastAvatarData() {
});
_lastFrameTimestamp = p_high_resolution_clock::now();
#ifdef WANT_DEBUG
auto sinceLastDebug = p_high_resolution_clock::now() - _lastDebugMessage;
auto sinceLastDebugUsecs = std::chrono::duration_cast<std::chrono::microseconds>(sinceLastDebug).count();
quint64 DEBUG_INTERVAL = USECS_PER_SECOND * 5;
if (sinceLastDebugUsecs > DEBUG_INTERVAL) {
qDebug() << "broadcast rate:" << _broadcastRate.rate() << "hz";
_lastDebugMessage = p_high_resolution_clock::now();
}
#endif
}
void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
@ -483,6 +515,18 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
}
}
void AvatarMixer::handleViewFrustumPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->getOrCreateLinkedData(senderNode);
if (senderNode->getLinkedData()) {
AvatarMixerClientData* nodeData = dynamic_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
if (nodeData != nullptr) {
nodeData->readViewFrustumPacket(message->getMessage());
}
}
}
void AvatarMixer::handleAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->updateNodeWithDataFromPacket(message, senderNode);
@ -529,6 +573,7 @@ void AvatarMixer::sendStatsPacket() {
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100;
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
statsObject["broadcast_loop_rate"] = _broadcastRate.rate();
QJsonObject avatarsObject;
@ -581,6 +626,7 @@ void AvatarMixer::run() {
// setup the timer that will be fired on the broadcast thread
_broadcastTimer = new QTimer;
_broadcastTimer->setTimerType(Qt::PreciseTimer);
_broadcastTimer->setInterval(AVATAR_DATA_SEND_INTERVAL_MSECS);
_broadcastTimer->moveToThread(&_broadcastThread);

View file: AvatarMixer.h

@ -15,6 +15,7 @@
#ifndef hifi_AvatarMixer_h
#define hifi_AvatarMixer_h
#include <shared/RateCounter.h>
#include <PortableHighResolutionClock.h>
#include <ThreadedAssignment.h>
@ -35,6 +36,7 @@ public slots:
void sendStatsPacket() override;
private slots:
void handleViewFrustumPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message);
@ -67,6 +69,8 @@ private:
QTimer* _broadcastTimer = nullptr;
RateCounter<> _broadcastRate;
p_high_resolution_clock::time_point _lastDebugMessage;
QHash<QString, QPair<int, int>> _sessionDisplayNames;
};

View file: AvatarMixerClientData.cpp

@ -57,6 +57,14 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
}
}
void AvatarMixerClientData::readViewFrustumPacket(const QByteArray& message) {
_currentViewFrustum.fromByteArray(message);
}
bool AvatarMixerClientData::otherAvatarInView(const AABox& otherAvatarBox) {
return _currentViewFrustum.boxIntersectsKeyhole(otherAvatarBox);
}
void AvatarMixerClientData::loadJSONStats(QJsonObject& jsonObject) const {
jsonObject["display_name"] = _avatar->getDisplayName();
jsonObject["full_rate_distance"] = _fullRateDistance;
@ -70,4 +78,6 @@ void AvatarMixerClientData::loadJSONStats(QJsonObject& jsonObject) const {
jsonObject[INBOUND_AVATAR_DATA_STATS_KEY] = _avatar->getAverageBytesReceivedPerSecond() / (float) BYTES_PER_KILOBIT;
jsonObject["av_data_receive_rate"] = _avatar->getReceiveRate();
jsonObject["recent_other_av_in_view"] = _recentOtherAvatarsInView;
jsonObject["recent_other_av_out_of_view"] = _recentOtherAvatarsOutOfView;
}
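otherAvatarInView() delegates to ViewFrustum::boxIntersectsKeyhole(), where the "keyhole" is, conceptually, the view frustum plus a sphere of the frustum's center radius around the camera, so avatars standing right behind the listener still count as in view. A standalone sketch of just the sphere-versus-box half of that idea (the frustum half is the usual plane test, and the real check is the hifi method, not this model):

// Conceptual model of the keyhole's sphere test: the avatar box intersects the
// sphere if the closest point on the box to the camera is within the radius.
#include <glm/glm.hpp>

bool sphereIntersectsBox(const glm::vec3& sphereCenter, float radius,
                         const glm::vec3& boxCorner, const glm::vec3& boxScale) {
    glm::vec3 boxMax = boxCorner + boxScale;
    glm::vec3 closest = glm::clamp(sphereCenter, boxCorner, boxMax);  // clamp the center into the box
    glm::vec3 offset = sphereCenter - closest;
    return glm::dot(offset, offset) <= radius * radius;
}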

View file: AvatarMixerClientData.h

@ -27,6 +27,7 @@
#include <PortableHighResolutionClock.h>
#include <SimpleMovingAverage.h>
#include <UUIDHasher.h>
#include <ViewFrustum.h>
const QString OUTBOUND_AVATAR_DATA_STATS_KEY = "outbound_av_data_kbps";
const QString INBOUND_AVATAR_DATA_STATS_KEY = "inbound_av_data_kbps";
@ -34,7 +35,7 @@ const QString INBOUND_AVATAR_DATA_STATS_KEY = "inbound_av_data_kbps";
class AvatarMixerClientData : public NodeData {
Q_OBJECT
public:
AvatarMixerClientData(const QUuid& nodeID = QUuid()) : NodeData(nodeID) {}
AvatarMixerClientData(const QUuid& nodeID = QUuid()) : NodeData(nodeID) { _currentViewFrustum.invalidate(); }
virtual ~AvatarMixerClientData() {}
using HRCTime = p_high_resolution_clock::time_point;
@ -91,6 +92,13 @@ public:
void removeFromRadiusIgnoringSet(const QUuid& other) { _radiusIgnoredOthers.erase(other); }
void ignoreOther(SharedNodePointer self, SharedNodePointer other);
void readViewFrustumPacket(const QByteArray& message);
bool otherAvatarInView(const AABox& otherAvatarBox);
void resetInViewStats() { _recentOtherAvatarsInView = _recentOtherAvatarsOutOfView = 0; }
void incrementAvatarInView() { _recentOtherAvatarsInView++; }
void incrementAvatarOutOfView() { _recentOtherAvatarsOutOfView++; }
const QString& getBaseDisplayName() { return _baseDisplayName; }
void setBaseDisplayName(const QString& baseDisplayName) { _baseDisplayName = baseDisplayName; }
@ -116,7 +124,10 @@ private:
SimpleMovingAverage _avgOtherAvatarDataRate;
std::unordered_set<QUuid> _radiusIgnoredOthers;
ViewFrustum _currentViewFrustum;
int _recentOtherAvatarsInView { 0 };
int _recentOtherAvatarsOutOfView { 0 };
QString _baseDisplayName{}; // The sanitized key used in determining the unique sessionDisplayName, so that we can remove it from the dictionary.
};

View file: Application.cpp

@ -4357,6 +4357,7 @@ void Application::update(float deltaTime) {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
queryOctree(NodeType::EntityServer, PacketType::EntityQuery, _entityServerJurisdictions);
}
sendAvatarViewFrustum();
_lastQueriedViewFrustum = _viewFrustum;
}
}
@ -4396,6 +4397,14 @@ void Application::update(float deltaTime) {
AnimDebugDraw::getInstance().update();
}
void Application::sendAvatarViewFrustum() {
QByteArray viewFrustumByteArray = _viewFrustum.toByteArray();
auto avatarPacket = NLPacket::create(PacketType::ViewFrustum, viewFrustumByteArray.size());
avatarPacket->write(viewFrustumByteArray);
DependencyManager::get<NodeList>()->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
}
int Application::sendNackPackets() {

View file: Application.h

@ -444,6 +444,7 @@ private:
void renderRearViewMirror(RenderArgs* renderArgs, const QRect& region, bool isZoomed);
int sendNackPackets();
void sendAvatarViewFrustum();
std::shared_ptr<MyAvatar> getMyAvatar() const;

View file: MyAvatar.cpp

@ -226,7 +226,7 @@ void MyAvatar::simulateAttachments(float deltaTime) {
// don't update attachments here, do it in harvestResultsFromPhysicsSimulation()
}
QByteArray MyAvatar::toByteArray(bool cullSmallChanges, bool sendAll) {
QByteArray MyAvatar::toByteArray(AvatarDataDetail dataDetail) {
CameraMode mode = qApp->getCamera()->getMode();
_globalPosition = getPosition();
_globalBoundingBoxCorner.x = _characterController.getCapsuleRadius();
@ -237,12 +237,12 @@ QByteArray MyAvatar::toByteArray(bool cullSmallChanges, bool sendAll) {
// fake the avatar position that is sent up to the AvatarMixer
glm::vec3 oldPosition = getPosition();
setPosition(getSkeletonPosition());
QByteArray array = AvatarData::toByteArray(cullSmallChanges, sendAll);
QByteArray array = AvatarData::toByteArray(dataDetail);
// copy the correct position back
setPosition(oldPosition);
return array;
}
return AvatarData::toByteArray(cullSmallChanges, sendAll);
return AvatarData::toByteArray(dataDetail);
}
void MyAvatar::centerBody() {

View file: MyAvatar.h

@ -333,7 +333,7 @@ private:
glm::vec3 getWorldBodyPosition() const;
glm::quat getWorldBodyOrientation() const;
QByteArray toByteArray(bool cullSmallChanges, bool sendAll) override;
QByteArray toByteArray(AvatarDataDetail dataDetail) override;
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);
virtual void render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) override;

View file: AvatarData.cpp

@ -181,7 +181,11 @@ void AvatarData::setHandPosition(const glm::vec3& handPosition) {
_handPosition = glm::inverse(getOrientation()) * (handPosition - getPosition());
}
QByteArray AvatarData::toByteArray(bool cullSmallChanges, bool sendAll) {
QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail) {
bool cullSmallChanges = (dataDetail == CullSmallData);
bool sendAll = (dataDetail == SendAllData);
bool sendMinimum = (dataDetail == MinimumData);
// TODO: DRY this up to a shared method
// that can pack any type given the number of bytes
// and return the number of bytes to push the pointer
@ -199,6 +203,19 @@ QByteArray AvatarData::toByteArray(bool cullSmallChanges, bool sendAll) {
unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(avatarDataByteArray.data());
unsigned char* startPosition = destinationBuffer;
// Leading flags, to indicate how much data is actually included in the packet...
uint8_t packetStateFlags = 0;
if (sendMinimum) {
setAtBit(packetStateFlags, AVATARDATA_FLAGS_MINIMUM);
}
memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
destinationBuffer += sizeof(packetStateFlags);
if (sendMinimum) {
memcpy(destinationBuffer, &_globalPosition, sizeof(_globalPosition));
destinationBuffer += sizeof(_globalPosition);
} else {
auto header = reinterpret_cast<AvatarDataPacket::Header*>(destinationBuffer);
header->position[0] = getLocalPosition().x;
header->position[1] = getLocalPosition().y;
@ -400,6 +417,7 @@ QByteArray AvatarData::toByteArray(bool cullSmallChanges, bool sendAll) {
<< (destinationBuffer - startPosition);
}
#endif
}
return avatarDataByteArray.left(destinationBuffer - startPosition);
}
@ -474,9 +492,22 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
_headData = new HeadData(this);
}
uint8_t packetStateFlags = buffer.at(0);
bool minimumSent = oneAtBit(packetStateFlags, AVATARDATA_FLAGS_MINIMUM);
const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(buffer.data());
const unsigned char* endPosition = startPosition + buffer.size();
const unsigned char* sourceBuffer = startPosition;
const unsigned char* sourceBuffer = startPosition + sizeof(packetStateFlags); // skip the flags!!
// if this is the minimum, then it only includes the flags
if (minimumSent) {
memcpy(&_globalPosition, sourceBuffer, sizeof(_globalPosition));
sourceBuffer += sizeof(_globalPosition);
int numBytesRead = (sourceBuffer - startPosition);
_averageBytesReceived.updateAverage(numBytesRead);
return numBytesRead;
}
quint64 now = usecTimestampNow();
PACKET_READ_CHECK(Header, sizeof(AvatarDataPacket::Header));
@ -1181,9 +1212,9 @@ void AvatarData::sendAvatarDataPacket() {
// about 2% of the time, we send a full update (meaning, we transmit all the joint data), even if nothing has changed.
// this is to guard against a joint moving once, the packet getting lost, and the joint never moving again.
bool sendFullUpdate = randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO;
QByteArray avatarByteArray = toByteArray(true, sendFullUpdate);
doneEncoding(true);
QByteArray avatarByteArray = toByteArray((randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO) ? SendAllData : CullSmallData);
doneEncoding(true); // FIXME - doneEncoding() takes a bool for culling small changes, that's janky!
static AvatarDataSequenceNumber sequenceNumber = 0;
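With this change the avatar wire format leads with a one-byte flags field: when the minimum bit is set, only _globalPosition follows and parseDataFromBuffer() returns early after reading it. setAtBit() and oneAtBit() are existing hifi helpers; the stand-ins below are only meant to show the intended bit semantics and the resulting minimum-packet layout.

// Model of the minimum-detail packet: [flags byte][globalPosition: 3 floats].
#include <cstdint>
#include <cstring>
#include <vector>

const int AVATARDATA_FLAGS_MINIMUM = 0;  // bit index, matching AvatarData.h

inline void setAtBit(uint8_t& byte, int bitIndex) { byte |= (1 << bitIndex); }
inline bool oneAtBit(uint8_t byte, int bitIndex) { return (byte & (1 << bitIndex)) != 0; }

std::vector<uint8_t> packMinimum(const float globalPosition[3]) {
    std::vector<uint8_t> out(1 + 3 * sizeof(float));
    uint8_t flags = 0;
    setAtBit(flags, AVATARDATA_FLAGS_MINIMUM);   // mark this as a minimum (position-only) update
    out[0] = flags;
    std::memcpy(out.data() + 1, globalPosition, 3 * sizeof(float));
    return out;
}

bool isMinimum(const std::vector<uint8_t>& packet) {
    return !packet.empty() && oneAtBit(packet[0], AVATARDATA_FLAGS_MINIMUM);
}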

View file: AvatarData.h

@ -106,6 +106,11 @@ const char LEFT_HAND_POINTING_FLAG = 1;
const char RIGHT_HAND_POINTING_FLAG = 2;
const char IS_FINGER_POINTING_FLAG = 4;
// AvatarData state flags - we store the details about the packet encoding in the first byte,
// before the "header" structure
const char AVATARDATA_FLAGS_MINIMUM = 0;
static const float MAX_AVATAR_SCALE = 1000.0f;
static const float MIN_AVATAR_SCALE = .005f;
@ -204,7 +209,14 @@ public:
glm::vec3 getHandPosition() const;
void setHandPosition(const glm::vec3& handPosition);
virtual QByteArray toByteArray(bool cullSmallChanges, bool sendAll);
typedef enum {
MinimumData,
CullSmallData,
IncludeSmallData,
SendAllData
} AvatarDataDetail;
virtual QByteArray toByteArray(AvatarDataDetail dataDetail);
virtual void doneEncoding(bool cullSmallChanges);
/// \return true if an error should be logged

View file: PacketHeaders.h

@ -103,7 +103,8 @@ public:
RadiusIgnoreRequest,
UsernameFromIDRequest,
UsernameFromIDReply,
LAST_PACKET_TYPE = UsernameFromIDReply
ViewFrustum,
LAST_PACKET_TYPE = ViewFrustum
};
};

View file: ViewFrustum.cpp

@ -130,6 +130,75 @@ const char* ViewFrustum::debugPlaneName (int plane) const {
return "Unknown";
}
void ViewFrustum::fromByteArray(const QByteArray& input) {
// From the wire!
glm::vec3 cameraPosition;
glm::quat cameraOrientation;
float cameraCenterRadius;
float cameraFov;
float cameraAspectRatio;
float cameraNearClip;
float cameraFarClip;
const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(input.constData());
const unsigned char* sourceBuffer = startPosition;
// camera details
memcpy(&cameraPosition, sourceBuffer, sizeof(cameraPosition));
sourceBuffer += sizeof(cameraPosition);
sourceBuffer += unpackOrientationQuatFromBytes(sourceBuffer, cameraOrientation);
sourceBuffer += unpackFloatAngleFromTwoByte((uint16_t*)sourceBuffer, &cameraFov);
sourceBuffer += unpackFloatRatioFromTwoByte(sourceBuffer, cameraAspectRatio);
sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer, cameraNearClip);
sourceBuffer += unpackClipValueFromTwoByte(sourceBuffer, cameraFarClip);
memcpy(&cameraCenterRadius, sourceBuffer, sizeof(cameraCenterRadius));
sourceBuffer += sizeof(cameraCenterRadius);
setPosition(cameraPosition);
setOrientation(cameraOrientation);
setCenterRadius(cameraCenterRadius);
// Also make sure it's got the correct lens details from the camera
const float VIEW_FRUSTUM_FOV_OVERSEND = 60.0f;
float originalFOV = cameraFov;
float wideFOV = originalFOV + VIEW_FRUSTUM_FOV_OVERSEND;
if (0.0f != cameraAspectRatio &&
0.0f != cameraNearClip &&
0.0f != cameraFarClip &&
cameraNearClip != cameraFarClip) {
setProjection(glm::perspective(
glm::radians(wideFOV), // hack
cameraAspectRatio,
cameraNearClip,
cameraFarClip));
calculate();
}
}
QByteArray ViewFrustum::toByteArray() {
static const int LARGE_ENOUGH = 1024;
QByteArray viewFrustumDataByteArray(LARGE_ENOUGH, 0);
unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(viewFrustumDataByteArray.data());
unsigned char* startPosition = destinationBuffer;
// camera details
memcpy(destinationBuffer, &_position, sizeof(_position));
destinationBuffer += sizeof(_position);
destinationBuffer += packOrientationQuatToBytes(destinationBuffer, _orientation);
destinationBuffer += packFloatAngleToTwoByte(destinationBuffer, _fieldOfView);
destinationBuffer += packFloatRatioToTwoByte(destinationBuffer, _aspectRatio);
destinationBuffer += packClipValueToTwoByte(destinationBuffer, _nearClip);
destinationBuffer += packClipValueToTwoByte(destinationBuffer, _farClip);
memcpy(destinationBuffer, &_centerSphereRadius, sizeof(_centerSphereRadius));
destinationBuffer += sizeof(_centerSphereRadius);
return viewFrustumDataByteArray.left(destinationBuffer - startPosition);
}
ViewFrustum::intersection ViewFrustum::calculateCubeFrustumIntersection(const AACube& cube) const {
// only check against frustum
ViewFrustum::intersection result = INSIDE;
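A short usage sketch of the new serialization pair, assuming the hifi ViewFrustum and Qt headers are available: the client packs its current frustum and the mixer unpacks it into its per-node copy, recovering the pose, the (deliberately widened) lens values, and the center radius.

// Round-trip sketch using the methods added above; toByteArray() is non-const per the header.
#include <QByteArray>
#include <ViewFrustum.h>

void exampleRoundTrip(ViewFrustum& clientFrustum) {
    QByteArray wire = clientFrustum.toByteArray();   // client side, sent in a ViewFrustum packet

    ViewFrustum mixerSideCopy;
    mixerSideCopy.fromByteArray(wire);               // mixer side, stored in AvatarMixerClientData
}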

View file: ViewFrustum.h

@ -139,6 +139,10 @@ public:
const ::Plane* getPlanes() const { return _planes; }
void invalidate(); // causes all reasonable intersection tests to fail
QByteArray toByteArray();
void fromByteArray(const QByteArray& input);
private:
glm::mat4 _view;
glm::mat4 _projection;