Mirror of https://thingvellir.net/git/overte (synced 2025-03-27 23:52:03 +01:00)
Limit upstream data to avatar mixer

parent 6bd6c45b60
commit 8480624797

12 changed files with 113 additions and 95 deletions

@@ -505,16 +505,6 @@ void Agent::executeScript() {

    DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());

    // Agents should run at 45hz
    static const int AVATAR_DATA_HZ = 45;
    static const int AVATAR_DATA_IN_MSECS = MSECS_PER_SECOND / AVATAR_DATA_HZ;
    QTimer* avatarDataTimer = new QTimer(this);
    connect(avatarDataTimer, &QTimer::timeout, this, &Agent::processAgentAvatar);
    avatarDataTimer->setSingleShot(false);
    avatarDataTimer->setInterval(AVATAR_DATA_IN_MSECS);
    avatarDataTimer->setTimerType(Qt::PreciseTimer);
    avatarDataTimer->start();

    _scriptEngine->run();

    Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
@@ -528,8 +518,6 @@ void Agent::executeScript() {
        recordingInterface->stopRecording();
    }

    avatarDataTimer->stop();

    setIsAvatar(false); // will stop timers for sending identity packets
}
@@ -584,20 +572,16 @@ void Agent::setIsAvatar(bool isAvatar) {
    auto scriptableAvatar = DependencyManager::get<ScriptableAvatar>();
    if (_isAvatar) {
        if (!_avatarIdentityTimer) {
        if (!_avatarQueryTimer) {
            // set up the avatar timers
            _avatarIdentityTimer = new QTimer(this);
            _avatarQueryTimer = new QTimer(this);

            // connect our slot
            connect(_avatarIdentityTimer, &QTimer::timeout, this, &Agent::sendAvatarIdentityPacket);
            connect(_avatarQueryTimer, &QTimer::timeout, this, &Agent::queryAvatars);

            static const int AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS = 1000;
            static const int AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS = 1000;

            // start the timers
            _avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
            // start the timer
            _avatarQueryTimer->start(AVATAR_VIEW_PACKET_SEND_INTERVAL_MSECS);

            connect(_scriptEngine.data(), &ScriptEngine::update,
@@ -609,11 +593,7 @@ void Agent::setIsAvatar(bool isAvatar) {
        _entityEditSender.setMyAvatar(scriptableAvatar.data());
    } else {
        if (_avatarIdentityTimer) {
            _avatarIdentityTimer->stop();
            delete _avatarIdentityTimer;
            _avatarIdentityTimer = nullptr;

        if (_avatarQueryTimer) {
            _avatarQueryTimer->stop();
            delete _avatarQueryTimer;
            _avatarQueryTimer = nullptr;
@@ -646,14 +626,6 @@ void Agent::setIsAvatar(bool isAvatar) {
    }
}

void Agent::sendAvatarIdentityPacket() {
    if (_isAvatar) {
        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
        scriptedAvatar->markIdentityDataChanged();
        scriptedAvatar->sendIdentityPacket();
    }
}

void Agent::queryAvatars() {
    auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
@@ -681,44 +653,6 @@ void Agent::queryAvatars() {
        { NodeType::AvatarMixer });
}

void Agent::processAgentAvatar() {
    if (!_scriptEngine->isFinished() && _isAvatar) {
        auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();

        AvatarData::AvatarDataDetail dataDetail = (randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO) ? AvatarData::SendAllData : AvatarData::CullSmallData;
        QByteArray avatarByteArray = scriptedAvatar->toByteArrayStateful(dataDetail);

        int maximumByteArraySize = NLPacket::maxPayloadSize(PacketType::AvatarData) - sizeof(AvatarDataSequenceNumber);

        if (avatarByteArray.size() > maximumByteArraySize) {
            qWarning() << " scriptedAvatar->toByteArrayStateful() resulted in very large buffer:" << avatarByteArray.size() << "... attempt to drop facial data";
            avatarByteArray = scriptedAvatar->toByteArrayStateful(dataDetail, true);

            if (avatarByteArray.size() > maximumByteArraySize) {
                qWarning() << " scriptedAvatar->toByteArrayStateful() without facial data resulted in very large buffer:" << avatarByteArray.size() << "... reduce to MinimumData";
                avatarByteArray = scriptedAvatar->toByteArrayStateful(AvatarData::MinimumData, true);

                if (avatarByteArray.size() > maximumByteArraySize) {
                    qWarning() << " scriptedAvatar->toByteArrayStateful() MinimumData resulted in very large buffer:" << avatarByteArray.size() << "... FAIL!!";
                    return;
                }
            }
        }

        scriptedAvatar->doneEncoding(true);

        static AvatarDataSequenceNumber sequenceNumber = 0;
        auto avatarPacket = NLPacket::create(PacketType::AvatarData, avatarByteArray.size() + sizeof(sequenceNumber));
        avatarPacket->writePrimitive(sequenceNumber++);

        avatarPacket->write(avatarByteArray);

        auto nodeList = DependencyManager::get<NodeList>();

        nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
    }
}

void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
    _flushEncoder = false;
    static const QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);

@@ -81,7 +81,6 @@ private slots:
    void nodeActivated(SharedNodePointer activatedNode);
    void nodeKilled(SharedNodePointer killedNode);

    void processAgentAvatar();
    void processAgentAvatarAudio();

private:
@@ -99,7 +98,6 @@ private:

    void setAvatarSound(SharedSoundPointer avatarSound) { _avatarSound = avatarSound; }

    void sendAvatarIdentityPacket();
    void queryAvatars();

    QString _scriptContents;
@@ -110,7 +108,6 @@ private:
    bool _shouldMuteRecordingAudio { false };
    int _numAvatarSoundSentBytes = 0;
    bool _isAvatar = false;
    QTimer* _avatarIdentityTimer = nullptr;
    QTimer* _avatarQueryTimer = nullptr;
    QHash<QUuid, quint16> _outgoingScriptAudioSequenceNumbers;

@@ -91,6 +91,39 @@ void ScriptableAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
    updateJointMappings();
}

int ScriptableAvatar::sendAvatarDataPacket(bool sendAll) {
    using namespace std::chrono;
    auto now = Clock::now();

    int MAX_DATA_RATE_MBPS = 3;
    int maxDataRateBytesPerSeconds = MAX_DATA_RATE_MBPS * BYTES_PER_KILOBYTE * KILO_PER_MEGA / BITS_IN_BYTE;
    int maxDataRateBytesPerMilliseconds = maxDataRateBytesPerSeconds / MSECS_PER_SECOND;

    auto bytesSent = 0;

    if (now > _nextTraitsSendWindow) {
        if (getIdentityDataChanged()) {
            bytesSent += sendIdentityPacket();
        }

        bytesSent += _clientTraitsHandler->sendChangedTraitsToMixer();

        // Compute the next send window based on how much data we sent and what
        // data rate we're trying to max at.
        milliseconds timeUntilNextSend { bytesSent / maxDataRateBytesPerMilliseconds };
        _nextTraitsSendWindow += timeUntilNextSend;

        // Don't let the next send window lag behind if we're not sending a lot of data.
        if (_nextTraitsSendWindow < now) {
            _nextTraitsSendWindow = now;
        }
    }

    bytesSent += AvatarData::sendAvatarDataPacket(sendAll);

    return bytesSent;
}

static AnimPose composeAnimPose(const HFMJoint& joint, const glm::quat rotation, const glm::vec3 translation) {
    glm::mat4 translationMat = glm::translate(translation);
    glm::mat4 rotationMat = glm::mat4_cast(joint.preRotation * rotation * joint.postRotation);
@@ -161,7 +194,13 @@ void ScriptableAvatar::update(float deltatime) {
        }
    }

    _clientTraitsHandler->sendChangedTraitsToMixer();
    quint64 now = usecTimestampNow();
    quint64 dt = now - _lastSendAvatarDataTime;

    if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS) {
        sendAvatarDataPacket();
        _lastSendAvatarDataTime = now;
    }
}

void ScriptableAvatar::updateJointMappings() {

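The identity/trait throttle above boils down to a simple byte budget. Here is a minimal, self-contained sketch of the same arithmetic; the constant values are assumptions for illustration (BYTES_PER_KILOBYTE = 1000, KILO_PER_MEGA = 1000, BITS_IN_BYTE = 8 are not shown in this diff), so 3 Mbps works out to 375,000 bytes per second, or 375 bytes per millisecond, and a 750-byte send defers the next trait-send window by 2 ms.

// Hypothetical standalone sketch of the send-window math used above.
#include <chrono>
#include <iostream>

int main() {
    using namespace std::chrono;
    using Clock = std::chrono::system_clock;   // the diff uses system_clock for its TimePoint

    const int MAX_DATA_RATE_MBPS = 3;
    const int maxDataRateBytesPerSecond = MAX_DATA_RATE_MBPS * 1000 * 1000 / 8;   // 375000
    const int maxDataRateBytesPerMillisecond = maxDataRateBytesPerSecond / 1000;  // 375

    Clock::time_point nextTraitsSendWindow = Clock::now();

    // Pretend one update tick just sent 750 bytes of identity + trait data.
    int bytesSent = 750;
    milliseconds timeUntilNextSend { bytesSent / maxDataRateBytesPerMillisecond }; // 2 ms
    nextTraitsSendWindow += timeUntilNextSend;

    std::cout << "budget: " << maxDataRateBytesPerMillisecond << " bytes/ms, "
              << "defer next traits send by " << timeUntilNextSend.count() << " ms\n";
}
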
@@ -123,6 +123,10 @@

class ScriptableAvatar : public AvatarData, public Dependency {
    Q_OBJECT

    using Clock = std::chrono::system_clock;
    using TimePoint = Clock::time_point;

public:

    ScriptableAvatar();
@@ -177,6 +181,8 @@ public:

    virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;

    int sendAvatarDataPacket(bool sendAll = false) override;

    virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;

    void setHasProceduralBlinkFaceMovement(bool hasProceduralBlinkFaceMovement);
@@ -228,6 +234,10 @@ private:

    /// Loads the joint indices, names from the FST file (if any)
    void updateJointMappings();

    quint64 _lastSendAvatarDataTime { 0 };

    TimePoint _nextTraitsSendWindow;
};

#endif // hifi_ScriptableAvatar_h

@@ -48,8 +48,6 @@
// 50 times per second - target is 45hz, but this helps account for any small deviations
// in the update loop - this also results in ~30hz when in desktop mode which is essentially
// what we want
const int CLIENT_TO_AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 50;
static const quint64 MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS = USECS_PER_SECOND / CLIENT_TO_AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;

// We add _myAvatar into the hash with all the other AvatarData, and we use the default NULL QUid as the key.
const QUuid MY_AVATAR_KEY; // NULL key

@@ -668,12 +668,6 @@ void MyAvatar::update(float deltaTime) {
        Q_ARG(glm::vec3, (getWorldPosition() - halfBoundingBoxDimensions)),
        Q_ARG(glm::vec3, (halfBoundingBoxDimensions*2.0f)));

    if (getIdentityDataChanged()) {
        sendIdentityPacket();
    }

    _clientTraitsHandler->sendChangedTraitsToMixer();

    simulate(deltaTime, true);

    currentEnergy += energyChargeRate;
@@ -3106,6 +3100,39 @@ void MyAvatar::preDisplaySide(const RenderArgs* renderArgs) {
    _prevShouldDrawHead = shouldDrawHead;
}

int MyAvatar::sendAvatarDataPacket(bool sendAll) {
    using namespace std::chrono;
    auto now = Clock::now();

    int MAX_DATA_RATE_MBPS = 3;
    int maxDataRateBytesPerSeconds = MAX_DATA_RATE_MBPS * BYTES_PER_KILOBYTE * KILO_PER_MEGA / BITS_IN_BYTE;
    int maxDataRateBytesPerMilliseconds = maxDataRateBytesPerSeconds / MSECS_PER_SECOND;

    auto bytesSent = 0;

    if (now > _nextTraitsSendWindow) {
        if (getIdentityDataChanged()) {
            bytesSent += sendIdentityPacket();
        }

        bytesSent += _clientTraitsHandler->sendChangedTraitsToMixer();

        // Compute the next send window based on how much data we sent and what
        // data rate we're trying to max at.
        milliseconds timeUntilNextSend { bytesSent / maxDataRateBytesPerMilliseconds };
        _nextTraitsSendWindow += timeUntilNextSend;

        // Don't let the next send window lag behind if we're not sending a lot of data.
        if (_nextTraitsSendWindow < now) {
            _nextTraitsSendWindow = now;
        }
    }

    bytesSent += Avatar::sendAvatarDataPacket(sendAll);

    return bytesSent;
}

const float RENDER_HEAD_CUTOFF_DISTANCE = 0.47f;

bool MyAvatar::cameraInsideHead(const glm::vec3& cameraPosition) const {

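MyAvatar::sendAvatarDataPacket duplicates the send-window bookkeeping from ScriptableAvatar above. One design point worth noting is that both use std::chrono::system_clock for the window, so a wall-clock adjustment can move it. Purely as an illustrative alternative (not what this commit does), the same bookkeeping expressed against the monotonic std::chrono::steady_clock could look like:

// Illustrative only: the diff's send-window logic factored onto a monotonic clock.
#include <chrono>

struct TraitSendWindow {
    using Clock = std::chrono::steady_clock;   // monotonic; immune to wall-clock changes
    Clock::time_point next = Clock::now();

    // True when enough time has passed to send identity/trait data again.
    bool open() const { return Clock::now() > next; }

    // After sending, push the window forward in proportion to the bytes sent.
    void consume(int bytesSent, int bytesPerMillisecond) {
        auto now = Clock::now();
        next += std::chrono::milliseconds(bytesSent / bytesPerMillisecond);
        if (next < now) {
            next = now;   // don't let the window lag behind when sends are small
        }
    }
}

A caller would mirror the diff: if (window.open()) { send identity and traits, accumulate bytesSent, then window.consume(bytesSent, 375); }.
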
@@ -253,6 +253,9 @@ class MyAvatar : public Avatar {
    const QString DOMINANT_LEFT_HAND = "left";
    const QString DOMINANT_RIGHT_HAND = "right";

    using Clock = std::chrono::system_clock;
    using TimePoint = Clock::time_point;

public:
    enum DriveKeys {
        TRANSLATE_X = 0,
@@ -1211,6 +1214,7 @@ public:
    void setAvatarEntityData(const AvatarEntityMap& avatarEntityData) override;
    void updateAvatarEntity(const QUuid& entityID, const QByteArray& entityData) override;
    void avatarEntityDataToJson(QJsonObject& root) const override;
    int sendAvatarDataPacket(bool sendAll = false) override;

public slots:
@@ -1935,6 +1939,8 @@ private:
    bool _skeletonModelLoaded { false };
    bool _reloadAvatarEntityDataFromSettings { true };

    TimePoint _nextTraitsSendWindow;

    Setting::Handle<QString> _dominantHandSetting;
    Setting::Handle<float> _headPitchSetting;
    Setting::Handle<float> _scaleSetting;

@@ -2157,7 +2157,7 @@ void AvatarData::detachAll(const QString& modelURL, const QString& jointName) {
    setAttachmentData(attachmentData);
}

void AvatarData::sendAvatarDataPacket(bool sendAll) {
int AvatarData::sendAvatarDataPacket(bool sendAll) {
    auto nodeList = DependencyManager::get<NodeList>();

    // about 2% of the time, we send a full update (meaning, we transmit all the joint data), even if nothing has changed.
@@ -2170,16 +2170,14 @@ void AvatarData::sendAvatarDataPacket(bool sendAll) {
    int maximumByteArraySize = NLPacket::maxPayloadSize(PacketType::AvatarData) - sizeof(AvatarDataSequenceNumber);

    if (avatarByteArray.size() > maximumByteArraySize) {
        qCWarning(avatars) << "toByteArrayStateful() resulted in very large buffer:" << avatarByteArray.size() << "... attempt to drop facial data";
        avatarByteArray = toByteArrayStateful(dataDetail, true);

        if (avatarByteArray.size() > maximumByteArraySize) {
            qCWarning(avatars) << "toByteArrayStateful() without facial data resulted in very large buffer:" << avatarByteArray.size() << "... reduce to MinimumData";
            avatarByteArray = toByteArrayStateful(MinimumData, true);

            if (avatarByteArray.size() > maximumByteArraySize) {
                qCWarning(avatars) << "toByteArrayStateful() MinimumData resulted in very large buffer:" << avatarByteArray.size() << "... FAIL!!";
                return;
                return 0;
            }
        }
    }
@@ -2191,18 +2189,20 @@ void AvatarData::sendAvatarDataPacket(bool sendAll) {
    auto avatarPacket = NLPacket::create(PacketType::AvatarData, avatarByteArray.size() + sizeof(sequenceNumber));
    avatarPacket->writePrimitive(sequenceNumber++);
    avatarPacket->write(avatarByteArray);
    auto packetSize = avatarPacket->getWireSize();

    nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);

    return packetSize;
}

void AvatarData::sendIdentityPacket() {
int AvatarData::sendIdentityPacket() {
    auto nodeList = DependencyManager::get<NodeList>();

    if (_identityDataChanged) {
        // if the identity data has changed, push the sequence number forwards
        ++_identitySequenceNumber;
    }

    QByteArray identityData = identityByteArray();

    auto packetList = NLPacketList::create(PacketType::AvatarIdentity, QByteArray(), true, true);
@@ -2216,6 +2216,7 @@ void AvatarData::sendIdentityPacket() {
    });

    _identityDataChanged = false;
    return identityData.size();
}

static const QString JSON_ATTACHMENT_URL = QStringLiteral("modelUrl");

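With sendAvatarDataPacket, sendIdentityPacket, and sendChangedTraitsToMixer now returning the number of bytes they put on the wire, callers can account for their own upstream traffic. A hypothetical sketch of how those return values could feed a simple per-second counter; the class and names here are illustrative, not part of this change:

// Hypothetical sketch: accumulating the byte counts now returned by the send methods.
#include <chrono>
#include <cstdio>

class UpstreamByteCounter {
public:
    void add(int bytes) {
        _bytesThisWindow += bytes;
        auto now = std::chrono::steady_clock::now();
        if (now - _windowStart >= std::chrono::seconds(1)) {
            std::printf("upstream to avatar mixer: %d bytes/s\n", _bytesThisWindow);
            _bytesThisWindow = 0;
            _windowStart = now;
        }
    }

private:
    int _bytesThisWindow { 0 };
    std::chrono::steady_clock::time_point _windowStart { std::chrono::steady_clock::now() };
};

// Usage inside an update loop might look like:
//     int bytesSent = 0;
//     bytesSent += sendIdentityPacket();                              // returns identityData.size()
//     bytesSent += _clientTraitsHandler->sendChangedTraitsToMixer();  // returns packed trait bytes
//     bytesSent += sendAvatarDataPacket();                            // returns the packet wire size
//     counter.add(bytesSent);
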
@@ -1273,12 +1273,12 @@ public slots:
     * @function MyAvatar.sendAvatarDataPacket
     * @param {boolean} [sendAll=false]
     */
    void sendAvatarDataPacket(bool sendAll = false);
    virtual int sendAvatarDataPacket(bool sendAll = false);

    /**jsdoc
     * @function MyAvatar.sendIdentityPacket
     */
    void sendIdentityPacket();
    int sendIdentityPacket();

    /**jsdoc
     * @function MyAvatar.setSessionUUID

@@ -32,6 +32,9 @@
#include "AvatarData.h"
#include "AssociatedTraitValues.h"

const int CLIENT_TO_AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 50;
const quint64 MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS = USECS_PER_SECOND / CLIENT_TO_AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;

/**jsdoc
 * <strong>Note:</strong> An <code>AvatarList</code> API is also provided for Interface and client entity scripts: it is a
 * synonym for the {@link AvatarManager} API.

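For reference, the cadence these shared constants encode (assuming the conventional USECS_PER_SECOND = 1,000,000, which is defined elsewhere in the tree):

MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS = 1,000,000 us / 50 = 20,000 us = 20 ms

so ScriptableAvatar::update() above sends AvatarData at most 50 times per second, while identity and trait sends are further gated by the ~3 Mbps window.
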
@@ -65,8 +65,9 @@ void ClientTraitsHandler::resetForNewMixer() {
    _owningAvatar->prepareResetTraitInstances();
}

void ClientTraitsHandler::sendChangedTraitsToMixer() {
int ClientTraitsHandler::sendChangedTraitsToMixer() {
    std::unique_lock<Mutex> lock(_traitLock);
    int bytesWritten = 0;

    if (hasChangedTraits() || _shouldPerformInitialSend) {
        // we have at least one changed trait to send
@@ -75,7 +76,7 @@ void ClientTraitsHandler::sendChangedTraitsToMixer() {
        auto avatarMixer = nodeList->soloNodeOfType(NodeType::AvatarMixer);
        if (!avatarMixer || !avatarMixer->getActiveSocket()) {
            // we don't have an avatar mixer with an active socket, we can't send changed traits at this time
            return;
            return 0;
        }

        // we have a mixer to send to, setup our set traits packet
@@ -106,7 +107,7 @@ void ClientTraitsHandler::sendChangedTraitsToMixer() {

            if (initialSend || *simpleIt == Updated) {
                if (traitType == AvatarTraits::SkeletonModelURL) {
                    _owningAvatar->packTrait(traitType, *traitsPacketList);
                    bytesWritten += _owningAvatar->packTrait(traitType, *traitsPacketList);

                    // keep track of our skeleton version in case we get an override back
                    _currentSkeletonVersion = _currentTraitVersion;
@@ -123,10 +124,10 @@ void ClientTraitsHandler::sendChangedTraitsToMixer() {
                    || instanceIDValuePair.value == Updated) {
                    // this is a changed trait we need to send or we haven't send out trait information yet
                    // ask the owning avatar to pack it
                    _owningAvatar->packTraitInstance(instancedIt->traitType, instanceIDValuePair.id, *traitsPacketList);
                    bytesWritten += _owningAvatar->packTraitInstance(instancedIt->traitType, instanceIDValuePair.id, *traitsPacketList);
                } else if (!initialSend && instanceIDValuePair.value == Deleted) {
                    // pack delete for this trait instance
                    AvatarTraits::packInstancedTraitDelete(instancedIt->traitType, instanceIDValuePair.id,
                    bytesWritten += AvatarTraits::packInstancedTraitDelete(instancedIt->traitType, instanceIDValuePair.id,
                        *traitsPacketList);
                }
            }
@@ -136,6 +137,8 @@ void ClientTraitsHandler::sendChangedTraitsToMixer() {

        nodeList->sendPacketList(std::move(traitsPacketList), *avatarMixer);
    }

    return bytesWritten;
}

void ClientTraitsHandler::processTraitOverride(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {

@@ -24,7 +24,7 @@ class ClientTraitsHandler : public QObject {
public:
    ClientTraitsHandler(AvatarData* owningAvatar);

    void sendChangedTraitsToMixer();
    int sendChangedTraitsToMixer();

    bool hasChangedTraits() const { return _hasChangedTraits; }