update parseData for new network API

This commit is contained in:
Stephen Birarda 2015-07-13 14:26:10 -07:00
parent ca1ff9b0f5
commit ede39515d8
13 changed files with 76 additions and 118 deletions

View file

@ -24,7 +24,7 @@
class DomainServerNodeData : public NodeData { class DomainServerNodeData : public NodeData {
public: public:
DomainServerNodeData(); DomainServerNodeData();
int parseData(const QByteArray& packet) { return 0; } int parseData(NLPacket& packet, QSharedPointer<Node> sendingNode) { return 0; }
const QJsonObject& getStatsJSONObject() const { return _statsJSONObject; } const QJsonObject& getStatsJSONObject() const { return _statsJSONObject; }

View file

@ -33,7 +33,7 @@
#include <QtMultimedia/QAudioInput> #include <QtMultimedia/QAudioInput>
#include <QtMultimedia/QAudioOutput> #include <QtMultimedia/QAudioOutput>
#ifdef __GNUC__ #if defined(__GNUC__) && !defined(__clang__)
#pragma GCC diagnostic push #pragma GCC diagnostic push
#pragma GCC diagnostic ignored "-Wdouble-promotion" #pragma GCC diagnostic ignored "-Wdouble-promotion"
#endif #endif
@ -142,10 +142,10 @@ AudioClient::AudioClient() :
configureGverbFilter(_gverb); configureGverbFilter(_gverb);
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver(); auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerPacketListener(PacketType::AudioEnvironment, this, "handleAudioStreamStatsPacket"); packetReceiver.registerPacketListener(PacketType::AudioStreamStats, &_stats, "handleAudioStreamStatsPacket");
packetReceiver.registerPacketListener(PacketType::AudioStreamStats, this, "handleAudioEnvironmentDataPacket"); packetReceiver.registerPacketListener(PacketType::AudioEnvironment, this, "handleAudioEnvironmentDataPacket");
packetReceiver.registerPacketListener(PacketType::SilentAudioFrame, this, "handleAudioDataPacket");
packetReceiver.registerPacketListener(PacketType::MixedAudio, this, "handleAudioDataPacket"); packetReceiver.registerPacketListener(PacketType::MixedAudio, this, "handleAudioDataPacket");
packetReceiver.registerPacketListener(PacketType::SilentAudioFrame, this, "handleSilentAudioFrame");
packetReceiver.registerPacketListener(PacketType::NoisyMute, this, "handleNoisyMutePacket"); packetReceiver.registerPacketListener(PacketType::NoisyMute, this, "handleNoisyMutePacket");
packetReceiver.registerPacketListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket"); packetReceiver.registerPacketListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
} }
@ -535,35 +535,24 @@ void AudioClient::stop() {
} }
} }
void AudioClient::handleAudioStreamStatsPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr) { void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
_stats.parseAudioStreamStatsPacket(packet->getData());
updateLastHeardFromAudioMixer(packet);
}
void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr) {
const char* dataAt = packet->getPayload();
char bitset; char bitset;
memcpy(&bitset, dataAt, sizeof(char)); packet->readPrimitive(&bitset);
dataAt += sizeof(char);
bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);; bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);
if (hasReverb) { if (hasReverb) {
float reverbTime, wetLevel; float reverbTime, wetLevel;
memcpy(&reverbTime, dataAt, sizeof(float)); packet->readPrimitive(&reverbTime);
dataAt += sizeof(float); packet->readPrimitive(&wetLevel);
memcpy(&wetLevel, dataAt, sizeof(float));
dataAt += sizeof(float);
_receivedAudioStream.setReverb(reverbTime, wetLevel); _receivedAudioStream.setReverb(reverbTime, wetLevel);
} else { } else {
_receivedAudioStream.clearReverb(); _receivedAudioStream.clearReverb();
} }
updateLastHeardFromAudioMixer(packet);
} }
void AudioClient::handleAudioDataPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr) { void AudioClient::handleAudioDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get<NodeList>(); auto nodeList = DependencyManager::get<NodeList>();
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket); nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);
@ -577,45 +566,29 @@ void AudioClient::handleAudioDataPacket(QSharedPointer<NLPacket> packet, HifiSoc
} }
// Audio output must exist and be correctly set up if we're going to process received audio // Audio output must exist and be correctly set up if we're going to process received audio
_receivedAudioStream.parseData(packet->getData()); _receivedAudioStream.parseData(*packet, sendingNode);
} }
updateLastHeardFromAudioMixer(packet);
} }
void AudioClient::handleSilentAudioFrame(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr) { void AudioClient::handleNoisyMutePacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
updateLastHeardFromAudioMixer(packet);
}
void AudioClient::handleNoisyMutePacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr) {
if (!_muted) { if (!_muted) {
toggleMute(); toggleMute();
// TODO reimplement on interface side // TODO reimplement on interface side
//AudioScriptingInterface::getInstance().mutedByMixer(); //AudioScriptingInterface::getInstance().mutedByMixer();
} }
} }
void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr) { void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
glm::vec3 position; glm::vec3 position;
float radius; float radius;
int headerSize = numBytesForPacketHeaderGivenPacketType(PacketType::MuteEnvironment); packet->readPrimitive(&position);
memcpy(&position, packet->getPayload(), sizeof(glm::vec3)); packet->readPrimitive(&radius);
memcpy(&radius, packet->getPayload() + sizeof(glm::vec3), sizeof(float));
emit muteEnvironmentRequested(position, radius); emit muteEnvironmentRequested(position, radius);
} }
void AudioClient::updateLastHeardFromAudioMixer(QSharedPointer<NLPacket>& packet) {
// update having heard from the audio-mixer and record the bytes received
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer audioMixer = nodeList->nodeWithUUID(packet->getSourceID());
if (audioMixer) {
audioMixer->setLastHeardMicrostamp(usecTimestampNow());
}
}
QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) { QString AudioClient::getDefaultDeviceName(QAudio::Mode mode) {
QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode); QAudioDeviceInfo deviceInfo = defaultAudioDeviceForMode(mode);
return deviceInfo.deviceName(); return deviceInfo.deviceName();

View file

@ -139,12 +139,10 @@ public slots:
void start(); void start();
void stop(); void stop();
void handleAudioStreamStatsPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr); void handleAudioEnvironmentDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void handleAudioEnvironmentDataPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr); void handleAudioDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void handleAudioDataPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr); void handleNoisyMutePacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void handleSilentAudioFrame(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr); void handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void handleNoisyMutePacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr);
void handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, HifiSockAddr senderSockAddr);
void sendDownstreamAudioStatsPacket() { _stats.sendDownstreamAudioStatsPacket(); } void sendDownstreamAudioStatsPacket() { _stats.sendDownstreamAudioStatsPacket(); }
void handleAudioInput(); void handleAudioInput();
@ -214,7 +212,6 @@ private slots:
void audioStateChanged(QAudio::State state); void audioStateChanged(QAudio::State state);
private: private:
void updateLastHeardFromAudioMixer(QSharedPointer<NLPacket>& packet);
void outputFormatChanged(); void outputFormatChanged();
QByteArray firstInputFrame; QByteArray firstInputFrame;

View file

@ -63,27 +63,24 @@ void AudioIOStats::sentPacket() {
_lastSentAudioPacket = now; _lastSentAudioPacket = now;
} }
} }
void AudioIOStats::parseAudioStreamStatsPacket(const QByteArray& packet) { void AudioIOStats::processStreamStatsPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
int numBytesPacketHeader = numBytesForPacketHeader(packet);
const char* dataAt = packet.constData() + numBytesPacketHeader;
// parse the appendFlag, clear injected audio stream stats if 0 // parse the appendFlag, clear injected audio stream stats if 0
quint8 appendFlag = *(reinterpret_cast<const quint16*>(dataAt)); quint8 appendFlag;
dataAt += sizeof(quint8); packet->readPrimitive(&appendFlag);
if (!appendFlag) { if (!appendFlag) {
_mixerInjectedStreamStatsMap.clear(); _mixerInjectedStreamStatsMap.clear();
} }
// parse the number of stream stats structs to follow // parse the number of stream stats structs to follow
quint16 numStreamStats = *(reinterpret_cast<const quint16*>(dataAt)); quint16 numStreamStats;
dataAt += sizeof(quint16); packet->readPrimitive(&numStreamStats);
// parse the stream stats // parse the stream stats
AudioStreamStats streamStats; AudioStreamStats streamStats;
for (quint16 i = 0; i < numStreamStats; i++) { for (quint16 i = 0; i < numStreamStats; i++) {
memcpy(&streamStats, dataAt, sizeof(AudioStreamStats)); packet->readPrimitive(&streamStats);
dataAt += sizeof(AudioStreamStats);
if (streamStats._streamType == PositionalAudioStream::Microphone) { if (streamStats._streamType == PositionalAudioStream::Microphone) {
_mixerAvatarStreamStats = streamStats; _mixerAvatarStreamStats = streamStats;

View file

@ -17,6 +17,8 @@
#include <QObject> #include <QObject>
#include <AudioStreamStats.h> #include <AudioStreamStats.h>
#include <Node.h>
#include <NLPacket.h>
class MixedProcessedAudioStream; class MixedProcessedAudioStream;
@ -41,7 +43,7 @@ public:
const MovingMinMaxAvg<quint64>& getPacketSentTimeGaps() const { return _packetSentTimeGaps; } const MovingMinMaxAvg<quint64>& getPacketSentTimeGaps() const { return _packetSentTimeGaps; }
void sendDownstreamAudioStatsPacket(); void sendDownstreamAudioStatsPacket();
void parseAudioStreamStatsPacket(const QByteArray& packet); void processStreamStatsPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
private: private:
MixedProcessedAudioStream* _receivedAudioStream; MixedProcessedAudioStream* _receivedAudioStream;
@ -57,4 +59,4 @@ private:
MovingMinMaxAvg<quint64> _packetSentTimeGaps; MovingMinMaxAvg<quint64> _packetSentTimeGaps;
}; };
#endif // hifi_AudioIOStats_h #endif // hifi_AudioIOStats_h

View file

@ -11,6 +11,9 @@
#include <glm/glm.hpp> #include <glm/glm.hpp>
#include <NLPacket.h>
#include <Node.h>
#include "InboundAudioStream.h" #include "InboundAudioStream.h"
#include "PacketHeaders.h" #include "PacketHeaders.h"
@ -96,28 +99,23 @@ void InboundAudioStream::perSecondCallbackForUpdatingStats() {
_timeGapStatsForStatsPacket.currentIntervalComplete(); _timeGapStatsForStatsPacket.currentIntervalComplete();
} }
int InboundAudioStream::parseData(const QByteArray& packet) { int InboundAudioStream::parseData(NLPacket& packet, SharedNodePointer sendingNode) {
PacketType::Value packetType = packetTypeForPacket(packet);
QUuid senderUUID = uuidFromPacketHeader(packet);
// parse header
int numBytesHeader = numBytesForPacketHeader(packet);
const char* dataAt = packet.constData() + numBytesHeader;
int readBytes = numBytesHeader;
// parse sequence number and track it // parse sequence number and track it
quint16 sequence = *(reinterpret_cast<const quint16*>(dataAt)); quint16 sequence;
dataAt += sizeof(quint16); packet.readPrimitive(&sequence);
readBytes += sizeof(quint16); SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence,
SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence, senderUUID); sendingNode->getUUID());
packetReceivedUpdateTimingStats(); packetReceivedUpdateTimingStats();
int networkSamples; int networkSamples;
// parse the info after the seq number and before the audio data (the stream properties) // parse the info after the seq number and before the audio data (the stream properties)
readBytes += parseStreamProperties(packetType, packet.mid(readBytes), networkSamples); int propertyBytes = parseStreamProperties(packet.getType(),
QByteArray::fromRawData(packet.getPayload(), packet.pos()),
networkSamples);
packet.seek(packet.pos() + propertyBytes);
// handle this packet based on its arrival status. // handle this packet based on its arrival status.
switch (arrivalInfo._status) { switch (arrivalInfo._status) {
@ -132,10 +130,12 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
} }
case SequenceNumberStats::OnTime: { case SequenceNumberStats::OnTime: {
// Packet is on time; parse its data to the ringbuffer // Packet is on time; parse its data to the ringbuffer
if (packetType == PacketType::SilentAudioFrame) { if (packet.getType() == PacketType::SilentAudioFrame) {
writeDroppableSilentSamples(networkSamples); writeDroppableSilentSamples(networkSamples);
} else { } else {
readBytes += parseAudioData(packetType, packet.mid(readBytes), networkSamples); int audioBytes = parseAudioData(packet.getType(), QByteArray::fromRawData(packet.getPayload(), packet.pos()),
networkSamples);
packet.seek(packet.pos() + audioBytes);
} }
break; break;
} }
@ -165,7 +165,7 @@ int InboundAudioStream::parseData(const QByteArray& packet) {
framesAvailableChanged(); framesAvailableChanged();
return readBytes; return packet.pos();
} }
int InboundAudioStream::parseStreamProperties(PacketType::Value type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) { int InboundAudioStream::parseStreamProperties(PacketType::Value type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
@ -314,7 +314,7 @@ void InboundAudioStream::setToStarved() {
starvesInWindow++; starvesInWindow++;
} while (starvesIterator != end); } while (starvesIterator != end);
// this starve put us over the starve threshold. update _desiredJitterBufferFrames to // this starve put us over the starve threshold. update _desiredJitterBufferFrames to
// value determined by window A. // value determined by window A.
if (starvesInWindow >= _starveThreshold) { if (starvesInWindow >= _starveThreshold) {
int calculatedJitterBufferFrames; int calculatedJitterBufferFrames;
@ -398,7 +398,7 @@ void InboundAudioStream::packetReceivedUpdateTimingStats() {
_timeGapStatsForDesiredReduction.update(gap); _timeGapStatsForDesiredReduction.update(gap);
if (_timeGapStatsForDesiredCalcOnTooManyStarves.getNewStatsAvailableFlag()) { if (_timeGapStatsForDesiredCalcOnTooManyStarves.getNewStatsAvailableFlag()) {
_calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_timeGapStatsForDesiredCalcOnTooManyStarves.getWindowMax() _calculatedJitterBufferFramesUsingMaxGap = ceilf((float)_timeGapStatsForDesiredCalcOnTooManyStarves.getWindowMax()
/ (float) AudioConstants::NETWORK_FRAME_USECS); / (float) AudioConstants::NETWORK_FRAME_USECS);
_timeGapStatsForDesiredCalcOnTooManyStarves.clearNewStatsAvailableFlag(); _timeGapStatsForDesiredCalcOnTooManyStarves.clearNewStatsAvailableFlag();
} }

View file

@ -80,7 +80,7 @@ public:
{} {}
// max number of frames over desired in the ringbuffer. // max number of frames over desired in the ringbuffer.
int _maxFramesOverDesired; int _maxFramesOverDesired;
// if false, _desiredJitterBufferFrames will always be _staticDesiredJitterBufferFrames. Otherwise, // if false, _desiredJitterBufferFrames will always be _staticDesiredJitterBufferFrames. Otherwise,
// either fred or philip's method will be used to calculate _desiredJitterBufferFrames based on packet timegaps. // either fred or philip's method will be used to calculate _desiredJitterBufferFrames based on packet timegaps.
@ -107,7 +107,7 @@ public:
virtual void resetStats(); virtual void resetStats();
void clearBuffer(); void clearBuffer();
virtual int parseData(const QByteArray& packet); virtual int parseData(NLPacket& packet, QSharedPointer<Node> sendingNode);
int popFrames(int maxFrames, bool allOrNothing, bool starveIfNoFramesPopped = true); int popFrames(int maxFrames, bool allOrNothing, bool starveIfNoFramesPopped = true);
int popSamples(int maxSamples, bool allOrNothing, bool starveIfNoSamplesPopped = true); int popSamples(int maxSamples, bool allOrNothing, bool starveIfNoSamplesPopped = true);
@ -131,7 +131,7 @@ public:
virtual AudioStreamStats getAudioStreamStats() const; virtual AudioStreamStats getAudioStreamStats() const;
/// returns the desired number of jitter buffer frames under the dynamic jitter buffers scheme /// returns the desired number of jitter buffer frames under the dynamic jitter buffers scheme
int getCalculatedJitterBufferFrames() const { return _useStDevForJitterCalc ? int getCalculatedJitterBufferFrames() const { return _useStDevForJitterCalc ?
_calculatedJitterBufferFramesUsingStDev : _calculatedJitterBufferFramesUsingMaxGap; }; _calculatedJitterBufferFramesUsingStDev : _calculatedJitterBufferFramesUsingMaxGap; };
/// returns the desired number of jitter buffer frames using Philip's method /// returns the desired number of jitter buffer frames using Philip's method
@ -217,7 +217,7 @@ protected:
bool _dynamicJitterBuffers; // if false, _desiredJitterBufferFrames is locked at 1 (old behavior) bool _dynamicJitterBuffers; // if false, _desiredJitterBufferFrames is locked at 1 (old behavior)
int _staticDesiredJitterBufferFrames; int _staticDesiredJitterBufferFrames;
// if jitter buffer is dynamic, this determines what method of calculating _desiredJitterBufferFrames // if jitter buffer is dynamic, this determines what method of calculating _desiredJitterBufferFrames
// if true, Philip's timegap std dev calculation is used. Otherwise, Freddy's max timegap calculation is used // if true, Philip's timegap std dev calculation is used. Otherwise, Freddy's max timegap calculation is used
bool _useStDevForJitterCalc; bool _useStDevForJitterCalc;

View file

@ -86,7 +86,7 @@ public:
const char* getTypeName() const; const char* getTypeName() const;
// implement parseData to return 0 so we can be a subclass of NodeData // implement parseData to return 0 so we can be a subclass of NodeData
int parseData(const QByteArray& packet) { return 0; } int parseData(NLPacket& packet, SharedNodePointer sendingNode) { return 0; }
friend QDebug operator<<(QDebug debug, const Assignment& assignment); friend QDebug operator<<(QDebug debug, const Assignment& assignment);
friend QDataStream& operator<<(QDataStream &out, const Assignment& assignment); friend QDataStream& operator<<(QDataStream &out, const Assignment& assignment);

View file

@ -231,35 +231,25 @@ PacketSequenceNumber LimitedNodeList::getNextSequenceNumberForPacket(const QUuid
return _packetSequenceNumbers[nodeUUID][packetType]++; return _packetSequenceNumbers[nodeUUID][packetType]++;
} }
int LimitedNodeList::updateNodeWithDataFromPacket(const SharedNodePointer& matchingNode, QSharedPointer<NLPacket> packet) { int LimitedNodeList::updateNodeWithDataFromPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
QMutexLocker locker(&matchingNode->getMutex()); QMutexLocker locker(&sendingNode->getMutex());
// if this was a sequence numbered packet we should store the last seq number for // if this was a sequence numbered packet we should store the last seq number for
// a packet of this type for this node // a packet of this type for this node
if (SEQUENCE_NUMBERED_PACKETS.contains(packet->getType())) { if (SEQUENCE_NUMBERED_PACKETS.contains(packet->getType())) {
matchingNode->setLastSequenceNumberForPacketType(packet->readSequenceNumber(), packet->getType()); sendingNode->setLastSequenceNumberForPacketType(packet->readSequenceNumber(), packet->getType());
} }
NodeData* linkedData = matchingNode->getLinkedData(); NodeData* linkedData = sendingNode->getLinkedData();
if (!linkedData && linkedDataCreateCallback) { if (!linkedData && linkedDataCreateCallback) {
linkedDataCreateCallback(matchingNode.data()); linkedDataCreateCallback(sendingNode.data());
} }
if (linkedData) { if (linkedData) {
QMutexLocker linkedDataLocker(&linkedData->getMutex()); QMutexLocker linkedDataLocker(&linkedData->getMutex());
return linkedData->parseData(QByteArray::fromRawData(packet->getData(), packet->getSizeWithHeader())); return linkedData->parseData(*packet, sendingNode);
} }
return 0;
}
int LimitedNodeList::findNodeAndUpdateWithDataFromPacket(QSharedPointer<NLPacket> packet) {
SharedNodePointer matchingNode = nodeWithUUID(packet->getSourceID());
if (matchingNode) {
return updateNodeWithDataFromPacket(matchingNode, packet);
}
// we weren't able to match the sender address to the address we have for this node, unlock and don't parse
return 0; return 0;
} }

View file

@ -155,8 +155,7 @@ public:
void processKillNode(const QByteArray& datagram); void processKillNode(const QByteArray& datagram);
int updateNodeWithDataFromPacket(const SharedNodePointer& matchingNode, QSharedPointer<NLPacket> packet); int updateNodeWithDataFromPacket(QSharedPointer<NLPacket> packet, SharedNodePointer matchingNode);
int findNodeAndUpdateWithDataFromPacket(const QSharedPointer<NLPacket> packet);
unsigned broadcastToNodes(std::unique_ptr<NLPacket> packet, const NodeSet& destinationNodeTypes) { assert(false); return 0; } unsigned broadcastToNodes(std::unique_ptr<NLPacket> packet, const NodeSet& destinationNodeTypes) { assert(false); return 0; }
SharedNodePointer soloNodeOfType(char nodeType); SharedNodePointer soloNodeOfType(char nodeType);

View file

@ -14,6 +14,9 @@
#include <QtCore/QMutex> #include <QtCore/QMutex>
#include <QtCore/QObject> #include <QtCore/QObject>
#include <QtCore/QSharedPointer>
#include "NLPacket.h"
class Node; class Node;
@ -22,7 +25,7 @@ class NodeData : public QObject {
public: public:
NodeData(); NodeData();
virtual ~NodeData() = 0; virtual ~NodeData() = 0;
virtual int parseData(const QByteArray& packet) = 0; virtual int parseData(NLPacket& packet, QSharedPointer<Node> sendingNode) = 0;
QMutex& getMutex() { return _mutex; } QMutex& getMutex() { return _mutex; }

View file

@ -64,13 +64,10 @@ int OctreeQuery::getBroadcastData(unsigned char* destinationBuffer) {
} }
// called on the other nodes - assigns it to my views of the others // called on the other nodes - assigns it to my views of the others
int OctreeQuery::parseData(const QByteArray& packet) { int OctreeQuery::parseData(NLPacket& packet, QSharedPointer<Node> sendingNode) {
// increment to push past the packet header const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(packet.getPayload());
int numBytesPacketHeader = numBytesForPacketHeader(packet); const unsigned char* sourceBuffer = startPosition;
const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(packet.data());
const unsigned char* sourceBuffer = startPosition + numBytesPacketHeader;
// camera details // camera details
memcpy(&_cameraPosition, sourceBuffer, sizeof(_cameraPosition)); memcpy(&_cameraPosition, sourceBuffer, sizeof(_cameraPosition));

View file

@ -48,7 +48,7 @@ public:
virtual ~OctreeQuery() {} virtual ~OctreeQuery() {}
int getBroadcastData(unsigned char* destinationBuffer); int getBroadcastData(unsigned char* destinationBuffer);
int parseData(const QByteArray& packet); int parseData(NLPacket& packet, QSharedPointer<Node> sendingNode);
// getters for camera details // getters for camera details
const glm::vec3& getCameraPosition() const { return _cameraPosition; } const glm::vec3& getCameraPosition() const { return _cameraPosition; }