Merge branch 'master' of https://github.com/highfidelity/hifi into audio-reverb

Ken Cooke 2015-12-07 13:08:21 -08:00
commit 4803b3bd9f
200 changed files with 3687 additions and 1608 deletions
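This merge pulls master's packet-handling refactor into audio-reverb: constructors and packet handlers that previously took NLPacket or NLPacketList now take the unified ReceivedMessage type, and registerMessageListener calls collapse into registerListener. A minimal sketch of that recurring pattern follows, assuming the hifi networking headers and a hypothetical ExampleAssignment class; it is an illustration of the migration, not code from this commit.

// Hypothetical listener showing the NLPacket -> ReceivedMessage migration used throughout this merge.
#include <QtCore/QObject>
#include <QtCore/QSharedPointer>
#include <DependencyManager.h>
#include <NodeList.h>
#include <ReceivedMessage.h>

class ExampleAssignment : public QObject {
    Q_OBJECT
public:
    ExampleAssignment() {
        auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
        // a single registerListener call now covers single- and multi-packet messages
        packetReceiver.registerListener(PacketType::MessagesData, this, "handleMessages");
    }

private slots:
    // old signature: void handleMessages(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
    void handleMessages(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
        // ReceivedMessage exposes getSize()/getRawMessage()/readPrimitive() where
        // NLPacket offered getPayloadSize()/getPayload()/readPrimitive()
        quint16 exampleValue;
        message->readPrimitive(&exampleValue);
        Q_UNUSED(senderNode);
        Q_UNUSED(exampleValue);
    }
};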


@ -43,8 +43,8 @@
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
Agent::Agent(NLPacket& packet) :
ThreadedAssignment(packet),
Agent::Agent(ReceivedMessage& message) :
ThreadedAssignment(message),
_entityEditSender(),
_receivedAudioStream(AudioConstants::NETWORK_FRAME_SAMPLES_STEREO, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES,
InboundAudioStream::Settings(0, false, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, false,
@ -79,46 +79,46 @@ Agent::Agent(NLPacket& packet) :
packetReceiver.registerListener(PacketType::Jurisdiction, this, "handleJurisdictionPacket");
}
void Agent::handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
auto packetType = packet->getType();
void Agent::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto packetType = message->getType();
if (packetType == PacketType::OctreeStats) {
int statsMessageLength = OctreeHeadlessViewer::parseOctreeStats(packet, senderNode);
if (packet->getPayloadSize() > statsMessageLength) {
int statsMessageLength = OctreeHeadlessViewer::parseOctreeStats(message, senderNode);
if (message->getSize() > statsMessageLength) {
// pull out the piggybacked packet and create a new QSharedPointer<NLPacket> for it
int piggyBackedSizeWithHeader = packet->getPayloadSize() - statsMessageLength;
int piggyBackedSizeWithHeader = message->getSize() - statsMessageLength;
auto buffer = std::unique_ptr<char[]>(new char[piggyBackedSizeWithHeader]);
memcpy(buffer.get(), packet->getPayload() + statsMessageLength, piggyBackedSizeWithHeader);
memcpy(buffer.get(), message->getRawMessage() + statsMessageLength, piggyBackedSizeWithHeader);
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggyBackedSizeWithHeader, packet->getSenderSockAddr());
packet = QSharedPointer<NLPacket>(newPacket.release());
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggyBackedSizeWithHeader, message->getSenderSockAddr());
message = QSharedPointer<ReceivedMessage>::create(*newPacket);
} else {
return; // bail since no piggyback data
}
packetType = packet->getType();
packetType = message->getType();
} // fall through to piggyback message
if (packetType == PacketType::EntityData || packetType == PacketType::EntityErase) {
_entityViewer.processDatagram(*packet, senderNode);
_entityViewer.processDatagram(*message, senderNode);
}
}
void Agent::handleJurisdictionPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void Agent::handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
NodeType_t nodeType;
packet->peekPrimitive(&nodeType);
message->peekPrimitive(&nodeType);
// PacketType_JURISDICTION, first byte is the node type...
if (nodeType == NodeType::EntityServer) {
DependencyManager::get<EntityScriptingInterface>()->getJurisdictionListener()->
queueReceivedPacket(packet, senderNode);
queueReceivedPacket(message, senderNode);
}
}
void Agent::handleAudioPacket(QSharedPointer<NLPacket> packet) {
_receivedAudioStream.parseData(*packet);
void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
_receivedAudioStream.parseData(*message);
_lastReceivedAudioLoudness = _receivedAudioStream.getNextOutputFrameLoudness();


@ -39,7 +39,7 @@ class Agent : public ThreadedAssignment {
Q_PROPERTY(QUuid sessionUUID READ getSessionUUID)
public:
Agent(NLPacket& packet);
Agent(ReceivedMessage& message);
void setIsAvatar(bool isAvatar);
bool isAvatar() const { return _isAvatar; }
@ -63,9 +63,10 @@ private slots:
void scriptRequestFinished();
void executeScript();
void handleAudioPacket(QSharedPointer<NLPacket> packet);
void handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleAudioPacket(QSharedPointer<ReceivedMessage> message);
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void processAgentAvatarAndAudio(float deltaTime);
private:


@ -225,11 +225,11 @@ void AssignmentClient::sendAssignmentRequest() {
}
}
void AssignmentClient::handleCreateAssignmentPacket(QSharedPointer<NLPacket> packet) {
void AssignmentClient::handleCreateAssignmentPacket(QSharedPointer<ReceivedMessage> message) {
qDebug() << "Received a PacketType::CreateAssignment - attempting to unpack.";
// construct the deployed assignment from the packet data
_currentAssignment = AssignmentFactory::unpackAssignment(*packet);
_currentAssignment = AssignmentFactory::unpackAssignment(*message);
if (_currentAssignment && !_isAssigned) {
qDebug() << "Received an assignment -" << *_currentAssignment;
@ -239,7 +239,7 @@ void AssignmentClient::handleCreateAssignmentPacket(QSharedPointer<NLPacket> pac
// switch our DomainHandler hostname and port to whoever sent us the assignment
nodeList->getDomainHandler().setSockAddr(packet->getSenderSockAddr(), _assignmentServerHostname);
nodeList->getDomainHandler().setSockAddr(message->getSenderSockAddr(), _assignmentServerHostname);
nodeList->getDomainHandler().setAssignmentUUID(_currentAssignment->getUUID());
qDebug() << "Destination IP for assignment is" << nodeList->getDomainHandler().getIP().toString();
@ -274,8 +274,8 @@ void AssignmentClient::handleCreateAssignmentPacket(QSharedPointer<NLPacket> pac
}
}
void AssignmentClient::handleStopNodePacket(QSharedPointer<NLPacket> packet) {
const HifiSockAddr& senderSockAddr = packet->getSenderSockAddr();
void AssignmentClient::handleStopNodePacket(QSharedPointer<ReceivedMessage> message) {
const HifiSockAddr& senderSockAddr = message->getSenderSockAddr();
if (senderSockAddr.getAddress() == QHostAddress::LocalHost ||
senderSockAddr.getAddress() == QHostAddress::LocalHostIPv6) {


@ -38,8 +38,8 @@ public slots:
void aboutToQuit();
private slots:
void handleCreateAssignmentPacket(QSharedPointer<NLPacket> packet);
void handleStopNodePacket(QSharedPointer<NLPacket> packet);
void handleCreateAssignmentPacket(QSharedPointer<ReceivedMessage> message);
void handleStopNodePacket(QSharedPointer<ReceivedMessage> message);
private:
void setUpStatusToMonitor();


@ -207,14 +207,14 @@ void AssignmentClientMonitor::checkSpares() {
}
}
void AssignmentClientMonitor::handleChildStatusPacket(QSharedPointer<NLPacket> packet) {
void AssignmentClientMonitor::handleChildStatusPacket(QSharedPointer<ReceivedMessage> message) {
// read out the sender ID
QUuid senderID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
QUuid senderID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto nodeList = DependencyManager::get<NodeList>();
SharedNodePointer matchingNode = nodeList->nodeWithUUID(senderID);
const HifiSockAddr& senderSockAddr = packet->getSenderSockAddr();
const HifiSockAddr& senderSockAddr = message->getSenderSockAddr();
AssignmentClientChildData* childData = nullptr;
@ -251,7 +251,7 @@ void AssignmentClientMonitor::handleChildStatusPacket(QSharedPointer<NLPacket> p
// get child's assignment type out of the packet
quint8 assignmentType;
packet->readPrimitive(&assignmentType);
message->readPrimitive(&assignmentType);
childData->setChildType((Assignment::Type) assignmentType);


@ -36,7 +36,7 @@ public:
private slots:
void checkSpares();
void childProcessFinished();
void handleChildStatusPacket(QSharedPointer<NLPacket> packet);
void handleChildStatusPacket(QSharedPointer<ReceivedMessage> message);
public slots:
void aboutToQuit();


@ -19,26 +19,26 @@
#include "assets/AssetServer.h"
#include "messages/MessagesMixer.h"
ThreadedAssignment* AssignmentFactory::unpackAssignment(NLPacket& packet) {
ThreadedAssignment* AssignmentFactory::unpackAssignment(ReceivedMessage& message) {
quint8 packedType;
packet.peekPrimitive(&packedType);
message.peekPrimitive(&packedType);
Assignment::Type unpackedType = (Assignment::Type) packedType;
switch (unpackedType) {
case Assignment::AudioMixerType:
return new AudioMixer(packet);
return new AudioMixer(message);
case Assignment::AvatarMixerType:
return new AvatarMixer(packet);
return new AvatarMixer(message);
case Assignment::AgentType:
return new Agent(packet);
return new Agent(message);
case Assignment::EntityServerType:
return new EntityServer(packet);
return new EntityServer(message);
case Assignment::AssetServerType:
return new AssetServer(packet);
return new AssetServer(message);
case Assignment::MessagesMixerType:
return new MessagesMixer(packet);
return new MessagesMixer(message);
default:
return NULL;
}


@ -16,7 +16,7 @@
class AssignmentFactory {
public:
static ThreadedAssignment* unpackAssignment(NLPacket& packet);
static ThreadedAssignment* unpackAssignment(ReceivedMessage& message);
};
#endif // hifi_AssignmentFactory_h


@ -27,8 +27,8 @@
const QString ASSET_SERVER_LOGGING_TARGET_NAME = "asset-server";
AssetServer::AssetServer(NLPacket& packet) :
ThreadedAssignment(packet),
AssetServer::AssetServer(ReceivedMessage& message) :
ThreadedAssignment(message),
_taskPool(this)
{
@ -40,7 +40,7 @@ AssetServer::AssetServer(NLPacket& packet) :
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListener(PacketType::AssetGet, this, "handleAssetGet");
packetReceiver.registerListener(PacketType::AssetGetInfo, this, "handleAssetGetInfo");
packetReceiver.registerMessageListener(PacketType::AssetUpload, this, "handleAssetUpload");
packetReceiver.registerListener(PacketType::AssetUpload, this, "handleAssetUpload");
}
void AssetServer::run() {
@ -84,20 +84,20 @@ void AssetServer::run() {
}
}
void AssetServer::handleAssetGetInfo(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void AssetServer::handleAssetGetInfo(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
QByteArray assetHash;
MessageID messageID;
uint8_t extensionLength;
if (packet->getPayloadSize() < qint64(SHA256_HASH_LENGTH + sizeof(messageID) + sizeof(extensionLength))) {
if (message->getSize() < qint64(SHA256_HASH_LENGTH + sizeof(messageID) + sizeof(extensionLength))) {
qDebug() << "ERROR bad file request";
return;
}
packet->readPrimitive(&messageID);
assetHash = packet->readWithoutCopy(SHA256_HASH_LENGTH);
packet->readPrimitive(&extensionLength);
QByteArray extension = packet->read(extensionLength);
message->readPrimitive(&messageID);
assetHash = message->readWithoutCopy(SHA256_HASH_LENGTH);
message->readPrimitive(&extensionLength);
QByteArray extension = message->read(extensionLength);
auto replyPacket = NLPacket::create(PacketType::AssetGetInfoReply);
@ -122,26 +122,26 @@ void AssetServer::handleAssetGetInfo(QSharedPointer<NLPacket> packet, SharedNode
nodeList->sendPacket(std::move(replyPacket), *senderNode);
}
void AssetServer::handleAssetGet(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void AssetServer::handleAssetGet(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto minSize = qint64(sizeof(MessageID) + SHA256_HASH_LENGTH + sizeof(uint8_t) + sizeof(DataOffset) + sizeof(DataOffset));
if (packet->getPayloadSize() < minSize) {
if (message->getSize() < minSize) {
qDebug() << "ERROR bad file request";
return;
}
// Queue task
auto task = new SendAssetTask(packet, senderNode, _resourcesDirectory);
auto task = new SendAssetTask(message, senderNode, _resourcesDirectory);
_taskPool.start(task);
}
void AssetServer::handleAssetUpload(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
void AssetServer::handleAssetUpload(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
if (senderNode->getCanRez()) {
qDebug() << "Starting an UploadAssetTask for upload from" << uuidStringWithoutCurlyBraces(senderNode->getUUID());
auto task = new UploadAssetTask(packetList, senderNode, _resourcesDirectory);
auto task = new UploadAssetTask(message, senderNode, _resourcesDirectory);
_taskPool.start(task);
} else {
// this is a node the domain told us is not allowed to rez entities
@ -151,7 +151,7 @@ void AssetServer::handleAssetUpload(QSharedPointer<NLPacketList> packetList, Sha
auto permissionErrorPacket = NLPacket::create(PacketType::AssetUploadReply, sizeof(MessageID) + sizeof(AssetServerError));
MessageID messageID;
packetList->readPrimitive(&messageID);
message->readPrimitive(&messageID);
// write the message ID and a permission denied error
permissionErrorPacket->writePrimitive(messageID);


@ -18,19 +18,20 @@
#include <QThreadPool>
#include "AssetUtils.h"
#include "ReceivedMessage.h"
class AssetServer : public ThreadedAssignment {
Q_OBJECT
public:
AssetServer(NLPacket& packet);
AssetServer(ReceivedMessage& message);
public slots:
void run();
private slots:
void handleAssetGetInfo(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleAssetGet(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleAssetUpload(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
void handleAssetGetInfo(QSharedPointer<ReceivedMessage> packet, SharedNodePointer senderNode);
void handleAssetGet(QSharedPointer<ReceivedMessage> packet, SharedNodePointer senderNode);
void handleAssetUpload(QSharedPointer<ReceivedMessage> packetList, SharedNodePointer senderNode);
void sendStatsPacket();


@ -22,9 +22,9 @@
#include "AssetUtils.h"
SendAssetTask::SendAssetTask(QSharedPointer<NLPacket> packet, const SharedNodePointer& sendToNode, const QDir& resourcesDir) :
SendAssetTask::SendAssetTask(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& sendToNode, const QDir& resourcesDir) :
QRunnable(),
_packet(packet),
_message(message),
_senderNode(sendToNode),
_resourcesDir(resourcesDir)
{
@ -36,16 +36,16 @@ void SendAssetTask::run() {
uint8_t extensionLength;
DataOffset start, end;
_packet->readPrimitive(&messageID);
QByteArray assetHash = _packet->read(SHA256_HASH_LENGTH);
_packet->readPrimitive(&extensionLength);
QByteArray extension = _packet->read(extensionLength);
_message->readPrimitive(&messageID);
QByteArray assetHash = _message->read(SHA256_HASH_LENGTH);
_message->readPrimitive(&extensionLength);
QByteArray extension = _message->read(extensionLength);
// `start` and `end` indicate the range of data to retrieve for the asset identified by `assetHash`.
// `start` is inclusive, `end` is exclusive. Requesting `start` = 1, `end` = 10 will retrieve 9 bytes of data,
// starting at index 1.
_packet->readPrimitive(&start);
_packet->readPrimitive(&end);
_message->readPrimitive(&start);
_message->readPrimitive(&end);
QString hexHash = assetHash.toHex();


@ -25,12 +25,12 @@ class NLPacket;
class SendAssetTask : public QRunnable {
public:
SendAssetTask(QSharedPointer<NLPacket> packet, const SharedNodePointer& sendToNode, const QDir& resourcesDir);
SendAssetTask(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& sendToNode, const QDir& resourcesDir);
void run();
private:
QSharedPointer<NLPacket> _packet;
QSharedPointer<ReceivedMessage> _message;
SharedNodePointer _senderNode;
QDir _resourcesDir;
};


@ -19,9 +19,9 @@
#include <NLPacketList.h>
UploadAssetTask::UploadAssetTask(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode,
UploadAssetTask::UploadAssetTask(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode,
const QDir& resourcesDir) :
_packetList(packetList),
_receivedMessage(receivedMessage),
_senderNode(senderNode),
_resourcesDir(resourcesDir)
{
@ -29,7 +29,7 @@ UploadAssetTask::UploadAssetTask(QSharedPointer<NLPacketList> packetList, Shared
}
void UploadAssetTask::run() {
auto data = _packetList->getMessage();
auto data = _receivedMessage->getMessage();
QBuffer buffer { &data };
buffer.open(QIODevice::ReadOnly);


@ -19,17 +19,19 @@
#include <QtCore/QRunnable>
#include <QtCore/QSharedPointer>
#include "ReceivedMessage.h"
class NLPacketList;
class Node;
class UploadAssetTask : public QRunnable {
public:
UploadAssetTask(QSharedPointer<NLPacketList> packetList, QSharedPointer<Node> senderNode, const QDir& resourcesDir);
UploadAssetTask(QSharedPointer<ReceivedMessage> message, QSharedPointer<Node> senderNode, const QDir& resourcesDir);
void run();
private:
QSharedPointer<NLPacketList> _packetList;
QSharedPointer<ReceivedMessage> _receivedMessage;
QSharedPointer<Node> _senderNode;
QDir _resourcesDir;
};


@ -75,8 +75,8 @@ bool AudioMixer::shouldMute(float quietestFrame) {
return (quietestFrame > _noiseMutingThreshold);
}
AudioMixer::AudioMixer(NLPacket& packet) :
ThreadedAssignment(packet),
AudioMixer::AudioMixer(ReceivedMessage& message) :
ThreadedAssignment(message),
_trailingSleepRatio(1.0f),
_minAudibilityThreshold(LOUDNESS_TO_DISTANCE_RATIO / 2.0f),
_performanceThrottlingRatio(0.0f),
@ -542,17 +542,17 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
}
}
void AudioMixer::handleNodeAudioPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(packet, sendingNode);
void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
}
void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void AudioMixer::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
auto nodeList = DependencyManager::get<NodeList>();
if (sendingNode->getCanAdjustLocks()) {
auto newPacket = NLPacket::create(PacketType::MuteEnvironment, packet->getPayloadSize());
auto newPacket = NLPacket::create(PacketType::MuteEnvironment, message->getSize());
// Copy payload
newPacket->write(packet->getPayload(), packet->getPayloadSize());
newPacket->write(message->getRawMessage(), message->getSize());
nodeList->eachNode([&](const SharedNodePointer& node){
if (node->getType() == NodeType::Agent && node->getActiveSocket() &&


@ -28,7 +28,7 @@ const int READ_DATAGRAMS_STATS_WINDOW_SECONDS = 30;
class AudioMixer : public ThreadedAssignment {
Q_OBJECT
public:
AudioMixer(NLPacket& packet);
AudioMixer(ReceivedMessage& message);
void deleteLater() { qDebug() << "DELETE LATER CALLED?"; QObject::deleteLater(); }
public slots:
@ -41,8 +41,8 @@ public slots:
private slots:
void broadcastMixes();
void handleNodeAudioPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void handleNodeAudioPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
private:
void domainSettingsRequestComplete();


@ -49,18 +49,18 @@ AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() const {
return NULL;
}
int AudioMixerClientData::parseData(NLPacket& packet) {
PacketType packetType = packet.getType();
int AudioMixerClientData::parseData(ReceivedMessage& message) {
PacketType packetType = message.getType();
if (packetType == PacketType::AudioStreamStats) {
// skip over header, appendFlag, and num stats packed
packet.seek(sizeof(quint8) + sizeof(quint16));
message.seek(sizeof(quint8) + sizeof(quint16));
// read the downstream audio stream stats
packet.readPrimitive(&_downstreamAudioStreamStats);
message.readPrimitive(&_downstreamAudioStreamStats);
return packet.pos();
return message.getPosition();
} else {
PositionalAudioStream* matchingStream = NULL;
@ -74,10 +74,10 @@ int AudioMixerClientData::parseData(NLPacket& packet) {
// we don't have a mic stream yet, so add it
// read the channel flag to see if our stream is stereo or not
packet.seek(sizeof(quint16));
message.seek(sizeof(quint16));
quint8 channelFlag;
packet.readPrimitive(&channelFlag);
message.readPrimitive(&channelFlag);
bool isStereo = channelFlag == 1;
@ -89,11 +89,11 @@ int AudioMixerClientData::parseData(NLPacket& packet) {
// this is injected audio
// grab the stream identifier for this injected audio
packet.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(packet.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
message.seek(sizeof(quint16));
QUuid streamIdentifier = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
bool isStereo;
packet.readPrimitive(&isStereo);
message.readPrimitive(&isStereo);
if (!_audioStreams.contains(streamIdentifier)) {
// we don't have this injected stream yet, so add it
@ -105,9 +105,9 @@ int AudioMixerClientData::parseData(NLPacket& packet) {
}
// seek to the beginning of the packet so that the next reader is in the right spot
packet.seek(0);
message.seek(0);
return matchingStream->parseData(packet);
return matchingStream->parseData(message);
}
return 0;
}


@ -42,7 +42,7 @@ public:
const QHash<QUuid, PositionalAudioStream*>& getAudioStreams() const { return _audioStreams; }
AvatarAudioStream* getAvatarAudioStream() const;
int parseData(NLPacket& packet);
int parseData(ReceivedMessage& message);
void checkBuffersBeforeFrameSend();


@ -34,8 +34,8 @@ const QString AVATAR_MIXER_LOGGING_NAME = "avatar-mixer";
const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 60;
const unsigned int AVATAR_DATA_SEND_INTERVAL_MSECS = (1.0f / (float) AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND) * 1000;
AvatarMixer::AvatarMixer(NLPacket& packet) :
ThreadedAssignment(packet),
AvatarMixer::AvatarMixer(ReceivedMessage& message) :
ThreadedAssignment(message),
_broadcastThread(),
_lastFrameTimestamp(QDateTime::currentMSecsSinceEpoch()),
_trailingSleepRatio(1.0f),
@ -424,19 +424,19 @@ void AvatarMixer::nodeKilled(SharedNodePointer killedNode) {
}
}
void AvatarMixer::handleAvatarDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void AvatarMixer::handleAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->updateNodeWithDataFromPacket(packet, senderNode);
nodeList->updateNodeWithDataFromPacket(message, senderNode);
}
void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
if (senderNode->getLinkedData()) {
AvatarMixerClientData* nodeData = dynamic_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
if (nodeData != nullptr) {
AvatarData& avatar = nodeData->getAvatar();
// parse the identity packet and update the change timestamp if appropriate
if (avatar.hasIdentityChangedAfterParsing(*packet)) {
if (avatar.hasIdentityChangedAfterParsing(message->getMessage())) {
QMutexLocker nodeDataLocker(&nodeData->getMutex());
nodeData->setIdentityChangeTimestamp(QDateTime::currentMSecsSinceEpoch());
}
@ -444,13 +444,13 @@ void AvatarMixer::handleAvatarIdentityPacket(QSharedPointer<NLPacket> packet, Sh
}
}
void AvatarMixer::handleAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void AvatarMixer::handleAvatarBillboardPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
AvatarMixerClientData* nodeData = dynamic_cast<AvatarMixerClientData*>(senderNode->getLinkedData());
if (nodeData) {
AvatarData& avatar = nodeData->getAvatar();
// parse the billboard packet and update the change timestamp if appropriate
if (avatar.hasBillboardChangedAfterParsing(*packet)) {
if (avatar.hasBillboardChangedAfterParsing(message->getMessage())) {
QMutexLocker nodeDataLocker(&nodeData->getMutex());
nodeData->setBillboardChangeTimestamp(QDateTime::currentMSecsSinceEpoch());
}
@ -458,8 +458,8 @@ void AvatarMixer::handleAvatarBillboardPacket(QSharedPointer<NLPacket> packet, S
}
}
void AvatarMixer::handleKillAvatarPacket(QSharedPointer<NLPacket> packet) {
DependencyManager::get<NodeList>()->processKillNode(*packet);
void AvatarMixer::handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message) {
DependencyManager::get<NodeList>()->processKillNode(*message);
}
void AvatarMixer::sendStatsPacket() {


@ -21,7 +21,7 @@
class AvatarMixer : public ThreadedAssignment {
Q_OBJECT
public:
AvatarMixer(NLPacket& packet);
AvatarMixer(ReceivedMessage& message);
~AvatarMixer();
public slots:
/// runs the avatar mixer
@ -32,10 +32,10 @@ public slots:
void sendStatsPacket();
private slots:
void handleAvatarDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleAvatarIdentityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleKillAvatarPacket(QSharedPointer<NLPacket> packet);
void handleAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleAvatarBillboardPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleKillAvatarPacket(QSharedPointer<ReceivedMessage> message);
void domainSettingsRequestComplete();
private:


@ -13,12 +13,12 @@
#include "AvatarMixerClientData.h"
int AvatarMixerClientData::parseData(NLPacket& packet) {
int AvatarMixerClientData::parseData(ReceivedMessage& message) {
// pull the sequence number from the data first
packet.readPrimitive(&_lastReceivedSequenceNumber);
message.readPrimitive(&_lastReceivedSequenceNumber);
// compute the offset to the data payload
return _avatar.parseDataFromBuffer(packet.readWithoutCopy(packet.bytesLeftToRead()));
return _avatar->parseDataFromBuffer(message.readWithoutCopy(message.getBytesLeftToRead()));
}
bool AvatarMixerClientData::checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid) {
@ -40,7 +40,7 @@ uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& node
}
void AvatarMixerClientData::loadJSONStats(QJsonObject& jsonObject) const {
jsonObject["display_name"] = _avatar.getDisplayName();
jsonObject["display_name"] = _avatar->getDisplayName();
jsonObject["full_rate_distance"] = _fullRateDistance;
jsonObject["max_av_distance"] = _maxAvatarDistance;
jsonObject["num_avs_sent_last_frame"] = _numAvatarsSentLastFrame;
@ -49,7 +49,7 @@ void AvatarMixerClientData::loadJSONStats(QJsonObject& jsonObject) const {
jsonObject["total_num_out_of_order_sends"] = _numOutOfOrderSends;
jsonObject[OUTBOUND_AVATAR_DATA_STATS_KEY] = getOutboundAvatarDataKbps();
jsonObject[INBOUND_AVATAR_DATA_STATS_KEY] = _avatar.getAverageBytesReceivedPerSecond() / (float) BYTES_PER_KILOBIT;
jsonObject[INBOUND_AVATAR_DATA_STATS_KEY] = _avatar->getAverageBytesReceivedPerSecond() / (float) BYTES_PER_KILOBIT;
jsonObject["av_data_receive_rate"] = _avatar.getReceiveRate();
jsonObject["av_data_receive_rate"] = _avatar->getReceiveRate();
}


@ -33,8 +33,8 @@ const QString INBOUND_AVATAR_DATA_STATS_KEY = "inbound_av_data_kbps";
class AvatarMixerClientData : public NodeData {
Q_OBJECT
public:
int parseData(NLPacket& packet);
AvatarData& getAvatar() { return _avatar; }
int parseData(ReceivedMessage& message) override;
AvatarData& getAvatar() { return *_avatar; }
bool checkAndSetHasReceivedFirstPacketsFrom(const QUuid& uuid);
@ -80,7 +80,7 @@ public:
void loadJSONStats(QJsonObject& jsonObject) const;
private:
AvatarData _avatar;
AvatarSharedPointer _avatar { new AvatarData() };
uint16_t _lastReceivedSequenceNumber { 0 };
std::unordered_map<QUuid, uint16_t> _lastBroadcastSequenceNumbers;


@ -22,8 +22,8 @@ const char* MODEL_SERVER_NAME = "Entity";
const char* MODEL_SERVER_LOGGING_TARGET_NAME = "entity-server";
const char* LOCAL_MODELS_PERSIST_FILE = "resources/models.svo";
EntityServer::EntityServer(NLPacket& packet) :
OctreeServer(packet),
EntityServer::EntityServer(ReceivedMessage& message) :
OctreeServer(message),
_entitySimulation(NULL)
{
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
@ -41,9 +41,9 @@ EntityServer::~EntityServer() {
tree->removeNewlyCreatedHook(this);
}
void EntityServer::handleEntityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void EntityServer::handleEntityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
if (_octreeInboundPacketProcessor) {
_octreeInboundPacketProcessor->queueReceivedPacket(packet, senderNode);
_octreeInboundPacketProcessor->queueReceivedPacket(message, senderNode);
}
}


@ -30,7 +30,7 @@ struct ViewerSendingStats {
class EntityServer : public OctreeServer, public NewlyCreatedEntityHook {
Q_OBJECT
public:
EntityServer(NLPacket& packet);
EntityServer(ReceivedMessage& message);
~EntityServer();
// Subclasses must implement these methods
@ -62,7 +62,7 @@ protected:
virtual OctreePointer createTree() override;
private slots:
void handleEntityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleEntityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
private:
EntitySimulation* _entitySimulation;


@ -20,13 +20,13 @@
const QString MESSAGES_MIXER_LOGGING_NAME = "messages-mixer";
MessagesMixer::MessagesMixer(NLPacket& packet) : ThreadedAssignment(packet)
MessagesMixer::MessagesMixer(ReceivedMessage& message) : ThreadedAssignment(message)
{
connect(DependencyManager::get<NodeList>().data(), &NodeList::nodeKilled, this, &MessagesMixer::nodeKilled);
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerMessageListener(PacketType::MessagesData, this, "handleMessages");
packetReceiver.registerMessageListener(PacketType::MessagesSubscribe, this, "handleMessagesSubscribe");
packetReceiver.registerMessageListener(PacketType::MessagesUnsubscribe, this, "handleMessagesUnsubscribe");
packetReceiver.registerListener(PacketType::MessagesData, this, "handleMessages");
packetReceiver.registerListener(PacketType::MessagesSubscribe, this, "handleMessagesSubscribe");
packetReceiver.registerListener(PacketType::MessagesUnsubscribe, this, "handleMessagesUnsubscribe");
}
void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
@ -35,10 +35,10 @@ void MessagesMixer::nodeKilled(SharedNodePointer killedNode) {
}
}
void MessagesMixer::handleMessages(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
void MessagesMixer::handleMessages(QSharedPointer<ReceivedMessage> receivedMessage, SharedNodePointer senderNode) {
QString channel, message;
QUuid senderID;
MessagesClient::decodeMessagesPacket(packetList, channel, message, senderID);
MessagesClient::decodeMessagesPacket(receivedMessage, channel, message, senderID);
auto nodeList = DependencyManager::get<NodeList>();
@ -53,13 +53,13 @@ void MessagesMixer::handleMessages(QSharedPointer<NLPacketList> packetList, Shar
});
}
void MessagesMixer::handleMessagesSubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
QString channel = QString::fromUtf8(packetList->getMessage());
void MessagesMixer::handleMessagesSubscribe(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
QString channel = QString::fromUtf8(message->getMessage());
_channelSubscribers[channel] << senderNode->getUUID();
}
void MessagesMixer::handleMessagesUnsubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode) {
QString channel = QString::fromUtf8(packetList->getMessage());
void MessagesMixer::handleMessagesUnsubscribe(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
QString channel = QString::fromUtf8(message->getMessage());
if (_channelSubscribers.contains(channel)) {
_channelSubscribers[channel].remove(senderNode->getUUID());
}


@ -21,7 +21,7 @@
class MessagesMixer : public ThreadedAssignment {
Q_OBJECT
public:
MessagesMixer(NLPacket& packet);
MessagesMixer(ReceivedMessage& message);
public slots:
void run();
@ -29,9 +29,9 @@ public slots:
void sendStatsPacket();
private slots:
void handleMessages(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
void handleMessagesSubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
void handleMessagesUnsubscribe(QSharedPointer<NLPacketList> packetList, SharedNodePointer senderNode);
void handleMessages(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleMessagesSubscribe(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleMessagesUnsubscribe(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
private:
QHash<QString,QSet<QUuid>> _channelSubscribers;


@ -75,7 +75,7 @@ void OctreeInboundPacketProcessor::midProcess() {
}
}
void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void OctreeInboundPacketProcessor::processPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
if (_shuttingDown) {
qDebug() << "OctreeInboundPacketProcessor::processPacket() while shutting down... ignoring incoming packet";
return;
@ -85,22 +85,22 @@ void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet
if (debugProcessPacket) {
qDebug("OctreeInboundPacketProcessor::processPacket() payload=%p payloadLength=%lld",
packet->getPayload(),
packet->getPayloadSize());
message->getRawMessage(),
message->getSize());
}
// Ask our tree subclass if it can handle the incoming packet...
PacketType packetType = packet->getType();
PacketType packetType = message->getType();
if (_myServer->getOctree()->handlesEditPacketType(packetType)) {
PerformanceWarning warn(debugProcessPacket, "processPacket KNOWN TYPE", debugProcessPacket);
_receivedPacketCount++;
unsigned short int sequence;
packet->readPrimitive(&sequence);
message->readPrimitive(&sequence);
quint64 sentAt;
packet->readPrimitive(&sentAt);
message->readPrimitive(&sentAt);
quint64 arrivedAt = usecTimestampNow();
if (sentAt > arrivedAt) {
@ -118,7 +118,7 @@ void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet
if (debugProcessPacket || _myServer->wantsDebugReceiving()) {
qDebug() << "PROCESSING THREAD: got '" << packetType << "' packet - " << _receivedPacketCount << " command from client";
qDebug() << " receivedBytes=" << packet->getDataSize();
qDebug() << " receivedBytes=" << message->getSize();
qDebug() << " sequence=" << sequence;
qDebug() << " sentAt=" << sentAt << " usecs";
qDebug() << " arrivedAt=" << arrivedAt << " usecs";
@ -132,29 +132,29 @@ void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet
qDebug() << " numBytesPacketHeader=" << NLPacket::totalHeaderSize(packetType);
qDebug() << " sizeof(sequence)=" << sizeof(sequence);
qDebug() << " sizeof(sentAt)=" << sizeof(sentAt);
qDebug() << " atByte (in payload)=" << packet->pos();
qDebug() << " payload size=" << packet->getPayloadSize();
qDebug() << " atByte (in payload)=" << message->getPosition();
qDebug() << " payload size=" << message->getSize();
if (!packet->bytesLeftToRead()) {
if (!message->getBytesLeftToRead()) {
qDebug() << " ----- UNEXPECTED ---- got a packet without any edit details!!!! --------";
}
}
const unsigned char* editData = nullptr;
while (packet->bytesLeftToRead() > 0) {
while (message->getBytesLeftToRead() > 0) {
editData = reinterpret_cast<const unsigned char*>(packet->getPayload() + packet->pos());
editData = reinterpret_cast<const unsigned char*>(message->getRawMessage() + message->getPosition());
int maxSize = packet->bytesLeftToRead();
int maxSize = message->getBytesLeftToRead();
if (debugProcessPacket) {
qDebug() << " --- inside while loop ---";
qDebug() << " maxSize=" << maxSize;
qDebug("OctreeInboundPacketProcessor::processPacket() %hhu "
"payload=%p payloadLength=%lld editData=%p payloadPosition=%lld maxSize=%d",
packetType, packet->getPayload(), packet->getPayloadSize(), editData,
packet->pos(), maxSize);
packetType, message->getRawMessage(), message->getSize(), editData,
message->getPosition(), maxSize);
}
quint64 startProcess, startLock = usecTimestampNow();
@ -162,7 +162,7 @@ void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet
_myServer->getOctree()->withWriteLock([&] {
startProcess = usecTimestampNow();
editDataBytesRead =
_myServer->getOctree()->processEditPacketData(*packet, editData, maxSize, sendingNode);
_myServer->getOctree()->processEditPacketData(*message, editData, maxSize, sendingNode);
});
quint64 endProcess = usecTimestampNow();
@ -178,12 +178,12 @@ void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet
lockWaitTime += thisLockWaitTime;
// skip to next edit record in the packet
packet->seek(packet->pos() + editDataBytesRead);
message->seek(message->getPosition() + editDataBytesRead);
if (debugProcessPacket) {
qDebug() << " editDataBytesRead=" << editDataBytesRead;
qDebug() << " AFTER processEditPacketData payload position=" << packet->pos();
qDebug() << " AFTER processEditPacketData payload size=" << packet->getPayloadSize();
qDebug() << " AFTER processEditPacketData payload position=" << message->getPosition();
qDebug() << " AFTER processEditPacketData payload size=" << message->getSize();
}
}
@ -191,7 +191,7 @@ void OctreeInboundPacketProcessor::processPacket(QSharedPointer<NLPacket> packet
if (debugProcessPacket) {
qDebug("OctreeInboundPacketProcessor::processPacket() DONE LOOPING FOR %hhu "
"payload=%p payloadLength=%lld editData=%p payloadPosition=%lld",
packetType, packet->getPayload(), packet->getPayloadSize(), editData, packet->pos());
packetType, message->getRawMessage(), message->getSize(), editData, message->getPosition());
}
// Make sure our Node and NodeList knows we've heard from this node.


@ -78,7 +78,7 @@ public:
protected:
virtual void processPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
virtual void processPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
virtual unsigned long getMaxWait() const;
virtual void preProcess();


@ -33,8 +33,6 @@ OctreeQueryNode::OctreeQueryNode() :
_lastTimeBagEmpty(0),
_viewFrustumChanging(false),
_viewFrustumJustStoppedChanging(true),
_currentPacketIsColor(true),
_currentPacketIsCompressed(false),
_octreeSendThread(NULL),
_lastClientBoundaryLevelAdjust(0),
_lastClientOctreeSizeScale(DEFAULT_OCTREE_SIZE_SCALE),
@ -179,14 +177,9 @@ void OctreeQueryNode::resetOctreePacket() {
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
// the clients requested color state.
_currentPacketIsCompressed = getWantCompression();
OCTREE_PACKET_FLAGS flags = 0;
if (_currentPacketIsColor) {
setAtBit(flags, PACKET_IS_COLOR_BIT);
}
if (_currentPacketIsCompressed) {
setAtBit(flags, PACKET_IS_COMPRESSED_BIT);
}
setAtBit(flags, PACKET_IS_COLOR_BIT); // always color
setAtBit(flags, PACKET_IS_COMPRESSED_BIT); // always compressed
_octreePacket->reset();
@ -211,10 +204,9 @@ void OctreeQueryNode::writeToPacket(const unsigned char* buffer, unsigned int by
// compressed packets include lead bytes which contain compressed size, this allows packing of
// multiple compressed portions together
if (_currentPacketIsCompressed) {
OCTREE_PACKET_INTERNAL_SECTION_SIZE sectionSize = bytes;
_octreePacket->writePrimitive(sectionSize);
}
OCTREE_PACKET_INTERNAL_SECTION_SIZE sectionSize = bytes;
_octreePacket->writePrimitive(sectionSize);
if (bytes <= _octreePacket->bytesAvailableForWrite()) {
_octreePacket->write(reinterpret_cast<const char*>(buffer), bytes);
_octreePacketWaiting = true;
@ -370,11 +362,11 @@ const NLPacket* OctreeQueryNode::getNextNackedPacket() {
return nullptr;
}
void OctreeQueryNode::parseNackPacket(NLPacket& packet) {
void OctreeQueryNode::parseNackPacket(ReceivedMessage& message) {
// read sequence numbers
while (packet.bytesLeftToRead()) {
while (message.getBytesLeftToRead()) {
OCTREE_PACKET_SEQUENCE sequenceNumber;
packet.readPrimitive(&sequenceNumber);
message.readPrimitive(&sequenceNumber);
_nackedSequenceNumbers.enqueue(sequenceNumber);
}
}


@ -75,12 +75,6 @@ public:
quint64 getLastTimeBagEmpty() const { return _lastTimeBagEmpty; }
void setLastTimeBagEmpty() { _lastTimeBagEmpty = _sceneSendStartTime; }
bool getCurrentPacketIsColor() const { return _currentPacketIsColor; }
bool getCurrentPacketIsCompressed() const { return _currentPacketIsCompressed; }
bool getCurrentPacketFormatMatches() {
return (getCurrentPacketIsCompressed() == getWantCompression());
}
bool hasLodChanged() const { return _lodChanged; }
OctreeSceneStats stats;
@ -106,7 +100,7 @@ public:
OCTREE_PACKET_SEQUENCE getSequenceNumber() const { return _sequenceNumber; }
void parseNackPacket(NLPacket& packet);
void parseNackPacket(ReceivedMessage& message);
bool hasNextNackedPacket() const;
const NLPacket* getNextNackedPacket();
@ -133,8 +127,6 @@ private:
quint64 _lastTimeBagEmpty;
bool _viewFrustumChanging;
bool _viewFrustumJustStoppedChanging;
bool _currentPacketIsColor;
bool _currentPacketIsCompressed;
OctreeSendThread* _octreeSendThread;


@ -309,36 +309,29 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
int truePacketsSent = 0;
int trueBytesSent = 0;
int packetsSentThisInterval = 0;
bool isFullScene = ((!viewFrustumChanged || !nodeData->getWantDelta()) && nodeData->getViewFrustumJustStoppedChanging())
bool isFullScene = ((!viewFrustumChanged) && nodeData->getViewFrustumJustStoppedChanging())
|| nodeData->hasLodChanged();
bool somethingToSend = true; // assume we have something
// FOR NOW... node tells us if it wants to receive only view frustum deltas
bool wantDelta = viewFrustumChanged && nodeData->getWantDelta();
// If our packet already has content in it, then we must use the color choice of the waiting packet.
// If we're starting a fresh packet, then...
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
// the clients requested color state.
bool wantCompression = nodeData->getWantCompression();
// If we have a packet waiting, and our desired want color, doesn't match the current waiting packets color
// then let's just send that waiting packet.
if (!nodeData->getCurrentPacketFormatMatches()) {
if (nodeData->isPacketWaiting()) {
packetsSentThisInterval += handlePacketSend(nodeData, trueBytesSent, truePacketsSent);
} else {
nodeData->resetOctreePacket();
}
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
if (wantCompression) {
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
}
_packetData.changeSettings(wantCompression, targetSize);
if (nodeData->isPacketWaiting()) {
packetsSentThisInterval += handlePacketSend(nodeData, trueBytesSent, truePacketsSent);
} else {
nodeData->resetOctreePacket();
}
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
const ViewFrustum* lastViewFrustum = wantDelta ? &nodeData->getLastKnownViewFrustum() : NULL;
_packetData.changeSettings(true, targetSize); // FIXME - eventually support only compressed packets
const ViewFrustum* lastViewFrustum = viewFrustumChanged ? &nodeData->getLastKnownViewFrustum() : NULL;
// If the current view frustum has changed OR we have nothing to send, then search against
// the current view frustum for things to send.
@ -351,11 +344,6 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
}
}
if (!viewFrustumChanged && !nodeData->getWantDelta()) {
// only set our last sent time if we weren't resetting due to frustum change
nodeData->setLastTimeBagEmpty();
}
// track completed scenes and send out the stats packet accordingly
nodeData->stats.sceneCompleted();
nodeData->setLastRootTimestamp(_myServer->getOctree()->getRoot()->getLastChanged());
@ -452,11 +440,11 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
float octreeSizeScale = nodeData->getOctreeSizeScale();
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
int boundaryLevelAdjust = boundaryLevelAdjustClient + (viewFrustumChanged && nodeData->getWantLowResMoving()
? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
int boundaryLevelAdjust = boundaryLevelAdjustClient +
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
EncodeBitstreamParams params(INT_MAX, &nodeData->getCurrentViewFrustum(),
WANT_EXISTS_BITS, DONT_CHOP, wantDelta, lastViewFrustum,
WANT_EXISTS_BITS, DONT_CHOP, viewFrustumChanged, lastViewFrustum,
boundaryLevelAdjust, octreeSizeScale,
nodeData->getLastTimeBagEmpty(),
isFullScene, &nodeData->stats, _myServer->getJurisdiction(),
@ -522,8 +510,7 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
// if for some reason the finalized size is greater than our available size, then probably the "compressed"
// form actually inflated beyond our padding, and in this case we will send the current packet, then
// write to our new packet...
unsigned int writtenSize = _packetData.getFinalizedSize()
+ (nodeData->getCurrentPacketIsCompressed() ? sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) : 0);
unsigned int writtenSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
if (writtenSize > nodeData->getAvailable()) {
packetsSentThisInterval += handlePacketSend(nodeData, trueBytesSent, truePacketsSent);
@ -539,8 +526,7 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
// the packet doesn't have enough space to bother attempting to pack more...
bool sendNow = true;
if (nodeData->getCurrentPacketIsCompressed() &&
nodeData->getAvailable() >= MINIMUM_ATTEMPT_MORE_PACKING &&
if (nodeData->getAvailable() >= MINIMUM_ATTEMPT_MORE_PACKING &&
extraPackingAttempts <= REASONABLE_NUMBER_OF_PACKING_ATTEMPTS) {
sendNow = false; // try to pack more
}
@ -552,9 +538,7 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
quint64 packetSendingEnd = usecTimestampNow();
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
if (wantCompression) {
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
}
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
} else {
// If we're in compressed mode, then we want to see if we have room for more in this wire packet.
// but we've finalized the _packetData, so we want to start a new section, we will do that by
@ -564,7 +548,7 @@ int OctreeSendThread::packetDistributor(OctreeQueryNode* nodeData, bool viewFrus
// a larger compressed size then uncompressed size
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
}
_packetData.changeSettings(nodeData->getWantCompression(), targetSize); // will do reset
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
}
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);


@ -210,8 +210,8 @@ void OctreeServer::trackProcessWaitTime(float time) {
_averageProcessWaitTime.updateAverage(time);
}
OctreeServer::OctreeServer(NLPacket& packet) :
ThreadedAssignment(packet),
OctreeServer::OctreeServer(ReceivedMessage& message) :
ThreadedAssignment(message),
_argc(0),
_argv(NULL),
_parsedArgV(NULL),
@ -878,12 +878,12 @@ void OctreeServer::parsePayload() {
}
}
void OctreeServer::handleOctreeQueryPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void OctreeServer::handleOctreeQueryPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
if (!_isFinished) {
// If we got a query packet, then we're talking to an agent, and we
// need to make sure we have it in our nodeList.
auto nodeList = DependencyManager::get<NodeList>();
nodeList->updateNodeWithDataFromPacket(packet, senderNode);
nodeList->updateNodeWithDataFromPacket(message, senderNode);
OctreeQueryNode* nodeData = dynamic_cast<OctreeQueryNode*>(senderNode->getLinkedData());
if (nodeData && !nodeData->isOctreeSendThreadInitalized()) {
@ -892,17 +892,17 @@ void OctreeServer::handleOctreeQueryPacket(QSharedPointer<NLPacket> packet, Shar
}
}
void OctreeServer::handleOctreeDataNackPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
void OctreeServer::handleOctreeDataNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
// If we got a nack packet, then we're talking to an agent, and we
// need to make sure we have it in our nodeList.
OctreeQueryNode* nodeData = dynamic_cast<OctreeQueryNode*>(senderNode->getLinkedData());
if (nodeData) {
nodeData->parseNackPacket(*packet);
nodeData->parseNackPacket(*message);
}
}
void OctreeServer::handleJurisdictionRequestPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
_jurisdictionSender->queueReceivedPacket(packet, senderNode);
void OctreeServer::handleJurisdictionRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
_jurisdictionSender->queueReceivedPacket(message, senderNode);
}
bool OctreeServer::readOptionBool(const QString& optionName, const QJsonObject& settingsSectionObject, bool& result) {


@ -34,7 +34,7 @@ const int DEFAULT_PACKETS_PER_INTERVAL = 2000; // some 120,000 packets per secon
class OctreeServer : public ThreadedAssignment, public HTTPRequestHandler {
Q_OBJECT
public:
OctreeServer(NLPacket& packet);
OctreeServer(ReceivedMessage& message);
~OctreeServer();
/// allows setting of run arguments
@ -135,9 +135,9 @@ public slots:
private slots:
void domainSettingsRequestComplete();
void handleOctreeQueryPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleOctreeDataNackPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleJurisdictionRequestPacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleOctreeQueryPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleOctreeDataNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
void handleJurisdictionRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
protected:
virtual OctreePointer createTree() = 0;


@ -51,15 +51,15 @@ const NodeSet STATICALLY_ASSIGNED_NODES = NodeSet() << NodeType::AudioMixer
<< NodeType::AssetServer
<< NodeType::MessagesMixer;
void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<NLPacket> packet) {
if (packet->getPayloadSize() == 0) {
void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<ReceivedMessage> message) {
if (message->getSize() == 0) {
return;
}
QDataStream packetStream(packet.data());
QDataStream packetStream(message->getMessage());
// read a NodeConnectionData object from the packet so we can pass around this data while we're inspecting it
NodeConnectionData nodeConnection = NodeConnectionData::fromDataStream(packetStream, packet->getSenderSockAddr());
NodeConnectionData nodeConnection = NodeConnectionData::fromDataStream(packetStream, message->getSenderSockAddr());
if (nodeConnection.localSockAddr.isNull() || nodeConnection.publicSockAddr.isNull()) {
qDebug() << "Unexpected data received for node local socket or public socket. Will not allow connection.";
@ -72,7 +72,7 @@ void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<NLPacket> pack
if (!VALID_NODE_TYPES.contains(nodeConnection.nodeType)) {
qDebug() << "Received an invalid node type with connect request. Will not allow connection from"
<< nodeConnection.senderSockAddr;
<< nodeConnection.senderSockAddr << ": " << nodeConnection.nodeType;
return;
}
@ -87,11 +87,11 @@ void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<NLPacket> pack
QString username;
QByteArray usernameSignature;
if (packet->bytesLeftToRead() > 0) {
if (message->getBytesLeftToRead() > 0) {
// read username from packet
packetStream >> username;
if (packet->bytesLeftToRead() > 0) {
if (message->getBytesLeftToRead() > 0) {
// read user signature from packet
packetStream >> usernameSignature;
}
@ -103,14 +103,14 @@ void DomainGatekeeper::processConnectRequestPacket(QSharedPointer<NLPacket> pack
if (node) {
// set the sending sock addr and node interest set on this node
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(node->getLinkedData());
nodeData->setSendingSockAddr(packet->getSenderSockAddr());
nodeData->setSendingSockAddr(message->getSenderSockAddr());
nodeData->setNodeInterestSet(nodeConnection.interestList.toSet());
// signal that we just connected a node so the DomainServer can get it a list
// and broadcast its presence right away
emit connectedNode(node);
} else {
qDebug() << "Refusing connection from node at" << packet->getSenderSockAddr();
qDebug() << "Refusing connection from node at" << message->getSenderSockAddr();
}
}
@ -572,10 +572,10 @@ void DomainGatekeeper::handlePeerPingTimeout() {
}
}
void DomainGatekeeper::processICEPeerInformationPacket(QSharedPointer<NLPacket> packet) {
void DomainGatekeeper::processICEPeerInformationPacket(QSharedPointer<ReceivedMessage> message) {
// loop through the packet and pull out network peers
// any peer we don't have we add to the hash, otherwise we update
QDataStream iceResponseStream(packet.data());
QDataStream iceResponseStream(message->getMessage());
NetworkPeer* receivedPeer = new NetworkPeer;
iceResponseStream >> *receivedPeer;
@ -600,15 +600,15 @@ void DomainGatekeeper::processICEPeerInformationPacket(QSharedPointer<NLPacket>
}
}
void DomainGatekeeper::processICEPingPacket(QSharedPointer<NLPacket> packet) {
void DomainGatekeeper::processICEPingPacket(QSharedPointer<ReceivedMessage> message) {
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
auto pingReplyPacket = limitedNodeList->constructICEPingReplyPacket(*packet, limitedNodeList->getSessionUUID());
auto pingReplyPacket = limitedNodeList->constructICEPingReplyPacket(*message, limitedNodeList->getSessionUUID());
limitedNodeList->sendPacket(std::move(pingReplyPacket), packet->getSenderSockAddr());
limitedNodeList->sendPacket(std::move(pingReplyPacket), message->getSenderSockAddr());
}
void DomainGatekeeper::processICEPingReplyPacket(QSharedPointer<NLPacket> packet) {
QDataStream packetStream(packet.data());
void DomainGatekeeper::processICEPingReplyPacket(QSharedPointer<ReceivedMessage> message) {
QDataStream packetStream(message->getMessage());
QUuid nodeUUID;
packetStream >> nodeUUID;
@ -617,6 +617,6 @@ void DomainGatekeeper::processICEPingReplyPacket(QSharedPointer<NLPacket> packet
if (sendingPeer) {
// we had this NetworkPeer in our connecting list - add the right sock addr to our connected list
sendingPeer->activateMatchingOrNewSymmetricSocket(packet->getSenderSockAddr());
sendingPeer->activateMatchingOrNewSymmetricSocket(message->getSenderSockAddr());
}
}


@ -41,10 +41,10 @@ public:
void removeICEPeer(const QUuid& peerUUID) { _icePeers.remove(peerUUID); }
public slots:
void processConnectRequestPacket(QSharedPointer<NLPacket> packet);
void processICEPingPacket(QSharedPointer<NLPacket> packet);
void processICEPingReplyPacket(QSharedPointer<NLPacket> packet);
void processICEPeerInformationPacket(QSharedPointer<NLPacket> packet);
void processConnectRequestPacket(QSharedPointer<ReceivedMessage> message);
void processICEPingPacket(QSharedPointer<ReceivedMessage> message);
void processICEPingReplyPacket(QSharedPointer<ReceivedMessage> message);
void processICEPeerInformationPacket(QSharedPointer<ReceivedMessage> message);
void publicKeyJSONCallback(QNetworkReply& requestReply);


@ -273,7 +273,7 @@ void DomainServer::setupNodeListAndAssignments(const QUuid& sessionUUID) {
packetReceiver.registerListener(PacketType::RequestAssignment, this, "processRequestAssignmentPacket");
packetReceiver.registerListener(PacketType::DomainListRequest, this, "processListRequestPacket");
packetReceiver.registerListener(PacketType::DomainServerPathQuery, this, "processPathQueryPacket");
packetReceiver.registerMessageListener(PacketType::NodeJsonStats, this, "processNodeJSONStatsPacket");
packetReceiver.registerListener(PacketType::NodeJsonStats, this, "processNodeJSONStatsPacket");
packetReceiver.registerListener(PacketType::DomainDisconnectRequest, this, "processNodeDisconnectRequestPacket");
// NodeList won't be available to the settings manager when it is created, so call registerListener here
@ -578,10 +578,10 @@ void DomainServer::populateDefaultStaticAssignmentsExcludingTypes(const QSet<Ass
}
}
void DomainServer::processListRequestPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void DomainServer::processListRequestPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
QDataStream packetStream(packet.data());
NodeConnectionData nodeRequestData = NodeConnectionData::fromDataStream(packetStream, packet->getSenderSockAddr(), false);
QDataStream packetStream(message->getMessage());
NodeConnectionData nodeRequestData = NodeConnectionData::fromDataStream(packetStream, message->getSenderSockAddr(), false);
// update this node's sockets in case they have changed
sendingNode->setPublicSocket(nodeRequestData.publicSockAddr);
@ -591,7 +591,7 @@ void DomainServer::processListRequestPacket(QSharedPointer<NLPacket> packet, Sha
DomainServerNodeData* nodeData = reinterpret_cast<DomainServerNodeData*>(sendingNode->getLinkedData());
nodeData->setNodeInterestSet(nodeRequestData.interestList.toSet());
sendDomainListToNode(sendingNode, packet->getSenderSockAddr());
sendDomainListToNode(sendingNode, message->getSenderSockAddr());
}
unsigned int DomainServer::countConnectedUsers() {
@ -764,9 +764,9 @@ void DomainServer::broadcastNewNode(const SharedNodePointer& addedNode) {
);
}
void DomainServer::processRequestAssignmentPacket(QSharedPointer<NLPacket> packet) {
void DomainServer::processRequestAssignmentPacket(QSharedPointer<ReceivedMessage> message) {
// construct the requested assignment from the packet data
Assignment requestAssignment(*packet);
Assignment requestAssignment(*message);
// Suppress these for Assignment::AgentType to once per 5 seconds
static QElapsedTimer noisyMessageTimer;
@ -784,14 +784,14 @@ void DomainServer::processRequestAssignmentPacket(QSharedPointer<NLPacket> packe
static QString repeatedMessage = LogHandler::getInstance().addOnlyOnceMessageRegex
("Received a request for assignment type [^ ]+ from [^ ]+");
qDebug() << "Received a request for assignment type" << requestAssignment.getType()
<< "from" << packet->getSenderSockAddr();
<< "from" << message->getSenderSockAddr();
noisyMessageTimer.restart();
}
SharedAssignmentPointer assignmentToDeploy = deployableAssignmentForRequest(requestAssignment);
if (assignmentToDeploy) {
qDebug() << "Deploying assignment -" << *assignmentToDeploy.data() << "- to" << packet->getSenderSockAddr();
qDebug() << "Deploying assignment -" << *assignmentToDeploy.data() << "- to" << message->getSenderSockAddr();
// give this assignment out, either the type matches or the requestor said they will take any
static std::unique_ptr<NLPacket> assignmentPacket;
@ -812,7 +812,7 @@ void DomainServer::processRequestAssignmentPacket(QSharedPointer<NLPacket> packe
assignmentStream << uniqueAssignment;
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
limitedNodeList->sendUnreliablePacket(*assignmentPacket, packet->getSenderSockAddr());
limitedNodeList->sendUnreliablePacket(*assignmentPacket, message->getSenderSockAddr());
// give the information for that deployed assignment to the gatekeeper so it knows to let that node
// in when it comes back around
@ -824,7 +824,7 @@ void DomainServer::processRequestAssignmentPacket(QSharedPointer<NLPacket> packe
static QString repeatedMessage = LogHandler::getInstance().addOnlyOnceMessageRegex
("Unable to fulfill assignment request of type [^ ]+ from [^ ]+");
qDebug() << "Unable to fulfill assignment request of type" << requestAssignment.getType()
<< "from" << packet->getSenderSockAddr();
<< "from" << message->getSenderSockAddr();
noisyMessageTimer.restart();
}
}
@ -993,7 +993,7 @@ void DomainServer::sendHeartbeatToIceServer() {
DependencyManager::get<LimitedNodeList>()->sendHeartbeatToIceServer(_iceServerSocket);
}
void DomainServer::processNodeJSONStatsPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer sendingNode) {
void DomainServer::processNodeJSONStatsPacket(QSharedPointer<ReceivedMessage> packetList, SharedNodePointer sendingNode) {
auto nodeData = dynamic_cast<DomainServerNodeData*>(sendingNode->getLinkedData());
if (nodeData) {
nodeData->updateJSONStats(packetList->getMessage());
@ -1767,17 +1767,17 @@ void DomainServer::addStaticAssignmentsToQueue() {
}
}
void DomainServer::processPathQueryPacket(QSharedPointer<NLPacket> packet) {
void DomainServer::processPathQueryPacket(QSharedPointer<ReceivedMessage> message) {
// this is a query for the viewpoint resulting from a path
// first pull the query path from the packet
// figure out how many bytes the sender said this path is
quint16 numPathBytes;
packet->readPrimitive(&numPathBytes);
message->readPrimitive(&numPathBytes);
if (numPathBytes <= packet->bytesLeftToRead()) {
if (numPathBytes <= message->getBytesLeftToRead()) {
// the number of path bytes makes sense for the sent packet - pull out the path
QString pathQuery = QString::fromUtf8(packet->getPayload() + packet->pos(), numPathBytes);
QString pathQuery = QString::fromUtf8(message->getRawMessage() + message->getPosition(), numPathBytes);
// our settings contain paths that start with a leading slash, so make sure this query has that
if (!pathQuery.startsWith("/")) {
@ -1825,7 +1825,7 @@ void DomainServer::processPathQueryPacket(QSharedPointer<NLPacket> packet) {
// send off the packet - see if we can associate this outbound data to a particular node
// TODO: does this senderSockAddr always work for a punched DS client?
nodeList->sendPacket(std::move(pathResponsePacket), packet->getSenderSockAddr());
nodeList->sendPacket(std::move(pathResponsePacket), message->getSenderSockAddr());
}
}
@ -1837,11 +1837,11 @@ void DomainServer::processPathQueryPacket(QSharedPointer<NLPacket> packet) {
}
}
void DomainServer::processNodeDisconnectRequestPacket(QSharedPointer<NLPacket> packet) {
void DomainServer::processNodeDisconnectRequestPacket(QSharedPointer<ReceivedMessage> message) {
// This packet has been matched to a source node and they're asking not to be in the domain anymore
auto limitedNodeList = DependencyManager::get<LimitedNodeList>();
const QUuid& nodeUUID = packet->getSourceID();
const QUuid& nodeUUID = message->getSourceID();
qDebug() << "Received a disconnect request from node with UUID" << nodeUUID;

View file

@ -56,11 +56,11 @@ public slots:
void restart();
void processRequestAssignmentPacket(QSharedPointer<NLPacket> packet);
void processListRequestPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processNodeJSONStatsPacket(QSharedPointer<NLPacketList> packetList, SharedNodePointer sendingNode);
void processPathQueryPacket(QSharedPointer<NLPacket> packet);
void processNodeDisconnectRequestPacket(QSharedPointer<NLPacket> packet);
void processRequestAssignmentPacket(QSharedPointer<ReceivedMessage> packet);
void processListRequestPacket(QSharedPointer<ReceivedMessage> packet, SharedNodePointer sendingNode);
void processNodeJSONStatsPacket(QSharedPointer<ReceivedMessage> packetList, SharedNodePointer sendingNode);
void processPathQueryPacket(QSharedPointer<ReceivedMessage> packet);
void processNodeDisconnectRequestPacket(QSharedPointer<ReceivedMessage> message);
private slots:
void aboutToQuit();

View file

@ -67,9 +67,9 @@ DomainServerSettingsManager::DomainServerSettingsManager() :
QMetaObject::invokeMethod(QCoreApplication::instance(), "quit", Qt::QueuedConnection);
}
void DomainServerSettingsManager::processSettingsRequestPacket(QSharedPointer<NLPacket> packet) {
void DomainServerSettingsManager::processSettingsRequestPacket(QSharedPointer<ReceivedMessage> message) {
Assignment::Type type;
packet->readPrimitive(&type);
message->readPrimitive(&type);
QJsonObject responseObject = responseObjectForType(QString::number(type));
auto json = QJsonDocument(responseObject).toJson();
@ -79,7 +79,7 @@ void DomainServerSettingsManager::processSettingsRequestPacket(QSharedPointer<NL
packetList->write(json);
auto nodeList = DependencyManager::get<LimitedNodeList>();
nodeList->sendPacketList(std::move(packetList), packet->getSenderSockAddr());
nodeList->sendPacketList(std::move(packetList), message->getSenderSockAddr());
}
void DomainServerSettingsManager::setupConfigMap(const QStringList& argumentList) {

View file

@ -18,7 +18,7 @@
#include <HifiConfigVariantMap.h>
#include <HTTPManager.h>
#include <NLPacket.h>
#include <ReceivedMessage.h>
const QString SETTINGS_PATHS_KEY = "paths";
@ -42,7 +42,7 @@ public:
QVariantMap& getSettingsMap() { return _configMap.getMergedConfig(); }
private slots:
void processSettingsRequestPacket(QSharedPointer<NLPacket> packet);
void processSettingsRequestPacket(QSharedPointer<ReceivedMessage> message);
private:
QJsonObject responseObjectForType(const QString& typeValue, bool isAuthenticated = false);

View file

@ -34,9 +34,10 @@ var BUMPER_ON_VALUE = 0.5;
// distant manipulation
//
var DISTANCE_HOLDING_RADIUS_FACTOR = 5; // multiplied by distance between hand and object
var DISTANCE_HOLDING_RADIUS_FACTOR = 3.5; // multiplied by distance between hand and object
var DISTANCE_HOLDING_ACTION_TIMEFRAME = 0.1; // how quickly objects move to their new position
var DISTANCE_HOLDING_ROTATION_EXAGGERATION_FACTOR = 2.0; // object rotates this much more than hand did
var MOVE_WITH_HEAD = true; // experimental head-control of distantly held objects
var NO_INTERSECT_COLOR = {
red: 10,
@ -658,6 +659,13 @@ function MyController(hand) {
this.currentObjectTime = now;
this.handRelativePreviousPosition = Vec3.subtract(handControllerPosition, MyAvatar.position);
this.handPreviousRotation = handRotation;
this.currentCameraOrientation = Camera.orientation;
// compute a constant based on the initial conditions which we use below to exaggerate hand motion onto the held object
this.radiusScalar = Math.log(Vec3.distance(this.currentObjectPosition, handControllerPosition) + 1.0);
if (this.radiusScalar < 1.0) {
this.radiusScalar = 1.0;
}
this.actionID = NULL_ACTION_ID;
this.actionID = Entities.addAction("spring", this.grabbedEntity, {
@ -689,8 +697,6 @@ function MyController(hand) {
this.currentAvatarOrientation = MyAvatar.orientation;
this.overlayLineOff();
};
this.continueDistanceHolding = function() {
@ -719,8 +725,12 @@ function MyController(hand) {
this.lineOn(handPosition, Vec3.subtract(grabbedProperties.position, handPosition), INTERSECT_COLOR);
// the action was set up on a previous call. update the targets.
var radius = Math.max(Vec3.distance(this.currentObjectPosition, handControllerPosition) *
DISTANCE_HOLDING_RADIUS_FACTOR, DISTANCE_HOLDING_RADIUS_FACTOR);
var radius = Vec3.distance(this.currentObjectPosition, handControllerPosition) *
this.radiusScalar * DISTANCE_HOLDING_RADIUS_FACTOR;
if (radius < 1.0) {
radius = 1.0;
}
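// Illustrative note, not part of the original script: with the new constants above,
// grabbing an object 3 m from the hand gives radiusScalar = ln(3 + 1), about 1.39, so the
// multiplier here is roughly 3 * 1.39 * 3.5, about 14.6. Hand motion is magnified around
// 15x onto the held object at that distance, and the clamp keeps it from dropping below 1x.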
// how far did avatar move this timestep?
var currentPosition = MyAvatar.position;
var avatarDeltaPosition = Vec3.subtract(currentPosition, this.currentAvatarPosition);
@ -751,11 +761,11 @@ function MyController(hand) {
var handMoved = Vec3.subtract(handToAvatar, this.handRelativePreviousPosition);
this.handRelativePreviousPosition = handToAvatar;
// magnify the hand movement but not the change from avatar movement & rotation
// magnify the hand movement but not the change from avatar movement & rotation
handMoved = Vec3.subtract(handMoved, handMovementFromTurning);
var superHandMoved = Vec3.multiply(handMoved, radius);
// Move the object by the magnified amount and then by amount from avatar movement & rotation
// Move the object by the magnified amount and then by amount from avatar movement & rotation
var newObjectPosition = Vec3.sum(this.currentObjectPosition, superHandMoved);
newObjectPosition = Vec3.sum(newObjectPosition, avatarDeltaPosition);
newObjectPosition = Vec3.sum(newObjectPosition, objectMovementFromTurning);
@ -777,6 +787,16 @@ function MyController(hand) {
Entities.callEntityMethod(this.grabbedEntity, "continueDistantGrab");
// mix in head motion
if (MOVE_WITH_HEAD) {
var objDistance = Vec3.length(objectToAvatar);
var before = Vec3.multiplyQbyV(this.currentCameraOrientation, { x: 0.0, y: 0.0, z: objDistance });
var after = Vec3.multiplyQbyV(Camera.orientation, { x: 0.0, y: 0.0, z: objDistance });
var change = Vec3.subtract(before, after);
this.currentCameraOrientation = Camera.orientation;
this.currentObjectPosition = Vec3.sum(this.currentObjectPosition, change);
}
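// Illustrative note, not part of the original script: 'before' and 'after' are the same
// camera-relative offset (objDistance along the camera z axis) expressed in the previous
// and current camera orientations, so 'change' translates the held object by however much
// the look direction swung this frame, which keeps it roughly in front of the head.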
Entities.updateAction(this.grabbedEntity, this.actionID, {
targetPosition: this.currentObjectPosition,
linearTimeScale: DISTANCE_HOLDING_ACTION_TIMEFRAME,

View file

@ -0,0 +1,226 @@
// earthquakes_live.js
//
// exploratory implementation in prep for abstract latlong to earth graphing tool for VR
// shows all of the quakes in the past 24 hours reported by the USGS
//
// created by james b. pollack @imgntn on 12/5/2015
// Copyright 2015 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
// working notes: maybe try doing markers as boxes, rotated to the sphere normal, and with the height representing some value
Script.include('../libraries/promise.js');
var Promise = loadPromise();
Script.include('../libraries/tinyColor.js');
var tinyColor = loadTinyColor();
//you could make it the size of the actual earth.
var EARTH_SPHERE_RADIUS = 6371;
var EARTH_SPHERE_RADIUS = 2;
var EARTH_CENTER_POSITION = Vec3.sum(Vec3.sum(MyAvatar.position, {
x: 0,
y: 0.5,
z: 0
}), Vec3.multiply(EARTH_SPHERE_RADIUS, Quat.getFront(Camera.getOrientation())));
var EARTH_MODEL_URL = 'http://hifi-content.s3.amazonaws.com/james/earthquakes_live/models/earth-noclouds.fbx';
var SHOULD_SPIN=false;
var POLL_FOR_CHANGES = true;
//USGS updates the data every five minutes
var CHECK_QUAKE_FREQUENCY = 5 * 60 * 1000;
var QUAKE_MARKER_DIMENSIONS = {
x: 0.01,
y: 0.01,
z: 0.01
};
function createEarth() {
var earthProperties = {
name: 'Earth',
type: 'Model',
modelURL: EARTH_MODEL_URL,
position: EARTH_CENTER_POSITION,
dimensions: {
x: EARTH_SPHERE_RADIUS,
y: EARTH_SPHERE_RADIUS,
z: EARTH_SPHERE_RADIUS
},
rotation: Quat.fromPitchYawRollDegrees(0, 90, 0),
// collisionsWillMove: true,
//if you have a shapetype it blocks the smaller markers
// shapeType:'sphere'
// userData: JSON.stringify({
// grabbableKey: {
// grabbable: false
// }
// })
}
return Entities.addEntity(earthProperties)
}
function latLongToVector3(lat, lon, radius, height) {
var phi = (lat) * Math.PI / 180;
var theta = (lon - 180) * Math.PI / 180;
var x = -(radius + height) * Math.cos(phi) * Math.cos(theta);
var y = (radius + height) * Math.sin(phi);
var z = (radius + height) * Math.cos(phi) * Math.sin(theta);
return {
x: x,
y: y,
z: z
};
}
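// Illustrative sketch, not part of the original script: a quick sanity check of the
// lat/long mapping above using hypothetical sample values. lat 0, lon 180 gives
// phi = 0 and theta = 0, so with radius 1 and height 0 the offset from the earth
// center should be roughly { x: -1, y: 0, z: 0 }.
var sampleOffset = latLongToVector3(0, 180, 1, 0);
print('sample lat/long offset: ' + JSON.stringify(sampleOffset));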
function getQuakePosition(earthquake) {
var longitude = earthquake.geometry.coordinates[0];
var latitude = earthquake.geometry.coordinates[1];
var depth = earthquake.geometry.coordinates[2];
var latlng = latLongToVector3(latitude, longitude, EARTH_SPHERE_RADIUS / 2, 0);
var position = EARTH_CENTER_POSITION;
var finalPosition = Vec3.sum(position, latlng);
//print('finalpos::' + JSON.stringify(finalPosition))
return finalPosition
}
var QUAKE_URL = 'http://earthquake.usgs.gov/earthquakes/feed/v1.0/summary/all_day.geojson'
function get(url) {
print('getting' + url)
// Return a new promise.
return new Promise(function(resolve, reject) {
// Do the usual XHR stuff
var req = new XMLHttpRequest();
req.open('GET', url);
req.onreadystatechange = function() {
print('req status:: ' + JSON.stringify(req.status))
if (req.readyState == 4 && req.status == 200) {
var myArr = JSON.parse(req.responseText);
resolve(myArr);
}
};
req.send();
});
}
function createQuakeMarker(earthquake) {
var markerProperties = {
name: earthquake.properties.place,
type: 'Sphere',
parentID:earth,
dimensions: QUAKE_MARKER_DIMENSIONS,
position: getQuakePosition(earthquake),
ignoreForCollisions:true,
lifetime: 6000,
color: getQuakeMarkerColor(earthquake)
}
// print('marker properties::' + JSON.stringify(markerProperties))
return Entities.addEntity(markerProperties);
}
function getQuakeMarkerColor(earthquake) {
var color = {};
var magnitude = earthquake.properties.mag;
// realistic, but will never get full red coloring and will probably be pretty dull for most. Must experiment.
var sValue = scale(magnitude, 0, 10, 0, 100);
var HSL_string = "hsl(0, " + sValue + "%, 50%)"
var color = tinyColor(HSL_string);
var finalColor = {
red: color._r,
green: color._g,
blue: color._b
}
return finalColor
}
function scale(value, min1, max1, min2, max2) {
return min2 + (max2 - min2) * ((value - min1) / (max1 - min1));
}
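// Illustrative sketch, not part of the original script: scale() is a plain linear remap,
// so an earthquake magnitude of 5 on the 0 to 10 range maps to a saturation of 50 on 0 to 100.
var exampleSaturation = scale(5, 0, 10, 0, 100); // expect 50
print('example saturation: ' + exampleSaturation);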
function processQuakes(earthquakes) {
print('quakers length' + earthquakes.length)
earthquakes.forEach(function(quake) {
// print('PROCESSING A QUAKE')
var marker = createQuakeMarker(quake);
markers.push(marker);
})
print('markers length:' + markers.length)
}
var quakes;
var markers = [];
var earth = createEarth();
function getThenProcessQuakes() {
get(QUAKE_URL).then(function(response) {
print('got it::' + response.features.length)
quakes = response.features;
processQuakes(quakes);
//print("Success!" + JSON.stringify(response));
}, function(error) {
print('error getting quakes')
});
}
function cleanupMarkers() {
print('CLEANING UP MARKERS')
while (markers.length > 0) {
Entities.deleteEntity(markers.pop());
}
}
function cleanupEarth() {
Entities.deleteEntity(earth);
Script.update.disconnect(spinEarth);
}
function cleanupInterval() {
if (pollingInterval !== null) {
Script.clearInterval(pollingInterval)
}
}
Script.scriptEnding.connect(cleanupMarkers);
Script.scriptEnding.connect(cleanupEarth);
Script.scriptEnding.connect(cleanupInterval);
getThenProcessQuakes();
var pollingInterval = null;
if (POLL_FOR_CHANGES === true) {
pollingInterval = Script.setInterval(function() {
cleanupMarkers();
getThenProcessQuakes()
}, CHECK_QUAKE_FREQUENCY)
}
function spinEarth(){
Entities.editEntity(earth,{
angularVelocity:{
x:0,
y:0.25,
z:0
}
})
}
if(SHOULD_SPIN===true){
Script.update.connect(spinEarth);
}

View file

@ -392,6 +392,11 @@ var toolBar = (function() {
url,
file;
if (!event.isLeftButton) {
// if another mouse button than left is pressed ignore it
return false;
}
clickedOverlay = Overlays.getOverlayAtPoint({
x: event.x,
y: event.y

View file

@ -2340,6 +2340,11 @@ SelectionDisplay = (function () {
that.mousePressEvent = function(event) {
if (!event.isLeftButton) {
// if another mouse button than left is pressed ignore it
return false;
}
var somethingClicked = false;
var pickRay = Camera.computePickRay(event.x, event.y);

File diff suppressed because it is too large

View file

@ -45,7 +45,12 @@ Item {
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Framerate: " + root.framerate
text: "Render Rate: " + root.renderrate
}
Text {
color: root.fontColor;
font.pixelSize: root.fontSize
text: "Present Rate: " + root.presentrate
}
Text {
color: root.fontColor;

View file

@ -44,6 +44,9 @@
#include <QtNetwork/QNetworkDiskCache>
#include <gl/Config.h>
#include <QOpenGLContextWrapper.h>
#include <AccountManager.h>
#include <AddressManager.h>
#include <ApplicationVersion.h>
@ -617,6 +620,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
// enable mouse tracking; otherwise, we only get drag events
_glWidget->setMouseTracking(true);
_glWidget->makeCurrent();
_glWidget->initializeGL();
_offscreenContext = new OffscreenGLCanvas();
_offscreenContext->create(_glWidget->context()->contextHandle());
@ -1136,7 +1141,7 @@ void Application::paintGL() {
_lastInstantaneousFps = instantaneousFps;
auto displayPlugin = getActiveDisplayPlugin();
displayPlugin->preRender();
// FIXME not needed anymore?
_offscreenContext->makeCurrent();
// update the avatar with a fresh HMD pose
@ -1191,6 +1196,9 @@ void Application::paintGL() {
QSize size = getDeviceSize();
renderArgs._viewport = glm::ivec4(0, 0, size.width(), size.height());
_applicationOverlay.renderOverlay(&renderArgs);
gpu::FramebufferPointer overlayFramebuffer = _applicationOverlay.getOverlayFramebuffer();
}
{
@ -1304,6 +1312,13 @@ void Application::paintGL() {
auto baseProjection = renderArgs._viewFrustum->getProjection();
auto hmdInterface = DependencyManager::get<HMDScriptingInterface>();
float IPDScale = hmdInterface->getIPDScale();
// Tell the plugin what pose we're using to render. In this case we're just using the
// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
// for rotational timewarp. If we move to support positional timewarp, we need to
// ensure this contains the full pose composed with the eye offsets.
mat4 headPose = displayPlugin->getHeadPose(_frameCount);
// FIXME we probably don't need to set the projection matrix every frame,
// only when the display plugin changes (or in non-HMD modes when the user
// changes the FOV manually, which right now I don't think they can).
@ -1319,12 +1334,7 @@ void Application::paintGL() {
mat4 eyeOffsetTransform = glm::translate(mat4(), eyeOffset * -1.0f * IPDScale);
eyeOffsets[eye] = eyeOffsetTransform;
// Tell the plugin what pose we're using to render. In this case we're just using the
// unmodified head pose because the only plugin that cares (the Oculus plugin) uses it
// for rotational timewarp. If we move to support positional timewarp, we need to
// ensure this contains the full pose composed with the eye offsets.
mat4 headPose = displayPlugin->getHeadPose();
displayPlugin->setEyeRenderPose(eye, headPose);
displayPlugin->setEyeRenderPose(_frameCount, eye, headPose);
eyeProjections[eye] = displayPlugin->getProjection(eye, baseProjection);
});
@ -1339,6 +1349,7 @@ void Application::paintGL() {
}
// Overlay Composition, needs to occur after screen space effects have completed
// FIXME migrate composition into the display plugins
{
PROFILE_RANGE(__FUNCTION__ "/compositor");
PerformanceTimer perfTimer("compositor");
@ -1367,44 +1378,40 @@ void Application::paintGL() {
{
PROFILE_RANGE(__FUNCTION__ "/pluginOutput");
PerformanceTimer perfTimer("pluginOutput");
auto primaryFbo = framebufferCache->getPrimaryFramebuffer();
GLuint finalTexture = gpu::GLBackend::getTextureID(primaryFbo->getRenderBuffer(0));
// Ensure the rendering context commands are completed when rendering
GLsync sync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
// Ensure the sync object is flushed to the driver thread before releasing the context
// CRITICAL for the mac driver apparently.
glFlush();
_offscreenContext->doneCurrent();
auto primaryFramebuffer = framebufferCache->getPrimaryFramebuffer();
auto scratchFramebuffer = framebufferCache->getFramebuffer();
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
gpu::Vec4i rect;
rect.z = size.width();
rect.w = size.height();
batch.setFramebuffer(scratchFramebuffer);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, glm::vec4(0.0f, 0.0f, 0.0f, 0.0f));
batch.blit(primaryFramebuffer, rect, scratchFramebuffer, rect);
batch.setFramebuffer(nullptr);
});
auto finalTexturePointer = scratchFramebuffer->getRenderBuffer(0);
GLuint finalTexture = gpu::GLBackend::getTextureID(finalTexturePointer);
Q_ASSERT(0 != finalTexture);
Q_ASSERT(!_lockedFramebufferMap.contains(finalTexture));
_lockedFramebufferMap[finalTexture] = scratchFramebuffer;
// Switches to the display plugin context
displayPlugin->preDisplay();
// Ensure all operations from the previous context are complete before we try to read the fbo
glWaitSync(sync, 0, GL_TIMEOUT_IGNORED);
glDeleteSync(sync);
uint64_t displayStart = usecTimestampNow();
Q_ASSERT(isCurrentContext(_offscreenContext->getContext()));
{
PROFILE_RANGE(__FUNCTION__ "/pluginDisplay");
PerformanceTimer perfTimer("pluginDisplay");
displayPlugin->display(finalTexture, toGlm(size));
PROFILE_RANGE(__FUNCTION__ "/pluginSubmitScene");
PerformanceTimer perfTimer("pluginSubmitScene");
displayPlugin->submitSceneTexture(_frameCount, finalTexture, toGlm(size));
}
Q_ASSERT(isCurrentContext(_offscreenContext->getContext()));
{
PROFILE_RANGE(__FUNCTION__ "/bufferSwap");
PerformanceTimer perfTimer("bufferSwap");
displayPlugin->finishFrame();
}
uint64_t displayEnd = usecTimestampNow();
const float displayPeriodUsec = (float)(displayEnd - displayStart); // usecs
_lastPaintWait = displayPeriodUsec / (float)USECS_PER_SECOND;
}
{
PerformanceTimer perfTimer("makeCurrent");
_offscreenContext->makeCurrent();
Stats::getInstance()->setRenderDetails(renderArgs._details);
// Reset the gpu::Context Stages
// Back to the default framebuffer;
gpu::doInBatch(renderArgs._context, [=](gpu::Batch& batch) {
@ -2612,7 +2619,7 @@ void Application::updateMyAvatarLookAtPosition() {
lookAtPosition.x = -lookAtPosition.x;
}
if (isHMD) {
glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose();
glm::mat4 headPose = getActiveDisplayPlugin()->getHeadPose(_frameCount);
glm::quat hmdRotation = glm::quat_cast(headPose);
lookAtSpot = _myCamera.getPosition() + myAvatar->getOrientation() * (hmdRotation * lookAtPosition);
} else {
@ -3066,11 +3073,6 @@ void Application::queryOctree(NodeType_t serverType, PacketType packetType, Node
//qCDebug(interfaceapp) << ">>> inside... queryOctree()... _viewFrustum.getFieldOfView()=" << _viewFrustum.getFieldOfView();
bool wantExtraDebugging = getLogger()->extraDebugging();
// These will be the same for all servers, so we can set them up once and then reuse for each server we send to.
_octreeQuery.setWantLowResMoving(true);
_octreeQuery.setWantDelta(true);
_octreeQuery.setWantCompression(true);
_octreeQuery.setCameraPosition(_viewFrustum.getPosition());
_octreeQuery.setCameraOrientation(_viewFrustum.getOrientation());
_octreeQuery.setCameraFov(_viewFrustum.getFieldOfView());
@ -3773,12 +3775,11 @@ void Application::domainChanged(const QString& domainHostname) {
_domainConnectionRefusals.clear();
}
void Application::handleDomainConnectionDeniedPacket(QSharedPointer<NLPacket> packet) {
void Application::handleDomainConnectionDeniedPacket(QSharedPointer<ReceivedMessage> message) {
// Read deny reason from packet
quint16 reasonSize;
packet->readPrimitive(&reasonSize);
QString reason = QString::fromUtf8(packet->getPayload() + packet->pos(), reasonSize);
packet->seek(packet->pos() + reasonSize);
message->readPrimitive(&reasonSize);
QString reason = QString::fromUtf8(message->readWithoutCopy(reasonSize));
// output to the log so the user knows they got a denied connection request
// and check and signal for an access token so that we can make sure they are logged in
@ -3864,9 +3865,7 @@ void Application::nodeKilled(SharedNodePointer node) {
Menu::getInstance()->getActionForOption(MenuOption::UploadAsset)->setEnabled(false);
}
}
void Application::trackIncomingOctreePacket(NLPacket& packet, SharedNodePointer sendingNode, bool wasStatsPacket) {
void Application::trackIncomingOctreePacket(ReceivedMessage& message, SharedNodePointer sendingNode, bool wasStatsPacket) {
// Attempt to identify the sender from its address.
if (sendingNode) {
const QUuid& nodeUUID = sendingNode->getUUID();
@ -3875,13 +3874,13 @@ void Application::trackIncomingOctreePacket(NLPacket& packet, SharedNodePointer
_octreeServerSceneStats.withWriteLock([&] {
if (_octreeServerSceneStats.find(nodeUUID) != _octreeServerSceneStats.end()) {
OctreeSceneStats& stats = _octreeServerSceneStats[nodeUUID];
stats.trackIncomingOctreePacket(packet, wasStatsPacket, sendingNode->getClockSkewUsec());
stats.trackIncomingOctreePacket(message, wasStatsPacket, sendingNode->getClockSkewUsec());
}
});
}
}
int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingNode) {
int Application::processOctreeStats(ReceivedMessage& message, SharedNodePointer sendingNode) {
// But, also identify the sender, and keep track of the contained jurisdiction root for this server
// parse the incoming stats data and stick it in a temporary object for now, while we
@ -3893,7 +3892,7 @@ int Application::processOctreeStats(NLPacket& packet, SharedNodePointer sendingN
// now that we know the node ID, let's add these stats to the stats for that node...
_octreeServerSceneStats.withWriteLock([&] {
OctreeSceneStats& octreeStats = _octreeServerSceneStats[nodeUUID];
statsMessageLength = octreeStats.unpackFromPacket(packet);
statsMessageLength = octreeStats.unpackFromPacket(message);
// see if this is the first we've heard of this node...
NodeToJurisdictionMap* jurisdiction = NULL;
@ -4508,7 +4507,7 @@ void Application::takeSnapshot() {
player->setMedia(QUrl::fromLocalFile(inf.absoluteFilePath()));
player->play();
QString fileName = Snapshot::saveSnapshot(_glWidget->grabFrameBuffer());
QString fileName = Snapshot::saveSnapshot(getActiveDisplayPlugin()->getScreenshot());
AccountManager& accountManager = AccountManager::getInstance();
if (!accountManager.isLoggedIn()) {
@ -4519,7 +4518,6 @@ void Application::takeSnapshot() {
_snapshotShareDialog = new SnapshotShareDialog(fileName, _glWidget);
}
_snapshotShareDialog->show();
}
float Application::getRenderResolutionScale() const {
@ -4702,10 +4700,6 @@ const DisplayPlugin* Application::getActiveDisplayPlugin() const {
return ((Application*)this)->getActiveDisplayPlugin();
}
bool _activatingDisplayPlugin{ false };
QVector<QPair<QString, QString>> _currentDisplayPluginActions;
QVector<QPair<QString, QString>> _currentInputPluginActions;
static void addDisplayPluginToMenu(DisplayPluginPointer displayPlugin, bool active = false) {
auto menu = Menu::getInstance();
QString name = displayPlugin->getName();
@ -4735,9 +4729,10 @@ void Application::updateDisplayMode() {
bool first = true;
foreach(auto displayPlugin, displayPlugins) {
addDisplayPluginToMenu(displayPlugin, first);
QObject::connect(displayPlugin.get(), &DisplayPlugin::requestRender, [this] {
paintGL();
});
// This must be a queued connection to avoid a deadlock
QObject::connect(displayPlugin.get(), &DisplayPlugin::requestRender,
this, &Application::paintGL, Qt::QueuedConnection);
QObject::connect(displayPlugin.get(), &DisplayPlugin::recommendedFramebufferSizeChanged, [this](const QSize & size) {
resizeGL();
});
@ -4779,19 +4774,18 @@ void Application::updateDisplayMode() {
return;
}
if (!_currentDisplayPluginActions.isEmpty()) {
if (!_pluginContainer->currentDisplayActions().isEmpty()) {
auto menu = Menu::getInstance();
foreach(auto itemInfo, _currentDisplayPluginActions) {
foreach(auto itemInfo, _pluginContainer->currentDisplayActions()) {
menu->removeMenuItem(itemInfo.first, itemInfo.second);
}
_currentDisplayPluginActions.clear();
_pluginContainer->currentDisplayActions().clear();
}
if (newDisplayPlugin) {
_offscreenContext->makeCurrent();
_activatingDisplayPlugin = true;
newDisplayPlugin->activate();
_activatingDisplayPlugin = false;
_offscreenContext->makeCurrent();
offscreenUi->resize(fromGlm(newDisplayPlugin->getRecommendedUiSize()));
_offscreenContext->makeCurrent();
@ -4917,7 +4911,7 @@ mat4 Application::getEyeOffset(int eye) const {
mat4 Application::getHMDSensorPose() const {
if (isHMDMode()) {
return getActiveDisplayPlugin()->getHeadPose();
return getActiveDisplayPlugin()->getHeadPose(_frameCount);
}
return mat4();
}

View file

@ -158,6 +158,7 @@ public:
bool isForeground() const { return _isForeground; }
uint32_t getFrameCount() { return _frameCount; }
float getFps() const { return _fps; }
float const HMD_TARGET_FRAME_RATE = 75.0f;
float const DESKTOP_TARGET_FRAME_RATE = 60.0f;
@ -328,7 +329,7 @@ private slots:
void activeChanged(Qt::ApplicationState state);
void domainSettingsReceived(const QJsonObject& domainSettingsObject);
void handleDomainConnectionDeniedPacket(QSharedPointer<NLPacket> packet);
void handleDomainConnectionDeniedPacket(QSharedPointer<ReceivedMessage> message);
void notifyPacketVersionMismatch();
@ -394,8 +395,8 @@ private:
bool importSVOFromURL(const QString& urlString);
int processOctreeStats(NLPacket& packet, SharedNodePointer sendingNode);
void trackIncomingOctreePacket(NLPacket& packet, SharedNodePointer sendingNode, bool wasStatsPacket);
int processOctreeStats(ReceivedMessage& message, SharedNodePointer sendingNode);
void trackIncomingOctreePacket(ReceivedMessage& message, SharedNodePointer sendingNode, bool wasStatsPacket);
void resizeEvent(QResizeEvent* size);
@ -425,6 +426,9 @@ private:
DisplayPluginPointer _displayPlugin;
InputPluginList _activeInputPlugins;
bool _activatingDisplayPlugin { false };
QMap<uint32_t, gpu::FramebufferPointer> _lockedFramebufferMap;
MainWindow* _window;
ToolWindow* _toolWindow;

View file

@ -13,27 +13,6 @@
#include "Application.h"
#include "GLCanvas.h"
#include <QWindow>
#include "MainWindow.h"
#include "Menu.h"
void GLCanvas::paintGL() {
PROFILE_RANGE(__FUNCTION__);
// FIXME - I'm not sure why this still remains, it appears as if this GLCanvas gets a single paintGL call near
// the beginning of the application starting up. I'm not sure if we really need to call Application::paintGL()
// in this case, since the display plugins eventually handle all the painting
bool isThrottleFPSEnabled = Menu::getInstance()->isOptionChecked(MenuOption::ThrottleFPSIfNotFocus);
if (!qApp->getWindow()->isMinimized() || !isThrottleFPSEnabled) {
qApp->paintGL();
}
}
void GLCanvas::resizeGL(int width, int height) {
qApp->resizeGL();
}
bool GLCanvas::event(QEvent* event) {
if (QEvent::Paint == event->type() && qApp->isAboutToQuit()) {
return true;

View file

@ -18,8 +18,6 @@
class GLCanvas : public GLWidget {
Q_OBJECT
protected:
virtual void paintGL() override;
virtual void resizeGL(int width, int height) override;
virtual bool event(QEvent* event) override;
};

View file

@ -432,8 +432,6 @@ Menu::Menu() {
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::AvatarReceiveStats, 0, false,
avatarManager.data(), SLOT(setShouldShowReceiveStats(bool)));
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderSkeletonCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderHeadCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderBoundingCollisionShapes);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtVectors, 0, false);
addCheckableActionToQMenuAndActionHash(avatarDebugMenu, MenuOption::RenderLookAtTargets, 0, false);

View file

@ -238,10 +238,8 @@ namespace MenuOption {
const QString ReloadContent = "Reload Content (Clears all caches)";
const QString RenderBoundingCollisionShapes = "Show Bounding Collision Shapes";
const QString RenderFocusIndicator = "Show Eye Focus";
const QString RenderHeadCollisionShapes = "Show Head Collision Shapes";
const QString RenderLookAtTargets = "Show Look-at Targets";
const QString RenderLookAtVectors = "Show Look-at Vectors";
const QString RenderSkeletonCollisionShapes = "Show Skeleton Collision Shapes";
const QString RenderResolution = "Scale Resolution";
const QString RenderResolutionOne = "1";
const QString RenderResolutionTwoThird = "2/3";

View file

@ -1,17 +1,22 @@
#include "PluginContainerProxy.h"
#include <QScreen>
#include <QWindow>
#include <QtGui/QScreen>
#include <QtGui/QWindow>
#include <plugins/Plugin.h>
#include <plugins/PluginManager.h>
#include <display-plugins/DisplayPlugin.h>
#include <DependencyManager.h>
#include <FramebufferCache.h>
#include "Application.h"
#include "MainWindow.h"
#include "GLCanvas.h"
#include "ui/DialogsManager.h"
#include <gl/OffscreenGLCanvas.h>
#include <QtGui/QOpenGLContext>
PluginContainerProxy::PluginContainerProxy() {
}
@ -30,12 +35,7 @@ void PluginContainerProxy::removeMenu(const QString& menuName) {
Menu::getInstance()->removeMenu(menuName);
}
extern bool _activatingDisplayPlugin;
extern QVector<QPair<QString, QString>> _currentDisplayPluginActions;
extern QVector<QPair<QString, QString>> _currentInputPluginActions;
std::map<QString, QActionGroup*> _exclusiveGroups;
QAction* PluginContainerProxy::addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName) {
QAction* PluginContainerProxy::addMenuItem(PluginType type, const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable, bool checked, const QString& groupName) {
auto menu = Menu::getInstance();
MenuWrapper* parentItem = menu->getMenu(path);
QAction* action = menu->addActionToQMenuAndActionHash(parentItem, name);
@ -54,7 +54,7 @@ QAction* PluginContainerProxy::addMenuItem(const QString& path, const QString& n
});
action->setCheckable(checkable);
action->setChecked(checked);
if (_activatingDisplayPlugin) {
if (type == PluginType::DISPLAY_PLUGIN) {
_currentDisplayPluginActions.push_back({ path, name });
} else {
_currentInputPluginActions.push_back({ path, name });
@ -150,10 +150,37 @@ void PluginContainerProxy::showDisplayPluginsTools() {
DependencyManager::get<DialogsManager>()->hmdTools(true);
}
QGLWidget* PluginContainerProxy::getPrimarySurface() {
GLWidget* PluginContainerProxy::getPrimaryWidget() {
return qApp->_glWidget;
}
QWindow* PluginContainerProxy::getPrimaryWindow() {
return qApp->_glWidget->windowHandle();
}
QOpenGLContext* PluginContainerProxy::getPrimaryContext() {
return qApp->_glWidget->context()->contextHandle();
}
const DisplayPlugin* PluginContainerProxy::getActiveDisplayPlugin() const {
return qApp->getActiveDisplayPlugin();
}
bool PluginContainerProxy::makeRenderingContextCurrent() {
return qApp->_offscreenContext->makeCurrent();
}
void PluginContainerProxy::releaseSceneTexture(uint32_t texture) {
Q_ASSERT(QThread::currentThread() == qApp->thread());
auto& framebufferMap = qApp->_lockedFramebufferMap;
Q_ASSERT(framebufferMap.contains(texture));
auto framebufferPointer = framebufferMap[texture];
framebufferMap.remove(texture);
auto framebufferCache = DependencyManager::get<FramebufferCache>();
framebufferCache->releaseFramebuffer(framebufferPointer);
}
void PluginContainerProxy::releaseOverlayTexture(uint32_t texture) {
// FIXME implement present thread compositing
}

View file

@ -2,19 +2,21 @@
#ifndef hifi_PluginContainerProxy_h
#define hifi_PluginContainerProxy_h
#include <QObject>
#include <QRect>
#include <QtCore/QObject>
#include <QtCore/QRect>
#include <plugins/Forward.h>
#include <plugins/PluginContainer.h>
class QActionGroup;
class PluginContainerProxy : public QObject, PluginContainer {
Q_OBJECT
PluginContainerProxy();
virtual ~PluginContainerProxy();
virtual void addMenu(const QString& menuName) override;
virtual void removeMenu(const QString& menuName) override;
virtual QAction* addMenuItem(const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable = false, bool checked = false, const QString& groupName = "") override;
virtual QAction* addMenuItem(PluginType type, const QString& path, const QString& name, std::function<void(bool)> onClicked, bool checkable = false, bool checked = false, const QString& groupName = "") override;
virtual void removeMenuItem(const QString& menuName, const QString& menuItem) override;
virtual bool isOptionChecked(const QString& name) override;
virtual void setIsOptionChecked(const QString& path, bool checked) override;
@ -22,13 +24,20 @@ class PluginContainerProxy : public QObject, PluginContainer {
virtual void unsetFullscreen(const QScreen* avoidScreen = nullptr) override;
virtual void showDisplayPluginsTools() override;
virtual void requestReset() override;
virtual QGLWidget* getPrimarySurface() override;
virtual bool makeRenderingContextCurrent() override;
virtual void releaseSceneTexture(uint32_t texture) override;
virtual void releaseOverlayTexture(uint32_t texture) override;
virtual GLWidget* getPrimaryWidget() override;
virtual QWindow* getPrimaryWindow() override;
virtual QOpenGLContext* getPrimaryContext() override;
virtual bool isForeground() override;
virtual const DisplayPlugin* getActiveDisplayPlugin() const override;
QRect _savedGeometry{ 10, 120, 800, 600 };
std::map<QString, QActionGroup*> _exclusiveGroups;
friend class Application;
};
#endif

View file

@ -130,9 +130,9 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
std::call_once(once, [&] {
{
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(standardTransformPNTC_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(starsGrid_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
auto vs = gpu::Shader::createVertex(std::string(standardTransformPNTC_vert));
auto ps = gpu::Shader::createPixel(std::string(starsGrid_frag));
auto program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::makeProgram((*program));
_timeSlot = program->getBuffers().findLocation(UNIFORM_TIME_NAME);
if (_timeSlot == gpu::Shader::INVALID_LOCATION) {
@ -143,12 +143,12 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
state->setDepthTest(gpu::State::DepthTest(false));
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_gridPipeline.reset(gpu::Pipeline::create(program, state));
_gridPipeline = gpu::Pipeline::create(program, state);
}
{
auto vs = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(stars_vert)));
auto ps = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(stars_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vs, ps));
auto vs = gpu::Shader::createVertex(std::string(stars_vert));
auto ps = gpu::Shader::createPixel(std::string(stars_frag));
auto program = gpu::Shader::createProgram(vs, ps);
gpu::Shader::makeProgram((*program));
auto state = gpu::StatePointer(new gpu::State());
// enable decal blend
@ -156,7 +156,7 @@ void Stars::render(RenderArgs* renderArgs, float alpha) {
state->setStencilTest(true, 0xFF, gpu::State::StencilTest(0, 0xFF, gpu::EQUAL, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP, gpu::State::STENCIL_OP_KEEP));
state->setAntialiasedLineEnable(true); // line smoothing also smooths points
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA);
_starsPipeline.reset(gpu::Pipeline::create(program, state));
_starsPipeline = gpu::Pipeline::create(program, state);
}

View file

@ -433,16 +433,6 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
}
}
/*
// TODO: re-implement these when we have more detailed avatar collision shapes
bool renderSkeleton = Menu::getInstance()->isOptionChecked(MenuOption::RenderSkeletonCollisionShapes);
if (renderSkeleton) {
}
bool renderHead = Menu::getInstance()->isOptionChecked(MenuOption::RenderHeadCollisionShapes);
if (renderHead && shouldRenderHead(renderArgs)) {
}
*/
bool renderBounding = Menu::getInstance()->isOptionChecked(MenuOption::RenderBoundingCollisionShapes);
if (renderBounding && shouldRenderHead(renderArgs) && _skeletonModel.isRenderable()) {
PROFILE_RANGE_BATCH(batch, __FUNCTION__":skeletonBoundingCollisionShapes");
@ -867,18 +857,12 @@ QVector<glm::quat> Avatar::getJointRotations() const {
}
glm::quat Avatar::getJointRotation(int index) const {
if (QThread::currentThread() != thread()) {
return AvatarData::getJointRotation(index);
}
glm::quat rotation;
_skeletonModel.getJointRotation(index, rotation);
return rotation;
}
glm::vec3 Avatar::getJointTranslation(int index) const {
if (QThread::currentThread() != thread()) {
return AvatarData::getJointTranslation(index);
}
glm::vec3 translation;
_skeletonModel.getJointTranslation(index, translation);
return translation;

View file

@ -30,7 +30,8 @@ void AvatarUpdate::synchronousProcess() {
// Keep our own updated value, so that our asynchronous code can consult it.
_isHMDMode = qApp->isHMDMode();
_headPose = qApp->getActiveDisplayPlugin()->getHeadPose();
auto frameCount = qApp->getFrameCount();
_headPose = qApp->getActiveDisplayPlugin()->getHeadPose(frameCount);
if (_updateBillboard) {
DependencyManager::get<AvatarManager>()->getMyAvatar()->doUpdateBillboard();

View file

@ -967,8 +967,6 @@ void MyAvatar::clearJointData(int index) {
QMetaObject::invokeMethod(this, "clearJointData", Q_ARG(int, index));
return;
}
// HACK: ATM only JS scripts call clearJointData() on MyAvatar so we hardcode the priority
_rig->setJointState(index, false, glm::quat(), glm::vec3(), 0.0f);
_rig->clearJointAnimationPriority(index);
}
@ -1200,7 +1198,7 @@ void MyAvatar::renderBody(RenderArgs* renderArgs, ViewFrustum* renderFrustum, fl
if (qApp->isHMDMode()) {
glm::vec3 cameraPosition = qApp->getCamera()->getPosition();
glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose();
glm::mat4 headPose = qApp->getActiveDisplayPlugin()->getHeadPose(qApp->getFrameCount());
glm::mat4 leftEyePose = qApp->getActiveDisplayPlugin()->getEyeToHeadTransform(Eye::Left);
leftEyePose = leftEyePose * headPose;
glm::vec3 leftEyePosition = extractTranslation(leftEyePose);
@ -1254,6 +1252,16 @@ void MyAvatar::initHeadBones() {
}
}
void MyAvatar::setAnimGraphUrl(const QUrl& url) {
if (_animGraphUrl == url) {
return;
}
destroyAnimGraph();
_skeletonModel.reset(); // Why is this necessary? Without this, we crash in the next render.
_animGraphUrl = url;
initAnimGraph();
}
void MyAvatar::initAnimGraph() {
// avatar.json
// https://gist.github.com/hyperlogic/7d6a0892a7319c69e2b9
@ -1270,9 +1278,9 @@ void MyAvatar::initAnimGraph() {
// or run a local web-server
// python -m SimpleHTTPServer&
//auto graphUrl = QUrl("http://localhost:8000/avatar.json");
auto graphUrl = QUrl(_animGraphUrl.isEmpty() ?
QUrl::fromLocalFile(PathUtils::resourcesPath() + "meshes/defaultAvatar_full/avatar-animation.json") :
_animGraphUrl);
auto graphUrl =_animGraphUrl.isEmpty() ?
QUrl::fromLocalFile(PathUtils::resourcesPath() + "meshes/defaultAvatar_full/avatar-animation.json") :
QUrl(_animGraphUrl);
_rig->initAnimGraph(graphUrl);
_bodySensorMatrix = deriveBodyFromHMDSensor(); // Based on current cached HMD position/rotation..

View file

@ -249,13 +249,13 @@ public slots:
virtual void rebuildSkeletonBody() override;
const QString& getAnimGraphUrl() const { return _animGraphUrl; }
Q_INVOKABLE QUrl getAnimGraphUrl() const { return _animGraphUrl; }
void setEnableDebugDrawDefaultPose(bool isEnabled);
void setEnableDebugDrawAnimPose(bool isEnabled);
void setEnableDebugDrawPosition(bool isEnabled);
void setEnableMeshVisible(bool isEnabled);
void setAnimGraphUrl(const QString& url) { _animGraphUrl = url; }
Q_INVOKABLE void setAnimGraphUrl(const QUrl& url);
glm::vec3 getPositionForAudio();
glm::quat getOrientationForAudio();
@ -356,7 +356,7 @@ private:
// Avatar Preferences
QUrl _fullAvatarURLFromPreferences;
QString _fullAvatarModelName;
QString _animGraphUrl {""};
QUrl _animGraphUrl {""};
// cache of the current HMD sensor position and orientation
// in sensor space.

View file

@ -23,11 +23,11 @@ OctreePacketProcessor::OctreePacketProcessor() {
this, "handleOctreePacket");
}
void OctreePacketProcessor::handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode) {
queueReceivedPacket(packet, senderNode);
void OctreePacketProcessor::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
queueReceivedPacket(message, senderNode);
}
void OctreePacketProcessor::processPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
PerformanceWarning warn(Menu::getInstance()->isOptionChecked(MenuOption::PipelineWarnings),
"OctreePacketProcessor::processPacket()");
@ -39,41 +39,42 @@ void OctreePacketProcessor::processPacket(QSharedPointer<NLPacket> packet, Share
bool wasStatsPacket = false;
PacketType octreePacketType = packet->getType();
PacketType octreePacketType = message->getType();
// note: PacketType_OCTREE_STATS can have PacketType_VOXEL_DATA
// immediately following them inside the same packet. So, we process the PacketType_OCTREE_STATS first
// and then process any remaining bytes as if they were another packet
if (octreePacketType == PacketType::OctreeStats) {
int statsMessageLength = qApp->processOctreeStats(*packet, sendingNode);
int statsMessageLength = qApp->processOctreeStats(*message, sendingNode);
wasStatsPacket = true;
int piggybackBytes = packet->getPayloadSize() - statsMessageLength;
int piggybackBytes = message->getSize() - statsMessageLength;
if (piggybackBytes) {
// construct a new packet from the piggybacked one
auto buffer = std::unique_ptr<char[]>(new char[piggybackBytes]);
memcpy(buffer.get(), packet->getPayload() + statsMessageLength, piggybackBytes);
memcpy(buffer.get(), message->getRawMessage() + statsMessageLength, piggybackBytes);
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggybackBytes, packet->getSenderSockAddr());
packet = QSharedPointer<NLPacket>(newPacket.release());
auto newPacket = NLPacket::fromReceivedPacket(std::move(buffer), piggybackBytes, message->getSenderSockAddr());
message = QSharedPointer<ReceivedMessage>::create(*newPacket.release());
} else {
// Note... stats packets don't have sequence numbers, so we don't want to send those to trackIncomingVoxelPacket()
return; // bail since no piggyback data
}
} // fall through to piggyback message
PacketType packetType = packet->getType();
PacketType packetType = message->getType();
// check version of piggyback packet against expected version
if (packet->getVersion() != versionForPacketType(packet->getType())) {
if (message->getVersion() != versionForPacketType(message->getType())) {
static QMultiMap<QUuid, PacketType> versionDebugSuppressMap;
const QUuid& senderUUID = packet->getSourceID();
const QUuid& senderUUID = message->getSourceID();
if (!versionDebugSuppressMap.contains(senderUUID, packetType)) {
qDebug() << "Was stats packet? " << wasStatsPacket;
qDebug() << "OctreePacketProcessor - piggyback packet version mismatch on" << packetType << "- Sender"
<< senderUUID << "sent" << (int) packet->getVersion() << "but"
<< senderUUID << "sent" << (int) message->getVersion() << "but"
<< (int) versionForPacketType(packetType) << "expected.";
emit packetVersionMismatch();
@ -83,21 +84,21 @@ void OctreePacketProcessor::processPacket(QSharedPointer<NLPacket> packet, Share
return; // bail since piggyback version doesn't match
}
qApp->trackIncomingOctreePacket(*packet, sendingNode, wasStatsPacket);
qApp->trackIncomingOctreePacket(*message, sendingNode, wasStatsPacket);
// seek back to beginning of packet after tracking
packet->seek(0);
message->seek(0);
switch(packetType) {
case PacketType::EntityErase: {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
qApp->getEntities()->processEraseMessage(*packet, sendingNode);
qApp->getEntities()->processEraseMessage(*message, sendingNode);
}
} break;
case PacketType::EntityData: {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
qApp->getEntities()->processDatagram(*packet, sendingNode);
qApp->getEntities()->processDatagram(*message, sendingNode);
}
} break;

View file

@ -13,6 +13,7 @@
#define hifi_OctreePacketProcessor_h
#include <ReceivedPacketProcessor.h>
#include <ReceivedMessage.h>
/// Handles processing of incoming voxel packets for the interface application. As with other ReceivedPacketProcessor classes
/// the user is responsible for reading inbound packets and adding them to the processing queue by calling queueReceivedPacket()
@ -25,9 +26,9 @@ signals:
void packetVersionMismatch();
protected:
virtual void processPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
virtual void processPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) override;
private slots:
void handleOctreePacket(QSharedPointer<NLPacket> packet, SharedNodePointer senderNode);
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
};
#endif // hifi_OctreePacketProcessor_h

View file

@ -287,7 +287,7 @@ void ApplicationCompositor::displayOverlayTextureHmd(RenderArgs* renderArgs, int
mat4 camMat;
_cameraBaseTransform.getMatrix(camMat);
auto displayPlugin = qApp->getActiveDisplayPlugin();
auto headPose = displayPlugin->getHeadPose();
auto headPose = displayPlugin->getHeadPose(qApp->getFrameCount());
auto eyeToHead = displayPlugin->getEyeToHeadTransform((Eye)eye);
camMat = (headPose * eyeToHead) * camMat;
batch.setViewportTransform(renderArgs->_viewport);

View file

@ -188,9 +188,9 @@ void PreferencesDialog::loadPreferences() {
ui.fieldOfViewSpin->setValue(qApp->getFieldOfView());
ui.leanScaleSpin->setValue(myAvatar->getLeanScale());
ui.avatarScaleSpin->setValue(myAvatar->getAvatarScale());
ui.avatarAnimationEdit->setText(myAvatar->getAnimGraphUrl());
ui.avatarAnimationEdit->setText(myAvatar->getAnimGraphUrl().toString());
ui.maxOctreePPSSpin->setValue(qApp->getMaxOctreePacketsPerSecond());

View file

@ -23,6 +23,7 @@
#include <LODManager.h>
#include <OffscreenUi.h>
#include <PerfStat.h>
#include <plugins/DisplayPlugin.h>
#include "BandwidthRecorder.h"
#include "Menu.h"
@ -118,7 +119,12 @@ void Stats::updateStats(bool force) {
STAT_UPDATE(avatarRenderableCount, avatarManager->getNumberInRenderRange());
STAT_UPDATE(avatarRenderDistance, (int) round(avatarManager->getRenderDistance())); // deliberately truncating
STAT_UPDATE(serverCount, nodeList->size());
STAT_UPDATE(framerate, (int)qApp->getFps());
STAT_UPDATE(renderrate, (int)qApp->getFps());
if (qApp->getActiveDisplayPlugin()) {
STAT_UPDATE(presentrate, (int)round(qApp->getActiveDisplayPlugin()->presentRate()));
} else {
STAT_UPDATE(presentrate, -1);
}
STAT_UPDATE(simrate, (int)qApp->getAverageSimsPerSecond());
STAT_UPDATE(avatarSimrate, (int)qApp->getAvatarSimrate());

View file

@ -32,7 +32,8 @@ class Stats : public QQuickItem {
Q_PROPERTY(float audioPacketlossDownstream READ getAudioPacketLossDownstream)
STATS_PROPERTY(int, serverCount, 0)
STATS_PROPERTY(int, framerate, 0)
STATS_PROPERTY(int, renderrate, 0)
STATS_PROPERTY(int, presentrate, 0)
STATS_PROPERTY(int, simrate, 0)
STATS_PROPERTY(int, avatarSimrate, 0)
STATS_PROPERTY(int, avatarCount, 0)
@ -115,7 +116,8 @@ signals:
void expandedChanged();
void timingExpandedChanged();
void serverCountChanged();
void framerateChanged();
void renderrateChanged();
void presentrateChanged();
void simrateChanged();
void avatarSimrateChanged();
void avatarCountChanged();

View file

@ -12,6 +12,7 @@
#include <GeometryCache.h>
#include <RegisteredMetaTypes.h>
#include <DeferredLightingEffect.h>
QString const Line3DOverlay::TYPE = "line3d";
@ -53,6 +54,7 @@ void Line3DOverlay::render(RenderArgs* args) {
auto batch = args->_batch;
if (batch) {
batch->setModelTransform(_transform);
DependencyManager::get<DeferredLightingEffect>()->bindSimpleProgram(*batch);
if (getIsDashedLine()) {
// TODO: add support for color to renderDashedLine()

View file

@ -14,6 +14,8 @@
#include <glm/gtx/vector_angle.hpp>
#include <queue>
#include <QScriptValueIterator>
#include <QWriteLocker>
#include <QReadLocker>
#include <NumericalConstants.h>
#include <DebugDraw.h>
@ -158,10 +160,10 @@ void Rig::destroyAnimGraph() {
_animSkeleton.reset();
_animLoader.reset();
_animNode.reset();
_relativePoses.clear();
_absolutePoses.clear();
_overridePoses.clear();
_overrideFlags.clear();
_internalPoseSet._relativePoses.clear();
_internalPoseSet._absolutePoses.clear();
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overrideFlags.clear();
}
void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOffset) {
@ -173,16 +175,16 @@ void Rig::initJointStates(const FBXGeometry& geometry, const glm::mat4& modelOff
computeEyesInRootFrame(_animSkeleton->getRelativeDefaultPoses());
_relativePoses.clear();
_relativePoses = _animSkeleton->getRelativeDefaultPoses();
_internalPoseSet._relativePoses.clear();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
buildAbsoluteRigPoses(_relativePoses, _absolutePoses);
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
_overridePoses.clear();
_overridePoses = _animSkeleton->getRelativeDefaultPoses();
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
_overrideFlags.clear();
_overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
@ -201,16 +203,16 @@ void Rig::reset(const FBXGeometry& geometry) {
computeEyesInRootFrame(_animSkeleton->getRelativeDefaultPoses());
_relativePoses.clear();
_relativePoses = _animSkeleton->getRelativeDefaultPoses();
_internalPoseSet._relativePoses.clear();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
buildAbsoluteRigPoses(_relativePoses, _absolutePoses);
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
_overridePoses.clear();
_overridePoses = _animSkeleton->getRelativeDefaultPoses();
_internalPoseSet._overridePoses.clear();
_internalPoseSet._overridePoses = _animSkeleton->getRelativeDefaultPoses();
_overrideFlags.clear();
_overrideFlags.resize(_animSkeleton->getNumJoints(), false);
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints(), false);
buildAbsoluteRigPoses(_animSkeleton->getRelativeDefaultPoses(), _absoluteDefaultPoses);
@ -228,11 +230,11 @@ void Rig::reset(const FBXGeometry& geometry) {
}
bool Rig::jointStatesEmpty() {
return _relativePoses.empty();
return _internalPoseSet._relativePoses.empty();
}
int Rig::getJointStateCount() const {
return _relativePoses.size();
return _internalPoseSet._relativePoses.size();
}
int Rig::indexOfJoint(const QString& jointName) const {
@ -262,7 +264,7 @@ void Rig::setModelOffset(const glm::mat4& modelOffsetMat) {
bool Rig::getJointStateRotation(int index, glm::quat& rotation) const {
if (isIndexValid(index)) {
rotation = _relativePoses[index].rot;
rotation = _internalPoseSet._relativePoses[index].rot;
return !isEqual(rotation, _animSkeleton->getRelativeDefaultPose(index).rot);
} else {
return false;
@ -271,7 +273,7 @@ bool Rig::getJointStateRotation(int index, glm::quat& rotation) const {
bool Rig::getJointStateTranslation(int index, glm::vec3& translation) const {
if (isIndexValid(index)) {
translation = _relativePoses[index].trans;
translation = _internalPoseSet._relativePoses[index].trans;
return !isEqual(translation, _animSkeleton->getRelativeDefaultPose(index).trans);
} else {
return false;
@ -280,46 +282,47 @@ bool Rig::getJointStateTranslation(int index, glm::vec3& translation) const {
void Rig::clearJointState(int index) {
if (isIndexValid(index)) {
_overrideFlags[index] = false;
_internalPoseSet._overrideFlags[index] = false;
}
}
void Rig::clearJointStates() {
_overrideFlags.clear();
_overrideFlags.resize(_animSkeleton->getNumJoints());
_internalPoseSet._overrideFlags.clear();
_internalPoseSet._overrideFlags.resize(_animSkeleton->getNumJoints());
}
void Rig::clearJointAnimationPriority(int index) {
if (isIndexValid(index)) {
_overrideFlags[index] = false;
_internalPoseSet._overrideFlags[index] = false;
_internalPoseSet._overridePoses[index] = _animSkeleton->getRelativeDefaultPose(index);
}
}
void Rig::setJointTranslation(int index, bool valid, const glm::vec3& translation, float priority) {
if (isIndexValid(index)) {
if (valid) {
assert(_overrideFlags.size() == _overridePoses.size());
_overrideFlags[index] = true;
_overridePoses[index].trans = translation;
assert(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
_internalPoseSet._overrideFlags[index] = true;
_internalPoseSet._overridePoses[index].trans = translation;
}
}
}
void Rig::setJointState(int index, bool valid, const glm::quat& rotation, const glm::vec3& translation, float priority) {
if (isIndexValid(index)) {
assert(_overrideFlags.size() == _overridePoses.size());
_overrideFlags[index] = true;
_overridePoses[index].rot = rotation;
_overridePoses[index].trans = translation;
assert(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
_internalPoseSet._overrideFlags[index] = true;
_internalPoseSet._overridePoses[index].rot = rotation;
_internalPoseSet._overridePoses[index].trans = translation;
}
}
void Rig::setJointRotation(int index, bool valid, const glm::quat& rotation, float priority) {
if (isIndexValid(index)) {
if (valid) {
ASSERT(_overrideFlags.size() == _overridePoses.size());
_overrideFlags[index] = true;
_overridePoses[index].rot = rotation;
ASSERT(_internalPoseSet._overrideFlags.size() == _internalPoseSet._overridePoses.size());
_internalPoseSet._overrideFlags[index] = true;
_internalPoseSet._overridePoses[index].rot = rotation;
}
}
}
@ -336,7 +339,7 @@ void Rig::restoreJointTranslation(int index, float fraction, float priority) {
bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm::vec3 translation, glm::quat rotation) const {
if (isIndexValid(jointIndex)) {
position = (rotation * _absolutePoses[jointIndex].trans) + translation;
position = (rotation * _internalPoseSet._absolutePoses[jointIndex].trans) + translation;
return true;
} else {
return false;
@ -345,7 +348,7 @@ bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm:
bool Rig::getJointPosition(int jointIndex, glm::vec3& position) const {
if (isIndexValid(jointIndex)) {
position = _absolutePoses[jointIndex].trans;
position = _internalPoseSet._absolutePoses[jointIndex].trans;
return true;
} else {
return false;
@ -354,7 +357,7 @@ bool Rig::getJointPosition(int jointIndex, glm::vec3& position) const {
bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const glm::quat& rotation) const {
if (isIndexValid(jointIndex)) {
result = rotation * _absolutePoses[jointIndex].rot;
result = rotation * _internalPoseSet._absolutePoses[jointIndex].rot;
return true;
} else {
return false;
@ -362,8 +365,9 @@ bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const
}
bool Rig::getJointRotation(int jointIndex, glm::quat& rotation) const {
if (isIndexValid(jointIndex)) {
rotation = _relativePoses[jointIndex].rot;
QReadLocker readLock(&_externalPoseSetLock);
if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._relativePoses.size()) {
rotation = _externalPoseSet._relativePoses[jointIndex].rot;
return true;
} else {
return false;
@ -371,8 +375,9 @@ bool Rig::getJointRotation(int jointIndex, glm::quat& rotation) const {
}
bool Rig::getJointTranslation(int jointIndex, glm::vec3& translation) const {
if (isIndexValid(jointIndex)) {
translation = _relativePoses[jointIndex].trans;
QReadLocker readLock(&_externalPoseSetLock);
if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._relativePoses.size()) {
translation = _externalPoseSet._relativePoses[jointIndex].trans;
return true;
} else {
return false;
@ -708,21 +713,27 @@ void Rig::updateAnimations(float deltaTime, glm::mat4 rootTransform) {
// evaluate the animation
AnimNode::Triggers triggersOut;
_relativePoses = _animNode->evaluate(_animVars, deltaTime, triggersOut);
if ((int)_relativePoses.size() != _animSkeleton->getNumJoints()) {
_internalPoseSet._relativePoses = _animNode->evaluate(_animVars, deltaTime, triggersOut);
if ((int)_internalPoseSet._relativePoses.size() != _animSkeleton->getNumJoints()) {
// animations haven't fully loaded yet.
_relativePoses = _animSkeleton->getRelativeDefaultPoses();
_internalPoseSet._relativePoses = _animSkeleton->getRelativeDefaultPoses();
}
_animVars.clearTriggers();
for (auto& trigger : triggersOut) {
_animVars.setTrigger(trigger);
}
computeEyesInRootFrame(_relativePoses);
computeEyesInRootFrame(_internalPoseSet._relativePoses);
}
applyOverridePoses();
buildAbsoluteRigPoses(_relativePoses, _absolutePoses);
buildAbsoluteRigPoses(_internalPoseSet._relativePoses, _internalPoseSet._absolutePoses);
// copy internal poses to external poses
{
QWriteLocker writeLock(&_externalPoseSetLock);
_externalPoseSet = _internalPoseSet;
}
}
void Rig::inverseKinematics(int endIndex, glm::vec3 targetPosition, const glm::quat& targetRotation, float priority,
@ -884,7 +895,7 @@ void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm
if (isIndexValid(index)) {
glm::mat4 rigToWorld = createMatFromQuatAndPos(modelRotation, modelTranslation);
glm::mat4 worldToRig = glm::inverse(rigToWorld);
glm::vec3 zAxis = glm::normalize(_absolutePoses[index].trans - transformPoint(worldToRig, lookAtSpot));
glm::vec3 zAxis = glm::normalize(_internalPoseSet._absolutePoses[index].trans - transformPoint(worldToRig, lookAtSpot));
glm::quat q = rotationBetween(IDENTITY_FRONT, zAxis);
// limit rotation
@ -892,7 +903,7 @@ void Rig::updateEyeJoint(int index, const glm::vec3& modelTranslation, const glm
q = glm::angleAxis(glm::clamp(glm::angle(q), -MAX_ANGLE, MAX_ANGLE), glm::axis(q));
// directly set absolutePose rotation
_absolutePoses[index].rot = q;
_internalPoseSet._absolutePoses[index].rot = q;
}
}
@ -989,13 +1000,13 @@ void Rig::applyOverridePoses() {
return;
}
ASSERT(_animSkeleton->getNumJoints() == (int)_relativePoses.size());
ASSERT(_animSkeleton->getNumJoints() == (int)_overrideFlags.size());
ASSERT(_animSkeleton->getNumJoints() == (int)_overridePoses.size());
ASSERT(_animSkeleton->getNumJoints() == (int)_internalPoseSet._relativePoses.size());
ASSERT(_animSkeleton->getNumJoints() == (int)_internalPoseSet._overrideFlags.size());
ASSERT(_animSkeleton->getNumJoints() == (int)_internalPoseSet._overridePoses.size());
for (size_t i = 0; i < _overrideFlags.size(); i++) {
if (_overrideFlags[i]) {
_relativePoses[i] = _overridePoses[i];
for (size_t i = 0; i < _internalPoseSet._overrideFlags.size(); i++) {
if (_internalPoseSet._overrideFlags[i]) {
_internalPoseSet._relativePoses[i] = _internalPoseSet._overridePoses[i];
}
}
}
@ -1020,14 +1031,14 @@ void Rig::buildAbsoluteRigPoses(const AnimPoseVec& relativePoses, AnimPoseVec& a
// transform all absolute poses into rig space.
AnimPose geometryToRigTransform(_geometryToRigTransform);
for (int i = 0; i < (int)_absolutePoses.size(); i++) {
for (int i = 0; i < (int)absolutePosesOut.size(); i++) {
absolutePosesOut[i] = geometryToRigTransform * absolutePosesOut[i];
}
}
glm::mat4 Rig::getJointTransform(int jointIndex) const {
if (isIndexValid(jointIndex)) {
return _absolutePoses[jointIndex];
return _internalPoseSet._absolutePoses[jointIndex];
} else {
return glm::mat4();
}

View file

@ -19,6 +19,7 @@
#include <QScriptValue>
#include <vector>
#include <JointData.h>
#include <QReadWriteLock>
#include "AnimNode.h"
#include "AnimNodeLoader.h"
@ -27,6 +28,9 @@
class Rig;
typedef std::shared_ptr<Rig> RigPointer;
// Rig instances are reentrant.
// However, only specific methods are thread-safe. Noted below.
class Rig : public QObject, public std::enable_shared_from_this<Rig> {
public:
struct StateHandler {
@ -123,10 +127,10 @@ public:
// if rotation is identity, result will be in rig space
bool getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const glm::quat& rotation) const;
// geometry space
// geometry space (thread-safe)
bool getJointRotation(int jointIndex, glm::quat& rotation) const;
// geometry space
// geometry space (thread-safe)
bool getJointTranslation(int jointIndex, glm::vec3& translation) const;
// legacy
@ -217,10 +221,19 @@ public:
AnimPose _modelOffset; // model to rig space
AnimPose _geometryOffset; // geometry to model space (includes unit offset & fst offsets)
AnimPoseVec _relativePoses; // geometry space relative to parent.
AnimPoseVec _absolutePoses; // rig space, not relative to parent.
AnimPoseVec _overridePoses; // geometry space relative to parent.
std::vector<bool> _overrideFlags;
struct PoseSet {
AnimPoseVec _relativePoses; // geometry space relative to parent.
AnimPoseVec _absolutePoses; // rig space, not relative to parent.
AnimPoseVec _overridePoses; // geometry space relative to parent.
std::vector<bool> _overrideFlags;
};
// Only accessed by the main thread
PoseSet _internalPoseSet;
// Copy of the internal pose set, guarded by the lock below, for access from other threads.
PoseSet _externalPoseSet;
mutable QReadWriteLock _externalPoseSetLock;
AnimPoseVec _absoluteDefaultPoses; // rig space, not relative to parent.
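
The Rig hunks above replace the four loose pose vectors with a PoseSet held in two copies: _internalPoseSet, touched only by the animation update, and _externalPoseSet, a snapshot published under _externalPoseSetLock so getJointRotation()/getJointTranslation() can be called from other threads. Below is a minimal sketch of that double-buffering pattern using simplified stand-in types (Pose and DoubleBufferedPoses are illustrative names, not the engine's classes).

#include <QReadLocker>
#include <QReadWriteLock>
#include <QWriteLocker>
#include <vector>

struct Pose { float rot[4]; float trans[3]; };   // stand-in for AnimPose
using PoseVec = std::vector<Pose>;

class DoubleBufferedPoses {
public:
    // Simulation thread only: mutate the internal copy freely, no lock needed,
    // then publish a snapshot at a well-defined point.
    void update(const PoseVec& newPoses) {
        _internal = newPoses;
        QWriteLocker lock(&_externalLock);
        _external = _internal;
    }

    // Any thread: read from the snapshot under a shared (read) lock.
    bool getPose(int index, Pose& poseOut) const {
        QReadLocker lock(&_externalLock);
        if (index < 0 || index >= (int)_external.size()) {
            return false;
        }
        poseOut = _external[index];
        return true;
    }

private:
    PoseVec _internal;                     // owned by the simulation thread
    PoseVec _external;                     // copy readable by other threads
    mutable QReadWriteLock _externalLock;  // guards _external only
};

Readers never block the simulation thread for longer than one snapshot copy, which is the trade the Rig change makes in updateAnimations() above.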

View file

@ -462,24 +462,24 @@ void AudioClient::stop() {
}
}
void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<NLPacket> packet) {
void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message) {
char bitset;
packet->readPrimitive(&bitset);
message->readPrimitive(&bitset);
bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);
if (hasReverb) {
float reverbTime, wetLevel;
packet->readPrimitive(&reverbTime);
packet->readPrimitive(&wetLevel);
message->readPrimitive(&reverbTime);
message->readPrimitive(&wetLevel);
_receivedAudioStream.setReverb(reverbTime, wetLevel);
} else {
_receivedAudioStream.clearReverb();
}
}
void AudioClient::handleAudioDataPacket(QSharedPointer<NLPacket> packet) {
void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message) {
auto nodeList = DependencyManager::get<NodeList>();
nodeList->flagTimeForConnectionStep(LimitedNodeList::ConnectionStep::ReceiveFirstAudioPacket);
@ -493,11 +493,11 @@ void AudioClient::handleAudioDataPacket(QSharedPointer<NLPacket> packet) {
}
// Audio output must exist and be correctly set up if we're going to process received audio
_receivedAudioStream.parseData(*packet);
_receivedAudioStream.parseData(*message);
}
}
void AudioClient::handleNoisyMutePacket(QSharedPointer<NLPacket> packet) {
void AudioClient::handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message) {
if (!_muted) {
toggleMute();
@ -506,12 +506,12 @@ void AudioClient::handleNoisyMutePacket(QSharedPointer<NLPacket> packet) {
}
}
void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet) {
void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> message) {
glm::vec3 position;
float radius;
packet->readPrimitive(&position);
packet->readPrimitive(&radius);
message->readPrimitive(&position);
message->readPrimitive(&radius);
emit muteEnvironmentRequested(position, radius);
}

View file

@ -140,10 +140,10 @@ public slots:
void start();
void stop();
void handleAudioEnvironmentDataPacket(QSharedPointer<NLPacket> packet);
void handleAudioDataPacket(QSharedPointer<NLPacket> packet);
void handleNoisyMutePacket(QSharedPointer<NLPacket> packet);
void handleMuteEnvironmentPacket(QSharedPointer<NLPacket> packet);
void handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message);
void handleAudioDataPacket(QSharedPointer<ReceivedMessage> message);
void handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message);
void handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> message);
void sendDownstreamAudioStatsPacket() { _stats.sendDownstreamAudioStatsPacket(); }
void handleAudioInput();

View file

@ -63,11 +63,12 @@ void AudioIOStats::sentPacket() {
_lastSentAudioPacket = now;
}
}
void AudioIOStats::processStreamStatsPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void AudioIOStats::processStreamStatsPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// parse the appendFlag, clear injected audio stream stats if 0
quint8 appendFlag;
packet->readPrimitive(&appendFlag);
message->readPrimitive(&appendFlag);
if (!appendFlag) {
_mixerInjectedStreamStatsMap.clear();
@ -75,12 +76,12 @@ void AudioIOStats::processStreamStatsPacket(QSharedPointer<NLPacket> packet, Sha
// parse the number of stream stats structs to follow
quint16 numStreamStats;
packet->readPrimitive(&numStreamStats);
message->readPrimitive(&numStreamStats);
// parse the stream stats
AudioStreamStats streamStats;
for (quint16 i = 0; i < numStreamStats; i++) {
packet->readPrimitive(&streamStats);
message->readPrimitive(&streamStats);
if (streamStats._streamType == PositionalAudioStream::Microphone) {
_mixerAvatarStreamStats = streamStats;

View file

@ -45,7 +45,7 @@ public:
void sendDownstreamAudioStatsPacket();
public slots:
void processStreamStatsPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processStreamStatsPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
private:
MixedProcessedAudioStream* _receivedAudioStream;

View file

@ -98,22 +98,22 @@ void InboundAudioStream::perSecondCallbackForUpdatingStats() {
_timeGapStatsForStatsPacket.currentIntervalComplete();
}
int InboundAudioStream::parseData(NLPacket& packet) {
int InboundAudioStream::parseData(ReceivedMessage& message) {
// parse sequence number and track it
quint16 sequence;
packet.readPrimitive(&sequence);
message.readPrimitive(&sequence);
SequenceNumberStats::ArrivalInfo arrivalInfo = _incomingSequenceNumberStats.sequenceNumberReceived(sequence,
packet.getSourceID());
message.getSourceID());
packetReceivedUpdateTimingStats();
int networkSamples;
// parse the info after the seq number and before the audio data (the stream properties)
int prePropertyPosition = packet.pos();
int propertyBytes = parseStreamProperties(packet.getType(), packet.readWithoutCopy(packet.bytesLeftToRead()), networkSamples);
packet.seek(prePropertyPosition + propertyBytes);
int prePropertyPosition = message.getPosition();
int propertyBytes = parseStreamProperties(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkSamples);
message.seek(prePropertyPosition + propertyBytes);
// handle this packet based on its arrival status.
switch (arrivalInfo._status) {
@ -128,10 +128,10 @@ int InboundAudioStream::parseData(NLPacket& packet) {
}
case SequenceNumberStats::OnTime: {
// Packet is on time; parse its data to the ringbuffer
if (packet.getType() == PacketType::SilentAudioFrame) {
if (message.getType() == PacketType::SilentAudioFrame) {
writeDroppableSilentSamples(networkSamples);
} else {
parseAudioData(packet.getType(), packet.readWithoutCopy(packet.bytesLeftToRead()), networkSamples);
parseAudioData(message.getType(), message.readWithoutCopy(message.getBytesLeftToRead()), networkSamples);
}
break;
}
@ -161,7 +161,7 @@ int InboundAudioStream::parseData(NLPacket& packet) {
framesAvailableChanged();
return packet.pos();
return message.getPosition();
}
int InboundAudioStream::parseStreamProperties(PacketType type, const QByteArray& packetAfterSeqNum, int& numAudioSamples) {
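
The parseData() hunk swaps NLPacket's read cursor (pos()/bytesLeftToRead()) for ReceivedMessage's (getPosition()/getBytesLeftToRead()/seek()), but the parsing shape is unchanged: read the sequence number, note the cursor, let a sub-parser inspect the remaining bytes, then seek past exactly what it consumed. The following stand-alone sketch only illustrates that cursor style; MiniMessage is a made-up stand-in, not the real ReceivedMessage, and parseLikeAnAudioPacket() merely mimics the flow above.

#include <QByteArray>
#include <QtGlobal>
#include <cstring>

// Minimal cursor-based message reader (illustrative only).
class MiniMessage {
public:
    explicit MiniMessage(QByteArray data) : _data(std::move(data)) {}

    template <typename T>
    bool readPrimitive(T* out) {
        if (getBytesLeftToRead() < (qint64)sizeof(T)) { return false; }
        memcpy(out, _data.constData() + _position, sizeof(T));
        _position += sizeof(T);
        return true;
    }

    QByteArray readWithoutCopy(qint64 size) {
        QByteArray view = QByteArray::fromRawData(_data.constData() + _position, (int)size);
        _position += size;
        return view;
    }

    qint64 getPosition() const { return _position; }
    void seek(qint64 position) { _position = position; }
    qint64 getBytesLeftToRead() const { return _data.size() - _position; }

private:
    QByteArray _data;
    qint64 _position { 0 };
};

// Mirrors the parseData() flow: read the sequence number, remember the cursor,
// hand the remaining bytes to a property parser, then seek past exactly the
// number of bytes that parser reports consuming.
int parseLikeAnAudioPacket(MiniMessage& message) {
    quint16 sequence = 0;
    message.readPrimitive(&sequence);

    qint64 prePropertyPosition = message.getPosition();
    QByteArray rest = message.readWithoutCopy(message.getBytesLeftToRead());
    (void)rest;                       // would be handed to the real property parser
    int propertyBytes = 0;            // hypothetical result of that parser
    message.seek(prePropertyPosition + propertyBytes);

    return (int)message.getPosition();
}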

View file

@ -15,6 +15,7 @@
#include <NodeData.h>
#include <NumericalConstants.h>
#include <udt/PacketHeaders.h>
#include <ReceivedMessage.h>
#include <StDev.h>
#include "AudioRingBuffer.h"
@ -107,7 +108,7 @@ public:
virtual void resetStats();
void clearBuffer();
virtual int parseData(NLPacket& packet);
virtual int parseData(ReceivedMessage& packet) override;
int popFrames(int maxFrames, bool allOrNothing, bool starveIfNoFramesPopped = true);
int popSamples(int maxSamples, bool allOrNothing, bool starveIfNoSamplesPopped = true);

View file

@ -985,8 +985,8 @@ void AvatarData::clearJointsData() {
}
}
bool AvatarData::hasIdentityChangedAfterParsing(NLPacket& packet) {
QDataStream packetStream(&packet);
bool AvatarData::hasIdentityChangedAfterParsing(const QByteArray& data) {
QDataStream packetStream(data);
QUuid avatarUUID;
QUrl faceModelURL, skeletonModelURL;
@ -1030,12 +1030,11 @@ QByteArray AvatarData::identityByteArray() {
return identityData;
}
bool AvatarData::hasBillboardChangedAfterParsing(NLPacket& packet) {
QByteArray newBillboard = packet.readAll();
if (newBillboard == _billboard) {
bool AvatarData::hasBillboardChangedAfterParsing(const QByteArray& data) {
if (data == _billboard) {
return false;
}
_billboard = newBillboard;
_billboard = data;
return true;
}

View file

@ -284,10 +284,10 @@ public:
const HeadData* getHeadData() const { return _headData; }
const HandData* getHandData() const { return _handData; }
bool hasIdentityChangedAfterParsing(NLPacket& packet);
bool hasIdentityChangedAfterParsing(const QByteArray& data);
QByteArray identityByteArray();
bool hasBillboardChangedAfterParsing(NLPacket& packet);
bool hasBillboardChangedAfterParsing(const QByteArray& data);
const QUrl& getFaceModelURL() const { return _faceModelURL; }
QString getFaceModelURLString() const { return _faceModelURL.toString(); }

View file

@ -71,35 +71,34 @@ AvatarSharedPointer AvatarHashMap::findAvatar(const QUuid& sessionUUID) {
return nullptr;
}
void AvatarHashMap::processAvatarDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void AvatarHashMap::processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// enumerate over all of the avatars in this packet
// only add them if mixerWeakPointer points to something (meaning that mixer is still around)
while (packet->bytesLeftToRead()) {
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
while (message->getBytesLeftToRead()) {
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
int positionBeforeRead = packet->pos();
int positionBeforeRead = message->getPosition();
QByteArray byteArray = packet->readWithoutCopy(packet->bytesLeftToRead());
QByteArray byteArray = message->readWithoutCopy(message->getBytesLeftToRead());
if (sessionUUID != _lastOwnerSessionUUID) {
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
// have the matching (or new) avatar parse the data from the packet
int bytesRead = avatar->parseDataFromBuffer(byteArray);
packet->seek(positionBeforeRead + bytesRead);
message->seek(positionBeforeRead + bytesRead);
} else {
// create a dummy AvatarData class to throw this data on the ground
AvatarData dummyData;
int bytesRead = dummyData.parseDataFromBuffer(byteArray);
packet->seek(positionBeforeRead + bytesRead);
message->seek(positionBeforeRead + bytesRead);
}
}
}
void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// setup a data stream to parse the packet
QDataStream identityStream(packet.data());
QDataStream identityStream(message->getMessage());
QUuid sessionUUID;
@ -131,20 +130,20 @@ void AvatarHashMap::processAvatarIdentityPacket(QSharedPointer<NLPacket> packet,
}
}
void AvatarHashMap::processAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
void AvatarHashMap::processAvatarBillboardPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto avatar = newOrExistingAvatar(sessionUUID, sendingNode);
QByteArray billboard = packet->read(packet->bytesLeftToRead());
QByteArray billboard = message->read(message->getBytesLeftToRead());
if (avatar->getBillboard() != billboard) {
avatar->setBillboard(billboard);
}
}
void AvatarHashMap::processKillAvatar(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void AvatarHashMap::processKillAvatar(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// read the node id
QUuid sessionUUID = QUuid::fromRfc4122(packet->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
removeAvatar(sessionUUID);
}

View file

@ -45,10 +45,10 @@ public slots:
private slots:
void sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& oldUUID);
void processAvatarDataPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processAvatarIdentityPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processAvatarBillboardPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processKillAvatar(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processAvatarBillboardPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processKillAvatar(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
protected:
AvatarHashMap();

View file

@ -30,12 +30,11 @@ const QString& Basic2DWindowOpenGLDisplayPlugin::getName() const {
return NAME;
}
std::vector<QAction*> _framerateActions;
QAction* _vsyncAction{ nullptr };
void Basic2DWindowOpenGLDisplayPlugin::activate() {
WindowOpenGLDisplayPlugin::activate();
_framerateActions.clear();
_container->addMenuItem(MENU_PATH(), FULLSCREEN,
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), FULLSCREEN,
[this](bool clicked) {
if (clicked) {
_container->setFullscreen(getFullscreenTarget());
@ -45,26 +44,24 @@ void Basic2DWindowOpenGLDisplayPlugin::activate() {
}, true, false);
_container->addMenu(FRAMERATE);
_framerateActions.push_back(
_container->addMenuItem(FRAMERATE, FRAMERATE_UNLIMITED,
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, FRAMERATE, FRAMERATE_UNLIMITED,
[this](bool) { updateFramerate(); }, true, true, FRAMERATE));
_framerateActions.push_back(
_container->addMenuItem(FRAMERATE, FRAMERATE_60,
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, FRAMERATE, FRAMERATE_60,
[this](bool) { updateFramerate(); }, true, false, FRAMERATE));
_framerateActions.push_back(
_container->addMenuItem(FRAMERATE, FRAMERATE_50,
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, FRAMERATE, FRAMERATE_50,
[this](bool) { updateFramerate(); }, true, false, FRAMERATE));
_framerateActions.push_back(
_container->addMenuItem(FRAMERATE, FRAMERATE_40,
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, FRAMERATE, FRAMERATE_40,
[this](bool) { updateFramerate(); }, true, false, FRAMERATE));
_framerateActions.push_back(
_container->addMenuItem(FRAMERATE, FRAMERATE_30,
_container->addMenuItem(PluginType::DISPLAY_PLUGIN, FRAMERATE, FRAMERATE_30,
[this](bool) { updateFramerate(); }, true, false, FRAMERATE));
WindowOpenGLDisplayPlugin::activate();
// Vsync detection happens in the parent class activate, so we need to check after that
if (_vsyncSupported) {
_vsyncAction = _container->addMenuItem(MENU_PATH(), VSYNC_ON, [this](bool) {}, true, true);
_vsyncAction = _container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), VSYNC_ON, [this](bool) {}, true, true);
} else {
_vsyncAction = nullptr;
}
@ -72,22 +69,20 @@ void Basic2DWindowOpenGLDisplayPlugin::activate() {
updateFramerate();
}
void Basic2DWindowOpenGLDisplayPlugin::deactivate() {
WindowOpenGLDisplayPlugin::deactivate();
}
void Basic2DWindowOpenGLDisplayPlugin::display(GLuint sceneTexture, const glm::uvec2& sceneSize) {
void Basic2DWindowOpenGLDisplayPlugin::submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) {
if (_vsyncAction) {
bool wantVsync = _vsyncAction->isChecked();
bool vsyncEnabed = isVsyncEnabled();
if (vsyncEnabed ^ wantVsync) {
enableVsync(wantVsync);
}
_wantVsync = _vsyncAction->isChecked();
}
WindowOpenGLDisplayPlugin::display(sceneTexture, sceneSize);
WindowOpenGLDisplayPlugin::submitSceneTexture(frameIndex, sceneTexture, sceneSize);
}
void Basic2DWindowOpenGLDisplayPlugin::internalPresent() {
if (_wantVsync != isVsyncEnabled()) {
enableVsync(_wantVsync);
}
WindowOpenGLDisplayPlugin::internalPresent();
}
int Basic2DWindowOpenGLDisplayPlugin::getDesiredInterval() const {
static const int THROTTLED_PAINT_TIMER_DELAY_MS = MSECS_PER_SECOND / 15;

View file

@ -10,6 +10,8 @@
#include "WindowOpenGLDisplayPlugin.h"
class QScreen;
class QAction;
class Basic2DWindowOpenGLDisplayPlugin : public WindowOpenGLDisplayPlugin {
Q_OBJECT
@ -17,9 +19,10 @@ public:
virtual const QString & getName() const override;
virtual void activate() override;
virtual void deactivate() override;
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) override;
virtual void submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) override;
virtual void internalPresent() override;
virtual bool isThrottled() const override;
@ -31,6 +34,9 @@ private:
void updateFramerate();
static const QString NAME;
QScreen* getFullscreenTarget();
uint32_t _framerateTarget{ 0 };
std::vector<QAction*> _framerateActions;
QAction* _vsyncAction { nullptr };
uint32_t _framerateTarget { 0 };
int _fullscreenTarget{ -1 };
bool _wantVsync { true };
};

View file

@ -25,8 +25,8 @@ const QString& DisplayPlugin::MENU_PATH() {
DisplayPluginList getDisplayPlugins() {
DisplayPlugin* PLUGIN_POOL[] = {
new Basic2DWindowOpenGLDisplayPlugin(),
#ifdef DEBUG
new NullDisplayPlugin(),
#ifdef DEBUG
#endif
// Stereo modes
@ -37,10 +37,10 @@ DisplayPluginList getDisplayPlugins() {
new InterleavedStereoDisplayPlugin(),
// HMDs
#ifdef Q_OS_WIN
// SteamVR SDK
new OpenVrDisplayPlugin(),
#endif
//#ifdef Q_OS_WIN
// // SteamVR SDK
// new OpenVrDisplayPlugin(),
//#endif
nullptr
};

View file

@ -9,6 +9,9 @@
//
#include "NullDisplayPlugin.h"
#include <QtGui/QImage>
#include <plugins/PluginContainer.h>
const QString NullDisplayPlugin::NAME("NullDisplayPlugin");
const QString & NullDisplayPlugin::getName() const {
@ -23,8 +26,16 @@ bool NullDisplayPlugin::hasFocus() const {
return false;
}
void NullDisplayPlugin::preRender() {}
void NullDisplayPlugin::preDisplay() {}
void NullDisplayPlugin::display(uint32_t sceneTexture, const glm::uvec2& sceneSize) {}
void NullDisplayPlugin::finishFrame() {}
void NullDisplayPlugin::submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) {
_container->releaseSceneTexture(sceneTexture);
}
void NullDisplayPlugin::submitOverlayTexture(uint32_t overlayTexture, const glm::uvec2& overlaySize) {
_container->releaseOverlayTexture(overlayTexture);
}
void NullDisplayPlugin::stop() {}
QImage NullDisplayPlugin::getScreenshot() const {
return QImage();
}

View file

@ -19,11 +19,9 @@ public:
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual bool hasFocus() const override;
virtual void preRender() override;
virtual void preDisplay() override;
virtual void display(uint32_t sceneTexture, const glm::uvec2& sceneSize) override;
virtual void finishFrame() override;
virtual void submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) override;
virtual void submitOverlayTexture(uint32_t overlayTexture, const glm::uvec2& overlaySize) override;
virtual QImage getScreenshot() const override;
private:
static const QString NAME;
};

View file

@ -6,74 +6,214 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "OpenGLDisplayPlugin.h"
#include <QCoreApplication>
#include <condition_variable>
#include <QtCore/QCoreApplication>
#include <QtCore/QThread>
#include <QtCore/QTimer>
#include <QtOpenGL/QGLWidget>
#include <QtGui/QImage>
#include <gl/GLWidget.h>
#include <NumericalConstants.h>
#include <DependencyManager.h>
#include <plugins/PluginContainer.h>
#include <gl/Config.h>
#include <gl/GLEscrow.h>
#include <GLMHelpers.h>
class PresentThread : public QThread, public Dependency {
using Mutex = std::mutex;
using Condition = std::condition_variable;
using Lock = std::unique_lock<Mutex>;
public:
~PresentThread() {
_shutdown = true;
wait();
}
void setNewDisplayPlugin(OpenGLDisplayPlugin* plugin) {
Lock lock(_mutex);
_newPlugin = plugin;
}
void setContext(QGLContext * context) {
// Move the OpenGL context to the present thread
// Extra code because of the widget 'wrapper' context
_context = context;
_context->moveToThread(this);
}
virtual void run() override {
Q_ASSERT(_context);
while (!_shutdown) {
if (_pendingMainThreadOperation) {
{
Lock lock(_mutex);
// Move the context to the main thread
_context->moveToThread(qApp->thread());
_pendingMainThreadOperation = false;
// Release the main thread to do its action
_condition.notify_one();
}
{
// Main thread does its thing while we wait on the lock to release
Lock lock(_mutex);
_condition.wait(lock, [&] { return _finishedMainThreadOperation; });
}
}
// Check before lock
if (_newPlugin != nullptr) {
Lock lock(_mutex);
_context->makeCurrent();
// Check if we have a new plugin to activate
if (_newPlugin != nullptr) {
// Deactivate the old plugin
if (_activePlugin != nullptr) {
_activePlugin->uncustomizeContext();
}
_newPlugin->customizeContext();
_activePlugin = _newPlugin;
_newPlugin = nullptr;
}
_context->doneCurrent();
lock.unlock();
}
// If there's no active plugin, just sleep
if (_activePlugin == nullptr) {
QThread::usleep(100);
continue;
}
// take the latest texture and present it
_context->makeCurrent();
_activePlugin->present();
_context->doneCurrent();
}
_context->doneCurrent();
_context->moveToThread(qApp->thread());
}
void withMainThreadContext(std::function<void()> f) {
// Signal to the thread that there is work to be done on the main thread
Lock lock(_mutex);
_pendingMainThreadOperation = true;
_finishedMainThreadOperation = false;
_condition.wait(lock, [&] { return !_pendingMainThreadOperation; });
_context->makeCurrent();
f();
_context->doneCurrent();
// Move the context back to the presentation thread
_context->moveToThread(this);
// restore control of the context to the presentation thread and signal
// the end of the operation
_finishedMainThreadOperation = true;
lock.unlock();
_condition.notify_one();
}
private:
void makeCurrent();
void doneCurrent();
bool _shutdown { false };
Mutex _mutex;
// Used to allow the main thread to perform context operations
Condition _condition;
bool _pendingMainThreadOperation { false };
bool _finishedMainThreadOperation { false };
QThread* _mainThread { nullptr };
OpenGLDisplayPlugin* _newPlugin { nullptr };
OpenGLDisplayPlugin* _activePlugin { nullptr };
QGLContext* _context { nullptr };
};
OpenGLDisplayPlugin::OpenGLDisplayPlugin() {
_sceneTextureEscrow.setRecycler([this](GLuint texture){
cleanupForSceneTexture(texture);
_container->releaseSceneTexture(texture);
});
_overlayTextureEscrow.setRecycler([this](GLuint texture) {
_container->releaseOverlayTexture(texture);
});
connect(&_timer, &QTimer::timeout, this, [&] {
if (_active) {
if (_active && _sceneTextureEscrow.depth() < 1) {
emit requestRender();
}
});
}
OpenGLDisplayPlugin::~OpenGLDisplayPlugin() {
void OpenGLDisplayPlugin::cleanupForSceneTexture(uint32_t sceneTexture) {
Lock lock(_mutex);
Q_ASSERT(_sceneTextureToFrameIndexMap.contains(sceneTexture));
_sceneTextureToFrameIndexMap.remove(sceneTexture);
}
void OpenGLDisplayPlugin::preDisplay() {
makeCurrent();
};
void OpenGLDisplayPlugin::preRender() {
// NOOP
}
void OpenGLDisplayPlugin::finishFrame() {
swapBuffers();
doneCurrent();
};
void OpenGLDisplayPlugin::customizeContext() {
using namespace oglplus;
// TODO: write the proper code for Linux
#if defined(Q_OS_WIN)
_vsyncSupported = wglewGetExtension("WGL_EXT_swap_control");
#endif
Context::BlendFunc(BlendFunction::SrcAlpha, BlendFunction::OneMinusSrcAlpha);
Context::Disable(Capability::Blend);
Context::Disable(Capability::DepthTest);
Context::Disable(Capability::CullFace);
_program = loadDefaultShader();
_plane = loadPlane(_program);
enableVsync();
}
void OpenGLDisplayPlugin::activate() {
DisplayPlugin::activate();
_timer.start(1);
_vsyncSupported = _container->getPrimaryWidget()->isVsyncSupported();
// Start the present thread if necessary
auto presentThread = DependencyManager::get<PresentThread>();
if (!presentThread) {
auto widget = _container->getPrimaryWidget();
DependencyManager::set<PresentThread>();
presentThread = DependencyManager::get<PresentThread>();
presentThread->setObjectName("Presentation Thread");
presentThread->setContext(widget->context());
// Start execution
presentThread->start();
}
presentThread->setNewDisplayPlugin(this);
DisplayPlugin::activate();
}
void OpenGLDisplayPlugin::stop() {
DisplayPlugin::activate();
_timer.stop();
}
void OpenGLDisplayPlugin::deactivate() {
_active = false;
_timer.stop();
DisplayPlugin::deactivate();
}
makeCurrent();
Q_ASSERT(0 == glGetError());
void OpenGLDisplayPlugin::customizeContext() {
auto presentThread = DependencyManager::get<PresentThread>();
Q_ASSERT(thread() == presentThread->thread());
enableVsync();
using namespace oglplus;
Context::BlendFunc(BlendFunction::SrcAlpha, BlendFunction::OneMinusSrcAlpha);
Context::Disable(Capability::Blend);
Context::Disable(Capability::DepthTest);
Context::Disable(Capability::CullFace);
_program = loadDefaultShader();
_plane = loadPlane(_program);
}
void OpenGLDisplayPlugin::uncustomizeContext() {
_program.reset();
_plane.reset();
doneCurrent();
}
// Pressing Alt (and Meta) key alone activates the menubar because its style inherits the
@ -118,13 +258,65 @@ bool OpenGLDisplayPlugin::eventFilter(QObject* receiver, QEvent* event) {
return false;
}
void OpenGLDisplayPlugin::display(
GLuint finalTexture, const glm::uvec2& sceneSize) {
void OpenGLDisplayPlugin::submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) {
{
Lock lock(_mutex);
_sceneTextureToFrameIndexMap[sceneTexture] = frameIndex;
}
// Submit it to the presentation thread via escrow
_sceneTextureEscrow.submit(sceneTexture);
}
void OpenGLDisplayPlugin::submitOverlayTexture(GLuint sceneTexture, const glm::uvec2& sceneSize) {
// Submit it to the presentation thread via escrow
_overlayTextureEscrow.submit(sceneTexture);
}
void OpenGLDisplayPlugin::updateTextures() {
_currentSceneTexture = _sceneTextureEscrow.fetchAndRelease(_currentSceneTexture);
_currentOverlayTexture = _overlayTextureEscrow.fetchAndRelease(_currentOverlayTexture);
}
void OpenGLDisplayPlugin::updateFramerate() {
uint64_t now = usecTimestampNow();
static uint64_t lastSwapEnd { now };
uint64_t diff = now - lastSwapEnd;
lastSwapEnd = now;
if (diff != 0) {
Lock lock(_mutex);
_usecsPerFrame.updateAverage(diff);
}
}
void OpenGLDisplayPlugin::internalPresent() {
using namespace oglplus;
uvec2 size = getSurfaceSize();
uvec2 size = getSurfacePixels();
Context::Viewport(size.x, size.y);
glBindTexture(GL_TEXTURE_2D, finalTexture);
Context::Clear().DepthBuffer();
glBindTexture(GL_TEXTURE_2D, _currentSceneTexture);
drawUnitQuad();
swapBuffers();
}
void OpenGLDisplayPlugin::present() {
updateTextures();
if (_currentSceneTexture) {
internalPresent();
updateFramerate();
}
}
float OpenGLDisplayPlugin::presentRate() {
float result { -1.0f };
{
Lock lock(_mutex);
result = _usecsPerFrame.getAverage();
result = 1.0f / result;
result *= USECS_PER_SECOND;
}
return result;
}
void OpenGLDisplayPlugin::drawUnitQuad() {
@ -151,3 +343,23 @@ bool OpenGLDisplayPlugin::isVsyncEnabled() {
return true;
#endif
}
void OpenGLDisplayPlugin::swapBuffers() {
static auto widget = _container->getPrimaryWidget();
widget->swapBuffers();
}
void OpenGLDisplayPlugin::withMainThreadContext(std::function<void()> f) const {
static auto presentThread = DependencyManager::get<PresentThread>();
presentThread->withMainThreadContext(f);
_container->makeRenderingContextCurrent();
}
QImage OpenGLDisplayPlugin::getScreenshot() const {
QImage result;
withMainThreadContext([&] {
static auto widget = _container->getPrimaryWidget();
result = widget->grabFrameBuffer();
});
return result;
}
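
The new PresentThread owns the GL context, but withMainThreadContext() lets the main thread borrow it: a mutex plus condition variable park the present loop, the main thread runs its work, then the loop resumes. Stripped of the GL and Qt specifics, the handshake looks roughly like this; Handshake, withResource() and poll() are illustrative names, and the real code additionally moveToThread()s the QGLContext around the callback, which is omitted here. It assumes a single requesting thread and a single worker.

#include <condition_variable>
#include <functional>
#include <mutex>

// Sketch of the withMainThreadContext() handshake: the worker loop parks
// itself while another thread borrows a shared resource, then resumes.
class Handshake {
public:
    // Called from the requesting (e.g. main) thread.
    void withResource(std::function<void()> work) {
        std::unique_lock<std::mutex> lock(_mutex);
        _pending = true;
        _finished = false;
        // Wait for the worker to acknowledge the request and park itself.
        _condition.wait(lock, [this] { return !_pending; });
        work();                      // the shared resource is safe to use here
        _finished = true;
        lock.unlock();
        _condition.notify_one();     // let the worker resume
    }

    // Called once per iteration of the worker thread's loop.
    void poll() {
        std::unique_lock<std::mutex> lock(_mutex);
        if (!_pending) {
            return;
        }
        _pending = false;
        _condition.notify_one();     // release the requesting thread
        // Park until the requesting thread reports it is done.
        _condition.wait(lock, [this] { return _finished; });
    }

private:
    std::mutex _mutex;
    std::condition_variable _condition;
    bool _pending { false };
    bool _finished { false };
};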

View file

@ -9,42 +9,79 @@
#include "DisplayPlugin.h"
#include <QTimer>
#include <gl/OglplusHelpers.h>
#include <QtCore/QTimer>
class GlWindow;
class QOpenGLContext;
#include <GLMHelpers.h>
#include <SimpleMovingAverage.h>
#include <gl/OglplusHelpers.h>
#include <gl/GLEscrow.h>
class OpenGLDisplayPlugin : public DisplayPlugin {
protected:
using Mutex = std::recursive_mutex;
using Lock = std::unique_lock<Mutex>;
public:
OpenGLDisplayPlugin();
virtual ~OpenGLDisplayPlugin();
virtual void preRender() override;
virtual void preDisplay() override;
virtual void finishFrame() override;
virtual void activate() override;
virtual void deactivate() override;
virtual void stop() override;
virtual bool eventFilter(QObject* receiver, QEvent* event) override;
virtual void display(GLuint sceneTexture, const glm::uvec2& sceneSize) override;
virtual void submitSceneTexture(uint32_t frameIndex, uint32_t sceneTexture, const glm::uvec2& sceneSize) override;
virtual void submitOverlayTexture(uint32_t overlayTexture, const glm::uvec2& overlaySize) override;
virtual float presentRate() override;
virtual glm::uvec2 getRecommendedRenderSize() const override {
return getSurfacePixels();
}
virtual glm::uvec2 getRecommendedUiSize() const override {
return getSurfaceSize();
}
virtual QImage getScreenshot() const override;
protected:
virtual void customizeContext();
virtual void drawUnitQuad();
virtual glm::uvec2 getSurfaceSize() const = 0;
virtual void makeCurrent() = 0;
virtual void doneCurrent() = 0;
virtual void swapBuffers() = 0;
friend class PresentThread;
virtual glm::uvec2 getSurfaceSize() const = 0;
virtual glm::uvec2 getSurfacePixels() const = 0;
// FIXME make thread safe?
virtual bool isVsyncEnabled();
virtual void enableVsync(bool enable = true);
// These functions must only be called on the presentation thread
virtual void customizeContext();
virtual void uncustomizeContext();
virtual void cleanupForSceneTexture(uint32_t sceneTexture);
void withMainThreadContext(std::function<void()> f) const;
void present();
void updateTextures();
void updateFramerate();
void drawUnitQuad();
void swapBuffers();
// Plugin specific functionality to composite the scene and overlay and present the result
virtual void internalPresent();
mutable QTimer _timer;
ProgramPtr _program;
ShapeWrapperPtr _plane;
bool _vsyncSupported{ false };
Mutex _mutex;
SimpleMovingAverage _usecsPerFrame { 10 };
QMap<uint32_t, uint32_t> _sceneTextureToFrameIndexMap;
GLuint _currentSceneTexture { 0 };
GLuint _currentOverlayTexture { 0 };
GLTextureEscrow _overlayTextureEscrow;
GLTextureEscrow _sceneTextureEscrow;
bool _vsyncSupported { false };
};
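
_sceneTextureEscrow and _overlayTextureEscrow carry textures from the render thread to the present thread: submit() on one side, fetchAndRelease() on the other, with a recycler callback returning no-longer-needed textures to their owner. The sketch below is only a rough, mutex-based illustration of that submit/fetch/recycle contract, not the actual GLEscrow implementation (which also has to deal with GPU synchronization); T is assumed to be a handle type like GLuint where 0 means "no texture".

#include <deque>
#include <functional>
#include <mutex>

template <typename T>
class MiniEscrow {
public:
    void setRecycler(std::function<void(T)> recycler) { _recycler = std::move(recycler); }

    // Producer side: hand over a finished item.
    void submit(T item) {
        std::lock_guard<std::mutex> lock(_mutex);
        _pending.push_back(item);
    }

    // How many submitted items are waiting (used above to throttle rendering).
    size_t depth() const {
        std::lock_guard<std::mutex> lock(_mutex);
        return _pending.size();
    }

    // Consumer side: returns the newest submitted item (or `current` if nothing
    // new arrived), recycling the previously held item and any stale submissions.
    T fetchAndRelease(T current) {
        std::lock_guard<std::mutex> lock(_mutex);
        if (_pending.empty()) {
            return current;
        }
        T newest = _pending.back();
        _pending.pop_back();
        if (_recycler) {
            for (T stale : _pending) { _recycler(stale); }
            if (current != T{}) { _recycler(current); }
        }
        _pending.clear();
        return newest;
    }

private:
    mutable std::mutex _mutex;
    std::deque<T> _pending;
    std::function<void(T)> _recycler;
};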

View file

@ -7,18 +7,11 @@
//
#include "WindowOpenGLDisplayPlugin.h"
#include <QGLWidget>
#include <gl/GLWidget.h>
#include "plugins/PluginContainer.h"
WindowOpenGLDisplayPlugin::WindowOpenGLDisplayPlugin() {
}
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedRenderSize() const {
return getSurfaceSize();
}
glm::uvec2 WindowOpenGLDisplayPlugin::getSurfaceSize() const {
glm::uvec2 WindowOpenGLDisplayPlugin::getSurfacePixels() const {
uvec2 result;
if (_window) {
result = toGlm(_window->geometry().size() * _window->devicePixelRatio());
@ -26,8 +19,7 @@ glm::uvec2 WindowOpenGLDisplayPlugin::getSurfaceSize() const {
return result;
}
glm::uvec2 WindowOpenGLDisplayPlugin::getRecommendedUiSize() const {
glm::uvec2 WindowOpenGLDisplayPlugin::getSurfaceSize() const {
uvec2 result;
if (_window) {
result = toGlm(_window->geometry().size());
@ -40,11 +32,8 @@ bool WindowOpenGLDisplayPlugin::hasFocus() const {
}
void WindowOpenGLDisplayPlugin::activate() {
_window = _container->getPrimaryWidget();
OpenGLDisplayPlugin::activate();
_window = _container->getPrimarySurface();
_window->makeCurrent();
customizeContext();
_window->doneCurrent();
}
void WindowOpenGLDisplayPlugin::deactivate() {
@ -52,14 +41,3 @@ void WindowOpenGLDisplayPlugin::deactivate() {
_window = nullptr;
}
void WindowOpenGLDisplayPlugin::makeCurrent() {
_window->makeCurrent();
}
void WindowOpenGLDisplayPlugin::doneCurrent() {
_window->doneCurrent();
}
void WindowOpenGLDisplayPlugin::swapBuffers() {
_window->swapBuffers();
}

View file

@ -9,21 +9,17 @@
#include "OpenGLDisplayPlugin.h"
class QGLWidget;
class QWidget;
class WindowOpenGLDisplayPlugin : public OpenGLDisplayPlugin {
public:
WindowOpenGLDisplayPlugin();
virtual glm::uvec2 getRecommendedRenderSize() const override;
virtual glm::uvec2 getRecommendedUiSize() const override;
virtual bool hasFocus() const override;
virtual void activate() override;
virtual void deactivate() override;
protected:
virtual glm::uvec2 getSurfaceSize() const override final;
virtual void makeCurrent() override;
virtual void doneCurrent() override;
virtual void swapBuffers() override;
QGLWidget* _window{ nullptr };
virtual glm::uvec2 getSurfacePixels() const override final;
QWidget* _window { nullptr };
};

View file

@ -152,7 +152,7 @@ glm::mat4 OpenVrDisplayPlugin::getEyeToHeadTransform(Eye eye) const {
return _eyesData[eye]._eyeOffset;
}
glm::mat4 OpenVrDisplayPlugin::getHeadPose() const {
glm::mat4 OpenVrDisplayPlugin::getHeadPose(uint32_t frameIndex) const {
return _trackedDevicePoseMat4[0];
}
@ -160,26 +160,26 @@ void OpenVrDisplayPlugin::customizeContext() {
WindowOpenGLDisplayPlugin::customizeContext();
}
void OpenVrDisplayPlugin::display(GLuint finalTexture, const glm::uvec2& sceneSize) {
// Flip y-axis since GL UV coords are backwards.
static vr::Compositor_TextureBounds leftBounds{ 0, 1, 0.5f, 0 };
static vr::Compositor_TextureBounds rightBounds{ 0.5f, 1, 1, 0 };
_compositor->Submit(vr::Eye_Left, (void*)finalTexture, &leftBounds);
_compositor->Submit(vr::Eye_Right, (void*)finalTexture, &rightBounds);
glFinish();
}
//void OpenVrDisplayPlugin::display(uint32_t frameIndex, uint32_t finalTexture, const glm::uvec2& sceneSize) {
// // Flip y-axis since GL UV coords are backwards.
// static vr::Compositor_TextureBounds leftBounds{ 0, 1, 0.5f, 0 };
// static vr::Compositor_TextureBounds rightBounds{ 0.5f, 1, 1, 0 };
// _compositor->Submit(vr::Eye_Left, (void*)finalTexture, &leftBounds);
// _compositor->Submit(vr::Eye_Right, (void*)finalTexture, &rightBounds);
// glFinish();
//}
void OpenVrDisplayPlugin::finishFrame() {
// swapBuffers();
doneCurrent();
_compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
_trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
}
openvr_for_each_eye([&](vr::Hmd_Eye eye) {
_eyesData[eye]._pose = _trackedDevicePoseMat4[0];
});
};
//void OpenVrDisplayPlugin::finishFrame() {
//// swapBuffers();
// doneCurrent();
// _compositor->WaitGetPoses(_trackedDevicePose, vr::k_unMaxTrackedDeviceCount);
// for (int i = 0; i < vr::k_unMaxTrackedDeviceCount; i++) {
// _trackedDevicePoseMat4[i] = _sensorResetMat * toGlm(_trackedDevicePose[i].mDeviceToAbsoluteTracking);
// }
// openvr_for_each_eye([&](vr::Hmd_Eye eye) {
// _eyesData[eye]._pose = _trackedDevicePoseMat4[0];
// });
//};
#endif

View file

@ -31,13 +31,11 @@ public:
virtual void resetSensors() override;
virtual glm::mat4 getEyeToHeadTransform(Eye eye) const override;
virtual glm::mat4 getHeadPose() const override;
virtual glm::mat4 getHeadPose(uint32_t frameIndex) const override;
protected:
virtual void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
// virtual void display(uint32_t frameIndex, uint32_t finalTexture, const glm::uvec2& sceneSize) override;
virtual void customizeContext() override;
// Do not perform swap in finish
virtual void finishFrame() override;
private:
vr::IVRSystem* _hmd { nullptr };

View file

@ -66,10 +66,10 @@ glm::uvec2 InterleavedStereoDisplayPlugin::getRecommendedRenderSize() const {
return result;
}
void InterleavedStereoDisplayPlugin::display(
GLuint finalTexture, const glm::uvec2& sceneSize) {
void InterleavedStereoDisplayPlugin::internalPresent() {
using namespace oglplus;
_program->Bind();
auto sceneSize = getRecommendedRenderSize();
Uniform<ivec2>(*_program, "textureSize").SetValue(sceneSize);
WindowOpenGLDisplayPlugin::display(finalTexture, sceneSize);
WindowOpenGLDisplayPlugin::internalPresent();
}

View file

@ -19,7 +19,7 @@ public:
virtual void customizeContext() override;
virtual glm::uvec2 getRecommendedRenderSize() const override;
void display(GLuint finalTexture, const glm::uvec2& sceneSize) override;
void internalPresent() override;
private:
static const QString NAME;

View file

@ -74,7 +74,7 @@ void StereoDisplayPlugin::activate() {
if (screen == qApp->primaryScreen()) {
checked = true;
}
auto action = _container->addMenuItem(MENU_PATH(), name,
auto action = _container->addMenuItem(PluginType::DISPLAY_PLUGIN, MENU_PATH(), name,
[this](bool clicked) { updateScreen(); }, true, checked, "Screens");
_screenActions[i] = action;
}

View file

@ -408,8 +408,8 @@ int EntityTreeRenderer::getBoundaryLevelAdjust() const {
}
void EntityTreeRenderer::processEraseMessage(NLPacket& packet, const SharedNodePointer& sourceNode) {
std::static_pointer_cast<EntityTree>(_tree)->processEraseMessage(packet, sourceNode);
void EntityTreeRenderer::processEraseMessage(ReceivedMessage& message, const SharedNodePointer& sourceNode) {
std::static_pointer_cast<EntityTree>(_tree)->processEraseMessage(message, sourceNode);
}
Model* EntityTreeRenderer::allocateModel(const QString& url, const QString& collisionUrl) {

View file

@ -49,7 +49,7 @@ public:
EntityTreePointer getTree() { return std::static_pointer_cast<EntityTree>(_tree); }
void processEraseMessage(NLPacket& packet, const SharedNodePointer& sourceNode);
void processEraseMessage(ReceivedMessage& message, const SharedNodePointer& sourceNode);
virtual void init();

View file

@ -68,8 +68,8 @@ public:
virtual bool contains(const glm::vec3& point) const override;
// these are in the frame of this object
virtual glm::quat getJointRotation(int index) const;
virtual glm::vec3 getJointTranslation(int index) const;
virtual glm::quat getJointRotation(int index) const override;
virtual glm::vec3 getJointTranslation(int index) const override;
private:
void remapTextures();

View file

@ -337,10 +337,10 @@ void RenderableParticleEffectEntityItem::createPipelines() {
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
destinationColorBlendArg, gpu::State::FACTOR_ALPHA,
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(untextured_particle_vert)));
auto fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(untextured_particle_frag)));
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
_untexturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
auto vertShader = gpu::Shader::createVertex(std::string(untextured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(untextured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_untexturedPipeline = gpu::Pipeline::create(program, state);
}
if (!_texturedPipeline) {
auto state = std::make_shared<gpu::State>();
@ -352,17 +352,16 @@ void RenderableParticleEffectEntityItem::createPipelines() {
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD,
destinationColorBlendArg, gpu::State::FACTOR_ALPHA,
gpu::State::BLEND_OP_ADD, gpu::State::ONE);
auto vertShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(textured_particle_vert)));
auto vertShader = gpu::Shader::createVertex(std::string(textured_particle_vert));
gpu::ShaderPointer fragShader;
if (_additiveBlending) {
fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_frag)));
fragShader = gpu::Shader::createPixel(std::string(textured_particle_frag));
}
else {
// If we are sorting and have no additive blending, we want to discard pixels with low alpha to avoid inter-particle entity artifacts
fragShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(textured_particle_alpha_discard_frag)));
fragShader = gpu::Shader::createPixel(std::string(textured_particle_alpha_discard_frag));
}
auto program = gpu::ShaderPointer(gpu::Shader::createProgram(vertShader, fragShader));
_texturedPipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_texturedPipeline = gpu::Pipeline::create(program, state);
}
}
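
The shader and pipeline hunks here and in the Renderable* files below are all the same mechanical change: gpu::Shader::createVertex/createPixel/createProgram and gpu::Pipeline::create now return the smart-pointer types themselves, so call sites stop wrapping raw pointers in gpu::ShaderPointer/gpu::PipelinePointer by hand. A generic sketch of that factory shift, using a made-up Shader type rather than the actual gpu library:

#include <memory>
#include <string>

struct Shader {
    using Pointer = std::shared_ptr<Shader>;

    explicit Shader(std::string src) : source(std::move(src)) {}

    // New-style factory: ownership is established inside the library, so
    // callers can never leak the raw pointer or double-wrap it.
    static Pointer createVertex(const std::string& source) {
        return std::make_shared<Shader>(source);
    }

    std::string source;
};

int main() {
    // Old call sites: auto vs = Shader::Pointer(Shader::createVertex(src));
    // New call sites simply keep the returned shared_ptr:
    auto vs = Shader::createVertex("void main() {}");
    return vs ? 0 : 1;
}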

View file

@ -50,9 +50,9 @@ void RenderablePolyLineEntityItem::createPipeline() {
_format->setAttribute(gpu::Stream::COLOR, 0, gpu::Element::COLOR_RGBA_32, COLOR_OFFSET);
_format->setAttribute(gpu::Stream::TEXCOORD, 0, gpu::Element(gpu::VEC2, gpu::FLOAT, gpu::UV), TEXTURE_OFFSET);
auto VS = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(paintStroke_vert)));
auto PS = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(paintStroke_frag)));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(VS, PS));
auto VS = gpu::Shader::createVertex(std::string(paintStroke_vert));
auto PS = gpu::Shader::createPixel(std::string(paintStroke_frag));
gpu::ShaderPointer program = gpu::Shader::createProgram(VS, PS);
gpu::Shader::BindingSet slotBindings;
PAINTSTROKE_GPU_SLOT = 0;
@ -64,7 +64,7 @@ void RenderablePolyLineEntityItem::createPipeline() {
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_pipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
_pipeline = gpu::Pipeline::create(program, state);
}
void RenderablePolyLineEntityItem::updateGeometry() {

View file

@ -478,8 +478,8 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
_meshLock.unlock();
if (!_pipeline) {
gpu::ShaderPointer vertexShader = gpu::ShaderPointer(gpu::Shader::createVertex(std::string(polyvox_vert)));
gpu::ShaderPointer pixelShader = gpu::ShaderPointer(gpu::Shader::createPixel(std::string(polyvox_frag)));
gpu::ShaderPointer vertexShader = gpu::Shader::createVertex(std::string(polyvox_vert));
gpu::ShaderPointer pixelShader = gpu::Shader::createPixel(std::string(polyvox_frag));
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("materialBuffer"), MATERIAL_GPU_SLOT));
@ -487,14 +487,14 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
slotBindings.insert(gpu::Shader::Binding(std::string("yMap"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("zMap"), 2));
gpu::ShaderPointer program = gpu::ShaderPointer(gpu::Shader::createProgram(vertexShader, pixelShader));
gpu::ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
gpu::Shader::makeProgram(*program, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
_pipeline = gpu::PipelinePointer(gpu::Pipeline::create(program, state));
_pipeline = gpu::Pipeline::create(program, state);
}
gpu::Batch& batch = *args->_batch;

View file

@ -23,9 +23,9 @@ EntityEditPacketSender::EntityEditPacketSender() {
packetReceiver.registerDirectListener(PacketType::EntityEditNack, this, "processEntityEditNackPacket");
}
void EntityEditPacketSender::processEntityEditNackPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode) {
void EntityEditPacketSender::processEntityEditNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
if (_shouldProcessNack) {
processNackPacket(*packet, sendingNode);
processNackPacket(*message, sendingNode);
}
}

View file

@ -35,7 +35,7 @@ public:
virtual void adjustEditPacketForClockSkew(PacketType type, QByteArray& buffer, int clockSkew);
public slots:
void processEntityEditNackPacket(QSharedPointer<NLPacket> packet, SharedNodePointer sendingNode);
void processEntityEditNackPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void toggleNackPackets() { _shouldProcessNack = !_shouldProcessNack; }
private:

View file

@ -742,7 +742,7 @@ void EntityTree::fixupTerseEditLogging(EntityItemProperties& properties, QList<Q
}
}
int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* editData, int maxLength,
int EntityTree::processEditPacketData(ReceivedMessage& message, const unsigned char* editData, int maxLength,
const SharedNodePointer& senderNode) {
if (!getIsServer()) {
@ -752,7 +752,7 @@ int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* edi
int processedBytes = 0;
// we handle these types of "edit" packets
switch (packet.getType()) {
switch (message.getType()) {
case PacketType::EntityErase: {
QByteArray dataByteArray = QByteArray::fromRawData(reinterpret_cast<const char*>(editData), maxLength);
processedBytes = processEraseMessageDetails(dataByteArray, senderNode);
@ -784,7 +784,7 @@ int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* edi
startLookup = usecTimestampNow();
EntityItemPointer existingEntity = findEntityByEntityItemID(entityItemID);
endLookup = usecTimestampNow();
if (existingEntity && packet.getType() == PacketType::EntityEdit) {
if (existingEntity && message.getType() == PacketType::EntityEdit) {
// if the EntityItem exists, then update it
startLogging = usecTimestampNow();
if (wantEditLogging()) {
@ -804,7 +804,7 @@ int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* edi
existingEntity->markAsChangedOnServer();
endUpdate = usecTimestampNow();
_totalUpdates++;
} else if (packet.getType() == PacketType::EntityAdd) {
} else if (message.getType() == PacketType::EntityAdd) {
if (senderNode->getCanRez()) {
// this is a new entity... assign a new entityID
properties.setCreated(properties.getLastEdited());
@ -837,7 +837,7 @@ int EntityTree::processEditPacketData(NLPacket& packet, const unsigned char* edi
} else {
static QString repeatedMessage =
LogHandler::getInstance().addRepeatedMessageRegex("^Edit failed.*");
qCDebug(entities) << "Edit failed. [" << packet.getType() <<"] " <<
qCDebug(entities) << "Edit failed. [" << message.getType() <<"] " <<
"entity id:" << entityItemID <<
"existingEntity pointer:" << existingEntity.get();
}
@ -979,27 +979,27 @@ void EntityTree::forgetEntitiesDeletedBefore(quint64 sinceTime) {
// TODO: consider consolidating processEraseMessageDetails() and processEraseMessage()
int EntityTree::processEraseMessage(NLPacket& packet, const SharedNodePointer& sourceNode) {
int EntityTree::processEraseMessage(ReceivedMessage& message, const SharedNodePointer& sourceNode) {
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << "EntityTree::processEraseMessage()";
#endif
withWriteLock([&] {
packet.seek(sizeof(OCTREE_PACKET_FLAGS) + sizeof(OCTREE_PACKET_SEQUENCE) + sizeof(OCTREE_PACKET_SENT_TIME));
message.seek(sizeof(OCTREE_PACKET_FLAGS) + sizeof(OCTREE_PACKET_SEQUENCE) + sizeof(OCTREE_PACKET_SENT_TIME));
uint16_t numberOfIDs = 0; // placeholder for now
packet.readPrimitive(&numberOfIDs);
message.readPrimitive(&numberOfIDs);
if (numberOfIDs > 0) {
QSet<EntityItemID> entityItemIDsToDelete;
for (size_t i = 0; i < numberOfIDs; i++) {
if (NUM_BYTES_RFC4122_UUID > packet.bytesLeftToRead()) {
if (NUM_BYTES_RFC4122_UUID > message.getBytesLeftToRead()) {
qCDebug(entities) << "EntityTree::processEraseMessage().... bailing because not enough bytes in buffer";
break; // bail to prevent buffer overflow
}
QUuid entityID = QUuid::fromRfc4122(packet.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
QUuid entityID = QUuid::fromRfc4122(message.readWithoutCopy(NUM_BYTES_RFC4122_UUID));
#ifdef EXTRA_ERASE_DEBUGGING
qDebug() << " ---- EntityTree::processEraseMessage() contained ID:" << entityID;
#endif
@ -1015,7 +1015,7 @@ int EntityTree::processEraseMessage(NLPacket& packet, const SharedNodePointer& s
deleteEntities(entityItemIDsToDelete, true, true);
}
});
return packet.pos();
return message.getPosition();
}
// This version skips over the header
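
processEraseMessage() keeps its defensive shape under the new API: read a count, and before every fixed-size read check how many bytes remain, bailing rather than reading past the end of the buffer. A self-contained sketch of that pattern over a plain QByteArray follows; the payload layout is illustrative, not the actual octree erase-packet format.

#include <QByteArray>
#include <QSet>
#include <QUuid>
#include <QtGlobal>
#include <cstring>

// Defensive parse of a count-prefixed list of 16-byte RFC 4122 UUIDs.
// Stops early (instead of overrunning) if the buffer is shorter than advertised.
QSet<QUuid> parseIdList(const QByteArray& payload) {
    QSet<QUuid> ids;
    int position = 0;

    quint16 count = 0;
    if (payload.size() < (int)sizeof(count)) {
        return ids;                        // not even a count: bail
    }
    memcpy(&count, payload.constData(), sizeof(count));
    position += sizeof(count);

    const int NUM_BYTES_RFC4122_UUID = 16;
    for (quint16 i = 0; i < count; i++) {
        if (payload.size() - position < NUM_BYTES_RFC4122_UUID) {
            break;                         // bail to prevent reading past the end
        }
        ids.insert(QUuid::fromRfc4122(payload.mid(position, NUM_BYTES_RFC4122_UUID)));
        position += NUM_BYTES_RFC4122_UUID;
    }
    return ids;
}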

Some files were not shown because too many files have changed in this diff.