Merge pull request #10972 from highfidelity/RC-48

Beta Release 48 - Includes up to Developer Release 6863
Commit 13262afec7 by Chris Collins, 2017-07-20 08:57:47 -07:00, committed by GitHub
169 changed files with 4614 additions and 1733 deletions


@@ -23,6 +23,7 @@
#include <AvatarHashMap.h>
#include <AudioInjectorManager.h>
#include <AssetClient.h>
#include <DebugDraw.h>
#include <LocationScriptingInterface.h>
#include <MessagesClient.h>
#include <NetworkAccessManager.h>
@@ -81,6 +82,9 @@ Agent::Agent(ReceivedMessage& message) :
DependencyManager::set<RecordingScriptingInterface>();
DependencyManager::set<UsersScriptingInterface>();
// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
@@ -604,6 +608,24 @@ void Agent::processAgentAvatar() {
AvatarData::AvatarDataDetail dataDetail = (randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO) ? AvatarData::SendAllData : AvatarData::CullSmallData;
QByteArray avatarByteArray = scriptedAvatar->toByteArrayStateful(dataDetail);
int maximumByteArraySize = NLPacket::maxPayloadSize(PacketType::AvatarData) - sizeof(AvatarDataSequenceNumber);
if (avatarByteArray.size() > maximumByteArraySize) {
qWarning() << " scriptedAvatar->toByteArrayStateful() resulted in very large buffer:" << avatarByteArray.size() << "... attempt to drop facial data";
avatarByteArray = scriptedAvatar->toByteArrayStateful(dataDetail, true);
if (avatarByteArray.size() > maximumByteArraySize) {
qWarning() << " scriptedAvatar->toByteArrayStateful() without facial data resulted in very large buffer:" << avatarByteArray.size() << "... reduce to MinimumData";
avatarByteArray = scriptedAvatar->toByteArrayStateful(AvatarData::MinimumData, true);
if (avatarByteArray.size() > maximumByteArraySize) {
qWarning() << " scriptedAvatar->toByteArrayStateful() MinimumData resulted in very large buffer:" << avatarByteArray.size() << "... FAIL!!";
return;
}
}
}
scriptedAvatar->doneEncoding(true);
static AvatarDataSequenceNumber sequenceNumber = 0;
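The hunk above caps AvatarData packets by degrading the payload in stages: full detail first, then the same detail level with facial data dropped, then MinimumData, and only if that still overflows does it skip the update. A minimal standalone sketch of that fallback ladder, with a generic attempt list standing in for the three toByteArrayStateful() calls (the helper below is illustrative, not code from this commit):

    #include <QByteArray>
    #include <functional>
    #include <vector>

    // Try progressively cheaper encodings until one fits the budget; report
    // failure if even the cheapest attempt overflows.
    QByteArray encodeWithFallback(const std::vector<std::function<QByteArray()>>& attempts,
                                  int maxSize, bool& ok) {
        for (const auto& attempt : attempts) {
            QByteArray bytes = attempt();
            if (bytes.size() <= maxSize) {
                ok = true;
                return bytes;
            }
        }
        ok = false; // caller should drop this update, as the code above does
        return QByteArray();
    }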
@@ -798,6 +820,7 @@ void Agent::aboutToFinish() {
emit stopAvatarAudioTimer();
_avatarAudioTimerThread.quit();
_avatarAudioTimerThread.wait();
// cleanup codec & encoder
if (_codec && _encoder) {


@@ -17,8 +17,8 @@
#include <QtCore/QThread>
#include <LogHandler.h>
#include <SharedUtil.h>
#include <HifiConfigVariantMap.h>
#include <SharedUtil.h>
#include <ShutdownEventListener.h>
#include "Assignment.h"


@@ -76,7 +76,7 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {
void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
_function = &AudioMixerSlave::mix;
_configure = [&](AudioMixerSlave& slave) {
_configure = [=](AudioMixerSlave& slave) {
slave.configureMix(_begin, _end, _frame, _throttlingRatio);
};
_frame = frame;
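The one-character change above, [&] to [=], is a lifetime fix: _configure is stored on the pool and invoked later on worker threads, so capturing the enclosing stack frame by reference could leave the stored lambda holding dangling references. A self-contained illustration of the hazard (names invented for the sketch):

    #include <functional>
    #include <iostream>

    std::function<void()> makeConfigure(bool captureByValue) {
        int frame = 42; // local; destroyed when this function returns
        if (captureByValue) {
            return [=] { std::cout << frame << "\n"; }; // safe: the lambda owns a copy
        }
        return [&] { std::cout << frame << "\n"; };     // dangles once 'frame' dies
    }

    int main() {
        auto safe = makeConfigure(true);
        safe();     // prints 42
        auto broken = makeConfigure(false);
        broken();   // undefined behavior: reads a destroyed local
    }

The same fix is applied to both lambdas in AvatarMixerSlavePool further down.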


@@ -85,7 +85,22 @@ void AvatarMixer::handleReplicatedPacket(QSharedPointer<ReceivedMessage> message
auto nodeList = DependencyManager::get<NodeList>();
auto nodeID = QUuid::fromRfc4122(message->peek(NUM_BYTES_RFC4122_UUID));
auto replicatedNode = addOrUpdateReplicatedNode(nodeID, message->getSenderSockAddr());
SharedNodePointer replicatedNode;
if (message->getType() == PacketType::ReplicatedKillAvatar) {
// this is a kill packet, which we should only process if we already have the node in our list
// since it of course does not make sense to add a node just to remove it an instant later
replicatedNode = nodeList->nodeWithUUID(nodeID);
if (!replicatedNode) {
return;
}
} else {
replicatedNode = addOrUpdateReplicatedNode(nodeID, message->getSenderSockAddr());
}
// we better have a node to work with at this point
assert(replicatedNode);
if (message->getType() == PacketType::ReplicatedAvatarIdentity) {
handleAvatarIdentityPacket(message, replicatedNode);
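The guard above splits packet handling by intent: a ReplicatedKillAvatar may only look up an existing node and must bail if none exists, while every other replicated type may add-or-update. The same look-up-versus-create split in a standalone form (the map and types are invented for the sketch):

    #include <map>
    #include <memory>
    #include <string>

    struct Node {};
    std::map<std::string, std::shared_ptr<Node>> nodes;

    // Destructive messages must never create state: look up only, bail if absent.
    std::shared_ptr<Node> nodeForMessage(const std::string& id, bool isKillMessage) {
        if (isKillMessage) {
            auto it = nodes.find(id);
            return it == nodes.end() ? nullptr : it->second; // never inserts
        }
        // constructive messages may add or update
        return nodes.try_emplace(id, std::make_shared<Node>()).first->second;
    }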


@@ -108,9 +108,6 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
void AvatarMixerClientData::removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other) {
if (isRadiusIgnoring(other)) {
_radiusIgnoredOthers.erase(other);
auto exitingSpaceBubblePacket = NLPacket::create(PacketType::ExitingSpaceBubble, NUM_BYTES_RFC4122_UUID);
exitingSpaceBubblePacket->write(other.toRfc4122());
DependencyManager::get<NodeList>()->sendUnreliablePacket(*exitingSpaceBubblePacket, *self);
}
}


@@ -383,11 +383,11 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
qCWarning(avatars) << "otherAvatar.toByteArray() without facial data resulted in very large buffer:" << bytes.size() << "... reduce to MinimumData";
bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
}
if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
includeThisAvatar = false;
if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
includeThisAvatar = false;
}
}
}


@@ -69,7 +69,7 @@ static AvatarMixerSlave slave;
void AvatarMixerSlavePool::processIncomingPackets(ConstIter begin, ConstIter end) {
_function = &AvatarMixerSlave::processIncomingPackets;
_configure = [&](AvatarMixerSlave& slave) {
_configure = [=](AvatarMixerSlave& slave) {
slave.configure(begin, end);
};
run(begin, end);
@@ -79,7 +79,7 @@ void AvatarMixerSlavePool::broadcastAvatarData(ConstIter begin, ConstIter end,
p_high_resolution_clock::time_point lastFrameTimestamp,
float maxKbpsPerNode, float throttlingRatio) {
_function = &AvatarMixerSlave::broadcastAvatarData;
_configure = [&](AvatarMixerSlave& slave) {
_configure = [=](AvatarMixerSlave& slave) {
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio);
};
run(begin, end);


@@ -19,7 +19,7 @@
#include "ScriptableAvatar.h"
QByteArray ScriptableAvatar::toByteArrayStateful(AvatarDataDetail dataDetail) {
QByteArray ScriptableAvatar::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
_globalPosition = getPosition();
return AvatarData::toByteArrayStateful(dataDetail);
}


@@ -28,7 +28,7 @@ public:
Q_INVOKABLE AnimationDetails getAnimationDetails();
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail) override;
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
private slots:
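One caution on the new dropFaceTracking = false default: C++ binds default arguments statically, using the declared type at the call site rather than the object's dynamic type, so the default written on this override only applies to calls made directly through ScriptableAvatar. A self-contained illustration:

    #include <iostream>

    struct Base {
        virtual void encode(bool dropFace = false) {
            std::cout << "Base body, dropFace=" << dropFace << "\n";
        }
        virtual ~Base() = default;
    };

    struct Derived : Base {
        // Keep this default identical to Base's: if it ever diverged, a call
        // through Base* would run Derived's body with Base's default argument.
        void encode(bool dropFace = false) override {
            std::cout << "Derived body, dropFace=" << dropFace << "\n";
        }
    };

    int main() {
        Derived d;
        Base* b = &d;
        b->encode(); // Derived's body runs, but the default comes from Base
    }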


@@ -50,6 +50,12 @@ EntityServer::~EntityServer() {
tree->removeNewlyCreatedHook(this);
}
void EntityServer::aboutToFinish() {
DependencyManager::get<ResourceManager>()->cleanup();
OctreeServer::aboutToFinish();
}
void EntityServer::handleEntityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
if (_octreeInboundPacketProcessor) {
_octreeInboundPacketProcessor->queueReceivedPacket(message, senderNode);


@@ -59,6 +59,8 @@ public:
virtual void trackSend(const QUuid& dataID, quint64 dataLastEdited, const QUuid& sessionID) override;
virtual void trackViewerGone(const QUuid& sessionID) override;
virtual void aboutToFinish() override;
public slots:
virtual void nodeAdded(SharedNodePointer node) override;
virtual void nodeKilled(SharedNodePointer node) override;


@@ -81,7 +81,6 @@ bool OctreeSendThread::process() {
// don't do any send processing until the initial load of the octree is complete...
if (_myServer->isInitialLoadComplete()) {
if (auto node = _node.lock()) {
_nodeMissingCount = 0;
OctreeQueryNode* nodeData = static_cast<OctreeQueryNode*>(node->getLinkedData());
// Sometimes the node data has not yet been linked, in which case we can't really do anything
@@ -129,8 +128,7 @@ AtomicUIntStat OctreeSendThread::_totalSpecialBytes { 0 };
AtomicUIntStat OctreeSendThread::_totalSpecialPackets { 0 };
int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, int& trueBytesSent,
int& truePacketsSent, bool dontSuppressDuplicate) {
int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, bool dontSuppressDuplicate) {
OctreeServer::didHandlePacketSend(this);
// if we're shutting down, then exit early
@@ -141,15 +139,14 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
bool debug = _myServer->wantsDebugSending();
quint64 now = usecTimestampNow();
bool packetSent = false; // did we send a packet?
int packetsSent = 0;
int numPackets = 0;
// Here's where we check to see if this packet is a duplicate of the last packet. If it is, we will silently
// obscure the packet and not send it. This allows the callers and upper level logic to not need to know about
// this rate control savings.
if (!dontSuppressDuplicate && nodeData->shouldSuppressDuplicatePacket()) {
nodeData->resetOctreePacket(); // we still need to reset it though!
return packetsSent; // without sending...
return numPackets; // without sending...
}
// If we've got a stats message ready to send, then see if we can piggyback them together
@@ -163,12 +160,15 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
// copy octree message to back of stats message
statsPacket.write(nodeData->getPacket().getData(), nodeData->getPacket().getDataSize());
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted", since
int numBytes = statsPacket.getDataSize();
_totalBytes += numBytes;
_totalPackets++;
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted"
// there was nothing else to send.
int thisWastedBytes = 0;
_totalWastedBytes += thisWastedBytes;
_totalBytes += statsPacket.getDataSize();
_totalPackets++;
//_totalWastedBytes += 0;
_trueBytesSent += numBytes;
numPackets++;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@@ -191,18 +191,22 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
// actually send it
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(statsPacket, *node);
packetSent = true;
} else {
// not enough room in the packet, send two packets
// first packet
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(statsPacket, *node);
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted", since
int numBytes = statsPacket.getDataSize();
_totalBytes += numBytes;
_totalPackets++;
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted"
// there was nothing else to send.
int thisWastedBytes = 0;
_totalWastedBytes += thisWastedBytes;
_totalBytes += statsPacket.getDataSize();
_totalPackets++;
//_totalWastedBytes += 0;
_trueBytesSent += numBytes;
numPackets++;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@@ -221,19 +225,18 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
trueBytesSent += statsPacket.getDataSize();
truePacketsSent++;
packetsSent++;
// second packet
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(nodeData->getPacket(), *node);
packetSent = true;
int packetSizeWithHeader = nodeData->getPacket().getDataSize();
thisWastedBytes = udt::MAX_PACKET_SIZE - packetSizeWithHeader;
_totalWastedBytes += thisWastedBytes;
_totalBytes += nodeData->getPacket().getDataSize();
numBytes = nodeData->getPacket().getDataSize();
_totalBytes += numBytes;
_totalPackets++;
// we count wasted bytes here because we were unable to fit the stats packet
thisWastedBytes = udt::MAX_PACKET_SIZE - numBytes;
_totalWastedBytes += thisWastedBytes;
_trueBytesSent += numBytes;
numPackets++;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@@ -259,13 +262,14 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
// just send the octree packet
OctreeServer::didCallWriteDatagram(this);
DependencyManager::get<NodeList>()->sendUnreliablePacket(nodeData->getPacket(), *node);
packetSent = true;
int packetSizeWithHeader = nodeData->getPacket().getDataSize();
int thisWastedBytes = udt::MAX_PACKET_SIZE - packetSizeWithHeader;
_totalWastedBytes += thisWastedBytes;
_totalBytes += packetSizeWithHeader;
int numBytes = nodeData->getPacket().getDataSize();
_totalBytes += numBytes;
_totalPackets++;
int thisWastedBytes = udt::MAX_PACKET_SIZE - numBytes;
_totalWastedBytes += thisWastedBytes;
numPackets++;
_trueBytesSent += numBytes;
if (debug) {
NLPacket& sentPacket = nodeData->getPacket();
@@ -280,23 +284,21 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
qDebug() << "Sending packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
" timestamp: " << timestamp <<
" size: " << packetSizeWithHeader << " [" << _totalBytes <<
" size: " << numBytes << " [" << _totalBytes <<
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
}
}
}
// remember to track our stats
if (packetSent) {
if (numPackets > 0) {
nodeData->stats.packetSent(nodeData->getPacket().getPayloadSize());
trueBytesSent += nodeData->getPacket().getPayloadSize();
truePacketsSent++;
packetsSent++;
nodeData->octreePacketSent();
nodeData->resetOctreePacket();
}
return packetsSent;
_truePacketsSent += numPackets;
return numPackets;
}
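The net effect of the hunks above: handlePacketSend() no longer threads its accounting through int& trueBytesSent / int& truePacketsSent reference parameters. The counters now live on the thread itself (_trueBytesSent, _truePacketsSent, _packetsSentThisInterval; each OctreeSendThread serves a single node, so there is no sharing hazard), and the function returns only the number of packets it sent. The signature change, side by side:

    // before: callers pass accounting slots by reference
    int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData,
                         int& trueBytesSent, int& truePacketsSent,
                         bool dontSuppressDuplicate = false);

    // after: the thread accumulates its own stats and returns the packet count
    int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData,
                         bool dontSuppressDuplicate = false);
    // new members: int _truePacketsSent { 0 }; int _trueBytesSent { 0 };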
/// Version of octree element distributor that sends the deepest LOD level at once
@@ -315,13 +317,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
preDistributionProcessing();
}
// calculate max number of packets that can be sent during this interval
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
int truePacketsSent = 0;
int trueBytesSent = 0;
int packetsSentThisInterval = 0;
_truePacketsSent = 0;
_trueBytesSent = 0;
_packetsSentThisInterval = 0;
bool isFullScene = nodeData->shouldForceFullScene();
if (isFullScene) {
@@ -334,17 +332,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
&& ((!viewFrustumChanged && nodeData->getViewFrustumJustStoppedChanging()) || nodeData->hasLodChanged()));
}
bool somethingToSend = true; // assume we have something
// If our packet already has content in it, then we must use the color choice of the waiting packet.
// If we're starting a fresh packet, then...
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
// the clients requested color state.
// If we have a packet waiting, and our desired want color, doesn't match the current waiting packets color
// then let's just send that waiting packet.
if (nodeData->isPacketWaiting()) {
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
// send the waiting packet
_packetsSentThisInterval += handlePacketSend(node, nodeData);
} else {
nodeData->resetOctreePacket();
}
@@ -375,8 +365,7 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
//unsigned long encodeTime = nodeData->stats.getTotalEncodeTime();
//unsigned long elapsedTime = nodeData->stats.getElapsedTime();
int packetsJustSent = handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent, isFullScene);
packetsSentThisInterval += packetsJustSent;
_packetsSentThisInterval += handlePacketSend(node, nodeData, isFullScene);
// If we're starting a full scene, then definitely we want to empty the elementBag
if (isFullScene) {
@@ -404,185 +393,44 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
// If we have something in our elementBag, then turn them into packets and send them out...
if (!nodeData->elementBag.isEmpty()) {
int bytesWritten = 0;
quint64 start = usecTimestampNow();
// TODO: add these to stats page
//quint64 startCompressTimeMsecs = OctreePacketData::getCompressContentTime() / 1000;
//quint64 startCompressCalls = OctreePacketData::getCompressContentCalls();
int extraPackingAttempts = 0;
bool completedScene = false;
while (somethingToSend && packetsSentThisInterval < maxPacketsPerInterval && !nodeData->isShuttingDown()) {
float lockWaitElapsedUsec = OctreeServer::SKIP_TIME;
float encodeElapsedUsec = OctreeServer::SKIP_TIME;
float compressAndWriteElapsedUsec = OctreeServer::SKIP_TIME;
float packetSendingElapsedUsec = OctreeServer::SKIP_TIME;
quint64 startInside = usecTimestampNow();
bool lastNodeDidntFit = false; // assume each node fits
if (!nodeData->elementBag.isEmpty()) {
quint64 lockWaitStart = usecTimestampNow();
_myServer->getOctree()->withReadLock([&]{
quint64 lockWaitEnd = usecTimestampNow();
lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
quint64 encodeStart = usecTimestampNow();
OctreeElementPointer subTree = nodeData->elementBag.extract();
if (!subTree) {
return;
}
float octreeSizeScale = nodeData->getOctreeSizeScale();
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
int boundaryLevelAdjust = boundaryLevelAdjustClient +
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
EncodeBitstreamParams params(INT_MAX, WANT_EXISTS_BITS, DONT_CHOP,
viewFrustumChanged, boundaryLevelAdjust, octreeSizeScale,
isFullScene, _myServer->getJurisdiction(), nodeData);
nodeData->copyCurrentViewFrustum(params.viewFrustum);
if (viewFrustumChanged) {
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
}
// Our trackSend() function is implemented by the server subclass, and will be called back
// during the encodeTreeBitstream() as new entities/data elements are sent
params.trackSend = [this, node](const QUuid& dataID, quint64 dataEdited) {
_myServer->trackSend(dataID, dataEdited, node->getUUID());
};
// TODO: should this include the lock time or not? This stat is sent down to the client,
// it seems like it may be a good idea to include the lock time as part of the encode time
// that is reported to the client, since you can encode without the lock
nodeData->stats.encodeStarted();
bytesWritten = _myServer->getOctree()->encodeTreeBitstream(subTree, &_packetData, nodeData->elementBag, params);
quint64 encodeEnd = usecTimestampNow();
encodeElapsedUsec = (float)(encodeEnd - encodeStart);
// If after calling encodeTreeBitstream() there are no nodes left to send, then we know we've
// sent the entire scene. We want to know this below so we'll actually write this content into
// the packet and send it
completedScene = nodeData->elementBag.isEmpty();
if (params.stopReason == EncodeBitstreamParams::DIDNT_FIT) {
lastNodeDidntFit = true;
extraPackingAttempts++;
}
nodeData->stats.encodeStopped();
});
} else {
// If the bag was empty then we didn't even attempt to encode, and so we know the bytesWritten were 0
bytesWritten = 0;
somethingToSend = false; // this will cause us to drop out of the loop...
}
// If the last node didn't fit, but we're in compressed mode, then we actually want to see if we can fit a
// little bit more in this packet. To do this we write into the packet, but don't send it yet, we'll
// keep attempting to write in compressed mode to add more compressed segments
// We only consider sending anything if there is something in the _packetData to send... But
// if bytesWritten == 0 it means either the subTree couldn't fit or we had an empty bag... Both cases
// mean we should send the previous packet contents and reset it.
if (completedScene || lastNodeDidntFit) {
if (_packetData.hasContent()) {
quint64 compressAndWriteStart = usecTimestampNow();
// if for some reason the finalized size is greater than our available size, then probably the "compressed"
// form actually inflated beyond our padding, and in this case we will send the current packet, then
// write to out new packet...
unsigned int writtenSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
if (writtenSize > nodeData->getAvailable()) {
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
}
nodeData->writeToPacket(_packetData.getFinalizedData(), _packetData.getFinalizedSize());
quint64 compressAndWriteEnd = usecTimestampNow();
compressAndWriteElapsedUsec = (float)(compressAndWriteEnd - compressAndWriteStart);
}
// If we're not running compressed, then we know we can just send now. Or if we're running compressed, but
// the packet doesn't have enough space to bother attempting to pack more...
bool sendNow = true;
if (!completedScene && (nodeData->getAvailable() >= MINIMUM_ATTEMPT_MORE_PACKING &&
extraPackingAttempts <= REASONABLE_NUMBER_OF_PACKING_ATTEMPTS)) {
sendNow = false; // try to pack more
}
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
if (sendNow) {
quint64 packetSendingStart = usecTimestampNow();
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
quint64 packetSendingEnd = usecTimestampNow();
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
extraPackingAttempts = 0;
} else {
// If we're in compressed mode, then we want to see if we have room for more in this wire packet.
// but we've finalized the _packetData, so we want to start a new section, we will do that by
// resetting the packet settings with the max uncompressed size of our current available space
// in the wire packet. We also include room for our section header, and a little bit of padding
// to account for the fact that when compressing small amounts of data, we sometimes end up with
// a larger compressed size than uncompressed size
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
}
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
}
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);
OctreeServer::trackEncodeTime(encodeElapsedUsec);
OctreeServer::trackCompressAndWriteTime(compressAndWriteElapsedUsec);
OctreeServer::trackPacketSendingTime(packetSendingElapsedUsec);
quint64 endInside = usecTimestampNow();
quint64 elapsedInsideUsecs = endInside - startInside;
OctreeServer::trackInsideTime((float)elapsedInsideUsecs);
}
if (somethingToSend && _myServer->wantsVerboseDebug()) {
qCDebug(octree) << "Hit PPS Limit, packetsSentThisInterval =" << packetsSentThisInterval
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;
}
traverseTreeAndSendContents(node, nodeData, viewFrustumChanged, isFullScene);
// Here's where we can/should allow the server to send other data...
// send the environment packet
// TODO: should we turn this into a while loop to better handle sending multiple special packets
if (_myServer->hasSpecialPacketsToSend(node) && !nodeData->isShuttingDown()) {
int specialPacketsSent = 0;
trueBytesSent += _myServer->sendSpecialPackets(node, nodeData, specialPacketsSent);
int specialBytesSent = _myServer->sendSpecialPackets(node, nodeData, specialPacketsSent);
nodeData->resetOctreePacket(); // because nodeData's _sequenceNumber has changed
truePacketsSent += specialPacketsSent;
packetsSentThisInterval += specialPacketsSent;
_truePacketsSent += specialPacketsSent;
_trueBytesSent += specialBytesSent;
_packetsSentThisInterval += specialPacketsSent;
_totalPackets += specialPacketsSent;
_totalBytes += trueBytesSent;
_totalBytes += specialBytesSent;
_totalSpecialPackets += specialPacketsSent;
_totalSpecialBytes += trueBytesSent;
_totalSpecialBytes += specialBytesSent;
}
// calculate max number of packets that can be sent during this interval
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
// Re-send packets that were nacked by the client
while (nodeData->hasNextNackedPacket() && packetsSentThisInterval < maxPacketsPerInterval) {
while (nodeData->hasNextNackedPacket() && _packetsSentThisInterval < maxPacketsPerInterval) {
const NLPacket* packet = nodeData->getNextNackedPacket();
if (packet) {
DependencyManager::get<NodeList>()->sendUnreliablePacket(*packet, *node);
truePacketsSent++;
packetsSentThisInterval++;
int numBytes = packet->getDataSize();
_truePacketsSent++;
_trueBytesSent += numBytes;
_packetsSentThisInterval++;
_totalBytes += packet->getDataSize();
_totalPackets++;
_totalBytes += numBytes;
_totalWastedBytes += udt::MAX_PACKET_SIZE - packet->getDataSize();
}
}
@@ -591,12 +439,6 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
int elapsedmsec = (end - start) / USECS_PER_MSEC;
OctreeServer::trackLoopTime(elapsedmsec);
// TODO: add these to stats page
//quint64 endCompressCalls = OctreePacketData::getCompressContentCalls();
//int elapsedCompressCalls = endCompressCalls - startCompressCalls;
//quint64 endCompressTimeMsecs = OctreePacketData::getCompressContentTime() / 1000;
//int elapsedCompressTimeMsecs = endCompressTimeMsecs - startCompressTimeMsecs;
// if after sending packets we've emptied our bag, then we want to remember that we've sent all
// the octree elements from the current view frustum
if (nodeData->elementBag.isEmpty()) {
@@ -606,17 +448,147 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
// If this was a full scene then make sure we really send out a stats packet at this point so that
// the clients will know the scene is stable
if (isFullScene) {
int thisTrueBytesSent = 0;
int thisTruePacketsSent = 0;
nodeData->stats.sceneCompleted();
int packetsJustSent = handlePacketSend(node, nodeData, thisTrueBytesSent, thisTruePacketsSent, true);
_totalBytes += thisTrueBytesSent;
_totalPackets += thisTruePacketsSent;
truePacketsSent += packetsJustSent;
handlePacketSend(node, nodeData, true);
}
}
} // end if bag wasn't empty, and so we sent stuff...
return truePacketsSent;
return _truePacketsSent;
}
void OctreeSendThread::traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene) {
// calculate max number of packets that can be sent during this interval
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
int extraPackingAttempts = 0;
bool completedScene = false;
bool somethingToSend = true; // assume we have something
while (somethingToSend && _packetsSentThisInterval < maxPacketsPerInterval && !nodeData->isShuttingDown()) {
float lockWaitElapsedUsec = OctreeServer::SKIP_TIME;
float encodeElapsedUsec = OctreeServer::SKIP_TIME;
float compressAndWriteElapsedUsec = OctreeServer::SKIP_TIME;
float packetSendingElapsedUsec = OctreeServer::SKIP_TIME;
quint64 startInside = usecTimestampNow();
bool lastNodeDidntFit = false; // assume each node fits
if (!nodeData->elementBag.isEmpty()) {
quint64 lockWaitStart = usecTimestampNow();
_myServer->getOctree()->withReadLock([&]{
quint64 lockWaitEnd = usecTimestampNow();
lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
quint64 encodeStart = usecTimestampNow();
OctreeElementPointer subTree = nodeData->elementBag.extract();
if (!subTree) {
return;
}
float octreeSizeScale = nodeData->getOctreeSizeScale();
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
int boundaryLevelAdjust = boundaryLevelAdjustClient +
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
EncodeBitstreamParams params(INT_MAX, WANT_EXISTS_BITS, DONT_CHOP,
viewFrustumChanged, boundaryLevelAdjust, octreeSizeScale,
isFullScene, _myServer->getJurisdiction(), nodeData);
nodeData->copyCurrentViewFrustum(params.viewFrustum);
if (viewFrustumChanged) {
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
}
// Our trackSend() function is implemented by the server subclass, and will be called back
// during the encodeTreeBitstream() as new entities/data elements are sent
params.trackSend = [this](const QUuid& dataID, quint64 dataEdited) {
_myServer->trackSend(dataID, dataEdited, _nodeUuid);
};
// TODO: should this include the lock time or not? This stat is sent down to the client,
// it seems like it may be a good idea to include the lock time as part of the encode time
// that is reported to the client, since you can encode without the lock
nodeData->stats.encodeStarted();
// NOTE: this is where the tree "contents" are actually packed
_myServer->getOctree()->encodeTreeBitstream(subTree, &_packetData, nodeData->elementBag, params);
quint64 encodeEnd = usecTimestampNow();
encodeElapsedUsec = (float)(encodeEnd - encodeStart);
// If after calling encodeTreeBitstream() there are no nodes left to send, then we know we've
// sent the entire scene. We want to know this below so we'll actually write this content into
// the packet and send it
completedScene = nodeData->elementBag.isEmpty();
if (params.stopReason == EncodeBitstreamParams::DIDNT_FIT) {
lastNodeDidntFit = true;
extraPackingAttempts++;
}
nodeData->stats.encodeStopped();
});
} else {
somethingToSend = false; // this will cause us to drop out of the loop...
}
if (completedScene || lastNodeDidntFit) {
// we probably want to flush what has accumulated in nodeData but:
// do we have more data to send? and is there room?
if (_packetData.hasContent()) {
// yes, more data to send
quint64 compressAndWriteStart = usecTimestampNow();
unsigned int additionalSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
if (additionalSize > nodeData->getAvailable()) {
// no room --> flush what we've got
_packetsSentThisInterval += handlePacketSend(node, nodeData);
}
// either there is room, or we've flushed and reset nodeData's data buffer
// so we can transfer whatever is in _packetData to nodeData
nodeData->writeToPacket(_packetData.getFinalizedData(), _packetData.getFinalizedSize());
compressAndWriteElapsedUsec = (float)(usecTimestampNow()- compressAndWriteStart);
}
bool sendNow = completedScene ||
nodeData->getAvailable() < MINIMUM_ATTEMPT_MORE_PACKING ||
extraPackingAttempts > REASONABLE_NUMBER_OF_PACKING_ATTEMPTS;
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
if (sendNow) {
quint64 packetSendingStart = usecTimestampNow();
_packetsSentThisInterval += handlePacketSend(node, nodeData);
quint64 packetSendingEnd = usecTimestampNow();
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
extraPackingAttempts = 0;
} else {
// We want to see if we have room for more in this wire packet but we've copied the _packetData,
// so we want to start a new section. We will do that by resetting the packet settings with the max
// size of our current available space in the wire packet plus room for our section header and a
// little bit of padding.
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
}
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
}
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);
OctreeServer::trackEncodeTime(encodeElapsedUsec);
OctreeServer::trackCompressAndWriteTime(compressAndWriteElapsedUsec);
OctreeServer::trackPacketSendingTime(packetSendingElapsedUsec);
quint64 endInside = usecTimestampNow();
quint64 elapsedInsideUsecs = endInside - startInside;
OctreeServer::trackInsideTime((float)elapsedInsideUsecs);
}
if (somethingToSend && _myServer->wantsVerboseDebug()) {
qCDebug(octree) << "Hit PPS Limit, packetsSentThisInterval =" << _packetsSentThisInterval
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;
}
}
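One detail of the extracted traverseTreeAndSendContents() worth noting: the trackSend callback now captures only this and reads the cached _nodeUuid member, where the old inline version captured the node SharedNodePointer. Copying just the identifier keeps an encode-time callback from co-owning the node and extending its lifetime. A standalone sketch of the difference (types invented):

    #include <functional>
    #include <memory>
    #include <string>

    struct Node { std::string uuid; };

    // old style: the stored callback co-owns the Node for as long as it lives
    std::function<std::string()> captureNode(std::shared_ptr<Node> node) {
        return [node] { return node->uuid; }; // bumps the refcount
    }

    // new style: copy the identifier up front; no ownership, no lifetime extension
    std::function<std::string()> captureUuid(const std::shared_ptr<Node>& node) {
        std::string uuid = node->uuid;
        return [uuid] { return uuid; }; // plain value, valid even after the Node dies
    }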


@@ -34,7 +34,7 @@ public:
void setIsShuttingDown();
bool isShuttingDown() { return _isShuttingDown; }
QUuid getNodeUuid() const { return _nodeUuid; }
static AtomicUIntStat _totalBytes;
@@ -53,20 +53,23 @@ protected:
/// Called before a packetDistributor pass to allow for pre-distribution processing
virtual void preDistributionProcessing() {};
virtual void traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene);
OctreeServer* _myServer { nullptr };
QWeakPointer<Node> _node;
private:
int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, int& trueBytesSent, int& truePacketsSent, bool dontSuppressDuplicate = false);
int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, bool dontSuppressDuplicate = false);
int packetDistributor(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged);
QUuid _nodeUuid;
OctreePacketData _packetData;
int _nodeMissingCount { 0 };
int _truePacketsSent { 0 }; // available for debug stats
int _trueBytesSent { 0 }; // available for debug stats
int _packetsSentThisInterval { 0 }; // used for bandwidth throttle condition
bool _isShuttingDown { false };
};


@@ -16,6 +16,7 @@
#include <AudioConstants.h>
#include <AudioInjectorManager.h>
#include <ClientServerUtils.h>
#include <DebugDraw.h>
#include <EntityNodeData.h>
#include <EntityScriptingInterface.h>
#include <LogHandler.h>
@@ -67,6 +68,9 @@ EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssig
DependencyManager::set<ScriptCache>();
DependencyManager::set<ScriptEngines>(ScriptEngine::ENTITY_SERVER_SCRIPT);
// Needed to ensure the creation of the DebugDraw instance on the main thread
DebugDraw::getInstance();
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
packetReceiver.registerListenerForTypes({ PacketType::OctreeStats, PacketType::EntityData, PacketType::EntityErase },
this, "handleOctreePacket");


@@ -50,6 +50,7 @@
{
"label": "Places / Paths",
"html_id": "places_paths",
"restart": false,
"settings": [
{
"name": "paths",


@@ -2,11 +2,11 @@ $(document).ready(function(){
// setup the underscore templates
var nodeTemplate = _.template($('#nodes-template').html());
var queuedTemplate = _.template($('#queued-template').html());
// setup a function to grab the assignments
function getNodesAndAssignments() {
$.getJSON("nodes.json", function(json){
json.nodes.sort(function(a, b){
if (a.type === b.type) {
if (a.uptime < b.uptime) {
@@ -16,36 +16,50 @@ } else {
} else {
return 0;
}
}
}
if (a.type === "agent" && b.type !== "agent") {
return 1;
} else if (b.type === "agent" && a.type !== "agent") {
return -1;
}
if (a.type > b.type) {
return 1;
}
if (a.type < b.type) {
return -1;
}
}
});
$('#nodes-table tbody').html(nodeTemplate(json));
}).fail(function(jqXHR, textStatus, errorThrown) {
// we assume a 401 means the DS has restarted
// and no longer has our OAuth produced uuid
// so just reload and re-auth
if (jqXHR.status == 401) {
location.reload();
}
});
$.getJSON("assignments.json", function(json){
$.getJSON("assignments.json", function(json){
$('#assignments-table tbody').html(queuedTemplate(json));
}).fail(function(jqXHR, textStatus, errorThrown) {
// we assume a 401 means the DS has restarted
// and no longer has our OAuth produced uuid
// so just reload and re-auth
if (jqXHR.status == 401) {
location.reload();
}
});
}
// do the first GET on page load
getNodesAndAssignments();
// grab the new assignments JSON every two seconds
var getNodesAndAssignmentsInterval = setInterval(getNodesAndAssignments, 2000);
// hook the node delete to the X button
$(document.body).on('click', '.glyphicon-remove', function(){
// fire off a delete for this node
@@ -57,10 +71,10 @@ }
}
});
});
$(document.body).on('click', '#kill-all-btn', function() {
var confirmed_kill = confirm("Are you sure?");
if (confirmed_kill == true) {
$.ajax({
url: "/nodes/",


@@ -40,11 +40,11 @@
#include <LogHandler.h>
#include <PathUtils.h>
#include <NumericalConstants.h>
#include <Trace.h>
#include <StatTracker.h>
#include "DomainServerNodeData.h"
#include "NodeConnectionData.h"
#include <Trace.h>
#include <StatTracker.h>
int const DomainServer::EXIT_CODE_REBOOT = 234923;
@@ -1974,7 +1974,8 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
return _settingsManager.handleAuthenticatedHTTPRequest(connection, url);
}
const QString HIFI_SESSION_COOKIE_KEY = "DS_WEB_SESSION_UUID";
static const QString HIFI_SESSION_COOKIE_KEY = "DS_WEB_SESSION_UUID";
static const QString STATE_QUERY_KEY = "state";
bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &url, bool skipSubHandler) {
qDebug() << "HTTPS request received at" << url.toString();
@@ -1985,10 +1986,9 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
const QString CODE_QUERY_KEY = "code";
QString authorizationCode = codeURLQuery.queryItemValue(CODE_QUERY_KEY);
const QString STATE_QUERY_KEY = "state";
QUuid stateUUID = QUuid(codeURLQuery.queryItemValue(STATE_QUERY_KEY));
if (!authorizationCode.isEmpty() && !stateUUID.isNull()) {
if (!authorizationCode.isEmpty() && !stateUUID.isNull() && _webAuthenticationStateSet.remove(stateUUID)) {
// fire off a request with this code and state to get an access token for the user
const QString OAUTH_TOKEN_REQUEST_PATH = "/oauth/token";
@@ -2006,47 +2006,83 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
tokenRequest.setHeader(QNetworkRequest::ContentTypeHeader, "application/x-www-form-urlencoded");
QNetworkReply* tokenReply = NetworkAccessManager::getInstance().post(tokenRequest, tokenPostBody.toLocal8Bit());
connect(tokenReply, &QNetworkReply::finished, this, &DomainServer::tokenGrantFinished);
if (_webAuthenticationStateSet.remove(stateUUID)) {
// this is a web user who wants to auth to access web interface
// we hold the response back to them until we get their profile information
// and can decide if they are let in or not
// add this connection to our list of pending connections so that we can hold the response
_pendingOAuthConnections.insert(stateUUID, connection);
QEventLoop loop;
connect(tokenReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
// set the state UUID on the reply so that we can associate the response with the connection later
tokenReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), stateUUID);
// start the loop for the token request
loop.exec();
return true;
} else {
connection->respond(HTTPConnection::StatusCode400);
QNetworkReply* profileReply = profileRequestGivenTokenReply(tokenReply);
return true;
}
} else {
return false;
}
}
// stop the loop once the profileReply is complete
connect(profileReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
HTTPSConnection* DomainServer::connectionFromReplyWithState(QNetworkReply* reply) {
// grab the UUID state property from the reply
QUuid stateUUID = reply->property(STATE_QUERY_KEY.toLocal8Bit()).toUuid();
// restart the loop for the profile request
loop.exec();
if (!stateUUID.isNull()) {
return _pendingOAuthConnections.take(stateUUID);
} else {
return nullptr;
}
}
void DomainServer::tokenGrantFinished() {
auto tokenReply = qobject_cast<QNetworkReply*>(sender());
if (tokenReply) {
if (tokenReply->error() == QNetworkReply::NoError) {
// now that we have a token for this profile, send off a profile request
QNetworkReply* profileReply = profileRequestGivenTokenReply(tokenReply);
// forward along the state UUID that we kept with the token request
profileReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), tokenReply->property(STATE_QUERY_KEY.toLocal8Bit()));
connect(profileReply, &QNetworkReply::finished, this, &DomainServer::profileRequestFinished);
} else {
// the token grant failed, send back a 500 (assuming the connection is still around)
auto connection = connectionFromReplyWithState(tokenReply);
if (connection) {
connection->respond(HTTPConnection::StatusCode500);
}
}
tokenReply->deleteLater();
}
}
void DomainServer::profileRequestFinished() {
auto profileReply = qobject_cast<QNetworkReply*>(sender());
if (profileReply) {
auto connection = connectionFromReplyWithState(profileReply);
if (connection) {
if (profileReply->error() == QNetworkReply::NoError) {
// call helper method to get cookieHeaders
Headers cookieHeaders = setupCookieHeadersFromProfileReply(profileReply);
connection->respond(HTTPConnection::StatusCode302, QByteArray(),
HTTPConnection::DefaultContentType, cookieHeaders);
delete tokenReply;
delete profileReply;
// we've redirected the user back to our homepage
return true;
} else {
// the profile request failed, send back a 500 (assuming the connection is still around)
connection->respond(HTTPConnection::StatusCode500);
}
}
// respond with a 200 code indicating that login is complete
connection->respond(HTTPConnection::StatusCode200);
return true;
} else {
return false;
profileReply->deleteLater();
}
}
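The rewrite above removes the nested QEventLoop calls that previously blocked the domain server while the OAuth token and profile round-trips completed. The flow is now fully asynchronous: the held HTTPSConnection is parked in _pendingOAuthConnections keyed by the state UUID, that UUID rides along as a dynamic property on each QNetworkReply, and connectionFromReplyWithState() recovers the connection when a reply finishes. A runnable sketch of the tag-and-recover pattern (the URL and property name are placeholders):

    #include <QCoreApplication>
    #include <QNetworkAccessManager>
    #include <QNetworkRequest>
    #include <QNetworkReply>
    #include <QUrl>
    #include <QUuid>
    #include <QDebug>

    int main(int argc, char** argv) {
        QCoreApplication app(argc, argv);
        QNetworkAccessManager nam;

        QUuid stateUUID = QUuid::createUuid();
        QNetworkReply* reply = nam.get(QNetworkRequest(QUrl("https://example.com/oauth/token")));
        // stash the correlation key on the reply itself, mirroring
        // tokenReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), stateUUID) above
        reply->setProperty("state", stateUUID);

        QObject::connect(reply, &QNetworkReply::finished, [reply, &app] {
            // recover the key; the server uses it to take the pending
            // connection back out of _pendingOAuthConnections
            QUuid state = reply->property("state").toUuid();
            qDebug() << "reply finished for state" << state;
            reply->deleteLater();
            app.quit();
        });
        return app.exec();
    }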
@@ -2106,22 +2142,31 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
// the user does not have allowed username or role, return 401
return false;
} else {
// re-direct this user to OAuth page
static const QByteArray REQUESTED_WITH_HEADER = "X-Requested-With";
static const QString XML_REQUESTED_WITH = "XMLHttpRequest";
// generate a random state UUID to use
QUuid stateUUID = QUuid::createUuid();
if (connection->requestHeaders().value(REQUESTED_WITH_HEADER) == XML_REQUESTED_WITH) {
// unauthorized XHR requests get a 401 and not a 302, since there isn't an XHR
// path to OAuth authorize
connection->respond(HTTPConnection::StatusCode401, UNAUTHENTICATED_BODY);
} else {
// re-direct this user to OAuth page
// add it to the set so we can handle the callback from the OAuth provider
_webAuthenticationStateSet.insert(stateUUID);
// generate a random state UUID to use
QUuid stateUUID = QUuid::createUuid();
QUrl authURL = oauthAuthorizationURL(stateUUID);
// add it to the set so we can handle the callback from the OAuth provider
_webAuthenticationStateSet.insert(stateUUID);
Headers redirectHeaders;
QUrl authURL = oauthAuthorizationURL(stateUUID);
redirectHeaders.insert("Location", authURL.toEncoded());
Headers redirectHeaders;
connection->respond(HTTPConnection::StatusCode302,
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
redirectHeaders.insert("Location", authURL.toEncoded());
connection->respond(HTTPConnection::StatusCode302,
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
}
// we don't know about this user yet, so they are not yet authenticated
return false;


@@ -111,6 +111,9 @@ private slots:
void updateDownstreamNodes();
void updateUpstreamNodes();
void tokenGrantFinished();
void profileRequestFinished();
signals:
void iceServerChanged();
void userConnected();
@@ -178,6 +181,8 @@ private:
void updateReplicationNodes(ReplicationServerDirection direction);
HTTPSConnection* connectionFromReplyWithState(QNetworkReply* reply);
SubnetList _acSubnetWhitelist;
std::vector<QString> _replicatedUsernames;
@@ -235,6 +240,8 @@ private:
bool _sendICEServerAddressToMetaverseAPIInProgress { false };
bool _sendICEServerAddressToMetaverseAPIRedo { false };
QHash<QUuid, QPointer<HTTPSConnection>> _pendingOAuthConnections;
};


@@ -1250,7 +1250,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
if (!matchingDescriptionObject.isEmpty()) {
updateSetting(rootKey, rootValue, *thisMap, matchingDescriptionObject);
if (rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY) {
if (rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY && rootKey != SETTINGS_PATHS_KEY ) {
needRestart = true;
}
} else {


@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<path d="M43.9,13.3c-1.3-0.6-2.4-0.3-3.5,0.4c-1.6,1.2-3.3,2.4-5,3.5c-1.4-3.4-3.3-5-6.3-5c-5.9-0.1-11.9-0.1-17.9,0
c-3.8,0.1-6.4,3.1-6.4,7.3c0,3.7-0.1,7.6,0,11.4c0,1.1,0.2,2.1,0.6,3.1c1.2,2.7,3.3,3.8,6,3.8c5.6,0,11-0.1,16.5,0
c3.5,0.1,6-1.5,7.4-5.1c1.7,1.2,3.4,2.4,5.1,3.5c1.1,0.7,2.2,1.1,3.5,0.3c1.2-0.7,1.6-1.9,1.6-3.3c0-5.6,0-11,0-16.6
C45.5,15.3,45.2,14.1,43.9,13.3z M32.2,30.5c0,2.5-1,3.6-3.4,3.6c-2.9,0-5.8,0-8.7,0s-5.6,0-8.5,0.1c-2.4-0.1-3.4-1.2-3.4-3.7
c0-3.7,0-7.5,0-11.2c0-2.2,1.1-3.4,3.1-3.4c5.9,0,11.9,0,17.8,0c2,0,3.1,1.2,3.1,3.4C32.2,23,32.2,26.8,32.2,30.5z M41.9,32.8
c-2.1-1.4-4.2-2.9-6.3-4.3c-0.1-0.1-0.2-0.4-0.2-0.7c0-1.9,0-3.7,0-5.5c0-0.3,0.1-0.7,0.3-0.8c2-1.4,4-2.8,6.2-4.3
C41.9,22.3,41.9,27.4,41.9,32.8z"/>
<path d="M27.4,25C27.4,24.7,27.4,25.2,27.4,25c0-1.1-0.1-2-0.2-2.7c-0.2-1.4-0.7-2.7-1.6-4c-0.2-0.3-0.5-0.5-1-0.6
c-0.4-0.1-0.9,0-1.3,0.2c-0.5,0.3-0.7,1.3-0.3,1.8c1.2,1.6,1.4,3,1.4,4.8c0.1,2.1-0.2,3.4-1.5,5.2c-0.2,0.3-0.2,1.1,0.1,1.6
c0.1,0.2,0.3,0.4,0.6,0.6c0.2,0.1,0.3,0.1,0.5,0.1c0.5,0,1-0.3,1.3-0.9C27,29.3,27.3,27.3,27.4,25L27.4,25z"/>
<ellipse cx="15.2" cy="24.7" rx="2.1" ry="2.4"/>
<path d="M22.3,24.8C22.3,24.7,22.3,25,22.3,24.8c0-0.7-0.1-1.5-0.1-1.9c-0.2-1-0.6-2.1-1.3-3c-0.1-0.2-0.4-0.5-0.9-0.5
c-0.7,0-0.9,0.2-1.2,0.4c-0.4,0.2-0.5,0.9-0.2,1.3c0.9,1.2,1,2.1,1.1,3.5c0,1.6-0.2,2.5-1.1,3.8c-0.1,0.2-0.1,0.7,0,1.2
c0.1,0.2,0.2,0.3,0.5,0.4c0.1,0,0.2,0.1,0.3,0.1c0.5,0.2,1.2,0.1,1.5-0.5C21.7,28,22.2,26.5,22.3,24.8L22.3,24.8z"/>
</svg>



@@ -0,0 +1,22 @@
<?xml version="1.0" encoding="utf-8"?>
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
<style type="text/css">
.st0{fill:#FFFFFF;}
</style>
<path class="st0" d="M43.9,13.3c-1.3-0.6-2.4-0.3-3.5,0.4c-1.6,1.2-3.3,2.4-5,3.5c-1.4-3.4-3.3-5-6.3-5c-5.9-0.1-11.9-0.1-17.9,0
c-3.8,0.1-6.4,3.1-6.4,7.3c0,3.7-0.1,7.6,0,11.4c0,1.1,0.2,2.1,0.6,3.1c1.2,2.7,3.3,3.8,6,3.8c5.6,0,11-0.1,16.5,0
c3.5,0.1,6-1.5,7.4-5.1c1.7,1.2,3.4,2.4,5.1,3.5c1.1,0.7,2.2,1.1,3.5,0.3c1.2-0.7,1.6-1.9,1.6-3.3c0-5.6,0-11,0-16.6
C45.5,15.3,45.2,14.1,43.9,13.3z M32.2,30.5c0,2.5-1,3.6-3.4,3.6c-2.9,0-5.8,0-8.7,0s-5.6,0-8.5,0.1c-2.4-0.1-3.4-1.2-3.4-3.7
c0-3.7,0-7.5,0-11.2c0-2.2,1.1-3.4,3.1-3.4c5.9,0,11.9,0,17.8,0c2,0,3.1,1.2,3.1,3.4C32.2,23,32.2,26.8,32.2,30.5z M41.9,32.8
c-2.1-1.4-4.2-2.9-6.3-4.3c-0.1-0.1-0.2-0.4-0.2-0.7c0-1.9,0-3.7,0-5.5c0-0.3,0.1-0.7,0.3-0.8c2-1.4,4-2.8,6.2-4.3
C41.9,22.3,41.9,27.4,41.9,32.8z"/>
<path class="st0" d="M27.4,25C27.4,24.7,27.4,25.2,27.4,25c0-1.1-0.1-2-0.2-2.7c-0.2-1.4-0.7-2.7-1.6-4c-0.2-0.3-0.5-0.5-1-0.6
c-0.4-0.1-0.9,0-1.3,0.2c-0.5,0.3-0.7,1.3-0.3,1.8c1.2,1.6,1.4,3,1.4,4.8c0.1,2.1-0.2,3.4-1.5,5.2c-0.2,0.3-0.2,1.1,0.1,1.6
c0.1,0.2,0.3,0.4,0.6,0.6c0.2,0.1,0.3,0.1,0.5,0.1c0.5,0,1-0.3,1.3-0.9C27,29.3,27.3,27.3,27.4,25L27.4,25z"/>
<ellipse class="st0" cx="15.2" cy="24.7" rx="2.1" ry="2.4"/>
<path class="st0" d="M22.3,24.8C22.3,24.7,22.3,25,22.3,24.8c0-0.7-0.1-1.5-0.1-1.9c-0.2-1-0.6-2.1-1.3-3c-0.1-0.2-0.4-0.5-0.9-0.5
c-0.7,0-0.9,0.2-1.2,0.4c-0.4,0.2-0.5,0.9-0.2,1.3c0.9,1.2,1,2.1,1.1,3.5c0,1.6-0.2,2.5-1.1,3.8c-0.1,0.2-0.1,0.7,0,1.2
c0.1,0.2,0.2,0.3,0.5,0.4c0.1,0,0.2,0.1,0.3,0.1c0.5,0.2,1.2,0.1,1.5-0.5C21.7,28,22.2,26.5,22.3,24.8L22.3,24.8z"/>
</svg>


Binary file not shown (new image added, 50 KiB).

Binary file not shown (new image added, 899 KiB).


@@ -18,7 +18,7 @@ Original.CheckBox {
id: checkBox
property int colorScheme: hifi.colorSchemes.light
property string color: hifi.colors.lightGray
property string color: hifi.colors.lightGrayText
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light
property bool isRedCheck: false
property int boxSize: 14


@@ -0,0 +1,64 @@
//
// ImageMessageBox.qml
//
// Created by Dante Ruiz on 7/5/2017
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4
import "../styles-uit"
Item {
id: imageBox
visible: false
anchors.fill: parent
property alias source: image.source
property alias imageWidth: image.width
property alias imageHeight: image.height
Rectangle {
anchors.fill: parent
color: "black"
opacity: 0.3
}
Image {
id: image
anchors.centerIn: parent
HiFiGlyphs {
id: closeGlyphButton
text: hifi.glyphs.close
size: 25
anchors {
top: parent.top
topMargin: 15
right: parent.right
rightMargin: 15
}
MouseArea {
anchors.fill: parent
hoverEnabled: true
onEntered: {
parent.text = hifi.glyphs.closeInverted;
}
onExited: {
parent.text = hifi.glyphs.close;
}
onClicked: {
imageBox.visible = false;
}
}
}
}
}


@@ -0,0 +1,38 @@
//
// Separator.qml
//
// Created by Zach Fox on 2017-06-06
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import "../styles-uit"
Item {
// Size
height: 2;
Rectangle {
// Size
width: parent.width;
height: 1;
// Anchors
anchors.left: parent.left;
anchors.bottom: parent.bottom;
anchors.bottomMargin: height;
// Style
color: hifi.colors.baseGrayShadow;
}
Rectangle {
// Size
width: parent.width;
height: 1;
// Anchors
anchors.left: parent.left;
anchors.bottom: parent.bottom;
// Style
color: hifi.colors.baseGrayHighlight;
}
}


@@ -0,0 +1,156 @@
//
// Switch.qml
//
// Created by Zach Fox on 2017-06-06
// Copyright 2017 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import QtQuick 2.5
import QtQuick.Controls 1.4 as Original
import QtQuick.Controls.Styles 1.4
import "../styles-uit"
Item {
id: rootSwitch;
property int colorScheme: hifi.colorSchemes.light;
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light;
property int switchWidth: 70;
readonly property int switchRadius: height/2;
property string labelTextOff: "";
property string labelGlyphOffText: "";
property int labelGlyphOffSize: 32;
property string labelTextOn: "";
property string labelGlyphOnText: "";
property int labelGlyphOnSize: 32;
property alias checked: originalSwitch.checked;
signal onCheckedChanged;
signal clicked;
Original.Switch {
id: originalSwitch;
activeFocusOnPress: true;
anchors.top: rootSwitch.top;
anchors.left: rootSwitch.left;
anchors.leftMargin: rootSwitch.width/2 - rootSwitch.switchWidth/2;
onCheckedChanged: rootSwitch.onCheckedChanged();
onClicked: rootSwitch.clicked();
style: SwitchStyle {
padding {
top: 3;
left: 3;
right: 3;
bottom: 3;
}
groove: Rectangle {
color: "#252525";
implicitWidth: rootSwitch.switchWidth;
implicitHeight: rootSwitch.height;
radius: rootSwitch.switchRadius;
}
handle: Rectangle {
id: switchHandle;
implicitWidth: rootSwitch.height - padding.top - padding.bottom;
implicitHeight: implicitWidth;
radius: implicitWidth/2;
border.color: hifi.colors.lightGrayText;
color: hifi.colors.lightGray;
MouseArea {
anchors.fill: parent;
hoverEnabled: true;
onEntered: parent.color = hifi.colors.blueHighlight;
onExited: parent.color = hifi.colors.lightGray;
}
}
}
}
// OFF Label
Item {
anchors.right: originalSwitch.left;
anchors.rightMargin: 10;
anchors.top: rootSwitch.top;
height: rootSwitch.height;
RalewaySemiBold {
id: labelOff;
text: labelTextOff;
size: hifi.fontSizes.inputLabel;
color: originalSwitch.checked ? hifi.colors.lightGrayText : "#FFFFFF";
anchors.top: parent.top;
anchors.right: parent.right;
width: paintedWidth;
height: parent.height;
verticalAlignment: Text.AlignVCenter;
}
HiFiGlyphs {
id: labelGlyphOff;
text: labelGlyphOffText;
size: labelGlyphOffSize;
color: labelOff.color;
anchors.top: parent.top;
anchors.topMargin: 2;
anchors.right: labelOff.left;
anchors.rightMargin: 4;
}
MouseArea {
anchors.top: parent.top;
anchors.bottom: parent.bottom;
anchors.left: labelGlyphOff.left;
anchors.right: labelOff.right;
onClicked: {
originalSwitch.checked = false;
}
}
}
// ON Label
Item {
anchors.left: originalSwitch.right;
anchors.leftMargin: 10;
anchors.top: rootSwitch.top;
height: rootSwitch.height;
RalewaySemiBold {
id: labelOn;
text: labelTextOn;
size: hifi.fontSizes.inputLabel;
color: originalSwitch.checked ? "#FFFFFF" : hifi.colors.lightGrayText;
anchors.top: parent.top;
anchors.left: parent.left;
width: paintedWidth;
height: parent.height;
verticalAlignment: Text.AlignVCenter;
}
HiFiGlyphs {
id: labelGlyphOn;
text: labelGlyphOnText;
size: labelGlyphOnSize;
color: labelOn.color;
anchors.top: parent.top;
anchors.left: labelOn.right;
}
MouseArea {
anchors.top: parent.top;
anchors.bottom: parent.bottom;
anchors.left: labelOn.left;
anchors.right: labelGlyphOn.right;
onClicked: {
originalSwitch.checked = true;
}
}
}
}


@@ -32,14 +32,15 @@ Item {
radius: popupRadius
}
Rectangle {
width: Math.max(parent.width * 0.75, 400)
id: textContainer;
width: Math.max(parent.width * 0.8, 400)
height: contentContainer.height + 50
anchors.centerIn: parent
radius: popupRadius
color: "white"
Item {
id: contentContainer
width: parent.width - 60
width: parent.width - 50
height: childrenRect.height
anchors.centerIn: parent
Item {
@@ -92,7 +93,7 @@ Item {
anchors.top: parent.top
anchors.topMargin: -20
anchors.right: parent.right
anchors.rightMargin: -25
anchors.rightMargin: -20
MouseArea {
anchors.fill: closeGlyphButton
hoverEnabled: true
@@ -127,11 +128,51 @@ Item {
color: hifi.colors.darkGray
wrapMode: Text.WordWrap
textFormat: Text.StyledText
onLinkActivated: {
Qt.openUrlExternally(link)
}
}
}
}
// Left gray MouseArea
MouseArea {
anchors.fill: parent
anchors.left: parent.left;
anchors.right: textContainer.left;
anchors.top: textContainer.top;
anchors.bottom: textContainer.bottom;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false
}
}
// Right gray MouseArea
MouseArea {
anchors.left: textContainer.right;
anchors.right: parent.right;
anchors.top: textContainer.top;
anchors.bottom: textContainer.bottom;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false
}
}
// Top gray MouseArea
MouseArea {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: parent.top;
anchors.bottom: textContainer.top;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false
}
}
// Bottom gray MouseArea
MouseArea {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: textContainer.bottom;
anchors.bottom: parent.bottom;
acceptedButtons: Qt.LeftButton
onClicked: {
letterbox.visible = false


@@ -1101,9 +1101,9 @@ Rectangle {
case 'nearbyUsers':
var data = message.params;
var index = -1;
iAmAdmin = Users.canKick;
index = findNearbySessionIndex('', data);
if (index !== -1) {
iAmAdmin = Users.canKick;
myData = data[index];
data.splice(index, 1);
} else {


@@ -0,0 +1,374 @@
//
// SpectatorCamera.qml
// qml/hifi
//
// Spectator Camera
//
// Created by Zach Fox on 2017-06-05
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
import Hifi 1.0 as Hifi
import QtQuick 2.5
import QtQuick.Controls 1.4
import "../styles-uit"
import "../controls-uit" as HifiControlsUit
import "../controls" as HifiControls
// references HMD, XXX from root context
Rectangle {
HifiConstants { id: hifi; }
id: spectatorCamera;
// Style
color: hifi.colors.baseGray;
// The letterbox used for popup messages
LetterboxMessage {
id: letterboxMessage;
z: 999; // Force the popup on top of everything else
}
function letterbox(headerGlyph, headerText, message) {
letterboxMessage.headerGlyph = headerGlyph;
letterboxMessage.headerText = headerText;
letterboxMessage.text = message;
letterboxMessage.visible = true;
letterboxMessage.popupRadius = 0;
}
//
// TITLE BAR START
//
Item {
id: titleBarContainer;
// Size
width: spectatorCamera.width;
height: 50;
// Anchors
anchors.left: parent.left;
anchors.top: parent.top;
// "Spectator" text
RalewaySemiBold {
id: titleBarText;
text: "Spectator";
// Text size
size: hifi.fontSizes.overlayTitle;
// Anchors
anchors.fill: parent;
anchors.leftMargin: 16;
// Style
color: hifi.colors.lightGrayText;
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
}
// Separator
HifiControlsUit.Separator {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.bottom: parent.bottom;
}
}
//
// TITLE BAR END
//
//
// SPECTATOR APP DESCRIPTION START
//
Item {
id: spectatorDescriptionContainer;
// Size
width: spectatorCamera.width;
height: childrenRect.height;
// Anchors
anchors.left: parent.left;
anchors.top: titleBarContainer.bottom;
// (i) Glyph
HiFiGlyphs {
id: spectatorDescriptionGlyph;
text: hifi.glyphs.info;
// Size
width: 20;
height: parent.height;
size: 60;
// Anchors
anchors.left: parent.left;
anchors.leftMargin: 20;
anchors.top: parent.top;
anchors.topMargin: 0;
// Style
color: hifi.colors.lightGrayText;
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignTop;
}
// "Spectator" app description text
RalewayLight {
id: spectatorDescriptionText;
text: "Spectator lets you change what your monitor displays while you're using a VR headset. Use Spectator when streaming and recording video.";
// Text size
size: 14;
// Size
width: 350;
height: paintedHeight;
// Anchors
anchors.top: parent.top;
anchors.topMargin: 15;
anchors.left: spectatorDescriptionGlyph.right;
anchors.leftMargin: 40;
// Style
color: hifi.colors.lightGrayText;
wrapMode: Text.WordWrap;
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
}
// "Learn More" text
RalewayRegular {
id: spectatorLearnMoreText;
text: "Learn More About Spectator";
// Text size
size: 14;
// Size
width: paintedWidth;
height: paintedHeight;
// Anchors
anchors.top: spectatorDescriptionText.bottom;
anchors.topMargin: 10;
anchors.left: spectatorDescriptionText.anchors.left;
anchors.leftMargin: spectatorDescriptionText.anchors.leftMargin;
// Style
color: hifi.colors.blueAccent;
wrapMode: Text.WordWrap;
font.underline: true;
// Alignment
horizontalAlignment: Text.AlignLeft;
verticalAlignment: Text.AlignVCenter;
MouseArea {
anchors.fill: parent;
hoverEnabled: enabled;
onClicked: {
letterbox(hifi.glyphs.question,
"Spectator Camera",
"By default, your monitor shows a preview of what you're seeing in VR. " +
"Using the Spectator Camera app, your monitor can display the view " +
"from a virtual hand-held camera - perfect for taking selfies or filming " +
"your friends!<br>" +
"<h3>Streaming and Recording</h3>" +
"We recommend OBS for streaming and recording the contents of your monitor to services like " +
"Twitch, YouTube Live, and Facebook Live.<br><br>" +
"To get started using OBS, click this link now. The page will open in an external browser:<br>" +
'<font size="4"><a href="https://obsproject.com/forum/threads/official-overview-guide.402/">OBS Official Overview Guide</a></font>');
}
onEntered: parent.color = hifi.colors.blueHighlight;
onExited: parent.color = hifi.colors.blueAccent;
}
}
// Separator
HifiControlsUit.Separator {
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: spectatorLearnMoreText.bottom;
anchors.topMargin: spectatorDescriptionText.anchors.topMargin;
}
}
//
// SPECTATOR APP DESCRIPTION END
//
//
// SPECTATOR CONTROLS START
//
Item {
id: spectatorControlsContainer;
// Size
height: spectatorCamera.height - spectatorDescriptionContainer.height - titleBarContainer.height;
// Anchors
anchors.top: spectatorDescriptionContainer.bottom;
anchors.topMargin: 20;
anchors.left: parent.left;
anchors.leftMargin: 25;
anchors.right: parent.right;
anchors.rightMargin: anchors.leftMargin;
// "Camera On" Checkbox
HifiControlsUit.CheckBox {
id: cameraToggleCheckBox;
colorScheme: hifi.colorSchemes.dark;
anchors.left: parent.left;
anchors.top: parent.top;
text: "Spectator Camera On";
boxSize: 24;
onClicked: {
sendToScript({method: (checked ? 'spectatorCameraOn' : 'spectatorCameraOff')});
spectatorCameraPreview.ready = checked;
}
}
// Instructions or Preview
Rectangle {
id: spectatorCameraImageContainer;
anchors.left: parent.left;
anchors.top: cameraToggleCheckBox.bottom;
anchors.topMargin: 20;
anchors.right: parent.right;
height: 250;
color: cameraToggleCheckBox.checked ? "transparent" : "black";
AnimatedImage {
source: "../../images/static.gif"
visible: !cameraToggleCheckBox.checked;
anchors.fill: parent;
opacity: 0.15;
}
// Instructions (visible when display texture isn't set)
FiraSansRegular {
id: spectatorCameraInstructions;
text: "Turn on Spectator Camera for a preview\nof what your monitor shows.";
size: 16;
color: hifi.colors.lightGrayText;
visible: !cameraToggleCheckBox.checked;
anchors.fill: parent;
horizontalAlignment: Text.AlignHCenter;
verticalAlignment: Text.AlignVCenter;
}
// Spectator Camera Preview
Hifi.ResourceImageItem {
id: spectatorCameraPreview;
visible: cameraToggleCheckBox.checked;
url: monitorShowsSwitch.checked ? "resource://spectatorCameraFrame" : "resource://hmdPreviewFrame";
ready: cameraToggleCheckBox.checked;
mirrorVertically: true;
anchors.fill: parent;
onVisibleChanged: {
ready = cameraToggleCheckBox.checked;
update();
}
}
}
// "Monitor Shows" Switch Label Glyph
HiFiGlyphs {
id: monitorShowsSwitchLabelGlyph;
text: hifi.glyphs.screen;
size: 32;
color: hifi.colors.blueHighlight;
anchors.top: spectatorCameraImageContainer.bottom;
anchors.topMargin: 13;
anchors.left: parent.left;
}
// "Monitor Shows" Switch Label
RalewayLight {
id: monitorShowsSwitchLabel;
text: "MONITOR SHOWS:";
anchors.top: spectatorCameraImageContainer.bottom;
anchors.topMargin: 20;
anchors.left: monitorShowsSwitchLabelGlyph.right;
anchors.leftMargin: 6;
size: 16;
width: paintedWidth;
height: paintedHeight;
color: hifi.colors.lightGrayText;
verticalAlignment: Text.AlignVCenter;
}
// "Monitor Shows" Switch
HifiControlsUit.Switch {
id: monitorShowsSwitch;
height: 30;
anchors.left: parent.left;
anchors.right: parent.right;
anchors.top: monitorShowsSwitchLabel.bottom;
anchors.topMargin: 10;
labelTextOff: "HMD Preview";
labelTextOn: "Camera View";
labelGlyphOnText: hifi.glyphs.alert;
onCheckedChanged: {
sendToScript({method: 'setMonitorShowsCameraView', params: checked});
}
}
// "Switch View From Controller" Checkbox
HifiControlsUit.CheckBox {
id: switchViewFromControllerCheckBox;
colorScheme: hifi.colorSchemes.dark;
anchors.left: parent.left;
anchors.top: monitorShowsSwitch.bottom;
anchors.topMargin: 25;
text: "";
boxSize: 24;
onClicked: {
sendToScript({method: 'changeSwitchViewFromControllerPreference', params: checked});
}
}
}
//
// SPECTATOR CONTROLS END
//
//
// FUNCTION DEFINITIONS START
//
//
// Function Name: fromScript()
//
// Relevant Variables:
// None
//
// Arguments:
// message: The message sent from the SpectatorCamera JavaScript.
// Messages are in the format "{method, params}", like JSON-RPC.
//
// Description:
// Called when a message is received from spectatorCamera.js.
//
function fromScript(message) {
switch (message.method) {
case 'updateSpectatorCameraCheckbox':
cameraToggleCheckBox.checked = message.params;
break;
case 'updateMonitorShowsSwitch':
monitorShowsSwitch.checked = message.params;
break;
case 'updateControllerMappingCheckbox':
switchViewFromControllerCheckBox.checked = message.setting;
switchViewFromControllerCheckBox.enabled = true;
if (message.controller === "OculusTouch") {
switchViewFromControllerCheckBox.text = "Clicking Touch's Left Thumbstick Switches Monitor View";
} else if (message.controller === "Vive") {
switchViewFromControllerCheckBox.text = "Clicking Left Thumb Pad Switches Monitor View";
} else {
switchViewFromControllerCheckBox.text = "Pressing Ctrl+0 Switches Monitor View";
switchViewFromControllerCheckBox.checked = true;
switchViewFromControllerCheckBox.enabled = false;
}
break;
case 'showPreviewTextureNotInstructions':
console.log('showPreviewTextureNotInstructions recvd', JSON.stringify(message));
spectatorCameraPreview.url = message.url;
spectatorCameraPreview.visible = message.setting;
break;
default:
console.log('Unrecognized message from spectatorCamera.js:', JSON.stringify(message));
}
}
signal sendToScript(var message);
//
// FUNCTION DEFINITIONS END
//
}
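
The fromScript()/sendToScript() pair above is the standard tablet-app message channel. As a rough sketch of the other end, here is how a script like spectatorCamera.js might wire it up; the Tablet API calls are the usual Interface scripting entry points, while the QML path and handler bodies are illustrative, not taken from this diff:

var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");
tablet.loadQMLSource("../SpectatorCamera.qml"); // path is illustrative

// QML -> script: fired by the sendToScript(var message) signal declared above.
tablet.fromQml.connect(function (message) {
    switch (message.method) {
    case 'spectatorCameraOn':
        // create and enable the secondary camera here
        break;
    case 'setMonitorShowsCameraView':
        // message.params carries the switch state (true = camera view)
        break;
    }
});

// script -> QML: delivered to fromScript(message) above.
tablet.sendToQml({ method: 'updateSpectatorCameraCheckbox', params: true });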

View file

@ -117,26 +117,28 @@ Rectangle {
delegate: Item {
width: parent.width;
height: 36;
AudioControls.CheckBox {
id: checkbox
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
text: display;
wrap: false;
checked: selected;
enabled: false;
}
RowLayout {
width: parent.width;
MouseArea {
anchors.fill: checkbox
onClicked: Audio.setInputDevice(info);
}
AudioControls.CheckBox {
Layout.maximumWidth: parent.width - level.width - 40;
text: display;
wrap: false;
checked: selected;
onClicked: {
selected = checked;
checked = Qt.binding(function() { return selected; }); // restore binding
}
}
InputLevel {
id: level;
Layout.alignment: Qt.AlignRight;
Layout.rightMargin: 30;
visible: selected;
}
InputLevel {
id: level;
anchors.verticalCenter: parent.verticalCenter
anchors.right: parent.right
anchors.rightMargin: 30
visible: selected;
}
}
}
@ -166,7 +168,7 @@ Rectangle {
ListView {
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
height: 125;
height: Math.min(250, contentHeight);
spacing: 0;
snapMode: ListView.SnapToItem;
clip: true;
@ -174,13 +176,19 @@ Rectangle {
delegate: Item {
width: parent.width;
height: 36;
AudioControls.CheckBox {
id: checkbox
anchors.verticalCenter: parent.verticalCenter
anchors.left: parent.left
text: display;
checked: selected;
onClicked: {
selected = checked;
checked = Qt.binding(function() { return selected; }); // restore binding
}
enabled: false;
}
MouseArea {
anchors.fill: checkbox
onClicked: Audio.setOutputDevice(info);
}
}
}
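
One subtlety worth calling out in both delegates above: assigning a plain value to a QML property that was declaratively bound silently destroys the binding. The onClicked handlers therefore write the value and then immediately re-install the binding with Qt.binding(). A minimal sketch of the idiom, inside any signal handler where checked was originally bound to selected:

// 'checked: selected' in the delegate creates a binding.
checked = true; // plain assignment: the binding to 'selected' is now gone
// Re-install the binding so 'checked' follows 'selected' again:
checked = Qt.binding(function() { return selected; });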

View file

@ -65,7 +65,7 @@ Rectangle {
HiFiGlyphs {
id: image
text: hifi.glyphs.avatar1
text: hifi.glyphs.avatarTPose
size: 190
color: hifi.colors.white

View file

@ -16,6 +16,7 @@ import "../../controls-uit" as HifiControls
StackView {
id: stack
initialItem: inputConfiguration
property alias messageVisible: imageMessageBox.visible
Rectangle {
id: inputConfiguration
anchors.fill: parent
@ -26,6 +27,15 @@ StackView {
property var pluginSettings: null
HifiControls.ImageMessageBox {
id: imageMessageBox
anchors.fill: parent
z: 2000
imageWidth: 442
imageHeight: 670
source: "../../../images/calibration-help.png"
}
Rectangle {
width: inputConfiguration.width
height: 1
@ -167,7 +177,7 @@ StackView {
loader.item.pluginName = box.currentText;
}
}
if (loader.item.hasOwnProperty("displayInformation")) {
loader.item.displayConfiguration();
}
@ -183,20 +193,20 @@ StackView {
return InputConfiguration.activeInputPlugins();
}
}
function initialize() {
changeSource();
}
function changeSource() {
loader.source = "";
var source = "";
if (box.currentText == "Vive") {
source = InputConfiguration.configurationLayout("OpenVR");
} else {
source = InputConfiguration.configurationLayout(box.currentText);
}
loader.source = source;
if (source === "") {
box.label = "(not configurable)";
@ -204,14 +214,14 @@ StackView {
box.label = "";
}
}
Timer {
id: timer
repeat: false
interval: 300
onTriggered: initialize()
}
Component.onCompleted: {
timer.start();
}

View file

@ -50,9 +50,12 @@ Rectangle {
readonly property int apply: 1
readonly property int applyAndCalibrate: 2
readonly property int calibrate: 3
}
MouseArea {
id: mouseArea
@ -64,6 +67,7 @@ Rectangle {
mouse.accepted = false;
}
}
color: hifi.colors.baseGray
RalewayBold {
@ -146,6 +150,7 @@ Rectangle {
label: "Y: offset"
minimumValue: -10
stepSize: 0.0254
value: -0.05
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
@ -161,15 +166,16 @@ Rectangle {
minimumValue: -10
stepSize: 0.0254
decimals: 4
value: -0.05
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
sendConfigurationSettings();
}
}
}
RalewayBold {
id: hands
@ -245,7 +251,7 @@ Rectangle {
anchors.left: openVrConfiguration.left
anchors.leftMargin: leftMargin + 10
spacing: 10
HifiControls.SpinBox {
id: handYOffset
decimals: 4
@ -269,7 +275,7 @@ Rectangle {
stepSize: 0.0254
decimals: 4
colorScheme: hifi.colorSchemes.dark
onEditingFinished: {
sendConfigurationSettings();
}
@ -290,6 +296,52 @@ Rectangle {
anchors.leftMargin: leftMargin
}
RalewayRegular {
id: info
text: "See Recommended Tracker Placement"
color: hifi.colors.blueHighlight
size: 10
anchors {
left: additional.right
leftMargin: 10
verticalCenter: additional.verticalCenter
}
Rectangle {
id: selected
color: hifi.colors.blueHighlight
width: info.width
height: 1
anchors {
top: info.bottom
topMargin: 1
left: info.left
right: info.right
}
visible: false
}
MouseArea {
anchors.fill: parent;
hoverEnabled: true
onEntered: {
selected.visible = true;
}
onExited: {
selected.visible = false;
}
onClicked: {
stack.messageVisible = true;
}
}
}
Row {
id: feetConfig
anchors.top: additional.bottom
@ -379,6 +431,7 @@ Rectangle {
if (checked) {
hipBox.checked = true;
feetBox.checked = true;
shoulderBox.checked = false;
}
sendConfigurationSettings();
}
@ -416,6 +469,7 @@ Rectangle {
if (checked) {
hipBox.checked = true;
feetBox.checked = true;
chestBox.checked = false;
}
sendConfigurationSettings();
}
@ -463,7 +517,7 @@ Rectangle {
anchors.leftMargin: leftMargin
radius: hifi.buttons.radius
gradient: Gradient {
GradientStop {
position: 0.2
@ -479,7 +533,7 @@ Rectangle {
}
}
}
GradientStop {
position: 1.0
color: {
@ -495,10 +549,10 @@ Rectangle {
}
}
}
HiFiGlyphs {
id: glyphButton
color: enabled ? hifi.buttons.textColor[calibrationButton.color]
@ -512,7 +566,7 @@ Rectangle {
bottomMargin: 1
}
}
RalewayBold {
id: calibrationText
font.capitalization: Font.AllUppercase
@ -527,7 +581,7 @@ Rectangle {
topMargin: 7
}
}
MouseArea {
anchors.fill: parent
@ -549,19 +603,19 @@ Rectangle {
}
}
}
onPressed: {
calibrationButton.pressed = true;
}
onReleased: {
calibrationButton.pressed = false;
}
onEntered: {
calibrationButton.hovered = true;
}
onExited: {
calibrationButton.hovered = false;
}
@ -652,7 +706,7 @@ Rectangle {
RalewayBold {
id: advanceSettings
text: "Advance Settings"
text: "Advanced Settings"
size: 12
color: hifi.colors.white
@ -683,7 +737,7 @@ Rectangle {
RalewayBold {
id: viveDesktopText
size: 10
text: "Use vive devices in desktop mode"
text: "Use Vive devices in desktop mode"
color: hifi.colors.white
anchors {
@ -718,14 +772,14 @@ Rectangle {
calibratingScreen = screen.createObject();
stack.push(calibratingScreen);
}
if (status["calibrated"]) {
calibrationScreen.success();
if (status["UI"]) {
logAction("mocap_ui_success", status);
}
} else if (!status["calibrated"]) {
calibrationScreen.failure();
@ -840,11 +894,11 @@ Rectangle {
var handOverride = handSetting["override"];
var settingsChanged = false;
if (lastConfiguration["bodyConfiguration"] !== bodySetting) {
settingsChanged = true;
}
var lastHead = lastConfiguration["headConfiguration"];
if (lastHead["override"] !== headOverride) {
settingsChanged = true;
@ -854,13 +908,13 @@ Rectangle {
if (lastHand["override"] !== handOverride) {
settingsChanged = true;
}
if (settingsChanged) {
if ((!handOverride) && (!headOverride) && (bodySetting === "None")) {
state = buttonState.apply;
} else {
state = buttonState.applyAndCalibrate;
}
}
} else {
if (state == buttonState.apply) {
state = buttonState.disabled;
@ -868,7 +922,7 @@ Rectangle {
state = buttonState.calibrate;
}
}
lastConfiguration = settings;
}
@ -885,7 +939,7 @@ Rectangle {
state = buttonState.disabled;
} else {
state = buttonState.calibrate;
}
}
}
function updateCalibrationButton() {
@ -951,7 +1005,7 @@ Rectangle {
"Y": handYOffset.value,
"Z": handZOffset.value
}
var settingsObject = {
"bodyConfiguration": trackerConfiguration,
"headConfiguration": headObject,

View file

@ -99,7 +99,13 @@ StackView {
height: parent.height
MouseArea {
anchors.fill: parent
anchors {
top: parent.top
left: parent.left
right: parent.right
bottom: keyboard.top
}
propagateComposedEvents: true
onPressed: {
parent.forceActiveFocus();

View file

@ -52,8 +52,10 @@ Windows.ScrollingWindow {
// used to receive messages from interface script
function fromScript(message) {
if (loader.item.hasOwnProperty("fromScript")) {
loader.item.fromScript(message);
if (loader.item !== null) {
if (loader.item.hasOwnProperty("fromScript")) {
loader.item.fromScript(message);
}
}
}

View file

@ -50,7 +50,7 @@ Item {
id: colors
// Base colors
readonly property color baseGray: "#404040"
readonly property color baseGray: "#393939"
readonly property color darkGray: "#121212"
readonly property color baseGrayShadow: "#252525"
readonly property color baseGrayHighlight: "#575757"
@ -336,5 +336,6 @@ Item {
readonly property string source: "\ue01c"
readonly property string playback_play: "\ue01d"
readonly property string stop_square: "\ue01e"
readonly property string avatarTPose: "\ue01f"
}
}

View file

@ -112,10 +112,7 @@
#include <plugins/InputConfiguration.h>
#include <RecordingScriptingInterface.h>
#include <RenderableWebEntityItem.h>
#include <RenderShadowTask.h>
#include <render/RenderFetchCullSortTask.h>
#include <RenderDeferredTask.h>
#include <RenderForwardTask.h>
#include <UpdateSceneTask.h>
#include <RenderViewTask.h>
#include <SecondaryCamera.h>
#include <ResourceCache.h>
@ -170,6 +167,7 @@
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
#include "SpeechRecognizer.h"
#endif
#include "ui/ResourceImageItem.h"
#include "ui/AddressBarDialog.h"
#include "ui/AvatarInputs.h"
#include "ui/DialogsManager.h"
@ -952,58 +950,68 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
// Make sure we don't time out during slow operations at startup
updateHeartbeat();
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
static const QString TESTER = "HIFI_TESTER";
auto gpuIdent = GPUIdent::getInstance();
auto glContextData = getGLContextData();
QJsonObject properties = {
{ "version", applicationVersion() },
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
{ "previousSessionCrashed", _previousSessionCrashed },
{ "previousSessionRuntime", sessionRunTime.get() },
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },
{ "kernel_type", QSysInfo::kernelType() },
{ "kernel_version", QSysInfo::kernelVersion() },
{ "os_type", QSysInfo::productType() },
{ "os_version", QSysInfo::productVersion() },
{ "gpu_name", gpuIdent->getName() },
{ "gpu_driver", gpuIdent->getDriver() },
{ "gpu_memory", static_cast<qint64>(gpuIdent->getMemory()) },
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
{ "gl_version", glContextData["version"] },
{ "gl_vender", glContextData["vendor"] },
{ "gl_sl_version", glContextData["sl_version"] },
{ "gl_renderer", glContextData["renderer"] },
{ "ideal_thread_count", QThread::idealThreadCount() }
};
auto macVersion = QSysInfo::macVersion();
if (macVersion != QSysInfo::MV_None) {
properties["os_osx_version"] = QSysInfo::macVersion();
}
auto windowsVersion = QSysInfo::windowsVersion();
if (windowsVersion != QSysInfo::WV_None) {
properties["os_win_version"] = QSysInfo::windowsVersion();
}
ProcessorInfo procInfo;
if (getProcessorInfo(procInfo)) {
properties["processor_core_count"] = procInfo.numProcessorCores;
properties["logical_processor_count"] = procInfo.numLogicalProcessors;
properties["processor_l1_cache_count"] = procInfo.numProcessorCachesL1;
properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
}
// add firstRun flag from settings to launch event
Setting::Handle<bool> firstRun { Settings::firstRun, true };
properties["first_run"] = firstRun.get();
// add the user's machine ID to the launch event
properties["machine_fingerprint"] = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
auto& userActivityLogger = UserActivityLogger::getInstance();
if (!userActivityLogger.isDisabledSettingSet()) {
// the user activity logger is opt-out for Interface
// but it's defaulted to disabled for other targets
// so we need to enable it here if it has never been disabled by the user
userActivityLogger.disable(false);
}
UserActivityLogger::getInstance().logAction("launch", properties);
if (userActivityLogger.isEnabled()) {
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
static const QString TESTER = "HIFI_TESTER";
auto gpuIdent = GPUIdent::getInstance();
auto glContextData = getGLContextData();
QJsonObject properties = {
{ "version", applicationVersion() },
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
{ "previousSessionCrashed", _previousSessionCrashed },
{ "previousSessionRuntime", sessionRunTime.get() },
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },
{ "kernel_type", QSysInfo::kernelType() },
{ "kernel_version", QSysInfo::kernelVersion() },
{ "os_type", QSysInfo::productType() },
{ "os_version", QSysInfo::productVersion() },
{ "gpu_name", gpuIdent->getName() },
{ "gpu_driver", gpuIdent->getDriver() },
{ "gpu_memory", static_cast<qint64>(gpuIdent->getMemory()) },
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
{ "gl_version", glContextData["version"] },
{ "gl_vender", glContextData["vendor"] },
{ "gl_sl_version", glContextData["sl_version"] },
{ "gl_renderer", glContextData["renderer"] },
{ "ideal_thread_count", QThread::idealThreadCount() }
};
auto macVersion = QSysInfo::macVersion();
if (macVersion != QSysInfo::MV_None) {
properties["os_osx_version"] = QSysInfo::macVersion();
}
auto windowsVersion = QSysInfo::windowsVersion();
if (windowsVersion != QSysInfo::WV_None) {
properties["os_win_version"] = QSysInfo::windowsVersion();
}
ProcessorInfo procInfo;
if (getProcessorInfo(procInfo)) {
properties["processor_core_count"] = procInfo.numProcessorCores;
properties["logical_processor_count"] = procInfo.numLogicalProcessors;
properties["processor_l1_cache_count"] = procInfo.numProcessorCachesL1;
properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
}
properties["first_run"] = firstRun.get();
// add the user's machine ID to the launch event
properties["machine_fingerprint"] = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
userActivityLogger.logAction("launch", properties);
}
// Tell our entity edit sender about our known jurisdictions
_entityEditSender.setServerJurisdictions(&_entityServerJurisdictions);
@ -1963,7 +1971,8 @@ void Application::initializeGL() {
render::CullFunctor cullFunctor = LODManager::shouldRender;
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
bool isDeferred = !QProcessEnvironment::systemEnvironment().contains(RENDER_FORWARD);
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraFrame", cullFunctor);
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor);
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, isDeferred);
_renderEngine->load();
_renderEngine->registerScene(_main3DScene);
@ -2011,6 +2020,7 @@ void Application::initializeUi() {
LoginDialog::registerType();
Tooltip::registerType();
UpdateDialog::registerType();
qmlRegisterType<ResourceImageItem>("Hifi", 1, 0, "ResourceImageItem");
qmlRegisterType<Preference>("Hifi", 1, 0, "Preference");
auto offscreenUi = DependencyManager::get<OffscreenUi>();
@ -2732,6 +2742,16 @@ bool Application::event(QEvent* event) {
static_cast<LambdaEvent*>(event)->call();
return true;
// Explicit idle keeps the idle running at a lower interval, but without any rendering
// see (windowMinimizedChanged)
case Event::Idle:
{
float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
_lastTimeUpdated.start();
idle(nsecsElapsed);
}
return true;
case Event::Present:
if (!_renderRequested) {
float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
@ -2741,7 +2761,7 @@ bool Application::event(QEvent* event) {
idle(nsecsElapsed);
postEvent(this, new QEvent(static_cast<QEvent::Type>(Paint)), Qt::HighEventPriority);
}
}
}
return true;
case Event::Paint:
@ -3143,59 +3163,6 @@ void Application::keyPressEvent(QKeyEvent* event) {
break;
#endif
case Qt::Key_H: {
// whenever switching to/from full screen mirror from the keyboard, remember
// the state you were in before full screen mirror, and return to that.
auto previousMode = _myCamera.getMode();
if (previousMode != CAMERA_MODE_MIRROR) {
switch (previousMode) {
case CAMERA_MODE_FIRST_PERSON:
_returnFromFullScreenMirrorTo = MenuOption::FirstPerson;
break;
case CAMERA_MODE_THIRD_PERSON:
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
break;
// FIXME - it's not clear that these modes make sense to return to...
case CAMERA_MODE_INDEPENDENT:
_returnFromFullScreenMirrorTo = MenuOption::IndependentMode;
break;
case CAMERA_MODE_ENTITY:
_returnFromFullScreenMirrorTo = MenuOption::CameraEntityMode;
break;
default:
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
break;
}
}
bool isMirrorChecked = Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
Menu::getInstance()->setIsOptionChecked(MenuOption::FullscreenMirror, !isMirrorChecked);
if (isMirrorChecked) {
// if we got here without coming in from a non-Full Screen mirror case, then our
// _returnFromFullScreenMirrorTo is unknown. In that case we'll go to the old
// behavior of returning to ThirdPerson
if (_returnFromFullScreenMirrorTo.isEmpty()) {
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
}
Menu::getInstance()->setIsOptionChecked(_returnFromFullScreenMirrorTo, true);
}
cameraMenuChanged();
break;
}
case Qt::Key_P: {
if (!(isShifted || isMeta || isOption)) {
bool isFirstPersonChecked = Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson);
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, !isFirstPersonChecked);
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, isFirstPersonChecked);
cameraMenuChanged();
}
break;
}
case Qt::Key_Slash:
Menu::getInstance()->triggerOption(MenuOption::Stats);
break;
@ -3770,8 +3737,8 @@ void updateCpuInformation() {
// Update friendly structure
auto& myCpuInfo = myCpuInfos[i];
myCpuInfo.update(cpuInfo);
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
{ "kernel", myCpuInfo.kernelUsage },
{ "user", myCpuInfo.userUsage }
});
}
@ -3838,7 +3805,7 @@ void getCpuUsage(vec3& systemAndUser) {
void setupCpuMonitorThread() {
initCpuUsage();
auto cpuMonitorThread = QThread::currentThread();
QTimer* timer = new QTimer();
timer->setInterval(50);
QObject::connect(timer, &QTimer::timeout, [] {
@ -5355,7 +5322,7 @@ namespace render {
auto& batch = *args->_batch;
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
renderWorldBox(batch);
renderWorldBox(args, batch);
}
}
}
@ -5418,10 +5385,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
}
{
PerformanceTimer perfTimer("SceneProcessTransaction");
_main3DScene->enqueueTransaction(transaction);
_main3DScene->processTransactionQueue();
}
// For now every frame pass the renderContext
@ -7112,6 +7076,12 @@ void Application::updateDisplayMode() {
// reset the avatar, to set head and hand palms back to a reasonable default pose.
getMyAvatar()->reset(false);
// switch to first person if entering hmd and setting is checked
if (isHmd && menu->isOptionChecked(MenuOption::FirstPersonHMD)) {
menu->setIsOptionChecked(MenuOption::FirstPerson, true);
cameraMenuChanged();
}
Q_ASSERT_X(_displayPlugin, "Application::updateDisplayMode", "could not find an activated display plugin");
}

View file

@ -678,7 +678,7 @@ private:
QTimer _addAssetToWorldErrorTimer;
FileScriptingInterface* _fileDownload;
AudioInjector* _snapshotSoundInjector { nullptr };
AudioInjectorPointer _snapshotSoundInjector;
SharedSoundPointer _snapshotSound;
DisplayPluginPointer _autoSwitchDisplayModeSupportedHMDPlugin;

View file

@ -223,7 +223,7 @@ Menu::Menu() {
// View > First Person
cameraModeGroup->addAction(addCheckableActionToQMenuAndActionHash(viewMenu,
MenuOption::FirstPerson, 0, // QML Qt:: Key_P
MenuOption::FirstPerson, 0,
true, qApp, SLOT(cameraMenuChanged())));
// View > Third Person
@ -233,7 +233,7 @@ Menu::Menu() {
// View > Mirror
cameraModeGroup->addAction(addCheckableActionToQMenuAndActionHash(viewMenu,
MenuOption::FullscreenMirror, 0, // QML Qt::Key_H,
MenuOption::FullscreenMirror, 0,
false, qApp, SLOT(cameraMenuChanged())));
// View > Independent [advanced]
@ -258,6 +258,9 @@ Menu::Menu() {
// View > Overlays
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Overlays, 0, true);
// View > Enter First Person Mode in HMD
addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::FirstPersonHMD, 0, true);
// Navigate menu ----------------------------------
MenuWrapper* navigateMenu = addMenu("Navigate");
@ -319,7 +322,7 @@ Menu::Menu() {
QString("../../hifi/tablet/TabletLodPreferences.qml"), "LodPreferencesDialog");
});
action = addActionToQMenuAndActionHash(settingsMenu, "Controller Settings");
action = addActionToQMenuAndActionHash(settingsMenu, "Controller Settings...");
connect(action, &QAction::triggered, [] {
auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
auto hmd = DependencyManager::get<HMDScriptingInterface>();
@ -677,7 +680,7 @@ Menu::Menu() {
// Developer > Physics >>>
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
{
auto drawStatusConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<render::DrawStatus>();
auto drawStatusConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<render::DrawStatus>("RenderMainView.DrawStatus");
addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowOwned,
0, false, drawStatusConfig, SLOT(setShowNetwork(bool)));
}

View file

@ -105,6 +105,7 @@ namespace MenuOption {
const QString ExpandPhysicsSimulationTiming = "Expand /physics";
const QString ExpandUpdateTiming = "Expand /update";
const QString FirstPerson = "First Person";
const QString FirstPersonHMD = "Enter First Person Mode in HMD";
const QString FivePointCalibration = "5 Point Calibration";
const QString FixGaze = "Fix Gaze (no saccade)";
const QString Forward = "Forward";

View file

@ -9,9 +9,11 @@
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#include "Application.h"
#include "SecondaryCamera.h"
#include <TextureCache.h>
#include <gpu/Context.h>
#include <EntityScriptingInterface.h>
using RenderArgsPointer = std::shared_ptr<RenderArgs>;
@ -27,39 +29,32 @@ void MainRenderTask::build(JobModel& task, const render::Varying& inputs, render
}
}
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
bool wasEnabled = isEnabled();
setEnabled(false);
auto textureCache = DependencyManager::get<TextureCache>();
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
setEnabled(wasEnabled);
}
void SecondaryCameraRenderTaskConfig::resetSizeSpectatorCamera(int width, int height) { // Carefully adjust the framebuffer / texture.
resetSize(width, height);
}
class BeginSecondaryCameraFrame { // Changes renderContext for our framebuffer and and view.
class SecondaryCameraJob { // Changes renderContext for our framebuffer and view.
QUuid _attachedEntityId{};
glm::vec3 _position{};
glm::quat _orientation{};
float _vFoV{};
float _nearClipPlaneDistance{};
float _farClipPlaneDistance{};
EntityPropertyFlags _attachedEntityPropertyFlags;
QSharedPointer<EntityScriptingInterface> _entityScriptingInterface;
public:
using Config = BeginSecondaryCameraFrameConfig;
using JobModel = render::Job::ModelO<BeginSecondaryCameraFrame, RenderArgsPointer, Config>;
BeginSecondaryCameraFrame() {
using Config = SecondaryCameraJobConfig;
using JobModel = render::Job::ModelO<SecondaryCameraJob, RenderArgsPointer, Config>;
SecondaryCameraJob() {
_cachedArgsPointer = std::make_shared<RenderArgs>(_cachedArgs);
_entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
_attachedEntityPropertyFlags += PROP_POSITION;
_attachedEntityPropertyFlags += PROP_ROTATION;
}
void configure(const Config& config) {
if (config.enabled || config.alwaysEnabled) {
_position = config.position;
_orientation = config.orientation;
_vFoV = config.vFoV;
_nearClipPlaneDistance = config.nearClipPlaneDistance;
_farClipPlaneDistance = config.farClipPlaneDistance;
}
_attachedEntityId = config.attachedEntityId;
_position = config.position;
_orientation = config.orientation;
_vFoV = config.vFoV;
_nearClipPlaneDistance = config.nearClipPlaneDistance;
_farClipPlaneDistance = config.farClipPlaneDistance;
}
void run(const render::RenderContextPointer& renderContext, RenderArgsPointer& cachedArgs) {
@ -83,8 +78,14 @@ public:
});
auto srcViewFrustum = args->getViewFrustum();
srcViewFrustum.setPosition(_position);
srcViewFrustum.setOrientation(_orientation);
if (!_attachedEntityId.isNull()) {
EntityItemProperties entityProperties = _entityScriptingInterface->getEntityProperties(_attachedEntityId, _attachedEntityPropertyFlags);
srcViewFrustum.setPosition(entityProperties.getPosition());
srcViewFrustum.setOrientation(entityProperties.getRotation());
} else {
srcViewFrustum.setPosition(_position);
srcViewFrustum.setOrientation(_orientation);
}
srcViewFrustum.setProjection(glm::perspective(glm::radians(_vFoV), ((float)args->_viewport.z / (float)args->_viewport.w), _nearClipPlaneDistance, _farClipPlaneDistance));
// Without calculating the bound planes, the secondary camera will use the same culling frustum as the main camera,
// which is not what we want here.
@ -99,6 +100,41 @@ protected:
RenderArgsPointer _cachedArgsPointer;
};
void SecondaryCameraJobConfig::setPosition(glm::vec3 pos) {
if (attachedEntityId.isNull()) {
position = pos;
emit dirty();
} else {
qDebug() << "ERROR: Cannot set position of SecondaryCamera while attachedEntityId is set.";
}
}
void SecondaryCameraJobConfig::setOrientation(glm::quat orient) {
if (attachedEntityId.isNull()) {
orientation = orient;
emit dirty();
} else {
qDebug() << "ERROR: Cannot set orientation of SecondaryCamera while attachedEntityId is set.";
}
}
void SecondaryCameraJobConfig::enableSecondaryCameraRenderConfigs(bool enabled) {
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->setEnabled(enabled);
setEnabled(enabled);
}
void SecondaryCameraJobConfig::resetSizeSpectatorCamera(int width, int height) { // Carefully adjust the framebuffer / texture.
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->resetSize(width, height);
}
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
bool wasEnabled = isEnabled();
setEnabled(false);
auto textureCache = DependencyManager::get<TextureCache>();
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
setEnabled(wasEnabled);
}
class EndSecondaryCameraFrame { // Restores renderContext.
public:
using JobModel = render::Job::ModelI<EndSecondaryCameraFrame, RenderArgsPointer>;
@ -119,7 +155,7 @@ public:
};
void SecondaryCameraRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor) {
const auto cachedArg = task.addJob<BeginSecondaryCameraFrame>("BeginSecondaryCamera");
const auto cachedArg = task.addJob<SecondaryCameraJob>("SecondaryCamera");
const auto items = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
assert(items.canCast<RenderFetchCullSortTask::Output>());
task.addJob<RenderDeferredTask>("RenderDeferredTask", items);
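
Because SecondaryCameraJobConfig is exposed to script, the setter guards above are what a script author actually hits. A rough sketch of driving it from JavaScript, assuming the "SecondaryCamera" job name registered in SecondaryCameraRenderTask::build() above and the usual Render.getConfig() scripting entry point; the entity id is illustrative:

var config = Render.getConfig("SecondaryCamera");

config.enableSecondaryCameraRenderConfigs(true); // enables the parent task and this job
config.resetSizeSpectatorCamera(1280, 720);      // resizes the spectator framebuffer

// Free camera: position/orientation are writable while attachedEntityId is null.
config.vFoV = 45.0;
config.position = { x: 0, y: 1.5, z: 0 };

// Entity-driven camera: the job now pulls position/rotation from the entity each
// frame, and setPosition()/setOrientation() log an error instead of taking effect.
config.attachedEntityId = myCameraEntityID; // myCameraEntityID: an entity you created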

View file

@ -28,34 +28,40 @@ public:
void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred = true);
};
class BeginSecondaryCameraFrameConfig : public render::Task::Config { // Exposes secondary camera parameters to JavaScript.
class SecondaryCameraJobConfig : public render::Task::Config { // Exposes secondary camera parameters to JavaScript.
Q_OBJECT
Q_PROPERTY(glm::vec3 position MEMBER position NOTIFY dirty) // of viewpoint to render from
Q_PROPERTY(glm::quat orientation MEMBER orientation NOTIFY dirty) // of viewpoint to render from
Q_PROPERTY(QUuid attachedEntityId MEMBER attachedEntityId NOTIFY dirty) // entity whose properties define camera position and orientation
Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition) // of viewpoint to render from
Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation) // of viewpoint to render from
Q_PROPERTY(float vFoV MEMBER vFoV NOTIFY dirty) // Secondary camera's vertical field of view. In degrees.
Q_PROPERTY(float nearClipPlaneDistance MEMBER nearClipPlaneDistance NOTIFY dirty) // Secondary camera's near clip plane distance. In meters.
Q_PROPERTY(float farClipPlaneDistance MEMBER farClipPlaneDistance NOTIFY dirty) // Secondary camera's far clip plane distance. In meters.
public:
QUuid attachedEntityId{};
glm::vec3 position{};
glm::quat orientation{};
float vFoV{ 45.0f };
float nearClipPlaneDistance{ 0.1f };
float farClipPlaneDistance{ 100.0f };
BeginSecondaryCameraFrameConfig() : render::Task::Config(false) {}
float vFoV{ DEFAULT_FIELD_OF_VIEW_DEGREES };
float nearClipPlaneDistance{ DEFAULT_NEAR_CLIP };
float farClipPlaneDistance{ DEFAULT_FAR_CLIP };
SecondaryCameraJobConfig() : render::Task::Config(false) {}
signals:
void dirty();
public slots:
glm::vec3 getPosition() { return position; }
void setPosition(glm::vec3 pos);
glm::quat getOrientation() { return orientation; }
void setOrientation(glm::quat orient);
void enableSecondaryCameraRenderConfigs(bool enabled);
void resetSizeSpectatorCamera(int width, int height);
};
class SecondaryCameraRenderTaskConfig : public render::Task::Config {
Q_OBJECT
public:
SecondaryCameraRenderTaskConfig() : render::Task::Config(false) {}
private:
void resetSize(int width, int height);
signals:
void dirty();
public slots:
void resetSizeSpectatorCamera(int width, int height);
};
class SecondaryCameraRenderTask {

View file

@ -34,7 +34,7 @@
using namespace std;
void renderWorldBox(gpu::Batch& batch) {
void renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// Show center of world
@ -115,7 +115,7 @@ void renderWorldBox(gpu::Batch& batch) {
geometryIds[17]);
geometryCache->renderWireCubeInstance(batch, GREY4);
geometryCache->renderWireCubeInstance(args, batch, GREY4);
// Draw meter markers along the 3 axis to help with measuring things
const float MARKER_DISTANCE = 1.0f;
@ -123,23 +123,23 @@ void renderWorldBox(gpu::Batch& batch) {
transform = Transform().setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, RED);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, RED);
geometryCache->renderSolidSphereInstance(args, batch, RED);
transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, GREEN);
geometryCache->renderSolidSphereInstance(args, batch, GREEN);
transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, BLUE);
geometryCache->renderSolidSphereInstance(args, batch, BLUE);
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
batch.setModelTransform(transform);
geometryCache->renderSolidSphereInstance(batch, GREY);
geometryCache->renderSolidSphereInstance(args, batch, GREY);
}
// Do some basic timing tests and report the results

View file

@ -16,8 +16,9 @@
#include <glm/gtc/quaternion.hpp>
#include <gpu/Batch.h>
#include <render/Forward.h>
void renderWorldBox(gpu::Batch& batch);
void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
void runTimingTests();
void runUnitTests();

View file

@ -63,7 +63,6 @@ AvatarManager::AvatarManager(QObject* parent) :
packetReceiver.registerListener(PacketType::BulkAvatarData, this, "processAvatarDataPacket");
packetReceiver.registerListener(PacketType::KillAvatar, this, "processKillAvatar");
packetReceiver.registerListener(PacketType::AvatarIdentity, this, "processAvatarIdentityPacket");
packetReceiver.registerListener(PacketType::ExitingSpaceBubble, this, "processExitingSpaceBubble");
// when we hear that the user has ignored an avatar by session UUID
// immediately remove that avatar instead of waiting for the absence of packets from avatar mixer
@ -320,9 +319,6 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble) {
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
}
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble || removalReason == YourAvatarEnteredTheirBubble) {
DependencyManager::get<NodeList>()->radiusIgnoreNodeBySessionID(avatar->getSessionUUID(), true);
} else if (removalReason == KillAvatarReason::AvatarDisconnected) {
// remove from node sets, if present
DependencyManager::get<NodeList>()->removeFromIgnoreMuteSets(avatar->getSessionUUID());
@ -434,8 +430,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
// but most avatars are roughly the same size, so let's not be so fancy yet.
const float AVATAR_STRETCH_FACTOR = 1.0f;
_collisionInjectors.remove_if([](QPointer<AudioInjector>& injector) {
_collisionInjectors.remove_if([](const AudioInjectorPointer& injector) {
return !injector || injector->isFinished();
});

View file

@ -22,11 +22,11 @@
#include <SimpleMovingAverage.h>
#include <shared/RateCounter.h>
#include <avatars-renderer/ScriptAvatar.h>
#include <AudioInjector.h>
#include "AvatarMotionState.h"
#include "MyAvatar.h"
class AudioInjector;
class AvatarManager : public AvatarHashMap {
Q_OBJECT
@ -104,7 +104,7 @@ private:
std::shared_ptr<MyAvatar> _myAvatar;
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
std::list<QPointer<AudioInjector>> _collisionInjectors;
std::list<AudioInjectorPointer> _collisionInjectors;
RateCounter<> _myAvatarSendRate;
int _numAvatarsUpdated { 0 };

View file

@ -295,7 +295,7 @@ void MyAvatar::simulateAttachments(float deltaTime) {
// don't update attachments here, do it in harvestResultsFromPhysicsSimulation()
}
QByteArray MyAvatar::toByteArrayStateful(AvatarDataDetail dataDetail) {
QByteArray MyAvatar::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
CameraMode mode = qApp->getCamera().getMode();
_globalPosition = getPosition();
// This might not be right! Isn't the capsule local offset in avatar space, and don't we need to add the radius to the y as well? -HRS 5/26/17
@ -1356,6 +1356,7 @@ void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
Avatar::setSkeletonModelURL(skeletonModelURL);
_skeletonModel->setVisibleInScene(true, qApp->getMain3DScene());
_headBoneSet.clear();
emit skeletonChanged();
}

View file

@ -606,12 +606,13 @@ signals:
void onLoadComplete();
void wentAway();
void wentActive();
void skeletonChanged();
private:
bool requiresSafeLanding(const glm::vec3& positionIn, glm::vec3& positionOut);
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail) override;
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) override;
void simulate(float deltaTime);
void updateFromTrackers(float deltaTime);

View file

@ -133,4 +133,12 @@ void Audio::setReverb(bool enable) {
void Audio::setReverbOptions(const AudioEffectOptions* options) {
DependencyManager::get<AudioClient>()->setReverbOptions(options);
}
}
void Audio::setInputDevice(const QAudioDeviceInfo& device) {
_devices.chooseInputDevice(device);
}
void Audio::setOutputDevice(const QAudioDeviceInfo& device) {
_devices.chooseOutputDevice(device);
}

View file

@ -50,6 +50,8 @@ public:
void showMicMeter(bool show);
void setInputVolume(float volume);
Q_INVOKABLE void setInputDevice(const QAudioDeviceInfo& device);
Q_INVOKABLE void setOutputDevice(const QAudioDeviceInfo& device);
Q_INVOKABLE void setReverb(bool enable);
Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);

View file

@ -38,7 +38,8 @@ Setting::Handle<QString>& getSetting(bool contextIsHMD, QAudio::Mode mode) {
QHash<int, QByteArray> AudioDeviceList::_roles {
{ Qt::DisplayRole, "display" },
{ Qt::CheckStateRole, "selected" }
{ Qt::CheckStateRole, "selected" },
{ Qt::UserRole, "info" }
};
Qt::ItemFlags AudioDeviceList::_flags { Qt::ItemIsSelectable | Qt::ItemIsEnabled };
@ -51,66 +52,24 @@ QVariant AudioDeviceList::data(const QModelIndex& index, int role) const {
return _devices.at(index.row()).display;
} else if (role == Qt::CheckStateRole) {
return _devices.at(index.row()).selected;
} else if (role == Qt::UserRole) {
return QVariant::fromValue<QAudioDeviceInfo>(_devices.at(index.row()).info);
} else {
return QVariant();
}
}
bool AudioDeviceList::setData(const QModelIndex& index, const QVariant& value, int role) {
if (!index.isValid() || index.row() >= _devices.size() || role != Qt::CheckStateRole) {
return false;
}
// only allow switching to a new device, not deactivating an in-use device
auto selected = value.toBool();
if (!selected) {
return false;
}
return setDevice(index.row(), true);
}
bool AudioDeviceList::setDevice(int row, bool fromUser) {
bool success = false;
auto& device = _devices[row];
_userSelection = fromUser;
// skip if already selected
if (!device.selected) {
auto client = DependencyManager::get<AudioClient>();
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
Q_ARG(QAudio::Mode, _mode),
Q_ARG(const QAudioDeviceInfo&, device.info));
}
emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
return success;
}
void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
bool success { false };
// try to set the last selected device
if (!device.isNull()) {
auto i = 0;
for (; i < rowCount(); ++i) {
if (device == _devices[i].info.deviceName()) {
break;
}
}
if (i < rowCount()) {
success = setDevice(i, false);
}
// the selection failed - reset it
if (!success) {
emit deviceSelected();
}
}
auto client = DependencyManager::get<AudioClient>().data();
auto deviceName = getSetting(contextIsHMD, _mode).get();
bool switchResult = false;
QMetaObject::invokeMethod(client, "switchAudioDevice", Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, switchResult),
Q_ARG(QAudio::Mode, _mode), Q_ARG(QString, deviceName));
// try to set to the default device for this mode
if (!success) {
auto client = DependencyManager::get<AudioClient>().data();
if (!switchResult) {
if (contextIsHMD) {
QString deviceName;
if (_mode == QAudio::AudioInput) {
@ -131,7 +90,6 @@ void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
auto oldDevice = _selectedDevice;
_selectedDevice = device;
QModelIndex index;
for (auto i = 0; i < _devices.size(); ++i) {
AudioDevice& device = _devices[i];
@ -139,15 +97,9 @@ void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
device.selected = false;
} else if (device.info == _selectedDevice) {
device.selected = true;
index = createIndex(i, 0);
}
}
if (_userSelection) {
_userSelection = false;
emit deviceSelected(_selectedDevice, oldDevice);
}
emit deviceChanged(_selectedDevice);
emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
}
@ -182,13 +134,6 @@ AudioDevices::AudioDevices(bool& contextIsHMD) : _contextIsHMD(contextIsHMD) {
_outputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioOutput));
_inputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioInput));
_outputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioOutput));
connect(&_inputs, &AudioDeviceList::deviceSelected, [&](const QAudioDeviceInfo& device, const QAudioDeviceInfo& previousDevice) {
onDeviceSelected(QAudio::AudioInput, device, previousDevice);
});
connect(&_outputs, &AudioDeviceList::deviceSelected, [&](const QAudioDeviceInfo& device, const QAudioDeviceInfo& previousDevice) {
onDeviceSelected(QAudio::AudioOutput, device, previousDevice);
});
}
void AudioDevices::onContextChanged(const QString& context) {
@ -244,22 +189,40 @@ void AudioDevices::onDeviceChanged(QAudio::Mode mode, const QAudioDeviceInfo& de
}
void AudioDevices::onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceInfo>& devices) {
static bool initialized { false };
auto initialize = [&]{
if (initialized) {
onContextChanged(QString());
} else {
initialized = true;
}
};
static std::once_flag once;
if (mode == QAudio::AudioInput) {
_inputs.onDevicesChanged(devices);
static std::once_flag inputFlag;
std::call_once(inputFlag, initialize);
} else { // if (mode == QAudio::AudioOutput)
_outputs.onDevicesChanged(devices);
static std::once_flag outputFlag;
std::call_once(outputFlag, initialize);
}
std::call_once(once, [&] { onContextChanged(QString()); });
}
void AudioDevices::chooseInputDevice(const QAudioDeviceInfo& device) {
auto client = DependencyManager::get<AudioClient>();
bool success = false;
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, success),
Q_ARG(QAudio::Mode, QAudio::AudioInput),
Q_ARG(const QAudioDeviceInfo&, device));
if (success) {
onDeviceSelected(QAudio::AudioInput, device, _inputs._selectedDevice);
}
}
void AudioDevices::chooseOutputDevice(const QAudioDeviceInfo& device) {
auto client = DependencyManager::get<AudioClient>();
bool success = false;
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
Qt::BlockingQueuedConnection,
Q_RETURN_ARG(bool, success),
Q_ARG(QAudio::Mode, QAudio::AudioOutput),
Q_ARG(const QAudioDeviceInfo&, device));
if (success) {
onDeviceSelected(QAudio::AudioOutput, device, _outputs._selectedDevice);
}
}

View file

@ -37,14 +37,11 @@ public:
// get/set devices through a QML ListView
QVariant data(const QModelIndex& index, int role) const override;
bool setData(const QModelIndex& index, const QVariant &value, int role) override;
// reset device to the last selected device in this context, or the default
void resetDevice(bool contextIsHMD, const QString& device);
signals:
void deviceSelected(const QAudioDeviceInfo& device = QAudioDeviceInfo(),
const QAudioDeviceInfo& previousDevice = QAudioDeviceInfo());
void deviceChanged(const QAudioDeviceInfo& device);
private slots:
@ -54,12 +51,9 @@ private slots:
private:
friend class AudioDevices;
bool setDevice(int index, bool fromUser);
static QHash<int, QByteArray> _roles;
static Qt::ItemFlags _flags;
bool _userSelection { false };
QAudio::Mode _mode;
const QAudio::Mode _mode;
QAudioDeviceInfo _selectedDevice;
QList<AudioDevice> _devices;
};
@ -73,6 +67,8 @@ class AudioDevices : public QObject {
public:
AudioDevices(bool& contextIsHMD);
void chooseInputDevice(const QAudioDeviceInfo& device);
void chooseOutputDevice(const QAudioDeviceInfo& device);
signals:
void nop();

View file

@ -0,0 +1,114 @@
//
// ResourceImageItem.cpp
//
// Created by David Kelly and Howard Stearns on 2017/06/08
// Copyright 2017 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
//#include "Application.h"
#include "ResourceImageItem.h"
#include <QOpenGLFramebufferObjectFormat>
#include <QOpenGLFunctions>
#include <QOpenGLExtraFunctions>
#include <QOpenGLContext>
ResourceImageItem::ResourceImageItem() : QQuickFramebufferObject() {
auto textureCache = DependencyManager::get<TextureCache>();
connect(textureCache.data(), SIGNAL(spectatorCameraFramebufferReset()), this, SLOT(update()));
}
void ResourceImageItem::setUrl(const QString& url) {
if (url != m_url) {
m_url = url;
update();
}
}
void ResourceImageItem::setReady(bool ready) {
if (ready != m_ready) {
m_ready = ready;
update();
}
}
void ResourceImageItemRenderer::onUpdateTimer() {
if (_ready) {
if (_networkTexture && _networkTexture->isLoaded()) {
if(_fboMutex.tryLock()) {
invalidateFramebufferObject();
qApp->getActiveDisplayPlugin()->copyTextureToQuickFramebuffer(_networkTexture, _copyFbo, &_fenceSync);
_fboMutex.unlock();
} else {
qDebug() << "couldn't get a lock, using last frame";
}
} else {
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
}
}
update();
}
ResourceImageItemRenderer::ResourceImageItemRenderer() : QQuickFramebufferObject::Renderer() {
connect(&_updateTimer, SIGNAL(timeout()), this, SLOT(onUpdateTimer()));
auto textureCache = DependencyManager::get<TextureCache>();
}
void ResourceImageItemRenderer::synchronize(QQuickFramebufferObject* item) {
ResourceImageItem* resourceImageItem = static_cast<ResourceImageItem*>(item);
resourceImageItem->setFlag(QQuickItem::ItemHasContents);
_url = resourceImageItem->getUrl();
_ready = resourceImageItem->getReady();
_visible = resourceImageItem->isVisible();
_window = resourceImageItem->window();
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
static const int UPDATE_TIMER_DELAY_IN_MS = 100; // 100 ms = 10 hz for now
if (_ready && _visible && !_updateTimer.isActive()) {
_updateTimer.start(UPDATE_TIMER_DELAY_IN_MS);
} else if (!(_ready && _visible) && _updateTimer.isActive()) {
_updateTimer.stop();
}
}
QOpenGLFramebufferObject* ResourceImageItemRenderer::createFramebufferObject(const QSize& size) {
if (_copyFbo) {
delete _copyFbo;
}
QOpenGLFramebufferObjectFormat format;
format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
_copyFbo = new QOpenGLFramebufferObject(size, format);
_copyFbo->bind();
return new QOpenGLFramebufferObject(size, format);
}
void ResourceImageItemRenderer::render() {
auto f = QOpenGLContext::currentContext()->extraFunctions();
if (_fenceSync) {
f->glWaitSync(_fenceSync, 0, GL_TIMEOUT_IGNORED);
f->glDeleteSync(_fenceSync);
_fenceSync = 0;
}
if (_ready) {
_fboMutex.lock();
_copyFbo->bind();
QOpenGLFramebufferObject::blitFramebuffer(framebufferObject(), _copyFbo, GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT, GL_NEAREST);
// this clears the copyFbo texture
// so next frame starts fresh - helps
// when aspect ratio changes
_copyFbo->takeTexture();
_copyFbo->bind();
_copyFbo->release();
_fboMutex.unlock();
}
glFlush();
_window->resetOpenGLState();
}

View file

@ -0,0 +1,63 @@
//
// ResourceImageItem.h
//
// Created by David Kelly and Howard Stearns on 2017/06/08
// Copyright 2017 High Fidelity, Inc.
// Distributed under the Apache License, Version 2.0
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
#pragma once
#ifndef hifi_ResourceImageItem_h
#define hifi_ResourceImageItem_h
#include "Application.h"
#include <QQuickFramebufferObject>
#include <QQuickWindow>
#include <QTimer>
#include <TextureCache.h>
class ResourceImageItemRenderer : public QObject, public QQuickFramebufferObject::Renderer {
Q_OBJECT
public:
ResourceImageItemRenderer();
QOpenGLFramebufferObject* createFramebufferObject(const QSize& size) override;
void synchronize(QQuickFramebufferObject* item) override;
void render() override;
private:
bool _ready;
QString _url;
bool _visible;
NetworkTexturePointer _networkTexture;
QQuickWindow* _window;
QMutex _fboMutex;
QOpenGLFramebufferObject* _copyFbo { nullptr };
GLsync _fenceSync { 0 };
QTimer _updateTimer;
public slots:
void onUpdateTimer();
};
class ResourceImageItem : public QQuickFramebufferObject {
Q_OBJECT
Q_PROPERTY(QString url READ getUrl WRITE setUrl)
Q_PROPERTY(bool ready READ getReady WRITE setReady)
public:
ResourceImageItem();
QString getUrl() const { return m_url; }
void setUrl(const QString& url);
bool getReady() const { return m_ready; }
void setReady(bool ready);
QQuickFramebufferObject::Renderer* createRenderer() const override { return new ResourceImageItemRenderer; }
private:
QString m_url;
bool m_ready { false };
};
#endif // hifi_ResourceImageItem_h

View file

@ -37,9 +37,11 @@ QVariant Billboard3DOverlay::getProperty(const QString &property) {
return Planar3DOverlay::getProperty(property);
}
void Billboard3DOverlay::applyTransformTo(Transform& transform, bool force) {
bool Billboard3DOverlay::applyTransformTo(Transform& transform, bool force) {
bool transformChanged = false;
if (force || usecTimestampNow() > _transformExpiry) {
PanelAttachable::applyTransformTo(transform, true);
pointTransformAtCamera(transform, getOffsetRotation());
transformChanged = PanelAttachable::applyTransformTo(transform, true);
transformChanged |= pointTransformAtCamera(transform, getOffsetRotation());
}
return transformChanged;
}

View file

@ -27,7 +27,7 @@ public:
QVariant getProperty(const QString& property) override;
protected:
virtual void applyTransformTo(Transform& transform, bool force = false) override;
virtual bool applyTransformTo(Transform& transform, bool force = false) override;
};
#endif // hifi_Billboard3DOverlay_h

View file

@ -28,7 +28,7 @@ QVariant Billboardable::getProperty(const QString &property) {
return QVariant();
}
void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
bool Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
if (isFacingAvatar()) {
glm::vec3 billboardPos = transform.getTranslation();
glm::vec3 cameraPos = qApp->getCamera().getPosition();
@ -38,5 +38,7 @@ void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offse
glm::quat rotation(glm::vec3(elevation, azimuth, 0));
transform.setRotation(rotation);
transform.postRotate(offsetRotation);
return true;
}
return false;
}
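
For reference, the elevation/azimuth construction that the hunk above elides can be sketched as a standalone helper. This is a hedged reconstruction from the surrounding lines; the sign conventions in the real Billboardable code may differ:

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

// Build a rotation that points an object toward the camera from Euler
// angles: yaw (azimuth) about +Y, pitch (elevation) about +X, no roll.
glm::quat faceCamera(const glm::vec3& objectPos, const glm::vec3& cameraPos) {
    glm::vec3 look = cameraPos - objectPos;
    float azimuth = atan2f(look.x, look.z);
    float elevation = -asinf(look.y / glm::length(look));
    return glm::quat(glm::vec3(elevation, azimuth, 0.0f)); // (pitch, yaw, roll)
}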

View file

@ -27,7 +27,7 @@ protected:
void setProperties(const QVariantMap& properties);
QVariant getProperty(const QString& property);
void pointTransformAtCamera(Transform& transform, glm::quat offsetRotation = {1, 0, 0, 0});
bool pointTransformAtCamera(Transform& transform, glm::quat offsetRotation = {1, 0, 0, 0});
private:
bool _isFacingAvatar = false;

View file

@ -80,8 +80,8 @@ void Circle3DOverlay::render(RenderArgs* args) {
Q_ASSERT(args->_batch);
auto& batch = *args->_batch;
if (args->_pipeline) {
batch.setPipeline(args->_pipeline->pipeline);
if (args->_shapePipeline) {
batch.setPipeline(args->_shapePipeline->pipeline);
}
// FIXME: The line width of _lineWidth is not supported anymore, we'll need a workaround

View file

@ -65,15 +65,15 @@ void Cube3DOverlay::render(RenderArgs* args) {
transform.setTranslation(position);
transform.setRotation(rotation);
auto geometryCache = DependencyManager::get<GeometryCache>();
auto pipeline = args->_pipeline;
if (!pipeline) {
pipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
auto shapePipeline = args->_shapePipeline;
if (!shapePipeline) {
shapePipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
}
if (_isSolid) {
transform.setScale(dimensions);
batch->setModelTransform(transform);
geometryCache->renderSolidCubeInstance(*batch, cubeColor, pipeline);
geometryCache->renderSolidCubeInstance(args, *batch, cubeColor, shapePipeline);
} else {
geometryCache->bindSimpleProgram(*batch, false, false, false, true, true);
if (getIsDashedLine()) {
@ -109,7 +109,7 @@ void Cube3DOverlay::render(RenderArgs* args) {
} else {
transform.setScale(dimensions);
batch->setModelTransform(transform);
geometryCache->renderWireCubeInstance(*batch, cubeColor, pipeline);
geometryCache->renderWireCubeInstance(args, *batch, cubeColor, shapePipeline);
}
}
}

View file

@ -99,10 +99,14 @@ void Image3DOverlay::render(RenderArgs* args) {
const float MAX_COLOR = 255.0f;
xColor color = getColor();
float alpha = getAlpha();
Transform transform = getTransform();
applyTransformTo(transform, true);
setTransform(transform);
bool transformChanged = applyTransformTo(transform, true);
// If the transform is not modified, setting the transform to
// itself will cause drift over time due to floating point errors.
if (transformChanged) {
setTransform(transform);
}
transform.postScale(glm::vec3(getDimensions(), 1.0f));
batch->setModelTransform(transform);
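
The transformChanged guard exists because writing back an unchanged transform re-encodes it each frame, and single-precision round trips are not exact, which is what accumulates into visible drift. A tiny self-contained illustration of the underlying effect:

#include <cstdio>

int main() {
    float x = 0.0f;
    for (int i = 0; i < 10; ++i) {
        x += 0.1f; // 0.1 is not exactly representable in binary
    }
    printf("%.9f\n", x); // prints roughly 1.000000119, not 1.0
    return 0;
}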

View file

@ -61,7 +61,7 @@ void PanelAttachable::setProperties(const QVariantMap& properties) {
}
}
void PanelAttachable::applyTransformTo(Transform& transform, bool force) {
bool PanelAttachable::applyTransformTo(Transform& transform, bool force) {
if (force || usecTimestampNow() > _transformExpiry) {
const quint64 TRANSFORM_UPDATE_PERIOD = 100000; // frequency is 10 Hz
_transformExpiry = usecTimestampNow() + TRANSFORM_UPDATE_PERIOD;
@ -71,7 +71,9 @@ void PanelAttachable::applyTransformTo(Transform& transform, bool force) {
transform.postTranslate(getOffsetPosition());
transform.postRotate(getOffsetRotation());
transform.postScale(getOffsetScale());
return true;
}
#endif
}
return false;
}
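
The expiry logic above is a generic throttle: recompute at most every TRANSFORM_UPDATE_PERIOD microseconds unless forced, and now also report whether anything changed. A reduced sketch of the pattern (usecTimestampNow() is the codebase's microsecond clock; maybeRecompute is an illustrative name):

bool maybeRecompute(bool force, quint64& expiry) {
    const quint64 TRANSFORM_UPDATE_PERIOD = 100000; // 10 Hz
    if (!force && usecTimestampNow() <= expiry) {
        return false; // cached result is still fresh
    }
    expiry = usecTimestampNow() + TRANSFORM_UPDATE_PERIOD;
    // ... recompute the transform here ...
    return true; // lets callers like Image3DOverlay skip setTransform()
}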

View file

@ -67,7 +67,7 @@ protected:
/// set position, rotation and scale on transform based on offsets, and parent panel offsets
/// if force is false, only apply transform if it hasn't been applied in the last 0.1 seconds
virtual void applyTransformTo(Transform& transform, bool force = false);
virtual bool applyTransformTo(Transform& transform, bool force = false);
quint64 _transformExpiry = 0;
private:

View file

@ -45,17 +45,17 @@ void Shape3DOverlay::render(RenderArgs* args) {
transform.setTranslation(position);
transform.setRotation(rotation);
auto geometryCache = DependencyManager::get<GeometryCache>();
auto pipeline = args->_pipeline;
if (!pipeline) {
pipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
auto shapePipeline = args->_shapePipeline;
if (!shapePipeline) {
shapePipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
}
transform.setScale(dimensions);
batch->setModelTransform(transform);
if (_isSolid) {
geometryCache->renderSolidShapeInstance(*batch, _shape, cubeColor, pipeline);
geometryCache->renderSolidShapeInstance(args, *batch, _shape, cubeColor, shapePipeline);
} else {
geometryCache->renderWireShapeInstance(*batch, _shape, cubeColor, pipeline);
geometryCache->renderWireShapeInstance(args, *batch, _shape, cubeColor, shapePipeline);
}
}
}

View file

@ -44,15 +44,15 @@ void Sphere3DOverlay::render(RenderArgs* args) {
batch->setModelTransform(transform);
auto geometryCache = DependencyManager::get<GeometryCache>();
auto pipeline = args->_pipeline;
if (!pipeline) {
pipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
auto shapePipeline = args->_shapePipeline;
if (!shapePipeline) {
shapePipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
}
if (_isSolid) {
geometryCache->renderSolidSphereInstance(*batch, sphereColor, pipeline);
geometryCache->renderSolidSphereInstance(args, *batch, sphereColor, shapePipeline);
} else {
geometryCache->renderWireSphereInstance(*batch, sphereColor, pipeline);
geometryCache->renderWireSphereInstance(args, *batch, sphereColor, shapePipeline);
}
}
}

View file

@ -137,8 +137,8 @@ void Text3DOverlay::render(RenderArgs* args) {
// Text renderer sets its own pipeline,
_textRenderer->draw(batch, 0, 0, getText(), textColor, glm::vec2(-1.0f), getDrawInFront());
// so before we continue, we must reset the pipeline
batch.setPipeline(args->_pipeline->pipeline);
args->_pipeline->prepare(batch);
batch.setPipeline(args->_shapePipeline->pipeline);
args->_shapePipeline->prepare(batch, args);
}
const render::ShapeKey Text3DOverlay::getShapeKey() {

View file

@ -451,7 +451,7 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
// In Qt 5.9 mouse events must be sent before touch events to make sure some QtQuick components will
// receive mouse events
#if QT_VERSION >= QT_VERSION_CHECK(5, 9, 0)
if (!(this->_pressed && event.getType() == PointerEvent::Move)) {
if (event.getType() == PointerEvent::Move) {
QMouseEvent* mouseEvent = new QMouseEvent(mouseType, windowPoint, windowPoint, windowPoint, button, buttons, Qt::NoModifier);
QCoreApplication::postEvent(_webSurface->getWindow(), mouseEvent);
}
@ -459,11 +459,10 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
QCoreApplication::postEvent(_webSurface->getWindow(), touchEvent);
#if QT_VERSION < QT_VERSION_CHECK(5, 9, 0)
if (this->_pressed && event.getType() == PointerEvent::Move) {
return;
if (event.getType() == PointerEvent::Move) {
QMouseEvent* mouseEvent = new QMouseEvent(mouseType, windowPoint, windowPoint, windowPoint, button, buttons, Qt::NoModifier);
QCoreApplication::postEvent(_webSurface->getWindow(), mouseEvent);
}
QMouseEvent* mouseEvent = new QMouseEvent(mouseType, windowPoint, windowPoint, windowPoint, button, buttons, Qt::NoModifier);
QCoreApplication::postEvent(_webSurface->getWindow(), mouseEvent);
#endif
}
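
The intent of the reshuffled branches above: synthesize a mouse event for Move pointer events, posting it before the touch event on Qt 5.9+ and after it on older Qt. A hedged sketch of the forwarding step ('window' and the parameters stand in for the overlay's real state; Qt takes ownership of posted events):

#include <QCoreApplication>
#include <QMouseEvent>
#include <QPointF>
#include <QWindow>

void forwardMoveAsMouse(QWindow* window, const QPointF& windowPoint,
                        Qt::MouseButton button, Qt::MouseButtons buttons) {
    QMouseEvent* mouseEvent = new QMouseEvent(QEvent::MouseMove, windowPoint,
        windowPoint, windowPoint, button, buttons, Qt::NoModifier);
    QCoreApplication::postEvent(window, mouseEvent); // deleted by Qt after delivery
}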

View file

@ -404,8 +404,18 @@ void Rig::setJointRotation(int index, bool valid, const glm::quat& rotation, flo
}
bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm::vec3 translation, glm::quat rotation) const {
if (isIndexValid(jointIndex)) {
position = (rotation * _internalPoseSet._absolutePoses[jointIndex].trans()) + translation;
if (QThread::currentThread() == thread()) {
if (isIndexValid(jointIndex)) {
position = (rotation * _internalPoseSet._absolutePoses[jointIndex].trans()) + translation;
return true;
} else {
return false;
}
}
QReadLocker readLock(&_externalPoseSetLock);
if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._absolutePoses.size()) {
position = (rotation * _externalPoseSet._absolutePoses[jointIndex].trans()) + translation;
return true;
} else {
return false;
@ -413,17 +423,31 @@ bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm:
}
bool Rig::getJointPosition(int jointIndex, glm::vec3& position) const {
if (isIndexValid(jointIndex)) {
position = _internalPoseSet._absolutePoses[jointIndex].trans();
return true;
if (QThread::currentThread() == thread()) {
if (isIndexValid(jointIndex)) {
position = _internalPoseSet._absolutePoses[jointIndex].trans();
return true;
} else {
return false;
}
} else {
return false;
return getAbsoluteJointTranslationInRigFrame(jointIndex, position);
}
}
bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const glm::quat& rotation) const {
if (isIndexValid(jointIndex)) {
result = rotation * _internalPoseSet._absolutePoses[jointIndex].rot();
if (QThread::currentThread() == thread()) {
if (isIndexValid(jointIndex)) {
result = rotation * _internalPoseSet._absolutePoses[jointIndex].rot();
return true;
} else {
return false;
}
}
QReadLocker readLock(&_externalPoseSetLock);
if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._absolutePoses.size()) {
result = rotation * _externalPoseSet._absolutePoses[jointIndex].rot();
return true;
} else {
return false;
@ -431,6 +455,15 @@ bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const
}
bool Rig::getJointRotation(int jointIndex, glm::quat& rotation) const {
if (QThread::currentThread() == thread()) {
if (isIndexValid(jointIndex)) {
rotation = _internalPoseSet._relativePoses[jointIndex].rot();
return true;
} else {
return false;
}
}
QReadLocker readLock(&_externalPoseSetLock);
if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._relativePoses.size()) {
rotation = _externalPoseSet._relativePoses[jointIndex].rot();
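
The pattern running through these Rig getters: the owning thread reads _internalPoseSet directly with no lock, while any other thread takes a read lock and consults the published _externalPoseSet copy. A reduced skeleton of that shape (PoseSource is an illustrative class, not the real Rig):

#include <QObject>
#include <QReadWriteLock>
#include <QThread>
#include <vector>

class PoseSource : public QObject {
public:
    bool getValue(int index, float& out) const {
        if (QThread::currentThread() == thread()) {
            // owning thread: internal data, no lock needed
            if (index >= 0 && index < (int)_internal.size()) {
                out = _internal[index];
                return true;
            }
            return false;
        }
        // other threads: read the published copy under a read lock
        QReadLocker readLock(&_externalLock);
        if (index >= 0 && index < (int)_external.size()) {
            out = _external[index];
            return true;
        }
        return false;
    }
private:
    std::vector<float> _internal;  // touched only by the owning thread
    mutable QReadWriteLock _externalLock;
    std::vector<float> _external;  // copy published for other threads
};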

View file

@ -92,6 +92,7 @@ void AudioClient::checkDevices() {
auto inputDevices = getAvailableDevices(QAudio::AudioInput);
auto outputDevices = getAvailableDevices(QAudio::AudioOutput);
Lock lock(_deviceMutex);
if (inputDevices != _inputDevices) {
_inputDevices.swap(inputDevices);
emit devicesChanged(QAudio::AudioInput, _inputDevices);
@ -210,9 +211,9 @@ AudioClient::AudioClient() :
connect(&_receivedAudioStream, &MixedProcessedAudioStream::processSamples,
this, &AudioClient::processReceivedSamples, Qt::DirectConnection);
connect(this, &AudioClient::changeDevice, this, [=](const QAudioDeviceInfo& outputDeviceInfo) {
connect(this, &AudioClient::changeDevice, this, [=](const QAudioDeviceInfo& outputDeviceInfo) {
qCDebug(audioclient) << "got AudioClient::changeDevice signal, about to call switchOutputToAudioDevice() outputDeviceInfo: [" << outputDeviceInfo.deviceName() << "]";
switchOutputToAudioDevice(outputDeviceInfo);
switchOutputToAudioDevice(outputDeviceInfo);
});
connect(&_receivedAudioStream, &InboundAudioStream::mismatchedAudioCodec, this, &AudioClient::handleMismatchAudioFormat);
@ -261,10 +262,10 @@ void AudioClient::cleanupBeforeQuit() {
// so this must be explicitly, synchronously stopped
static ConditionalGuard guard;
if (QThread::currentThread() != thread()) {
// This will likely be called from the main thread, but we don't want to do blocking queued calls
// from the main thread, so we use a normal auto-connection invoke, and then use a conditional to wait
// This will likely be called from the main thread, but we don't want to do blocking queued calls
// from the main thread, so we use a normal auto-connection invoke, and then use a conditional to wait
// for completion
// The effect is the same, yes, but we actually want to avoid the use of Qt::BlockingQueuedConnection
// The effect is the same, yes, but we actually want to avoid the use of Qt::BlockingQueuedConnection
// in the code
QMetaObject::invokeMethod(this, "cleanupBeforeQuit");
guard.wait();
@ -630,7 +631,7 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessag
message->readPrimitive(&bitset);
bool hasReverb = oneAtBit(bitset, HAS_REVERB_BIT);
if (hasReverb) {
float reverbTime, wetLevel;
message->readPrimitive(&reverbTime);
@ -728,7 +729,7 @@ void AudioClient::Gate::flush() {
void AudioClient::handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message) {
if (!_muted) {
toggleMute();
// have the audio scripting interface emit a signal to say we were muted by the mixer
emit mutedByMixer();
}
@ -737,7 +738,7 @@ void AudioClient::handleNoisyMutePacket(QSharedPointer<ReceivedMessage> message)
void AudioClient::handleMuteEnvironmentPacket(QSharedPointer<ReceivedMessage> message) {
glm::vec3 position;
float radius;
message->readPrimitive(&position);
message->readPrimitive(&radius);
@ -770,7 +771,7 @@ void AudioClient::handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> mess
}
void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
_selectedCodecName = selectedCodecName;
qCDebug(audioclient) << "Selected Codec:" << _selectedCodecName;
@ -787,7 +788,7 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
for (auto& plugin : codecPlugins) {
if (_selectedCodecName == plugin->getName()) {
_codec = plugin;
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
qCDebug(audioclient) << "Selected Codec Plugin:" << _codec.get();
break;
@ -795,7 +796,7 @@ void AudioClient::selectAudioFormat(const QString& selectedCodecName) {
}
}
bool AudioClient::switchAudioDevice(QAudio::Mode mode, const QAudioDeviceInfo& deviceInfo) {
auto device = deviceInfo;
@ -1203,11 +1204,11 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// lock the injectors
Lock lock(_injectorsMutex);
QVector<AudioInjector*> injectorsToRemove;
QVector<AudioInjectorPointer> injectorsToRemove;
memset(mixBuffer, 0, AudioConstants::NETWORK_FRAME_SAMPLES_STEREO * sizeof(float));
for (AudioInjector* injector : _activeLocalAudioInjectors) {
for (const AudioInjectorPointer& injector : _activeLocalAudioInjectors) {
// the lock guarantees that injectorBuffer, if found, is invariant
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
@ -1220,7 +1221,7 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
// get one frame from the injector
memset(_localScratchBuffer, 0, bytesToRead);
if (0 < injectorBuffer->readData((char*)_localScratchBuffer, bytesToRead)) {
if (injector->isAmbisonic()) {
// no distance attenuation
@ -1249,36 +1250,36 @@ bool AudioClient::mixLocalAudioInjectors(float* mixBuffer) {
for (int i = 0; i < AudioConstants::NETWORK_FRAME_SAMPLES_STEREO; i++) {
mixBuffer[i] += convertToFloat(_localScratchBuffer[i]) * gain;
}
} else {
// calculate distance, gain and azimuth for HRTF
glm::vec3 relativePosition = injector->getPosition() - _positionGetter();
float distance = glm::max(glm::length(relativePosition), EPSILON);
float gain = gainForSource(distance, injector->getVolume());
float gain = gainForSource(distance, injector->getVolume());
float azimuth = azimuthForSource(relativePosition);
// mono gets spatialized into mixBuffer
injector->getLocalHRTF().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
injector->getLocalHRTF().render(_localScratchBuffer, mixBuffer, HRTF_DATASET_INDEX,
azimuth, distance, gain, AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL);
}
} else {
qCDebug(audioclient) << "injector has no more data, marking finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
} else {
qCDebug(audioclient) << "injector has no local buffer, marking as finished for removal";
injector->finishLocalInjection();
injectorsToRemove.append(injector);
}
}
for (AudioInjector* injector : injectorsToRemove) {
for (const AudioInjectorPointer& injector : injectorsToRemove) {
qCDebug(audioclient) << "removing injector";
_activeLocalAudioInjectors.removeOne(injector);
}
@ -1369,7 +1370,7 @@ void AudioClient::setIsStereoInput(bool isStereoInput) {
}
}
bool AudioClient::outputLocalInjector(AudioInjector* injector) {
bool AudioClient::outputLocalInjector(const AudioInjectorPointer& injector) {
AudioInjectorLocalBuffer* injectorBuffer = injector->getLocalBuffer();
if (injectorBuffer) {
// local injectors are on the AudioInjectorsThread, so we must guard access
@ -1711,9 +1712,9 @@ int AudioClient::calculateNumberOfFrameSamples(int numBytes) const {
float AudioClient::azimuthForSource(const glm::vec3& relativePosition) {
glm::quat inverseOrientation = glm::inverse(_orientationGetter());
glm::vec3 rotatedSourcePosition = inverseOrientation * relativePosition;
// project the rotated source position vector onto the XZ plane
rotatedSourcePosition.y = 0.0f;
@ -1721,15 +1722,15 @@ float AudioClient::azimuthForSource(const glm::vec3& relativePosition) {
float rotatedSourcePositionLength2 = glm::length2(rotatedSourcePosition);
if (rotatedSourcePositionLength2 > SOURCE_DISTANCE_THRESHOLD) {
// produce an oriented angle about the y-axis
glm::vec3 direction = rotatedSourcePosition * (1.0f / fastSqrtf(rotatedSourcePositionLength2));
float angle = fastAcosf(glm::clamp(-direction.z, -1.0f, 1.0f)); // UNIT_NEG_Z is "forward"
return (direction.x < 0.0f) ? -angle : angle;
} else {
} else {
// no azimuth if they are in same spot
return 0.0f;
return 0.0f;
}
}
@ -1869,9 +1870,9 @@ void AudioClient::startThread() {
moveToNewNamedThread(this, "Audio Thread", [this] { start(); });
}
void AudioClient::setInputVolume(float volume) {
if (_audioInput && volume != (float)_audioInput->volume()) {
_audioInput->setVolume(volume);
void AudioClient::setInputVolume(float volume) {
if (_audioInput && volume != (float)_audioInput->volume()) {
_audioInput->setVolume(volume);
emit inputVolumeChanged(_audioInput->volume());
}
}
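
Restated with stock glm/cmath calls (fastSqrtf/fastAcosf are the codebase's fast approximations, and the distance threshold here is an assumed placeholder), the azimuth computation above reduces to:

#include <cmath>
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

float azimuthForSource(const glm::quat& listenerOrientation,
                       const glm::vec3& relativePosition) {
    glm::vec3 rotated = glm::inverse(listenerOrientation) * relativePosition;
    rotated.y = 0.0f; // project onto the XZ plane
    float len2 = glm::dot(rotated, rotated);
    if (len2 <= 1e-6f) { // assumed threshold; source is at the listener
        return 0.0f;
    }
    glm::vec3 dir = rotated / sqrtf(len2);
    float angle = acosf(glm::clamp(-dir.z, -1.0f, 1.0f)); // -Z is "forward"
    return (dir.x < 0.0f) ? -angle : angle;
}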

View file

@ -143,7 +143,7 @@ public:
Q_INVOKABLE void setAvatarBoundingBoxParameters(glm::vec3 corner, glm::vec3 scale);
bool outputLocalInjector(AudioInjector* injector) override;
bool outputLocalInjector(const AudioInjectorPointer& injector) override;
QAudioDeviceInfo getActiveAudioDevice(QAudio::Mode mode) const;
QList<QAudioDeviceInfo> getAudioDevices(QAudio::Mode mode) const;
@ -380,7 +380,7 @@ private:
bool _hasReceivedFirstPacket { false };
QVector<AudioInjector*> _activeLocalAudioInjectors;
QVector<AudioInjectorPointer> _activeLocalAudioInjectors;
bool _isPlayingBackRecording { false };

View file

@ -18,6 +18,7 @@
#include <udt/PacketHeaders.h>
#include "AudioInjectorOptions.h"
#include "AudioInjector.h"
class AudioInjector;
class AudioInjectorLocalBuffer;
@ -35,7 +36,7 @@ public:
// threadsafe
// moves injector->getLocalBuffer() to another thread (so removes its parent)
// take care to delete it in ~AudioInjector, as Qt parenting semantics will not work
virtual bool outputLocalInjector(AudioInjector* injector) = 0;
virtual bool outputLocalInjector(const AudioInjectorPointer& injector) = 0;
public slots:
virtual bool shouldLoopbackInjectors() { return false; }

View file

@ -92,11 +92,6 @@ void AudioInjector::finish() {
emit finished();
deleteLocalBuffer();
if (stateHas(AudioInjectorState::PendingDelete)) {
// we've been asked to delete after finishing, trigger a deleteLater here
deleteLater();
}
}
void AudioInjector::restart() {
@ -132,7 +127,7 @@ void AudioInjector::restart() {
}
}
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(AudioInjector*)) {
bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&)) {
_state = AudioInjectorState::NotFinished;
int byteOffset = 0;
@ -150,7 +145,7 @@ bool AudioInjector::inject(bool(AudioInjectorManager::*injection)(AudioInjector*
bool success = true;
if (!_options.localOnly) {
auto injectorManager = DependencyManager::get<AudioInjectorManager>();
if (!(*injectorManager.*injection)(this)) {
if (!(*injectorManager.*injection)(sharedFromThis())) {
success = false;
finishNetworkInjection();
}
@ -173,7 +168,7 @@ bool AudioInjector::injectLocally() {
// call this function on the AudioClient's thread
// this will move the local buffer's thread to the LocalInjectorThread
success = _localAudioInterface->outputLocalInjector(this);
success = _localAudioInterface->outputLocalInjector(sharedFromThis());
if (!success) {
qCDebug(audio) << "AudioInjector::injectLocally could not output locally via _localAudioInterface";
@ -418,20 +413,16 @@ void AudioInjector::triggerDeleteAfterFinish() {
}
if (stateHas(AudioInjectorState::Finished)) {
stopAndDeleteLater();
stop();
} else {
_state |= AudioInjectorState::PendingDelete;
}
}
void AudioInjector::stopAndDeleteLater() {
stop();
QMetaObject::invokeMethod(this, "deleteLater", Qt::QueuedConnection);
}
AudioInjector* AudioInjector::playSound(SharedSoundPointer sound, const float volume, const float stretchFactor, const glm::vec3 position) {
AudioInjectorPointer AudioInjector::playSound(SharedSoundPointer sound, const float volume,
const float stretchFactor, const glm::vec3 position) {
if (!sound || !sound->isReady()) {
return nullptr;
return AudioInjectorPointer();
}
AudioInjectorOptions options;
@ -462,8 +453,8 @@ AudioInjector* AudioInjector::playSound(SharedSoundPointer sound, const float vo
return playSoundAndDelete(resampled, options);
}
AudioInjector* AudioInjector::playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjector* sound = playSound(buffer, options);
AudioInjectorPointer AudioInjector::playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjectorPointer sound = playSound(buffer, options);
if (sound) {
sound->_state |= AudioInjectorState::PendingDelete;
@ -473,8 +464,9 @@ AudioInjector* AudioInjector::playSoundAndDelete(const QByteArray& buffer, const
}
AudioInjector* AudioInjector::playSound(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjector* injector = new AudioInjector(buffer, options);
AudioInjectorPointer AudioInjector::playSound(const QByteArray& buffer, const AudioInjectorOptions options) {
AudioInjectorPointer injector = AudioInjectorPointer::create(buffer, options);
if (!injector->inject(&AudioInjectorManager::threadInjector)) {
qWarning() << "AudioInjector::playSound failed to thread injector";
}

View file

@ -32,6 +32,8 @@
class AbstractAudioInterface;
class AudioInjectorManager;
class AudioInjector;
using AudioInjectorPointer = QSharedPointer<AudioInjector>;
enum class AudioInjectorState : uint8_t {
@ -46,19 +48,19 @@ AudioInjectorState operator& (AudioInjectorState lhs, AudioInjectorState rhs);
AudioInjectorState& operator|= (AudioInjectorState& lhs, AudioInjectorState rhs);
// In order to make scripting cleaner for the AudioInjector, the script now holds on to the AudioInjector object
// until it dies.
class AudioInjector : public QObject {
// until it dies.
class AudioInjector : public QObject, public QEnableSharedFromThis<AudioInjector> {
Q_OBJECT
public:
AudioInjector(const Sound& sound, const AudioInjectorOptions& injectorOptions);
AudioInjector(const QByteArray& audioData, const AudioInjectorOptions& injectorOptions);
~AudioInjector();
bool isFinished() const { return (stateHas(AudioInjectorState::Finished)); }
int getCurrentSendOffset() const { return _currentSendOffset; }
void setCurrentSendOffset(int currentSendOffset) { _currentSendOffset = currentSendOffset; }
AudioInjectorLocalBuffer* getLocalBuffer() const { return _localBuffer; }
AudioHRTF& getLocalHRTF() { return _localHRTF; }
AudioFOA& getLocalFOA() { return _localFOA; }
@ -72,36 +74,36 @@ public:
bool stateHas(AudioInjectorState state) const ;
static void setLocalAudioInterface(AbstractAudioInterface* audioInterface) { _localAudioInterface = audioInterface; }
static AudioInjector* playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjector* playSound(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjector* playSound(SharedSoundPointer sound, const float volume, const float stretchFactor, const glm::vec3 position);
static AudioInjectorPointer playSoundAndDelete(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjectorPointer playSound(const QByteArray& buffer, const AudioInjectorOptions options);
static AudioInjectorPointer playSound(SharedSoundPointer sound, const float volume,
const float stretchFactor, const glm::vec3 position);
public slots:
void restart();
void stop();
void triggerDeleteAfterFinish();
void stopAndDeleteLater();
const AudioInjectorOptions& getOptions() const { return _options; }
void setOptions(const AudioInjectorOptions& options);
float getLoudness() const { return _loudness; }
bool isPlaying() const { return !stateHas(AudioInjectorState::Finished); }
void finish();
void finishLocalInjection();
void finishNetworkInjection();
signals:
void finished();
void restarting();
private:
int64_t injectNextFrame();
bool inject(bool(AudioInjectorManager::*injection)(AudioInjector*));
bool inject(bool(AudioInjectorManager::*injection)(const AudioInjectorPointer&));
bool injectLocally();
void deleteLocalBuffer();
static AbstractAudioInterface* _localAudioInterface;
QByteArray _audioData;
@ -112,17 +114,17 @@ private:
int _currentSendOffset { 0 };
std::unique_ptr<NLPacket> _currentPacket { nullptr };
AudioInjectorLocalBuffer* _localBuffer { nullptr };
int64_t _nextFrame { 0 };
std::unique_ptr<QElapsedTimer> _frameTimer { nullptr };
quint16 _outgoingSequenceNumber { 0 };
// when the injector is local, we need this
AudioHRTF _localHRTF;
AudioFOA _localFOA;
friend class AudioInjectorManager;
};
Q_DECLARE_METATYPE(AudioInjector*)
Q_DECLARE_METATYPE(AudioInjectorPointer)
#endif // hifi_AudioInjector_h
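
The move from raw AudioInjector* to AudioInjectorPointer leans on QEnableSharedFromThis: a member function can hand out a QSharedPointer that shares ownership with whoever created the object, which is what inject() does via sharedFromThis(). A minimal illustration of the mechanism (Worker is a made-up class):

#include <QSharedPointer>

class Worker : public QEnableSharedFromThis<Worker> {
public:
    QSharedPointer<Worker> self() {
        // valid only if the object is already owned by a QSharedPointer,
        // e.g. one produced by QSharedPointer<Worker>::create()
        return sharedFromThis();
    }
};

// usage:
//   auto worker = QSharedPointer<Worker>::create();
//   QSharedPointer<Worker> alias = worker->self(); // shares ownership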

View file

@ -21,26 +21,26 @@
AudioInjectorManager::~AudioInjectorManager() {
_shouldStop = true;
Lock lock(_injectorsMutex);
// make sure any still living injectors are stopped and deleted
while (!_injectors.empty()) {
// grab the injector at the front
auto& timePointerPair = _injectors.top();
// ask it to stop and be deleted
timePointerPair.second->stopAndDeleteLater();
timePointerPair.second->stop();
_injectors.pop();
}
// get rid of the lock now that we've stopped all living injectors
lock.unlock();
// in case the thread is waiting for injectors, wake it up now
_injectorReady.notify_one();
// quit and wait on the manager thread, if we ever created it
if (_thread) {
_thread->quit();
@ -51,10 +51,10 @@ AudioInjectorManager::~AudioInjectorManager() {
void AudioInjectorManager::createThread() {
_thread = new QThread;
_thread->setObjectName("Audio Injector Thread");
// when the thread is started, have it call our run to handle injection of audio
connect(_thread, &QThread::started, this, &AudioInjectorManager::run, Qt::DirectConnection);
// start the thread
_thread->start();
}
@ -63,20 +63,20 @@ void AudioInjectorManager::run() {
while (!_shouldStop) {
// wait until the next injector is ready, or until we get a new injector given to us
Lock lock(_injectorsMutex);
if (_injectors.size() > 0) {
// when does the next injector need to send a frame?
// do we get to wait or should we just go for it now?
auto timeInjectorPair = _injectors.top();
auto nextTimestamp = timeInjectorPair.first;
int64_t difference = int64_t(nextTimestamp - usecTimestampNow());
if (difference > 0) {
_injectorReady.wait_for(lock, std::chrono::microseconds(difference));
}
if (_injectors.size() > 0) {
// loop through the injectors in the map and send whatever frames need to go out
auto front = _injectors.top();
@ -90,7 +90,7 @@ void AudioInjectorManager::run() {
// either way we're popping this injector off - get a copy first
auto injector = front.second;
_injectors.pop();
if (!injector.isNull()) {
// this is an injector that's ready to go, have it send a frame now
auto nextCallDelta = injector->injectNextFrame();
@ -100,7 +100,7 @@ void AudioInjectorManager::run() {
heldInjectors.emplace(heldInjectors.end(), usecTimestampNow() + nextCallDelta, injector);
}
}
if (_injectors.size() > 0) {
front = _injectors.top();
} else {
@ -120,10 +120,10 @@ void AudioInjectorManager::run() {
// we have no current injectors, wait until we get at least one before we do anything
_injectorReady.wait(lock);
}
// unlock the lock in case something in process events needs to modify the queue
lock.unlock();
QCoreApplication::processEvents();
}
}
@ -139,36 +139,36 @@ bool AudioInjectorManager::wouldExceedLimits() { // Should be called inside of a
return false;
}
bool AudioInjectorManager::threadInjector(AudioInjector* injector) {
bool AudioInjectorManager::threadInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::threadInjector asked to thread injector but is shutting down.";
return false;
}
// guard the injectors vector with a mutex
Lock lock(_injectorsMutex);
if (wouldExceedLimits()) {
return false;
} else {
if (!_thread) {
createThread();
}
// move the injector to the QThread
injector->moveToThread(_thread);
// add the injector to the queue with a send timestamp of now
_injectors.emplace(usecTimestampNow(), InjectorQPointer { injector });
_injectors.emplace(usecTimestampNow(), injector);
// notify our wait condition so we can inject two frames for this injector immediately
_injectorReady.notify_one();
return true;
}
}
bool AudioInjectorManager::restartFinishedInjector(AudioInjector* injector) {
bool AudioInjectorManager::restartFinishedInjector(const AudioInjectorPointer& injector) {
if (_shouldStop) {
qCDebug(audio) << "AudioInjectorManager::restartFinishedInjector asked to restart injector but is shutting down.";
return false;
@ -181,8 +181,8 @@ bool AudioInjectorManager::restartFinishedInjector(AudioInjector* injector) {
return false;
} else {
// add the injector to the queue with a send timestamp of now
_injectors.emplace(usecTimestampNow(), InjectorQPointer { injector });
_injectors.emplace(usecTimestampNow(), injector);
// notify our wait condition so we can inject two frames for this injector immediately
_injectorReady.notify_one();
}
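
The loop above is a classic deadline scheduler: a min-heap ordered by send time, a condition variable that sleeps until the earliest deadline, and a notify whenever a new injector is queued. The core shape, reduced to standard C++ (item payloads and the service step are elided):

#include <chrono>
#include <condition_variable>
#include <deque>
#include <mutex>
#include <queue>
#include <utility>

using Clock = std::chrono::steady_clock;
using Entry = std::pair<Clock::time_point, int>; // (deadline, item id)

struct EarlierFirst {
    bool operator()(const Entry& a, const Entry& b) const {
        return a.first > b.first; // smallest deadline on top
    }
};

std::priority_queue<Entry, std::deque<Entry>, EarlierFirst> queue_;
std::mutex mutex_;
std::condition_variable ready_;

void runOnce() {
    std::unique_lock<std::mutex> lock(mutex_);
    if (!queue_.empty()) {
        auto deadline = queue_.top().first;
        if (deadline > Clock::now()) {
            ready_.wait_until(lock, deadline); // wake at deadline or on notify
        }
        // ... pop entries whose deadline has passed and service them ...
    } else {
        ready_.wait(lock); // nothing queued yet: wait for the first item
    }
}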

View file

@ -23,7 +23,7 @@
#include <DependencyManager.h>
class AudioInjector;
#include "AudioInjector.h"
class AudioInjectorManager : public QObject, public Dependency {
Q_OBJECT
@ -33,39 +33,38 @@ public:
private slots:
void run();
private:
using InjectorQPointer = QPointer<AudioInjector>;
using TimeInjectorPointerPair = std::pair<uint64_t, InjectorQPointer>;
using TimeInjectorPointerPair = std::pair<uint64_t, AudioInjectorPointer>;
struct greaterTime {
bool operator() (const TimeInjectorPointerPair& x, const TimeInjectorPointerPair& y) const {
return x.first > y.first;
}
};
using InjectorQueue = std::priority_queue<TimeInjectorPointerPair,
std::deque<TimeInjectorPointerPair>,
greaterTime>;
using Mutex = std::mutex;
using Lock = std::unique_lock<Mutex>;
bool threadInjector(AudioInjector* injector);
bool restartFinishedInjector(AudioInjector* injector);
bool threadInjector(const AudioInjectorPointer& injector);
bool restartFinishedInjector(const AudioInjectorPointer& injector);
void notifyInjectorReadyCondition() { _injectorReady.notify_one(); }
bool wouldExceedLimits();
AudioInjectorManager() {};
AudioInjectorManager(const AudioInjectorManager&) = delete;
AudioInjectorManager& operator=(const AudioInjectorManager&) = delete;
void createThread();
QThread* _thread { nullptr };
bool _shouldStop { false };
InjectorQueue _injectors;
Mutex _injectorsMutex;
std::condition_variable _injectorReady;
friend class AudioInjector;
};

View file

@ -610,7 +610,7 @@ void Avatar::render(RenderArgs* renderArgs) {
if (showCollisionShapes && shouldRenderHead(renderArgs) && _skeletonModel->isRenderable()) {
PROFILE_RANGE_BATCH(batch, __FUNCTION__":skeletonBoundingCollisionShapes");
const float BOUNDING_SHAPE_ALPHA = 0.7f;
_skeletonModel->renderBoundingCollisionShapes(*renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
_skeletonModel->renderBoundingCollisionShapes(renderArgs, *renderArgs->_batch, getUniformScale(), BOUNDING_SHAPE_ALPHA);
}
if (showReceiveStats || showNamesAboveHeads) {
@ -1008,49 +1008,87 @@ glm::vec3 Avatar::getAbsoluteJointTranslationInObjectFrame(int index) const {
}
}
int Avatar::getJointIndex(const QString& name) const {
if (QThread::currentThread() != thread()) {
int result;
BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getJointIndex",
Q_RETURN_ARG(int, result), Q_ARG(const QString&, name));
return result;
void Avatar::invalidateJointIndicesCache() const {
QWriteLocker writeLock(&_modelJointIndicesCacheLock);
_modelJointsCached = false;
}
void Avatar::withValidJointIndicesCache(std::function<void()> const& worker) const {
QReadLocker readLock(&_modelJointIndicesCacheLock);
if (_modelJointsCached) {
worker();
} else {
readLock.unlock();
{
QWriteLocker writeLock(&_modelJointIndicesCacheLock);
if (!_modelJointsCached) {
_modelJointIndicesCache.clear();
if (_skeletonModel && _skeletonModel->isActive()) {
_modelJointIndicesCache = _skeletonModel->getFBXGeometry().jointIndices;
_modelJointsCached = true;
}
}
worker();
}
}
}
int Avatar::getJointIndex(const QString& name) const {
int result = getFauxJointIndex(name);
if (result != -1) {
return result;
}
return _skeletonModel->isActive() ? _skeletonModel->getFBXGeometry().getJointIndex(name) : -1;
withValidJointIndicesCache([&]() {
if (_modelJointIndicesCache.contains(name)) {
result = _modelJointIndicesCache[name] - 1;
}
});
return result;
}
QStringList Avatar::getJointNames() const {
if (QThread::currentThread() != thread()) {
QStringList result;
BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getJointNames",
Q_RETURN_ARG(QStringList, result));
return result;
}
return _skeletonModel->isActive() ? _skeletonModel->getFBXGeometry().getJointNames() : QStringList();
QStringList result;
withValidJointIndicesCache([&]() {
// find out how large the vector needs to be
int maxJointIndex = -1;
QHashIterator<QString, int> k(_modelJointIndicesCache);
while (k.hasNext()) {
k.next();
int index = k.value();
if (index > maxJointIndex) {
maxJointIndex = index;
}
}
// iterate through the hash and put joint names
// into the vector at their indices
QVector<QString> resultVector(maxJointIndex + 1);
QHashIterator<QString, int> i(_modelJointIndicesCache);
while (i.hasNext()) {
i.next();
int index = i.value();
resultVector[index] = i.key();
}
// convert to QList and drop out blanks
result = resultVector.toList();
QMutableListIterator<QString> j(result);
while (j.hasNext()) {
QString jointName = j.next();
if (jointName.isEmpty()) {
j.remove();
}
}
});
return result;
}
glm::vec3 Avatar::getJointPosition(int index) const {
if (QThread::currentThread() != thread()) {
glm::vec3 position;
BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getJointPosition",
Q_RETURN_ARG(glm::vec3, position), Q_ARG(const int, index));
return position;
}
glm::vec3 position;
_skeletonModel->getJointPositionInWorldFrame(index, position);
return position;
}
glm::vec3 Avatar::getJointPosition(const QString& name) const {
if (QThread::currentThread() != thread()) {
glm::vec3 position;
BLOCKING_INVOKE_METHOD(const_cast<Avatar*>(this), "getJointPosition",
Q_RETURN_ARG(glm::vec3, position), Q_ARG(const QString&, name));
return position;
}
glm::vec3 position;
_skeletonModel->getJointPositionInWorldFrame(getJointIndex(name), position);
return position;
@ -1071,6 +1109,8 @@ void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
}
void Avatar::setModelURLFinished(bool success) {
invalidateJointIndicesCache();
if (!success && _skeletonModelURL != AvatarData::defaultFullAvatarModelUrl()) {
const int MAX_SKELETON_DOWNLOAD_ATTEMPTS = 4; // NOTE: we don't want to be as generous as ResourceCache is, we only want 4 attempts
if (_skeletonModel->getResourceDownloadAttemptsRemaining() <= 0 ||
@ -1440,8 +1480,7 @@ void Avatar::addToScene(AvatarSharedPointer myHandle, const render::ScenePointer
if (scene) {
auto nodelist = DependencyManager::get<NodeList>();
if (showAvatars
&& !nodelist->isIgnoringNode(getSessionUUID())
&& !nodelist->isRadiusIgnoringNode(getSessionUUID())) {
&& !nodelist->isIgnoringNode(getSessionUUID())) {
render::Transaction transaction;
addToScene(myHandle, scene, transaction);
scene->enqueueTransaction(transaction);

View file

@ -267,6 +267,13 @@ protected:
virtual void maybeUpdateSessionDisplayNameFromTransport(const QString& sessionDisplayName) override { _sessionDisplayName = sessionDisplayName; } // don't use no-op setter!
SkeletonModelPointer _skeletonModel;
void invalidateJointIndicesCache() const;
void withValidJointIndicesCache(std::function<void()> const& worker) const;
mutable QHash<QString, int> _modelJointIndicesCache;
mutable QReadWriteLock _modelJointIndicesCacheLock;
mutable bool _modelJointsCached { false };
glm::vec3 _skeletonOffset;
std::vector<std::shared_ptr<Model>> _attachmentModels;
std::vector<std::shared_ptr<Model>> _attachmentsToRemove;
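
withValidJointIndicesCache() is a double-checked cache behind a QReadWriteLock: try under a read lock, and on a miss upgrade to a write lock, re-check, and fill. The re-check matters because another thread may fill the cache between the read unlock and the write lock. In miniature (CachedThing is illustrative):

#include <QReadWriteLock>
#include <functional>

class CachedThing {
public:
    void withCache(const std::function<void()>& worker) const {
        {
            QReadLocker readLock(&_lock);
            if (_cached) {
                worker();
                return;
            }
        }
        QWriteLocker writeLock(&_lock);
        if (!_cached) { // re-check: someone may have filled it already
            // ... rebuild the cached data here ...
            _cached = true;
        }
        worker();
    }
private:
    mutable QReadWriteLock _lock;
    mutable bool _cached { false };
};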

View file

@ -322,20 +322,20 @@ void SkeletonModel::computeBoundingShape() {
_boundingCapsuleLocalOffset = invScale * offset;
}
void SkeletonModel::renderBoundingCollisionShapes(gpu::Batch& batch, float scale, float alpha) {
void SkeletonModel::renderBoundingCollisionShapes(RenderArgs* args, gpu::Batch& batch, float scale, float alpha) {
auto geometryCache = DependencyManager::get<GeometryCache>();
// draw a blue sphere at the capsule top point
glm::vec3 topPoint = _translation + getRotation() * (scale * (_boundingCapsuleLocalOffset + (0.5f * _boundingCapsuleHeight) * Vectors::UNIT_Y));
batch.setModelTransform(Transform().setTranslation(topPoint).postScale(scale * _boundingCapsuleRadius));
geometryCache->renderSolidSphereInstance(batch, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
geometryCache->renderSolidSphereInstance(args, batch, glm::vec4(0.6f, 0.6f, 0.8f, alpha));
// draw a yellow sphere at the capsule bottom point
glm::vec3 bottomPoint = topPoint - glm::vec3(0.0f, scale * _boundingCapsuleHeight, 0.0f);
glm::vec3 axis = topPoint - bottomPoint;
batch.setModelTransform(Transform().setTranslation(bottomPoint).postScale(scale * _boundingCapsuleRadius));
geometryCache->renderSolidSphereInstance(batch, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
geometryCache->renderSolidSphereInstance(args, batch, glm::vec4(0.8f, 0.8f, 0.6f, alpha));
// draw a green cylinder between the two points
glm::vec3 origin(0.0f);
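
The endpoint math above, pulled out for clarity: the capsule's top point is the local offset plus half the capsule height along the avatar's rotated +Y axis, and the bottom point sits one full (scaled) height below it. Helper names here are illustrative:

#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>

glm::vec3 capsuleTopPoint(const glm::vec3& translation, const glm::quat& rotation,
                          float scale, const glm::vec3& localOffset, float height) {
    return translation + rotation * (scale * (localOffset + (0.5f * height) * glm::vec3(0.0f, 1.0f, 0.0f)));
}

glm::vec3 capsuleBottomPoint(const glm::vec3& topPoint, float scale, float height) {
    return topPoint - glm::vec3(0.0f, scale * height, 0.0f);
}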

View file

@ -96,7 +96,7 @@ public:
/// \return whether or not the head was found.
glm::vec3 getDefaultEyeModelPosition() const;
void renderBoundingCollisionShapes(gpu::Batch& batch, float scale, float alpha);
void renderBoundingCollisionShapes(RenderArgs* args, gpu::Batch& batch, float scale, float alpha);
float getBoundingCapsuleRadius() const { return _boundingCapsuleRadius; }
float getBoundingCapsuleHeight() const { return _boundingCapsuleHeight; }
const glm::vec3 getBoundingCapsuleOffset() const { return _boundingCapsuleLocalOffset; }

View file

@ -57,6 +57,27 @@ static const float DEFAULT_AVATAR_DENSITY = 1000.0f; // density of water
#define ASSERT(COND) do { if (!(COND)) { abort(); } } while(0)
size_t AvatarDataPacket::maxFaceTrackerInfoSize(size_t numBlendshapeCoefficients) {
return FACE_TRACKER_INFO_SIZE + numBlendshapeCoefficients * sizeof(float);
}
size_t AvatarDataPacket::maxJointDataSize(size_t numJoints) {
const size_t validityBitsSize = (size_t)std::ceil(numJoints / (float)BITS_IN_BYTE);
size_t totalSize = sizeof(uint8_t); // numJoints
totalSize += validityBitsSize; // Orientations mask
totalSize += numJoints * sizeof(SixByteQuat); // Orientations
totalSize += validityBitsSize; // Translations mask
totalSize += numJoints * sizeof(SixByteTrans); // Translations
size_t NUM_FAUX_JOINT = 2;
totalSize += NUM_FAUX_JOINT * (sizeof(SixByteQuat) + sizeof(SixByteTrans)); // faux joints
return totalSize;
}
AvatarData::AvatarData() :
SpatiallyNestable(NestableType::Avatar, QUuid()),
_handPosition(0.0f),
@ -73,19 +94,6 @@ AvatarData::AvatarData() :
setBodyPitch(0.0f);
setBodyYaw(-90.0f);
setBodyRoll(0.0f);
ASSERT(sizeof(AvatarDataPacket::Header) == AvatarDataPacket::HEADER_SIZE);
ASSERT(sizeof(AvatarDataPacket::AvatarGlobalPosition) == AvatarDataPacket::AVATAR_GLOBAL_POSITION_SIZE);
ASSERT(sizeof(AvatarDataPacket::AvatarLocalPosition) == AvatarDataPacket::AVATAR_LOCAL_POSITION_SIZE);
ASSERT(sizeof(AvatarDataPacket::AvatarBoundingBox) == AvatarDataPacket::AVATAR_BOUNDING_BOX_SIZE);
ASSERT(sizeof(AvatarDataPacket::AvatarOrientation) == AvatarDataPacket::AVATAR_ORIENTATION_SIZE);
ASSERT(sizeof(AvatarDataPacket::AvatarScale) == AvatarDataPacket::AVATAR_SCALE_SIZE);
ASSERT(sizeof(AvatarDataPacket::LookAtPosition) == AvatarDataPacket::LOOK_AT_POSITION_SIZE);
ASSERT(sizeof(AvatarDataPacket::AudioLoudness) == AvatarDataPacket::AUDIO_LOUDNESS_SIZE);
ASSERT(sizeof(AvatarDataPacket::SensorToWorldMatrix) == AvatarDataPacket::SENSOR_TO_WORLD_SIZE);
ASSERT(sizeof(AvatarDataPacket::AdditionalFlags) == AvatarDataPacket::ADDITIONAL_FLAGS_SIZE);
ASSERT(sizeof(AvatarDataPacket::ParentInfo) == AvatarDataPacket::PARENT_INFO_SIZE);
ASSERT(sizeof(AvatarDataPacket::FaceTrackerInfo) == AvatarDataPacket::FACE_TRACKER_INFO_SIZE);
}
AvatarData::~AvatarData() {
@ -169,12 +177,12 @@ float AvatarData::getDistanceBasedMinTranslationDistance(glm::vec3 viewerPositio
// we want to track outbound data in this case...
QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail) {
QByteArray AvatarData::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
AvatarDataPacket::HasFlags hasFlagsOut;
auto lastSentTime = _lastToByteArray;
_lastToByteArray = usecTimestampNow();
return AvatarData::toByteArray(dataDetail, lastSentTime, getLastSentJointData(),
hasFlagsOut, false, false, glm::vec3(0), nullptr,
hasFlagsOut, dropFaceTracking, false, glm::vec3(0), nullptr,
&_outboundDataRate);
}
@ -189,15 +197,11 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
lazyInitHeadData();
QByteArray avatarDataByteArray(udt::MAX_PACKET_SIZE, 0);
unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(avatarDataByteArray.data());
unsigned char* startPosition = destinationBuffer;
// special case, if we were asked for no data, then just include the flags all set to nothing
if (dataDetail == NoData) {
AvatarDataPacket::HasFlags packetStateFlags = 0;
memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
return avatarDataByteArray.left(sizeof(packetStateFlags));
QByteArray avatarDataByteArray(reinterpret_cast<char*>(&packetStateFlags), sizeof(packetStateFlags));
return avatarDataByteArray;
}
// FIXME -
@ -258,6 +262,15 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
hasJointData = sendAll || !sendMinimum;
}
const size_t byteArraySize = AvatarDataPacket::MAX_CONSTANT_HEADER_SIZE +
(hasFaceTrackerInfo ? AvatarDataPacket::maxFaceTrackerInfoSize(_headData->getNumSummedBlendshapeCoefficients()) : 0) +
(hasJointData ? AvatarDataPacket::maxJointDataSize(_jointData.size()) : 0);
QByteArray avatarDataByteArray((int)byteArraySize, 0);
unsigned char* destinationBuffer = reinterpret_cast<unsigned char*>(avatarDataByteArray.data());
unsigned char* startPosition = destinationBuffer;
// Leading flags, to indicate how much data is actually included in the packet...
AvatarDataPacket::HasFlags packetStateFlags =
(hasAvatarGlobalPosition ? AvatarDataPacket::PACKET_HAS_AVATAR_GLOBAL_POSITION : 0)
@ -478,12 +491,15 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
unsigned char* validityPosition = destinationBuffer;
unsigned char validity = 0;
int validityBit = 0;
int numValidityBytes = (int)std::ceil(numJoints / (float)BITS_IN_BYTE);
#ifdef WANT_DEBUG
int rotationSentCount = 0;
unsigned char* beforeRotations = destinationBuffer;
#endif
destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
if (sentJointDataOut) {
sentJointDataOut->resize(_jointData.size()); // Make sure the destination is resized before using it
}
@ -503,6 +519,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
#ifdef WANT_DEBUG
rotationSentCount++;
#endif
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);
if (sentJointDataOut) {
auto& jointDataOut = *sentJointDataOut;
jointDataOut[i].rotation = data.rotation;
@ -512,28 +530,14 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}
if (++validityBit == BITS_IN_BYTE) {
*destinationBuffer++ = validity;
*validityPosition++ = validity;
validityBit = validity = 0;
}
}
if (validityBit != 0) {
*destinationBuffer++ = validity;
*validityPosition++ = validity;
}
validityBit = 0;
validity = *validityPosition++;
for (int i = 0; i < _jointData.size(); i++) {
const JointData& data = _jointData[i];
if (validity & (1 << validityBit)) {
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);
}
if (++validityBit == BITS_IN_BYTE) {
validityBit = 0;
validity = *validityPosition++;
}
}
// joint translation data
validityPosition = destinationBuffer;
validity = 0;
@ -544,6 +548,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
unsigned char* beforeTranslations = destinationBuffer;
#endif
destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
float minTranslation = !distanceAdjust ? AVATAR_MIN_TRANSLATION : getDistanceBasedMinTranslationDistance(viewerPosition);
float maxTranslationDimension = 0.0;
@ -562,6 +568,9 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);
destinationBuffer +=
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
if (sentJointDataOut) {
auto& jointDataOut = *sentJointDataOut;
jointDataOut[i].translation = data.translation;
@ -571,27 +580,13 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
}
if (++validityBit == BITS_IN_BYTE) {
*destinationBuffer++ = validity;
*validityPosition++ = validity;
validityBit = validity = 0;
}
}
if (validityBit != 0) {
*destinationBuffer++ = validity;
}
validityBit = 0;
validity = *validityPosition++;
for (int i = 0; i < _jointData.size(); i++) {
const JointData& data = _jointData[i];
if (validity & (1 << validityBit)) {
destinationBuffer +=
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
}
if (++validityBit == BITS_IN_BYTE) {
validityBit = 0;
validity = *validityPosition++;
}
*validityPosition++ = validity;
}
// faux joints
@ -624,6 +619,12 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
}
int avatarDataSize = destinationBuffer - startPosition;
if (avatarDataSize > (int)byteArraySize) {
qCCritical(avatars) << "AvatarData::toByteArray buffer overflow"; // We've overflowed into the heap
ASSERT(false);
}
return avatarDataByteArray.left(avatarDataSize);
}
// NOTE: This is never used in a "distanceAdjust" mode, so it's ok that it doesn't use a variable minimum rotation/translation
@ -1462,12 +1463,12 @@ int AvatarData::getJointIndex(const QString& name) const {
return result;
}
QReadLocker readLock(&_jointDataLock);
return _jointIndices.value(name) - 1;
return _fstJointIndices.value(name) - 1;
}
QStringList AvatarData::getJointNames() const {
QReadLocker readLock(&_jointDataLock);
return _jointNames;
return _fstJointNames;
}
glm::quat AvatarData::getOrientationOutbound() const {
@ -1720,14 +1721,14 @@ void AvatarData::setJointMappingsFromNetworkReply() {
bool ok;
int jointIndex = line.mid(secondSeparatorIndex + 1).trimmed().toInt(&ok);
if (ok) {
while (_jointNames.size() < jointIndex + 1) {
_jointNames.append(QString());
while (_fstJointNames.size() < jointIndex + 1) {
_fstJointNames.append(QString());
}
_jointNames[jointIndex] = jointName;
_fstJointNames[jointIndex] = jointName;
}
}
for (int i = 0; i < _jointNames.size(); i++) {
_jointIndices.insert(_jointNames.at(i), i + 1);
for (int i = 0; i < _fstJointNames.size(); i++) {
_fstJointIndices.insert(_fstJointNames.at(i), i + 1);
}
}
@ -1743,6 +1744,24 @@ void AvatarData::sendAvatarDataPacket() {
bool cullSmallData = (randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
auto dataDetail = cullSmallData ? SendAllData : CullSmallData;
QByteArray avatarByteArray = toByteArrayStateful(dataDetail);
int maximumByteArraySize = NLPacket::maxPayloadSize(PacketType::AvatarData) - sizeof(AvatarDataSequenceNumber);
if (avatarByteArray.size() > maximumByteArraySize) {
qCWarning(avatars) << "toByteArrayStateful() resulted in very large buffer:" << avatarByteArray.size() << "... attempt to drop facial data";
avatarByteArray = toByteArrayStateful(dataDetail, true);
if (avatarByteArray.size() > maximumByteArraySize) {
qCWarning(avatars) << "toByteArrayStateful() without facial data resulted in very large buffer:" << avatarByteArray.size() << "... reduce to MinimumData";
avatarByteArray = toByteArrayStateful(MinimumData, true);
if (avatarByteArray.size() > maximumByteArraySize) {
qCWarning(avatars) << "toByteArrayStateful() MinimumData resulted in very large buffer:" << avatarByteArray.size() << "... FAIL!!";
return;
}
}
}
doneEncoding(cullSmallData);
static AvatarDataSequenceNumber sequenceNumber = 0;
@ -1781,8 +1800,8 @@ void AvatarData::sendIdentityPacket() {
void AvatarData::updateJointMappings() {
{
QWriteLocker writeLock(&_jointDataLock);
_jointIndices.clear();
_jointNames.clear();
_fstJointIndices.clear();
_fstJointNames.clear();
_jointData.clear();
}
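
The sizing logic behind the new pre-allocated buffer and the oversize fallback ladder: maxJointDataSize() above is a strict worst case, one count byte, two per-joint validity bitmasks rounded up to whole bytes, six bytes per rotation and per translation, plus the two always-sent faux joints. A worked standalone version:

#include <cstddef>
#include <cstdint>

size_t maxJointDataSize(size_t numJoints) {
    const size_t BITS_IN_BYTE = 8;
    const size_t SIX_BYTES = 6; // SixByteQuat / SixByteTrans
    size_t validityBytes = (numJoints + BITS_IN_BYTE - 1) / BITS_IN_BYTE;
    size_t total = sizeof(uint8_t);        // numJoints
    total += validityBytes;                // rotation validity mask
    total += numJoints * SIX_BYTES;        // rotations
    total += validityBytes;                // translation validity mask
    total += numJoints * SIX_BYTES;        // translations
    total += 2 * (SIX_BYTES + SIX_BYTES);  // two faux joints, rot + trans
    return total;
}
// e.g. 100 joints: 1 + 13 + 600 + 13 + 600 + 24 = 1251 bytes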

View file

@ -140,35 +140,41 @@ namespace AvatarDataPacket {
const HasFlags PACKET_HAS_JOINT_DATA = 1U << 11;
const size_t AVATAR_HAS_FLAGS_SIZE = 2;
using SixByteQuat = uint8_t[6];
using SixByteTrans = uint8_t[6];
// NOTE: AvatarDataPackets start with a uint16_t sequence number that is not reflected in the Header structure.
PACKED_BEGIN struct Header {
HasFlags packetHasFlags; // state flags, indicating which additional records are included in the packet
} PACKED_END;
const size_t HEADER_SIZE = 2;
static_assert(sizeof(Header) == HEADER_SIZE, "AvatarDataPacket::Header size doesn't match.");
PACKED_BEGIN struct AvatarGlobalPosition {
float globalPosition[3]; // avatar's position
} PACKED_END;
const size_t AVATAR_GLOBAL_POSITION_SIZE = 12;
static_assert(sizeof(AvatarGlobalPosition) == AVATAR_GLOBAL_POSITION_SIZE, "AvatarDataPacket::AvatarGlobalPosition size doesn't match.");
PACKED_BEGIN struct AvatarBoundingBox {
float avatarDimensions[3]; // avatar's bounding box in world space units, but relative to the position.
float boundOriginOffset[3]; // offset from the position of the avatar to the origin of the bounding box
} PACKED_END;
const size_t AVATAR_BOUNDING_BOX_SIZE = 24;
static_assert(sizeof(AvatarBoundingBox) == AVATAR_BOUNDING_BOX_SIZE, "AvatarDataPacket::AvatarBoundingBox size doesn't match.");
using SixByteQuat = uint8_t[6];
PACKED_BEGIN struct AvatarOrientation {
SixByteQuat avatarOrientation; // encoded and compressed by packOrientationQuatToSixBytes()
} PACKED_END;
const size_t AVATAR_ORIENTATION_SIZE = 6;
static_assert(sizeof(AvatarOrientation) == AVATAR_ORIENTATION_SIZE, "AvatarDataPacket::AvatarOrientation size doesn't match.");
PACKED_BEGIN struct AvatarScale {
SmallFloat scale; // avatar's scale, compressed by packFloatRatioToTwoByte()
} PACKED_END;
const size_t AVATAR_SCALE_SIZE = 2;
static_assert(sizeof(AvatarScale) == AVATAR_SCALE_SIZE, "AvatarDataPacket::AvatarScale size doesn't match.");
PACKED_BEGIN struct LookAtPosition {
float lookAtPosition[3]; // world space position that eyes are focusing on.
@ -180,11 +186,13 @@ namespace AvatarDataPacket {
// POTENTIAL SAVINGS - 12 bytes
} PACKED_END;
const size_t LOOK_AT_POSITION_SIZE = 12;
static_assert(sizeof(LookAtPosition) == LOOK_AT_POSITION_SIZE, "AvatarDataPacket::LookAtPosition size doesn't match.");
PACKED_BEGIN struct AudioLoudness {
uint8_t audioLoudness; // current loudness of microphone compressed with packFloatGainToByte()
} PACKED_END;
const size_t AUDIO_LOUDNESS_SIZE = 1;
static_assert(sizeof(AudioLoudness) == AUDIO_LOUDNESS_SIZE, "AvatarDataPacket::AudioLoudness size doesn't match.");
PACKED_BEGIN struct SensorToWorldMatrix {
// FIXME - these 20 bytes are only used by viewers if my avatar has "attachments"
@ -199,11 +207,13 @@ namespace AvatarDataPacket {
// relative to the avatar position.
} PACKED_END;
const size_t SENSOR_TO_WORLD_SIZE = 20;
static_assert(sizeof(SensorToWorldMatrix) == SENSOR_TO_WORLD_SIZE, "AvatarDataPacket::SensorToWorldMatrix size doesn't match.");
PACKED_BEGIN struct AdditionalFlags {
uint8_t flags; // additional flags: hand state, key state, eye tracking
} PACKED_END;
const size_t ADDITIONAL_FLAGS_SIZE = 1;
static_assert(sizeof(AdditionalFlags) == ADDITIONAL_FLAGS_SIZE, "AvatarDataPacket::AdditionalFlags size doesn't match.");
// only present if HAS_REFERENTIAL flag is set in AvatarInfo.flags
PACKED_BEGIN struct ParentInfo {
@ -211,6 +221,7 @@ namespace AvatarDataPacket {
uint16_t parentJointIndex;
} PACKED_END;
const size_t PARENT_INFO_SIZE = 18;
static_assert(sizeof(ParentInfo) == PARENT_INFO_SIZE, "AvatarDataPacket::ParentInfo size doesn't match.");
// will only ever be included if the avatar has a parent but can change independent of changes to parent info
// and so we keep it a separate record
@ -218,6 +229,22 @@ namespace AvatarDataPacket {
float localPosition[3]; // parent frame translation of the avatar
} PACKED_END;
const size_t AVATAR_LOCAL_POSITION_SIZE = 12;
static_assert(sizeof(AvatarLocalPosition) == AVATAR_LOCAL_POSITION_SIZE, "AvatarDataPacket::AvatarLocalPosition size doesn't match.");
const size_t MAX_CONSTANT_HEADER_SIZE = HEADER_SIZE +
AVATAR_GLOBAL_POSITION_SIZE +
AVATAR_BOUNDING_BOX_SIZE +
AVATAR_ORIENTATION_SIZE +
AVATAR_SCALE_SIZE +
LOOK_AT_POSITION_SIZE +
AUDIO_LOUDNESS_SIZE +
SENSOR_TO_WORLD_SIZE +
ADDITIONAL_FLAGS_SIZE +
PARENT_INFO_SIZE +
AVATAR_LOCAL_POSITION_SIZE;
// variable length structure follows
// only present if IS_FACE_TRACKER_CONNECTED flag is set in AvatarInfo.flags
PACKED_BEGIN struct FaceTrackerInfo {
@ -229,8 +256,9 @@ namespace AvatarDataPacket {
// float blendshapeCoefficients[numBlendshapeCoefficients];
} PACKED_END;
const size_t FACE_TRACKER_INFO_SIZE = 17;
static_assert(sizeof(FaceTrackerInfo) == FACE_TRACKER_INFO_SIZE, "AvatarDataPacket::FaceTrackerInfo size doesn't match.");
size_t maxFaceTrackerInfoSize(size_t numBlendshapeCoefficients);
// variable length structure follows
/*
struct JointData {
uint8_t numJoints;
@ -240,6 +268,7 @@ namespace AvatarDataPacket {
SixByteTrans translation[numValidTranslations]; // encoded and compressed by packFloatVec3ToSignedTwoByteFixed()
};
*/
size_t maxJointDataSize(size_t numJoints);
}
static const float MAX_AVATAR_SCALE = 1000.0f;
@ -387,7 +416,7 @@ public:
SendAllData
} AvatarDataDetail;
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail);
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false);
virtual QByteArray toByteArray(AvatarDataDetail dataDetail, quint64 lastSentTime, const QVector<JointData>& lastSentJointData,
AvatarDataPacket::HasFlags& hasFlagsOut, bool dropFaceTracking, bool distanceAdjust, glm::vec3 viewerPosition,
@ -704,8 +733,8 @@ protected:
QString _displayName;
QString _sessionDisplayName { };
QHash<QString, int> _jointIndices; ///< 1-based, since zero is returned for missing keys
QStringList _jointNames; ///< in order of depth-first traversal
QHash<QString, int> _fstJointIndices; ///< 1-based, since zero is returned for missing keys
QStringList _fstJointNames; ///< in order of depth-first traversal
quint64 _errorLogExpiry; ///< the future time at which to log an error

View file

@ -170,13 +170,6 @@ void AvatarHashMap::processKillAvatar(QSharedPointer<ReceivedMessage> message, S
removeAvatar(sessionUUID, reason);
}
void AvatarHashMap::processExitingSpaceBubble(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
// read the node id
QUuid sessionUUID = QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
auto nodeList = DependencyManager::get<NodeList>();
nodeList->radiusIgnoreNodeBySessionID(sessionUUID, false);
}
void AvatarHashMap::removeAvatar(const QUuid& sessionUUID, KillAvatarReason removalReason) {
QWriteLocker locker(&_hashLock);

View file

@ -60,7 +60,6 @@ protected slots:
void processAvatarDataPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processAvatarIdentityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processKillAvatar(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
void processExitingSpaceBubble(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode);
protected:
AvatarHashMap();

View file

@ -83,6 +83,11 @@ static const QMap<QString, int>& getBlendshapesLookupMap() {
return blendshapeLookupMap;
}
int HeadData::getNumSummedBlendshapeCoefficients() const {
int maxSize = std::max(_blendshapeCoefficients.size(), _transientBlendshapeCoefficients.size());
return maxSize;
}
const QVector<float>& HeadData::getSummedBlendshapeCoefficients() {
int maxSize = std::max(_blendshapeCoefficients.size(), _transientBlendshapeCoefficients.size());
if (_summedBlendshapeCoefficients.size() != maxSize) {
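// (hunk truncated here) A plausible continuation, as a hypothetical sketch:
// resize _summedBlendshapeCoefficients to maxSize, then for each index i store
//   _blendshapeCoefficients.value(i, 0.0f) + _transientBlendshapeCoefficients.value(i, 0.0f)
// so entries missing from either source vector contribute zero.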

View file

@ -57,6 +57,7 @@ public:
void setBlendshape(QString name, float val);
const QVector<float>& getBlendshapeCoefficients() const { return _blendshapeCoefficients; }
const QVector<float>& getSummedBlendshapeCoefficients();
int getNumSummedBlendshapeCoefficients() const;
void setBlendshapeCoefficients(const QVector<float>& blendshapeCoefficients) { _blendshapeCoefficients = blendshapeCoefficients; }
const glm::vec3& getLookAtPosition() const { return _lookAtPosition; }

View file

@ -16,6 +16,7 @@
#include <QtOpenGL/QGLWidget>
#include <QtGui/QImage>
#include <QOpenGLFramebufferObject>
#if defined(Q_OS_MAC)
#include <OpenGL/CGLCurrent.h>
#endif
@ -41,7 +42,7 @@
#include <ui-plugins/PluginContainer.h>
#include <ui/Menu.h>
#include <CursorManager.h>
#include <TextureCache.h>
#include "CompositorHelper.h"
#include "Logging.h"
@ -55,7 +56,7 @@ out vec4 outFragColor;
float sRGBFloatToLinear(float value) {
const float SRGB_ELBOW = 0.04045;
return (value <= SRGB_ELBOW) ? value / 12.92 : pow((value + 0.055) / 1.055, 2.4);
}
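// Spot check of the curve above: 0.04 is below SRGB_ELBOW and maps to
// 0.04 / 12.92 ≈ 0.0031, while 0.5 maps to ((0.5 + 0.055) / 1.055)^2.4 ≈ 0.214,
// matching the IEC 61966-2-1 sRGB decoding transfer function.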
@ -121,10 +122,10 @@ public:
PROFILE_SET_THREAD_NAME("Present Thread");
// FIXME determine the best priority balance between this and the main thread...
// It may be dependent on the display plugin being used, since VR plugins should
// have higher priority on rendering (although we could say that the Oculus plugin
// doesn't need that since it has async timewarp).
// A higher priority here
setPriority(QThread::HighPriority);
OpenGLDisplayPlugin* currentPlugin{ nullptr };
Q_ASSERT(_context);
@ -233,7 +234,7 @@ public:
// Move the context back to the presentation thread
_context->moveToThread(this);
// restore control of the context to the presentation thread and signal
// the end of the operation
_finishedMainThreadOperation = true;
lock.unlock();
@ -291,7 +292,7 @@ bool OpenGLDisplayPlugin::activate() {
if (!RENDER_THREAD) {
RENDER_THREAD = _presentThread;
}
// Child classes may override this in order to do things like initialize
// libraries, etc
if (!internalActivate()) {
@ -411,7 +412,7 @@ void OpenGLDisplayPlugin::customizeContext() {
gpu::Shader::makeProgram(*program);
gpu::StatePointer state = gpu::StatePointer(new gpu::State());
state->setDepthTest(gpu::State::DepthTest(false));
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
_overlayPipeline = gpu::Pipeline::create(program, state);
@ -496,16 +497,48 @@ void OpenGLDisplayPlugin::submitFrame(const gpu::FramePointer& newFrame) {
_newFrameQueue.push(newFrame);
});
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor) {
renderFromTexture(batch, texture, viewport, scissor, gpu::FramebufferPointer());
}
void OpenGLDisplayPlugin::renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor, gpu::FramebufferPointer copyFbo /*=gpu::FramebufferPointer()*/) {
auto fbo = gpu::FramebufferPointer();
batch.enableStereo(false);
batch.resetViewTransform();
batch.setFramebuffer(gpu::FramebufferPointer());
batch.setFramebuffer(fbo);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, vec4(0));
batch.setStateScissorRect(scissor);
batch.setViewportTransform(viewport);
batch.setResourceTexture(0, texture);
batch.setPipeline(_presentPipeline);
batch.draw(gpu::TRIANGLE_STRIP, 4);
if (copyFbo) {
gpu::Vec4i copyFboRect(0, 0, copyFbo->getWidth(), copyFbo->getHeight());
gpu::Vec4i sourceRect(scissor.x, scissor.y, scissor.x + scissor.z, scissor.y + scissor.w);
float aspectRatio = (float)scissor.w / (float) scissor.z; // height/width
// scale width first
int xOffset = 0;
int yOffset = 0;
int newWidth = copyFbo->getWidth();
int newHeight = std::round(aspectRatio * (float) copyFbo->getWidth());
if (newHeight > copyFbo->getHeight()) {
// ok, so now fill height instead
newHeight = copyFbo->getHeight();
newWidth = std::round((float)copyFbo->getHeight() / aspectRatio);
xOffset = (copyFbo->getWidth() - newWidth) / 2;
} else {
yOffset = (copyFbo->getHeight() - newHeight) / 2;
}
gpu::Vec4i copyRect(xOffset, yOffset, xOffset + newWidth, yOffset + newHeight);
batch.setFramebuffer(copyFbo);
batch.resetViewTransform();
batch.setViewportTransform(copyFboRect);
batch.setStateScissorRect(copyFboRect);
batch.clearColorFramebuffer(gpu::Framebuffer::BUFFER_COLOR0, {0.0f, 0.0f, 0.0f, 1.0f});
batch.blit(fbo, sourceRect, copyFbo, copyRect);
}
}
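// Worked example of the fit logic above: copying a 1280x720 scissor region
// into a 1000x1000 copyFbo gives aspectRatio = 720 / 1280 = 0.5625, so
// newWidth = 1000 and newHeight = round(0.5625 * 1000) = 563, which fits;
// yOffset = (1000 - 563) / 2 = 218, i.e. the copy is letterboxed vertically.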
void OpenGLDisplayPlugin::updateFrameData() {
@ -686,7 +719,7 @@ void OpenGLDisplayPlugin::resetPresentRate() {
// _presentRate = RateCounter<100>();
}
float OpenGLDisplayPlugin::renderRate() const {
return _renderRate.rate();
}
@ -821,3 +854,53 @@ void OpenGLDisplayPlugin::updateCompositeFramebuffer() {
_compositeFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("OpenGLDisplayPlugin::composite", gpu::Element::COLOR_RGBA_32, renderSize.x, renderSize.y));
}
}
void OpenGLDisplayPlugin::copyTextureToQuickFramebuffer(NetworkTexturePointer networkTexture, QOpenGLFramebufferObject* target, GLsync* fenceSync) {
auto glBackend = const_cast<OpenGLDisplayPlugin&>(*this).getGLBackend();
withMainThreadContext([&] {
GLuint sourceTexture = glBackend->getTextureID(networkTexture->getGPUTexture());
GLuint targetTexture = target->texture();
GLuint fbo[2] {0, 0};
// need mipmaps for blitting texture
glGenerateTextureMipmap(sourceTexture);
// create 2 fbos (one for initial texture, second for scaled one)
glCreateFramebuffers(2, fbo);
// setup source fbo
glBindFramebuffer(GL_FRAMEBUFFER, fbo[0]);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, sourceTexture, 0);
GLint texWidth = networkTexture->getWidth();
GLint texHeight = networkTexture->getHeight();
// setup destination fbo
glBindFramebuffer(GL_FRAMEBUFFER, fbo[1]);
glFramebufferTexture2D(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_TEXTURE_2D, targetTexture, 0);
glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
glClear(GL_COLOR_BUFFER_BIT);
// maintain aspect ratio, filling the width first if possible. If that makes the height too
// large, fill the height instead. TODO: only do this when the texture changes
GLint newX = 0;
GLint newY = 0;
float aspectRatio = (float)texHeight / (float)texWidth;
GLint newWidth = target->width();
GLint newHeight = std::round(aspectRatio * (float) target->width());
if (newHeight > target->height()) {
newHeight = target->height();
newWidth = std::round((float)target->height() / aspectRatio);
newX = (target->width() - newWidth) / 2;
} else {
newY = (target->height() - newHeight) / 2;
}
glBlitNamedFramebuffer(fbo[0], fbo[1], 0, 0, texWidth, texHeight, newX, newY, newX + newWidth, newY + newHeight, GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT, GL_NEAREST);
// don't delete the textures!
glDeleteFramebuffers(2, fbo);
*fenceSync = glFenceSync(GL_SYNC_GPU_COMMANDS_COMPLETE, 0);
});
}
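// A hypothetical consumer-side sketch (not part of this change): before
// sampling `target`, the thread that owns it can wait on the fence written
// above and then release it:
//
//     glClientWaitSync(*fenceSync, GL_SYNC_FLUSH_COMMANDS_BIT,
//                      100 * 1000 * 1000); // wait up to 100 ms
//     glDeleteSync(*fenceSync);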

View file

@ -38,7 +38,7 @@ protected:
using Condition = std::condition_variable;
public:
~OpenGLDisplayPlugin();
// These must be final to ensure proper ordering of operations
// between the main thread and the presentation thread
bool activate() override final;
void deactivate() override final;
@ -79,6 +79,8 @@ public:
// Three threads, one for rendering, one for texture transfers, one reserved for the GL driver
int getRequiredThreadCount() const override { return 3; }
void copyTextureToQuickFramebuffer(NetworkTexturePointer source, QOpenGLFramebufferObject* target, GLsync* fenceSync) override;
protected:
friend class PresentThread;
@ -103,7 +105,7 @@ protected:
// Returns true on successful activation
virtual bool internalActivate() { return true; }
virtual void internalDeactivate() {}
// Returns true on successful activation of standby session
virtual bool activateStandBySession() { return true; }
virtual void deactivateSession() {}
@ -111,6 +113,7 @@ protected:
// Plugin specific functionality to send the composed scene to the output window or device
virtual void internalPresent();
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor, gpu::FramebufferPointer fbo);
void renderFromTexture(gpu::Batch& batch, const gpu::TexturePointer texture, glm::ivec4 viewport, const glm::ivec4 scissor);
virtual void updateFrameData();

View file

@ -134,7 +134,7 @@ void HmdDisplayPlugin::customizeContext() {
state->setBlendFunction(true,
gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::INV_SRC_ALPHA,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
gpu::Shader::BindingSet bindings;
bindings.insert({ "lineData", LINE_DATA_SLOT });;
gpu::Shader::makeProgram(*program, bindings);
@ -243,6 +243,8 @@ void HmdDisplayPlugin::internalPresent() {
glm::ivec4 viewport = getViewportForSourceSize(sourceSize);
glm::ivec4 scissor = viewport;
auto fbo = gpu::FramebufferPointer();
render([&](gpu::Batch& batch) {
if (_monoPreview) {
@ -285,11 +287,15 @@ void HmdDisplayPlugin::internalPresent() {
viewport = ivec4(scissorOffset - scaledShiftLeftBy, viewportOffset, viewportSizeX, viewportSizeY);
}
// TODO: only bother getting and passing in the hmdPreviewFramebuffer if the camera is on
fbo = DependencyManager::get<TextureCache>()->getHmdPreviewFramebuffer(windowSize.x, windowSize.y);
viewport.z *= 2;
}
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor);
renderFromTexture(batch, _compositeFramebuffer->getRenderBuffer(0), viewport, scissor, fbo);
});
swapBuffers();
} else if (_clearPreviewFlag) {
QImage image;
if (_vsyncEnabled) {
@ -312,7 +318,7 @@ void HmdDisplayPlugin::internalPresent() {
_previewTexture->assignStoredMip(0, image.byteCount(), image.constBits());
_previewTexture->setAutoGenerateMips(true);
}
auto viewport = getViewportForSourceSize(uvec2(_previewTexture->getDimensions()));
render([&](gpu::Batch& batch) {
@ -323,7 +329,7 @@ void HmdDisplayPlugin::internalPresent() {
}
postPreview();
// If preview is disabled, we need to check to see if the window size has changed
// and re-render the no-preview message
if (_disablePreview) {
auto window = _container->getPrimaryWidget();
@ -510,7 +516,7 @@ void HmdDisplayPlugin::OverlayRenderer::build() {
indices = std::make_shared<gpu::Buffer>();
//UV mapping source: http://www.mvps.org/directx/articles/spheremap.htm
static const float fov = CompositorHelper::VIRTUAL_UI_TARGET_FOV.y;
static const float aspectRatio = CompositorHelper::VIRTUAL_UI_ASPECT_RATIO;
static const uint16_t stacks = 128;
@ -672,7 +678,7 @@ bool HmdDisplayPlugin::setHandLaser(uint32_t hands, HandLaserMode mode, const ve
_handLasers[1] = info;
}
});
// FIXME defer to a child class plugin to determine if hand lasers are actually
// available based on the presence or absence of hand controllers
return true;
}
@ -687,7 +693,7 @@ bool HmdDisplayPlugin::setExtraLaser(HandLaserMode mode, const vec4& color, cons
_extraLaserStart = sensorSpaceStart;
});
// FIXME defer to a child class plugin to determine if hand lasers are actually
// available based on the presence or absence of hand controllers
return true;
}
@ -702,7 +708,7 @@ void HmdDisplayPlugin::compositeExtra() {
if (_presentHandPoses[0] == IDENTITY_MATRIX && _presentHandPoses[1] == IDENTITY_MATRIX && !_presentExtraLaser.valid()) {
return;
}
render([&](gpu::Batch& batch) {
batch.setFramebuffer(_compositeFramebuffer);
batch.setModelTransform(Transform());

View file

@ -378,7 +378,7 @@ void RenderableModelEntityItem::render(RenderArgs* args) {
auto shapeTransform = getTransformToCenter(success);
if (success) {
batch.setModelTransform(shapeTransform); // we want to include the scale as well
DependencyManager::get<GeometryCache>()->renderWireCubeInstance(batch, greenColor);
DependencyManager::get<GeometryCache>()->renderWireCubeInstance(args, batch, greenColor);
}
return;
}

View file

@ -13,14 +13,13 @@
#include <DependencyManager.h>
#include <PerfStat.h>
#include <GeometryCache.h>
#include <render/ShapePipeline.h>
#include <StencilMaskPass.h>
#include <AbstractViewStateInterface.h>
#include "EntitiesRendererLogging.h"
#include "RenderableParticleEffectEntityItem.h"
#include "untextured_particle_vert.h"
#include "untextured_particle_frag.h"
#include "textured_particle_vert.h"
#include "textured_particle_frag.h"
@ -29,6 +28,16 @@ class ParticlePayloadData {
public:
static const size_t VERTEX_PER_PARTICLE = 4;
static uint8_t CUSTOM_PIPELINE_NUMBER;
static render::ShapePipelinePointer shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key);
static void registerShapePipeline() {
if (!CUSTOM_PIPELINE_NUMBER) {
CUSTOM_PIPELINE_NUMBER = render::ShapePipeline::registerCustomShapePipelineFactory(shapePipelineFactory);
}
}
static std::weak_ptr<gpu::Pipeline> _texturedPipeline;
template<typename T>
struct InterpolationData {
T start;
@ -70,9 +79,6 @@ public:
offsetof(ParticlePrimitive, uv), gpu::Stream::PER_INSTANCE);
}
void setPipeline(PipelinePointer pipeline) { _pipeline = pipeline; }
const PipelinePointer& getPipeline() const { return _pipeline; }
const Transform& getModelTransform() const { return _modelTransform; }
void setModelTransform(const Transform& modelTransform) { _modelTransform = modelTransform; }
@ -90,15 +96,15 @@ public:
bool getVisibleFlag() const { return _visibleFlag; }
void setVisibleFlag(bool visibleFlag) { _visibleFlag = visibleFlag; }
void render(RenderArgs* args) const {
assert(_pipeline);
gpu::Batch& batch = *args->_batch;
batch.setPipeline(_pipeline);
if (_texture) {
batch.setResourceTexture(0, _texture);
} else {
batch.setResourceTexture(0, DependencyManager::get<TextureCache>()->getWhiteTexture());
}
batch.setModelTransform(_modelTransform);
@ -113,7 +119,6 @@ public:
protected:
Transform _modelTransform;
AABox _bound;
PipelinePointer _pipeline;
FormatPointer _vertexFormat { std::make_shared<Format>() };
BufferPointer _particleBuffer { std::make_shared<Buffer>() };
BufferView _uniformBuffer;
@ -142,23 +147,49 @@ namespace render {
payload->render(args);
}
}
template <>
const ShapeKey shapeGetShapeKey(const ParticlePayloadData::Pointer& payload) {
return render::ShapeKey::Builder().withCustom(ParticlePayloadData::CUSTOM_PIPELINE_NUMBER).withTranslucent().build();
}
}
uint8_t ParticlePayloadData::CUSTOM_PIPELINE_NUMBER = 0;
std::weak_ptr<gpu::Pipeline> ParticlePayloadData::_texturedPipeline;
render::ShapePipelinePointer ParticlePayloadData::shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key) {
auto texturedPipeline = _texturedPipeline.lock();
if (!texturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
PrepareStencil::testMask(*state);
auto vertShader = gpu::Shader::createVertex(std::string(textured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(textured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_texturedPipeline = texturedPipeline = gpu::Pipeline::create(program, state);
}
return std::make_shared<render::ShapePipeline>(texturedPipeline, nullptr, nullptr, nullptr);
}
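// Design note: _texturedPipeline is a static std::weak_ptr, so all particle
// payloads share a single GPU pipeline while any render::ShapePipeline still
// references it, yet it can be freed once the last reference drops; the
// factory then lazily rebuilds it on the next request.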
EntityItemPointer RenderableParticleEffectEntityItem::factory(const EntityItemID& entityID,
const EntityItemProperties& properties) {
auto entity = std::make_shared<RenderableParticleEffectEntityItem>(entityID);
entity->setProperties(properties);
// As we create the first particle system entity, let's register its special shapePipeline factory:
ParticlePayloadData::registerShapePipeline();
return entity;
}
RenderableParticleEffectEntityItem::RenderableParticleEffectEntityItem(const EntityItemID& entityItemID) :
ParticleEffectEntityItem(entityItemID) {
// lazy creation of particle system pipeline
if (!_untexturedPipeline || !_texturedPipeline) {
createPipelines();
}
}
bool RenderableParticleEffectEntityItem::addToScene(const EntityItemPointer& self,
@ -167,7 +198,6 @@ bool RenderableParticleEffectEntityItem::addToScene(const EntityItemPointer& sel
_scene = scene;
_renderItemId = _scene->allocateID();
auto particlePayloadData = std::make_shared<ParticlePayloadData>();
particlePayloadData->setPipeline(_untexturedPipeline);
auto renderPayload = std::make_shared<ParticlePayloadData::Payload>(particlePayloadData);
render::Item::Status::Getters statusGetters;
makeEntityItemStatusGetters(getThisPointer(), statusGetters);
@ -276,47 +306,14 @@ void RenderableParticleEffectEntityItem::updateRenderItem() {
if (_texture && _texture->isLoaded()) {
payload.setTexture(_texture->getGPUTexture());
payload.setPipeline(_texturedPipeline);
} else {
payload.setTexture(nullptr);
payload.setPipeline(_untexturedPipeline);
}
});
_scene->enqueueTransaction(transaction);
}
void RenderableParticleEffectEntityItem::createPipelines() {
if (!_untexturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
PrepareStencil::testMask(*state);
auto vertShader = gpu::Shader::createVertex(std::string(untextured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(untextured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_untexturedPipeline = gpu::Pipeline::create(program, state);
}
if (!_texturedPipeline) {
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, false, gpu::LESS_EQUAL);
state->setBlendFunction(true, gpu::State::SRC_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE,
gpu::State::FACTOR_ALPHA, gpu::State::BLEND_OP_ADD, gpu::State::ONE);
PrepareStencil::testMask(*state);
auto vertShader = gpu::Shader::createVertex(std::string(textured_particle_vert));
auto fragShader = gpu::Shader::createPixel(std::string(textured_particle_frag));
auto program = gpu::Shader::createProgram(vertShader, fragShader);
_texturedPipeline = gpu::Pipeline::create(program, state);
}
}
void RenderableParticleEffectEntityItem::notifyBoundChanged() {
if (!render::Item::isValidID(_renderItemId)) {
return;

View file

@ -34,16 +34,13 @@ protected:
virtual void locationChanged(bool tellPhysics = true) override { EntityItem::locationChanged(tellPhysics); notifyBoundChanged(); }
virtual void dimensionsChanged() override { EntityItem::dimensionsChanged(); notifyBoundChanged(); }
void notifyBoundChanged();
void createPipelines();
render::ScenePointer _scene;
render::ItemID _renderItemId{ render::Item::INVALID_ITEM_ID };
NetworkTexturePointer _texture;
gpu::PipelinePointer _untexturedPipeline;
gpu::PipelinePointer _texturedPipeline;
};

View file

@ -59,12 +59,8 @@
#include "EntityEditPacketSender.h"
#include "PhysicalEntitySimulation.h"
gpu::PipelinePointer RenderablePolyVoxEntityItem::_pipeline = nullptr;
gpu::PipelinePointer RenderablePolyVoxEntityItem::_wireframePipeline = nullptr;
const float MARCHING_CUBE_COLLISION_HULL_OFFSET = 0.5;
/*
A PolyVoxEntity has several interdependent parts:
@ -116,6 +112,10 @@ EntityItemPointer RenderablePolyVoxEntityItem::factory(const EntityItemID& entit
EntityItemPointer entity{ new RenderablePolyVoxEntityItem(entityID) };
entity->setProperties(properties);
std::static_pointer_cast<RenderablePolyVoxEntityItem>(entity)->initializePolyVox();
// As we create the first PolyVox entity, let's register its special shapePipeline factory:
PolyVoxPayload::registerShapePipeline();
return entity;
}
@ -732,35 +732,6 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
!mesh->getIndexBuffer()._buffer) {
return;
}
if (!_pipeline) {
gpu::ShaderPointer vertexShader = gpu::Shader::createVertex(std::string(polyvox_vert));
gpu::ShaderPointer pixelShader = gpu::Shader::createPixel(std::string(polyvox_frag));
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("materialBuffer"), MATERIAL_GPU_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("xMap"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("yMap"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("zMap"), 2));
gpu::ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
gpu::Shader::makeProgram(*program, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMaskDrawShape(*state);
_pipeline = gpu::Pipeline::create(program, state);
auto wireframeState = std::make_shared<gpu::State>();
wireframeState->setCullMode(gpu::State::CULL_BACK);
wireframeState->setDepthTest(true, true, gpu::LESS_EQUAL);
wireframeState->setFillMode(gpu::State::FILL_LINE);
PrepareStencil::testMaskDrawShape(*wireframeState);
_wireframePipeline = gpu::Pipeline::create(program, wireframeState);
}
if (!_vertexFormat) {
auto vf = std::make_shared<gpu::Stream::Format>();
@ -771,11 +742,6 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
gpu::Batch& batch = *args->_batch;
// Pick correct Pipeline
bool wireframe = (render::ShapeKey(args->_globalShapeKey).isWireframe());
auto pipeline = (wireframe ? _wireframePipeline : _pipeline);
batch.setPipeline(pipeline);
Transform transform(voxelToWorldMatrix());
batch.setModelTransform(transform);
batch.setInputFormat(_vertexFormat);
@ -817,7 +783,7 @@ void RenderablePolyVoxEntityItem::render(RenderArgs* args) {
batch.setResourceTexture(2, DependencyManager::get<TextureCache>()->getWhiteTexture());
}
int voxelVolumeSizeLocation = pipeline->getProgram()->getUniforms().findLocation("voxelVolumeSize");
int voxelVolumeSizeLocation = args->_shapePipeline->pipeline->getProgram()->getUniforms().findLocation("voxelVolumeSize");
batch._glUniform3f(voxelVolumeSizeLocation, voxelVolumeSize.x, voxelVolumeSize.y, voxelVolumeSize.z);
batch.drawIndexed(gpu::TRIANGLES, (gpu::uint32)mesh->getNumIndices(), 0);
@ -848,6 +814,48 @@ void RenderablePolyVoxEntityItem::removeFromScene(const EntityItemPointer& self,
render::Item::clearID(_myItem);
}
uint8_t PolyVoxPayload::CUSTOM_PIPELINE_NUMBER = 0;
std::shared_ptr<gpu::Pipeline> PolyVoxPayload::_pipeline;
std::shared_ptr<gpu::Pipeline> PolyVoxPayload::_wireframePipeline;
render::ShapePipelinePointer PolyVoxPayload::shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key) {
if (!_pipeline) {
gpu::ShaderPointer vertexShader = gpu::Shader::createVertex(std::string(polyvox_vert));
gpu::ShaderPointer pixelShader = gpu::Shader::createPixel(std::string(polyvox_frag));
gpu::Shader::BindingSet slotBindings;
slotBindings.insert(gpu::Shader::Binding(std::string("materialBuffer"), PolyVoxPayload::MATERIAL_GPU_SLOT));
slotBindings.insert(gpu::Shader::Binding(std::string("xMap"), 0));
slotBindings.insert(gpu::Shader::Binding(std::string("yMap"), 1));
slotBindings.insert(gpu::Shader::Binding(std::string("zMap"), 2));
gpu::ShaderPointer program = gpu::Shader::createProgram(vertexShader, pixelShader);
gpu::Shader::makeProgram(*program, slotBindings);
auto state = std::make_shared<gpu::State>();
state->setCullMode(gpu::State::CULL_BACK);
state->setDepthTest(true, true, gpu::LESS_EQUAL);
PrepareStencil::testMaskDrawShape(*state);
_pipeline = gpu::Pipeline::create(program, state);
auto wireframeState = std::make_shared<gpu::State>();
wireframeState->setCullMode(gpu::State::CULL_BACK);
wireframeState->setDepthTest(true, true, gpu::LESS_EQUAL);
wireframeState->setFillMode(gpu::State::FILL_LINE);
PrepareStencil::testMaskDrawShape(*wireframeState);
_wireframePipeline = gpu::Pipeline::create(program, wireframeState);
}
if (key.isWireframe()) {
return std::make_shared<render::ShapePipeline>(_wireframePipeline, nullptr, nullptr, nullptr);
} else {
return std::make_shared<render::ShapePipeline>(_pipeline, nullptr, nullptr, nullptr);
}
}
namespace render {
template <> const ItemKey payloadGetKey(const PolyVoxPayload::Pointer& payload) {
return ItemKey::Builder::opaqueShape();
@ -871,6 +879,10 @@ namespace render {
payload->_owner->getRenderableInterface()->render(args);
}
}
template <> const ShapeKey shapeGetShapeKey(const PolyVoxPayload::Pointer& payload) {
return ShapeKey::Builder().withCustom(PolyVoxPayload::CUSTOM_PIPELINE_NUMBER).build();
}
}
@ -1619,7 +1631,7 @@ void RenderablePolyVoxEntityItem::bonkNeighbors() {
void RenderablePolyVoxEntityItem::locationChanged(bool tellPhysics) {
EntityItem::locationChanged(tellPhysics);
if (!_pipeline || !render::Item::isValidID(_myItem)) {
if (!render::Item::isValidID(_myItem)) {
return;
}
render::ScenePointer scene = AbstractViewStateInterface::instance()->getMain3DScene();

View file

@ -28,6 +28,19 @@
class PolyVoxPayload {
public:
static uint8_t CUSTOM_PIPELINE_NUMBER;
static render::ShapePipelinePointer shapePipelineFactory(const render::ShapePlumber& plumber, const render::ShapeKey& key);
static void registerShapePipeline() {
if (!CUSTOM_PIPELINE_NUMBER) {
CUSTOM_PIPELINE_NUMBER = render::ShapePipeline::registerCustomShapePipelineFactory(shapePipelineFactory);
}
}
static const int MATERIAL_GPU_SLOT = 3;
static std::shared_ptr<gpu::Pipeline> _pipeline;
static std::shared_ptr<gpu::Pipeline> _wireframePipeline;
PolyVoxPayload(EntityItemPointer owner) : _owner(owner), _bounds(AABox()) { }
typedef render::Payload<PolyVoxPayload> Payload;
typedef Payload::DataPointer Pointer;
@ -40,6 +53,7 @@ namespace render {
template <> const ItemKey payloadGetKey(const PolyVoxPayload::Pointer& payload);
template <> const Item::Bound payloadGetBound(const PolyVoxPayload::Pointer& payload);
template <> void payloadRender(const PolyVoxPayload::Pointer& payload, RenderArgs* args);
template <> const ShapeKey shapeGetShapeKey(const PolyVoxPayload::Pointer& payload);
}
@ -168,10 +182,7 @@ private:
NetworkTexturePointer _yTexture;
NetworkTexturePointer _zTexture;
const int MATERIAL_GPU_SLOT = 3;
render::ItemID _myItem{ render::Item::INVALID_ITEM_ID };
static gpu::PipelinePointer _pipeline;
static gpu::PipelinePointer _wireframePipeline;
ShapeInfo _shapeInfo;

View file

@ -128,9 +128,9 @@ void RenderableShapeEntityItem::render(RenderArgs* args) {
auto pipeline = color.a < 1.0f ? geometryCache->getTransparentShapePipeline() : geometryCache->getOpaqueShapePipeline();
if (render::ShapeKey(args->_globalShapeKey).isWireframe()) {
geometryCache->renderWireShapeInstance(batch, MAPPING[_shape], color, pipeline);
geometryCache->renderWireShapeInstance(args, batch, MAPPING[_shape], color, pipeline);
} else {
geometryCache->renderSolidShapeInstance(batch, MAPPING[_shape], color, pipeline);
geometryCache->renderSolidShapeInstance(args, batch, MAPPING[_shape], color, pipeline);
}
}

View file

@ -221,10 +221,10 @@ void RenderableZoneEntityItem::render(RenderArgs* args) {
if (getShapeType() == SHAPE_TYPE_SPHERE) {
shapeTransform.postScale(SPHERE_ENTITY_SCALE);
batch.setModelTransform(shapeTransform);
geometryCache->renderWireSphereInstance(batch, DEFAULT_COLOR);
geometryCache->renderWireSphereInstance(args, batch, DEFAULT_COLOR);
} else {
batch.setModelTransform(shapeTransform);
geometryCache->renderWireCubeInstance(batch, DEFAULT_COLOR);
geometryCache->renderWireCubeInstance(args, batch, DEFAULT_COLOR);
}
break;
}
@ -554,11 +554,13 @@ void RenderableZoneEntityItemMeta::setProceduralUserData(QString userData) {
void RenderableZoneEntityItemMeta::render(RenderArgs* args) {
if (!_stage) {
_stage = DependencyManager::get<DeferredLightingEffect>()->getLightStage();
_stage = args->_scene->getStage<LightStage>();
assert(_stage);
}
if (!_backgroundStage) {
_backgroundStage = DependencyManager::get<DeferredLightingEffect>()->getBackgroundStage();
_backgroundStage = args->_scene->getStage<BackgroundStage>();
assert(_backgroundStage);
}
{ // Sun

View file

@ -1,18 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
// fragment shader
//
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
in vec4 _color;
out vec4 outFragColor;
void main(void) {
outFragColor = _color;
}

View file

@ -1,25 +0,0 @@
<@include gpu/Config.slh@>
<$VERSION_HEADER$>
// Generated on <$_SCRIBE_DATE$>
//
// particle vertex shader
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//
<@include gpu/Inputs.slh@>
<@include gpu/Color.slh@>
<@include gpu/Transform.slh@>
<$declareStandardTransform()$>
out vec4 _color;
void main(void) {
// pass along the color
_color = colorToLinearRGBA(inColor);
TransformCamera cam = getTransformCamera();
TransformObject obj = getTransformObject();
<$transformModelToClipPos(cam, obj, inPosition, gl_Position)$>
}

View file

@ -182,7 +182,6 @@ GLuint compileProgram(const std::vector<GLuint>& glshaders, std::string& error)
filestream.close();
}
*/
delete[] temp;
glDeleteProgram(glprogram);
return 0;

Some files were not shown because too many files have changed in this diff.