Mirror of https://github.com/overte-org/overte.git (synced 2025-04-22 07:24:17 +02:00)

Merge branch 'master' into M17428-f
This commit is contained in commit bbfb2b1374.
102 changed files with 3108 additions and 1072 deletions
assignment-client/src/avatars
interface
CMakeLists.txt
resources
images
meshes/redirect
oopsDialog_auth.fbx
oopsDialog_auth.png
oopsDialog_protocol.fbx
oopsDialog_protocol.png
oopsDialog_timeout.fbx
oopsDialog_timeout.png
oopsDialog_vague.fbx
oopsDialog_vague.png
qml
serverless
src
libraries
audio-client/src
avatars-renderer/src/avatars-renderer
avatars/src
AvatarData.cpp
AvatarData.h
AvatarHashMap.cpp
AvatarHashMap.h
AvatarTraits.h
ClientTraitsHandler.cpp
ClientTraitsHandler.h
entities-renderer/src
entities/src
fbx/src
gpu-gl-common/src/gpu/gl
GLBackend.cpp
GLBackend.h
GLBackendInput.cpp
GLBackendOutput.cpp
GLBackendPipeline.cpp
GLBackendQuery.cpp
GLBackendTexture.cpp
GLPipeline.cpp
GLQuery.h
gpu-gl/src/gpu
gl41
gl45
gpu-gles/src/gpu/gles
gpu/src/gpu
networking/src
physics/src
render-utils/src
shared/src
scripts
defaultScripts.js
system
assignment-client/src/avatars/AvatarMixerClientData.cpp

@@ -11,6 +11,7 @@
#include "AvatarMixerClientData.h"

#include <algorithm>
#include <udt/PacketHeaders.h>

#include <DependencyManager.h>

@@ -218,6 +219,10 @@ uint16_t AvatarMixerClientData::getLastBroadcastSequenceNumber(const QUuid& node
}

void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointer other) {
ignoreOther(self.data(), other.data());
}

void AvatarMixerClientData::ignoreOther(const Node* self, const Node* other) {
if (!isRadiusIgnoring(other->getUUID())) {
addToRadiusIgnoringSet(other->getUUID());
auto killPacket = NLPacket::create(PacketType::KillAvatar, NUM_BYTES_RFC4122_UUID + sizeof(KillAvatarReason), true);

@@ -235,9 +240,20 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
}
}

void AvatarMixerClientData::removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other) {
if (isRadiusIgnoring(other)) {
_radiusIgnoredOthers.erase(other);
bool AvatarMixerClientData::isRadiusIgnoring(const QUuid& other) const {
return std::find(_radiusIgnoredOthers.cbegin(), _radiusIgnoredOthers.cend(), other) != _radiusIgnoredOthers.cend();
}

void AvatarMixerClientData::addToRadiusIgnoringSet(const QUuid& other) {
if (!isRadiusIgnoring(other)) {
_radiusIgnoredOthers.push_back(other);
}
}

void AvatarMixerClientData::removeFromRadiusIgnoringSet(const QUuid& other) {
auto ignoredOtherIter = std::find(_radiusIgnoredOthers.cbegin(), _radiusIgnoredOthers.cend(), other);
if (ignoredOtherIter != _radiusIgnoredOthers.cend()) {
_radiusIgnoredOthers.erase(ignoredOtherIter);
}
}
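The change above swaps _radiusIgnoredOthers from std::unordered_set<QUuid> to std::vector<QUuid> with linear std::find lookups. The sketch below is a standalone illustration of that vector-as-small-set idiom, using a plain integer id as a stand-in for QUuid so it builds without Qt; it is not the mixer's actual code.

// Standalone sketch of the vector-as-small-set idiom adopted above.
// Uses uint64_t as a hypothetical stand-in for QUuid.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

using Uuid = std::uint64_t;                 // stand-in for QUuid

static std::vector<Uuid> radiusIgnoredOthers;

bool isRadiusIgnoring(Uuid other) {
    return std::find(radiusIgnoredOthers.cbegin(), radiusIgnoredOthers.cend(), other)
        != radiusIgnoredOthers.cend();
}

void addToRadiusIgnoringSet(Uuid other) {
    if (!isRadiusIgnoring(other)) {         // guard keeps the vector duplicate-free
        radiusIgnoredOthers.push_back(other);
    }
}

void removeFromRadiusIgnoringSet(Uuid other) {
    auto it = std::find(radiusIgnoredOthers.cbegin(), radiusIgnoredOthers.cend(), other);
    if (it != radiusIgnoredOthers.cend()) {
        radiusIgnoredOthers.erase(it);
    }
}

int main() {
    addToRadiusIgnoringSet(42);
    addToRadiusIgnoringSet(42);             // no effect, already present
    std::cout << std::boolalpha << isRadiusIgnoring(42) << '\n';   // true
    removeFromRadiusIgnoringSet(42);
    std::cout << isRadiusIgnoring(42) << '\n';                     // false
}

For the handful of radius-ignored avatars a client typically accumulates, a linear scan over a contiguous vector is usually as fast as hashing, which is presumably the motivation for the change.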
assignment-client/src/avatars/AvatarMixerClientData.h

@@ -15,7 +15,7 @@
#include <algorithm>
#include <cfloat>
#include <unordered_map>
#include <unordered_set>
#include <vector>
#include <queue>

#include <QtCore/QJsonObject>

@@ -45,6 +45,7 @@ public:
int parseData(ReceivedMessage& message) override;
AvatarData& getAvatar() { return *_avatar; }
const AvatarData& getAvatar() const { return *_avatar; }
const AvatarData* getConstAvatarData() const { return _avatar.get(); }
AvatarSharedPointer getAvatarSharedPointer() const { return _avatar; }

@@ -90,11 +91,11 @@ public:
void loadJSONStats(QJsonObject& jsonObject) const;

glm::vec3 getPosition() const { return _avatar ? _avatar->getWorldPosition() : glm::vec3(0); }
glm::vec3 getGlobalBoundingBoxCorner() const { return _avatar ? _avatar->getGlobalBoundingBoxCorner() : glm::vec3(0); }
bool isRadiusIgnoring(const QUuid& other) const { return _radiusIgnoredOthers.find(other) != _radiusIgnoredOthers.end(); }
void addToRadiusIgnoringSet(const QUuid& other) { _radiusIgnoredOthers.insert(other); }
void removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other);
bool isRadiusIgnoring(const QUuid& other) const;
void addToRadiusIgnoringSet(const QUuid& other);
void removeFromRadiusIgnoringSet(const QUuid& other);
void ignoreOther(SharedNodePointer self, SharedNodePointer other);
void ignoreOther(const Node* self, const Node* other);

void readViewFrustumPacket(const QByteArray& message);

@@ -166,7 +167,7 @@ private:
int _numOutOfOrderSends = 0;

SimpleMovingAverage _avgOtherAvatarDataRate;
std::unordered_set<QUuid> _radiusIgnoredOthers;
std::vector<QUuid> _radiusIgnoredOthers;
ConicalViewFrustums _currentViewFrustums;

int _recentOtherAvatarsInView { 0 };
assignment-client/src/avatars/AvatarMixerSlave.cpp

@@ -13,6 +13,7 @@
#include <algorithm>
#include <random>
#include <chrono>

#include <glm/glm.hpp>
#include <glm/gtx/norm.hpp>
@@ -33,6 +34,8 @@
#include "AvatarMixer.h"
#include "AvatarMixerClientData.h"

namespace chrono = std::chrono;

void AvatarMixerSlave::configure(ConstIter begin, ConstIter end) {
_begin = begin;
_end = end;
@@ -209,7 +212,18 @@ void AvatarMixerSlave::broadcastAvatarData(const SharedNodePointer& node) {
_stats.jobElapsedTime += (end - start);
}

AABox computeBubbleBox(const AvatarData& avatar, float bubbleExpansionFactor) {
AABox box = avatar.getGlobalBoundingBox();
glm::vec3 scale = box.getScale();
scale *= bubbleExpansionFactor;
const glm::vec3 MIN_BUBBLE_SCALE(0.3f, 1.3f, 0.3);
scale = glm::max(scale, MIN_BUBBLE_SCALE);
box.setScaleStayCentered(glm::max(scale, MIN_BUBBLE_SCALE));
return box;
}

void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node) {
const Node* destinationNode = node.data();

auto nodeList = DependencyManager::get<NodeList>();
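The new computeBubbleBox() helper expands an avatar's global bounding box by a factor and clamps it to a minimum person-sized scale while keeping the box centered. Below is a minimal sketch of that math with tiny stand-ins for glm::vec3 and the engine's AABox; the setScaleStayCentered behavior is assumed from its name, not copied from the engine.

// Minimal sketch of the bubble-box math with hypothetical Vec3/AABox stand-ins.
#include <algorithm>
#include <iostream>

struct Vec3 { float x, y, z; };

static Vec3 vmax(Vec3 a, Vec3 b) {
    return { std::max(a.x, b.x), std::max(a.y, b.y), std::max(a.z, b.z) };
}

struct AABox {
    Vec3 corner;   // minimum corner
    Vec3 scale;    // edge lengths
    void setScaleStayCentered(Vec3 newScale) {
        // grow or shrink around the current center
        corner.x -= (newScale.x - scale.x) * 0.5f;
        corner.y -= (newScale.y - scale.y) * 0.5f;
        corner.z -= (newScale.z - scale.z) * 0.5f;
        scale = newScale;
    }
};

AABox computeBubbleBox(AABox box, float bubbleExpansionFactor) {
    Vec3 scale = box.scale;
    scale.x *= bubbleExpansionFactor;
    scale.y *= bubbleExpansionFactor;
    scale.z *= bubbleExpansionFactor;
    const Vec3 MIN_BUBBLE_SCALE { 0.3f, 1.3f, 0.3f };   // never smaller than a person-sized bubble
    box.setScaleStayCentered(vmax(scale, MIN_BUBBLE_SCALE));
    return box;
}

int main() {
    AABox avatarBox { { -0.25f, 0.0f, -0.25f }, { 0.5f, 1.8f, 0.5f } };
    AABox bubble = computeBubbleBox(avatarBox, 4.0f);   // same 4x factor as the diff
    std::cout << bubble.scale.x << " " << bubble.scale.y << " " << bubble.scale.z << '\n';  // 2 7.2 2
}

Both the viewer's own bubble (expansion factor 4.0) and each other avatar's bubble (factor 2.4) are built through this helper in the changed code.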
@@ -220,7 +234,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
_stats.nodesBroadcastedTo++;

AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(node->getLinkedData());
AvatarMixerClientData* nodeData = reinterpret_cast<AvatarMixerClientData*>(destinationNode->getLinkedData());

nodeData->resetInViewStats();
@@ -242,12 +256,8 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
int traitBytesSent = 0;

// max number of avatarBytes per frame
auto maxAvatarBytesPerFrame = (_maxKbpsPerNode * BYTES_PER_KILOBIT) / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND;
int maxAvatarBytesPerFrame = int(_maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);

// FIXME - find a way to not send the sessionID for every avatar
int minimumBytesPerAvatar = AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID;

int overBudgetAvatars = 0;

// keep track of the number of other avatars held back in this frame
int numAvatarsHeldBack = 0;
@@ -260,66 +270,38 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
bool PALIsOpen = nodeData->getRequestsDomainListData();

// When this is true, the AvatarMixer will send Avatar data to a client about avatars that have ignored them
bool getsAnyIgnored = PALIsOpen && node->getCanKick();
bool getsAnyIgnored = PALIsOpen && destinationNode->getCanKick();

if (PALIsOpen) {
// Increase minimumBytesPerAvatar if the PAL is open
minimumBytesPerAvatar += sizeof(AvatarDataPacket::AvatarGlobalPosition) +
sizeof(AvatarDataPacket::AudioLoudness);
}
// Bandwidth allowance for data that must be sent.
int minimumBytesPerAvatar = PALIsOpen ? AvatarDataPacket::AVATAR_HAS_FLAGS_SIZE + NUM_BYTES_RFC4122_UUID +
sizeof(AvatarDataPacket::AvatarGlobalPosition) + sizeof(AvatarDataPacket::AudioLoudness) : 0;

// setup a PacketList for the avatarPackets
auto avatarPacketList = NLPacketList::create(PacketType::BulkAvatarData);
static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;

// Define the minimum bubble size
static const glm::vec3 minBubbleSize = avatar.getSensorToWorldScale() * glm::vec3(0.3f, 1.3f, 0.3f);
// Define the scale of the box for the current node
glm::vec3 nodeBoxScale = (nodeData->getPosition() - nodeData->getGlobalBoundingBoxCorner()) * 2.0f * avatar.getSensorToWorldScale();
// Set up the bounding box for the current node
AABox nodeBox(nodeData->getGlobalBoundingBoxCorner(), nodeBoxScale);
// Clamp the size of the bounding box to a minimum scale
if (glm::any(glm::lessThan(nodeBoxScale, minBubbleSize))) {
nodeBox.setScaleStayCentered(minBubbleSize);
}
// Quadruple the scale of both bounding boxes
nodeBox.embiggen(4.0f);

// setup list of AvatarData as well as maps to map between the AvatarData and the original nodes
std::vector<AvatarSharedPointer> avatarsToSort;
std::unordered_map<AvatarSharedPointer, SharedNodePointer> avatarDataToNodes;
std::unordered_map<QUuid, uint64_t> avatarEncodeTimes;
std::for_each(_begin, _end, [&](const SharedNodePointer& otherNode) {
// make sure this is an agent that we have avatar data for before considering it for inclusion
if (otherNode->getType() == NodeType::Agent
&& otherNode->getLinkedData()) {
const AvatarMixerClientData* otherNodeData = reinterpret_cast<const AvatarMixerClientData*>(otherNode->getLinkedData());

AvatarSharedPointer otherAvatar = otherNodeData->getAvatarSharedPointer();
avatarsToSort.push_back(otherAvatar);
avatarDataToNodes[otherAvatar] = otherNode;
QUuid id = otherAvatar->getSessionUUID();
avatarEncodeTimes[id] = nodeData->getLastOtherAvatarEncodeTime(id);
}
});
// compute node bounding box
const float MY_AVATAR_BUBBLE_EXPANSION_FACTOR = 4.0f; // magic number determined empirically
AABox nodeBox = computeBubbleBox(avatar, MY_AVATAR_BUBBLE_EXPANSION_FACTOR);

class SortableAvatar: public PrioritySortUtil::Sortable {
public:
SortableAvatar() = delete;
SortableAvatar(const AvatarSharedPointer& avatar, uint64_t lastEncodeTime)
: _avatar(avatar), _lastEncodeTime(lastEncodeTime) {}
glm::vec3 getPosition() const override { return _avatar->getWorldPosition(); }
SortableAvatar(const AvatarData* avatar, const Node* avatarNode, uint64_t lastEncodeTime)
: _avatar(avatar), _node(avatarNode), _lastEncodeTime(lastEncodeTime) {}
glm::vec3 getPosition() const override { return _avatar->getClientGlobalPosition(); }
float getRadius() const override {
glm::vec3 nodeBoxHalfScale = (_avatar->getWorldPosition() - _avatar->getGlobalBoundingBoxCorner() * _avatar->getSensorToWorldScale());
return glm::max(nodeBoxHalfScale.x, glm::max(nodeBoxHalfScale.y, nodeBoxHalfScale.z));
glm::vec3 nodeBoxScale = _avatar->getGlobalBoundingBox().getScale();
return 0.5f * glm::max(nodeBoxScale.x, glm::max(nodeBoxScale.y, nodeBoxScale.z));
}
uint64_t getTimestamp() const override {
return _lastEncodeTime;
}
AvatarSharedPointer getAvatar() const { return _avatar; }
const Node* getNode() const { return _node; }

private:
AvatarSharedPointer _avatar;
const AvatarData* _avatar;
const Node* _node;
uint64_t _lastEncodeTime;
};
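SortableAvatar now carries a raw AvatarData*/Node* pair and derives its radius from the global bounding box, feeding the PrioritySortUtil sort that decides packing order. The real weighting is defined by the _avatarSortCoefficient* values in AvatarData; the sketch below only illustrates the general pattern of ranking candidates by size, distance, and time since last encode, with a made-up formula.

// Generic, illustrative priority-sort pattern; not PrioritySortUtil's formula.
#include <algorithm>
#include <cstdint>
#include <iostream>
#include <vector>

struct Candidate {
    const char* name;
    float distance;               // distance from the viewer, meters
    float radius;                 // apparent size
    std::uint64_t lastEncodeUsec; // when this avatar was last packed for the viewer
};

int main() {
    std::uint64_t now = 10'000'000;
    std::vector<Candidate> candidates = {
        { "far-but-stale",  50.0f,  1.0f, 1'000'000 },
        { "near-and-fresh",  2.0f,  1.0f, 9'900'000 },
        { "huge-and-far",   80.0f, 10.0f, 9'000'000 },
    };

    auto priority = [now](const Candidate& c) {
        float age = float(now - c.lastEncodeUsec) * 1e-6f;          // seconds since last update
        return c.radius / std::max(c.distance, 0.01f) + 0.1f * age; // bigger, closer, staler => higher
    };

    std::sort(candidates.begin(), candidates.end(),
              [&](const Candidate& a, const Candidate& b) { return priority(a) > priority(b); });

    for (const auto& c : candidates) {
        std::cout << c.name << " priority=" << priority(c) << '\n';
    }
}

The age term is what makes out-of-view or skipped avatars rise in priority over time, matching the comment later in this file.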
@@ -329,16 +311,18 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
AvatarData::_avatarSortCoefficientSize,
AvatarData::_avatarSortCoefficientCenter,
AvatarData::_avatarSortCoefficientAge);
sortedAvatars.reserve(avatarsToSort.size());
sortedAvatars.reserve(_end - _begin);

// ignore or sort
const AvatarSharedPointer& thisAvatar = nodeData->getAvatarSharedPointer();
for (const auto& avatar : avatarsToSort) {
if (avatar == thisAvatar) {
// don't echo updates to self
for (auto listedNode = _begin; listedNode != _end; ++listedNode) {
Node* otherNodeRaw = (*listedNode).data();
if (otherNodeRaw->getType() != NodeType::Agent
|| !otherNodeRaw->getLinkedData()
|| otherNodeRaw == destinationNode) {
continue;
}

auto avatarNode = otherNodeRaw;

bool shouldIgnore = false;
// We ignore other nodes for a couple of reasons:
// 1) ignore bubbles and ignore specific node
@@ -346,53 +330,39 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// happen if for example the avatar is connected on a desktop and sending
// updates at ~30hz. So every 3 frames we skip a frame.

auto avatarNode = avatarDataToNodes[avatar];
assert(avatarNode); // we can't have gotten here without the avatarData being a valid key in the map

const AvatarMixerClientData* avatarNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
assert(avatarNodeData); // we can't have gotten here without avatarNode having valid data
const AvatarMixerClientData* avatarClientNodeData = reinterpret_cast<const AvatarMixerClientData*>(avatarNode->getLinkedData());
assert(avatarClientNodeData); // we can't have gotten here without avatarNode having valid data
quint64 startIgnoreCalculation = usecTimestampNow();

// make sure we have data for this avatar, that it isn't the same node,
// and isn't an avatar that the viewing node has ignored
// or that has ignored the viewing node
if (!avatarNode->getLinkedData()
|| avatarNode->getUUID() == node->getUUID()
|| (node->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(node->getUUID()) && !getsAnyIgnored)) {
if ((destinationNode->isIgnoringNodeWithID(avatarNode->getUUID()) && !PALIsOpen)
|| (avatarNode->isIgnoringNodeWithID(destinationNode->getUUID()) && !getsAnyIgnored)) {
shouldIgnore = true;
} else {
// Check to see if the space bubble is enabled
// Don't bother with these checks if the other avatar has their bubble enabled and we're gettingAnyIgnored
if (node->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
float sensorToWorldScale = avatarNodeData->getAvatarSharedPointer()->getSensorToWorldScale();
// Define the scale of the box for the current other node
glm::vec3 otherNodeBoxScale = (avatarNodeData->getPosition() - avatarNodeData->getGlobalBoundingBoxCorner()) * 2.0f * sensorToWorldScale;
// Set up the bounding box for the current other node
AABox otherNodeBox(avatarNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
// Clamp the size of the bounding box to a minimum scale
if (glm::any(glm::lessThan(otherNodeBoxScale, minBubbleSize))) {
otherNodeBox.setScaleStayCentered(minBubbleSize);
}
// Change the scale of both bounding boxes
// (This is an arbitrary number determined empirically)
otherNodeBox.embiggen(2.4f);

if (destinationNode->isIgnoreRadiusEnabled() || (avatarNode->isIgnoreRadiusEnabled() && !getsAnyIgnored)) {
// Perform the collision check between the two bounding boxes
const float OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR = 2.4f; // magic number determined empirically
AABox otherNodeBox = computeBubbleBox(avatarClientNodeData->getAvatar(), OTHER_AVATAR_BUBBLE_EXPANSION_FACTOR);
if (nodeBox.touches(otherNodeBox)) {
nodeData->ignoreOther(node, avatarNode);
nodeData->ignoreOther(destinationNode, avatarNode);
shouldIgnore = !getsAnyIgnored;
}
}
// Not close enough to ignore
if (!shouldIgnore) {
nodeData->removeFromRadiusIgnoringSet(node, avatarNode->getUUID());
nodeData->removeFromRadiusIgnoringSet(avatarNode->getUUID());
}
}

if (!shouldIgnore) {
AvatarDataSequenceNumber lastSeqToReceiver = nodeData->getLastBroadcastSequenceNumber(avatarNode->getUUID());
AvatarDataSequenceNumber lastSeqFromSender = avatarNodeData->getLastReceivedSequenceNumber();
AvatarDataSequenceNumber lastSeqFromSender = avatarClientNodeData->getLastReceivedSequenceNumber();

// FIXME - This code does appear to be working. But it seems brittle.
// It supports determining if the frame of data for this "other"
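The ignore decision above combines explicit per-node ignores with the bubble test: when either side has the bubble enabled and the two expanded boxes touch, ignoreOther() is called and the avatar is skipped unless the viewer is allowed to see ignored avatars. A self-contained sketch of the overlap check follows, assuming AABox::touches is a plain axis-aligned interval test (the engine's implementation is not shown in this diff).

// Sketch of the bubble collision test with a hypothetical AABox stand-in.
#include <iostream>

struct AABox {
    float minX, minY, minZ;
    float maxX, maxY, maxZ;
    bool touches(const AABox& o) const {
        return minX <= o.maxX && maxX >= o.minX &&
               minY <= o.maxY && maxY >= o.minY &&
               minZ <= o.maxZ && maxZ >= o.minZ;
    }
};

int main() {
    AABox myBubble    { -1.0f, 0.0f, -1.0f,  1.0f, 7.2f, 1.0f };   // own bubble, expanded 4.0x
    AABox otherBubble {  0.5f, 0.0f,  0.5f,  2.0f, 4.0f, 2.0f };   // other avatar, expanded 2.4x

    bool shouldIgnore = false;
    bool getsAnyIgnored = false;    // true only for privileged viewers with the PAL open
    if (myBubble.touches(otherBubble)) {
        // in the mixer this is where nodeData->ignoreOther(destinationNode, avatarNode) runs
        shouldIgnore = !getsAnyIgnored;
    }
    std::cout << std::boolalpha << "shouldIgnore = " << shouldIgnore << '\n';   // true
}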
@@ -417,12 +387,10 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

if (!shouldIgnore) {
// sort this one for later
uint64_t lastEncodeTime = 0;
std::unordered_map<QUuid, uint64_t>::const_iterator itr = avatarEncodeTimes.find(avatar->getSessionUUID());
if (itr != avatarEncodeTimes.end()) {
lastEncodeTime = itr->second;
}
sortedAvatars.push(SortableAvatar(avatar, lastEncodeTime));
const AvatarData* avatarNodeData = avatarClientNodeData->getConstAvatarData();
auto lastEncodeTime = nodeData->getLastOtherAvatarEncodeTime(avatarNodeData->getSessionUUID());

sortedAvatars.push(SortableAvatar(avatarNodeData, avatarNode, lastEncodeTime));
}
}
@@ -430,19 +398,31 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

int remainingAvatars = (int)sortedAvatars.size();
auto traitsPacketList = NLPacketList::create(PacketType::BulkAvatarTraits, QByteArray(), true, true);

const auto& sortedAvatarVector = sortedAvatars.getSortedVector();
for (const auto& sortedAvatar : sortedAvatarVector) {
const auto& avatarData = sortedAvatar.getAvatar();
remainingAvatars--;
const Node* otherNode = sortedAvatar.getNode();
auto lastEncodeForOther = sortedAvatar.getTimestamp();

auto otherNode = avatarDataToNodes[avatarData];
assert(otherNode); // we can't have gotten here without the avatarData being a valid key in the map

// NOTE: Here's where we determine if we are over budget and drop to bare minimum data
AvatarData::AvatarDataDetail detail = AvatarData::NoData;

// NOTE: Here's where we determine if we are over budget and drop remaining avatars,
// or send minimal avatar data in uncommon case of PALIsOpen.
int minimRemainingAvatarBytes = minimumBytesPerAvatar * remainingAvatars;
bool overBudget = (identityBytesSent + numAvatarDataBytes + minimRemainingAvatarBytes) > maxAvatarBytesPerFrame;
if (overBudget) {
if (PALIsOpen) {
_stats.overBudgetAvatars++;
detail = AvatarData::PALMinimum;
} else {
_stats.overBudgetAvatars += remainingAvatars;
break;
}
}

quint64 startAvatarDataPacking = usecTimestampNow();
auto startAvatarDataPacking = chrono::high_resolution_clock::now();

++numOtherAvatars;
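The rewritten loop reserves minimumBytesPerAvatar for each remaining avatar and stops (or, with the PAL open, degrades to PALMinimum) once the frame's byte budget would be exceeded. A sketch of the budget arithmetic follows; the kbps cap, BYTES_PER_KILOBIT, and the broadcast rate below are assumed example values, not the mixer's configured ones.

// Sketch of the per-frame byte budget and over-budget check.
#include <iostream>

int main() {
    const float maxKbpsPerNode = 300.0f;                         // assumed per-client cap
    const float BYTES_PER_KILOBIT = 1000.0f / 8.0f;              // assumed definition
    const int AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND = 45;     // assumed broadcast rate

    int maxAvatarBytesPerFrame =
        int(maxKbpsPerNode * BYTES_PER_KILOBIT / AVATAR_MIXER_BROADCAST_FRAMES_PER_SECOND);

    int identityBytesSent = 120;      // example values for one frame
    int numAvatarDataBytes = 800;
    int minimumBytesPerAvatar = 0;    // 0 unless the PAL is open, per the diff
    int remainingAvatars = 12;

    bool overBudget = (identityBytesSent + numAvatarDataBytes +
                       minimumBytesPerAvatar * remainingAvatars) > maxAvatarBytesPerFrame;

    std::cout << "budget/frame = " << maxAvatarBytesPerFrame << " bytes, overBudget = "
              << std::boolalpha << overBudget << '\n';   // 833 bytes, true
}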
@@ -459,32 +439,18 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
nodeData->setLastBroadcastTime(otherNode->getUUID(), usecTimestampNow());
}

// determine if avatar is in view which determines how much data to send
glm::vec3 otherPosition = otherAvatar->getClientGlobalPosition();
glm::vec3 otherNodeBoxScale = (otherPosition - otherNodeData->getGlobalBoundingBoxCorner()) * 2.0f * otherAvatar->getSensorToWorldScale();
AABox otherNodeBox(otherNodeData->getGlobalBoundingBoxCorner(), otherNodeBoxScale);
bool isInView = nodeData->otherAvatarInView(otherNodeBox);
// Typically all out-of-view avatars but such avatars' priorities will rise with time:
bool isLowerPriority = sortedAvatar.getPriority() <= OUT_OF_VIEW_THRESHOLD;

// start a new segment in the PacketList for this avatar
avatarPacketList->startSegment();

AvatarData::AvatarDataDetail detail;

if (overBudget) {
overBudgetAvatars++;
_stats.overBudgetAvatars++;
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::NoData;
} else if (!isInView) {
if (isLowerPriority) {
detail = PALIsOpen ? AvatarData::PALMinimum : AvatarData::MinimumData;
nodeData->incrementAvatarOutOfView();
} else {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO
? AvatarData::SendAllData : AvatarData::CullSmallData;
} else if (!overBudget) {
detail = distribution(generator) < AVATAR_SEND_FULL_UPDATE_RATIO ? AvatarData::SendAllData : AvatarData::CullSmallData;
nodeData->incrementAvatarInView();
}

bool includeThisAvatar = true;
auto lastEncodeForOther = nodeData->getLastOtherAvatarEncodeTime(otherNode->getUUID());
QVector<JointData>& lastSentJointsForOther = nodeData->getLastOtherAvatarSentJoints(otherNode->getUUID());

lastSentJointsForOther.resize(otherAvatar->getJointCount());
@@ -494,14 +460,14 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
AvatarDataPacket::HasFlags hasFlagsOut; // the result of the toByteArray
bool dropFaceTracking = false;

quint64 start = usecTimestampNow();
auto startSerialize = chrono::high_resolution_clock::now();
QByteArray bytes = otherAvatar->toByteArray(detail, lastEncodeForOther, lastSentJointsForOther,
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition,
&lastSentJointsForOther);
quint64 end = usecTimestampNow();
_stats.toByteArrayElapsedTime += (end - start);
auto endSerialize = chrono::high_resolution_clock::now();
_stats.toByteArrayElapsedTime +=
(quint64) chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();

static auto maxAvatarDataBytes = avatarPacketList->getMaxSegmentSize() - NUM_BYTES_RFC4122_UUID;
if (bytes.size() > maxAvatarDataBytes) {
qCWarning(avatars) << "otherAvatar.toByteArray() for" << otherNode->getUUID()
<< "resulted in very large buffer of" << bytes.size() << "bytes - dropping facial data";
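Serialization time is now measured with std::chrono::high_resolution_clock and accumulated in microseconds instead of usecTimestampNow() deltas. The pattern in isolation, with a sleep standing in for the toByteArray() call:

// Sketch of the chrono-based timing pattern the diff adopts.
#include <chrono>
#include <cstdint>
#include <iostream>
#include <thread>

namespace chrono = std::chrono;

int main() {
    std::uint64_t toByteArrayElapsedTime = 0;   // accumulated microseconds, like the _stats counter

    auto startSerialize = chrono::high_resolution_clock::now();
    std::this_thread::sleep_for(chrono::milliseconds(2));   // stand-in for the serialization work
    auto endSerialize = chrono::high_resolution_clock::now();

    toByteArrayElapsedTime +=
        (std::uint64_t) chrono::duration_cast<chrono::microseconds>(endSerialize - startSerialize).count();

    std::cout << "accumulated " << toByteArrayElapsedTime << " us\n";
}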
@@ -527,8 +493,11 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
}

if (includeThisAvatar) {
// start a new segment in the PacketList for this avatar
avatarPacketList->startSegment();
numAvatarDataBytes += avatarPacketList->write(otherNode->getUUID().toRfc4122());
numAvatarDataBytes += avatarPacketList->write(bytes);
avatarPacketList->endSegment();

if (detail != AvatarData::NoData) {
_stats.numOthersIncluded++;
@@ -546,15 +515,13 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
// It would be nice if we could tweak its future sort priority to put it at the back of the list.
}

avatarPacketList->endSegment();

quint64 endAvatarDataPacking = usecTimestampNow();
_stats.avatarDataPackingElapsedTime += (endAvatarDataPacking - startAvatarDataPacking);
auto endAvatarDataPacking = chrono::high_resolution_clock::now();
_stats.avatarDataPackingElapsedTime +=
(quint64) chrono::duration_cast<chrono::microseconds>(endAvatarDataPacking - startAvatarDataPacking).count();

// use helper to add any changed traits to our packet list
traitBytesSent += addChangedTraitsToBulkPacket(nodeData, otherNodeData, *traitsPacketList);

traitsPacketList->getDataSize();
remainingAvatars--;
}

quint64 startPacketSending = usecTimestampNow();
@@ -566,7 +533,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
_stats.numBytesSent += numAvatarDataBytes;

// send the avatar data PacketList
nodeList->sendPacketList(std::move(avatarPacketList), *node);
nodeList->sendPacketList(std::move(avatarPacketList), *destinationNode);

// record the bytes sent for other avatar data in the AvatarMixerClientData
nodeData->recordSentAvatarData(numAvatarDataBytes);
@@ -576,7 +543,7 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)

if (traitsPacketList->getNumPackets() >= 1) {
// send the traits packet list
nodeList->sendPacketList(std::move(traitsPacketList), *node);
nodeList->sendPacketList(std::move(traitsPacketList), *destinationNode);
}

// record the number of avatars held back this frame
interface/CMakeLists.txt

@@ -332,6 +332,10 @@ if (APPLE)
COMMAND "${CMAKE_COMMAND}" -E copy_directory
"${PROJECT_SOURCE_DIR}/resources/fonts"
"${RESOURCES_DEV_DIR}/fonts"
# add redirect json to macOS builds.
COMMAND "${CMAKE_COMMAND}" -E copy_if_different
"${PROJECT_SOURCE_DIR}/resources/serverless/redirect.json"
"${RESOURCES_DEV_DIR}/serverless/redirect.json"
)

# call the fixup_interface macro to add required bundling commands for installation
@@ -360,6 +364,9 @@ else()
COMMAND "${CMAKE_COMMAND}" -E copy_if_different
"${PROJECT_SOURCE_DIR}/resources/serverless/tutorial.json"
"${RESOURCES_DEV_DIR}/serverless/tutorial.json"
COMMAND "${CMAKE_COMMAND}" -E copy_if_different
"${PROJECT_SOURCE_DIR}/resources/serverless/redirect.json"
"${RESOURCES_DEV_DIR}/serverless/redirect.json"
# copy JSDoc files beside the executable
COMMAND "${CMAKE_COMMAND}" -E copy_directory
"${CMAKE_SOURCE_DIR}/tools/jsdoc/out"
interface/resources/images/eyeClosed.svg (new file, 5 lines, 825 B)

@@ -0,0 +1,5 @@
<svg width="31" height="23" viewBox="0 0 31 23" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.59534 11.0156C6.16042 13.4128 9.65987 15.5898 13.6042 16.1774C17.686 16.7856 22.4164 15.7196 27.3057 11.0659C22.0721 6.07309 17.0642 5.14115 12.9153 5.90073C8.99427 6.61859 5.69298 8.87688 3.59534 11.0156ZM12.455 3.27591C17.7727 2.30235 23.9836 3.74895 30.1053 10.1333L31 11.0664L30.1053 11.9994C24.3636 17.9875 18.4774 19.5983 13.2276 18.8161C8.06048 18.0463 3.70384 14.9892 0.837069 11.9994L0 11.1265L0.778477 10.1986C3.05338 7.48717 7.2318 4.23217 12.455 3.27591Z" fill="#3D3D3D"/>
<ellipse cx="15.6539" cy="10.9218" rx="3.65386" ry="3.81061" fill="#3D3D3D"/>
<line x1="25" y1="2.12132" x2="7.12132" y2="20" stroke="#3D3D3D" stroke-width="3" stroke-linecap="round"/>
</svg>
interface/resources/images/eyeOpen.svg (new file, 4 lines, 721 B)

@@ -0,0 +1,4 @@
<svg width="31" height="16" viewBox="0 0 31 16" fill="none" xmlns="http://www.w3.org/2000/svg">
<path fill-rule="evenodd" clip-rule="evenodd" d="M3.59534 8.01564C6.16042 10.4128 9.65987 12.5898 13.6042 13.1774C17.686 13.7856 22.4164 12.7196 27.3057 8.06585C22.0721 3.07309 17.0642 2.14115 12.9153 2.90073C8.99427 3.61859 5.69298 5.87688 3.59534 8.01564ZM12.455 0.275915C17.7727 -0.697651 23.9836 0.748949 30.1053 7.13329L31 8.06636L30.1053 8.99944C24.3636 14.9875 18.4774 16.5983 13.2276 15.8161C8.06048 15.0463 3.70384 11.9892 0.837069 8.99944L0 8.12646L0.778477 7.1986C3.05338 4.48717 7.2318 1.23217 12.455 0.275915Z" fill="#3D3D3D"/>
<ellipse cx="15.644" cy="7.92179" rx="3.65386" ry="3.81061" fill="#3D3D3D"/>
</svg>
New binary files (content not shown):
interface/resources/images/loadingBar_placard.png (2.7 KiB)
interface/resources/images/loadingBar_progress.png (20 KiB)
interface/resources/meshes/redirect/oopsDialog_auth.fbx
interface/resources/meshes/redirect/oopsDialog_auth.png (4.9 KiB)
interface/resources/meshes/redirect/oopsDialog_protocol.fbx
interface/resources/meshes/redirect/oopsDialog_protocol.png (4.8 KiB)
interface/resources/meshes/redirect/oopsDialog_timeout.fbx
interface/resources/meshes/redirect/oopsDialog_timeout.png (4.4 KiB)
interface/resources/meshes/redirect/oopsDialog_vague.fbx
interface/resources/meshes/redirect/oopsDialog_vague.png (5 KiB)
|
@ -23,6 +23,7 @@ ModalWindow {
|
|||
objectName: "LoginDialog"
|
||||
implicitWidth: 520
|
||||
implicitHeight: 320
|
||||
closeButtonVisible: true
|
||||
destroyOnCloseButton: true
|
||||
destroyOnHidden: true
|
||||
visible: true
|
||||
|
|
|
@ -117,27 +117,27 @@ Item {
|
|||
}
|
||||
spacing: hifi.dimensions.contentSpacing.y / 2
|
||||
|
||||
TextField {
|
||||
id: usernameField
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
width: 1080
|
||||
placeholderText: qsTr("Username or Email")
|
||||
TextField {
|
||||
id: usernameField
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
width: 1080
|
||||
placeholderText: qsTr("Username or Email")
|
||||
}
|
||||
|
||||
TextField {
|
||||
id: passwordField
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
width: 1080
|
||||
|
||||
placeholderText: qsTr("Password")
|
||||
echoMode: TextInput.Password
|
||||
|
||||
Keys.onReturnPressed: linkAccountBody.login()
|
||||
TextField {
|
||||
id: passwordField
|
||||
anchors {
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
}
|
||||
width: 1080
|
||||
|
||||
placeholderText: qsTr("Password")
|
||||
echoMode: TextInput.Password
|
||||
|
||||
Keys.onReturnPressed: linkAccountBody.login()
|
||||
}
|
||||
}
|
||||
|
||||
InfoItem {
|
||||
|
@ -176,7 +176,7 @@ Item {
|
|||
anchors {
|
||||
left: parent.left
|
||||
top: form.bottom
|
||||
topMargin: hifi.dimensions.contentSpacing.y / 2
|
||||
topMargin: hifi.dimensions.contentSpacing.y / 2
|
||||
}
|
||||
|
||||
spacing: hifi.dimensions.contentSpacing.x
|
||||
|
@ -201,7 +201,7 @@ Item {
|
|||
anchors {
|
||||
right: parent.right
|
||||
top: form.bottom
|
||||
topMargin: hifi.dimensions.contentSpacing.y / 2
|
||||
topMargin: hifi.dimensions.contentSpacing.y / 2
|
||||
}
|
||||
spacing: hifi.dimensions.contentSpacing.x
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
|
|
|
@ -15,7 +15,6 @@ import QtQuick.Controls.Styles 1.4 as OriginalStyles
|
|||
|
||||
import "../controls-uit"
|
||||
import "../styles-uit"
|
||||
|
||||
Item {
|
||||
id: linkAccountBody
|
||||
clip: true
|
||||
|
@ -87,6 +86,23 @@ Item {
|
|||
height: 48
|
||||
}
|
||||
|
||||
ShortcutText {
|
||||
id: flavorText
|
||||
anchors {
|
||||
top: parent.top
|
||||
left: parent.left
|
||||
margins: 0
|
||||
topMargin: hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
|
||||
text: qsTr("Sign in to High Fidelity to make friends, get HFC, and buy interesting things on the Marketplace!")
|
||||
width: parent.width
|
||||
wrapMode: Text.WordWrap
|
||||
lineHeight: 1
|
||||
lineHeightMode: Text.ProportionalHeight
|
||||
horizontalAlignment: Text.AlignHCenter
|
||||
}
|
||||
|
||||
ShortcutText {
|
||||
id: mainTextContainer
|
||||
anchors {
|
||||
|
@ -97,7 +113,6 @@ Item {
|
|||
}
|
||||
|
||||
visible: false
|
||||
|
||||
text: qsTr("Username or password incorrect.")
|
||||
wrapMode: Text.WordWrap
|
||||
color: hifi.colors.redAccent
|
||||
|
@ -117,22 +132,21 @@ Item {
|
|||
}
|
||||
spacing: 2 * hifi.dimensions.contentSpacing.y
|
||||
|
||||
|
||||
TextField {
|
||||
id: usernameField
|
||||
text: Settings.getValue("wallet/savedUsername", "");
|
||||
width: parent.width
|
||||
focus: true
|
||||
label: "Username or Email"
|
||||
placeholderText: "Username or Email"
|
||||
activeFocusOnPress: true
|
||||
|
||||
ShortcutText {
|
||||
z: 10
|
||||
y: usernameField.height
|
||||
anchors {
|
||||
left: usernameField.left
|
||||
top: usernameField.top
|
||||
leftMargin: usernameField.textFieldLabel.contentWidth + 10
|
||||
topMargin: -19
|
||||
right: usernameField.right
|
||||
top: usernameField.bottom
|
||||
topMargin: 4
|
||||
}
|
||||
|
||||
text: "<a href='https://highfidelity.com/users/password/new'>Forgot Username?</a>"
|
||||
|
@ -143,26 +157,32 @@ Item {
|
|||
|
||||
onLinkActivated: loginDialog.openUrl(link)
|
||||
}
|
||||
|
||||
onFocusChanged: {
|
||||
root.text = "";
|
||||
}
|
||||
Component.onCompleted: {
|
||||
var savedUsername = Settings.getValue("wallet/savedUsername", "");
|
||||
usernameField.text = savedUsername === "Unknown user" ? "" : savedUsername;
|
||||
}
|
||||
}
|
||||
|
||||
TextField {
|
||||
id: passwordField
|
||||
width: parent.width
|
||||
|
||||
label: "Password"
|
||||
echoMode: showPassword.checked ? TextInput.Normal : TextInput.Password
|
||||
placeholderText: "Password"
|
||||
activeFocusOnPress: true
|
||||
echoMode: TextInput.Password
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
|
||||
ShortcutText {
|
||||
id: forgotPasswordShortcut
|
||||
y: passwordField.height
|
||||
z: 10
|
||||
anchors {
|
||||
left: passwordField.left
|
||||
top: passwordField.top
|
||||
leftMargin: passwordField.textFieldLabel.contentWidth + 10
|
||||
topMargin: -19
|
||||
right: passwordField.right
|
||||
top: passwordField.bottom
|
||||
topMargin: 4
|
||||
}
|
||||
|
||||
text: "<a href='https://highfidelity.com/users/password/new'>Forgot Password?</a>"
|
||||
|
@ -179,12 +199,45 @@ Item {
|
|||
root.isPassword = true;
|
||||
}
|
||||
|
||||
Keys.onReturnPressed: linkAccountBody.login()
|
||||
}
|
||||
Rectangle {
|
||||
id: showPasswordHitbox
|
||||
z: 10
|
||||
x: passwordField.width - ((passwordField.height) * 31 / 23)
|
||||
width: parent.width - (parent.width - (parent.height * 31/16))
|
||||
height: parent.height
|
||||
anchors {
|
||||
right: parent.right
|
||||
}
|
||||
color: "transparent"
|
||||
|
||||
CheckBox {
|
||||
id: showPassword
|
||||
text: "Show password"
|
||||
Image {
|
||||
id: showPasswordImage
|
||||
y: (passwordField.height - (passwordField.height * 16 / 23)) / 2
|
||||
width: passwordField.width - (passwordField.width - (((passwordField.height) * 31/23)))
|
||||
height: passwordField.height * 16 / 23
|
||||
anchors {
|
||||
right: parent.right
|
||||
rightMargin: 3
|
||||
}
|
||||
source: "../../images/eyeOpen.svg"
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
id: passwordFieldMouseArea
|
||||
anchors.fill: parent
|
||||
acceptedButtons: Qt.LeftButton
|
||||
property bool showPassword: false
|
||||
onClicked: {
|
||||
showPassword = !showPassword;
|
||||
passwordField.echoMode = showPassword ? TextInput.Normal : TextInput.Password;
|
||||
showPasswordImage.source = showPassword ? "../../images/eyeClosed.svg" : "../../images/eyeOpen.svg";
|
||||
showPasswordImage.height = showPassword ? passwordField.height : passwordField.height * 16 / 23;
|
||||
showPasswordImage.y = showPassword ? 0 : (passwordField.height - showPasswordImage.height) / 2;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Keys.onReturnPressed: linkAccountBody.login()
|
||||
}
|
||||
|
||||
InfoItem {
|
||||
|
@ -206,6 +259,26 @@ Item {
|
|||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
|
||||
CheckBox {
|
||||
id: autoLogoutCheckbox
|
||||
checked: !Settings.getValue("wallet/autoLogout", true)
|
||||
text: "Keep me signed in"
|
||||
boxSize: 20;
|
||||
labelFontSize: 15
|
||||
color: hifi.colors.black
|
||||
onCheckedChanged: {
|
||||
Settings.setValue("wallet/autoLogout", !checked);
|
||||
if (checked) {
|
||||
Settings.setValue("wallet/savedUsername", Account.username);
|
||||
} else {
|
||||
Settings.setValue("wallet/savedUsername", "");
|
||||
}
|
||||
}
|
||||
Component.onDestruction: {
|
||||
Settings.setValue("wallet/autoLogout", !checked);
|
||||
}
|
||||
}
|
||||
|
||||
Button {
|
||||
id: linkAccountButton
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
|
@ -216,12 +289,6 @@ Item {
|
|||
|
||||
onClicked: linkAccountBody.login()
|
||||
}
|
||||
|
||||
Button {
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
text: qsTr("Cancel")
|
||||
onClicked: root.tryDestroy()
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
|
@ -234,7 +301,7 @@ Item {
|
|||
RalewaySemiBold {
|
||||
size: hifi.fontSizes.inputLabel
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
text: qsTr("Don't have an account?")
|
||||
text: qsTr("New to High Fidelity?")
|
||||
}
|
||||
|
||||
Button {
|
||||
|
@ -279,7 +346,15 @@ Item {
|
|||
target: loginDialog
|
||||
onHandleLoginCompleted: {
|
||||
console.log("Login Succeeded, linking steam account")
|
||||
|
||||
var poppedUp = Settings.getValue("loginDialogPoppedUp", false);
|
||||
if (poppedUp) {
|
||||
console.log("[ENCOURAGELOGINDIALOG]: logging in")
|
||||
var data = {
|
||||
"action": "user logged in"
|
||||
};
|
||||
UserActivityLogger.logAction("encourageLoginDialog", data);
|
||||
Settings.setValue("loginDialogPoppedUp", false);
|
||||
}
|
||||
if (loginDialog.isSteamRunning()) {
|
||||
loginDialog.linkSteam()
|
||||
} else {
|
||||
|
@ -290,6 +365,15 @@ Item {
|
|||
}
|
||||
onHandleLoginFailed: {
|
||||
console.log("Login Failed")
|
||||
var poppedUp = Settings.getValue("loginDialogPoppedUp", false);
|
||||
if (poppedUp) {
|
||||
console.log("[ENCOURAGELOGINDIALOG]: failed logging in")
|
||||
var data = {
|
||||
"action": "user failed logging in"
|
||||
};
|
||||
UserActivityLogger.logAction("encourageLoginDialog", data);
|
||||
Settings.setValue("loginDialogPoppedUp", false);
|
||||
}
|
||||
mainTextContainer.visible = true
|
||||
toggleLoading(false)
|
||||
}
|
||||
|
|
|
@ -79,7 +79,7 @@ StackView {
|
|||
return;
|
||||
}
|
||||
location.text = targetString;
|
||||
toggleOrGo(true, targetString);
|
||||
toggleOrGo(targetString, true);
|
||||
clearAddressLineTimer.start();
|
||||
}
|
||||
|
||||
|
@ -399,7 +399,7 @@ StackView {
|
|||
}
|
||||
}
|
||||
|
||||
function toggleOrGo(fromSuggestions, address) {
|
||||
function toggleOrGo(address, fromSuggestions) {
|
||||
if (address !== undefined && address !== "") {
|
||||
addressBarDialog.loadAddress(address, fromSuggestions);
|
||||
clearAddressLineTimer.start();
|
||||
|
|
|
@ -94,5 +94,25 @@ Frame {
|
|||
color: hifi.colors.lightGray
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
GlyphButton {
|
||||
id: closeButton
|
||||
visible: window.closeButtonVisible
|
||||
width: 30
|
||||
y: -hifi.dimensions.modalDialogTitleHeight
|
||||
anchors {
|
||||
top: parent.top
|
||||
right: parent.right
|
||||
topMargin: 10
|
||||
rightMargin: 10
|
||||
}
|
||||
glyph: hifi.glyphs.close
|
||||
size: 23
|
||||
onClicked: {
|
||||
window.clickedCloseButton = true;
|
||||
window.destroy();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -19,6 +19,9 @@ ScrollingWindow {
|
|||
destroyOnHidden: true
|
||||
frame: ModalFrame { }
|
||||
|
||||
property bool closeButtonVisible: false
|
||||
// only applicable for if close button is visible.
|
||||
property bool clickedCloseButton: false
|
||||
property int colorScheme: hifi.colorSchemes.light
|
||||
property bool draggable: false
|
||||
|
||||
|
|
934
interface/resources/serverless/redirect.json
Normal file
934
interface/resources/serverless/redirect.json
Normal file
|
@ -0,0 +1,934 @@
|
|||
{
|
||||
"DataVersion": 0,
|
||||
"Paths":
|
||||
{
|
||||
"/": "/4,1.4,4/0,0.49544,0,0.868645"
|
||||
},
|
||||
"Entities": [
|
||||
{
|
||||
"clientOnly": false,
|
||||
"collidesWith": "static,dynamic,kinematic,otherAvatar,",
|
||||
"collisionMask": 23,
|
||||
"created": "2018-09-05T18:13:00Z",
|
||||
"dimensions": {
|
||||
"blue": 1.159199833869934,
|
||||
"green": 2.8062009811401367,
|
||||
"red": 1.6216505765914917,
|
||||
"x": 1.6216505765914917,
|
||||
"y": 2.8062009811401367,
|
||||
"z": 1.159199833869934
|
||||
},
|
||||
"id": "{d0ed60b8-9174-4c56-8e78-2c5399329ae0}",
|
||||
"lastEdited": 1536171372916208,
|
||||
"lastEditedBy": "{151cb20e-715a-4c80-aa0d-5b58b1c8a0c9}",
|
||||
"locked": true,
|
||||
"name": "Try Again Zone",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue":4.015342712402344,
|
||||
"green":1.649999976158142,
|
||||
"red":2.00921893119812,
|
||||
"x":2.00921893119812,
|
||||
"y":1.649999976158142,
|
||||
"z":4.015342712402344
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 3.4421300888061523,
|
||||
"x": 1.6001315116882324,
|
||||
"y": -0.07100248336791992,
|
||||
"z": 0.14220571517944336
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.9914448857307434,
|
||||
"x": 0,
|
||||
"y": -0.13052619993686676,
|
||||
"z": 0
|
||||
},
|
||||
"script": "https://hifi-content.s3.amazonaws.com/wayne/404redirectionScripts/zoneTryAgainEntityScript.js",
|
||||
"shapeType": "box",
|
||||
"type": "Zone",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 255
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 8.645400047302246,
|
||||
"green": 0.20000000298023224,
|
||||
"red": 20.025121688842773,
|
||||
"x": 20.025121688842773,
|
||||
"y": 0.20000000298023224,
|
||||
"z": 8.645400047302246
|
||||
},
|
||||
"id": "{e44fb546-b34a-4966-9b11-73556f800d21}",
|
||||
"lastEdited": 1536107948776951,
|
||||
"lastEditedBy": "{ce82d352-3002-44ae-9b76-66492989a1db}",
|
||||
"locked": true,
|
||||
"name": "ceiling",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 4.846520900726318,
|
||||
"green": 2.912982940673828,
|
||||
"red": 5.739595890045166,
|
||||
"x": 5.739595890045166,
|
||||
"y": 2.912982940673828,
|
||||
"z": 4.846520900726318
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 21.812576293945312,
|
||||
"x": -5.16669225692749,
|
||||
"y": -7.993305206298828,
|
||||
"z": -6.059767246246338
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.970295786857605,
|
||||
"x": 0,
|
||||
"y": -0.24192190170288086,
|
||||
"z": 0
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}",
|
||||
"visible": false
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 6.9401350021362305,
|
||||
"green": 0.04553089290857315,
|
||||
"red": 7.004304885864258,
|
||||
"x": 7.004304885864258,
|
||||
"y": 0.04553089290857315,
|
||||
"z": 6.9401350021362305
|
||||
},
|
||||
"id": "{8cd93fe5-16c0-44b7-b1e9-e7e06c4e9228}",
|
||||
"lastEdited": 1536107948774796,
|
||||
"lastEditedBy": "{4eecd88f-ef9b-4a83-bb9a-7f7496209c6b}",
|
||||
"locked": true,
|
||||
"name": "floor",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 3.6175529956817627,
|
||||
"green": 0,
|
||||
"red": 4.102385997772217,
|
||||
"x": 4.102385997772217,
|
||||
"y": 0,
|
||||
"z": 3.6175529956817627
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 9.860417366027832,
|
||||
"x": -0.8278226852416992,
|
||||
"y": -4.930208683013916,
|
||||
"z": -1.3126556873321533
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.8660253882408142,
|
||||
"x": -1.5922749298624694e-05,
|
||||
"y": 0.5,
|
||||
"z": -4.572480611386709e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 11.117486953735352,
|
||||
"green": 3.580313205718994,
|
||||
"red": 0.20000000298023224,
|
||||
"x": 0.20000000298023224,
|
||||
"y": 3.580313205718994,
|
||||
"z": 11.117486953735352
|
||||
},
|
||||
"id": "{147272dc-a344-4171-9621-efc1c2095997}",
|
||||
"lastEdited": 1536107948776823,
|
||||
"lastEditedBy": "{ce82d352-3002-44ae-9b76-66492989a1db}",
|
||||
"locked": true,
|
||||
"name": "leftWall",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 6.1806135177612305,
|
||||
"green": 1.0066027641296387,
|
||||
"red": 1.4690406322479248,
|
||||
"x": 1.4690406322479248,
|
||||
"y": 1.0066027641296387,
|
||||
"z": 6.1806135177612305
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 11.681488037109375,
|
||||
"x": -4.371703147888184,
|
||||
"y": -4.834141254425049,
|
||||
"z": 0.33986949920654297
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.8637980222702026,
|
||||
"x": -4.57763671875e-05,
|
||||
"y": 0.5038070678710938,
|
||||
"z": -1.52587890625e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}",
|
||||
"visible": false
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 11.117486953735352,
|
||||
"green": 3.580313205718994,
|
||||
"red": 0.20000000298023224,
|
||||
"x": 0.20000000298023224,
|
||||
"y": 3.580313205718994,
|
||||
"z": 11.117486953735352
|
||||
},
|
||||
"id": "{5f2b89b8-47e3-4915-a966-d46307a40f06}",
|
||||
"lastEdited": 1536107948774605,
|
||||
"lastEditedBy": "{ce82d352-3002-44ae-9b76-66492989a1db}",
|
||||
"locked": true,
|
||||
"name": "backWall",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 5.268576622009277,
|
||||
"green": 1.0066027641296387,
|
||||
"red": 6.093774318695068,
|
||||
"x": 6.093774318695068,
|
||||
"y": 1.0066027641296387,
|
||||
"z": 5.268576622009277
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 11.681488037109375,
|
||||
"x": 0.25303030014038086,
|
||||
"y": -4.834141254425049,
|
||||
"z": -0.5721673965454102
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.9662165641784668,
|
||||
"x": -4.57763671875e-05,
|
||||
"y": -0.2576791048049927,
|
||||
"z": 1.52587890625e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}",
|
||||
"visible": false
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 14.40000057220459,
|
||||
"green": 14.40000057220459,
|
||||
"red": 14.40000057220459,
|
||||
"x": 14.40000057220459,
|
||||
"y": 14.40000057220459,
|
||||
"z": 14.40000057220459
|
||||
},
|
||||
"id": "{baf96345-8f68-4068-af4c-3c690035852a}",
|
||||
"lastEdited": 1536107948775591,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"locked": true,
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 2.3440732955932617,
|
||||
"green": 1.6162219047546387,
|
||||
"red": 1.8748211860656738,
|
||||
"x": 1.8748211860656738,
|
||||
"y": 1.6162219047546387,
|
||||
"z": 2.3440732955932617
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 24.9415340423584,
|
||||
"x": -10.595945358276367,
|
||||
"y": -10.854545593261719,
|
||||
"z": -10.126693725585938
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.8697794675827026,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": 0.4933699369430542,
|
||||
"z": -4.57763671875e-05
|
||||
},
|
||||
"shapeType": "box",
|
||||
"skyboxMode": "enabled",
|
||||
"type": "Zone",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"alpha": 0,
|
||||
"alphaFinish": 0,
|
||||
"alphaStart": 1,
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 211,
|
||||
"green": 227,
|
||||
"red": 104
|
||||
},
|
||||
"colorFinish": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"colorStart": {
|
||||
"blue": 211,
|
||||
"green": 227,
|
||||
"red": 104,
|
||||
"x": 104,
|
||||
"y": 227,
|
||||
"z": 211
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 2.5,
|
||||
"green": 2.5,
|
||||
"red": 2.5,
|
||||
"x": 2.5,
|
||||
"y": 2.5,
|
||||
"z": 2.5
|
||||
},
|
||||
"emitAcceleration": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"emitDimensions": {
|
||||
"blue": 1,
|
||||
"green": 1,
|
||||
"red": 1,
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1
|
||||
},
|
||||
"emitOrientation": {
|
||||
"w": 0.9993909597396851,
|
||||
"x": 0.034897372126579285,
|
||||
"y": -1.525880907138344e-05,
|
||||
"z": -1.525880907138344e-05
|
||||
},
|
||||
"emitRate": 2,
|
||||
"emitSpeed": 0,
|
||||
"id": "{639a51f0-8613-4e46-bc7e-fef24597df73}",
|
||||
"lastEdited": 1536107948776693,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"lifespan": 10,
|
||||
"locked": true,
|
||||
"maxParticles": 40,
|
||||
"name": "Rays",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"particleRadius": 0.75,
|
||||
"polarFinish": 3.1415927410125732,
|
||||
"position": {
|
||||
"blue": 1.3553659915924072,
|
||||
"green": 1.2890124320983887,
|
||||
"red": 2.5663273334503174,
|
||||
"x": 2.5663273334503174,
|
||||
"y": 1.2890124320983887,
|
||||
"z": 1.3553659915924072
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 4.330127239227295,
|
||||
"x": 0.4012637138366699,
|
||||
"y": -0.8760511875152588,
|
||||
"z": -0.8096976280212402
|
||||
},
|
||||
"radiusFinish": 0.10000000149011612,
|
||||
"radiusStart": 0,
|
||||
"rotation": {
|
||||
"w": 0.9803768396377563,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": 0.19707024097442627,
|
||||
"z": -7.62939453125e-05
|
||||
},
|
||||
"speedSpread": 0,
|
||||
"spinFinish": null,
|
||||
"spinStart": null,
|
||||
"textures": "http://hifi-content.s3.amazonaws.com/alexia/Models/Portal/stripe.png",
|
||||
"type": "ParticleEffect",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"alpha": 0,
|
||||
"alphaFinish": 0,
|
||||
"alphaStart": 1,
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 255,
|
||||
"green": 205,
|
||||
"red": 3
|
||||
},
|
||||
"colorFinish": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"colorStart": {
|
||||
"blue": 255,
|
||||
"green": 204,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 204,
|
||||
"z": 255
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 2.5,
|
||||
"green": 2.5,
|
||||
"red": 2.5,
|
||||
"x": 2.5,
|
||||
"y": 2.5,
|
||||
"z": 2.5
|
||||
},
|
||||
"emitAcceleration": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"emitDimensions": {
|
||||
"blue": 1,
|
||||
"green": 1,
|
||||
"red": 1,
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1
|
||||
},
|
||||
"emitOrientation": {
|
||||
"w": 0.9993909597396851,
|
||||
"x": 0.034897372126579285,
|
||||
"y": -1.525880907138344e-05,
|
||||
"z": -1.525880907138344e-05
|
||||
},
|
||||
"emitRate": 2,
|
||||
"emitSpeed": 0,
|
||||
"emitterShouldTrail": true,
|
||||
"id": "{e62ced49-fa18-4ae1-977f-abef5bc0f3ba}",
|
||||
"lastEdited": 1536107948775366,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"lifespan": 10,
|
||||
"locked": true,
|
||||
"maxParticles": 40,
|
||||
"name": "Rays",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"particleRadius": 0.75,
|
||||
"polarFinish": 3.1415927410125732,
|
||||
"position": {
|
||||
"blue": 3.814434051513672,
|
||||
"green": 1.2890124320983887,
|
||||
"red": 1.2254328727722168,
|
||||
"x": 1.2254328727722168,
|
||||
"y": 1.2890124320983887,
|
||||
"z": 3.814434051513672
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 4.330127239227295,
|
||||
"x": -0.9396307468414307,
|
||||
"y": -0.8760511875152588,
|
||||
"z": 1.6493704319000244
|
||||
},
|
||||
"radiusFinish": 0.10000000149011612,
|
||||
"radiusStart": 0,
|
||||
"rotation": {
|
||||
"w": 0.9594720602035522,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": 0.28178834915161133,
|
||||
"z": -4.57763671875e-05
|
||||
},
|
||||
"speedSpread": 0,
|
||||
"spinFinish": null,
|
||||
"spinStart": null,
|
||||
"textures": "http://hifi-content.s3.amazonaws.com/alexia/Models/Portal/stripe.png",
|
||||
"type": "ParticleEffect",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"alpha": 0,
|
||||
"alphaFinish": 0,
|
||||
"alphaStart": 0.25,
|
||||
"clientOnly": false,
|
||||
"colorFinish": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"colorStart": {
|
||||
"blue": 255,
|
||||
"green": 255,
|
||||
"red": 255,
|
||||
"x": 255,
|
||||
"y": 255,
|
||||
"z": 255
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 13.24000072479248,
|
||||
"green": 13.24000072479248,
|
||||
"red": 13.24000072479248,
|
||||
"x": 13.24000072479248,
|
||||
"y": 13.24000072479248,
|
||||
"z": 13.24000072479248
|
||||
},
|
||||
"emitAcceleration": {
|
||||
"blue": 0,
|
||||
"green": 0.10000000149011612,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0.10000000149011612,
|
||||
"z": 0
|
||||
},
|
||||
"emitDimensions": {
|
||||
"blue": 1,
|
||||
"green": 1,
|
||||
"red": 1,
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1
|
||||
},
|
||||
"emitOrientation": {
|
||||
"w": 1,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": -1.52587890625e-05,
|
||||
"z": -1.52587890625e-05
|
||||
},
|
||||
"emitRate": 6,
|
||||
"emitSpeed": 0,
|
||||
"id": "{298c0571-cbd8-487b-8640-64037d6a8414}",
|
||||
"lastEdited": 1536107948776382,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"lifespan": 10,
|
||||
"locked": true,
|
||||
"maxParticles": 10,
|
||||
"name": "Stars",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"particleRadius": 0.07000000029802322,
|
||||
"polarFinish": 3.1415927410125732,
|
||||
"position": {
|
||||
"blue": 1.3712034225463867,
|
||||
"green": 0.3698839843273163,
|
||||
"red": 2.6216418743133545,
|
||||
"x": 2.6216418743133545,
|
||||
"y": 0.3698839843273163,
|
||||
"z": 1.3712034225463867
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 22.932353973388672,
|
||||
"x": -8.844534873962402,
|
||||
"y": -11.096293449401855,
|
||||
"z": -10.09497356414795
|
||||
},
|
||||
"radiusFinish": 0,
|
||||
"radiusStart": 0,
|
||||
"rotation": {
|
||||
"w": 0.9852597713470459,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": -0.17106890678405762,
|
||||
"z": -7.62939453125e-05
|
||||
},
|
||||
"speedSpread": 0,
|
||||
"spinFinish": null,
|
||||
"spinStart": null,
|
||||
"textures": "http://hifi-content.s3.amazonaws.com/alexia/Models/Portal/star.png",
|
||||
"type": "ParticleEffect",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 2.1097896099090576,
|
||||
"green": 0.04847164824604988,
|
||||
"red": 1.458284616470337,
|
||||
"x": 1.458284616470337,
|
||||
"y": 0.04847164824604988,
|
||||
"z": 2.1097896099090576
|
||||
},
|
||||
"id": "{6625dbb8-ff25-458d-a92e-644b58460604}",
|
||||
"lastEdited": 1536107948776195,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"locked": true,
|
||||
"modelURL": "http://hifi-content.s3.amazonaws.com/alexia/Models/Portal/portal1.fbx",
|
||||
"name": "Try Again",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 3.946338653564453,
|
||||
"green": 0.09449335932731628,
|
||||
"red": 1.594836711883545,
|
||||
"x": 1.594836711883545,
|
||||
"y": 0.09449335932731628,
|
||||
"z": 3.946338653564453
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 2.5651814937591553,
|
||||
"x": 0.3122459650039673,
|
||||
"y": -1.188097357749939,
|
||||
"z": 2.663747787475586
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.8220492601394653,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": 0.5693598985671997,
|
||||
"z": -0.0001068115234375
|
||||
},
|
||||
"script": "https://hifi-content.s3.amazonaws.com/wayne/404redirectionScripts/tryAgainEntityScript.js",
|
||||
"shapeType": "static-mesh",
|
||||
"type": "Model",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 0.06014331430196762,
|
||||
"green": 2.582186460494995,
|
||||
"red": 2.582186698913574,
|
||||
"x": 2.582186698913574,
|
||||
"y": 2.582186460494995,
|
||||
"z": 0.06014331430196762
|
||||
},
|
||||
"id": "{dfe92dce-f09d-4e9e-b3ed-c68ecd4d476f}",
|
||||
"lastEdited": 1536108160862286,
|
||||
"lastEditedBy": "{4656d4a8-5e61-4230-ab34-2888d7945bd6}",
|
||||
"modelURL": "",
|
||||
"name": "Oops Dialog",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 1.45927095413208,
|
||||
"green": 1.6763916015625,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 1.6763916015625,
|
||||
"z": 1.45927095413208
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 3.6522583961486816,
|
||||
"x": -1.8261291980743408,
|
||||
"y": -0.14973759651184082,
|
||||
"z": -0.36685824394226074
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.8684672117233276,
|
||||
"x": -4.57763671875e-05,
|
||||
"y": 0.4957197904586792,
|
||||
"z": -7.62939453125e-05
|
||||
},
|
||||
"script": "https://hifi-content.s3.amazonaws.com/wayne/404redirectionScripts/oopsEntityScript.js",
|
||||
"scriptTimestamp": 1536102551825,
|
||||
"type": "Model",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 11.117486953735352,
|
||||
"green": 3.580313205718994,
|
||||
"red": 0.20000000298023224,
|
||||
"x": 0.20000000298023224,
|
||||
"y": 3.580313205718994,
|
||||
"z": 11.117486953735352
|
||||
},
|
||||
"id": "{144a8cf4-b0e8-489a-9403-d74d4dc4cb3e}",
|
||||
"lastEdited": 1536107948775774,
|
||||
"lastEditedBy": "{ce82d352-3002-44ae-9b76-66492989a1db}",
|
||||
"locked": true,
|
||||
"name": "rightWall",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 0,
|
||||
"green": 1.0061144828796387,
|
||||
"red": 4.965089321136475,
|
||||
"x": 4.965089321136475,
|
||||
"y": 1.0061144828796387,
|
||||
"z": 0
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 11.681488037109375,
|
||||
"x": -0.8756546974182129,
|
||||
"y": -4.834629535675049,
|
||||
"z": -5.8407440185546875
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.8637980222702026,
|
||||
"x": -4.57763671875e-05,
|
||||
"y": 0.5038070678710938,
|
||||
"z": -1.52587890625e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}",
|
||||
"visible": false
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"collidesWith": "static,dynamic,kinematic,otherAvatar,",
|
||||
"collisionMask": 23,
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 1.159199833869934,
|
||||
"green": 2.8062009811401367,
|
||||
"red": 1.6216505765914917,
|
||||
"x": 1.6216505765914917,
|
||||
"y": 2.8062009811401367,
|
||||
"z": 1.159199833869934
|
||||
},
|
||||
"id": "{37f53408-3d0c-42a5-9891-e6c40a227349}",
|
||||
"lastEdited": 1536107948775010,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"locked": true,
|
||||
"name": "Back Zone",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 1.8632707595825195,
|
||||
"green": 1.6500625610351562,
|
||||
"red": 3.3211965560913086,
|
||||
"x": 3.3211965560913086,
|
||||
"y": 1.6500625610351562,
|
||||
"z": 1.8632707595825195
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 3.4421300888061523,
|
||||
"x": 1.6001315116882324,
|
||||
"y": -0.07100248336791992,
|
||||
"z": 0.14220571517944336
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.9304176568984985,
|
||||
"x": 0,
|
||||
"y": -0.36650121212005615,
|
||||
"z": 0
|
||||
},
|
||||
"script": "https://hifi-content.s3.amazonaws.com/wayne/404redirectionScripts/zoneBackEntityScript.js",
|
||||
"shapeType": "box",
|
||||
"type": "Zone",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"color": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 11.117486953735352,
|
||||
"green": 3.580313205718994,
|
||||
"red": 0.20000000298023224,
|
||||
"x": 0.20000000298023224,
|
||||
"y": 3.580313205718994,
|
||||
"z": 11.117486953735352
|
||||
},
|
||||
"id": "{aa6e680c-6750-4776-95bc-ef3118cace5c}",
|
||||
"lastEdited": 1536107948775945,
|
||||
"lastEditedBy": "{ce82d352-3002-44ae-9b76-66492989a1db}",
|
||||
"locked": true,
|
||||
"name": "frontWall",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 2.662257671356201,
|
||||
"green": 1.0063786506652832,
|
||||
"red": 1.4868733882904053,
|
||||
"x": 1.4868733882904053,
|
||||
"y": 1.0063786506652832,
|
||||
"z": 2.662257671356201
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 11.681488037109375,
|
||||
"x": -4.353870391845703,
|
||||
"y": -4.834365367889404,
|
||||
"z": -3.1784863471984863
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.9666743278503418,
|
||||
"x": -4.57763671875e-05,
|
||||
"y": -0.2560006380081177,
|
||||
"z": 1.52587890625e-05
|
||||
},
|
||||
"shape": "Cube",
|
||||
"type": "Box",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}",
|
||||
"visible": false
|
||||
},
|
||||
{
|
||||
"clientOnly": false,
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 2.1097896099090576,
|
||||
"green": 0.04847164824604988,
|
||||
"red": 1.458284616470337,
|
||||
"x": 1.458284616470337,
|
||||
"y": 0.04847164824604988,
|
||||
"z": 2.1097896099090576
|
||||
},
|
||||
"id": "{303631f1-04f3-42a6-b8a8-8dd4b65d1231}",
|
||||
"lastEdited": 1536107948776513,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"locked": true,
|
||||
"modelURL": "http://hifi-content.s3.amazonaws.com/alexia/Models/Portal/portal2.fbx",
|
||||
"name": "Back",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"position": {
|
||||
"blue": 1.5835940837860107,
|
||||
"green": 0.09449335932731628,
|
||||
"red": 3.028078079223633,
|
||||
"x": 3.028078079223633,
|
||||
"y": 0.09449335932731628,
|
||||
"z": 1.5835940837860107
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 2.5651814937591553,
|
||||
"x": 1.7454873323440552,
|
||||
"y": -1.188097357749939,
|
||||
"z": 0.3010033369064331
|
||||
},
|
||||
"rotation": {
|
||||
"w": 0.9084458351135254,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": 0.4179598093032837,
|
||||
"z": -0.0001068115234375
|
||||
},
|
||||
"script": "https://hifi-content.s3.amazonaws.com/wayne/404redirectionScripts/backEntityScript.js",
|
||||
"scriptTimestamp": 1535751754379,
|
||||
"shapeType": "static-mesh",
|
||||
"type": "Model",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
},
|
||||
{
|
||||
"alpha": 0,
|
||||
"alphaFinish": 0,
|
||||
"alphaStart": 0.25,
|
||||
"clientOnly": false,
|
||||
"colorFinish": {
|
||||
"blue": 0,
|
||||
"green": 0,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0,
|
||||
"z": 0
|
||||
},
|
||||
"colorStart": {
|
||||
"blue": 255,
|
||||
"green": 255,
|
||||
"red": 255,
|
||||
"x": 255,
|
||||
"y": 255,
|
||||
"z": 255
|
||||
},
|
||||
"created": "2018-09-05T00:40:03Z",
|
||||
"dimensions": {
|
||||
"blue": 13.24000072479248,
|
||||
"green": 13.24000072479248,
|
||||
"red": 13.24000072479248,
|
||||
"x": 13.24000072479248,
|
||||
"y": 13.24000072479248,
|
||||
"z": 13.24000072479248
|
||||
},
|
||||
"emitAcceleration": {
|
||||
"blue": 0,
|
||||
"green": 0.10000000149011612,
|
||||
"red": 0,
|
||||
"x": 0,
|
||||
"y": 0.10000000149011612,
|
||||
"z": 0
|
||||
},
|
||||
"emitDimensions": {
|
||||
"blue": 1,
|
||||
"green": 1,
|
||||
"red": 1,
|
||||
"x": 1,
|
||||
"y": 1,
|
||||
"z": 1
|
||||
},
|
||||
"emitOrientation": {
|
||||
"w": 1,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": -1.52587890625e-05,
|
||||
"z": -1.52587890625e-05
|
||||
},
|
||||
"emitRate": 6,
|
||||
"emitSpeed": 0,
|
||||
"emitterShouldTrail": true,
|
||||
"id": "{8ded39e6-303c-48f2-be79-81b715cca9f7}",
|
||||
"lastEdited": 1536107948777127,
|
||||
"lastEditedBy": "{b5bba536-25e5-4b12-a1be-5c7cd196a06a}",
|
||||
"lifespan": 10,
|
||||
"locked": true,
|
||||
"maxParticles": 10,
|
||||
"name": "Stars",
|
||||
"owningAvatarID": "{00000000-0000-0000-0000-000000000000}",
|
||||
"particleRadius": 0.07000000029802322,
|
||||
"polarFinish": 3.1415927410125732,
|
||||
"position": {
|
||||
"blue": 3.78922963142395,
|
||||
"green": 0.3698839843273163,
|
||||
"red": 1.1863799095153809,
|
||||
"x": 1.1863799095153809,
|
||||
"y": 0.3698839843273163,
|
||||
"z": 3.78922963142395
|
||||
},
|
||||
"queryAACube": {
|
||||
"scale": 22.932353973388672,
|
||||
"x": -10.279796600341797,
|
||||
"y": -11.096293449401855,
|
||||
"z": -7.676947593688965
|
||||
},
|
||||
"radiusFinish": 0,
|
||||
"radiusStart": 0,
|
||||
"rotation": {
|
||||
"w": 0.996429443359375,
|
||||
"x": -1.52587890625e-05,
|
||||
"y": -0.08442819118499756,
|
||||
"z": -4.57763671875e-05
|
||||
},
|
||||
"speedSpread": 0,
|
||||
"spinFinish": null,
|
||||
"spinStart": null,
|
||||
"textures": "http://hifi-content.s3.amazonaws.com/alexia/Models/Portal/star.png",
|
||||
"type": "ParticleEffect",
|
||||
"userData": "{\"grabbableKey\":{\"grabbable\":false}}"
|
||||
}
|
||||
],
|
||||
"Id": "{18abccad-2d57-4176-9d89-24dc424916f5}",
|
||||
"Version": 93
|
||||
}
|
|
@ -1186,13 +1186,13 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
const DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
|
||||
connect(&domainHandler, SIGNAL(domainURLChanged(QUrl)), SLOT(domainURLChanged(QUrl)));
|
||||
connect(&domainHandler, SIGNAL(redirectToErrorDomainURL(QUrl)), SLOT(goToErrorDomainURL(QUrl)));
|
||||
connect(&domainHandler, &DomainHandler::domainURLChanged, [](QUrl domainURL){
|
||||
setCrashAnnotation("domain", domainURL.toString().toStdString());
|
||||
});
|
||||
connect(&domainHandler, SIGNAL(resetting()), SLOT(resettingDomain()));
|
||||
connect(&domainHandler, SIGNAL(connectedToDomain(QUrl)), SLOT(updateWindowTitle()));
|
||||
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(updateWindowTitle()));
|
||||
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &Application::clearDomainAvatars);
|
||||
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, [this]() {
|
||||
getOverlays().deleteOverlay(getTabletScreenID());
|
||||
getOverlays().deleteOverlay(getTabletHomeButtonID());
|
||||
|
@ -1200,6 +1200,8 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
});
|
||||
connect(&domainHandler, &DomainHandler::domainConnectionRefused, this, &Application::domainConnectionRefused);
|
||||
|
||||
nodeList->getDomainHandler().setErrorDomainURL(QUrl(REDIRECT_HIFI_ADDRESS));
|
||||
|
||||
// We could clear ATP assets only when changing domains, but it's possible that the domain you are connected
|
||||
// to has gone down and switched to a new content set, so when you reconnect the cached ATP assets will no longer be valid.
|
||||
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, DependencyManager::get<ScriptCache>().data(), &ScriptCache::clearATPScriptsFromCache);
|
||||
|
@ -1641,7 +1643,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
audioClient->setMuted(!audioClient->isMuted());
|
||||
} else if (action == controller::toInt(controller::Action::CYCLE_CAMERA)) {
|
||||
cycleCamera();
|
||||
} else if (action == controller::toInt(controller::Action::CONTEXT_MENU)) {
|
||||
} else if (action == controller::toInt(controller::Action::CONTEXT_MENU) && !isInterstitialMode()) {
|
||||
toggleTabletUI();
|
||||
} else if (action == controller::toInt(controller::Action::RETICLE_X)) {
|
||||
auto oldPos = getApplicationCompositor().getReticlePosition();
|
||||
|
@ -2250,6 +2252,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
|
||||
connect(this, &QCoreApplication::aboutToQuit, this, &Application::addAssetToWorldMessageClose);
|
||||
connect(&domainHandler, &DomainHandler::domainURLChanged, this, &Application::addAssetToWorldMessageClose);
|
||||
connect(&domainHandler, &DomainHandler::redirectToErrorDomainURL, this, &Application::addAssetToWorldMessageClose);
|
||||
|
||||
updateSystemTabletMode();
|
||||
|
||||
|
@ -2300,6 +2303,24 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
    connect(&AndroidHelper::instance(), &AndroidHelper::enterForeground, this, &Application::enterForeground);
    AndroidHelper::instance().notifyLoadComplete();
#endif

    static int CHECK_LOGIN_TIMER = 3000;
    QTimer* checkLoginTimer = new QTimer(this);
    checkLoginTimer->setInterval(CHECK_LOGIN_TIMER);
    checkLoginTimer->setSingleShot(true);
    connect(checkLoginTimer, &QTimer::timeout, this, [this]() {
        auto accountManager = DependencyManager::get<AccountManager>();
        auto dialogsManager = DependencyManager::get<DialogsManager>();
        if (!accountManager->isLoggedIn()) {
            Setting::Handle<bool>{"loginDialogPoppedUp", false}.set(true);
            dialogsManager->showLoginDialog();
            QJsonObject loginData = {};
            loginData["action"] = "login dialog shown";
            UserActivityLogger::getInstance().logAction("encourageLoginDialog", loginData);
        }
    });
    Setting::Handle<bool>{"loginDialogPoppedUp", false}.set(false);
    checkLoginTimer->start();
}

void Application::updateVerboseLogging() {
||||
|
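The block above arms a one-shot timer at startup and, if the user is still not authenticated when it fires, pops the login dialog and logs the event. Below is a minimal standalone sketch of that single-shot "check once after N seconds" pattern; the interval value and the loggedIn flag are assumptions standing in for CHECK_LOGIN_TIMER and AccountManager::isLoggedIn().

#include <QCoreApplication>
#include <QTimer>
#include <QDebug>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    // Same shape as the login-encouragement timer: single shot, fixed delay,
    // check a condition when it fires, act once, and record the outcome.
    const int CHECK_LOGIN_INTERVAL_MS = 3000;   // assumed value, mirrors CHECK_LOGIN_TIMER
    bool loggedIn = false;                      // stand-in for AccountManager::isLoggedIn()

    QTimer* checkLoginTimer = new QTimer(&app);
    checkLoginTimer->setInterval(CHECK_LOGIN_INTERVAL_MS);
    checkLoginTimer->setSingleShot(true);
    QObject::connect(checkLoginTimer, &QTimer::timeout, [&]() {
        if (!loggedIn) {
            qDebug() << "not logged in after startup, showing login dialog";
        }
        app.quit();
    });
    checkLoginTimer->start();
    return app.exec();
}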
@ -2433,6 +2454,8 @@ void Application::onAboutToQuit() {
|
|||
// so it's persisted explicitly here
|
||||
Setting::Handle<QString>{ ACTIVE_DISPLAY_PLUGIN_SETTING_NAME }.set(getActiveDisplayPlugin()->getName());
|
||||
|
||||
Setting::Handle<bool>{"loginDialogPoppedUp", false}.set(false);
|
||||
|
||||
getActiveDisplayPlugin()->deactivate();
|
||||
if (_autoSwitchDisplayModeSupportedHMDPlugin
|
||||
&& _autoSwitchDisplayModeSupportedHMDPlugin->isSessionActive()) {
|
||||
|
@ -2993,6 +3016,9 @@ void Application::initializeUi() {
|
|||
if (_window && _window->isFullScreen()) {
|
||||
setFullscreen(nullptr, true);
|
||||
}
|
||||
|
||||
|
||||
setIsInterstitialMode(true);
|
||||
}
|
||||
|
||||
|
||||
|
@ -3471,6 +3497,17 @@ bool Application::isServerlessMode() const {
|
|||
return false;
|
||||
}
|
||||
|
||||
void Application::setIsInterstitialMode(bool interstitialMode) {
|
||||
Settings settings;
|
||||
bool enableInterstitial = settings.value("enableIntersitialMode", false).toBool();
|
||||
if (_interstitialMode != interstitialMode && enableInterstitial) {
|
||||
_interstitialMode = interstitialMode;
|
||||
|
||||
DependencyManager::get<AudioClient>()->setAudioPaused(_interstitialMode);
|
||||
DependencyManager::get<AvatarManager>()->setMyAvatarDataPacketsPaused(_interstitialMode);
|
||||
}
|
||||
}
|
||||
|
||||
void Application::setIsServerlessMode(bool serverlessDomain) {
|
||||
auto tree = getEntities()->getTree();
|
||||
if (tree) {
|
||||
|
@ -3478,9 +3515,9 @@ void Application::setIsServerlessMode(bool serverlessDomain) {
|
|||
    }
}

void Application::loadServerlessDomain(QUrl domainURL) {
void Application::loadServerlessDomain(QUrl domainURL, bool errorDomain) {
    if (QThread::currentThread() != thread()) {
        QMetaObject::invokeMethod(this, "loadServerlessDomain", Q_ARG(QUrl, domainURL));
        QMetaObject::invokeMethod(this, "loadServerlessDomain", Q_ARG(QUrl, domainURL), Q_ARG(bool, errorDomain));
        return;
    }

|
||||
|
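loadServerlessDomain() starts with the usual Qt guard: if it is called from the wrong thread, it queues itself back onto the object's own thread and returns, which is why the new errorDomain flag also has to travel through the Q_ARG list. Here is a minimal sketch of that hop-to-my-thread guard, assuming a made-up DomainLoader class and example URL; it uses the functor overload of QMetaObject::invokeMethod rather than the string-based one the interface uses.

#include <QCoreApplication>
#include <QThread>
#include <QUrl>
#include <QDebug>
#include <thread>

// Hypothetical stand-in for Application::loadServerlessDomain(): if called off the
// object's thread, re-post the call onto that thread and bail out.
class DomainLoader : public QObject {
public:
    void load(const QUrl& domainURL, bool errorDomain = false) {
        if (QThread::currentThread() != thread()) {
            QMetaObject::invokeMethod(this, [=] { load(domainURL, errorDomain); },
                                      Qt::QueuedConnection);
            return;
        }
        qDebug() << "loading" << domainURL << "as error domain:" << errorDomain;
        QCoreApplication::quit();
    }
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    DomainLoader loader;  // lives on the main thread
    std::thread worker([&] { loader.load(QUrl("file:///tmp/redirect.json"), true); });
    worker.join();        // the call above was queued back onto the main thread
    return app.exec();    // the event loop runs the queued load(), which then quits
}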
@ -3512,8 +3549,11 @@ void Application::loadServerlessDomain(QUrl domainURL) {
|
|||
}
|
||||
|
||||
std::map<QString, QString> namedPaths = tmpTree->getNamedPaths();
|
||||
nodeList->getDomainHandler().connectedToServerless(namedPaths);
|
||||
|
||||
if (errorDomain) {
|
||||
nodeList->getDomainHandler().loadedErrorDomain(namedPaths);
|
||||
} else {
|
||||
nodeList->getDomainHandler().connectedToServerless(namedPaths);
|
||||
}
|
||||
|
||||
_fullSceneReceivedCounter++;
|
||||
}
|
||||
|
@ -3748,7 +3788,7 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
|
||||
_controllerScriptingInterface->emitKeyPressEvent(event); // send events to any registered scripts
|
||||
// if one of our scripts has asked to capture this event, then stop processing it
|
||||
if (_controllerScriptingInterface->isKeyCaptured(event)) {
|
||||
if (_controllerScriptingInterface->isKeyCaptured(event) || isInterstitialMode()) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -5535,6 +5575,7 @@ void Application::update(float deltaTime) {
|
|||
return;
|
||||
}
|
||||
|
||||
|
||||
if (!_physicsEnabled) {
|
||||
if (!domainLoadingInProgress) {
|
||||
PROFILE_ASYNC_BEGIN(app, "Scene Loading", "");
|
||||
|
@ -5555,6 +5596,7 @@ void Application::update(float deltaTime) {
|
|||
// scene is ready to compute its collision shape.
|
||||
if (getMyAvatar()->isReadyForPhysics()) {
|
||||
_physicsEnabled = true;
|
||||
setIsInterstitialMode(false);
|
||||
getMyAvatar()->updateMotionBehaviorFromMenu();
|
||||
}
|
||||
}
|
||||
|
@ -5634,7 +5676,7 @@ void Application::update(float deltaTime) {
|
|||
// Transfer the user inputs to the driveKeys
|
||||
// FIXME can we drop drive keys and just have the avatar read the action states directly?
|
||||
myAvatar->clearDriveKeys();
|
||||
if (_myCamera.getMode() != CAMERA_MODE_INDEPENDENT) {
|
||||
if (_myCamera.getMode() != CAMERA_MODE_INDEPENDENT && !isInterstitialMode()) {
|
||||
if (!_controllerScriptingInterface->areActionsCaptured() && _myCamera.getMode() != CAMERA_MODE_MIRROR) {
|
||||
myAvatar->setDriveKey(MyAvatar::TRANSLATE_Z, -1.0f * userInputMapper->getActionState(controller::Action::TRANSLATE_Z));
|
||||
myAvatar->setDriveKey(MyAvatar::TRANSLATE_Y, userInputMapper->getActionState(controller::Action::TRANSLATE_Y));
|
||||
|
@ -5950,7 +5992,7 @@ void Application::update(float deltaTime) {
|
|||
// send packet containing downstream audio stats to the AudioMixer
|
||||
{
|
||||
quint64 sinceLastNack = now - _lastSendDownstreamAudioStats;
|
||||
if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS) {
|
||||
if (sinceLastNack > TOO_LONG_SINCE_LAST_SEND_DOWNSTREAM_AUDIO_STATS && !isInterstitialMode()) {
|
||||
_lastSendDownstreamAudioStats = now;
|
||||
|
||||
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(), "sendDownstreamAudioStatsPacket", Qt::QueuedConnection);
|
||||
|
@ -6113,21 +6155,23 @@ void Application::updateRenderArgs(float deltaTime) {
|
|||
}
|
||||
|
||||
void Application::queryAvatars() {
|
||||
auto avatarPacket = NLPacket::create(PacketType::AvatarQuery);
|
||||
auto destinationBuffer = reinterpret_cast<unsigned char*>(avatarPacket->getPayload());
|
||||
unsigned char* bufferStart = destinationBuffer;
|
||||
if (!isInterstitialMode()) {
|
||||
auto avatarPacket = NLPacket::create(PacketType::AvatarQuery);
|
||||
auto destinationBuffer = reinterpret_cast<unsigned char*>(avatarPacket->getPayload());
|
||||
unsigned char* bufferStart = destinationBuffer;
|
||||
|
||||
uint8_t numFrustums = (uint8_t)_conicalViews.size();
|
||||
memcpy(destinationBuffer, &numFrustums, sizeof(numFrustums));
|
||||
destinationBuffer += sizeof(numFrustums);
|
||||
uint8_t numFrustums = (uint8_t)_conicalViews.size();
|
||||
memcpy(destinationBuffer, &numFrustums, sizeof(numFrustums));
|
||||
destinationBuffer += sizeof(numFrustums);
|
||||
|
||||
for (const auto& view : _conicalViews) {
|
||||
destinationBuffer += view.serialize(destinationBuffer);
|
||||
for (const auto& view : _conicalViews) {
|
||||
destinationBuffer += view.serialize(destinationBuffer);
|
||||
}
|
||||
|
||||
avatarPacket->setPayloadSize(destinationBuffer - bufferStart);
|
||||
|
||||
DependencyManager::get<NodeList>()->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
|
||||
}
|
||||
|
||||
avatarPacket->setPayloadSize(destinationBuffer - bufferStart);
|
||||
|
||||
DependencyManager::get<NodeList>()->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
|
||||
}
|
||||
|
||||
|
||||
|
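The payload built in queryAvatars() is a count byte followed by each conical view serialized back to back, and the whole query is now skipped while in interstitial mode. A plain-C++ sketch of that payload layout, with an invented ConicalView struct standing in for the real view class (which serializes more fields):

#include <cstdint>
#include <cstring>
#include <vector>
#include <cstdio>

// Hypothetical fixed-size view; the real ConicalViewFrustum is richer.
struct ConicalView {
    float x, y, z, radius;
    size_t serialize(uint8_t* dest) const {
        memcpy(dest, this, sizeof(*this));
        return sizeof(*this);
    }
};

int main() {
    std::vector<ConicalView> views = { {0.0f, 1.0f, 0.0f, 10.0f}, {5.0f, 1.0f, 5.0f, 3.0f} };
    std::vector<uint8_t> payload(1 + views.size() * sizeof(ConicalView));

    uint8_t* cursor = payload.data();
    uint8_t numFrustums = (uint8_t)views.size();
    memcpy(cursor, &numFrustums, sizeof(numFrustums));   // one count byte first
    cursor += sizeof(numFrustums);
    for (const auto& view : views) {
        cursor += view.serialize(cursor);                // then each view back to back
    }
    printf("payload size: %zu bytes\n", (size_t)(cursor - payload.data()));
    return 0;
}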
@ -6304,6 +6348,7 @@ void Application::updateWindowTitle() const {
|
|||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
auto isInErrorState = nodeList->getDomainHandler().isInErrorState();
|
||||
|
||||
QString buildVersion = " - "
|
||||
+ (BuildInfo::BUILD_TYPE == BuildInfo::BuildType::Stable ? QString("Version") : QString("Build"))
|
||||
|
@ -6311,14 +6356,19 @@ void Application::updateWindowTitle() const {
|
|||
|
||||
QString loginStatus = accountManager->isLoggedIn() ? "" : " (NOT LOGGED IN)";
|
||||
|
||||
QString connectionStatus = nodeList->getDomainHandler().isConnected() ? "" : " (NOT CONNECTED)";
|
||||
QString connectionStatus = isInErrorState ? " (ERROR CONNECTING)" :
|
||||
nodeList->getDomainHandler().isConnected() ? "" : " (NOT CONNECTED)";
|
||||
QString username = accountManager->getAccountInfo().getUsername();
|
||||
|
||||
setCrashAnnotation("username", username.toStdString());
|
||||
|
||||
QString currentPlaceName;
|
||||
if (isServerlessMode()) {
|
||||
currentPlaceName = "serverless: " + DependencyManager::get<AddressManager>()->getDomainURL().toString();
|
||||
if (isInErrorState) {
|
||||
currentPlaceName = "serverless: " + nodeList->getDomainHandler().getErrorDomainURL().toString();
|
||||
} else {
|
||||
currentPlaceName = "serverless: " + DependencyManager::get<AddressManager>()->getDomainURL().toString();
|
||||
}
|
||||
} else {
|
||||
currentPlaceName = DependencyManager::get<AddressManager>()->getDomainURL().host();
|
||||
if (currentPlaceName.isEmpty()) {
|
||||
|
@ -6350,6 +6400,7 @@ void Application::clearDomainOctreeDetails() {
|
|||
qCDebug(interfaceapp) << "Clearing domain octree details...";
|
||||
|
||||
resetPhysicsReadyInformation();
|
||||
setIsInterstitialMode(true);
|
||||
|
||||
_octreeServerSceneStats.withWriteLock([&] {
|
||||
_octreeServerSceneStats.clear();
|
||||
|
@ -6370,10 +6421,6 @@ void Application::clearDomainOctreeDetails() {
|
|||
getMyAvatar()->setAvatarEntityDataChanged(true);
|
||||
}
|
||||
|
||||
void Application::clearDomainAvatars() {
|
||||
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
|
||||
}
|
||||
|
||||
void Application::domainURLChanged(QUrl domainURL) {
|
||||
// disable physics until we have enough information about our new location to not cause craziness.
|
||||
resetPhysicsReadyInformation();
|
||||
|
@ -6384,6 +6431,16 @@ void Application::domainURLChanged(QUrl domainURL) {
|
|||
updateWindowTitle();
|
||||
}
|
||||
|
||||
void Application::goToErrorDomainURL(QUrl errorDomainURL) {
|
||||
// disable physics until we have enough information about our new location to not cause craziness.
|
||||
resetPhysicsReadyInformation();
|
||||
setIsServerlessMode(errorDomainURL.scheme() != URL_SCHEME_HIFI);
|
||||
if (isServerlessMode()) {
|
||||
loadServerlessDomain(errorDomainURL, true);
|
||||
}
|
||||
updateWindowTitle();
|
||||
}
|
||||
|
||||
|
||||
void Application::resettingDomain() {
|
||||
_notifiedPacketVersionMismatchThisDomain = false;
|
||||
|
@ -6422,7 +6479,7 @@ void Application::nodeActivated(SharedNodePointer node) {
|
|||
_octreeQuery.incrementConnectionID();
|
||||
}
|
||||
|
||||
if (node->getType() == NodeType::AudioMixer) {
|
||||
if (node->getType() == NodeType::AudioMixer && !isInterstitialMode()) {
|
||||
DependencyManager::get<AudioClient>()->negotiateAudioFormat();
|
||||
}
|
||||
|
||||
|
@ -6442,8 +6499,10 @@ void Application::nodeActivated(SharedNodePointer node) {
|
|||
getMyAvatar()->markIdentityDataChanged();
|
||||
getMyAvatar()->resetLastSent();
|
||||
|
||||
// transmit a "sendAll" packet to the AvatarMixer we just connected to.
|
||||
getMyAvatar()->sendAvatarDataPacket(true);
|
||||
if (!isInterstitialMode()) {
|
||||
// transmit a "sendAll" packet to the AvatarMixer we just connected to.
|
||||
getMyAvatar()->sendAvatarDataPacket(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -6461,9 +6520,6 @@ void Application::nodeKilled(SharedNodePointer node) {
|
|||
} else if (node->getType() == NodeType::EntityServer) {
|
||||
// we lost an entity server, clear all of the domain octree details
|
||||
clearDomainOctreeDetails();
|
||||
} else if (node->getType() == NodeType::AvatarMixer) {
|
||||
// our avatar mixer has gone away - clear the hash of avatars
|
||||
DependencyManager::get<AvatarManager>()->clearOtherAvatars();
|
||||
} else if (node->getType() == NodeType::AssetServer) {
|
||||
// asset server going away - check if we have the asset browser showing
|
||||
|
||||
|
@ -7797,7 +7853,7 @@ float Application::getRenderResolutionScale() const {
|
|||
}
|
||||
|
||||
void Application::notifyPacketVersionMismatch() {
|
||||
if (!_notifiedPacketVersionMismatchThisDomain) {
|
||||
if (!_notifiedPacketVersionMismatchThisDomain && !isInterstitialMode()) {
|
||||
_notifiedPacketVersionMismatchThisDomain = true;
|
||||
|
||||
QString message = "The location you are visiting is running an incompatible server version.\n";
|
||||
|
|
|
@ -224,6 +224,7 @@ public:
|
|||
void setHmdTabletBecomesToolbarSetting(bool value);
|
||||
bool getPreferStylusOverLaser() { return _preferStylusOverLaserSetting.get(); }
|
||||
void setPreferStylusOverLaser(bool value);
|
||||
|
||||
// FIXME: Remove setting completely or make available through JavaScript API?
|
||||
//bool getPreferAvatarFingerOverStylus() { return _preferAvatarFingerOverStylusSetting.get(); }
|
||||
bool getPreferAvatarFingerOverStylus() { return false; }
|
||||
|
@ -304,6 +305,7 @@ public:
|
|||
void saveNextPhysicsStats(QString filename);
|
||||
|
||||
bool isServerlessMode() const;
|
||||
bool isInterstitialMode() const { return _interstitialMode; }
|
||||
|
||||
void replaceDomainContent(const QString& url);
|
||||
|
||||
|
@ -331,6 +333,8 @@ signals:
|
|||
|
||||
void uploadRequest(QString path);
|
||||
|
||||
void loginDialogPoppedUp();
|
||||
|
||||
public slots:
|
||||
QVector<EntityItemID> pasteEntities(float x, float y, float z);
|
||||
bool exportEntities(const QString& filename, const QVector<EntityItemID>& entityIDs, const glm::vec3* givenOffset = nullptr);
|
||||
|
@ -338,6 +342,7 @@ public slots:
|
|||
bool importEntities(const QString& url);
|
||||
void updateThreadPoolCount() const;
|
||||
void updateSystemTabletMode();
|
||||
void goToErrorDomainURL(QUrl errorDomainURL);
|
||||
|
||||
Q_INVOKABLE void loadDialog();
|
||||
Q_INVOKABLE void loadScriptURLDialog() const;
|
||||
|
@ -426,7 +431,8 @@ public slots:
|
|||
void setPreferredCursor(const QString& cursor);
|
||||
|
||||
void setIsServerlessMode(bool serverlessDomain);
|
||||
void loadServerlessDomain(QUrl domainURL);
|
||||
void loadServerlessDomain(QUrl domainURL, bool errorDomain = false);
|
||||
void setIsInterstitialMode(bool interstitialMode);
|
||||
|
||||
void updateVerboseLogging();
|
||||
|
||||
|
@ -437,7 +443,6 @@ private slots:
|
|||
void onDesktopRootContextCreated(QQmlContext* qmlContext);
|
||||
void showDesktop();
|
||||
void clearDomainOctreeDetails();
|
||||
void clearDomainAvatars();
|
||||
void onAboutToQuit();
|
||||
void onPresent(quint32 frameCount);
|
||||
|
||||
|
@ -626,6 +631,7 @@ private:
|
|||
QHash<int, QKeyEvent> _keysPressed;
|
||||
|
||||
bool _enableProcessOctreeThread;
|
||||
bool _interstitialMode { false };
|
||||
|
||||
OctreePacketProcessor _octreeProcessor;
|
||||
EntityEditPacketSender _entityEditSender;
|
||||
|
|
|
@ -11,16 +11,18 @@
|
|||
|
||||
#include "ConnectionMonitor.h"
|
||||
|
||||
#include "Application.h"
|
||||
#include "ui/DialogsManager.h"
|
||||
|
||||
#include <DependencyManager.h>
|
||||
#include <DomainHandler.h>
|
||||
#include <AddressManager.h>
|
||||
#include <NodeList.h>
|
||||
|
||||
// Because the connection monitor is created at startup, the time we wait on initial load
|
||||
// should be longer to allow the application to initialize.
|
||||
static const int ON_INITIAL_LOAD_DISPLAY_AFTER_DISCONNECTED_FOR_X_MS = 10000;
|
||||
static const int DISPLAY_AFTER_DISCONNECTED_FOR_X_MS = 5000;
|
||||
static const int ON_INITIAL_LOAD_REDIRECT_AFTER_DISCONNECTED_FOR_X_MS = 10000;
|
||||
static const int REDIRECT_AFTER_DISCONNECTED_FOR_X_MS = 5000;
|
||||
|
||||
void ConnectionMonitor::init() {
|
||||
// Connect to domain disconnected message
|
||||
|
@ -30,23 +32,25 @@ void ConnectionMonitor::init() {
|
|||
    connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &ConnectionMonitor::startTimer);
    connect(&domainHandler, &DomainHandler::connectedToDomain, this, &ConnectionMonitor::stopTimer);
    connect(&domainHandler, &DomainHandler::domainConnectionRefused, this, &ConnectionMonitor::stopTimer);
    connect(&domainHandler, &DomainHandler::redirectToErrorDomainURL, this, &ConnectionMonitor::stopTimer);
    connect(this, &ConnectionMonitor::setRedirectErrorState, &domainHandler, &DomainHandler::setRedirectErrorState);

    _timer.setSingleShot(true);
    if (!domainHandler.isConnected()) {
        _timer.start(ON_INITIAL_LOAD_DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
        _timer.start(ON_INITIAL_LOAD_REDIRECT_AFTER_DISCONNECTED_FOR_X_MS);
    }

    connect(&_timer, &QTimer::timeout, this, []() {
        qDebug() << "ConnectionMonitor: Showing connection failure window";
        DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(true);
    connect(&_timer, &QTimer::timeout, this, [this]() {
        qDebug() << "ConnectionMonitor: Redirecting to 404 error domain";
        // set in a timeout error
        emit setRedirectErrorState(REDIRECT_HIFI_ADDRESS, 5);
    });
}

void ConnectionMonitor::startTimer() {
    _timer.start(DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
    _timer.start(REDIRECT_AFTER_DISCONNECTED_FOR_X_MS);
}

void ConnectionMonitor::stopTimer() {
    _timer.stop();
    DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(false);
}
|
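The monitor now redirects to the error domain instead of just showing a dialog: a single-shot timer is armed with a longer interval on first load and a shorter one after later disconnects, and it emits setRedirectErrorState() if the domain is still unreachable when it fires. A standalone sketch of that timing scheme, with the redirect reduced to a log line and the connection check replaced by a plain bool:

#include <QCoreApplication>
#include <QTimer>
#include <QDebug>

// Assumed values mirroring the constants above.
static const int ON_INITIAL_LOAD_REDIRECT_MS = 10000;
static const int REDIRECT_MS = 5000;

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QTimer timer;
    timer.setSingleShot(true);
    QObject::connect(&timer, &QTimer::timeout, [&]() {
        // The real monitor emits setRedirectErrorState(REDIRECT_HIFI_ADDRESS, 5) here.
        qDebug() << "still disconnected, redirecting to the error domain";
        app.quit();
    });

    bool connectedAtStartup = false;              // stand-in for DomainHandler::isConnected()
    if (!connectedAtStartup) {
        timer.start(ON_INITIAL_LOAD_REDIRECT_MS); // longer grace period while the app boots
    }
    // A later disconnect would restart with the shorter interval:
    // timer.start(REDIRECT_MS);
    return app.exec();
}

The two intervals exist because the first connection attempt races application startup; after that, five seconds of silence is treated as a real outage.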
|
|
@ -15,6 +15,7 @@
|
|||
#include <QObject>
|
||||
#include <QTimer>
|
||||
|
||||
class QUrl;
|
||||
class QString;
|
||||
|
||||
class ConnectionMonitor : public QObject {
|
||||
|
@ -22,6 +23,9 @@ class ConnectionMonitor : public QObject {
|
|||
public:
|
||||
void init();
|
||||
|
||||
signals:
|
||||
void setRedirectErrorState(QUrl errorURL, int reasonCode);
|
||||
|
||||
private slots:
|
||||
void startTimer();
|
||||
void stopTimer();
|
||||
|
|
|
@ -137,7 +137,7 @@ void AvatarManager::updateMyAvatar(float deltaTime) {
|
|||
quint64 now = usecTimestampNow();
|
||||
quint64 dt = now - _lastSendAvatarDataTime;
|
||||
|
||||
if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS) {
|
||||
if (dt > MIN_TIME_BETWEEN_MY_AVATAR_DATA_SENDS && !_myAvatarDataPacketsPaused) {
|
||||
// send head/hand data to the avatar mixer and voxel server
|
||||
PerformanceTimer perfTimer("send");
|
||||
_myAvatar->sendAvatarDataPacket();
|
||||
|
@ -155,6 +155,16 @@ float AvatarManager::getAvatarDataRate(const QUuid& sessionID, const QString& ra
|
|||
return avatar ? avatar->getDataRate(rateName) : 0.0f;
|
||||
}
|
||||
|
||||
void AvatarManager::setMyAvatarDataPacketsPaused(bool pause) {
|
||||
if (_myAvatarDataPacketsPaused != pause) {
|
||||
_myAvatarDataPacketsPaused = pause;
|
||||
|
||||
if (!_myAvatarDataPacketsPaused) {
|
||||
_myAvatar->sendAvatarDataPacket(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
float AvatarManager::getAvatarUpdateRate(const QUuid& sessionID, const QString& rateName) const {
|
||||
auto avatar = getAvatarBySessionID(sessionID);
|
||||
return avatar ? avatar->getUpdateRate(rateName) : 0.0f;
|
||||
|
@ -166,29 +176,29 @@ float AvatarManager::getAvatarSimulationRate(const QUuid& sessionID, const QStri
|
|||
}
|
||||
|
||||
void AvatarManager::updateOtherAvatars(float deltaTime) {
|
||||
// lock the hash for read to check the size
|
||||
QReadLocker lock(&_hashLock);
|
||||
if (_avatarHash.size() < 2 && _avatarsToFade.isEmpty()) {
|
||||
return;
|
||||
{
|
||||
// lock the hash for read to check the size
|
||||
QReadLocker lock(&_hashLock);
|
||||
if (_avatarHash.size() < 2 && _avatarsToFade.isEmpty()) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
lock.unlock();
|
||||
|
||||
PerformanceTimer perfTimer("otherAvatars");
|
||||
|
||||
class SortableAvatar: public PrioritySortUtil::Sortable {
|
||||
public:
|
||||
SortableAvatar() = delete;
|
||||
SortableAvatar(const AvatarSharedPointer& avatar) : _avatar(avatar) {}
|
||||
SortableAvatar(const std::shared_ptr<Avatar>& avatar) : _avatar(avatar) {}
|
||||
glm::vec3 getPosition() const override { return _avatar->getWorldPosition(); }
|
||||
float getRadius() const override { return std::static_pointer_cast<Avatar>(_avatar)->getBoundingRadius(); }
|
||||
uint64_t getTimestamp() const override { return std::static_pointer_cast<Avatar>(_avatar)->getLastRenderUpdateTime(); }
|
||||
AvatarSharedPointer getAvatar() const { return _avatar; }
|
||||
float getRadius() const override { return _avatar->getBoundingRadius(); }
|
||||
uint64_t getTimestamp() const override { return _avatar->getLastRenderUpdateTime(); }
|
||||
std::shared_ptr<Avatar> getAvatar() const { return _avatar; }
|
||||
private:
|
||||
AvatarSharedPointer _avatar;
|
||||
std::shared_ptr<Avatar> _avatar;
|
||||
};
|
||||
|
||||
auto avatarMap = getHashCopy();
|
||||
AvatarHash::iterator itr = avatarMap.begin();
|
||||
|
||||
const auto& views = qApp->getConicalViews();
|
||||
PrioritySortUtil::PriorityQueue<SortableAvatar> sortedAvatars(views,
|
||||
|
@ -197,22 +207,24 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
|
|||
AvatarData::_avatarSortCoefficientAge);
|
||||
sortedAvatars.reserve(avatarMap.size() - 1); // don't include MyAvatar
|
||||
|
||||
// sort
|
||||
// Build vector and compute priorities
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
AvatarHash::iterator itr = avatarMap.begin();
|
||||
while (itr != avatarMap.end()) {
|
||||
const auto& avatar = std::static_pointer_cast<Avatar>(*itr);
|
||||
// DO NOT update _myAvatar! Its update has already been done earlier in the main loop.
|
||||
// DO NOT update or fade out uninitialized Avatars
|
||||
if (avatar != _myAvatar && avatar->isInitialized()) {
|
||||
if (avatar != _myAvatar && avatar->isInitialized() && !nodeList->isPersonalMutingNode(avatar->getID())) {
|
||||
sortedAvatars.push(SortableAvatar(avatar));
|
||||
}
|
||||
++itr;
|
||||
}
|
||||
// Sort
|
||||
const auto& sortedAvatarVector = sortedAvatars.getSortedVector();
|
||||
|
||||
// process in sorted order
|
||||
uint64_t startTime = usecTimestampNow();
|
||||
const uint64_t UPDATE_BUDGET = 2000; // usec
|
||||
uint64_t updateExpiry = startTime + UPDATE_BUDGET;
|
||||
uint64_t updateExpiry = startTime + MAX_UPDATE_AVATARS_TIME_BUDGET;
|
||||
int numAvatarsUpdated = 0;
|
||||
int numAVatarsNotUpdated = 0;
|
||||
|
||||
|
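Other avatars are now pushed into a priority queue up front and then simulated in priority order until a fixed time budget (MAX_UPDATE_AVATARS_TIME_BUDGET, replacing the old inline UPDATE_BUDGET) expires; whatever is left over is only counted, not simulated. A self-contained sketch of that sort-then-spend-a-budget loop, with a made-up Item type and a 2 ms budget:

#include <algorithm>
#include <chrono>
#include <cstdint>
#include <cstdio>
#include <vector>

struct Item { float priority; };

static uint64_t nowUsec() {
    using namespace std::chrono;
    return duration_cast<microseconds>(steady_clock::now().time_since_epoch()).count();
}

int main() {
    std::vector<Item> items(500, Item{0.0f});
    for (size_t i = 0; i < items.size(); ++i) { items[i].priority = (float)(items.size() - i); }

    // Highest priority first, like the sorted avatar vector.
    std::sort(items.begin(), items.end(),
              [](const Item& a, const Item& b) { return a.priority > b.priority; });

    const uint64_t UPDATE_BUDGET_USEC = 2000;     // assumed 2 ms, as in the old inline constant
    uint64_t updateExpiry = nowUsec() + UPDATE_BUDGET_USEC;
    int updated = 0, notUpdated = 0;
    for (const Item& item : items) {
        if (nowUsec() < updateExpiry) {
            ++updated;              // the expensive per-item simulation would go here
        } else {
            ++notUpdated;           // out of budget: count it and move on
        }
    }
    printf("updated %d, skipped %d\n", updated, notUpdated);
    return 0;
}

The point of the budget is that a crowded scene degrades the update rate of low-priority avatars instead of dragging down the frame rate.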
@ -231,18 +243,12 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
|
|||
avatar->updateOrbPosition();
|
||||
}
|
||||
|
||||
bool ignoring = DependencyManager::get<NodeList>()->isPersonalMutingNode(avatar->getID());
|
||||
if (ignoring) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// for ALL avatars...
|
||||
if (_shouldRender) {
|
||||
avatar->ensureInScene(avatar, qApp->getMain3DScene());
|
||||
}
|
||||
avatar->animateScaleChanges(deltaTime);
|
||||
|
||||
const float OUT_OF_VIEW_THRESHOLD = 0.5f * AvatarData::OUT_OF_VIEW_PENALTY;
|
||||
uint64_t now = usecTimestampNow();
|
||||
if (now < updateExpiry) {
|
||||
// we're within budget
|
||||
|
@ -263,7 +269,7 @@ void AvatarManager::updateOtherAvatars(float deltaTime) {
|
|||
// no time to simulate, but we take the time to count how many were tragically missed
|
||||
while (it != sortedAvatarVector.end()) {
|
||||
const SortableAvatar& newSortData = *it;
|
||||
const auto newAvatar = std::static_pointer_cast<Avatar>(newSortData.getAvatar());
|
||||
const auto& newAvatar = newSortData.getAvatar();
|
||||
bool inView = newSortData.getPriority() > OUT_OF_VIEW_THRESHOLD;
|
||||
// Once we reach an avatar that's not in view, all avatars after it will also be out of view
|
||||
if (!inView) {
|
||||
|
@ -433,6 +439,11 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
|
|||
avatar->die();
|
||||
queuePhysicsChange(avatar);
|
||||
|
||||
// remove this avatar's entities from the tree now, if we wait (as we did previously) for this Avatar's destructor
|
||||
// it might not fire until after we create a new instance for the same remote avatar, which creates a race
|
||||
// on the creation of entities for that avatar instance and the deletion of entities for this instance
|
||||
avatar->removeAvatarEntitiesFromTree();
|
||||
|
||||
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble) {
|
||||
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
|
||||
} else if (removalReason == KillAvatarReason::AvatarDisconnected) {
|
||||
|
@ -802,13 +813,13 @@ void AvatarManager::setAvatarSortCoefficient(const QString& name, const QScriptV
|
|||
QString currentSessionUUID = avatar->getSessionUUID().toString();
|
||||
if (specificAvatarIdentifiers.isEmpty() || specificAvatarIdentifiers.contains(currentSessionUUID)) {
|
||||
QJsonObject thisAvatarPalData;
|
||||
|
||||
|
||||
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
|
||||
|
||||
if (currentSessionUUID == myAvatar->getSessionUUID().toString()) {
|
||||
currentSessionUUID = "";
|
||||
}
|
||||
|
||||
|
||||
thisAvatarPalData.insert("sessionUUID", currentSessionUUID);
|
||||
thisAvatarPalData.insert("sessionDisplayName", avatar->getSessionDisplayName());
|
||||
thisAvatarPalData.insert("audioLoudness", avatar->getAudioLoudness());
|
||||
|
|
|
@ -91,9 +91,11 @@ public:
|
|||
void updateOtherAvatars(float deltaTime);
|
||||
void sendIdentityRequest(const QUuid& avatarID) const;
|
||||
|
||||
void setMyAvatarDataPacketsPaused(bool pause);
|
||||
|
||||
void postUpdate(float deltaTime, const render::ScenePointer& scene);
|
||||
|
||||
void clearOtherAvatars();
|
||||
void clearOtherAvatars() override;
|
||||
void deleteAllAvatars();
|
||||
|
||||
void getObjectsToRemoveFromPhysics(VectorOfMotionStates& motionStates);
|
||||
|
@ -219,6 +221,7 @@ private:
|
|||
int _numAvatarsNotUpdated { 0 };
|
||||
float _avatarSimulationTime { 0.0f };
|
||||
bool _shouldRender { true };
|
||||
bool _myAvatarDataPacketsPaused { false };
|
||||
mutable int _identityRequestsSent { 0 };
|
||||
|
||||
mutable std::mutex _spaceLock;
|
||||
|
|
|
@ -11,6 +11,25 @@
|
|||
|
||||
#include "AvatarMotionState.h"
|
||||
|
||||
static xColor getLoadingOrbColor(Avatar::LoadingStatus loadingStatus) {
|
||||
|
||||
const xColor NO_MODEL_COLOR(0xe3, 0xe3, 0xe3);
|
||||
const xColor LOAD_MODEL_COLOR(0xef, 0x93, 0xd1);
|
||||
const xColor LOAD_SUCCESS_COLOR(0x1f, 0xc6, 0xa6);
|
||||
const xColor LOAD_FAILURE_COLOR(0xc6, 0x21, 0x47);
|
||||
switch (loadingStatus) {
|
||||
case Avatar::LoadingStatus::NoModel:
|
||||
return NO_MODEL_COLOR;
|
||||
case Avatar::LoadingStatus::LoadModel:
|
||||
return LOAD_MODEL_COLOR;
|
||||
case Avatar::LoadingStatus::LoadSuccess:
|
||||
return LOAD_SUCCESS_COLOR;
|
||||
case Avatar::LoadingStatus::LoadFailure:
|
||||
default:
|
||||
return LOAD_FAILURE_COLOR;
|
||||
}
|
||||
}
|
||||
|
||||
OtherAvatar::OtherAvatar(QThread* thread) : Avatar(thread) {
|
||||
// give the pointer to our head to inherited _headData variable from AvatarData
|
||||
_headData = new Head(this);
|
||||
|
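getLoadingOrbColor() maps the avatar's loading status to the tint used for the placeholder orb. A tiny standalone version of that mapping, with xColor and LoadingStatus re-declared here just for the illustration:

#include <cstdint>
#include <cstdio>

struct xColor { uint8_t red, green, blue; };
enum class LoadingStatus { NoModel, LoadModel, LoadSuccess, LoadFailure };

static xColor getLoadingOrbColor(LoadingStatus loadingStatus) {
    switch (loadingStatus) {
        case LoadingStatus::NoModel:     return { 0xe3, 0xe3, 0xe3 };
        case LoadingStatus::LoadModel:   return { 0xef, 0x93, 0xd1 };
        case LoadingStatus::LoadSuccess: return { 0x1f, 0xc6, 0xa6 };
        case LoadingStatus::LoadFailure:
        default:                         return { 0xc6, 0x21, 0x47 };
    }
}

int main() {
    xColor c = getLoadingOrbColor(LoadingStatus::LoadModel);
    printf("orb color: #%02x%02x%02x\n", c.red, c.green, c.blue);
    return 0;
}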
@ -48,7 +67,7 @@ void OtherAvatar::createOrb() {
|
|||
if (_otherAvatarOrbMeshPlaceholderID.isNull()) {
|
||||
_otherAvatarOrbMeshPlaceholder = std::make_shared<Sphere3DOverlay>();
|
||||
_otherAvatarOrbMeshPlaceholder->setAlpha(1.0f);
|
||||
_otherAvatarOrbMeshPlaceholder->setColor({ 0xFF, 0x00, 0xFF });
|
||||
_otherAvatarOrbMeshPlaceholder->setColor(getLoadingOrbColor(_loadingStatus));
|
||||
_otherAvatarOrbMeshPlaceholder->setIsSolid(false);
|
||||
_otherAvatarOrbMeshPlaceholder->setPulseMin(0.5);
|
||||
_otherAvatarOrbMeshPlaceholder->setPulseMax(1.0);
|
||||
|
@ -64,6 +83,13 @@ void OtherAvatar::createOrb() {
|
|||
}
|
||||
}
|
||||
|
||||
void OtherAvatar::indicateLoadingStatus(LoadingStatus loadingStatus) {
|
||||
Avatar::indicateLoadingStatus(loadingStatus);
|
||||
if (_otherAvatarOrbMeshPlaceholder) {
|
||||
_otherAvatarOrbMeshPlaceholder->setColor(getLoadingOrbColor(_loadingStatus));
|
||||
}
|
||||
}
|
||||
|
||||
void OtherAvatar::setSpaceIndex(int32_t index) {
|
||||
assert(_spaceIndex == -1);
|
||||
_spaceIndex = index;
|
||||
|
|
|
@ -28,6 +28,7 @@ public:
|
|||
|
||||
virtual void instantiableAvatar() override { };
|
||||
virtual void createOrb() override;
|
||||
virtual void indicateLoadingStatus(LoadingStatus loadingStatus) override;
|
||||
void updateOrbPosition();
|
||||
void removeOrb();
|
||||
|
||||
|
|
|
@ -16,7 +16,6 @@
|
|||
#include "Application.h"
|
||||
#include "Menu.h"
|
||||
#include "SceneScriptingInterface.h"
|
||||
#include "SafeLanding.h"
|
||||
|
||||
OctreePacketProcessor::OctreePacketProcessor():
|
||||
_safeLanding(new SafeLanding())
|
||||
|
@ -133,7 +132,3 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
|
|||
void OctreePacketProcessor::startEntitySequence() {
|
||||
_safeLanding->startEntitySequence(qApp->getEntities());
|
||||
}
|
||||
|
||||
bool OctreePacketProcessor::isLoadSequenceComplete() const {
|
||||
return _safeLanding->isLoadSequenceComplete();
|
||||
}
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
#include <ReceivedPacketProcessor.h>
|
||||
#include <ReceivedMessage.h>
|
||||
|
||||
class SafeLanding;
|
||||
#include "SafeLanding.h"
|
||||
|
||||
/// Handles processing of incoming voxel packets for the interface application. As with other ReceivedPacketProcessor classes
|
||||
/// the user is responsible for reading inbound packets and adding them to the processing queue by calling queueReceivedPacket()
|
||||
|
@ -26,7 +26,8 @@ public:
|
|||
~OctreePacketProcessor();
|
||||
|
||||
void startEntitySequence();
|
||||
bool isLoadSequenceComplete() const;
|
||||
bool isLoadSequenceComplete() const { return _safeLanding->isLoadSequenceComplete(); }
|
||||
float domainLoadingProgress() const { return _safeLanding->loadingProgressPercentage(); }
|
||||
|
||||
signals:
|
||||
void packetVersionMismatch();
|
||||
|
@ -40,4 +41,4 @@ private slots:
|
|||
private:
|
||||
std::unique_ptr<SafeLanding> _safeLanding;
|
||||
};
|
||||
#endif // hifi_OctreePacketProcessor_h
|
||||
#endif // hifi_OctreePacketProcessor_h
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
#include "EntityTreeRenderer.h"
|
||||
#include "ModelEntityItem.h"
|
||||
#include "InterfaceLogging.h"
|
||||
#include "Application.h"
|
||||
|
||||
const int SafeLanding::SEQUENCE_MODULO = std::numeric_limits<OCTREE_PACKET_SEQUENCE>::max() + 1;
|
||||
|
||||
|
@ -53,6 +54,7 @@ void SafeLanding::startEntitySequence(QSharedPointer<EntityTreeRenderer> entityT
|
|||
void SafeLanding::stopEntitySequence() {
|
||||
Locker lock(_lock);
|
||||
_trackingEntities = false;
|
||||
_maxTrackedEntityCount = 0;
|
||||
_initialStart = INVALID_SEQUENCE;
|
||||
_initialEnd = INVALID_SEQUENCE;
|
||||
_trackedEntities.clear();
|
||||
|
@ -64,20 +66,18 @@ void SafeLanding::addTrackedEntity(const EntityItemID& entityID) {
|
|||
Locker lock(_lock);
|
||||
EntityItemPointer entity = _entityTree->findEntityByID(entityID);
|
||||
|
||||
if (entity && !entity->getCollisionless()) {
|
||||
const auto& entityType = entity->getType();
|
||||
if (entityType == EntityTypes::Model) {
|
||||
ModelEntityItem * modelEntity = std::dynamic_pointer_cast<ModelEntityItem>(entity).get();
|
||||
static const std::set<ShapeType> downloadedCollisionTypes
|
||||
{ SHAPE_TYPE_COMPOUND, SHAPE_TYPE_SIMPLE_COMPOUND, SHAPE_TYPE_STATIC_MESH, SHAPE_TYPE_SIMPLE_HULL };
|
||||
bool hasAABox;
|
||||
entity->getAABox(hasAABox);
|
||||
if (hasAABox && downloadedCollisionTypes.count(modelEntity->getShapeType()) != 0) {
|
||||
// Only track entities with downloaded collision bodies.
|
||||
_trackedEntities.emplace(entityID, entity);
|
||||
}
|
||||
if (entity) {
|
||||
|
||||
_trackedEntities.emplace(entityID, entity);
|
||||
int trackedEntityCount = (int)_trackedEntities.size();
|
||||
|
||||
if (trackedEntityCount > _maxTrackedEntityCount) {
|
||||
_maxTrackedEntityCount = trackedEntityCount;
|
||||
}
|
||||
qCDebug(interfaceapp) << "Safe Landing: Tracking entity " << entity->getItemName();
|
||||
}
|
||||
} else {
|
||||
qCDebug(interfaceapp) << "Safe Landing: Null Entity: " << entityID;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -102,7 +102,7 @@ void SafeLanding::noteReceivedsequenceNumber(int sequenceNumber) {
|
|||
}
|
||||
|
||||
bool SafeLanding::isLoadSequenceComplete() {
|
||||
if (isEntityPhysicsComplete() && isSequenceNumbersComplete()) {
|
||||
if (isEntityLoadingComplete() && isSequenceNumbersComplete()) {
|
||||
Locker lock(_lock);
|
||||
_trackedEntities.clear();
|
||||
_initialStart = INVALID_SEQUENCE;
|
||||
|
@ -114,6 +114,15 @@ bool SafeLanding::isLoadSequenceComplete() {
|
|||
return !_trackingEntities;
|
||||
}
|
||||
|
||||
float SafeLanding::loadingProgressPercentage() {
|
||||
Locker lock(_lock);
|
||||
if (_maxTrackedEntityCount > 0) {
|
||||
return ((_maxTrackedEntityCount - _trackedEntities.size()) / (float)_maxTrackedEntityCount);
|
||||
}
|
||||
|
||||
return 0.0f;
|
||||
}
|
||||
|
||||
bool SafeLanding::isSequenceNumbersComplete() {
|
||||
if (_initialStart != INVALID_SEQUENCE) {
|
||||
Locker lock(_lock);
|
||||
|
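loadingProgressPercentage() reports how much of the initially tracked entity set has drained. The same formula in isolation; note that despite the name it yields a 0..1 fraction, not a literal percentage:

#include <cstdio>

// maxTracked is the peak number of tracked entities seen so far,
// stillTracked is how many remain in the set right now.
float loadingProgress(int maxTracked, int stillTracked) {
    if (maxTracked > 0) {
        return (maxTracked - stillTracked) / (float)maxTracked;
    }
    return 0.0f;
}

int main() {
    printf("%.2f\n", loadingProgress(200, 50));   // 0.75 -> 75% of tracked entities are ready
    return 0;
}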
@ -132,17 +141,53 @@ bool SafeLanding::isSequenceNumbersComplete() {
|
|||
return false;
|
||||
}
|
||||
|
||||
bool SafeLanding::isEntityPhysicsComplete() {
|
||||
Locker lock(_lock);
|
||||
for (auto entityMapIter = _trackedEntities.begin(); entityMapIter != _trackedEntities.end(); ++entityMapIter) {
|
||||
auto entity = entityMapIter->second;
|
||||
if (!entity->shouldBePhysical() || entity->isReadyToComputeShape()) {
|
||||
entityMapIter = _trackedEntities.erase(entityMapIter);
|
||||
if (entityMapIter == _trackedEntities.end()) {
|
||||
break;
|
||||
bool isEntityPhysicsReady(const EntityItemPointer& entity) {
|
||||
if (entity && !entity->getCollisionless()) {
|
||||
const auto& entityType = entity->getType();
|
||||
if (entityType == EntityTypes::Model) {
|
||||
ModelEntityItem * modelEntity = std::dynamic_pointer_cast<ModelEntityItem>(entity).get();
|
||||
static const std::set<ShapeType> downloadedCollisionTypes
|
||||
{ SHAPE_TYPE_COMPOUND, SHAPE_TYPE_SIMPLE_COMPOUND, SHAPE_TYPE_STATIC_MESH, SHAPE_TYPE_SIMPLE_HULL };
|
||||
bool hasAABox;
|
||||
entity->getAABox(hasAABox);
|
||||
if (hasAABox && downloadedCollisionTypes.count(modelEntity->getShapeType()) != 0) {
|
||||
return (!entity->shouldBePhysical() || entity->isReadyToComputeShape());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool SafeLanding::isEntityLoadingComplete() {
|
||||
Locker lock(_lock);
|
||||
|
||||
|
||||
auto entityTree = qApp->getEntities();
|
||||
auto entityMapIter = _trackedEntities.begin();
|
||||
|
||||
while (entityMapIter != _trackedEntities.end()) {
|
||||
auto entity = entityMapIter->second;
|
||||
|
||||
bool isVisuallyReady = true;
|
||||
|
||||
Settings settings;
|
||||
bool enableInterstitial = settings.value("enableIntersitialMode", false).toBool();
|
||||
|
||||
if (enableInterstitial) {
|
||||
isVisuallyReady = (entity->isVisuallyReady() || !entityTree->renderableForEntityId(entityMapIter->first));
|
||||
}
|
||||
|
||||
if (isEntityPhysicsReady(entity) && isVisuallyReady) {
|
||||
entityMapIter = _trackedEntities.erase(entityMapIter);
|
||||
} else {
|
||||
if (!isVisuallyReady) {
|
||||
entity->requestRenderUpdate();
|
||||
}
|
||||
|
||||
entityMapIter++;
|
||||
}
|
||||
}
|
||||
return _trackedEntities.empty();
|
||||
}
|
||||
|
||||
|
|
|
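isEntityLoadingComplete() walks the tracked-entity map and erases entries as they become ready, leaving the rest to be re-checked on later passes; loading is complete when the map is empty. The core of that drain loop in standard C++, with a string standing in for the physics and visual readiness checks:

#include <cstdio>
#include <map>
#include <string>

int main() {
    std::map<int, std::string> tracked = { {1, "ready"}, {2, "loading"}, {3, "ready"} };

    auto it = tracked.begin();
    while (it != tracked.end()) {
        bool ready = (it->second == "ready");   // stand-in for isEntityPhysicsReady() && isVisuallyReady
        if (ready) {
            it = tracked.erase(it);             // erase() returns the next valid iterator
        } else {
            ++it;                               // not ready yet: keep tracking, check again next pass
        }
    }
    printf("still tracked: %zu\n", tracked.size());
    return 0;
}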
@ -18,6 +18,7 @@
|
|||
#include <QtCore/QSharedPointer>
|
||||
|
||||
#include "EntityItem.h"
|
||||
#include "EntityDynamicInterface.h"
|
||||
|
||||
class EntityTreeRenderer;
|
||||
class EntityItemID;
|
||||
|
@ -29,6 +30,7 @@ public:
|
|||
void setCompletionSequenceNumbers(int first, int last); // 'last' exclusive.
|
||||
void noteReceivedsequenceNumber(int sequenceNumber);
|
||||
bool isLoadSequenceComplete();
|
||||
float loadingProgressPercentage();
|
||||
|
||||
private slots:
|
||||
void addTrackedEntity(const EntityItemID& entityID);
|
||||
|
@ -37,7 +39,7 @@ private slots:
|
|||
private:
|
||||
bool isSequenceNumbersComplete();
|
||||
void debugDumpSequenceIDs() const;
|
||||
bool isEntityPhysicsComplete();
|
||||
bool isEntityLoadingComplete();
|
||||
|
||||
std::mutex _lock;
|
||||
using Locker = std::lock_guard<std::mutex>;
|
||||
|
@ -49,6 +51,7 @@ private:
|
|||
static constexpr int INVALID_SEQUENCE = -1;
|
||||
int _initialStart { INVALID_SEQUENCE };
|
||||
int _initialEnd { INVALID_SEQUENCE };
|
||||
int _maxTrackedEntityCount { 0 };
|
||||
|
||||
struct SequenceLessThan {
|
||||
bool operator()(const int& a, const int& b) const;
|
||||
|
|
|
@ -409,6 +409,10 @@ glm::vec2 WindowScriptingInterface::getDeviceSize() const {
|
|||
return qApp->getDeviceSize();
|
||||
}
|
||||
|
||||
int WindowScriptingInterface::getLastDomainConnectionError() const {
|
||||
return DependencyManager::get<NodeList>()->getDomainHandler().getLastDomainConnectionError();
|
||||
}
|
||||
|
||||
int WindowScriptingInterface::getX() {
|
||||
return qApp->getWindow()->geometry().x();
|
||||
}
|
||||
|
@ -584,3 +588,8 @@ void WindowScriptingInterface::onMessageBoxSelected(int button) {
|
|||
_messageBoxes.remove(id);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
float WindowScriptingInterface::domainLoadingProgress() {
|
||||
return qApp->getOctreePacketProcessor().domainLoadingProgress();
|
||||
}
|
||||
|
|
|
@ -491,6 +491,13 @@ public slots:
|
|||
*/
|
||||
glm::vec2 getDeviceSize() const;
|
||||
|
||||
/**jsdoc
|
||||
* Gets the last domain connection error when a connection is refused.
|
||||
* @function Window.getLastDomainConnectionError
|
||||
* @returns {Window.ConnectionRefusedReason} Integer code identifying the reason the last domain connection was refused.
|
||||
*/
|
||||
int getLastDomainConnectionError() const;
|
||||
|
||||
/**jsdoc
|
||||
* Open a non-modal message box that can have a variety of button combinations. See also,
|
||||
* {@link Window.updateMessageBox|updateMessageBox} and {@link Window.closeMessageBox|closeMessageBox}.
|
||||
|
@ -561,6 +568,8 @@ public slots:
|
|||
*/
|
||||
void closeMessageBox(int id);
|
||||
|
||||
float domainLoadingProgress();
|
||||
|
||||
private slots:
|
||||
void onWindowGeometryChanged(const QRect& geometry);
|
||||
void onMessageBoxSelected(int button);
|
||||
|
|
|
@ -19,6 +19,7 @@
|
|||
#include <plugins/PluginManager.h>
|
||||
#include <plugins/SteamClientPlugin.h>
|
||||
#include <ui/TabletScriptingInterface.h>
|
||||
#include <UserActivityLogger.h>
|
||||
|
||||
#include "AccountManager.h"
|
||||
#include "DependencyManager.h"
|
||||
|
@ -37,11 +38,19 @@ LoginDialog::LoginDialog(QQuickItem *parent) : OffscreenQmlDialog(parent) {
|
|||
connect(accountManager.data(), &AccountManager::loginFailed,
|
||||
this, &LoginDialog::handleLoginFailed);
|
||||
#endif
|
||||
|
||||
}
|
||||
|
||||
void LoginDialog::showWithSelection()
|
||||
{
|
||||
LoginDialog::~LoginDialog() {
|
||||
Setting::Handle<bool> loginDialogPoppedUp{ "loginDialogPoppedUp", false };
|
||||
if (loginDialogPoppedUp.get()) {
|
||||
QJsonObject data;
|
||||
data["action"] = "user opted out";
|
||||
UserActivityLogger::getInstance().logAction("encourageLoginDialog", data);
|
||||
}
|
||||
loginDialogPoppedUp.set(false);
|
||||
}
|
||||
|
||||
void LoginDialog::showWithSelection() {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet("com.highfidelity.interface.tablet.system"));
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
|
@ -73,9 +82,7 @@ void LoginDialog::toggleAction() {
|
|||
} else {
|
||||
// change the menu item to login
|
||||
loginAction->setText("Login / Sign Up");
|
||||
connection = connect(loginAction, &QAction::triggered, [] {
|
||||
LoginDialog::showWithSelection();
|
||||
});
|
||||
connection = connect(loginAction, &QAction::triggered, [] { LoginDialog::showWithSelection(); });
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -158,7 +165,6 @@ void LoginDialog::createAccountFromStream(QString username) {
|
|||
QJsonDocument(payload).toJson());
|
||||
});
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void LoginDialog::openUrl(const QString& url) const {
|
||||
|
@ -200,25 +206,24 @@ void LoginDialog::createFailed(QNetworkReply* reply) {
|
|||
}
|
||||
|
||||
void LoginDialog::signup(const QString& email, const QString& username, const QString& password) {
|
||||
|
||||
JSONCallbackParameters callbackParams;
|
||||
callbackParams.callbackReceiver = this;
|
||||
callbackParams.jsonCallbackMethod = "signupCompleted";
|
||||
callbackParams.errorCallbackMethod = "signupFailed";
|
||||
|
||||
|
||||
QJsonObject payload;
|
||||
|
||||
|
||||
QJsonObject userObject;
|
||||
userObject.insert("email", email);
|
||||
userObject.insert("username", username);
|
||||
userObject.insert("password", password);
|
||||
|
||||
|
||||
payload.insert("user", userObject);
|
||||
|
||||
|
||||
static const QString API_SIGNUP_PATH = "api/v1/users";
|
||||
|
||||
|
||||
qDebug() << "Sending a request to create an account for" << username;
|
||||
|
||||
|
||||
auto accountManager = DependencyManager::get<AccountManager>();
|
||||
accountManager->sendRequest(API_SIGNUP_PATH, AccountManagerAuth::None,
|
||||
QNetworkAccessManager::PostOperation, callbackParams,
|
||||
|

@ -240,41 +245,37 @@ QString errorStringFromAPIObject(const QJsonValue& apiObject) {
}

void LoginDialog::signupFailed(QNetworkReply* reply) {
    // parse the returned JSON to see what the problem was
    auto jsonResponse = QJsonDocument::fromJson(reply->readAll());

    static const QString RESPONSE_DATA_KEY = "data";

    auto dataJsonValue = jsonResponse.object()[RESPONSE_DATA_KEY];

    if (dataJsonValue.isObject()) {
        auto dataObject = dataJsonValue.toObject();

        static const QString EMAIL_DATA_KEY = "email";
        static const QString USERNAME_DATA_KEY = "username";
        static const QString PASSWORD_DATA_KEY = "password";

        QStringList errorStringList;

        if (dataObject.contains(EMAIL_DATA_KEY)) {
            errorStringList.append(QString("Email %1.").arg(errorStringFromAPIObject(dataObject[EMAIL_DATA_KEY])));
        }

        if (dataObject.contains(USERNAME_DATA_KEY)) {
            errorStringList.append(QString("Username %1.").arg(errorStringFromAPIObject(dataObject[USERNAME_DATA_KEY])));
        }

        if (dataObject.contains(PASSWORD_DATA_KEY)) {
            errorStringList.append(QString("Password %1.").arg(errorStringFromAPIObject(dataObject[PASSWORD_DATA_KEY])));
        }

        emit handleSignupFailed(errorStringList.join('\n'));
    } else {
        static const QString DEFAULT_SIGN_UP_FAILURE_MESSAGE = "There was an unknown error while creating your account. Please try again later.";
        emit handleSignupFailed(DEFAULT_SIGN_UP_FAILURE_MESSAGE);
    }

}
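errorStringFromAPIObject() is referenced above but its body falls outside the hunks shown. A plausible sketch of such a helper, assuming the API reports per-field errors as an array of strings (that response shape is an assumption, not something visible in this diff):

```cpp
#include <QtCore/QJsonArray>
#include <QtCore/QJsonValue>
#include <QtCore/QString>

// Hypothetical helper: turn one field's error value into a readable fragment,
// e.g. "email": ["is invalid"] -> "is invalid".
QString errorStringFromAPIObject(const QJsonValue& apiObject) {
    if (apiObject.isArray() && !apiObject.toArray().isEmpty()) {
        return apiObject.toArray().first().toString();
    } else if (apiObject.isString()) {
        return apiObject.toString();
    }
    return "has an unknown error";
}
```
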

@ -27,7 +27,10 @@ public:

    LoginDialog(QQuickItem* parent = nullptr);

    ~LoginDialog();

    static void showWithSelection();

signals:
    void handleLoginCompleted();
    void handleLoginFailed();

@ -62,7 +65,6 @@ protected slots:
    Q_INVOKABLE void signup(const QString& email, const QString& username, const QString& password);

    Q_INVOKABLE void openUrl(const QString& url) const;

};

#endif // hifi_LoginDialog_h

@ -106,6 +106,10 @@ extern std::atomic<size_t> DECIMATED_TEXTURE_COUNT;
extern std::atomic<size_t> RECTIFIED_TEXTURE_COUNT;

void Stats::updateStats(bool force) {

    if (qApp->isInterstitialMode()) {
        return;
    }
    QQuickItem* parent = parentItem();
    if (!force) {
        if (!Menu::getInstance()->isOptionChecked(MenuOption::Stats)) {

@ -305,6 +305,16 @@ void AudioClient::audioMixerKilled() {
    emit disconnected();
}

void AudioClient::setAudioPaused(bool pause) {
    if (_audioPaused != pause) {
        _audioPaused = pause;

        if (!_audioPaused) {
            negotiateAudioFormat();
        }
    }
}

QAudioDeviceInfo getNamedAudioDeviceForMode(QAudio::Mode mode, const QString& deviceName) {
    QAudioDeviceInfo result;
    foreach(QAudioDeviceInfo audioDevice, getAvailableDevices(mode)) {
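setAudioPaused() is a new public entry point, and AudioClient lives on its own thread. A minimal sketch of how a caller on another thread might toggle it safely; the wrapper function is illustrative, not part of this change, and it assumes Qt 5.10+ for the functor overload of invokeMethod:

```cpp
#include <QtCore/QMetaObject>
#include <QtCore/QObject>

// Hypothetical caller: marshal the toggle onto AudioClient's own thread so the
// _audioPaused flag and the format renegotiation happen there.
void setAudioPausedSafely(AudioClient* audioClient, bool pause) {
    QMetaObject::invokeMethod(audioClient, [audioClient, pause] {
        audioClient->setAudioPaused(pause);
    }, Qt::QueuedConnection);
}
```
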
@ -651,7 +661,6 @@ void AudioClient::stop() {
|
|||
}
|
||||
|
||||
void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessage> message) {
|
||||
|
||||
char bitset;
|
||||
message->readPrimitive(&bitset);
|
||||
|
||||
|
@ -664,11 +673,10 @@ void AudioClient::handleAudioEnvironmentDataPacket(QSharedPointer<ReceivedMessag
|
|||
_receivedAudioStream.setReverb(reverbTime, wetLevel);
|
||||
} else {
|
||||
_receivedAudioStream.clearReverb();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AudioClient::handleAudioDataPacket(QSharedPointer<ReceivedMessage> message) {
|
||||
|
||||
if (message->getType() == PacketType::SilentAudioFrame) {
|
||||
_silentInbound.increment();
|
||||
} else {
|
||||
|
@ -1026,80 +1034,82 @@ void AudioClient::handleLocalEchoAndReverb(QByteArray& inputByteArray) {
|
|||
}
|
||||
|
||||
void AudioClient::handleAudioInput(QByteArray& audioBuffer) {
|
||||
if (_muted) {
|
||||
_lastInputLoudness = 0.0f;
|
||||
_timeSinceLastClip = 0.0f;
|
||||
} else {
|
||||
int16_t* samples = reinterpret_cast<int16_t*>(audioBuffer.data());
|
||||
int numSamples = audioBuffer.size() / AudioConstants::SAMPLE_SIZE;
|
||||
int numFrames = numSamples / (_isStereoInput ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
|
||||
if (_isNoiseGateEnabled) {
|
||||
// The audio gate includes DC removal
|
||||
_audioGate->render(samples, samples, numFrames);
|
||||
} else {
|
||||
_audioGate->removeDC(samples, samples, numFrames);
|
||||
}
|
||||
|
||||
int32_t loudness = 0;
|
||||
assert(numSamples < 65536); // int32_t loudness cannot overflow
|
||||
bool didClip = false;
|
||||
for (int i = 0; i < numSamples; ++i) {
|
||||
const int32_t CLIPPING_THRESHOLD = (int32_t)(AudioConstants::MAX_SAMPLE_VALUE * 0.9f);
|
||||
int32_t sample = std::abs((int32_t)samples[i]);
|
||||
loudness += sample;
|
||||
didClip |= (sample > CLIPPING_THRESHOLD);
|
||||
}
|
||||
_lastInputLoudness = (float)loudness / numSamples;
|
||||
|
||||
if (didClip) {
|
||||
if (!_audioPaused) {
|
||||
if (_muted) {
|
||||
_lastInputLoudness = 0.0f;
|
||||
_timeSinceLastClip = 0.0f;
|
||||
} else if (_timeSinceLastClip >= 0.0f) {
|
||||
_timeSinceLastClip += (float)numSamples / (float)AudioConstants::SAMPLE_RATE;
|
||||
} else {
|
||||
int16_t* samples = reinterpret_cast<int16_t*>(audioBuffer.data());
|
||||
int numSamples = audioBuffer.size() / AudioConstants::SAMPLE_SIZE;
|
||||
int numFrames = numSamples / (_isStereoInput ? AudioConstants::STEREO : AudioConstants::MONO);
|
||||
|
||||
if (_isNoiseGateEnabled) {
|
||||
// The audio gate includes DC removal
|
||||
_audioGate->render(samples, samples, numFrames);
|
||||
} else {
|
||||
_audioGate->removeDC(samples, samples, numFrames);
|
||||
}
|
||||
|
||||
int32_t loudness = 0;
|
||||
assert(numSamples < 65536); // int32_t loudness cannot overflow
|
||||
bool didClip = false;
|
||||
for (int i = 0; i < numSamples; ++i) {
|
||||
const int32_t CLIPPING_THRESHOLD = (int32_t)(AudioConstants::MAX_SAMPLE_VALUE * 0.9f);
|
||||
int32_t sample = std::abs((int32_t)samples[i]);
|
||||
loudness += sample;
|
||||
didClip |= (sample > CLIPPING_THRESHOLD);
|
||||
}
|
||||
_lastInputLoudness = (float)loudness / numSamples;
|
||||
|
||||
if (didClip) {
|
||||
_timeSinceLastClip = 0.0f;
|
||||
} else if (_timeSinceLastClip >= 0.0f) {
|
||||
_timeSinceLastClip += (float)numSamples / (float)AudioConstants::SAMPLE_RATE;
|
||||
}
|
||||
|
||||
emit inputReceived(audioBuffer);
|
||||
}
|
||||
|
||||
emit inputReceived(audioBuffer);
|
||||
emit inputLoudnessChanged(_lastInputLoudness);
|
||||
|
||||
// state machine to detect gate opening and closing
|
||||
bool audioGateOpen = (_lastInputLoudness != 0.0f);
|
||||
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
|
||||
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
|
||||
_audioGateOpen = audioGateOpen;
|
||||
|
||||
if (openedInLastBlock) {
|
||||
emit noiseGateOpened();
|
||||
} else if (closedInLastBlock) {
|
||||
emit noiseGateClosed();
|
||||
}
|
||||
|
||||
// the codec must be flushed to silence before sending silent packets,
|
||||
// so delay the transition to silent packets by one packet after becoming silent.
|
||||
auto packetType = _shouldEchoToServer ? PacketType::MicrophoneAudioWithEcho : PacketType::MicrophoneAudioNoEcho;
|
||||
if (!audioGateOpen && !closedInLastBlock) {
|
||||
packetType = PacketType::SilentAudioFrame;
|
||||
_silentOutbound.increment();
|
||||
} else {
|
||||
_audioOutbound.increment();
|
||||
}
|
||||
|
||||
Transform audioTransform;
|
||||
audioTransform.setTranslation(_positionGetter());
|
||||
audioTransform.setRotation(_orientationGetter());
|
||||
|
||||
QByteArray encodedBuffer;
|
||||
if (_encoder) {
|
||||
_encoder->encode(audioBuffer, encodedBuffer);
|
||||
} else {
|
||||
encodedBuffer = audioBuffer;
|
||||
}
|
||||
|
||||
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, _isStereoInput,
|
||||
audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
|
||||
packetType, _selectedCodecName);
|
||||
_stats.sentPacket();
|
||||
}
|
||||
|
||||
emit inputLoudnessChanged(_lastInputLoudness);
|
||||
|
||||
// state machine to detect gate opening and closing
|
||||
bool audioGateOpen = (_lastInputLoudness != 0.0f);
|
||||
bool openedInLastBlock = !_audioGateOpen && audioGateOpen; // the gate just opened
|
||||
bool closedInLastBlock = _audioGateOpen && !audioGateOpen; // the gate just closed
|
||||
_audioGateOpen = audioGateOpen;
|
||||
|
||||
if (openedInLastBlock) {
|
||||
emit noiseGateOpened();
|
||||
} else if (closedInLastBlock) {
|
||||
emit noiseGateClosed();
|
||||
}
|
||||
|
||||
// the codec must be flushed to silence before sending silent packets,
|
||||
// so delay the transition to silent packets by one packet after becoming silent.
|
||||
auto packetType = _shouldEchoToServer ? PacketType::MicrophoneAudioWithEcho : PacketType::MicrophoneAudioNoEcho;
|
||||
if (!audioGateOpen && !closedInLastBlock) {
|
||||
packetType = PacketType::SilentAudioFrame;
|
||||
_silentOutbound.increment();
|
||||
} else {
|
||||
_audioOutbound.increment();
|
||||
}
|
||||
|
||||
Transform audioTransform;
|
||||
audioTransform.setTranslation(_positionGetter());
|
||||
audioTransform.setRotation(_orientationGetter());
|
||||
|
||||
QByteArray encodedBuffer;
|
||||
if (_encoder) {
|
||||
_encoder->encode(audioBuffer, encodedBuffer);
|
||||
} else {
|
||||
encodedBuffer = audioBuffer;
|
||||
}
|
||||
|
||||
emitAudioPacket(encodedBuffer.data(), encodedBuffer.size(), _outgoingAvatarAudioSequenceNumber, _isStereoInput,
|
||||
audioTransform, avatarBoundingBoxCorner, avatarBoundingBoxScale,
|
||||
packetType, _selectedCodecName);
|
||||
_stats.sentPacket();
|
||||
}
|
||||
|
||||
void AudioClient::handleMicAudioInput() {
|
||||
|
|
|
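The tail of handleAudioInput() above implements two small pieces of logic worth calling out: an edge detector that notices the noise gate opening or closing based on the last input loudness, and a one-packet delay before switching to SilentAudioFrame so the codec can be flushed to silence first. A condensed, standalone restatement of that decision (simplified names, not the class's API):

```cpp
// Standalone sketch of the gate/packet-type decision above.
// Rule restated: keep sending one audible packet after the gate closes so the
// codec can be flushed to silence before SilentAudioFrame packets begin.
enum class AudioPacketKind { Audible, Silent };

struct GateState {
    bool open { false };

    AudioPacketKind update(float lastInputLoudness) {
        const bool nowOpen = (lastInputLoudness != 0.0f);
        const bool closedThisBlock = open && !nowOpen;  // gate just closed
        open = nowOpen;
        // Silent packets only start one block *after* the gate closes.
        return (!nowOpen && !closedThisBlock) ? AudioPacketKind::Silent
                                              : AudioPacketKind::Audible;
    }
};
```
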

@ -162,6 +162,7 @@ public:

    bool startRecording(const QString& filename);
    void stopRecording();
    void setAudioPaused(bool pause);

#ifdef Q_OS_WIN

@ -416,6 +417,7 @@ private:
    QVector<AudioInjectorPointer> _activeLocalAudioInjectors;

    bool _isPlayingBackRecording { false };
    bool _audioPaused { false };

    CodecPluginPointer _codec;
    QString _selectedCodecName;

@ -126,19 +126,11 @@ Avatar::Avatar(QThread* thread) :
    _leftPointerGeometryID = geometryCache->allocateID();
    _rightPointerGeometryID = geometryCache->allocateID();
    _lastRenderUpdateTime = usecTimestampNow();

    indicateLoadingStatus(LoadingStatus::NoModel);
}

Avatar::~Avatar() {
    auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
    EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
    if (entityTree) {
        entityTree->withWriteLock([&] {
            AvatarEntityMap avatarEntities = getAvatarEntityData();
            for (auto entityID : avatarEntities.keys()) {
                entityTree->deleteEntity(entityID, true, true);
            }
        });
    }
    auto geometryCache = DependencyManager::get<GeometryCache>();
    if (geometryCache) {
        geometryCache->releaseID(_nameRectGeometryID);

@ -383,6 +375,19 @@ void Avatar::updateAvatarEntities() {
    setAvatarEntityDataChanged(false);
}

void Avatar::removeAvatarEntitiesFromTree() {
    auto treeRenderer = DependencyManager::get<EntityTreeRenderer>();
    EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
    if (entityTree) {
        entityTree->withWriteLock([&] {
            AvatarEntityMap avatarEntities = getAvatarEntityData();
            for (auto entityID : avatarEntities.keys()) {
                entityTree->deleteEntity(entityID, true, true);
            }
        });
    }
}

void Avatar::relayJointDataToChildren() {
    forEachChild([&](SpatiallyNestablePointer child) {
        if (child->getNestableType() == NestableType::Entity) {

@ -1370,12 +1375,15 @@ void Avatar::scaleVectorRelativeToPosition(glm::vec3 &positionToScale) const {
}

void Avatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
    if (!isMyAvatar()) {
        createOrb();
    }
    AvatarData::setSkeletonModelURL(skeletonModelURL);
    if (QThread::currentThread() == thread()) {

        if (!isMyAvatar()) {
            createOrb();
        }

        _skeletonModel->setURL(_skeletonModelURL);
        indicateLoadingStatus(LoadingStatus::LoadModel);
    } else {
        QMetaObject::invokeMethod(_skeletonModel.get(), "setURL", Qt::QueuedConnection, Q_ARG(QUrl, _skeletonModelURL));
    }
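setSkeletonModelURL() now follows a common Qt pattern: act immediately when already on the owning object's thread, otherwise queue the call onto it. The same idea, factored into a generic helper purely for illustration (this helper is not part of the codebase; Qt 5.10+ is assumed for the functor overload of invokeMethod):

```cpp
#include <QtCore/QMetaObject>
#include <QtCore/QObject>
#include <QtCore/QThread>
#include <utility>

// Run fn right away if we are on target's thread, otherwise queue it there.
template <typename Fn>
void runOnObjectThread(QObject* target, Fn&& fn) {
    if (QThread::currentThread() == target->thread()) {
        fn();
    } else {
        QMetaObject::invokeMethod(target, std::forward<Fn>(fn), Qt::QueuedConnection);
    }
}
```
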
@ -1388,11 +1396,12 @@ void Avatar::setModelURLFinished(bool success) {
|
|||
_reconstructSoftEntitiesJointMap = true;
|
||||
|
||||
if (!success && _skeletonModelURL != AvatarData::defaultFullAvatarModelUrl()) {
|
||||
indicateLoadingStatus(LoadingStatus::LoadFailure);
|
||||
const int MAX_SKELETON_DOWNLOAD_ATTEMPTS = 4; // NOTE: we don't want to be as generous as ResourceCache is, we only want 4 attempts
|
||||
if (_skeletonModel->getResourceDownloadAttemptsRemaining() <= 0 ||
|
||||
_skeletonModel->getResourceDownloadAttempts() > MAX_SKELETON_DOWNLOAD_ATTEMPTS) {
|
||||
qCWarning(avatars_renderer) << "Using default after failing to load Avatar model: " << _skeletonModelURL
|
||||
<< "after" << _skeletonModel->getResourceDownloadAttempts() << "attempts.";
|
||||
<< "after" << _skeletonModel->getResourceDownloadAttempts() << "attempts.";
|
||||
// call _skeletonModel.setURL, but leave our copy of _skeletonModelURL alone. This is so that
|
||||
// we don't redo this every time we receive an identity packet from the avatar with the bad url.
|
||||
QMetaObject::invokeMethod(_skeletonModel.get(), "setURL",
|
||||
|
@ -1403,6 +1412,9 @@ void Avatar::setModelURLFinished(bool success) {
|
|||
<< "out of:" << MAX_SKELETON_DOWNLOAD_ATTEMPTS;
|
||||
}
|
||||
}
|
||||
if (success) {
|
||||
indicateLoadingStatus(LoadingStatus::LoadSuccess);
|
||||
}
|
||||
}
|
||||
|
||||
// rig is ready
|
||||
|
|
|
@ -73,6 +73,7 @@ public:
|
|||
|
||||
void init();
|
||||
void updateAvatarEntities();
|
||||
void removeAvatarEntitiesFromTree();
|
||||
void simulate(float deltaTime, bool inView);
|
||||
virtual void simulateAttachments(float deltaTime);
|
||||
|
||||
|
@ -107,6 +108,14 @@ public:
|
|||
virtual bool isMyAvatar() const override { return false; }
|
||||
virtual void createOrb() { }
|
||||
|
||||
enum class LoadingStatus {
|
||||
NoModel,
|
||||
LoadModel,
|
||||
LoadSuccess,
|
||||
LoadFailure
|
||||
};
|
||||
virtual void indicateLoadingStatus(LoadingStatus loadingStatus) { _loadingStatus = loadingStatus; }
|
||||
|
||||
virtual QVector<glm::quat> getJointRotations() const override;
|
||||
using AvatarData::getJointRotation;
|
||||
virtual glm::quat getJointRotation(int index) const override;
|
||||
|
@ -540,6 +549,8 @@ protected:
|
|||
static const float MYAVATAR_LOADING_PRIORITY;
|
||||
static const float OTHERAVATAR_LOADING_PRIORITY;
|
||||
static const float ATTACHMENT_LOADING_PRIORITY;
|
||||
|
||||
LoadingStatus _loadingStatus { LoadingStatus::NoModel };
|
||||
};
|
||||
|
||||
#endif // hifi_Avatar_h
|
||||
|
|
|

@ -363,13 +363,13 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
    memcpy(destinationBuffer, &packetStateFlags, sizeof(packetStateFlags));
    destinationBuffer += sizeof(packetStateFlags);

#define AVATAR_MEMCPY(src) \
    memcpy(destinationBuffer, &(src), sizeof(src)); \
    destinationBuffer += sizeof(src);

    if (hasAvatarGlobalPosition) {
        auto startSection = destinationBuffer;
        auto data = reinterpret_cast<AvatarDataPacket::AvatarGlobalPosition*>(destinationBuffer);
        data->globalPosition[0] = _globalPosition.x;
        data->globalPosition[1] = _globalPosition.y;
        data->globalPosition[2] = _globalPosition.z;
        destinationBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
        AVATAR_MEMCPY(_globalPosition);

        int numBytes = destinationBuffer - startSection;
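AVATAR_MEMCPY collapses the per-component stores into one block copy, which is only valid because the copied types are trivially copyable and tightly packed. A small illustration of how that assumption could be made explicit at compile time; these checks are not in the diff, just a sketch:

```cpp
#include <glm/glm.hpp>
#include <cstring>
#include <type_traits>

// AVATAR_MEMCPY is only safe if the source type has no padding and is
// trivially copyable; these asserts document that assumption.
static_assert(std::is_trivially_copyable<glm::vec3>::value, "memcpy requires a trivially copyable type");
static_assert(sizeof(glm::vec3) == 3 * sizeof(float), "glm::vec3 must be tightly packed");

// Minimal stand-in for the packing pattern used in toByteArray().
inline unsigned char* packVec3(unsigned char* destinationBuffer, const glm::vec3& src) {
    memcpy(destinationBuffer, &src, sizeof(src));
    return destinationBuffer + sizeof(src);
}
```
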
@ -380,17 +380,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
|
||||
if (hasAvatarBoundingBox) {
|
||||
auto startSection = destinationBuffer;
|
||||
auto data = reinterpret_cast<AvatarDataPacket::AvatarBoundingBox*>(destinationBuffer);
|
||||
|
||||
data->avatarDimensions[0] = _globalBoundingBoxDimensions.x;
|
||||
data->avatarDimensions[1] = _globalBoundingBoxDimensions.y;
|
||||
data->avatarDimensions[2] = _globalBoundingBoxDimensions.z;
|
||||
|
||||
data->boundOriginOffset[0] = _globalBoundingBoxOffset.x;
|
||||
data->boundOriginOffset[1] = _globalBoundingBoxOffset.y;
|
||||
data->boundOriginOffset[2] = _globalBoundingBoxOffset.z;
|
||||
|
||||
destinationBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);
|
||||
AVATAR_MEMCPY(_globalBoundingBoxDimensions);
|
||||
AVATAR_MEMCPY(_globalBoundingBoxOffset);
|
||||
|
||||
int numBytes = destinationBuffer - startSection;
|
||||
if (outboundDataRateOut) {
|
||||
|
@ -424,13 +415,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
|
||||
if (hasLookAtPosition) {
|
||||
auto startSection = destinationBuffer;
|
||||
auto data = reinterpret_cast<AvatarDataPacket::LookAtPosition*>(destinationBuffer);
|
||||
auto lookAt = _headData->getLookAtPosition();
|
||||
data->lookAtPosition[0] = lookAt.x;
|
||||
data->lookAtPosition[1] = lookAt.y;
|
||||
data->lookAtPosition[2] = lookAt.z;
|
||||
destinationBuffer += sizeof(AvatarDataPacket::LookAtPosition);
|
||||
|
||||
AVATAR_MEMCPY(_headData->getLookAtPosition());
|
||||
int numBytes = destinationBuffer - startSection;
|
||||
if (outboundDataRateOut) {
|
||||
outboundDataRateOut->lookAtPositionRate.increment(numBytes);
|
||||
|
@ -531,12 +516,8 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
|
||||
if (hasAvatarLocalPosition) {
|
||||
auto startSection = destinationBuffer;
|
||||
auto data = reinterpret_cast<AvatarDataPacket::AvatarLocalPosition*>(destinationBuffer);
|
||||
auto localPosition = getLocalPosition();
|
||||
data->localPosition[0] = localPosition.x;
|
||||
data->localPosition[1] = localPosition.y;
|
||||
data->localPosition[2] = localPosition.z;
|
||||
destinationBuffer += sizeof(AvatarDataPacket::AvatarLocalPosition);
|
||||
const auto localPosition = getLocalPosition();
|
||||
AVATAR_MEMCPY(localPosition);
|
||||
|
||||
int numBytes = destinationBuffer - startSection;
|
||||
if (outboundDataRateOut) {
|
||||
|
@ -567,19 +548,24 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
}
|
||||
}
|
||||
|
||||
QVector<JointData> jointData;
|
||||
if (hasJointData || hasJointDefaultPoseFlags) {
|
||||
QReadLocker readLock(&_jointDataLock);
|
||||
jointData = _jointData;
|
||||
}
|
||||
|
||||
// If it is connected, pack up the data
|
||||
if (hasJointData) {
|
||||
auto startSection = destinationBuffer;
|
||||
QReadLocker readLock(&_jointDataLock);
|
||||
|
||||
// joint rotation data
|
||||
int numJoints = _jointData.size();
|
||||
int numJoints = jointData.size();
|
||||
*destinationBuffer++ = (uint8_t)numJoints;
|
||||
|
||||
unsigned char* validityPosition = destinationBuffer;
|
||||
unsigned char validity = 0;
|
||||
int validityBit = 0;
|
||||
int numValidityBytes = (int)std::ceil(numJoints / (float)BITS_IN_BYTE);
|
||||
int numValidityBytes = calcBitVectorSize(numJoints);
|
||||
|
||||
#ifdef WANT_DEBUG
|
||||
int rotationSentCount = 0;
|
||||
|
@ -589,43 +575,37 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
|
||||
|
||||
// sentJointDataOut and lastSentJointData might be the same vector
|
||||
// build sentJointDataOut locally and then swap it at the end.
|
||||
QVector<JointData> localSentJointDataOut;
|
||||
if (sentJointDataOut) {
|
||||
localSentJointDataOut.resize(numJoints); // Make sure the destination is resized before using it
|
||||
sentJointDataOut->resize(numJoints); // Make sure the destination is resized before using it
|
||||
}
|
||||
|
||||
float minRotationDOT = !distanceAdjust ? AVATAR_MIN_ROTATION_DOT : getDistanceBasedMinRotationDOT(viewerPosition);
|
||||
float minRotationDOT = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinRotationDOT(viewerPosition) : AVATAR_MIN_ROTATION_DOT;
|
||||
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
const JointData& data = _jointData[i];
|
||||
for (int i = 0; i < jointData.size(); i++) {
|
||||
const JointData& data = jointData[i];
|
||||
const JointData& last = lastSentJointData[i];
|
||||
|
||||
if (!data.rotationIsDefaultPose) {
|
||||
bool mustSend = sendAll || last.rotationIsDefaultPose;
|
||||
if (mustSend || last.rotation != data.rotation) {
|
||||
|
||||
bool largeEnoughRotation = true;
|
||||
if (cullSmallChanges) {
|
||||
// The dot product for smaller rotations is a smaller number.
|
||||
// So if the dot() is less than the value, then the rotation is a larger angle of rotation
|
||||
largeEnoughRotation = fabsf(glm::dot(last.rotation, data.rotation)) < minRotationDOT;
|
||||
}
|
||||
|
||||
if (mustSend || !cullSmallChanges || largeEnoughRotation) {
|
||||
validity |= (1 << validityBit);
|
||||
// The dot product for larger rotations is a lower number.
|
||||
// So if the dot() is less than the value, then the rotation is a larger angle of rotation
|
||||
if (sendAll || last.rotationIsDefaultPose || (!cullSmallChanges && last.rotation != data.rotation)
|
||||
|| (cullSmallChanges && glm::dot(last.rotation, data.rotation) < minRotationDOT) ) {
|
||||
validity |= (1 << validityBit);
|
||||
#ifdef WANT_DEBUG
|
||||
rotationSentCount++;
|
||||
rotationSentCount++;
|
||||
#endif
|
||||
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);
|
||||
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, data.rotation);
|
||||
|
||||
if (sentJointDataOut) {
|
||||
localSentJointDataOut[i].rotation = data.rotation;
|
||||
localSentJointDataOut[i].rotationIsDefaultPose = false;
|
||||
}
|
||||
if (sentJointDataOut) {
|
||||
(*sentJointDataOut)[i].rotation = data.rotation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sentJointDataOut) {
|
||||
(*sentJointDataOut)[i].rotationIsDefaultPose = data.rotationIsDefaultPose;
|
||||
}
|
||||
|
||||
if (++validityBit == BITS_IN_BYTE) {
|
||||
*validityPosition++ = validity;
|
||||
validityBit = validity = 0;
|
||||
|
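The rewritten rotation test above merges the old nested conditions into one expression. Restated on its own for readability (a sketch mirroring the variables used above, not the class's API): a smaller dot product between the last-sent and current quaternion means a larger rotation, so the cull keeps a joint only when the dot falls below minRotationDOT.

```cpp
#include <glm/gtc/quaternion.hpp>

// Condensed restatement of the per-joint rotation send test used above.
bool shouldSendRotation(const glm::quat& lastSent, const glm::quat& current,
                        bool lastWasDefaultPose, bool sendAll,
                        bool cullSmallChanges, float minRotationDOT) {
    return sendAll || lastWasDefaultPose
        || (!cullSmallChanges && lastSent != current)
        || (cullSmallChanges && glm::dot(lastSent, current) < minRotationDOT);
}
```
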
@ -647,35 +627,38 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
|
||||
destinationBuffer += numValidityBytes; // Move pointer past the validity bytes
|
||||
|
||||
float minTranslation = !distanceAdjust ? AVATAR_MIN_TRANSLATION : getDistanceBasedMinTranslationDistance(viewerPosition);
|
||||
float minTranslation = (distanceAdjust && cullSmallChanges) ? getDistanceBasedMinTranslationDistance(viewerPosition) : AVATAR_MIN_TRANSLATION;
|
||||
|
||||
float maxTranslationDimension = 0.0;
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
const JointData& data = _jointData[i];
|
||||
for (int i = 0; i < jointData.size(); i++) {
|
||||
const JointData& data = jointData[i];
|
||||
const JointData& last = lastSentJointData[i];
|
||||
|
||||
if (!data.translationIsDefaultPose) {
|
||||
bool mustSend = sendAll || last.translationIsDefaultPose;
|
||||
if (mustSend || last.translation != data.translation) {
|
||||
if (mustSend || !cullSmallChanges || glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation) {
|
||||
validity |= (1 << validityBit);
|
||||
if (sendAll || last.translationIsDefaultPose || (!cullSmallChanges && last.translation != data.translation)
|
||||
|| (cullSmallChanges && glm::distance(data.translation, lastSentJointData[i].translation) > minTranslation)) {
|
||||
|
||||
validity |= (1 << validityBit);
|
||||
#ifdef WANT_DEBUG
|
||||
translationSentCount++;
|
||||
translationSentCount++;
|
||||
#endif
|
||||
maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
|
||||
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
|
||||
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);
|
||||
maxTranslationDimension = glm::max(fabsf(data.translation.x), maxTranslationDimension);
|
||||
maxTranslationDimension = glm::max(fabsf(data.translation.y), maxTranslationDimension);
|
||||
maxTranslationDimension = glm::max(fabsf(data.translation.z), maxTranslationDimension);
|
||||
|
||||
destinationBuffer +=
|
||||
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
|
||||
destinationBuffer +=
|
||||
packFloatVec3ToSignedTwoByteFixed(destinationBuffer, data.translation, TRANSLATION_COMPRESSION_RADIX);
|
||||
|
||||
if (sentJointDataOut) {
|
||||
localSentJointDataOut[i].translation = data.translation;
|
||||
localSentJointDataOut[i].translationIsDefaultPose = false;
|
||||
}
|
||||
if (sentJointDataOut) {
|
||||
(*sentJointDataOut)[i].translation = data.translation;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (sentJointDataOut) {
|
||||
(*sentJointDataOut)[i].translationIsDefaultPose = data.translationIsDefaultPose;
|
||||
}
|
||||
|
||||
if (++validityBit == BITS_IN_BYTE) {
|
||||
*validityPosition++ = validity;
|
||||
validityBit = validity = 0;
|
||||
|
@ -691,6 +674,7 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, controllerLeftHandTransform.getRotation());
|
||||
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerLeftHandTransform.getTranslation(),
|
||||
TRANSLATION_COMPRESSION_RADIX);
|
||||
|
||||
Transform controllerRightHandTransform = Transform(getControllerRightHandMatrix());
|
||||
destinationBuffer += packOrientationQuatToSixBytes(destinationBuffer, controllerRightHandTransform.getRotation());
|
||||
destinationBuffer += packFloatVec3ToSignedTwoByteFixed(destinationBuffer, controllerRightHandTransform.getTranslation(),
|
||||
|
@ -707,34 +691,27 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
glm::vec3 mouseFarGrabPosition = extractTranslation(mouseFarGrabMatrix);
|
||||
glm::quat mouseFarGrabRotation = extractRotation(mouseFarGrabMatrix);
|
||||
|
||||
data->leftFarGrabPosition[0] = leftFarGrabPosition.x;
|
||||
data->leftFarGrabPosition[1] = leftFarGrabPosition.y;
|
||||
data->leftFarGrabPosition[2] = leftFarGrabPosition.z;
|
||||
|
||||
AVATAR_MEMCPY(leftFarGrabPosition);
|
||||
// Can't do block copy as struct order is x, y, z, w.
|
||||
data->leftFarGrabRotation[0] = leftFarGrabRotation.w;
|
||||
data->leftFarGrabRotation[1] = leftFarGrabRotation.x;
|
||||
data->leftFarGrabRotation[2] = leftFarGrabRotation.y;
|
||||
data->leftFarGrabRotation[3] = leftFarGrabRotation.z;
|
||||
destinationBuffer += sizeof(data->leftFarGrabPosition);
|
||||
|
||||
data->rightFarGrabPosition[0] = rightFarGrabPosition.x;
|
||||
data->rightFarGrabPosition[1] = rightFarGrabPosition.y;
|
||||
data->rightFarGrabPosition[2] = rightFarGrabPosition.z;
|
||||
|
||||
AVATAR_MEMCPY(rightFarGrabPosition);
|
||||
data->rightFarGrabRotation[0] = rightFarGrabRotation.w;
|
||||
data->rightFarGrabRotation[1] = rightFarGrabRotation.x;
|
||||
data->rightFarGrabRotation[2] = rightFarGrabRotation.y;
|
||||
data->rightFarGrabRotation[3] = rightFarGrabRotation.z;
|
||||
destinationBuffer += sizeof(data->rightFarGrabRotation);
|
||||
|
||||
data->mouseFarGrabPosition[0] = mouseFarGrabPosition.x;
|
||||
data->mouseFarGrabPosition[1] = mouseFarGrabPosition.y;
|
||||
data->mouseFarGrabPosition[2] = mouseFarGrabPosition.z;
|
||||
|
||||
AVATAR_MEMCPY(mouseFarGrabPosition);
|
||||
data->mouseFarGrabRotation[0] = mouseFarGrabRotation.w;
|
||||
data->mouseFarGrabRotation[1] = mouseFarGrabRotation.x;
|
||||
data->mouseFarGrabRotation[2] = mouseFarGrabRotation.y;
|
||||
data->mouseFarGrabRotation[3] = mouseFarGrabRotation.z;
|
||||
|
||||
destinationBuffer += sizeof(AvatarDataPacket::FarGrabJoints);
|
||||
destinationBuffer += sizeof(data->mouseFarGrabRotation);
|
||||
|
||||
int numBytes = destinationBuffer - startSection;
|
||||
|
||||
|
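As the comment above notes, the grab rotations cannot go through AVATAR_MEMCPY because glm stores quaternion components as (x, y, z, w) while the packet struct expects (w, x, y, z). A tiny helper that makes the reorder explicit (illustrative only; the diff keeps the component-by-component assignments inline):

```cpp
#include <glm/gtc/quaternion.hpp>

// Reorders glm's (x, y, z, w) storage into the packet's (w, x, y, z) layout.
inline void packQuatWxyz(float out[4], const glm::quat& q) {
    out[0] = q.w;
    out[1] = q.x;
    out[2] = q.y;
    out[3] = q.z;
}
```
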
@ -761,41 +738,23 @@ QByteArray AvatarData::toByteArray(AvatarDataDetail dataDetail, quint64 lastSent
|
|||
outboundDataRateOut->jointDataRate.increment(numBytes);
|
||||
}
|
||||
|
||||
if (sentJointDataOut) {
|
||||
|
||||
// Mark default poses in lastSentJointData, so when they become non-default we send them.
|
||||
for (int i = 0; i < _jointData.size(); i++) {
|
||||
const JointData& data = _jointData[i];
|
||||
JointData& local = localSentJointDataOut[i];
|
||||
if (data.rotationIsDefaultPose) {
|
||||
local.rotationIsDefaultPose = true;
|
||||
}
|
||||
if (data.translationIsDefaultPose) {
|
||||
local.translationIsDefaultPose = true;
|
||||
}
|
||||
}
|
||||
|
||||
// Push new sent joint data to sentJointDataOut
|
||||
sentJointDataOut->swap(localSentJointDataOut);
|
||||
}
|
||||
}
|
||||
|
||||
if (hasJointDefaultPoseFlags) {
|
||||
auto startSection = destinationBuffer;
|
||||
QReadLocker readLock(&_jointDataLock);
|
||||
|
||||
// write numJoints
|
||||
int numJoints = _jointData.size();
|
||||
int numJoints = jointData.size();
|
||||
*destinationBuffer++ = (uint8_t)numJoints;
|
||||
|
||||
// write rotationIsDefaultPose bits
|
||||
destinationBuffer += writeBitVector(destinationBuffer, numJoints, [&](int i) {
|
||||
return _jointData[i].rotationIsDefaultPose;
|
||||
return jointData[i].rotationIsDefaultPose;
|
||||
});
|
||||
|
||||
// write translationIsDefaultPose bits
|
||||
destinationBuffer += writeBitVector(destinationBuffer, numJoints, [&](int i) {
|
||||
return _jointData[i].translationIsDefaultPose;
|
||||
return jointData[i].translationIsDefaultPose;
|
||||
});
|
||||
|
||||
if (outboundDataRateOut) {
|
||||
|
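calcBitVectorSize() and writeBitVector() are used above without their definitions appearing in this diff. A minimal sketch of what such helpers do, assuming one status bit per joint packed LSB-first within each byte; the real shared implementations may differ in detail:

```cpp
#include <cstdint>
#include <functional>

constexpr int BITS_PER_BYTE = 8;

// Number of bytes needed to hold one bit per element.
inline int calcBitVectorSizeSketch(int numBits) {
    return (numBits + BITS_PER_BYTE - 1) / BITS_PER_BYTE;
}

// Packs numBits flags (LSB-first within each byte) and returns bytes written.
inline int writeBitVectorSketch(uint8_t* destinationBuffer, int numBits,
                                const std::function<bool(int)>& getBit) {
    int numBytes = calcBitVectorSizeSketch(numBits);
    for (int i = 0; i < numBytes; ++i) {
        destinationBuffer[i] = 0;
    }
    for (int i = 0; i < numBits; ++i) {
        if (getBit(i)) {
            destinationBuffer[i / BITS_PER_BYTE] |= (1 << (i % BITS_PER_BYTE));
        }
    }
    return numBytes;
}
```
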
@ -880,7 +839,6 @@ const unsigned char* unpackFauxJoint(const unsigned char* sourceBuffer, ThreadSa
|
|||
|
||||
// read data in packet starting at byte offset and return number of bytes parsed
|
||||
int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
||||
|
||||
// lazily allocate memory for HeadData in case we're not an Avatar instance
|
||||
lazyInitHeadData();
|
||||
|
||||
|
@ -932,7 +890,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
auto newValue = glm::vec3(data->globalPosition[0], data->globalPosition[1], data->globalPosition[2]) + offset;
|
||||
if (_globalPosition != newValue) {
|
||||
_globalPosition = newValue;
|
||||
_globalPositionChanged = usecTimestampNow();
|
||||
_globalPositionChanged = now;
|
||||
}
|
||||
sourceBuffer += sizeof(AvatarDataPacket::AvatarGlobalPosition);
|
||||
int numBytesRead = sourceBuffer - startSection;
|
||||
|
@ -956,11 +914,11 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
|
||||
if (_globalBoundingBoxDimensions != newDimensions) {
|
||||
_globalBoundingBoxDimensions = newDimensions;
|
||||
_avatarBoundingBoxChanged = usecTimestampNow();
|
||||
_avatarBoundingBoxChanged = now;
|
||||
}
|
||||
if (_globalBoundingBoxOffset != newOffset) {
|
||||
_globalBoundingBoxOffset = newOffset;
|
||||
_avatarBoundingBoxChanged = usecTimestampNow();
|
||||
_avatarBoundingBoxChanged = now;
|
||||
}
|
||||
|
||||
sourceBuffer += sizeof(AvatarDataPacket::AvatarBoundingBox);
|
||||
|
@ -1061,7 +1019,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
glm::mat4 sensorToWorldMatrix = createMatFromScaleQuatAndPos(glm::vec3(sensorToWorldScale), sensorToWorldQuat, sensorToWorldTrans);
|
||||
if (_sensorToWorldMatrixCache.get() != sensorToWorldMatrix) {
|
||||
_sensorToWorldMatrixCache.set(sensorToWorldMatrix);
|
||||
_sensorToWorldMatrixChanged = usecTimestampNow();
|
||||
_sensorToWorldMatrixChanged = now;
|
||||
}
|
||||
sourceBuffer += sizeof(AvatarDataPacket::SensorToWorldMatrix);
|
||||
int numBytesRead = sourceBuffer - startSection;
|
||||
|
@ -1118,7 +1076,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
sourceBuffer += sizeof(AvatarDataPacket::AdditionalFlags);
|
||||
|
||||
if (somethingChanged) {
|
||||
_additionalFlagsChanged = usecTimestampNow();
|
||||
_additionalFlagsChanged = now;
|
||||
}
|
||||
int numBytesRead = sourceBuffer - startSection;
|
||||
_additionalFlagsRate.increment(numBytesRead);
|
||||
|
@ -1138,7 +1096,7 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
if ((getParentID() != newParentID) || (getParentJointIndex() != parentInfo->parentJointIndex)) {
|
||||
SpatiallyNestable::setParentID(newParentID);
|
||||
SpatiallyNestable::setParentJointIndex(parentInfo->parentJointIndex);
|
||||
_parentChanged = usecTimestampNow();
|
||||
_parentChanged = now;
|
||||
}
|
||||
|
||||
int numBytesRead = sourceBuffer - startSection;
|
||||
|
@ -1187,8 +1145,6 @@ int AvatarData::parseDataFromBuffer(const QByteArray& buffer) {
|
|||
int numBytesRead = sourceBuffer - startSection;
|
||||
_faceTrackerRate.increment(numBytesRead);
|
||||
_faceTrackerUpdateRate.increment();
|
||||
} else {
|
||||
_headData->_blendshapeCoefficients.fill(0, _headData->_blendshapeCoefficients.size());
|
||||
}
|
||||
|
||||
if (hasJointData) {
|
||||
|
@ -1861,9 +1817,7 @@ qint64 AvatarData::packTrait(AvatarTraits::TraitType traitType, ExtendedIODevice
|
|||
}
|
||||
|
||||
qint64 AvatarData::packTraitInstance(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID traitInstanceID,
|
||||
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion,
|
||||
AvatarTraits::TraitInstanceID wireInstanceID) {
|
||||
|
||||
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion) {
|
||||
qint64 bytesWritten = 0;
|
||||
|
||||
bytesWritten += destination.writePrimitive(traitType);
|
||||
|
@ -1872,11 +1826,7 @@ qint64 AvatarData::packTraitInstance(AvatarTraits::TraitType traitType, AvatarTr
|
|||
bytesWritten += destination.writePrimitive(traitVersion);
|
||||
}
|
||||
|
||||
if (!wireInstanceID.isNull()) {
|
||||
bytesWritten += destination.write(wireInstanceID.toRfc4122());
|
||||
} else {
|
||||
bytesWritten += destination.write(traitInstanceID.toRfc4122());
|
||||
}
|
||||
bytesWritten += destination.write(traitInstanceID.toRfc4122());
|
||||
|
||||
if (traitType == AvatarTraits::AvatarEntity) {
|
||||
// grab a read lock on the avatar entities and check for entity data for the given ID
|
||||
|
@ -2879,10 +2829,8 @@ void RayToAvatarIntersectionResultFromScriptValue(const QScriptValue& object, Ra
|
|||
value.extraInfo = object.property("extraInfo").toVariant().toMap();
|
||||
}
|
||||
|
||||
const float AvatarData::OUT_OF_VIEW_PENALTY = -10.0f;
|
||||
|
||||
float AvatarData::_avatarSortCoefficientSize { 1.0f };
|
||||
float AvatarData::_avatarSortCoefficientCenter { 0.25 };
|
||||
float AvatarData::_avatarSortCoefficientSize { 8.0f };
|
||||
float AvatarData::_avatarSortCoefficientCenter { 4.0f };
|
||||
float AvatarData::_avatarSortCoefficientAge { 1.0f };
|
||||
|
||||
QScriptValue AvatarEntityMapToScriptValue(QScriptEngine* engine, const AvatarEntityMap& value) {
|
||||
|
|
|
@ -962,8 +962,7 @@ public:
|
|||
qint64 packTrait(AvatarTraits::TraitType traitType, ExtendedIODevice& destination,
|
||||
AvatarTraits::TraitVersion traitVersion = AvatarTraits::NULL_TRAIT_VERSION);
|
||||
qint64 packTraitInstance(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID instanceID,
|
||||
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion = AvatarTraits::NULL_TRAIT_VERSION,
|
||||
AvatarTraits::TraitInstanceID wireInstanceID = AvatarTraits::TraitInstanceID());
|
||||
ExtendedIODevice& destination, AvatarTraits::TraitVersion traitVersion = AvatarTraits::NULL_TRAIT_VERSION);
|
||||
|
||||
void prepareResetTraitInstances();
|
||||
|
||||
|
@ -1098,7 +1097,7 @@ public:
|
|||
void fromJson(const QJsonObject& json, bool useFrameSkeleton = true);
|
||||
|
||||
glm::vec3 getClientGlobalPosition() const { return _globalPosition; }
|
||||
glm::vec3 getGlobalBoundingBoxCorner() const { return _globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions; }
|
||||
AABox getGlobalBoundingBox() const { return AABox(_globalPosition + _globalBoundingBoxOffset - _globalBoundingBoxDimensions, _globalBoundingBoxDimensions); }
|
||||
|
||||
/**jsdoc
|
||||
* @function MyAvatar.getAvatarEntityData
|
||||
|
@ -1170,8 +1169,6 @@ public:
|
|||
// A method intended to be overriden by MyAvatar for polling orientation for network transmission.
|
||||
virtual glm::quat getOrientationOutbound() const;
|
||||
|
||||
static const float OUT_OF_VIEW_PENALTY;
|
||||
|
||||
// TODO: remove this HACK once we settle on optimal sort coefficients
|
||||
// These coefficients exposed for fine tuning the sort priority for transfering new _jointData to the render pipeline.
|
||||
static float _avatarSortCoefficientSize;
|
||||
|
@ -1193,9 +1190,6 @@ public:
|
|||
void setReplicaIndex(int replicaIndex) { _replicaIndex = replicaIndex; }
|
||||
int getReplicaIndex() { return _replicaIndex; }
|
||||
|
||||
const AvatarTraits::TraitInstanceID getTraitInstanceXORID() const { return _traitInstanceXORID; }
|
||||
void cycleTraitInstanceXORID() { _traitInstanceXORID = QUuid::createUuid(); }
|
||||
|
||||
signals:
|
||||
|
||||
/**jsdoc
|
||||
|
@ -1502,8 +1496,6 @@ private:
|
|||
// privatize the copy constructor and assignment operator so they cannot be called
|
||||
AvatarData(const AvatarData&);
|
||||
AvatarData& operator= (const AvatarData&);
|
||||
|
||||
AvatarTraits::TraitInstanceID _traitInstanceXORID { QUuid::createUuid() };
|
||||
};
|
||||
Q_DECLARE_METATYPE(AvatarData*)
|
||||
|
||||
|
|
|
@ -86,8 +86,7 @@ void AvatarReplicas::processDeletedTraitInstance(const QUuid& parentID, AvatarTr
|
|||
if (_replicasMap.find(parentID) != _replicasMap.end()) {
|
||||
auto &replicas = _replicasMap[parentID];
|
||||
for (auto avatar : replicas) {
|
||||
avatar->processDeletedTraitInstance(traitType,
|
||||
AvatarTraits::xoredInstanceID(instanceID, avatar->getTraitInstanceXORID()));
|
||||
avatar->processDeletedTraitInstance(traitType, instanceID);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -96,9 +95,7 @@ void AvatarReplicas::processTraitInstance(const QUuid& parentID, AvatarTraits::T
|
|||
if (_replicasMap.find(parentID) != _replicasMap.end()) {
|
||||
auto &replicas = _replicasMap[parentID];
|
||||
for (auto avatar : replicas) {
|
||||
avatar->processTraitInstance(traitType,
|
||||
AvatarTraits::xoredInstanceID(instanceID, avatar->getTraitInstanceXORID()),
|
||||
traitBinaryData);
|
||||
avatar->processTraitInstance(traitType, instanceID, traitBinaryData);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -113,6 +110,12 @@ AvatarHashMap::AvatarHashMap() {
|
|||
packetReceiver.registerListener(PacketType::BulkAvatarTraits, this, "processBulkAvatarTraits");
|
||||
|
||||
connect(nodeList.data(), &NodeList::uuidChanged, this, &AvatarHashMap::sessionUUIDChanged);
|
||||
|
||||
connect(nodeList.data(), &NodeList::nodeKilled, this, [this](SharedNodePointer killedNode){
|
||||
if (killedNode->getType() == NodeType::AvatarMixer) {
|
||||
clearOtherAvatars();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
QVector<QUuid> AvatarHashMap::getAvatarIdentifiers() {
|
||||
|
@ -340,28 +343,16 @@ void AvatarHashMap::processBulkAvatarTraits(QSharedPointer<ReceivedMessage> mess
|
|||
AvatarTraits::TraitInstanceID traitInstanceID =
|
||||
QUuid::fromRfc4122(message->readWithoutCopy(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
// XOR the incoming trait instance ID with this avatar object's personal XOR ID
|
||||
|
||||
// this ensures that we have separate entity instances in the local tree
|
||||
// if we briefly end up with two Avatar objects for this node
|
||||
|
||||
// (which can occur if the shared pointer for the
|
||||
// previous instance of an avatar hasn't yet gone out of scope before the
|
||||
// new instance is created)
|
||||
|
||||
auto xoredInstanceID = AvatarTraits::xoredInstanceID(traitInstanceID, avatar->getTraitInstanceXORID());
|
||||
|
||||
message->readPrimitive(&traitBinarySize);
|
||||
|
||||
auto& processedInstanceVersion = lastProcessedVersions.getInstanceValueRef(traitType, traitInstanceID);
|
||||
if (packetTraitVersion > processedInstanceVersion) {
|
||||
// in order to handle re-connections to the avatar mixer when the other
|
||||
if (traitBinarySize == AvatarTraits::DELETED_TRAIT_SIZE) {
|
||||
avatar->processDeletedTraitInstance(traitType, xoredInstanceID);
|
||||
avatar->processDeletedTraitInstance(traitType, traitInstanceID);
|
||||
_replicas.processDeletedTraitInstance(avatarID, traitType, traitInstanceID);
|
||||
} else {
|
||||
auto traitData = message->read(traitBinarySize);
|
||||
avatar->processTraitInstance(traitType, xoredInstanceID, traitData);
|
||||
avatar->processTraitInstance(traitType, traitInstanceID, traitData);
|
||||
_replicas.processTraitInstance(avatarID, traitType, traitInstanceID, traitData);
|
||||
}
|
||||
processedInstanceVersion = packetTraitVersion;
|
||||
|
@ -429,3 +420,12 @@ void AvatarHashMap::sessionUUIDChanged(const QUuid& sessionUUID, const QUuid& ol
|
|||
emit avatarSessionChangedEvent(sessionUUID, oldUUID);
|
||||
}
|
||||
|
||||
void AvatarHashMap::clearOtherAvatars() {
|
||||
QWriteLocker locker(&_hashLock);
|
||||
|
||||
for (auto& av : _avatarHash) {
|
||||
handleRemovedAvatar(av);
|
||||
}
|
||||
|
||||
_avatarHash.clear();
|
||||
}
|
||||
|
|
|
@ -101,6 +101,8 @@ public:
|
|||
void setReplicaCount(int count);
|
||||
int getReplicaCount() { return _replicas.getReplicaCount(); };
|
||||
|
||||
virtual void clearOtherAvatars();
|
||||
|
||||
signals:
|
||||
|
||||
/**jsdoc
|
||||
|
|
|
@ -41,8 +41,7 @@ namespace AvatarTraits {
|
|||
const TraitWireSize DELETED_TRAIT_SIZE = -1;
|
||||
|
||||
inline qint64 packInstancedTraitDelete(TraitType traitType, TraitInstanceID instanceID, ExtendedIODevice& destination,
|
||||
TraitVersion traitVersion = NULL_TRAIT_VERSION,
|
||||
TraitInstanceID xoredInstanceID = TraitInstanceID()) {
|
||||
TraitVersion traitVersion = NULL_TRAIT_VERSION) {
|
||||
qint64 bytesWritten = 0;
|
||||
|
||||
bytesWritten += destination.writePrimitive(traitType);
|
||||
|
@ -51,28 +50,12 @@ namespace AvatarTraits {
|
|||
bytesWritten += destination.writePrimitive(traitVersion);
|
||||
}
|
||||
|
||||
if (xoredInstanceID.isNull()) {
|
||||
bytesWritten += destination.write(instanceID.toRfc4122());
|
||||
} else {
|
||||
bytesWritten += destination.write(xoredInstanceID.toRfc4122());
|
||||
}
|
||||
bytesWritten += destination.write(instanceID.toRfc4122());
|
||||
|
||||
bytesWritten += destination.writePrimitive(DELETED_TRAIT_SIZE);
|
||||
|
||||
return bytesWritten;
|
||||
}
|
||||
|
||||
inline TraitInstanceID xoredInstanceID(TraitInstanceID localInstanceID, TraitInstanceID xorKeyID) {
|
||||
QByteArray xoredInstanceID { NUM_BYTES_RFC4122_UUID, 0 };
|
||||
auto xorKeyIDBytes = xorKeyID.toRfc4122();
|
||||
auto localInstanceIDBytes = localInstanceID.toRfc4122();
|
||||
|
||||
for (auto i = 0; i < localInstanceIDBytes.size(); ++i) {
|
||||
xoredInstanceID[i] = localInstanceIDBytes[i] ^ xorKeyIDBytes[i];
|
||||
}
|
||||
|
||||
return QUuid::fromRfc4122(xoredInstanceID);
|
||||
}
|
||||
};
|
||||
|
||||
#endif // hifi_AvatarTraits_h
|
||||
|
|
|
@ -43,9 +43,6 @@ void ClientTraitsHandler::resetForNewMixer() {
|
|||
|
||||
// pre-fill the instanced statuses that we will need to send next frame
|
||||
_owningAvatar->prepareResetTraitInstances();
|
||||
|
||||
// reset the trait XOR ID since we're resetting for a new avatar mixer
|
||||
_owningAvatar->cycleTraitInstanceXORID();
|
||||
}
|
||||
|
||||
void ClientTraitsHandler::sendChangedTraitsToMixer() {
|
||||
|
@ -96,19 +93,11 @@ void ClientTraitsHandler::sendChangedTraitsToMixer() {
|
|||
|| instanceIDValuePair.value == Updated) {
|
||||
// this is a changed trait we need to send or we haven't send out trait information yet
|
||||
// ask the owning avatar to pack it
|
||||
|
||||
// since this is going to the mixer, use the XORed instance ID (to anonymize trait instance IDs
|
||||
// that would typically persist across sessions)
|
||||
_owningAvatar->packTraitInstance(instancedIt->traitType, instanceIDValuePair.id, *traitsPacketList,
|
||||
AvatarTraits::NULL_TRAIT_VERSION,
|
||||
AvatarTraits::xoredInstanceID(instanceIDValuePair.id,
|
||||
_owningAvatar->getTraitInstanceXORID()));
|
||||
_owningAvatar->packTraitInstance(instancedIt->traitType, instanceIDValuePair.id, *traitsPacketList);
|
||||
} else if (!_shouldPerformInitialSend && instanceIDValuePair.value == Deleted) {
|
||||
// pack delete for this trait instance
|
||||
AvatarTraits::packInstancedTraitDelete(instancedIt->traitType, instanceIDValuePair.id,
|
||||
*traitsPacketList, AvatarTraits::NULL_TRAIT_VERSION,
|
||||
AvatarTraits::xoredInstanceID(instanceIDValuePair.id,
|
||||
_owningAvatar->getTraitInstanceXORID()));
|
||||
*traitsPacketList);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -30,9 +30,9 @@ public:
|
|||
|
||||
void markTraitUpdated(AvatarTraits::TraitType updatedTrait)
|
||||
{ _traitStatuses[updatedTrait] = Updated; _hasChangedTraits = true; }
|
||||
void markInstancedTraitUpdated(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID updatedInstanceID)
|
||||
void markInstancedTraitUpdated(AvatarTraits::TraitType traitType, QUuid updatedInstanceID)
|
||||
{ _traitStatuses.instanceInsert(traitType, updatedInstanceID, Updated); _hasChangedTraits = true; }
|
||||
void markInstancedTraitDeleted(AvatarTraits::TraitType traitType, AvatarTraits::TraitInstanceID deleteInstanceID)
|
||||
void markInstancedTraitDeleted(AvatarTraits::TraitType traitType, QUuid deleteInstanceID)
|
||||
{ _traitStatuses.instanceInsert(traitType, deleteInstanceID, Deleted); _hasChangedTraits = true; }
|
||||
|
||||
void resetForNewMixer();
|
||||
|
|
|
@ -1297,9 +1297,20 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
|
|||
}
|
||||
});
|
||||
|
||||
// Check for removal
|
||||
ModelPointer model;
|
||||
withReadLock([&] { model = _model; });
|
||||
|
||||
withWriteLock([&] {
|
||||
bool visuallyReady = true;
|
||||
if (_hasModel) {
|
||||
if (model && _didLastVisualGeometryRequestSucceed) {
|
||||
visuallyReady = (_prevModelLoaded && _texturesLoaded);
|
||||
}
|
||||
}
|
||||
entity->setVisuallyReady(visuallyReady);
|
||||
});
|
||||
|
||||
// Check for removal
|
||||
if (!_hasModel) {
|
||||
if (model) {
|
||||
model->removeFromScene(scene, transaction);
|
||||
|
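The block added above decides when a model entity counts as visually ready: an entity with no model is trivially ready, and one with a usable model becomes ready only once both the geometry and its textures have loaded. The same rule as a standalone predicate (parameter names mirror the members above; illustrative only, not the renderer's API):

```cpp
// hasModel:    the entity references a model
// modelUsable: a Model object exists and the last geometry request succeeded
// Returns true when the entity can be shown without visible popping.
bool isModelVisuallyReady(bool hasModel, bool modelUsable,
                          bool prevModelLoaded, bool texturesLoaded) {
    if (hasModel && modelUsable) {
        return prevModelLoaded && texturesLoaded;
    }
    return true;
}
```
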
@ -1441,11 +1452,11 @@ void ModelEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& sce
|
|||
// That is where _currentFrame and _lastAnimated were updated.
|
||||
if (_animating) {
|
||||
DETAILED_PROFILE_RANGE(simulation_physics, "Animate");
|
||||
|
||||
|
||||
if (!jointsMapped()) {
|
||||
mapJoints(entity, model->getJointNames());
|
||||
//else the joint have been mapped before but we have a new animation to load
|
||||
} else if (_animation && (_animation->getURL().toString() != entity->getAnimationURL())) {
|
||||
} else if (_animation && (_animation->getURL().toString() != entity->getAnimationURL())) {
|
||||
_animation = DependencyManager::get<AnimationCache>()->getAnimation(entity->getAnimationURL());
|
||||
_jointMappingCompleted = false;
|
||||
mapJoints(entity, model->getJointNames());
|
||||
|
|
|
@ -288,6 +288,17 @@ void ZoneEntityRenderer::doRenderUpdateSynchronousTyped(const ScenePointer& scen
|
|||
updateHazeFromEntity(entity);
|
||||
}
|
||||
|
||||
|
||||
bool visuallyReady = true;
|
||||
uint32_t skyboxMode = entity->getSkyboxMode();
|
||||
if (skyboxMode == COMPONENT_MODE_ENABLED && !_skyboxTextureURL.isEmpty()) {
|
||||
bool skyboxLoadedOrFailed = (_skyboxTexture && (_skyboxTexture->isLoaded() || _skyboxTexture->isFailed()));
|
||||
|
||||
visuallyReady = skyboxLoadedOrFailed;
|
||||
}
|
||||
|
||||
entity->setVisuallyReady(visuallyReady);
|
||||
|
||||
if (bloomChanged) {
|
||||
updateBloomFromEntity(entity);
|
||||
}
|
||||
|
|
|
@ -305,6 +305,7 @@ public:
|
|||
void setDynamic(bool value);
|
||||
|
||||
virtual bool shouldBePhysical() const { return false; }
|
||||
bool isVisuallyReady() const { return _visuallyReady; }
|
||||
|
||||
bool getLocked() const;
|
||||
void setLocked(bool value);
|
||||
|
@ -527,6 +528,7 @@ public:
|
|||
void removeCloneID(const QUuid& cloneID);
|
||||
const QVector<QUuid> getCloneIDs() const;
|
||||
void setCloneIDs(const QVector<QUuid>& cloneIDs);
|
||||
void setVisuallyReady(bool visuallyReady) { _visuallyReady = visuallyReady; }
|
||||
|
||||
signals:
|
||||
void requestRenderUpdate();
|
||||
|
@ -639,6 +641,7 @@ protected:
|
|||
EntityTreeElementPointer _element; // set by EntityTreeElement
|
||||
void* _physicsInfo { nullptr }; // set by EntitySimulation
|
||||
bool _simulated { false }; // set by EntitySimulation
|
||||
bool _visuallyReady { true };
|
||||
|
||||
bool addActionInternal(EntitySimulationPointer simulation, EntityDynamicPointer action);
|
||||
bool removeActionInternal(const QUuid& actionID, EntitySimulationPointer simulation = nullptr);
|
||||
|
|
|
@ -40,6 +40,7 @@ ModelEntityItem::ModelEntityItem(const EntityItemID& entityItemID) : EntityItem(
|
|||
_type = EntityTypes::Model;
|
||||
_lastKnownCurrentFrame = -1;
|
||||
_color[0] = _color[1] = _color[2] = 0;
|
||||
_visuallyReady = false;
|
||||
}
|
||||
|
||||
const QString ModelEntityItem::getTextures() const {
|
||||
|
|
|
@ -42,6 +42,7 @@ ZoneEntityItem::ZoneEntityItem(const EntityItemID& entityItemID) : EntityItem(en
|
|||
|
||||
_shapeType = DEFAULT_SHAPE_TYPE;
|
||||
_compoundShapeURL = DEFAULT_COMPOUND_SHAPE_URL;
|
||||
_visuallyReady = false;
|
||||
}
|
||||
|
||||
EntityItemProperties ZoneEntityItem::getProperties(EntityPropertyFlags desiredProperties) const {
|
||||
|
|
|
@ -240,7 +240,6 @@ public:
|
|||
QVector<glm::vec3> vertices;
|
||||
QVector<glm::vec3> normals;
|
||||
QVector<glm::vec3> tangents;
|
||||
mutable QVector<NormalType> normalsAndTangents; // Populated later if needed for blendshapes
|
||||
QVector<glm::vec3> colors;
|
||||
QVector<glm::vec2> texCoords;
|
||||
QVector<glm::vec2> texCoords1;
|
||||
|
|
|

@ -201,9 +201,10 @@ void GLBackend::renderPassTransfer(const Batch& batch) {
    {
        Vec2u outputSize{ 1,1 };

        if (_output._framebuffer) {
            outputSize.x = _output._framebuffer->getWidth();
            outputSize.y = _output._framebuffer->getHeight();
        auto framebuffer = acquire(_output._framebuffer);
        if (framebuffer) {
            outputSize.x = framebuffer->getWidth();
            outputSize.y = framebuffer->getHeight();
        } else if (glm::dot(_transform._projectionJitter, _transform._projectionJitter)>0.0f) {
            qCWarning(gpugllogging) << "Jittering needs to have a frame buffer to be set";
        }

@ -220,6 +221,7 @@ void GLBackend::renderPassTransfer(const Batch& batch) {
                _stereo._contextDisable = false;
                break;

            case Batch::COMMAND_setFramebuffer:
            case Batch::COMMAND_setViewportTransform:
            case Batch::COMMAND_setViewTransform:
            case Batch::COMMAND_setProjectionTransform:
@ -30,7 +30,6 @@
|
|||
|
||||
#include "GLShared.h"
|
||||
|
||||
|
||||
// Different versions for the stereo drawcall
|
||||
// Current preferred is "instanced" which draw the shape twice but instanced and rely on clipping plane to draw left/right side only
|
||||
#if defined(USE_GLES)
|
||||
|
@ -40,7 +39,6 @@
|
|||
#define GPU_STEREO_TECHNIQUE_INSTANCED
|
||||
#endif
|
||||
|
||||
|
||||
// Let these be configured by the one define picked above
|
||||
#ifdef GPU_STEREO_TECHNIQUE_DOUBLED_SIMPLE
|
||||
#define GPU_STEREO_DRAWCALL_DOUBLED
|
||||
|
@ -56,8 +54,153 @@
|
|||
#define GPU_STEREO_CAMERA_BUFFER
|
||||
#endif
|
||||
|
||||
//
// GL Backend pointer storage mechanism
// One of the following three defines must be defined.
// GPU_POINTER_STORAGE_SHARED

// The platonic ideal, use references to smart pointers.
// However, this produces artifacts because there are too many places in the code right now that
// create temporary values (undesirable smart pointer duplications) and then those temp variables
// get passed on and have their reference taken, and then invalidated
// GPU_POINTER_STORAGE_REF

// Raw pointer manipulation. Seems more dangerous than the reference wrappers,
// but in practice, the danger of grabbing a reference to a temporary variable
// is causing issues
// GPU_POINTER_STORAGE_RAW

#if defined(USE_GLES)
#define GPU_POINTER_STORAGE_SHARED
#else
#define GPU_POINTER_STORAGE_RAW
#endif

namespace gpu { namespace gl {

#if defined(GPU_POINTER_STORAGE_SHARED)
|
||||
template <typename T>
|
||||
static inline bool compare(const std::shared_ptr<T>& a, const std::shared_ptr<T>& b) {
|
||||
return a == b;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline T* acquire(const std::shared_ptr<T>& pointer) {
|
||||
return pointer.get();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline void reset(std::shared_ptr<T>& pointer) {
|
||||
return pointer.reset();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline bool valid(const std::shared_ptr<T>& pointer) {
|
||||
return pointer.operator bool();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline void assign(std::shared_ptr<T>& pointer, const std::shared_ptr<T>& source) {
|
||||
pointer = source;
|
||||
}
|
||||
|
||||
using BufferReference = BufferPointer;
|
||||
using TextureReference = TexturePointer;
|
||||
using FramebufferReference = FramebufferPointer;
|
||||
using FormatReference = Stream::FormatPointer;
|
||||
using PipelineReference = PipelinePointer;
|
||||
|
||||
#define GPU_REFERENCE_INIT_VALUE nullptr
|
||||
|
||||
#elif defined(GPU_POINTER_STORAGE_REF)
|
||||
|
||||
template <typename T>
|
||||
class PointerReferenceWrapper : public std::reference_wrapper<const std::shared_ptr<T>> {
|
||||
using Parent = std::reference_wrapper<const std::shared_ptr<T>>;
|
||||
|
||||
public:
|
||||
using Pointer = std::shared_ptr<T>;
|
||||
PointerReferenceWrapper() : Parent(EMPTY()) {}
|
||||
PointerReferenceWrapper(const Pointer& pointer) : Parent(pointer) {}
|
||||
void clear() { *this = EMPTY(); }
|
||||
|
||||
private:
|
||||
static const Pointer& EMPTY() {
|
||||
static const Pointer EMPTY_VALUE;
|
||||
return EMPTY_VALUE;
|
||||
};
|
||||
};
|
||||
|
||||
template <typename T>
|
||||
static bool compare(const PointerReferenceWrapper<T>& reference, const std::shared_ptr<T>& pointer) {
|
||||
return reference.get() == pointer;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline T* acquire(const PointerReferenceWrapper<T>& reference) {
|
||||
return reference.get().get();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static void assign(PointerReferenceWrapper<T>& reference, const std::shared_ptr<T>& pointer) {
|
||||
reference = pointer;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static bool valid(const PointerReferenceWrapper<T>& reference) {
|
||||
return reference.get().operator bool();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline void reset(PointerReferenceWrapper<T>& reference) {
|
||||
return reference.clear();
|
||||
}
|
||||
|
||||
using BufferReference = PointerReferenceWrapper<Buffer>;
|
||||
using TextureReference = PointerReferenceWrapper<Texture>;
|
||||
using FramebufferReference = PointerReferenceWrapper<Framebuffer>;
|
||||
using FormatReference = PointerReferenceWrapper<Stream::Format>;
|
||||
using PipelineReference = PointerReferenceWrapper<Pipeline>;
|
||||
|
||||
#define GPU_REFERENCE_INIT_VALUE
|
||||
|
||||
#elif defined(GPU_POINTER_STORAGE_RAW)
|
||||
|
||||
template <typename T>
|
||||
static bool compare(const T* const& rawPointer, const std::shared_ptr<T>& pointer) {
|
||||
return rawPointer == pointer.get();
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline T* acquire(T*& rawPointer) {
|
||||
return rawPointer;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline bool valid(const T* const& rawPointer) {
|
||||
return rawPointer;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline void reset(T*& rawPointer) {
|
||||
rawPointer = nullptr;
|
||||
}
|
||||
|
||||
template <typename T>
|
||||
static inline void assign(T*& rawPointer, const std::shared_ptr<T>& pointer) {
|
||||
rawPointer = pointer.get();
|
||||
}
|
||||
|
||||
using BufferReference = Buffer*;
|
||||
using TextureReference = Texture*;
|
||||
using FramebufferReference = Framebuffer*;
|
||||
using FormatReference = Stream::Format*;
|
||||
using PipelineReference = Pipeline*;
|
||||
|
||||
#define GPU_REFERENCE_INIT_VALUE nullptr
|
||||
|
||||
#endif
|
||||
|
||||
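Whichever storage define is selected, cached GPU objects are only ever touched through the same small set of free functions (compare, acquire, assign, valid, reset), which is what keeps the backend code identical across the GLES and desktop builds. A usage sketch, assuming it sits inside this gpu::gl namespace next to the definitions above; the cache struct and function below are made up for illustration:

```cpp
// Illustrative only: a backend stage caching the currently bound framebuffer
// through the storage-agnostic helpers, independent of which define is active.
struct CachedOutputSketch {
    FramebufferReference _framebuffer { GPU_REFERENCE_INIT_VALUE };
};

inline void syncFramebufferSketch(CachedOutputSketch& cache, const FramebufferPointer& requested) {
    if (!compare(cache._framebuffer, requested)) {
        assign(cache._framebuffer, requested);               // remember the new binding
    }
    if (valid(cache._framebuffer)) {
        Framebuffer* framebuffer = acquire(cache._framebuffer);  // non-owning access
        (void)framebuffer;  // ... bind/use framebuffer here ...
    }
}
```
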
class GLBackend : public Backend, public std::enable_shared_from_this<GLBackend> {
|
||||
// Context Backend static interface required
|
||||
friend class gpu::Context;
|
||||
|
@ -67,8 +210,8 @@ class GLBackend : public Backend, public std::enable_shared_from_this<GLBackend>
|
|||
protected:
|
||||
explicit GLBackend(bool syncCache);
|
||||
GLBackend();
|
||||
public:
|
||||
|
||||
public:
|
||||
#if defined(USE_GLES)
|
||||
// https://www.khronos.org/registry/OpenGL-Refpages/es3/html/glGet.xhtml
|
||||
static const GLint MIN_REQUIRED_TEXTURE_IMAGE_UNITS = 16;
|
||||
|
@ -109,8 +252,8 @@ public:
|
|||
// This is the ugly "download the pixels to sysmem for taking a snapshot"
|
||||
// Just avoid using it, it's ugly and will break performances
|
||||
virtual void downloadFramebuffer(const FramebufferPointer& srcFramebuffer,
|
||||
const Vec4i& region, QImage& destImage) final override;
|
||||
|
||||
const Vec4i& region,
|
||||
QImage& destImage) final override;
|
||||
|
||||
// this is the maximum numeber of available input buffers
|
||||
size_t getNumInputBuffers() const { return _input._invalidBuffers.size(); }
|
||||
|
@ -131,7 +274,6 @@ public:
|
|||
static const int MAX_NUM_RESOURCE_TABLE_TEXTURES = 2;
|
||||
size_t getMaxNumResourceTextureTables() const { return MAX_NUM_RESOURCE_TABLE_TEXTURES; }
|
||||
|
||||
|
||||
// Draw Stage
|
||||
virtual void do_draw(const Batch& batch, size_t paramOffset) = 0;
|
||||
virtual void do_drawIndexed(const Batch& batch, size_t paramOffset) = 0;
|
||||
|
@ -183,7 +325,6 @@ public:
|
|||
// Reset stages
|
||||
virtual void do_resetStages(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
|
||||
virtual void do_disableContextViewCorrection(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_restoreContextViewCorrection(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
|
@ -203,7 +344,7 @@ public:
|
|||
virtual void do_popProfileRange(const Batch& batch, size_t paramOffset) final;
|
||||
|
||||
// TODO: As long as we have gl calls explicitely issued from interface
|
||||
// code, we need to be able to record and batch these calls. THe long
|
||||
// code, we need to be able to record and batch these calls. THe long
|
||||
// term strategy is to get rid of any GL calls in favor of the HIFI GPU API
|
||||
virtual void do_glUniform1i(const Batch& batch, size_t paramOffset) final;
|
||||
virtual void do_glUniform1f(const Batch& batch, size_t paramOffset) final;
|
||||
|
@ -228,7 +369,9 @@ public:
|
|||
virtual void do_setStateAntialiasedLineEnable(bool enable) final;
|
||||
virtual void do_setStateDepthBias(Vec2 bias) final;
|
||||
virtual void do_setStateDepthTest(State::DepthTest test) final;
|
||||
virtual void do_setStateStencil(State::StencilActivation activation, State::StencilTest frontTest, State::StencilTest backTest) final;
|
||||
virtual void do_setStateStencil(State::StencilActivation activation,
|
||||
State::StencilTest frontTest,
|
||||
State::StencilTest backTest) final;
|
||||
virtual void do_setStateAlphaToCoverageEnable(bool enable) final;
|
||||
virtual void do_setStateSampleMask(uint32 mask) final;
|
||||
virtual void do_setStateBlend(State::BlendFunction blendFunction) final;
|
||||
|
@ -257,7 +400,9 @@ public:
|
|||
virtual void releaseQuery(GLuint id) const;
|
||||
virtual void queueLambda(const std::function<void()> lambda) const;
|
||||
|
||||
bool isTextureManagementSparseEnabled() const override { return (_textureManagement._sparseCapable && Texture::getEnableSparseTextures()); }
|
||||
bool isTextureManagementSparseEnabled() const override {
|
||||
return (_textureManagement._sparseCapable && Texture::getEnableSparseTextures());
|
||||
}
|
||||
|
||||
protected:
|
||||
virtual GLint getRealUniformLocation(GLint location) const;
|
||||
|
@ -266,11 +411,11 @@ protected:
|
|||
|
||||
// FIXME instead of a single flag, create a features struct similar to
|
||||
// https://www.khronos.org/registry/vulkan/specs/1.0/man/html/VkPhysicalDeviceFeatures.html
|
||||
virtual bool supportsBindless() const { return false; }
|
||||
virtual bool supportsBindless() const { return false; }
|
||||
|
||||
static const size_t INVALID_OFFSET = (size_t)-1;
|
||||
bool _inRenderTransferPass { false };
|
||||
int _currentDraw { -1 };
|
||||
bool _inRenderTransferPass{ false };
|
||||
int _currentDraw{ -1 };
|
||||
|
||||
std::list<std::string> profileRanges;
|
||||
mutable Mutex _trashMutex;
|
||||
|
@ -299,46 +444,36 @@ protected:
|
|||
virtual void updateInput() = 0;
|
||||
|
||||
struct InputStageState {
|
||||
bool _invalidFormat { true };
|
||||
bool _lastUpdateStereoState{ false };
|
||||
bool _invalidFormat{ true };
|
||||
bool _lastUpdateStereoState{ false };
|
||||
bool _hadColorAttribute{ true };
|
||||
Stream::FormatPointer _format;
|
||||
FormatReference _format{ GPU_REFERENCE_INIT_VALUE };
|
||||
std::string _formatKey;
|
||||
|
||||
typedef std::bitset<MAX_NUM_ATTRIBUTES> ActivationCache;
|
||||
ActivationCache _attributeActivation { 0 };
|
||||
ActivationCache _attributeActivation{ 0 };
|
||||
|
||||
typedef std::bitset<MAX_NUM_INPUT_BUFFERS> BuffersState;
|
||||
|
||||
BuffersState _invalidBuffers{ 0 };
|
||||
BuffersState _attribBindingBuffers{ 0 };
|
||||
|
||||
Buffers _buffers;
|
||||
Offsets _bufferOffsets;
|
||||
Offsets _bufferStrides;
|
||||
std::vector<GLuint> _bufferVBOs;
|
||||
std::array<BufferReference, MAX_NUM_INPUT_BUFFERS> _buffers{};
|
||||
std::array<Offset, MAX_NUM_INPUT_BUFFERS> _bufferOffsets{};
|
||||
std::array<Offset, MAX_NUM_INPUT_BUFFERS> _bufferStrides{};
|
||||
std::array<GLuint, MAX_NUM_INPUT_BUFFERS> _bufferVBOs{};
|
||||
|
||||
glm::vec4 _colorAttribute{ 0.0f };
|
||||
|
||||
BufferPointer _indexBuffer;
|
||||
Offset _indexBufferOffset { 0 };
|
||||
Type _indexBufferType { UINT32 };
|
||||
|
||||
BufferPointer _indirectBuffer;
|
||||
BufferReference _indexBuffer{};
|
||||
Offset _indexBufferOffset{ 0 };
|
||||
Type _indexBufferType{ UINT32 };
|
||||
|
||||
BufferReference _indirectBuffer{};
|
||||
Offset _indirectBufferOffset{ 0 };
|
||||
Offset _indirectBufferStride{ 0 };
|
||||
|
||||
GLuint _defaultVAO { 0 };
|
||||
|
||||
InputStageState() :
|
||||
_invalidFormat(true),
|
||||
_format(0),
|
||||
_formatKey(),
|
||||
_attributeActivation(0),
|
||||
_buffers(_invalidBuffers.size(), BufferPointer(0)),
|
||||
_bufferOffsets(_invalidBuffers.size(), 0),
|
||||
_bufferStrides(_invalidBuffers.size(), 0),
|
||||
_bufferVBOs(_invalidBuffers.size(), 0) {}
|
||||
GLuint _defaultVAO{ 0 };
|
||||
} _input;
|
||||
|
||||
virtual void initTransform() = 0;
|
||||
|
@ -349,7 +484,7 @@ protected:
|
|||
virtual void resetTransformStage();
|
||||
|
||||
// Allows for correction of the camera pose to account for changes
|
||||
// between the time when a was recorded and the time(s) when it is
|
||||
// between the time when a was recorded and the time(s) when it is
|
||||
// executed
|
||||
// Prev is the previous correction used at previous frame
|
||||
struct CameraCorrection {
|
||||
|
@ -364,9 +499,12 @@ protected:
|
|||
struct Cameras {
|
||||
TransformCamera _cams[2];
|
||||
|
||||
Cameras() {};
|
||||
Cameras(){};
|
||||
Cameras(const TransformCamera& cam) { memcpy(_cams, &cam, sizeof(TransformCamera)); };
|
||||
Cameras(const TransformCamera& camL, const TransformCamera& camR) { memcpy(_cams, &camL, sizeof(TransformCamera)); memcpy(_cams + 1, &camR, sizeof(TransformCamera)); };
|
||||
Cameras(const TransformCamera& camL, const TransformCamera& camR) {
|
||||
memcpy(_cams, &camL, sizeof(TransformCamera));
|
||||
memcpy(_cams + 1, &camR, sizeof(TransformCamera));
|
||||
};
|
||||
};
|
||||
|
||||
using CameraBufferElement = Cameras;
|
||||
|
@ -380,25 +518,24 @@ protected:
|
|||
|
||||
mutable std::map<std::string, GLvoid*> _drawCallInfoOffsets;
|
||||
|
||||
GLuint _objectBuffer { 0 };
|
||||
GLuint _cameraBuffer { 0 };
|
||||
GLuint _drawCallInfoBuffer { 0 };
|
||||
GLuint _objectBufferTexture { 0 };
|
||||
size_t _cameraUboSize { 0 };
|
||||
GLuint _objectBuffer{ 0 };
|
||||
GLuint _cameraBuffer{ 0 };
|
||||
GLuint _drawCallInfoBuffer{ 0 };
|
||||
GLuint _objectBufferTexture{ 0 };
|
||||
size_t _cameraUboSize{ 0 };
|
||||
bool _viewIsCamera{ false };
|
||||
bool _skybox { false };
|
||||
bool _skybox{ false };
|
||||
Transform _view;
|
||||
CameraCorrection _correction;
|
||||
bool _viewCorrectionEnabled{ true };
|
||||
|
||||
|
||||
Mat4 _projection;
|
||||
Vec4i _viewport { 0, 0, 1, 1 };
|
||||
Vec2 _depthRange { 0.0f, 1.0f };
|
||||
Vec4i _viewport{ 0, 0, 1, 1 };
|
||||
Vec2 _depthRange{ 0.0f, 1.0f };
|
||||
Vec2 _projectionJitter{ 0.0f, 0.0f };
|
||||
bool _invalidView { false };
|
||||
bool _invalidProj { false };
|
||||
bool _invalidViewport { false };
|
||||
bool _invalidView{ false };
|
||||
bool _invalidProj{ false };
|
||||
bool _invalidViewport{ false };
|
||||
|
||||
bool _enabledDrawcallInfoBuffer{ false };
|
||||
|
||||
|
@@ -417,45 +554,54 @@ protected:

    struct UniformStageState {
        struct BufferState {
            BufferPointer buffer;
            BufferReference buffer{};
            GLintptr offset{ 0 };
            GLsizeiptr size{ 0 };
            BufferState(const BufferPointer& buffer = nullptr, GLintptr offset = 0, GLsizeiptr size = 0);
            bool operator ==(BufferState& other) const {
                return offset == other.offset && size == other.size && buffer == other.buffer;

            BufferState& operator=(const BufferState& other) = delete;
            void reset() {
                gpu::gl::reset(buffer);
                offset = 0;
                size = 0;
            }
            bool compare(const BufferPointer& buffer, GLintptr offset, GLsizeiptr size) {
                const auto& self = *this;
                return (self.offset == offset && self.size == size && gpu::gl::compare(self.buffer, buffer));
            }
        };

        // MAX_NUM_UNIFORM_BUFFERS-1 is the max uniform index BATCHES are allowed to set, but
        // MIN_REQUIRED_UNIFORM_BUFFER_BINDINGS is used here because the backend sets some
        // internal UBOs for things like camera correction
        // MIN_REQUIRED_UNIFORM_BUFFER_BINDINGS is used here because the backend sets some
        // internal UBOs for things like camera correction
        std::array<BufferState, MIN_REQUIRED_UNIFORM_BUFFER_BINDINGS> _buffers;
    } _uniform;

    // Helper function that provides common code
    // Helper function that provides common code
    void bindUniformBuffer(uint32_t slot, const BufferPointer& buffer, GLintptr offset = 0, GLsizeiptr size = 0);
    void releaseUniformBuffer(uint32_t slot);
    void resetUniformStage();

    // update resource cache and do the gl bind/unbind call with the current gpu::Buffer cached at slot s
    // This is using different gl object depending on the gl version
    virtual bool bindResourceBuffer(uint32_t slot, BufferPointer& buffer) = 0;
    virtual bool bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) = 0;
    virtual void releaseResourceBuffer(uint32_t slot) = 0;

    // Helper function that provides common code used by do_setResourceTexture and
    // Helper function that provides common code used by do_setResourceTexture and
    // do_setResourceTextureTable (in non-bindless mode)
    void bindResourceTexture(uint32_t slot, const TexturePointer& texture);

    // update resource cache and do the gl unbind call with the current gpu::Texture cached at slot s
    void releaseResourceTexture(uint32_t slot);

    void resetResourceStage();

    struct ResourceStageState {
        std::array<BufferPointer, MAX_NUM_RESOURCE_BUFFERS> _buffers;
        std::array<TexturePointer, MAX_NUM_RESOURCE_TEXTURES> _textures;
        //Textures _textures { { MAX_NUM_RESOURCE_TEXTURES } };
        struct TextureState {
            TextureReference _texture{};
            GLenum _target;
        };
        std::array<BufferReference, MAX_NUM_RESOURCE_BUFFERS> _buffers{};
        std::array<TextureState, MAX_NUM_RESOURCE_TEXTURES> _textures{};
        int findEmptyTextureSlot() const;
    } _resource;
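Caching the GL target next to the texture reference in TextureState is what lets a slot be unbound without first looking up the GLTexture object again. A rough sketch of the release path this enables, mirroring the reworked releaseResourceTexture later in this diff (the free-function form and the ResourceStageState parameter are for illustration only):

    // Illustrative only: unbind a cached texture slot using the stored _target.
    void releaseResourceTextureSketch(ResourceStageState& resource, uint32_t slot) {
        auto& textureState = resource._textures[slot];
        if (valid(textureState._texture)) {
            glActiveTexture(GL_TEXTURE0 + slot);
            glBindTexture(textureState._target, 0); // RELEASE without touching the gpu::Texture
            reset(textureState._texture);
        }
    }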
|
||||
|
||||
|
@ -470,21 +616,22 @@ protected:
|
|||
void resetPipelineStage();
|
||||
|
||||
struct PipelineStageState {
|
||||
PipelinePointer _pipeline;
|
||||
PipelineReference _pipeline{};
|
||||
|
||||
GLuint _program { 0 };
|
||||
bool _cameraCorrection { false };
|
||||
GLShader* _programShader { nullptr };
|
||||
bool _invalidProgram { false };
|
||||
GLuint _program{ 0 };
|
||||
bool _cameraCorrection{ false };
|
||||
GLShader* _programShader{ nullptr };
|
||||
bool _invalidProgram{ false };
|
||||
|
||||
BufferView _cameraCorrectionBuffer { gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(CameraCorrection), nullptr )) };
|
||||
BufferView _cameraCorrectionBufferIdentity { gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(CameraCorrection), nullptr )) };
|
||||
BufferView _cameraCorrectionBuffer{ gpu::BufferView(std::make_shared<gpu::Buffer>(sizeof(CameraCorrection), nullptr)) };
|
||||
BufferView _cameraCorrectionBufferIdentity{ gpu::BufferView(
|
||||
std::make_shared<gpu::Buffer>(sizeof(CameraCorrection), nullptr)) };
|
||||
|
||||
State::Data _stateCache{ State::DEFAULT };
|
||||
State::Signature _stateSignatureCache { 0 };
|
||||
State::Signature _stateSignatureCache{ 0 };
|
||||
|
||||
GLState* _state { nullptr };
|
||||
bool _invalidState { false };
|
||||
GLState* _state{ nullptr };
|
||||
bool _invalidState{ false };
|
||||
|
||||
PipelineStageState() {
|
||||
_cameraCorrectionBuffer.edit<CameraCorrection>() = CameraCorrection();
|
||||
|
@ -498,9 +645,9 @@ protected:
|
|||
virtual GLShader* compileBackendProgram(const Shader& program, const Shader::CompilationHandler& handler);
|
||||
virtual GLShader* compileBackendShader(const Shader& shader, const Shader::CompilationHandler& handler);
|
||||
virtual std::string getBackendShaderHeader() const = 0;
|
||||
// For a program, this will return a string containing all the source files (without any
|
||||
// backend headers or defines). For a vertex, fragment or geometry shader, this will
|
||||
// return the fully customized shader with all the version and backend specific
|
||||
// For a program, this will return a string containing all the source files (without any
|
||||
// backend headers or defines). For a vertex, fragment or geometry shader, this will
|
||||
// return the fully customized shader with all the version and backend specific
|
||||
// preprocessor directives
|
||||
// The program string returned can be used as a key for a cache of shader binaries
|
||||
// The shader strings can be reliably sent to the low level `compileShader` functions
|
||||
|
@ -516,22 +663,22 @@ protected:
|
|||
// Synchronize the state cache of this Backend with the actual real state of the GL Context
|
||||
void syncOutputStateCache();
|
||||
void resetOutputStage();
|
||||
|
||||
|
||||
struct OutputStageState {
|
||||
FramebufferPointer _framebuffer { nullptr };
|
||||
GLuint _drawFBO { 0 };
|
||||
FramebufferReference _framebuffer{};
|
||||
GLuint _drawFBO{ 0 };
|
||||
} _output;
|
||||
|
||||
void resetQueryStage();
|
||||
struct QueryStageState {
|
||||
uint32_t _rangeQueryDepth { 0 };
|
||||
uint32_t _rangeQueryDepth{ 0 };
|
||||
} _queryStage;
|
||||
|
||||
void resetStages();
|
||||
|
||||
// Stores cached binary versions of the shaders for quicker startup on subsequent runs
|
||||
// Note that shaders in the cache can still fail to load due to hardware or driver
|
||||
// changes that invalidate the cached binary, in which case we fall back on compiling
|
||||
// Note that shaders in the cache can still fail to load due to hardware or driver
|
||||
// changes that invalidate the cached binary, in which case we fall back on compiling
|
||||
// the source again
|
||||
struct ShaderBinaryCache {
|
||||
std::mutex _mutex;
|
||||
|
@ -543,7 +690,7 @@ protected:
|
|||
virtual void killShaderBinaryCache();
|
||||
|
||||
struct TextureManagementStageState {
|
||||
bool _sparseCapable { false };
|
||||
bool _sparseCapable{ false };
|
||||
GLTextureTransferEnginePointer _transferEngine;
|
||||
} _textureManagement;
|
||||
virtual void initTextureManagementStage();
|
||||
|
@ -556,6 +703,6 @@ protected:
|
|||
friend class GLShader;
|
||||
};
|
||||
|
||||
} }
|
||||
}} // namespace gpu::gl
|
||||
|
||||
#endif
|
||||
|
|
|
@@ -17,10 +17,10 @@ using namespace gpu;
using namespace gpu::gl;

void GLBackend::do_setInputFormat(const Batch& batch, size_t paramOffset) {
    Stream::FormatPointer format = batch._streamFormats.get(batch._params[paramOffset]._uint);
    if (format != _input._format) {
        _input._format = format;
    const auto& format = batch._streamFormats.get(batch._params[paramOffset]._uint);
    if (!compare(_input._format, format)) {
        if (format) {
            assign(_input._format, format);
            auto inputFormat = GLInputFormat::sync((*format));
            assert(inputFormat);
            if (_input._formatKey != inputFormat->key) {
@@ -28,6 +28,7 @@ void GLBackend::do_setInputFormat(const Batch& batch, size_t paramOffset) {
                _input._invalidFormat = true;
            }
        } else {
            reset(_input._format);
            _input._formatKey.clear();
            _input._invalidFormat = true;
        }
@@ -37,13 +38,13 @@ void GLBackend::do_setInputFormat(const Batch& batch, size_t paramOffset) {
void GLBackend::do_setInputBuffer(const Batch& batch, size_t paramOffset) {
    Offset stride = batch._params[paramOffset + 0]._uint;
    Offset offset = batch._params[paramOffset + 1]._uint;
    BufferPointer buffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
    const auto& buffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
    uint32 channel = batch._params[paramOffset + 3]._uint;

    if (channel < getNumInputBuffers()) {
        bool isModified = false;
        if (_input._buffers[channel] != buffer) {
            _input._buffers[channel] = buffer;
        if (!compare(_input._buffers[channel], buffer)) {
            assign(_input._buffers[channel], buffer);
            _input._bufferVBOs[channel] = getBufferIDUnsynced((*buffer));
            isModified = true;
        }
|
||||
|
@ -94,18 +95,18 @@ void GLBackend::resetInputStage() {
|
|||
// Reset index buffer
|
||||
_input._indexBufferType = UINT32;
|
||||
_input._indexBufferOffset = 0;
|
||||
_input._indexBuffer.reset();
|
||||
reset(_input._indexBuffer);
|
||||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, 0);
|
||||
(void) CHECK_GL_ERROR();
|
||||
|
||||
// Reset vertex buffer and format
|
||||
_input._format.reset();
|
||||
reset(_input._format);
|
||||
_input._formatKey.clear();
|
||||
_input._invalidFormat = false;
|
||||
_input._attributeActivation.reset();
|
||||
|
||||
for (uint32_t i = 0; i < _input._buffers.size(); i++) {
|
||||
_input._buffers[i].reset();
|
||||
reset(_input._buffers[i]);
|
||||
_input._bufferOffsets[i] = 0;
|
||||
_input._bufferStrides[i] = 0;
|
||||
_input._bufferVBOs[i] = 0;
|
||||
|
@ -119,9 +120,9 @@ void GLBackend::do_setIndexBuffer(const Batch& batch, size_t paramOffset) {
|
|||
_input._indexBufferType = (Type)batch._params[paramOffset + 2]._uint;
|
||||
_input._indexBufferOffset = batch._params[paramOffset + 0]._uint;
|
||||
|
||||
BufferPointer indexBuffer = batch._buffers.get(batch._params[paramOffset + 1]._uint);
|
||||
if (indexBuffer != _input._indexBuffer) {
|
||||
_input._indexBuffer = indexBuffer;
|
||||
const auto& indexBuffer = batch._buffers.get(batch._params[paramOffset + 1]._uint);
|
||||
if (!compare(_input._indexBuffer, indexBuffer)) {
|
||||
assign(_input._indexBuffer, indexBuffer);
|
||||
if (indexBuffer) {
|
||||
glBindBuffer(GL_ELEMENT_ARRAY_BUFFER, getBufferIDUnsynced(*indexBuffer));
|
||||
} else {
|
||||
|
@ -136,9 +137,9 @@ void GLBackend::do_setIndirectBuffer(const Batch& batch, size_t paramOffset) {
|
|||
_input._indirectBufferOffset = batch._params[paramOffset + 1]._uint;
|
||||
_input._indirectBufferStride = batch._params[paramOffset + 2]._uint;
|
||||
|
||||
BufferPointer buffer = batch._buffers.get(batch._params[paramOffset]._uint);
|
||||
if (buffer != _input._indirectBuffer) {
|
||||
_input._indirectBuffer = buffer;
|
||||
const auto& buffer = batch._buffers.get(batch._params[paramOffset]._uint);
|
||||
if (!compare(_input._indirectBuffer, buffer)) {
|
||||
assign(_input._indirectBuffer, buffer);
|
||||
if (buffer) {
|
||||
glBindBuffer(GL_DRAW_INDIRECT_BUFFER, getBufferIDUnsynced(*buffer));
|
||||
} else {
|
||||
|
@ -152,7 +153,7 @@ void GLBackend::do_setIndirectBuffer(const Batch& batch, size_t paramOffset) {
|
|||
|
||||
void GLBackend::updateInput() {
|
||||
bool isStereoNow = isStereo();
|
||||
// track stereo state change potentially happening wihtout changing the input format
|
||||
// track stereo state change potentially happening without changing the input format
|
||||
// this is a rare case requesting to invalid the format
|
||||
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
|
||||
_input._invalidFormat |= (isStereoNow != _input._lastUpdateStereoState);
|
||||
|
@ -163,13 +164,14 @@ void GLBackend::updateInput() {
|
|||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_input._format) {
|
||||
auto format = acquire(_input._format);
|
||||
if (format) {
|
||||
bool hasColorAttribute{ false };
|
||||
|
||||
_input._attribBindingBuffers.reset();
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
const auto& attributes = format->getAttributes();
|
||||
const auto& inputChannels = format->getChannels();
|
||||
for (auto& channelIt : inputChannels) {
|
||||
auto bufferChannelNum = (channelIt).first;
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
|
|
|
@@ -27,12 +27,12 @@ void GLBackend::syncOutputStateCache() {
    glGetIntegerv(GL_DRAW_FRAMEBUFFER_BINDING, &currentFBO);

    _output._drawFBO = currentFBO;
    _output._framebuffer.reset();
    reset(_output._framebuffer);
}

void GLBackend::resetOutputStage() {
    if (_output._framebuffer) {
        _output._framebuffer.reset();
    if (valid(_output._framebuffer)) {
        reset(_output._framebuffer);
        _output._drawFBO = 0;
        glBindFramebuffer(GL_DRAW_FRAMEBUFFER, 0);
    }
@@ -41,7 +41,7 @@ void GLBackend::resetOutputStage() {
}

void GLBackend::do_setFramebuffer(const Batch& batch, size_t paramOffset) {
    auto framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
    const auto& framebuffer = batch._framebuffers.get(batch._params[paramOffset]._uint);
    setFramebuffer(framebuffer);
}

@@ -55,13 +55,13 @@ void GLBackend::do_setFramebufferSwapChain(const Batch& batch, size_t paramOffse
}

void GLBackend::setFramebuffer(const FramebufferPointer& framebuffer) {
    if (_output._framebuffer != framebuffer) {
    if (!compare(_output._framebuffer, framebuffer)) {
        auto newFBO = getFramebufferID(framebuffer);
        if (_output._drawFBO != newFBO) {
            _output._drawFBO = newFBO;
            glBindFramebuffer(GL_DRAW_FRAMEBUFFER, newFBO);
        }
        _output._framebuffer = framebuffer;
        assign(_output._framebuffer, framebuffer);
    }
}
|
||||
|
||||
|
@ -114,8 +114,9 @@ void GLBackend::do_clearFramebuffer(const Batch& batch, size_t paramOffset) {
|
|||
}
|
||||
|
||||
std::vector<GLenum> drawBuffers;
|
||||
auto framebuffer = acquire(_output._framebuffer);
|
||||
if (masks & Framebuffer::BUFFER_COLORS) {
|
||||
if (_output._framebuffer) {
|
||||
if (framebuffer) {
|
||||
for (unsigned int i = 0; i < Framebuffer::MAX_NUM_RENDER_BUFFERS; i++) {
|
||||
if (masks & (1 << i)) {
|
||||
drawBuffers.push_back(GL_COLOR_ATTACHMENT0 + i);
|
||||
|
@ -163,8 +164,8 @@ void GLBackend::do_clearFramebuffer(const Batch& batch, size_t paramOffset) {
|
|||
}
|
||||
|
||||
// Restore the color draw buffers only if a frmaebuffer is bound
|
||||
if (_output._framebuffer && !drawBuffers.empty()) {
|
||||
auto glFramebuffer = syncGPUObject(*_output._framebuffer);
|
||||
if (framebuffer && !drawBuffers.empty()) {
|
||||
auto glFramebuffer = syncGPUObject(*framebuffer);
|
||||
if (glFramebuffer) {
|
||||
glDrawBuffers((GLsizei)glFramebuffer->_colorBuffers.size(), glFramebuffer->_colorBuffers.data());
|
||||
}
|
||||
|
|
|
@ -23,9 +23,9 @@ using namespace gpu;
|
|||
using namespace gpu::gl;
|
||||
|
||||
void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
|
||||
PipelinePointer pipeline = batch._pipelines.get(batch._params[paramOffset + 0]._uint);
|
||||
const auto& pipeline = batch._pipelines.get(batch._params[paramOffset + 0]._uint);
|
||||
|
||||
if (_pipeline._pipeline == pipeline) {
|
||||
if (compare(_pipeline._pipeline, pipeline)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -34,7 +34,7 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
|
|||
|
||||
// null pipeline == reset
|
||||
if (!pipeline) {
|
||||
_pipeline._pipeline.reset();
|
||||
reset(_pipeline._pipeline);
|
||||
|
||||
_pipeline._program = 0;
|
||||
_pipeline._cameraCorrection = false;
|
||||
|
@ -73,7 +73,7 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
|
|||
}
|
||||
|
||||
// Remember the new pipeline
|
||||
_pipeline._pipeline = pipeline;
|
||||
assign(_pipeline._pipeline, pipeline);
|
||||
}
|
||||
|
||||
// THis should be done on Pipeline::update...
|
||||
|
@ -81,7 +81,7 @@ void GLBackend::do_setPipeline(const Batch& batch, size_t paramOffset) {
|
|||
glUseProgram(_pipeline._program);
|
||||
if (_pipeline._cameraCorrection) {
|
||||
// Invalidate uniform buffer cache slot
|
||||
_uniform._buffers[gpu::slot::buffer::CameraCorrection] = {};
|
||||
_uniform._buffers[gpu::slot::buffer::CameraCorrection].reset();
|
||||
auto& cameraCorrectionBuffer = _transform._viewCorrectionEnabled ?
|
||||
_pipeline._cameraCorrectionBuffer._buffer :
|
||||
_pipeline._cameraCorrectionBufferIdentity._buffer;
|
||||
|
@ -112,7 +112,7 @@ void GLBackend::updatePipeline() {
|
|||
_pipeline._stateSignatureCache |= _pipeline._state->_signature;
|
||||
|
||||
// And perform
|
||||
for (auto command : _pipeline._state->_commands) {
|
||||
for (const auto& command : _pipeline._state->_commands) {
|
||||
command->run(this);
|
||||
}
|
||||
} else {
|
||||
|
@@ -134,23 +134,17 @@ void GLBackend::resetPipelineStage() {
    _pipeline._invalidProgram = false;
    _pipeline._program = 0;
    _pipeline._programShader = nullptr;
    _pipeline._pipeline.reset();
    reset(_pipeline._pipeline);
    glUseProgram(0);
}

GLBackend::UniformStageState::BufferState::BufferState(const BufferPointer& buffer, GLintptr offset, GLsizeiptr size)
    : buffer(buffer), offset(offset), size(size) {}

void GLBackend::releaseUniformBuffer(uint32_t slot) {
    auto& buf = _uniform._buffers[slot];
    if (buf.buffer) {
        auto* object = Backend::getGPUObject<GLBuffer>(*buf.buffer);
        if (object) {
            glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0); // RELEASE
            (void)CHECK_GL_ERROR();
        }
        buf = UniformStageState::BufferState();
    auto& bufferState = _uniform._buffers[slot];
    if (valid(bufferState.buffer)) {
        glBindBufferBase(GL_UNIFORM_BUFFER, slot, 0); // RELEASE
        (void)CHECK_GL_ERROR();
    }
    bufferState.reset();
}

void GLBackend::resetUniformStage() {
@@ -165,18 +159,20 @@ void GLBackend::bindUniformBuffer(uint32_t slot, const BufferPointer& buffer, GL
        return;
    }

    UniformStageState::BufferState bufferState{ buffer, offset, size };

    auto& currentBufferState = _uniform._buffers[slot];
    // check cache before thinking
    if (_uniform._buffers[slot] == bufferState) {
    if (currentBufferState.compare(buffer, offset, size)) {
        return;
    }

    // Grab the true gl Buffer object
    auto glBO = getBufferIDUnsynced(*buffer);
    if (glBO) {
        glBindBufferRange(GL_UNIFORM_BUFFER, slot, glBO, bufferState.offset, bufferState.size);
        _uniform._buffers[slot] = bufferState;
        glBindBufferRange(GL_UNIFORM_BUFFER, slot, glBO, offset, size);
        assign(currentBufferState.buffer, buffer);
        currentBufferState.offset = offset;
        currentBufferState.size = size;
        (void)CHECK_GL_ERROR();
    } else {
        releaseUniformBuffer(slot);
|
||||
|
@ -193,7 +189,7 @@ void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
|
|||
return;
|
||||
}
|
||||
|
||||
BufferPointer uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
|
||||
const auto& uniformBuffer = batch._buffers.get(batch._params[paramOffset + 2]._uint);
|
||||
GLintptr rangeStart = batch._params[paramOffset + 1]._uint;
|
||||
GLsizeiptr rangeSize = batch._params[paramOffset + 0]._uint;
|
||||
|
||||
|
@ -201,16 +197,12 @@ void GLBackend::do_setUniformBuffer(const Batch& batch, size_t paramOffset) {
|
|||
}
|
||||
|
||||
void GLBackend::releaseResourceTexture(uint32_t slot) {
|
||||
auto& tex = _resource._textures[slot];
|
||||
if (tex) {
|
||||
auto* object = Backend::getGPUObject<GLTexture>(*tex);
|
||||
if (object) {
|
||||
GLuint target = object->_target;
|
||||
glActiveTexture(GL_TEXTURE0 + slot);
|
||||
glBindTexture(target, 0); // RELEASE
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
tex.reset();
|
||||
auto& textureState = _resource._textures[slot];
|
||||
if (valid(textureState._texture)) {
|
||||
glActiveTexture(GL_TEXTURE0 + slot);
|
||||
glBindTexture(textureState._target, 0); // RELEASE
|
||||
(void)CHECK_GL_ERROR();
|
||||
reset(textureState._texture);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -232,14 +224,14 @@ void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
|
|||
return;
|
||||
}
|
||||
|
||||
auto resourceBuffer = batch._buffers.get(batch._params[paramOffset + 0]._uint);
|
||||
const auto& resourceBuffer = batch._buffers.get(batch._params[paramOffset + 0]._uint);
|
||||
|
||||
if (!resourceBuffer) {
|
||||
releaseResourceBuffer(slot);
|
||||
return;
|
||||
}
|
||||
// check cache before thinking
|
||||
if (_resource._buffers[slot] == resourceBuffer) {
|
||||
if (compare(_resource._buffers[slot], resourceBuffer)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -248,7 +240,7 @@ void GLBackend::do_setResourceBuffer(const Batch& batch, size_t paramOffset) {
|
|||
|
||||
// If successful bind then cache it
|
||||
if (bindResourceBuffer(slot, resourceBuffer)) {
|
||||
_resource._buffers[slot] = resourceBuffer;
|
||||
assign(_resource._buffers[slot], resourceBuffer);
|
||||
} else { // else clear slot and cache
|
||||
releaseResourceBuffer(slot);
|
||||
return;
|
||||
|
@ -293,14 +285,15 @@ void GLBackend::do_setResourceFramebufferSwapChainTexture(const Batch& batch, si
|
|||
}
|
||||
auto index = batch._params[paramOffset + 2]._uint;
|
||||
auto renderBufferSlot = batch._params[paramOffset + 3]._uint;
|
||||
auto resourceFramebuffer = swapChain->get(index);
|
||||
auto resourceTexture = resourceFramebuffer->getRenderBuffer(renderBufferSlot);
|
||||
const auto& resourceFramebuffer = swapChain->get(index);
|
||||
const auto& resourceTexture = resourceFramebuffer->getRenderBuffer(renderBufferSlot);
|
||||
setResourceTexture(slot, resourceTexture);
|
||||
}
|
||||
|
||||
void GLBackend::setResourceTexture(unsigned int slot, const TexturePointer& resourceTexture) {
|
||||
auto& textureState = _resource._textures[slot];
|
||||
// check cache before thinking
|
||||
if (_resource._textures[slot] == resourceTexture) {
|
||||
if (compare(textureState._texture, resourceTexture)) {
|
||||
return;
|
||||
}
|
||||
|
||||
|
@ -310,15 +303,12 @@ void GLBackend::setResourceTexture(unsigned int slot, const TexturePointer& reso
|
|||
// Always make sure the GLObject is in sync
|
||||
GLTexture* object = syncGPUObject(resourceTexture);
|
||||
if (object) {
|
||||
assign(textureState._texture, resourceTexture);
|
||||
GLuint to = object->_texture;
|
||||
GLuint target = object->_target;
|
||||
textureState._target = object->_target;
|
||||
glActiveTexture(GL_TEXTURE0 + slot);
|
||||
glBindTexture(target, to);
|
||||
|
||||
glBindTexture(textureState._target, to);
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_resource._textures[slot] = resourceTexture;
|
||||
|
||||
_stats._RSAmountTextureMemoryBounded += (int)object->size();
|
||||
|
||||
} else {
|
||||
|
@ -343,7 +333,7 @@ void GLBackend::do_setResourceTextureTable(const Batch& batch, size_t paramOffse
|
|||
int GLBackend::ResourceStageState::findEmptyTextureSlot() const {
|
||||
// start from the end of the slots, try to find an empty one that can be used
|
||||
for (auto i = MAX_NUM_RESOURCE_TEXTURES - 1; i > 0; i--) {
|
||||
if (!_textures[i]) {
|
||||
if (!valid(_textures[i]._texture)) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -25,15 +25,15 @@ static bool timeElapsed = false;
|
|||
#endif
|
||||
|
||||
void GLBackend::do_beginQuery(const Batch& batch, size_t paramOffset) {
|
||||
#if !defined(USE_GLES)
|
||||
auto query = batch._queries.get(batch._params[paramOffset]._uint);
|
||||
GLQuery* glquery = syncGPUObject(*query);
|
||||
if (glquery) {
|
||||
PROFILE_RANGE_BEGIN(render_gpu_gl_detail, glquery->_profileRangeId, query->getName().c_str(), 0xFFFF7F00);
|
||||
|
||||
++_queryStage._rangeQueryDepth;
|
||||
glGetInteger64v(GL_TIMESTAMP, (GLint64*)&glquery->_batchElapsedTime);
|
||||
glquery->_batchElapsedTimeBegin = std::chrono::high_resolution_clock::now();
|
||||
|
||||
#if !defined(USE_GLES)
|
||||
if (timeElapsed) {
|
||||
if (_queryStage._rangeQueryDepth <= MAX_RANGE_QUERY_DEPTH) {
|
||||
glBeginQuery(GL_TIME_ELAPSED, glquery->_endqo);
|
||||
|
@ -41,17 +41,18 @@ void GLBackend::do_beginQuery(const Batch& batch, size_t paramOffset) {
|
|||
} else {
|
||||
glQueryCounter(glquery->_beginqo, GL_TIMESTAMP);
|
||||
}
|
||||
#endif
|
||||
|
||||
glquery->_rangeQueryDepth = _queryStage._rangeQueryDepth;
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::do_endQuery(const Batch& batch, size_t paramOffset) {
#if !defined(USE_GLES)
    auto query = batch._queries.get(batch._params[paramOffset]._uint);
    GLQuery* glquery = syncGPUObject(*query);
    if (glquery) {
#if !defined(USE_GLES)
        if (timeElapsed) {
            if (_queryStage._rangeQueryDepth <= MAX_RANGE_QUERY_DEPTH) {
                glEndQuery(GL_TIME_ELAPSED);
@@ -59,27 +60,26 @@ void GLBackend::do_endQuery(const Batch& batch, size_t paramOffset) {
        } else {
            glQueryCounter(glquery->_endqo, GL_TIMESTAMP);
        }
#endif

        --_queryStage._rangeQueryDepth;
        GLint64 now;
        glGetInteger64v(GL_TIMESTAMP, &now);
        glquery->_batchElapsedTime = now - glquery->_batchElapsedTime;
        auto duration_ns = std::chrono::duration_cast<std::chrono::nanoseconds>(std::chrono::high_resolution_clock::now() - glquery->_batchElapsedTimeBegin);
        glquery->_batchElapsedTime = duration_ns.count();

        PROFILE_RANGE_END(render_gpu_gl_detail, glquery->_profileRangeId);

        (void)CHECK_GL_ERROR();
    }
#endif
}
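The timing change above swaps the GL_TIMESTAMP round-trip for a CPU-side std::chrono measurement: do_beginQuery records _batchElapsedTimeBegin with high_resolution_clock, and do_endQuery reduces the elapsed time to a simple subtraction. A minimal standalone sketch of that pattern, illustrative only and outside any GL context:

    #include <chrono>
    #include <cstdint>
    #include <iostream>

    int main() {
        using clock = std::chrono::high_resolution_clock;

        // do_beginQuery: remember when the batch started on the CPU timeline.
        clock::time_point batchElapsedTimeBegin = clock::now();

        // ... the batch would be recorded and executed here ...

        // do_endQuery: elapsed time is a subtraction, no glGetInteger64v needed.
        auto duration_ns = std::chrono::duration_cast<std::chrono::nanoseconds>(clock::now() - batchElapsedTimeBegin);
        uint64_t batchElapsedTime = duration_ns.count();

        std::cout << "batch took " << batchElapsedTime << " ns\n";
        return 0;
    }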
|
||||
|
||||
void GLBackend::do_getQuery(const Batch& batch, size_t paramOffset) {
|
||||
#if !defined(USE_GLES)
|
||||
auto query = batch._queries.get(batch._params[paramOffset]._uint);
|
||||
GLQuery* glquery = syncGPUObject(*query);
|
||||
if (glquery) {
|
||||
if (glquery->_rangeQueryDepth > MAX_RANGE_QUERY_DEPTH) {
|
||||
query->triggerReturnHandler(glquery->_result, glquery->_batchElapsedTime);
|
||||
} else {
|
||||
#if !defined(USE_GLES)
|
||||
glGetQueryObjectui64v(glquery->_endqo, GL_QUERY_RESULT_AVAILABLE, &glquery->_result);
|
||||
if (glquery->_result == GL_TRUE) {
|
||||
if (timeElapsed) {
|
||||
|
@ -92,10 +92,13 @@ void GLBackend::do_getQuery(const Batch& batch, size_t paramOffset) {
|
|||
}
|
||||
query->triggerReturnHandler(glquery->_result, glquery->_batchElapsedTime);
|
||||
}
|
||||
#else
|
||||
// gles3 is not supporting true time query returns just the batch elapsed time
|
||||
query->triggerReturnHandler(0, glquery->_batchElapsedTime);
|
||||
#endif
|
||||
(void)CHECK_GL_ERROR();
|
||||
}
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
void GLBackend::resetQueryStage() {
|
||||
|
|
|
@ -66,7 +66,7 @@ GLTexture* GLBackend::syncGPUObject(const TexturePointer& texturePointer) {
|
|||
}
|
||||
|
||||
void GLBackend::do_generateTextureMips(const Batch& batch, size_t paramOffset) {
|
||||
TexturePointer resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
|
||||
const auto& resourceTexture = batch._textures.get(batch._params[paramOffset + 0]._uint);
|
||||
if (!resourceTexture) {
|
||||
return;
|
||||
}
|
||||
|
|
|
@ -24,7 +24,7 @@ GLPipeline* GLPipeline::sync(GLBackend& backend, const Pipeline& pipeline) {
|
|||
}
|
||||
|
||||
// No object allocated yet, let's see if it's worth it...
|
||||
ShaderPointer shader = pipeline.getProgram();
|
||||
const auto& shader = pipeline.getProgram();
|
||||
|
||||
// If this pipeline's shader has already failed to compile, don't try again
|
||||
if (shader->compilationHasFailed()) {
|
||||
|
@ -37,7 +37,7 @@ GLPipeline* GLPipeline::sync(GLBackend& backend, const Pipeline& pipeline) {
|
|||
return nullptr;
|
||||
}
|
||||
|
||||
StatePointer state = pipeline.getState();
|
||||
const auto& state = pipeline.getState();
|
||||
GLState* stateObject = GLState::sync(*state);
|
||||
if (stateObject == nullptr) {
|
||||
return nullptr;
|
||||
|
|
|
@ -47,8 +47,9 @@ public:
|
|||
|
||||
const GLuint& _endqo = { _id };
|
||||
const GLuint _beginqo = { 0 };
|
||||
GLuint64 _result { (GLuint64)-1 };
|
||||
GLuint64 _batchElapsedTime { (GLuint64) 0 };
|
||||
GLuint64 _result { (GLuint64)0 };
|
||||
GLuint64 _batchElapsedTime{ (GLuint64)0 };
|
||||
std::chrono::high_resolution_clock::time_point _batchElapsedTimeBegin;
|
||||
uint64_t _profileRangeId { 0 };
|
||||
uint32_t _rangeQueryDepth { 0 };
|
||||
|
||||
|
|
|
@ -161,7 +161,7 @@ protected:
|
|||
void updateTransform(const Batch& batch) override;
|
||||
|
||||
// Resource Stage
|
||||
bool bindResourceBuffer(uint32_t slot, BufferPointer& buffer) override;
|
||||
bool bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) override;
|
||||
void releaseResourceBuffer(uint32_t slot) override;
|
||||
|
||||
// Output stage
|
||||
|
|
|
@ -100,7 +100,7 @@ GLBuffer* GL41Backend::syncGPUObject(const Buffer& buffer) {
|
|||
return GL41Buffer::sync<GL41Buffer>(*this, buffer);
|
||||
}
|
||||
|
||||
bool GL41Backend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
||||
bool GL41Backend::bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) {
|
||||
GLuint texBuffer = GL41Backend::getResourceBufferID((*buffer));
|
||||
if (texBuffer) {
|
||||
glActiveTexture(GL_TEXTURE0 + GL41Backend::RESOURCE_BUFFER_SLOT0_TEX_UNIT + slot);
|
||||
|
@ -108,7 +108,7 @@ bool GL41Backend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
|||
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_resource._buffers[slot] = buffer;
|
||||
assign(_resource._buffers[slot], buffer);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -117,10 +117,11 @@ bool GL41Backend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
|||
}
|
||||
|
||||
void GL41Backend::releaseResourceBuffer(uint32_t slot) {
|
||||
auto& buf = _resource._buffers[slot];
|
||||
if (buf) {
|
||||
auto& bufferReference = _resource._buffers[slot];
|
||||
auto buffer = acquire(bufferReference);
|
||||
if (buffer) {
|
||||
glActiveTexture(GL_TEXTURE0 + GL41Backend::RESOURCE_BUFFER_SLOT0_TEX_UNIT + slot);
|
||||
glBindTexture(GL_TEXTURE_BUFFER, 0);
|
||||
buf.reset();
|
||||
reset(bufferReference);
|
||||
}
|
||||
}
|
||||
|
|
|
@ -35,14 +35,15 @@ void GL41Backend::updateInput() {
|
|||
|
||||
if (_input._invalidFormat || _input._invalidBuffers.any()) {
|
||||
|
||||
auto format = acquire(_input._format);
|
||||
if (_input._invalidFormat) {
|
||||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
_stats._ISNumFormatChanges++;
|
||||
|
||||
// Check expected activation
|
||||
if (_input._format) {
|
||||
for (auto& it : _input._format->getAttributes()) {
|
||||
if (format) {
|
||||
for (auto& it : format->getAttributes()) {
|
||||
const Stream::Attribute& attrib = (it).second;
|
||||
uint8_t locationCount = attrib._element.getLocationCount();
|
||||
for (int i = 0; i < locationCount; ++i) {
|
||||
|
@ -69,15 +70,15 @@ void GL41Backend::updateInput() {
|
|||
}
|
||||
|
||||
// now we need to bind the buffers and assign the attrib pointers
|
||||
if (_input._format) {
|
||||
if (format) {
|
||||
bool hasColorAttribute{ false };
|
||||
|
||||
const Buffers& buffers = _input._buffers;
|
||||
const Offsets& offsets = _input._bufferOffsets;
|
||||
const Offsets& strides = _input._bufferStrides;
|
||||
const auto& buffers = _input._buffers;
|
||||
const auto& offsets = _input._bufferOffsets;
|
||||
const auto& strides = _input._bufferStrides;
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
const auto& attributes = format->getAttributes();
|
||||
const auto& inputChannels = format->getChannels();
|
||||
int numInvalids = (int)_input._invalidBuffers.count();
|
||||
_stats._ISNumInputBufferChanges += numInvalids;
|
||||
|
||||
|
|
|
@ -262,7 +262,7 @@ protected:
|
|||
void updateTransform(const Batch& batch) override;
|
||||
|
||||
// Resource Stage
|
||||
bool bindResourceBuffer(uint32_t slot, BufferPointer& buffer) override;
|
||||
bool bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) override;
|
||||
void releaseResourceBuffer(uint32_t slot) override;
|
||||
|
||||
// Output stage
|
||||
|
|
|
@ -60,14 +60,14 @@ GLBuffer* GL45Backend::syncGPUObject(const Buffer& buffer) {
|
|||
}
|
||||
|
||||
|
||||
bool GL45Backend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
||||
bool GL45Backend::bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) {
|
||||
GLBuffer* object = syncGPUObject((*buffer));
|
||||
if (object) {
|
||||
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, slot, object->_id);
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_resource._buffers[slot] = buffer;
|
||||
assign(_resource._buffers[slot], buffer);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -76,11 +76,10 @@ bool GL45Backend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
|||
}
|
||||
|
||||
void GL45Backend::releaseResourceBuffer(uint32_t slot) {
|
||||
auto& buf = _resource._buffers[slot];
|
||||
if (buf) {
|
||||
auto& bufferReference = _resource._buffers[slot];
|
||||
auto buffer = acquire(bufferReference);
|
||||
if (buffer) {
|
||||
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, slot, 0);
|
||||
buf.reset();
|
||||
reset(bufferReference);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
|
|
@ -28,7 +28,7 @@ void GL45Backend::resetInputStage() {
|
|||
|
||||
void GL45Backend::updateInput() {
|
||||
bool isStereoNow = isStereo();
|
||||
// track stereo state change potentially happening wihtout changing the input format
|
||||
// track stereo state change potentially happening without changing the input format
|
||||
// this is a rare case requesting to invalid the format
|
||||
#ifdef GPU_STEREO_DRAWCALL_INSTANCED
|
||||
_input._invalidFormat |= (isStereoNow != _input._lastUpdateStereoState);
|
||||
|
@ -39,13 +39,14 @@ void GL45Backend::updateInput() {
|
|||
InputStageState::ActivationCache newActivation;
|
||||
|
||||
// Assign the vertex format required
|
||||
if (_input._format) {
|
||||
auto format = acquire(_input._format);
|
||||
if (format) {
|
||||
bool hasColorAttribute{ false };
|
||||
|
||||
_input._attribBindingBuffers.reset();
|
||||
|
||||
const Stream::Format::AttributeMap& attributes = _input._format->getAttributes();
|
||||
auto& inputChannels = _input._format->getChannels();
|
||||
const auto& attributes = format->getAttributes();
|
||||
const auto& inputChannels = format->getChannels();
|
||||
for (auto& channelIt : inputChannels) {
|
||||
auto bufferChannelNum = (channelIt).first;
|
||||
const Stream::Format::ChannelMap::value_type::second_type& channel = (channelIt).second;
|
||||
|
|
|
@ -157,7 +157,7 @@ protected:
|
|||
void updateTransform(const Batch& batch) override;
|
||||
|
||||
// Resource Stage
|
||||
bool bindResourceBuffer(uint32_t slot, BufferPointer& buffer) override;
|
||||
bool bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) override;
|
||||
void releaseResourceBuffer(uint32_t slot) override;
|
||||
|
||||
// Output stage
|
||||
|
|
|
@ -72,14 +72,14 @@ GLBuffer* GLESBackend::syncGPUObject(const Buffer& buffer) {
|
|||
return GLESBuffer::sync<GLESBuffer>(*this, buffer);
|
||||
}
|
||||
|
||||
bool GLESBackend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
||||
bool GLESBackend::bindResourceBuffer(uint32_t slot, const BufferPointer& buffer) {
|
||||
GLBuffer* object = syncGPUObject((*buffer));
|
||||
if (object) {
|
||||
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, slot, object->_id);
|
||||
|
||||
(void)CHECK_GL_ERROR();
|
||||
|
||||
_resource._buffers[slot] = buffer;
|
||||
assign(_resource._buffers[slot], buffer);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -88,10 +88,10 @@ bool GLESBackend::bindResourceBuffer(uint32_t slot, BufferPointer& buffer) {
|
|||
}
|
||||
|
||||
void GLESBackend::releaseResourceBuffer(uint32_t slot) {
|
||||
auto& buf = _resource._buffers[slot];
|
||||
if (buf) {
|
||||
auto& bufferReference = _resource._buffers[slot];
|
||||
if (valid(bufferReference)) {
|
||||
glBindBufferBase(GL_SHADER_STORAGE_BUFFER, slot, 0);
|
||||
buf.reset();
|
||||
reset(bufferReference);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -498,7 +498,7 @@ void Batch::setupNamedCalls(const std::string& instanceName, NamedBatchData::Fun
|
|||
captureNamedDrawCallInfo(instanceName);
|
||||
}
|
||||
|
||||
BufferPointer Batch::getNamedBuffer(const std::string& instanceName, uint8_t index) {
|
||||
const BufferPointer& Batch::getNamedBuffer(const std::string& instanceName, uint8_t index) {
|
||||
NamedBatchData& instance = _namedData[instanceName];
|
||||
if (instance.buffers.size() <= index) {
|
||||
instance.buffers.resize(index + 1);
|
||||
|
|
|
@ -119,7 +119,7 @@ public:
|
|||
void multiDrawIndexedIndirect(uint32 numCommands, Primitive primitiveType);
|
||||
|
||||
void setupNamedCalls(const std::string& instanceName, NamedBatchData::Function function);
|
||||
BufferPointer getNamedBuffer(const std::string& instanceName, uint8_t index = 0);
|
||||
const BufferPointer& getNamedBuffer(const std::string& instanceName, uint8_t index = 0);
|
||||
|
||||
// Input Stage
|
||||
// InputFormat
|
||||
|
|
|
@@ -203,11 +203,12 @@ uint32 Framebuffer::getNumRenderBuffers() const {
    return nb;
}

TexturePointer Framebuffer::getRenderBuffer(uint32 slot) const {
const TexturePointer& Framebuffer::getRenderBuffer(uint32 slot) const {
    static const TexturePointer EMPTY;
    if (!isSwapchain() && (slot < getMaxNumRenderBuffers())) {
        return _renderBuffers[slot]._texture;
    } else {
        return TexturePointer();
        return EMPTY;
    }

}
@@ -297,9 +298,10 @@ bool Framebuffer::setDepthStencilBuffer(const TexturePointer& texture, const For
    return false;
}

TexturePointer Framebuffer::getDepthStencilBuffer() const {
const TexturePointer& Framebuffer::getDepthStencilBuffer() const {
    static const TexturePointer EMPTY;
    if (isSwapchain()) {
        return TexturePointer();
        return EMPTY;
    } else {
        return _depthStencilBuffer._texture;
    }
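The function-local static EMPTY is what allows these getters to switch from returning TexturePointer by value to returning const TexturePointer&: the empty shared_ptr has permanent lifetime, so the "not found" path never hands out a dangling reference, and the common path no longer copies and atomically ref-counts a shared_ptr on every call. A small standalone sketch of the idiom; Registry and its members are made up for the example:

    #include <memory>
    #include <string>
    #include <unordered_map>

    struct Texture { std::string name; };
    using TexturePointer = std::shared_ptr<Texture>;

    class Registry {   // hypothetical container, for illustration only
    public:
        // Returning const& avoids a shared_ptr copy per lookup; the static EMPTY gives the
        // miss case something with permanent lifetime to refer to.
        const TexturePointer& get(const std::string& key) const {
            static const TexturePointer EMPTY;
            auto it = _textures.find(key);
            return (it != _textures.end()) ? it->second : EMPTY;
        }

        void set(const std::string& key, TexturePointer texture) { _textures[key] = std::move(texture); }

    private:
        std::unordered_map<std::string, TexturePointer> _textures;
    };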
|
||||
|
|
|
@ -95,7 +95,7 @@ public:
|
|||
static Framebuffer* createShadowmap(uint16 width);
|
||||
|
||||
bool isSwapchain() const;
|
||||
SwapchainPointer getSwapchain() const { return _swapchain; }
|
||||
const SwapchainPointer& getSwapchain() const { return _swapchain; }
|
||||
|
||||
uint32 getFrameCount() const;
|
||||
|
||||
|
@ -105,13 +105,13 @@ public:
|
|||
const TextureViews& getRenderBuffers() const { return _renderBuffers; }
|
||||
|
||||
int32 setRenderBuffer(uint32 slot, const TexturePointer& texture, uint32 subresource = 0);
|
||||
TexturePointer getRenderBuffer(uint32 slot) const;
|
||||
const TexturePointer& getRenderBuffer(uint32 slot) const;
|
||||
uint32 getRenderBufferSubresource(uint32 slot) const;
|
||||
|
||||
bool setDepthBuffer(const TexturePointer& texture, const Format& format, uint32 subresource = 0);
|
||||
bool setStencilBuffer(const TexturePointer& texture, const Format& format, uint32 subresource = 0);
|
||||
bool setDepthStencilBuffer(const TexturePointer& texture, const Format& format, uint32 subresource = 0);
|
||||
TexturePointer getDepthStencilBuffer() const;
|
||||
const TexturePointer& getDepthStencilBuffer() const;
|
||||
uint32 getDepthStencilBufferSubresource() const;
|
||||
Format getDepthStencilBufferFormat() const;
|
||||
|
||||
|
|
|
@ -31,6 +31,7 @@
|
|||
#include "udt/PacketHeaders.h"
|
||||
|
||||
const QString DEFAULT_HIFI_ADDRESS = "file:///~/serverless/tutorial.json";
|
||||
const QString REDIRECT_HIFI_ADDRESS = "file:///~/serverless/redirect.json";
|
||||
const QString ADDRESS_MANAGER_SETTINGS_GROUP = "AddressManager";
|
||||
const QString SETTINGS_CURRENT_ADDRESS_KEY = "address";
|
||||
|
||||
|
@@ -111,6 +112,9 @@ QUrl AddressManager::currentFacingPublicAddress() const {
    return shareableAddress;
}

QUrl AddressManager::lastAddress() const {
    return _lastVisitedURL;
}
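Together with the goToLastAddress() slot added further down in this diff, this accessor gives scripts a "retry the last URL the user tried" primitive: handleUrl() records every attempted lookup in _lastVisitedURL, and goToLastAddress() feeds it back through handleUrl(). A reduced sketch of that bookkeeping, illustrative only and not the real class:

    #include <QUrl>
    #include <functional>

    // Hypothetical, trimmed-down tracker: remember the last URL that was attempted
    // and allow it to be retried later (for example after a failed connection).
    class LastAddressTracker {
    public:
        explicit LastAddressTracker(std::function<void(const QUrl&)> navigate)
            : _navigate(std::move(navigate)) {}

        void handleUrl(const QUrl& url) {
            _lastVisitedURL = url;   // record the attempt, whether or not it succeeds
            _navigate(url);
        }

        QUrl lastAddress() const { return _lastVisitedURL; }

        void goToLastAddress() {
            if (!_lastVisitedURL.isEmpty()) {
                handleUrl(_lastVisitedURL);
            }
        }

    private:
        QUrl _lastVisitedURL;
        std::function<void(const QUrl&)> _navigate;
    };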
|
||||
|
||||
void AddressManager::loadSettings(const QString& lookupString) {
|
||||
#if defined(USE_GLES) && defined(Q_OS_WIN)
|
||||
|
@ -247,9 +251,12 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
|
|||
|
||||
UserActivityLogger::getInstance().wentTo(trigger, URL_TYPE_USER, lookupUrl.toString());
|
||||
|
||||
// save the last visited domain URL.
|
||||
_lastVisitedURL = lookupUrl;
|
||||
|
||||
// in case we're failing to connect to where we thought this user was
|
||||
// store their username as previous lookup so we can refresh their location via API
|
||||
_previousLookup = lookupUrl;
|
||||
_previousAPILookup = lookupUrl;
|
||||
} else {
|
||||
// we're assuming this is either a network address or global place name
|
||||
// check if it is a network address first
|
||||
|
@ -259,8 +266,11 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
|
|||
|
||||
UserActivityLogger::getInstance().wentTo(trigger, URL_TYPE_NETWORK_ADDRESS, lookupUrl.toString());
|
||||
|
||||
// save the last visited domain URL.
|
||||
_lastVisitedURL = lookupUrl;
|
||||
|
||||
// a network address lookup clears the previous lookup since we don't expect to re-attempt it
|
||||
_previousLookup.clear();
|
||||
_previousAPILookup.clear();
|
||||
|
||||
// If the host changed then we have already saved to history
|
||||
if (hostChanged) {
|
||||
|
@ -278,8 +288,11 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
|
|||
} else if (handleDomainID(lookupUrl.host())){
|
||||
UserActivityLogger::getInstance().wentTo(trigger, URL_TYPE_DOMAIN_ID, lookupUrl.toString());
|
||||
|
||||
// save the last visited domain URL.
|
||||
_lastVisitedURL = lookupUrl;
|
||||
|
||||
// store this domain ID as the previous lookup in case we're failing to connect and want to refresh API info
|
||||
_previousLookup = lookupUrl;
|
||||
_previousAPILookup = lookupUrl;
|
||||
|
||||
// no place name - this is probably a domain ID
|
||||
// try to look up the domain ID on the metaverse API
|
||||
|
@ -287,8 +300,11 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
|
|||
} else {
|
||||
UserActivityLogger::getInstance().wentTo(trigger, URL_TYPE_PLACE, lookupUrl.toString());
|
||||
|
||||
// save the last visited domain URL.
|
||||
_lastVisitedURL = lookupUrl;
|
||||
|
||||
// store this place name as the previous lookup in case we fail to connect and want to refresh API info
|
||||
_previousLookup = lookupUrl;
|
||||
_previousAPILookup = lookupUrl;
|
||||
|
||||
// wasn't an address - lookup the place name
|
||||
// we may have a path that defines a relative viewpoint - pass that through the lookup so we can go to it after
|
||||
|
@ -302,7 +318,7 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
|
|||
qCDebug(networking) << "Going to relative path" << lookupUrl.path();
|
||||
|
||||
// a path lookup clears the previous lookup since we don't expect to re-attempt it
|
||||
_previousLookup.clear();
|
||||
_previousAPILookup.clear();
|
||||
|
||||
// if this is a relative path then handle it as a relative viewpoint
|
||||
handlePath(lookupUrl.path(), trigger, true);
|
||||
|
@ -314,7 +330,10 @@ bool AddressManager::handleUrl(const QUrl& lookupUrl, LookupTrigger trigger) {
|
|||
// be loaded over http(s)
|
||||
// lookupUrl.scheme() == URL_SCHEME_HTTP ||
|
||||
// lookupUrl.scheme() == URL_SCHEME_HTTPS ||
|
||||
_previousLookup.clear();
|
||||
// TODO once a file can return a connection refusal if there were to be some kind of load error, we'd
|
||||
// need to store the previous domain tried in _lastVisitedURL. For now , do not store it.
|
||||
|
||||
_previousAPILookup.clear();
|
||||
_shareablePlaceName.clear();
|
||||
setDomainInfo(lookupUrl, trigger);
|
||||
emit lookupResultsFinished();
|
||||
|
@ -381,7 +400,7 @@ void AddressManager::handleAPIResponse(QNetworkReply* requestReply) {
|
|||
QJsonObject dataObject = responseObject["data"].toObject();
|
||||
|
||||
// Lookup succeeded, don't keep re-trying it (especially on server restarts)
|
||||
_previousLookup.clear();
|
||||
_previousAPILookup.clear();
|
||||
|
||||
if (!dataObject.isEmpty()) {
|
||||
goToAddressFromObject(dataObject.toVariantMap(), requestReply);
|
||||
|
@ -547,7 +566,7 @@ void AddressManager::handleAPIError(QNetworkReply* errorReply) {
|
|||
|
||||
if (errorReply->error() == QNetworkReply::ContentNotFoundError) {
|
||||
// if this is a lookup that has no result, don't keep re-trying it
|
||||
_previousLookup.clear();
|
||||
_previousAPILookup.clear();
|
||||
|
||||
emit lookupResultIsNotFound();
|
||||
}
|
||||
|
@ -709,7 +728,6 @@ bool AddressManager::handleViewpoint(const QString& viewpointString, bool should
|
|||
// We use _newHostLookupPath to determine if the client has already stored its last address
|
||||
// before moving to a new host thanks to the information in the same lookup URL.
|
||||
|
||||
|
||||
if (definitelyPathOnly || (!pathString.isEmpty() && pathString != _newHostLookupPath)
|
||||
|| trigger == Back || trigger == Forward) {
|
||||
addCurrentAddressToHistory(trigger);
|
||||
|
@ -798,8 +816,10 @@ bool AddressManager::setDomainInfo(const QUrl& domainURL, LookupTrigger trigger)
|
|||
const QString hostname = domainURL.host();
|
||||
quint16 port = domainURL.port();
|
||||
bool emitHostChanged { false };
|
||||
// Check if domain handler is in error state. always emit host changed if true.
|
||||
bool isInErrorState = DependencyManager::get<NodeList>()->getDomainHandler().isInErrorState();
|
||||
|
||||
if (domainURL != _domainURL) {
|
||||
if (domainURL != _domainURL || isInErrorState) {
|
||||
addCurrentAddressToHistory(trigger);
|
||||
emitHostChanged = true;
|
||||
}
|
||||
|
@ -843,8 +863,8 @@ void AddressManager::goToUser(const QString& username, bool shouldMatchOrientati
|
|||
|
||||
void AddressManager::refreshPreviousLookup() {
|
||||
// if we have a non-empty previous lookup, fire it again now (but don't re-store it in the history)
|
||||
if (!_previousLookup.isEmpty()) {
|
||||
handleUrl(_previousLookup, LookupTrigger::AttemptedRefresh);
|
||||
if (!_previousAPILookup.isEmpty()) {
|
||||
handleUrl(_previousAPILookup, LookupTrigger::AttemptedRefresh);
|
||||
} else {
|
||||
handleUrl(currentAddress(), LookupTrigger::AttemptedRefresh);
|
||||
}
|
||||
|
|
|
@@ -23,6 +23,7 @@
#include "AccountManager.h"
extern const QString DEFAULT_HIFI_ADDRESS;
extern const QString REDIRECT_HIFI_ADDRESS;
const QString SANDBOX_HIFI_ADDRESS = "hifi://localhost";
const QString INDEX_PATH = "/";

@@ -55,7 +56,6 @@ const QString GET_PLACE = "/api/v1/places/%1";
* <em>Read-only.</em>
* @property {boolean} isConnected - <code>true</code> if you're connected to the domain in your current <code>href</code>
* metaverse address, otherwise <code>false</code>.
* <em>Read-only.</em>
* @property {string} pathname - The location and orientation in your current <code>href</code> metaverse address
* (e.g., <code>"/15,-10,26/0,0,0,1"</code>).
* <em>Read-only.</em>

@@ -140,7 +140,8 @@ public:
* </table>
* @typedef {number} location.LookupTrigger
*/
enum LookupTrigger {
enum LookupTrigger
{
UserInput,
Back,
Forward,

@@ -164,6 +165,8 @@ public:
QString currentPath(bool withOrientation = true) const;
QString currentFacingPath() const;
QUrl lastAddress() const;
const QUuid& getRootPlaceID() const { return _rootPlaceID; }
QString getPlaceName() const;
QString getDomainID() const;

@@ -191,7 +194,7 @@ public slots:
* Helps ensure that user's location history is correctly maintained.
*/
void handleLookupString(const QString& lookupString, bool fromSuggestions = false);
/**jsdoc
* Go to a position and orientation resulting from a lookup for a named path in the domain (set in the domain server's
* settings).

@@ -204,8 +207,9 @@ public slots:
// functions and signals that should be exposed are moved to a scripting interface class.
//
// we currently expect this to be called from NodeList once handleLookupString has been called with a path
bool goToViewpointForPath(const QString& viewpointString, const QString& pathString)
{ return handleViewpoint(viewpointString, false, DomainPathResponse, false, pathString); }
bool goToViewpointForPath(const QString& viewpointString, const QString& pathString) {
return handleViewpoint(viewpointString, false, DomainPathResponse, false, pathString);
}
/**jsdoc
* Go back to the previous location in your navigation history, if there is one.

@@ -226,8 +230,10 @@ public slots:
* @param {location.LookupTrigger} trigger=StartupFromSettings - The reason for the function call. Helps ensure that user's
* location history is correctly maintained.
*/
void goToLocalSandbox(QString path = "", LookupTrigger trigger = LookupTrigger::StartupFromSettings) { handleUrl(SANDBOX_HIFI_ADDRESS + path, trigger); }
void goToLocalSandbox(QString path = "", LookupTrigger trigger = LookupTrigger::StartupFromSettings) {
handleUrl(SANDBOX_HIFI_ADDRESS + path, trigger);
}
/**jsdoc
* Go to the default "welcome" metaverse address.
* @function location.goToEntry

@@ -245,6 +251,12 @@ public slots:
*/
void goToUser(const QString& username, bool shouldMatchOrientation = true);
/**jsdoc
* Go to the last address tried. This will be the last URL tried from location.handleLookupString
* @function location.goToLastAddress
*/
void goToLastAddress() { handleUrl(_lastVisitedURL, LookupTrigger::AttemptedRefresh); }
/**jsdoc
* Refresh the current address, e.g., after connecting to a domain in order to position the user to the desired location.
* @function location.refreshPreviousLookup

@@ -352,7 +364,8 @@ signals:
* location.locationChangeRequired.connect(onLocationChangeRequired);
*/
void locationChangeRequired(const glm::vec3& newPosition,
bool hasOrientationChange, const glm::quat& newOrientation,
bool hasOrientationChange,
const glm::quat& newOrientation,
bool shouldFaceLocation);
/**jsdoc

@@ -423,7 +436,7 @@ private slots:
void handleShareableNameAPIResponse(QNetworkReply* requestReply);
private:
void goToAddressFromObject(const QVariantMap& addressMap, const QNetworkReply* reply);
void goToAddressFromObject(const QVariantMap& addressMap, const QNetworkReply* reply);
// Set host and port, and return `true` if it was changed.
bool setHost(const QString& host, LookupTrigger trigger, quint16 port = 0);

@@ -435,8 +448,11 @@ private:
bool handleNetworkAddress(const QString& lookupString, LookupTrigger trigger, bool& hostChanged);
void handlePath(const QString& path, LookupTrigger trigger, bool wasPathOnly = false);
bool handleViewpoint(const QString& viewpointString, bool shouldFace, LookupTrigger trigger,
bool definitelyPathOnly = false, const QString& pathString = QString());
bool handleViewpoint(const QString& viewpointString,
bool shouldFace,
LookupTrigger trigger,
bool definitelyPathOnly = false,
const QString& pathString = QString());
bool handleUsername(const QString& lookupString);
bool handleDomainID(const QString& host);

@@ -446,6 +462,7 @@ private:
void addCurrentAddressToHistory(LookupTrigger trigger);
QUrl _domainURL;
QUrl _lastVisitedURL;
QUuid _rootPlaceID;
PositionGetter _positionGetter;

@@ -459,7 +476,7 @@ private:
QString _newHostLookupPath;
QUrl _previousLookup;
QUrl _previousAPILookup;
};
#endif // hifi_AddressManager_h
#endif // hifi_AddressManager_h
@@ -55,6 +55,9 @@ DomainHandler::DomainHandler(QObject* parent) :
// stop the refresh timer if we connect to a domain
connect(this, &DomainHandler::connectedToDomain, &_apiRefreshTimer, &QTimer::stop);
// stop the refresh timer if redirected to the error domain
connect(this, &DomainHandler::redirectToErrorDomainURL, &_apiRefreshTimer, &QTimer::stop);
}
void DomainHandler::disconnect() {

@@ -106,13 +109,16 @@ void DomainHandler::softReset() {
QMetaObject::invokeMethod(&_settingsTimer, "stop");
// restart the API refresh timer in case we fail to connect and need to refresh information
QMetaObject::invokeMethod(&_apiRefreshTimer, "start");
if (!_isInErrorState) {
QMetaObject::invokeMethod(&_apiRefreshTimer, "start");
}
}
void DomainHandler::hardReset() {
emit resetting();
softReset();
_isInErrorState = false;
qCDebug(networking) << "Hard reset in NodeList DomainHandler.";
_pendingDomainID = QUuid();

@@ -128,6 +134,11 @@ void DomainHandler::hardReset() {
_pendingPath.clear();
}
void DomainHandler::setErrorDomainURL(const QUrl& url) {
_errorDomainURL = url;
return;
}
void DomainHandler::setSockAddr(const HifiSockAddr& sockAddr, const QString& hostname) {
if (_sockAddr != sockAddr) {
// we should reset on a sockAddr change

@@ -171,7 +182,8 @@ void DomainHandler::setURLAndID(QUrl domainURL, QUuid domainID) {
domainPort = DEFAULT_DOMAIN_SERVER_PORT;
}
if (_domainURL != domainURL || _sockAddr.getPort() != domainPort) {
// if it's in the error state, reset and try again.
if ((_domainURL != domainURL || _sockAddr.getPort() != domainPort) || _isInErrorState) {
// re-set the domain info so that auth information is reloaded
hardReset();

@@ -206,7 +218,8 @@ void DomainHandler::setURLAndID(QUrl domainURL, QUuid domainID) {
void DomainHandler::setIceServerHostnameAndID(const QString& iceServerHostname, const QUuid& id) {
if (_iceServerSockAddr.getAddress().toString() != iceServerHostname || id != _pendingDomainID) {
// if it's in the error state, reset and try again.
if ((_iceServerSockAddr.getAddress().toString() != iceServerHostname || id != _pendingDomainID) || _isInErrorState) {
// re-set the domain info to connect to new domain
hardReset();

@@ -316,6 +329,24 @@ void DomainHandler::connectedToServerless(std::map<QString, QString> namedPaths)
setIsConnected(true);
}
void DomainHandler::loadedErrorDomain(std::map<QString, QString> namedPaths) {
auto lookup = namedPaths.find("/");
QString viewpoint;
if (lookup != namedPaths.end()) {
viewpoint = lookup->second;
} else {
viewpoint = DOMAIN_SPAWNING_POINT;
}
DependencyManager::get<AddressManager>()->goToViewpointForPath(viewpoint, QString());
}
void DomainHandler::setRedirectErrorState(QUrl errorUrl, int reasonCode) {
_errorDomainURL = errorUrl;
_lastDomainConnectionError = reasonCode;
_isInErrorState = true;
emit redirectToErrorDomainURL(_errorDomainURL);
}
void DomainHandler::requestDomainSettings() {
qCDebug(networking) << "Requesting settings from domain server";

@@ -451,7 +482,17 @@ void DomainHandler::processDomainServerConnectionDeniedPacket(QSharedPointer<Rec
if (!_domainConnectionRefusals.contains(reasonMessage)) {
_domainConnectionRefusals.insert(reasonMessage);
#if defined(Q_OS_ANDROID)
emit domainConnectionRefused(reasonMessage, (int)reasonCode, extraInfo);
#else
if (reasonCode == ConnectionRefusedReason::ProtocolMismatch || reasonCode == ConnectionRefusedReason::NotAuthorized) {
// ingest the error - this is a "hard" connection refusal.
setRedirectErrorState(_errorDomainURL, (int)reasonCode);
} else {
emit domainConnectionRefused(reasonMessage, (int)reasonCode, extraInfo);
}
_lastDomainConnectionError = (int)reasonCode;
#endif
}
auto accountManager = DependencyManager::get<AccountManager>();
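Note on the refusal handling above: the change splits connection refusals into "hard" ones (ProtocolMismatch, NotAuthorized) that put the handler in the error state and redirect to the error domain, and "soft" ones that only emit domainConnectionRefused. A minimal standalone sketch of that branch, with hypothetical callback parameters standing in for the real signal/slot plumbing, could look like this:

#include <functional>
#include <string>

// Mirrors the ConnectionRefusedReason values shown in DomainHandler.h.
enum class ConnectionRefusedReason { Unknown, ProtocolMismatch, LoginError, NotAuthorized, TooManyUsers, TimedOut };

// Hypothetical free-function sketch of the #else branch: hard refusals
// redirect to the error domain, everything else surfaces the refusal message.
void handleRefusal(ConnectionRefusedReason reason, const std::string& message,
                   const std::function<void(int)>& redirectToErrorDomain,
                   const std::function<void(const std::string&, int)>& emitRefused) {
    if (reason == ConnectionRefusedReason::ProtocolMismatch ||
        reason == ConnectionRefusedReason::NotAuthorized) {
        redirectToErrorDomain(static_cast<int>(reason));   // "hard" refusal
    } else {
        emitRefused(message, static_cast<int>(reason));    // "soft" refusal
    }
}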
@@ -50,6 +50,11 @@ public:
QString getHostname() const { return _domainURL.host(); }
QUrl getErrorDomainURL(){ return _errorDomainURL; }
void setErrorDomainURL(const QUrl& url);
int getLastDomainConnectionError() { return _lastDomainConnectionError; }
const QHostAddress& getIP() const { return _sockAddr.getAddress(); }
void setIPToLocalhost() { _sockAddr.setAddress(QHostAddress(QHostAddress::LocalHost)); }

@@ -81,6 +86,8 @@ public:
void connectedToServerless(std::map<QString, QString> namedPaths);
void loadedErrorDomain(std::map<QString, QString> namedPaths);
QString getViewPointFromNamedPath(QString namedPath);
bool hasSettings() const { return !_settingsObject.isEmpty(); }

@@ -135,6 +142,11 @@ public:
* <td><code>4</code></td>
* <td>The domain already has its maximum number of users.</td>
* </tr>
* <tr>
* <td><strong>TimedOut</strong></td>
* <td><code>5</code></td>
* <td>Connecting to the domain timed out.</td>
* </tr>
* </tbody>
* </table>
* @typedef {number} Window.ConnectionRefusedReason

@@ -144,7 +156,8 @@ public:
ProtocolMismatch,
LoginError,
NotAuthorized,
TooManyUsers
TooManyUsers,
TimedOut
};
public slots:

@@ -157,6 +170,11 @@ public slots:
void processICEResponsePacket(QSharedPointer<ReceivedMessage> icePacket);
void processDomainServerConnectionDeniedPacket(QSharedPointer<ReceivedMessage> message);
// sets domain handler in error state.
void setRedirectErrorState(QUrl errorUrl, int reasonCode);
bool isInErrorState() { return _isInErrorState; }
private slots:
void completedHostnameLookup(const QHostInfo& hostInfo);
void completedIceServerHostnameLookup();

@@ -164,6 +182,8 @@ private slots:
signals:
void domainURLChanged(QUrl domainURL);
void domainConnectionErrorChanged(int reasonCode);
// NOTE: the emission of completedSocketDiscovery does not mean a connection to DS is established
// It means that, either from DNS lookup or ICE, we think we have a socket we can talk to DS on
void completedSocketDiscovery();

@@ -179,6 +199,7 @@ signals:
void settingsReceiveFail();
void domainConnectionRefused(QString reasonMessage, int reason, const QString& extraInfo);
void redirectToErrorDomainURL(QUrl errorDomainURL);
void limitOfSilentDomainCheckInsReached();

@@ -190,6 +211,7 @@ private:
QUuid _uuid;
Node::LocalID _localID;
QUrl _domainURL;
QUrl _errorDomainURL;
HifiSockAddr _sockAddr;
QUuid _assignmentUUID;
QUuid _connectionToken;

@@ -198,6 +220,7 @@ private:
HifiSockAddr _iceServerSockAddr;
NetworkPeer _icePeer;
bool _isConnected { false };
bool _isInErrorState { false };
QJsonObject _settingsObject;
QString _pendingPath;
QTimer _settingsTimer;

@@ -210,6 +233,9 @@ private:
QTimer _apiRefreshTimer;
std::map<QString, QString> _namedPaths;
// domain connection error upon connection refusal.
int _lastDomainConnectionError{ -1 };
};
const QString DOMAIN_SPAWNING_POINT { "/0, -10, 0" };
@@ -38,10 +38,10 @@ void UserActivityLogger::logAction(QString action, QJsonObject details, JSONCall
if (_disabled.get()) {
return;
}
auto accountManager = DependencyManager::get<AccountManager>();
QHttpMultiPart* multipart = new QHttpMultiPart(QHttpMultiPart::FormDataType);
// Adding the action name
QHttpPart actionPart;
actionPart.setHeader(QNetworkRequest::ContentDispositionHeader, "form-data; name=\"action_name\"");

@@ -53,7 +53,7 @@ void UserActivityLogger::logAction(QString action, QJsonObject details, JSONCall
elapsedPart.setHeader(QNetworkRequest::ContentDispositionHeader, "form-data; name=\"elapsed_ms\"");
elapsedPart.setBody(QString::number(_timer.elapsed()).toLocal8Bit());
multipart->append(elapsedPart);
// If there are action details, add them to the multipart
if (!details.isEmpty()) {
QHttpPart detailsPart;

@@ -62,13 +62,13 @@ void UserActivityLogger::logAction(QString action, QJsonObject details, JSONCall
detailsPart.setBody(QJsonDocument(details).toJson(QJsonDocument::Compact));
multipart->append(detailsPart);
}
// if no callbacks specified, call our owns
if (params.isEmpty()) {
params.callbackReceiver = this;
params.errorCallbackMethod = "requestError";
}
accountManager->sendRequest(USER_ACTIVITY_URL,
AccountManagerAuth::Optional,
QNetworkAccessManager::PostOperation,

@@ -88,7 +88,7 @@ void UserActivityLogger::launch(QString applicationVersion, bool previousSession
actionDetails.insert(VERSION_KEY, applicationVersion);
actionDetails.insert(CRASH_KEY, previousSessionCrashed);
actionDetails.insert(RUNTIME_KEY, previousSessionRuntime);
logAction(ACTION_NAME, actionDetails);
}

@@ -105,9 +105,9 @@ void UserActivityLogger::changedDisplayName(QString displayName) {
const QString ACTION_NAME = "changed_display_name";
QJsonObject actionDetails;
const QString DISPLAY_NAME = "display_name";
actionDetails.insert(DISPLAY_NAME, displayName);
logAction(ACTION_NAME, actionDetails);
}

@@ -116,10 +116,10 @@ void UserActivityLogger::changedModel(QString typeOfModel, QString modelURL) {
QJsonObject actionDetails;
const QString TYPE_OF_MODEL = "type_of_model";
const QString MODEL_URL = "model_url";
actionDetails.insert(TYPE_OF_MODEL, typeOfModel);
actionDetails.insert(MODEL_URL, modelURL);
logAction(ACTION_NAME, actionDetails);
}

@@ -127,9 +127,9 @@ void UserActivityLogger::changedDomain(QString domainURL) {
const QString ACTION_NAME = "changed_domain";
QJsonObject actionDetails;
const QString DOMAIN_URL = "domain_url";
actionDetails.insert(DOMAIN_URL, domainURL);
logAction(ACTION_NAME, actionDetails);
}

@@ -151,10 +151,10 @@ void UserActivityLogger::connectedDevice(QString typeOfDevice, QString deviceNam
QJsonObject actionDetails;
const QString TYPE_OF_DEVICE = "type_of_device";
const QString DEVICE_NAME = "device_name";
actionDetails.insert(TYPE_OF_DEVICE, typeOfDevice);
actionDetails.insert(DEVICE_NAME, deviceName);
logAction(ACTION_NAME, actionDetails);
}

@@ -163,9 +163,9 @@ void UserActivityLogger::loadedScript(QString scriptName) {
const QString ACTION_NAME = "loaded_script";
QJsonObject actionDetails;
const QString SCRIPT_NAME = "script_name";
actionDetails.insert(SCRIPT_NAME, scriptName);
logAction(ACTION_NAME, actionDetails);
}

@@ -199,10 +199,10 @@ void UserActivityLogger::wentTo(AddressManager::LookupTrigger lookupTrigger, QSt
const QString TRIGGER_TYPE_KEY = "trigger";
const QString DESTINATION_TYPE_KEY = "destination_type";
const QString DESTINATION_NAME_KEY = "detination_name";
actionDetails.insert(TRIGGER_TYPE_KEY, trigger);
actionDetails.insert(DESTINATION_TYPE_KEY, destinationType);
actionDetails.insert(DESTINATION_NAME_KEY, destinationName);
logAction(ACTION_NAME, actionDetails);
}
@@ -37,6 +37,7 @@ Socket::Socket(QObject* parent, bool shouldChangeSocketOptions) :
_shouldChangeSocketOptions(shouldChangeSocketOptions)
{
connect(&_udpSocket, &QUdpSocket::readyRead, this, &Socket::readPendingDatagrams);
connect(this, &Socket::pendingDatagrams, this, &Socket::processPendingDatagrams, Qt::QueuedConnection);
// make sure we hear about errors and state changes from the underlying socket
connect(&_udpSocket, SIGNAL(error(QAbstractSocket::SocketError)),

@@ -315,55 +316,85 @@ void Socket::checkForReadyReadBackup() {
}
void Socket::readPendingDatagrams() {
int packetsRead = 0;
int packetSizeWithHeader = -1;
while (_udpSocket.hasPendingDatagrams() && (packetSizeWithHeader = _udpSocket.pendingDatagramSize()) != -1) {
// we're reading a packet so re-start the readyRead backup timer
_readyReadBackupTimer->start();
// Max datagrams to read before processing:
static const int MAX_DATAGRAMS_CONSECUTIVELY = 10000;
while (_udpSocket.hasPendingDatagrams()
&& (packetSizeWithHeader = _udpSocket.pendingDatagramSize()) != -1
&& packetsRead <= MAX_DATAGRAMS_CONSECUTIVELY) {
// grab a time point we can mark as the receive time of this packet
auto receiveTime = p_high_resolution_clock::now();
// setup a HifiSockAddr to read into
HifiSockAddr senderSockAddr;
// setup a buffer to read the packet into
auto buffer = std::unique_ptr<char[]>(new char[packetSizeWithHeader]);
QHostAddress senderAddress;
quint16 senderPort;
// pull the datagram
auto sizeRead = _udpSocket.readDatagram(buffer.get(), packetSizeWithHeader,
senderSockAddr.getAddressPointer(), senderSockAddr.getPortPointer());
&senderAddress, &senderPort);
// save information for this packet, in case it is the one that sticks readyRead
_lastPacketSizeRead = sizeRead;
_lastPacketSockAddr = senderSockAddr;
if (sizeRead <= 0) {
// we either didn't pull anything for this packet or there was an error reading (this seems to trigger
// on windows even if there's not a packet available)
// we either didn't pull anything for this packet or there was an error reading (this seems to trigger
// on windows even if there's not a packet available)
if (sizeRead < 0) {
continue;
}
auto it = _unfilteredHandlers.find(senderSockAddr);
_incomingDatagrams.push_back({ senderAddress, senderPort, packetSizeWithHeader,
std::move(buffer), receiveTime });
++packetsRead;
}
if (packetsRead > _maxDatagramsRead) {
_maxDatagramsRead = packetsRead;
qCDebug(networking) << "readPendingDatagrams: Datagrams read:" << packetsRead;
}
emit pendingDatagrams(packetsRead);
}
void Socket::processPendingDatagrams(int) {
// setup a HifiSockAddr to read into
HifiSockAddr senderSockAddr;
while (!_incomingDatagrams.empty()) {
auto& datagram = _incomingDatagrams.front();
senderSockAddr.setAddress(datagram._senderAddress);
senderSockAddr.setPort(datagram._senderPort);
int datagramSize = datagram._datagramLength;
auto receiveTime = datagram._receiveTime;
// we're reading a packet so re-start the readyRead backup timer
_readyReadBackupTimer->start();
// save information for this packet, in case it is the one that sticks readyRead
_lastPacketSizeRead = datagramSize;
_lastPacketSockAddr = senderSockAddr;
// Process unfiltered packets first.
auto it = _unfilteredHandlers.find(senderSockAddr);
if (it != _unfilteredHandlers.end()) {
// we have a registered unfiltered handler for this HifiSockAddr - call that and return
// we have a registered unfiltered handler for this HifiSockAddr (eg. STUN packet) - call that and return
if (it->second) {
auto basePacket = BasePacket::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
auto basePacket = BasePacket::fromReceivedPacket(std::move(datagram._datagram),
datagramSize, senderSockAddr);
basePacket->setReceiveTime(receiveTime);
it->second(std::move(basePacket));
}
_incomingDatagrams.pop_front();
continue;
}
// check if this was a control packet or a data packet
bool isControlPacket = *reinterpret_cast<uint32_t*>(buffer.get()) & CONTROL_BIT_MASK;
bool isControlPacket = *reinterpret_cast<uint32_t*>(datagram._datagram.get()) & CONTROL_BIT_MASK;
if (isControlPacket) {
// setup a control packet from the data we just read
auto controlPacket = ControlPacket::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
auto controlPacket = ControlPacket::fromReceivedPacket(std::move(datagram._datagram), datagramSize, senderSockAddr);
controlPacket->setReceiveTime(receiveTime);
// move this control packet to the matching connection, if there is one

@@ -375,13 +406,13 @@ void Socket::readPendingDatagrams() {
} else {
// setup a Packet from the data we just read
auto packet = Packet::fromReceivedPacket(std::move(buffer), packetSizeWithHeader, senderSockAddr);
auto packet = Packet::fromReceivedPacket(std::move(datagram._datagram), datagramSize, senderSockAddr);
packet->setReceiveTime(receiveTime);
// save the sequence number in case this is the packet that sticks readyRead
_lastReceivedSequenceNumber = packet->getSequenceNumber();
// call our verification operator to see if this packet is verified
// call our hash verification operator to see if this packet is verified
if (!_packetFilterOperator || _packetFilterOperator(*packet)) {
if (packet->isReliable()) {
// if this was a reliable packet then signal the matching connection with the sequence number

@@ -395,6 +426,7 @@ void Socket::readPendingDatagrams() {
qCDebug(networking) << "Can't process packet: version" << (unsigned int)NLPacket::versionInHeader(*packet)
<< ", type" << NLPacket::typeInHeader(*packet);
#endif
_incomingDatagrams.pop_front();
continue;
}
}

@@ -410,6 +442,8 @@ void Socket::readPendingDatagrams() {
}
}
}
_incomingDatagrams.pop_front();
}
}
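Note on the Socket change above: datagram handling is split in two. readPendingDatagrams() drains at most MAX_DATAGRAMS_CONSECUTIVELY datagrams into the _incomingDatagrams list and emits pendingDatagrams(), and processPendingDatagrams() later consumes that list via a queued connection. A rough standard-C++ sketch of the same producer/consumer shape, without Qt and with hypothetical readOne/handle callbacks standing in for the real socket and packet code, is:

#include <chrono>
#include <functional>
#include <list>
#include <memory>
#include <optional>

struct Datagram {
    int length = 0;
    std::unique_ptr<char[]> payload;
    std::chrono::steady_clock::time_point receiveTime;
};

class DatagramQueue {
public:
    // Read phase: pull datagrams off the wire, but cap the batch so one burst
    // cannot monopolize the event loop (mirrors MAX_DATAGRAMS_CONSECUTIVELY).
    int readPending(const std::function<std::optional<Datagram>()>& readOne) {
        static const int MAX_DATAGRAMS_CONSECUTIVELY = 10000;
        int packetsRead = 0;
        while (packetsRead <= MAX_DATAGRAMS_CONSECUTIVELY) {
            auto datagram = readOne();
            if (!datagram) { break; }
            _incoming.push_back(std::move(*datagram));
            ++packetsRead;
        }
        return packetsRead; // the real code emits pendingDatagrams(packetsRead) here
    }

    // Process phase: runs later (a queued connection in the real code) and drains the list.
    void processPending(const std::function<void(Datagram&)>& handle) {
        while (!_incoming.empty()) {
            handle(_incoming.front());
            _incoming.pop_front();
        }
    }

private:
    std::list<Datagram> _incoming;
};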
@@ -17,6 +17,7 @@
#include <functional>
#include <unordered_map>
#include <mutex>
#include <list>
#include <QtCore/QObject>
#include <QtCore/QTimer>

@@ -94,6 +95,7 @@ public:
signals:
void clientHandshakeRequestComplete(const HifiSockAddr& sockAddr);
void pendingDatagrams(int datagramCount);
public slots:
void cleanupConnection(HifiSockAddr sockAddr);

@@ -101,6 +103,7 @@ public slots:
private slots:
void readPendingDatagrams();
void processPendingDatagrams(int datagramCount);
void checkForReadyReadBackup();
void handleSocketError(QAbstractSocket::SocketError socketError);

@@ -144,6 +147,17 @@ private:
int _lastPacketSizeRead { 0 };
SequenceNumber _lastReceivedSequenceNumber;
HifiSockAddr _lastPacketSockAddr;
struct Datagram {
QHostAddress _senderAddress;
int _senderPort;
int _datagramLength;
std::unique_ptr<char[]> _datagram;
p_high_resolution_clock::time_point _receiveTime;
};
std::list<Datagram> _incomingDatagrams;
int _maxDatagramsRead { 0 };
friend UDTTest;
};
@@ -286,6 +286,7 @@ void PhysicsEngine::reinsertObject(ObjectMotionState* object) {
void PhysicsEngine::processTransaction(PhysicsEngine::Transaction& transaction) {
// removes
for (auto object : transaction.objectsToRemove) {
bumpAndPruneContacts(object);
btRigidBody* body = object->getRigidBody();
if (body) {
removeDynamicsForBody(body);
@@ -86,7 +86,7 @@ void CauterizedModel::createRenderItemSet() {
// Create the render payloads
int numParts = (int)mesh->getNumParts();
for (int partIndex = 0; partIndex < numParts; partIndex++) {
if (!fbxGeometry.meshes[i].blendshapes.empty()) {
if (!fbxGeometry.meshes[i].blendshapes.empty() && _blendedVertexBuffers.find(i) == _blendedVertexBuffers.end()) {
initializeBlendshapes(fbxGeometry.meshes[i], i);
}
auto ptr = std::make_shared<CauterizedMeshPartPayload>(shared_from_this(), i, partIndex, shapeID, transform, offset);

@@ -97,6 +97,7 @@ void CauterizedModel::createRenderItemSet() {
shapeID++;
}
}
_blendedVertexBuffersInitialized = true;
} else {
Model::createRenderItemSet();
}
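Note on the check added above: guarding initializeBlendshapes() with `_blendedVertexBuffers.find(i) == _blendedVertexBuffers.end()` makes the per-mesh blendshape buffer lazy, so a mesh's buffer is only built the first time it is needed. A small hedged sketch of that lookup-before-init pattern, with a placeholder Buffer type standing in for gpu::Buffer:

#include <memory>
#include <unordered_map>
#include <vector>

struct Buffer { std::vector<float> data; };   // stand-in for gpu::Buffer

class BlendshapeCache {
public:
    // Only build the buffer for mesh `index` if it is not already present,
    // mirroring the find() guard added around initializeBlendshapes().
    std::shared_ptr<Buffer> ensureInitialized(int index, bool hasBlendshapes) {
        if (!hasBlendshapes) { return nullptr; }
        auto it = _blendedVertexBuffers.find(index);
        if (it == _blendedVertexBuffers.end()) {
            it = _blendedVertexBuffers.emplace(index, std::make_shared<Buffer>()).first;
            // ...fill the buffer with the base vertices / normals here...
        }
        return it->second;
    }

private:
    std::unordered_map<int, std::shared_ptr<Buffer>> _blendedVertexBuffers;
};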
@@ -302,15 +302,22 @@ bool Model::updateGeometry() {
assert(_meshStates.empty());
const FBXGeometry& fbxGeometry = getFBXGeometry();
int i = 0;
foreach (const FBXMesh& mesh, fbxGeometry.meshes) {
MeshState state;
state.clusterDualQuaternions.resize(mesh.clusters.size());
state.clusterMatrices.resize(mesh.clusters.size());
_meshStates.push_back(state);
if (!mesh.blendshapes.empty() && _blendedVertexBuffers.find(i) == _blendedVertexBuffers.end()) {
initializeBlendshapes(mesh, i);
}
i++;
}
_blendedVertexBuffersInitialized = true;
needFullUpdate = true;
emit rigReady();
}
return needFullUpdate;
}

@@ -1027,6 +1034,10 @@ void Model::removeFromScene(const render::ScenePointer& scene, render::Transacti
_modelMeshMaterialNames.clear();
_modelMeshRenderItemShapes.clear();
_blendedVertexBuffers.clear();
_normalsAndTangents.clear();
_blendedVertexBuffersInitialized = false;
_addedToScene = false;
_renderInfoVertexCount = 0;

@@ -1273,8 +1284,7 @@ QStringList Model::getJointNames() const {
class Blender : public QRunnable {
public:
Blender(ModelPointer model, int blendNumber, const Geometry::WeakPointer& geometry,
const QVector<FBXMesh>& meshes, const QVector<float>& blendshapeCoefficients);
Blender(ModelPointer model, int blendNumber, const Geometry::WeakPointer& geometry, const QVector<float>& blendshapeCoefficients);
virtual void run() override;

@@ -1283,37 +1293,37 @@ private:
ModelPointer _model;
int _blendNumber;
Geometry::WeakPointer _geometry;
QVector<FBXMesh> _meshes;
QVector<float> _blendshapeCoefficients;
};
Blender::Blender(ModelPointer model, int blendNumber, const Geometry::WeakPointer& geometry,
const QVector<FBXMesh>& meshes, const QVector<float>& blendshapeCoefficients) :
Blender::Blender(ModelPointer model, int blendNumber, const Geometry::WeakPointer& geometry, const QVector<float>& blendshapeCoefficients) :
_model(model),
_blendNumber(blendNumber),
_geometry(geometry),
_meshes(meshes),
_blendshapeCoefficients(blendshapeCoefficients) {
}
void Blender::run() {
DETAILED_PROFILE_RANGE_EX(simulation_animation, __FUNCTION__, 0xFFFF0000, 0, { { "url", _model->getURL().toString() } });
QVector<glm::vec3> vertices;
QVector<NormalType> normalsAndTangents;
if (_model) {
if (_model && _model->isLoaded()) {
DETAILED_PROFILE_RANGE_EX(simulation_animation, __FUNCTION__, 0xFFFF0000, 0, { { "url", _model->getURL().toString() } });
int offset = 0;
int normalsAndTangentsOffset = 0;
foreach (const FBXMesh& mesh, _meshes) {
if (mesh.blendshapes.isEmpty()) {
auto meshes = _model->getFBXGeometry().meshes;
int meshIndex = 0;
foreach (const FBXMesh& mesh, meshes) {
auto modelMeshNormalsAndTangents = _model->_normalsAndTangents.find(meshIndex++);
if (mesh.blendshapes.isEmpty() || modelMeshNormalsAndTangents == _model->_normalsAndTangents.end()) {
continue;
}
vertices += mesh.vertices;
normalsAndTangents += mesh.normalsAndTangents;
normalsAndTangents += modelMeshNormalsAndTangents->second;
glm::vec3* meshVertices = vertices.data() + offset;
NormalType* meshNormalsAndTangents = normalsAndTangents.data() + normalsAndTangentsOffset;
offset += mesh.vertices.size();
normalsAndTangentsOffset += mesh.normalsAndTangents.size();
normalsAndTangentsOffset += modelMeshNormalsAndTangents->second.size();
const float NORMAL_COEFFICIENT_SCALE = 0.01f;
for (int i = 0, n = qMin(_blendshapeCoefficients.size(), mesh.blendshapes.size()); i < n; i++) {
float vertexCoefficient = _blendshapeCoefficients.at(i);

@@ -1353,9 +1363,8 @@ void Blender::run() {
}
// post the result to the ModelBlender, which will dispatch to the model if still alive
QMetaObject::invokeMethod(DependencyManager::get<ModelBlender>().data(), "setBlendedVertices",
Q_ARG(ModelPointer, _model), Q_ARG(int, _blendNumber),
Q_ARG(const Geometry::WeakPointer&, _geometry), Q_ARG(const QVector<glm::vec3>&, vertices),
Q_ARG(const QVector<NormalType>&, normalsAndTangents));
Q_ARG(ModelPointer, _model), Q_ARG(int, _blendNumber), Q_ARG(QVector<glm::vec3>, vertices),
Q_ARG(QVector<NormalType>, normalsAndTangents));
}
void Model::setScaleToFit(bool scaleToFit, const glm::vec3& dimensions, bool forceRescale) {

@@ -1526,18 +1535,15 @@ bool Model::maybeStartBlender() {
if (isLoaded()) {
const FBXGeometry& fbxGeometry = getFBXGeometry();
if (fbxGeometry.hasBlendedMeshes()) {
QThreadPool::globalInstance()->start(new Blender(getThisPointer(), ++_blendNumber, _renderGeometry,
fbxGeometry.meshes, _blendshapeCoefficients));
QThreadPool::globalInstance()->start(new Blender(getThisPointer(), ++_blendNumber, _renderGeometry, _blendshapeCoefficients));
return true;
}
}
return false;
}
void Model::setBlendedVertices(int blendNumber, const Geometry::WeakPointer& geometry,
const QVector<glm::vec3>& vertices, const QVector<NormalType>& normalsAndTangents) {
auto geometryRef = geometry.lock();
if (!geometryRef || _renderGeometry != geometryRef || blendNumber < _appliedBlendNumber) {
void Model::setBlendedVertices(int blendNumber, const QVector<glm::vec3>& vertices, const QVector<NormalType>& normalsAndTangents) {
if (!isLoaded() || blendNumber < _appliedBlendNumber || !_blendedVertexBuffersInitialized) {
return;
}
_appliedBlendNumber = blendNumber;

@@ -1546,26 +1552,28 @@ void Model::setBlendedVertices(int blendNumber, const Geometry::WeakPointer& geo
int normalAndTangentIndex = 0;
for (int i = 0; i < fbxGeometry.meshes.size(); i++) {
const FBXMesh& mesh = fbxGeometry.meshes.at(i);
if (mesh.blendshapes.isEmpty()) {
auto meshNormalsAndTangents = _normalsAndTangents.find(i);
if (mesh.blendshapes.isEmpty() || meshNormalsAndTangents == _normalsAndTangents.end()) {
continue;
}
const auto vertexCount = mesh.vertices.size();
const auto verticesSize = vertexCount * sizeof(glm::vec3);
const auto& buffer = _blendedVertexBuffers[i];
assert(buffer && _blendedVertexBuffersInitialized);
buffer->resize(mesh.vertices.size() * sizeof(glm::vec3) + mesh.normalsAndTangents.size() * sizeof(NormalType));
buffer->setSubData(0, verticesSize, (gpu::Byte*) vertices.constData() + index * sizeof(glm::vec3));
buffer->setSubData(verticesSize, mesh.normalsAndTangents.size() * sizeof(NormalType), (const gpu::Byte*) normalsAndTangents.data() + normalAndTangentIndex * sizeof(NormalType));
const auto& buffer = _blendedVertexBuffers.find(i);
assert(buffer != _blendedVertexBuffers.end());
buffer->second->resize(mesh.vertices.size() * sizeof(glm::vec3) + meshNormalsAndTangents->second.size() * sizeof(NormalType));
buffer->second->setSubData(0, verticesSize, (gpu::Byte*) vertices.constData() + index * sizeof(glm::vec3));
buffer->second->setSubData(verticesSize, meshNormalsAndTangents->second.size() * sizeof(NormalType), (const gpu::Byte*) normalsAndTangents.data() + normalAndTangentIndex * sizeof(NormalType));
index += vertexCount;
normalAndTangentIndex += mesh.normalsAndTangents.size();
normalAndTangentIndex += meshNormalsAndTangents->second.size();
}
}
void Model::deleteGeometry() {
_deleteGeometryCounter++;
_blendedVertexBuffers.clear();
_normalsAndTangents.clear();
_blendedVertexBuffersInitialized = false;
_meshStates.clear();
_rig.destroyAnimGraph();

@@ -1599,6 +1607,7 @@ const render::ItemIDs& Model::fetchRenderItemIDs() const {
}
void Model::initializeBlendshapes(const FBXMesh& mesh, int index) {
_blendedVertexBuffers[index] = std::make_shared<gpu::Buffer>();
QVector<NormalType> normalsAndTangents;
normalsAndTangents.resize(2 * mesh.normals.size());

@@ -1627,12 +1636,10 @@ void Model::initializeBlendshapes(const FBXMesh& mesh, int index) {
}
});
const auto verticesSize = mesh.vertices.size() * sizeof(glm::vec3);
_blendedVertexBuffers[index] = std::make_shared<gpu::Buffer>();
_blendedVertexBuffers[index]->resize(mesh.vertices.size() * sizeof(glm::vec3) + normalsAndTangents.size() * sizeof(NormalType));
_blendedVertexBuffers[index]->setSubData(0, verticesSize, (const gpu::Byte*) mesh.vertices.constData());
_blendedVertexBuffers[index]->setSubData(verticesSize, normalsAndTangents.size() * sizeof(NormalType), (const gpu::Byte*) normalsAndTangents.data());
mesh.normalsAndTangents = normalsAndTangents;
_blendedVertexBuffersInitialized = true;
_normalsAndTangents[index] = normalsAndTangents;
}
void Model::createRenderItemSet() {

@@ -1673,7 +1680,7 @@ void Model::createRenderItemSet() {
// Create the render payloads
int numParts = (int)mesh->getNumParts();
for (int partIndex = 0; partIndex < numParts; partIndex++) {
if (!fbxGeometry.meshes[i].blendshapes.empty()) {
if (!fbxGeometry.meshes[i].blendshapes.empty() && _blendedVertexBuffers.find(i) == _blendedVertexBuffers.end()) {
initializeBlendshapes(fbxGeometry.meshes[i], i);
}
_modelMeshRenderItems << std::make_shared<ModelMeshPartPayload>(shared_from_this(), i, partIndex, shapeID, transform, offset);

@@ -1683,6 +1690,7 @@ void Model::createRenderItemSet() {
shapeID++;
}
}
_blendedVertexBuffersInitialized = true;
}
bool Model::isRenderable() const {

@@ -1775,35 +1783,38 @@ ModelBlender::~ModelBlender() {
}
void ModelBlender::noteRequiresBlend(ModelPointer model) {
Lock lock(_mutex);
if (_pendingBlenders < QThread::idealThreadCount()) {
if (model->maybeStartBlender()) {
_pendingBlenders++;
return;
}
return;
}
{
Lock lock(_mutex);
_modelsRequiringBlends.insert(model);
}
_modelsRequiringBlends.insert(model);
}
void ModelBlender::setBlendedVertices(ModelPointer model, int blendNumber, const Geometry::WeakPointer& geometry,
const QVector<glm::vec3>& vertices, const QVector<NormalType>& normalsAndTangents) {
void ModelBlender::setBlendedVertices(ModelPointer model, int blendNumber, QVector<glm::vec3> vertices, QVector<NormalType> normalsAndTangents) {
if (model) {
model->setBlendedVertices(blendNumber, geometry, vertices, normalsAndTangents);
model->setBlendedVertices(blendNumber, vertices, normalsAndTangents);
}
_pendingBlenders--;
{
Lock lock(_mutex);
for (auto i = _modelsRequiringBlends.begin(); i != _modelsRequiringBlends.end();) {
_pendingBlenders--;
_modelsRequiringBlends.erase(model);
std::set<ModelWeakPointer, std::owner_less<ModelWeakPointer>> modelsToErase;
for (auto i = _modelsRequiringBlends.begin(); i != _modelsRequiringBlends.end(); i++) {
auto weakPtr = *i;
_modelsRequiringBlends.erase(i++); // remove front of the set
ModelPointer nextModel = weakPtr.lock();
if (nextModel && nextModel->maybeStartBlender()) {
_pendingBlenders++;
return;
break;
} else {
modelsToErase.insert(weakPtr);
}
}
for (auto& weakPtr : modelsToErase) {
_modelsRequiringBlends.erase(weakPtr);
}
}
}
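Note on the reworked ModelBlender::setBlendedVertices() above: it walks a set of weakly-held models, starts the next pending blend it can, and collects expired or declined entries to erase afterwards. A compact, hedged sketch of that weak_ptr-set drain pattern, using a generic Worker type in place of Model (not the project's actual API):

#include <memory>
#include <set>

struct Worker {
    bool maybeStart() { return true; }   // stand-in for Model::maybeStartBlender()
};

using WorkerWeakPtr = std::weak_ptr<Worker>;

// Start the next live worker in the set; anything expired or unable to start is erased.
void startNextPending(std::set<WorkerWeakPtr, std::owner_less<WorkerWeakPtr>>& pending,
                      int& pendingCount) {
    std::set<WorkerWeakPtr, std::owner_less<WorkerWeakPtr>> toErase;
    for (auto it = pending.begin(); it != pending.end(); ++it) {
        auto weak = *it;
        if (auto next = weak.lock(); next && next->maybeStart()) {
            ++pendingCount;
            break;                 // one new job in flight is enough for this pass
        } else {
            toErase.insert(weak);  // expired or declined: drop it from the pending set
        }
    }
    for (const auto& weak : toErase) {
        pending.erase(weak);
    }
}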
@@ -144,8 +144,7 @@ public:
bool maybeStartBlender();
/// Sets blended vertices computed in a separate thread.
void setBlendedVertices(int blendNumber, const Geometry::WeakPointer& geometry,
const QVector<glm::vec3>& vertices, const QVector<NormalType>& normalsAndTangents);
void setBlendedVertices(int blendNumber, const QVector<glm::vec3>& vertices, const QVector<NormalType>& normalsAndTangents);
bool isLoaded() const { return (bool)_renderGeometry && _renderGeometry->isGeometryLoaded(); }
bool isAddedToScene() const { return _addedToScene; }

@@ -345,7 +344,7 @@ public:
void addMaterial(graphics::MaterialLayer material, const std::string& parentMaterialName);
void removeMaterial(graphics::MaterialPointer material, const std::string& parentMaterialName);
bool areBlendedVertexBuffersInitialized(int index) { return _blendedVertexBuffersInitialized; }
std::unordered_map<int, QVector<NormalType>> _normalsAndTangents;
public slots:
void loadURLFinished(bool success);

@@ -521,8 +520,7 @@ public:
bool shouldComputeBlendshapes() { return _computeBlendshapes; }
public slots:
void setBlendedVertices(ModelPointer model, int blendNumber, const Geometry::WeakPointer& geometry,
const QVector<glm::vec3>& vertices, const QVector<NormalType>& normalsAndTangents);
void setBlendedVertices(ModelPointer model, int blendNumber, QVector<glm::vec3> vertices, QVector<NormalType> normalsAndTangents);
void setComputeBlendshapes(bool computeBlendshapes) { _computeBlendshapes = computeBlendshapes; }
private:
@@ -16,55 +16,16 @@
#include "NumericalConstants.h"
#include "shared/ConicalViewFrustum.h"
/* PrioritySortUtil is a helper for sorting 3D things relative to a ViewFrustum. To use:
// PrioritySortUtil is a helper for sorting 3D things relative to a ViewFrustum.
(1) Derive a class from pure-virtual PrioritySortUtil::Sortable that wraps a copy of
the Thing you want to prioritize and sort:
class SortableWrapper: public PrioritySortUtil::Sortable {
public:
SortableWrapper(const Thing& thing) : _thing(thing) { }
glm::vec3 getPosition() const override { return _thing->getPosition(); }
float getRadius() const override { return 0.5f * _thing->getBoundingRadius(); }
uint64_t getTimestamp() const override { return _thing->getLastTime(); }
Thing getThing() const { return _thing; }
private:
Thing _thing;
};
(2) Make a PrioritySortUtil::PriorityQueue<Thing> and add them to the queue:
PrioritySortUtil::PriorityQueue<SortableWrapper> sortedThings(viewFrustum);
std::priority_queue< PrioritySortUtil::Sortable<Thing> > sortedThings;
for (thing in things) {
sortedThings.push(SortableWrapper(thing));
}
(3) Loop over your priority queue and do timeboxed work:
NOTE: Be careful using references to members of instances of T from std::priority_queue<T>.
Under the hood std::priority_queue<T> may re-use instances of T.
For example, after a pop() or a push() the top T may have the same memory address
as the top T before the pop() or push() (but point to a swapped instance of T).
This causes a reference to member variable of T to point to a different value
when operations taken on std::priority_queue<T> shuffle around the instances of T.
uint64_t cutoffTime = usecTimestampNow() + TIME_BUDGET;
while (!sortedThings.empty()) {
const Thing& thing = sortedThings.top();
// ...do work on thing...
sortedThings.pop();
if (usecTimestampNow() > cutoffTime) {
break;
}
}
*/
const float OUT_OF_VIEW_PENALTY = -10.0f;
const float OUT_OF_VIEW_THRESHOLD = 0.5f * OUT_OF_VIEW_PENALTY;
namespace PrioritySortUtil {
constexpr float DEFAULT_ANGULAR_COEF { 1.0f };
constexpr float DEFAULT_CENTER_COEF { 0.5f };
constexpr float DEFAULT_AGE_COEF { 0.25f / (float)(USECS_PER_SECOND) };
constexpr float DEFAULT_AGE_COEF { 0.25f };
class Sortable {
public:

@@ -84,8 +45,9 @@ namespace PrioritySortUtil {
PriorityQueue() = delete;
PriorityQueue(const ConicalViewFrustums& views) : _views(views) { }
PriorityQueue(const ConicalViewFrustums& views, float angularWeight, float centerWeight, float ageWeight)
: _views(views), _angularWeight(angularWeight), _centerWeight(centerWeight), _ageWeight(ageWeight)
{ }
: _views(views), _angularWeight(angularWeight), _centerWeight(centerWeight), _ageWeight(ageWeight)
, _usecCurrentTime(usecTimestampNow()) {
}
void setViews(const ConicalViewFrustums& views) { _views = views; }

@@ -93,6 +55,7 @@ namespace PrioritySortUtil {
_angularWeight = angularWeight;
_centerWeight = centerWeight;
_ageWeight = ageWeight;
_usecCurrentTime = usecTimestampNow();
}
size_t size() const { return _vector.size(); }

@@ -131,23 +94,18 @@ namespace PrioritySortUtil {
glm::vec3 offset = position - view.getPosition();
float distance = glm::length(offset) + 0.001f; // add 1mm to avoid divide by zero
const float MIN_RADIUS = 0.1f; // WORKAROUND for zero size objects (we still want them to sort by distance)
float radius = glm::min(thing.getRadius(), MIN_RADIUS);
float cosineAngle = (glm::dot(offset, view.getDirection()) / distance);
float age = (float)(usecTimestampNow() - thing.getTimestamp());
float radius = glm::max(thing.getRadius(), MIN_RADIUS);
// Other item's angle from view centre:
float cosineAngle = glm::dot(offset, view.getDirection()) / distance;
float age = float((_usecCurrentTime - thing.getTimestamp()) / USECS_PER_SECOND);
// we modulatate "age" drift rate by the cosineAngle term to make periphrial objects sort forward
// at a reduced rate but we don't want the "age" term to go zero or negative so we clamp it
const float MIN_COSINE_ANGLE_FACTOR = 0.1f;
float cosineAngleFactor = glm::max(cosineAngle, MIN_COSINE_ANGLE_FACTOR);
float priority = _angularWeight * glm::max(radius, MIN_RADIUS) / distance
+ _centerWeight * cosineAngle
+ _ageWeight * cosineAngleFactor * age;
// the "age" term accumulates at the sum of all weights
float angularSize = radius / distance;
float priority = (_angularWeight * angularSize + _centerWeight * cosineAngle) * (age + 1.0f) + _ageWeight * age;
// decrement priority of things outside keyhole
if (distance - radius > view.getRadius()) {
if (!view.intersects(offset, distance, radius)) {
constexpr float OUT_OF_VIEW_PENALTY = -10.0f;
priority += OUT_OF_VIEW_PENALTY;
}
}

@@ -159,12 +117,13 @@ namespace PrioritySortUtil {
float _angularWeight { DEFAULT_ANGULAR_COEF };
float _centerWeight { DEFAULT_CENTER_COEF };
float _ageWeight { DEFAULT_AGE_COEF };
quint64 _usecCurrentTime { 0 };
};
} // namespace PrioritySortUtil
// for now we're keeping hard-coded sorted time budgets in one spot
// for now we're keeping hard-coded sorted time budgets in one spot
const uint64_t MAX_UPDATE_RENDERABLES_TIME_BUDGET = 2000; // usec
const uint64_t MIN_SORTED_UPDATE_RENDERABLES_TIME_BUDGET = 1000; // usec
const uint64_t MAX_UPDATE_AVATARS_TIME_BUDGET = 2000; // usec
#endif // hifi_PrioritySortUtil_h
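Note on the PrioritySortUtil change above: the new priority expression scales the view-dependent terms by (age + 1) instead of adding a separately clamped age term, measures age in seconds, and applies a fixed penalty when the item is outside the view. As a reference only, a standalone restatement of that arithmetic (plain floats in place of the ConicalViewFrustum types, with the default weights from the header) might read:

#include <algorithm>

// Assumes the caller has already computed distance, cosineAngle and age in seconds.
float computePriority(float radius, float distance, float cosineAngle, float ageSeconds,
                      bool inView,
                      float angularWeight = 1.0f, float centerWeight = 0.5f, float ageWeight = 0.25f) {
    const float MIN_RADIUS = 0.1f;                     // workaround for zero-size objects
    float angularSize = std::max(radius, MIN_RADIUS) / distance;
    float priority = (angularWeight * angularSize + centerWeight * cosineAngle) * (ageSeconds + 1.0f)
                     + ageWeight * ageSeconds;
    if (!inView) {
        constexpr float OUT_OF_VIEW_PENALTY = -10.0f;  // matches the keyhole penalty above
        priority += OUT_OF_VIEW_PENALTY;
    }
    return priority;
}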
@@ -39,6 +39,10 @@ var DEFAULT_SCRIPTS_SEPARATE = [
//"system/chat.js"
];
if (Settings.getValue("enableInterstitialMode", false)) {
DEFAULT_SCRIPTS_SEPARATE.push("system/interstitialPage.js");
}
// add a menu item for debugging
var MENU_CATEGORY = "Developer";
var MENU_ITEM = "Debug defaultScripts.js";
@ -37,7 +37,7 @@
|
|||
this.highlightedEntities = [];
|
||||
|
||||
this.parameters = dispatcherUtils.makeDispatcherModuleParameters(
|
||||
480,
|
||||
120,
|
||||
this.hand === dispatcherUtils.RIGHT_HAND ? ["rightHand"] : ["leftHand"],
|
||||
[],
|
||||
100);
|
||||
|
|
|
@@ -29,7 +29,7 @@ Script.include("/~/system/libraries/utils.js");
this.reticleMaxY;
this.parameters = makeDispatcherModuleParameters(
160,
200,
this.hand === RIGHT_HAND ? ["rightHand", "rightHandEquip", "rightHandTrigger"] : ["leftHand", "leftHandEquip", "leftHandTrigger"],
[],
100,
@@ -21,7 +21,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
this.disableModules = false;
var NO_HAND_LASER = -1; // Invalid hand parameter so that default laser is not displayed.
this.parameters = makeDispatcherModuleParameters(
200, // Not too high otherwise the tablet laser doesn't work.
240, // Not too high otherwise the tablet laser doesn't work.
this.hand === RIGHT_HAND
? ["rightHand", "rightHandEquip", "rightHandTrigger"]
: ["leftHand", "leftHandEquip", "leftHandTrigger"],
@@ -26,7 +26,7 @@ Script.include("/~/system/libraries/cloneEntityUtils.js");
this.hapticTargetID = null;
this.parameters = makeDispatcherModuleParameters(
500,
140,
this.hand === RIGHT_HAND ? ["rightHand"] : ["leftHand"],
[],
100);
@@ -21,7 +21,7 @@
this.hyperlink = "";
this.parameters = makeDispatcherModuleParameters(
485,
125,
this.hand === RIGHT_HAND ? ["rightHand"] : ["leftHand"],
[],
100);
@@ -24,6 +24,9 @@ Script.include("/~/system/libraries/controllers.js");
// XXX this.ignoreIK = (grabbableData.ignoreIK !== undefined) ? grabbableData.ignoreIK : true;
// XXX this.kinematicGrab = (grabbableData.kinematic !== undefined) ? grabbableData.kinematic : NEAR_GRABBING_KINEMATIC;
// this offset needs to match the one in libraries/display-plugins/src/display-plugins/hmd/HmdDisplayPlugin.cpp:378
var GRAB_POINT_SPHERE_OFFSET = { x: 0.04, y: 0.13, z: 0.039 }; // x = upward, y = forward, z = lateral
function getGrabOffset(handController) {
var offset = GRAB_POINT_SPHERE_OFFSET;
if (handController === Controller.Standard.LeftHand) {

@@ -54,7 +57,7 @@ Script.include("/~/system/libraries/controllers.js");
this.cloneAllowed = true;
this.parameters = makeDispatcherModuleParameters(
500,
140,
this.hand === RIGHT_HAND ? ["rightHand"] : ["leftHand"],
[],
100);
@@ -29,7 +29,7 @@ Script.include("/~/system/libraries/controllerDispatcherUtils.js");
this.startSent = false;
this.parameters = makeDispatcherModuleParameters(
480,
120,
this.hand === RIGHT_HAND ? ["rightHandTrigger", "rightHand"] : ["leftHandTrigger", "leftHand"],
[],
100);
@@ -22,14 +22,14 @@ Script.include("/~/system/libraries/controllers.js");
this.running = false;
this.parameters = makeDispatcherModuleParameters(
120,
160,
this.hand === RIGHT_HAND ? ["rightHand"] : ["leftHand"],
[],
100,
makeLaserParams(hand, true));
this.grabModuleWantsNearbyOverlay = function(controllerData) {
if (controllerData.triggerValues[this.hand] > TRIGGER_ON_VALUE) {
if (controllerData.triggerValues[this.hand] > TRIGGER_ON_VALUE || controllerData.secondaryValues[this.hand] > BUMPER_ON_VALUE) {
var nearGrabName = this.hand === RIGHT_HAND ? "RightNearParentingGrabOverlay" : "LeftNearParentingGrabOverlay";
var nearGrabModule = getEnabledModuleByName(nearGrabName);
if (nearGrabModule) {

@@ -42,6 +42,23 @@ Script.include("/~/system/libraries/controllers.js");
return true;
}
}
nearGrabName = this.hand === RIGHT_HAND ? "RightNearParentingGrabEntity" : "LeftNearParentingGrabEntity";
nearGrabModule = getEnabledModuleByName(nearGrabName);
if (nearGrabModule && nearGrabModule.isReady(controllerData)) {
// check for if near parent module is active.
var isNearGrabModuleActive = nearGrabModule.isReady(controllerData).active;
if (isNearGrabModuleActive) {
// if true, return true.
return isNearGrabModuleActive;
} else {
// check near action grab entity as a second pass.
nearGrabName = this.hand === RIGHT_HAND ? "RightNearActionGrabEntity" : "LeftNearActionGrabEntity";
nearGrabModule = getEnabledModuleByName(nearGrabName);
if (nearGrabModule && nearGrabModule.isReady(controllerData)) {
return nearGrabModule.isReady(controllerData).active;
}
}
}
}
return false;
};

@@ -50,14 +67,14 @@ Script.include("/~/system/libraries/controllers.js");
return this.hand === RIGHT_HAND ? leftOverlayLaserInput : rightOverlayLaserInput;
};
this.isPointingAtTriggerable = function(controllerData, triggerPressed) {
this.isPointingAtTriggerable = function(controllerData, triggerPressed, checkEntitiesOnly) {
// allow pointing at tablet, unlocked web entities, or web overlays automatically without pressing trigger,
// but for pointing at locked web entities or non-web overlays user must be pressing trigger
var intersection = controllerData.rayPicks[this.hand];
if (intersection.type === Picks.INTERSECTED_OVERLAY) {
var objectID = intersection.objectID;
var objectID = intersection.objectID;
if (intersection.type === Picks.INTERSECTED_OVERLAY && !checkEntitiesOnly) {
if ((HMD.tabletID && objectID === HMD.tabletID) ||
(HMD.tabletScreenID && objectID === HMD.tabletScreenID) ||
(HMD.tabletScreenID && objectID === HMD.tabletScreenID) ||
(HMD.homeButtonID && objectID === HMD.homeButtonID)) {
return true;
} else {

@@ -65,9 +82,9 @@ Script.include("/~/system/libraries/controllers.js");
return overlayType === "web3d" || triggerPressed;
}
} else if (intersection.type === Picks.INTERSECTED_ENTITY) {
var entityProperty = Entities.getEntityProperties(intersection.objectID);
var entityType = entityProperty.type;
var isLocked = entityProperty.locked;
var entityProperties = Entities.getEntityProperties(objectID);
var entityType = entityProperties.type;
var isLocked = entityProperties.locked;
return entityType === "Web" && (!isLocked || triggerPressed);
}
return false;

@@ -103,7 +120,8 @@ Script.include("/~/system/libraries/controllers.js");
var isTriggerPressed = controllerData.triggerValues[this.hand] > TRIGGER_OFF_VALUE &&
controllerData.triggerValues[this.otherHand] <= TRIGGER_OFF_VALUE;
var allowThisModule = !otherModuleRunning || isTriggerPressed;
if (allowThisModule && this.isPointingAtTriggerable(controllerData, isTriggerPressed)) {
if (allowThisModule && this.isPointingAtTriggerable(controllerData, isTriggerPressed, false)) {
this.updateAllwaysOn();
if (isTriggerPressed) {
this.dominantHandOverride = true; // Override dominant hand.

@@ -121,13 +139,27 @@ Script.include("/~/system/libraries/controllers.js");
otherModuleRunning = otherModuleRunning && this.getDominantHand() !== this.hand; // Auto-swap to dominant hand.
otherModuleRunning = otherModuleRunning || this.getOtherModule().dominantHandOverride; // Override dominant hand.
var grabModuleNeedsToRun = this.grabModuleWantsNearbyOverlay(controllerData);
// only allow for non-near grab
var allowThisModule = !otherModuleRunning && !grabModuleNeedsToRun;
var isTriggerPressed = controllerData.triggerValues[this.hand] > TRIGGER_OFF_VALUE;
var laserOn = isTriggerPressed || this.parameters.handLaser.allwaysOn;
if (allowThisModule && (laserOn && this.isPointingAtTriggerable(controllerData, isTriggerPressed))) {
this.running = true;
return makeRunningValues(true, [], []);
if (allowThisModule) {
if (isTriggerPressed && !this.isPointingAtTriggerable(controllerData, isTriggerPressed, true)) {
// if trigger is down + not pointing at a web entity, keep running web surface laser
this.running = true;
return makeRunningValues(true, [], []);
} else if (laserOn && this.isPointingAtTriggerable(controllerData, isTriggerPressed, false)) {
// if trigger is down + pointing at a web entity/overlay, keep running web surface laser
this.running = true;
return makeRunningValues(true, [], []);
} else {
this.deleteContextOverlay();
this.running = false;
this.dominantHandOverride = false;
return makeRunningValues(false, [], []);
}
}
// if module needs to stop from near grabs or other modules are running, stop it.
this.deleteContextOverlay();
this.running = false;
this.dominantHandOverride = false;
@@ -860,7 +860,7 @@ var toolBar = (function () {
propertiesTool.setVisible(false);
selectionManager.clearSelections();
cameraManager.disable();
selectionDisplay.triggerMapping.disable();
selectionDisplay.disableTriggerMapping();
tablet.landscape = false;
Controller.disableMapping(CONTROLLER_MAPPING_NAME);
} else {

@@ -876,7 +876,7 @@ var toolBar = (function () {
gridTool.setVisible(true);
grid.setEnabled(true);
propertiesTool.setVisible(true);
selectionDisplay.triggerMapping.enable();
selectionDisplay.enableTriggerMapping();
print("starting tablet in landscape mode");
tablet.landscape = true;
Controller.enableMapping(CONTROLLER_MAPPING_NAME);