Mirror of https://github.com/overte-org/overte.git
Merge branch 'master' of https://github.com/highfidelity/hifi into light
Commit 44f63ef802
45 changed files with 2484 additions and 699 deletions
@@ -19,6 +19,10 @@ Documentation is available at [docs.highfidelity.com](https://docs.highfidelity.
 
 There is also detailed [documentation on our coding standards](https://wiki.highfidelity.com/wiki/Coding_Standards).
 
+Contributor License Agreement (CLA)
+=========
+Technology companies frequently receive and use code from contributors outside the company's development team. Outside code can be a tremendous resource, but it also carries responsibility. Best practice for accepting outside contributions consists of an Apache-type Contributor License Agreement (CLA). We have modeled the High Fidelity CLA after the CLA that Google presents to developers for contributions to their projects. This CLA does not transfer ownership of code, instead simply granting a non-exclusive right for High Fidelity to use the code you’ve contributed. In that regard, you should be sure you have permission if the work relates to or uses the resources of a company that you work for. You will be asked to sign our CLA when you create your first PR or when the CLA is updated. You can also [review it here](https://gist.githubusercontent.com/hifi-gustavo/fef8f06a8233d42a0040d45c3efb97a9/raw/9981827eb94f0b18666083670b6f6a02929fb402/High%2520Fidelity%2520CLA). We sincerely appreciate your contribution and efforts toward the success of the platform.
+
 Build Instructions
 =========
 All information required to build is found in the [build guide](BUILD.md).
@@ -29,6 +29,7 @@
 #include <UUID.h>
 #include <CPUDetect.h>
 
+#include "AudioLogging.h"
 #include "AudioHelpers.h"
 #include "AudioRingBuffer.h"
 #include "AudioMixerClientData.h"
@@ -130,7 +131,7 @@ void AudioMixer::queueReplicatedAudioPacket(QSharedPointer<ReceivedMessage> mess
 PacketType rewrittenType = PacketTypeEnum::getReplicatedPacketMapping().key(message->getType());
 
 if (rewrittenType == PacketType::Unknown) {
-qDebug() << "Cannot unwrap replicated packet type not present in REPLICATED_PACKET_WRAPPING";
+qCDebug(audio) << "Cannot unwrap replicated packet type not present in REPLICATED_PACKET_WRAPPING";
 }
 
 auto replicatedMessage = QSharedPointer<ReceivedMessage>::create(audioData, rewrittenType,
@@ -345,7 +346,7 @@ void AudioMixer::sendStatsPacket() {
 
 void AudioMixer::run() {
 
-qDebug() << "Waiting for connection to domain to request settings from domain-server.";
+qCDebug(audio) << "Waiting for connection to domain to request settings from domain-server.";
 
 // wait until we have the domain-server settings, otherwise we bail
 DomainHandler& domainHandler = DependencyManager::get<NodeList>()->getDomainHandler();
@@ -502,14 +503,14 @@ void AudioMixer::throttle(std::chrono::microseconds duration, int frame) {
 int proportionalTerm = 1 + (_trailingMixRatio - TARGET) / 0.1f;
 _throttlingRatio += THROTTLE_RATE * proportionalTerm;
 _throttlingRatio = std::min(_throttlingRatio, 1.0f);
-qDebug("audio-mixer is struggling (%f mix/sleep) - throttling %f of streams",
-(double)_trailingMixRatio, (double)_throttlingRatio);
+qCDebug(audio) << "audio-mixer is struggling (" << _trailingMixRatio << "mix/sleep) - throttling"
+<< _throttlingRatio << "of streams";
 } else if (_throttlingRatio > 0.0f && _trailingMixRatio <= BACKOFF_TARGET) {
 int proportionalTerm = 1 + (TARGET - _trailingMixRatio) / 0.2f;
 _throttlingRatio -= BACKOFF_RATE * proportionalTerm;
 _throttlingRatio = std::max(_throttlingRatio, 0.0f);
-qDebug("audio-mixer is recovering (%f mix/sleep) - throttling %f of streams",
-(double)_trailingMixRatio, (double)_throttlingRatio);
+qCDebug(audio) << "audio-mixer is recovering (" << _trailingMixRatio << "mix/sleep) - throttling"
+<< _throttlingRatio << "of streams";
 }
 }
 }
@@ -534,7 +535,7 @@ void AudioMixer::clearDomainSettings() {
 }
 
 void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
-qDebug() << "AVX2 Support:" << (cpuSupportsAVX2() ? "enabled" : "disabled");
+qCDebug(audio) << "AVX2 Support:" << (cpuSupportsAVX2() ? "enabled" : "disabled");
 
 if (settingsObject.contains(AUDIO_THREADING_GROUP_KEY)) {
 QJsonObject audioThreadingGroupObject = settingsObject[AUDIO_THREADING_GROUP_KEY].toObject();
@@ -557,7 +558,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 const QString DYNAMIC_JITTER_BUFFER_JSON_KEY = "dynamic_jitter_buffer";
 bool enableDynamicJitterBuffer = audioBufferGroupObject[DYNAMIC_JITTER_BUFFER_JSON_KEY].toBool();
 if (enableDynamicJitterBuffer) {
-qDebug() << "Enabling dynamic jitter buffers.";
+qCDebug(audio) << "Enabling dynamic jitter buffers.";
 
 bool ok;
 const QString DESIRED_JITTER_BUFFER_FRAMES_KEY = "static_desired_jitter_buffer_frames";
@@ -565,9 +566,9 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 if (!ok) {
 _numStaticJitterFrames = InboundAudioStream::DEFAULT_STATIC_JITTER_FRAMES;
 }
-qDebug() << "Static desired jitter buffer frames:" << _numStaticJitterFrames;
+qCDebug(audio) << "Static desired jitter buffer frames:" << _numStaticJitterFrames;
 } else {
-qDebug() << "Disabling dynamic jitter buffers.";
+qCDebug(audio) << "Disabling dynamic jitter buffers.";
 _numStaticJitterFrames = DISABLE_STATIC_JITTER_FRAMES;
 }
 
@@ -621,7 +622,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 if (audioEnvGroupObject[CODEC_PREFERENCE_ORDER].isString()) {
 QString codecPreferenceOrder = audioEnvGroupObject[CODEC_PREFERENCE_ORDER].toString();
 _codecPreferenceOrder = codecPreferenceOrder.split(",");
-qDebug() << "Codec preference order changed to" << _codecPreferenceOrder;
+qCDebug(audio) << "Codec preference order changed to" << _codecPreferenceOrder;
 }
 
 const QString ATTENATION_PER_DOULING_IN_DISTANCE = "attenuation_per_doubling_in_distance";
@@ -630,7 +631,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 float attenuation = audioEnvGroupObject[ATTENATION_PER_DOULING_IN_DISTANCE].toString().toFloat(&ok);
 if (ok) {
 _attenuationPerDoublingInDistance = attenuation;
-qDebug() << "Attenuation per doubling in distance changed to" << _attenuationPerDoublingInDistance;
+qCDebug(audio) << "Attenuation per doubling in distance changed to" << _attenuationPerDoublingInDistance;
 }
 }
 
@@ -640,7 +641,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 float noiseMutingThreshold = audioEnvGroupObject[NOISE_MUTING_THRESHOLD].toString().toFloat(&ok);
 if (ok) {
 _noiseMutingThreshold = noiseMutingThreshold;
-qDebug() << "Noise muting threshold changed to" << _noiseMutingThreshold;
+qCDebug(audio) << "Noise muting threshold changed to" << _noiseMutingThreshold;
 }
 }
 
@@ -680,8 +681,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 glm::vec3 dimensions(xMax - xMin, yMax - yMin, zMax - zMin);
 AABox zoneAABox(corner, dimensions);
 _audioZones.insert(zone, zoneAABox);
-qDebug() << "Added zone:" << zone << "(corner:" << corner
-<< ", dimensions:" << dimensions << ")";
+qCDebug(audio) << "Added zone:" << zone << "(corner:" << corner << ", dimensions:" << dimensions << ")";
 }
 }
 }
@@ -712,7 +712,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 _audioZones.contains(settings.source) && _audioZones.contains(settings.listener)) {
 
 _zoneSettings.push_back(settings);
-qDebug() << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
+qCDebug(audio) << "Added Coefficient:" << settings.source << settings.listener << settings.coefficient;
 }
 }
 }
@@ -745,7 +745,7 @@ void AudioMixer::parseSettingsObject(const QJsonObject& settingsObject) {
 
 _zoneReverbSettings.push_back(settings);
 
-qDebug() << "Added Reverb:" << zone << reverbTime << wetLevel;
+qCDebug(audio) << "Added Reverb:" << zone << reverbTime << wetLevel;
 }
 }
 }
@@ -19,6 +19,7 @@
 
 #include "InjectedAudioStream.h"
 
+#include "AudioLogging.h"
 #include "AudioHelpers.h"
 #include "AudioMixer.h"
 #include "AudioMixerClientData.h"
@@ -132,7 +133,7 @@ void AudioMixerClientData::optionallyReplicatePacket(ReceivedMessage& message, c
 if (PacketTypeEnum::getReplicatedPacketMapping().key(message.getType()) != PacketType::Unknown) {
 mirroredType = message.getType();
 } else {
-qDebug() << "Packet passed to optionallyReplicatePacket was not a replicatable type - returning";
+qCDebug(audio) << "Packet passed to optionallyReplicatePacket was not a replicatable type - returning";
 return;
 }
 }
@@ -189,8 +190,16 @@ void AudioMixerClientData::parsePerAvatarGainSet(ReceivedMessage& message, const
 uint8_t packedGain;
 message.readPrimitive(&packedGain);
 float gain = unpackFloatGainFromByte(packedGain);
-hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
-qDebug() << "Setting gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
+
+if (avatarUuid.isNull()) {
+// set the MASTER avatar gain
+setMasterAvatarGain(gain);
+qCDebug(audio) << "Setting MASTER avatar gain for " << uuid << " to " << gain;
+} else {
+// set the per-source avatar gain
+hrtfForStream(avatarUuid, QUuid()).setGainAdjustment(gain);
+qCDebug(audio) << "Setting avatar gain adjustment for hrtf[" << uuid << "][" << avatarUuid << "] to " << gain;
+}
 }
 
 void AudioMixerClientData::parseNodeIgnoreRequest(QSharedPointer<ReceivedMessage> message, const SharedNodePointer& node) {
@@ -276,7 +285,7 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
 
 auto avatarAudioStream = new AvatarAudioStream(isStereo, AudioMixer::getStaticJitterFrames());
 avatarAudioStream->setupCodec(_codec, _selectedCodecName, AudioConstants::MONO);
-qDebug() << "creating new AvatarAudioStream... codec:" << _selectedCodecName;
+qCDebug(audio) << "creating new AvatarAudioStream... codec:" << _selectedCodecName;
 
 connect(avatarAudioStream, &InboundAudioStream::mismatchedAudioCodec,
 this, &AudioMixerClientData::handleMismatchAudioFormat);
@@ -315,7 +324,7 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
 
 #if INJECTORS_SUPPORT_CODECS
 injectorStream->setupCodec(_codec, _selectedCodecName, isStereo ? AudioConstants::STEREO : AudioConstants::MONO);
-qDebug() << "creating new injectorStream... codec:" << _selectedCodecName;
+qCDebug(audio) << "creating new injectorStream... codec:" << _selectedCodecName;
 #endif
 
 auto emplaced = _audioStreams.emplace(
@@ -339,8 +348,8 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
 auto parseResult = matchingStream->parseData(message);
 
 if (matchingStream->getOverflowCount() > overflowBefore) {
-qDebug() << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
-qDebug() << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
+qCDebug(audio) << "Just overflowed on stream from" << message.getSourceID() << "at" << message.getSenderSockAddr();
+qCDebug(audio) << "This stream is for" << (isMicStream ? "microphone audio" : "injected audio");
 }
 
 return parseResult;
@@ -689,7 +698,7 @@ void AudioMixerClientData::setupCodecForReplicatedAgent(QSharedPointer<ReceivedM
 auto codecString = message->readString();
 
 if (codecString != _selectedCodecName) {
-qDebug() << "Manually setting codec for replicated agent" << uuidStringWithoutCurlyBraces(getNodeID())
+qCDebug(audio) << "Manually setting codec for replicated agent" << uuidStringWithoutCurlyBraces(getNodeID())
 << "-" << codecString;
 
 const std::pair<QString, CodecPluginPointer> codec = AudioMixer::negotiateCodec({ codecString });
@@ -83,6 +83,9 @@ public:
 // uses randomization to have the AudioMixer send a stats packet to this node around every second
 bool shouldSendStats(int frameNumber);
 
+float getMasterAvatarGain() const { return _masterAvatarGain; }
+void setMasterAvatarGain(float gain) { _masterAvatarGain = gain; }
+
 AudioLimiter audioLimiter;
 
 void setupCodec(CodecPluginPointer codec, const QString& codecName);
@@ -175,6 +178,8 @@ private:
 
 int _frameToSendStats { 0 };
 
+float _masterAvatarGain { 1.0f }; // per-listener mixing gain, applied only to avatars
+
 CodecPluginPointer _codec;
 QString _selectedCodecName;
 Encoder* _encoder{ nullptr }; // for outbound mixed stream
@@ -48,8 +48,8 @@ void sendEnvironmentPacket(const SharedNodePointer& node, AudioMixerClientData&
 // mix helpers
 inline float approximateGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
 const glm::vec3& relativePosition);
-inline float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-const glm::vec3& relativePosition, bool isEcho);
+inline float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
+const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, bool isEcho);
 inline float computeAzimuth(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
 const glm::vec3& relativePosition);
 
@@ -266,7 +266,7 @@ void AudioMixerSlave::addStream(AudioMixerClientData& listenerNodeData, const QU
 glm::vec3 relativePosition = streamToAdd.getPosition() - listeningNodeStream.getPosition();
 
 float distance = glm::max(glm::length(relativePosition), EPSILON);
-float gain = computeGain(listeningNodeStream, streamToAdd, relativePosition, isEcho);
+float gain = computeGain(listenerNodeData, listeningNodeStream, streamToAdd, relativePosition, isEcho);
 float azimuth = isEcho ? 0.0f : computeAzimuth(listeningNodeStream, listeningNodeStream, relativePosition);
 const int HRTF_DATASET_INDEX = 1;
 
@@ -484,10 +484,12 @@ float approximateGain(const AvatarAudioStream& listeningNodeStream, const Positi
 // when throttling, as close streams are expected to be heard by a user
 float distance = glm::length(relativePosition);
 return gain / distance;
+
+// avatar: skip master gain - it is constant for all streams
 }
 
-float computeGain(const AvatarAudioStream& listeningNodeStream, const PositionalAudioStream& streamToAdd,
-const glm::vec3& relativePosition, bool isEcho) {
+float computeGain(const AudioMixerClientData& listenerNodeData, const AvatarAudioStream& listeningNodeStream,
+const PositionalAudioStream& streamToAdd, const glm::vec3& relativePosition, bool isEcho) {
 float gain = 1.0f;
 
 // injector: apply attenuation
@@ -507,6 +509,9 @@ float computeGain(const AvatarAudioStream& listeningNodeStream, const Positional
 float offAxisCoefficient = MAX_OFF_AXIS_ATTENUATION + (angleOfDelivery * (OFF_AXIS_ATTENUATION_STEP / PI_OVER_TWO));
 
 gain *= offAxisCoefficient;
+
+// apply master gain, only to avatars
+gain *= listenerNodeData.getMasterAvatarGain();
 }
 
 auto& audioZones = AudioMixer::getAudioZones();
@@ -23,6 +23,17 @@ EntityTreeSendThread::EntityTreeSendThread(OctreeServer* myServer, const SharedN
 {
 connect(std::static_pointer_cast<EntityTree>(myServer->getOctree()).get(), &EntityTree::editingEntityPointer, this, &EntityTreeSendThread::editingEntityPointer, Qt::QueuedConnection);
 connect(std::static_pointer_cast<EntityTree>(myServer->getOctree()).get(), &EntityTree::deletingEntityPointer, this, &EntityTreeSendThread::deletingEntityPointer, Qt::QueuedConnection);
+
+// connect to connection ID change on EntityNodeData so we can clear state for this receiver
+auto nodeData = static_cast<EntityNodeData*>(node->getLinkedData());
+connect(nodeData, &EntityNodeData::incomingConnectionIDChanged, this, &EntityTreeSendThread::resetState);
 }
 
+void EntityTreeSendThread::resetState() {
+qCDebug(entities) << "Clearing known EntityTreeSendThread state for" << _nodeUuid;
+
+_knownState.clear();
+_traversal.reset();
+}
+
 void EntityTreeSendThread::preDistributionProcessing() {
@@ -33,6 +33,9 @@
 void traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData,
 bool viewFrustumChanged, bool isFullScene) override;
 
+private slots:
+void resetState(); // clears our known state forcing entities to appear unsent
+
 private:
 // the following two methods return booleans to indicate if any extra flagged entities were new additions to set
 bool addAncestorsToExtraFlaggedEntities(const QUuid& filteredEntityID, EntityItem& entityItem, EntityNodeData& nodeData);
@@ -59,7 +59,8 @@ protected:
 OctreePacketData _packetData;
 QWeakPointer<Node> _node;
 OctreeServer* _myServer { nullptr };
 
+QUuid _nodeUuid;
 
 private:
 /// Called before a packetDistributor pass to allow for pre-distribution processing
 virtual void preDistributionProcessing() {};
@@ -71,8 +72,6 @@
 virtual void preStartNewScene(OctreeQueryNode* nodeData, bool isFullScene);
 virtual bool shouldTraverseAndSend(OctreeQueryNode* nodeData) { return hasSomethingToSend(nodeData); }
 
-QUuid _nodeUuid;
-
 int _truePacketsSent { 0 }; // available for debug stats
 int _trueBytesSent { 0 }; // available for debug stats
 int _packetsSentThisInterval { 0 }; // used for bandwidth throttle condition
@@ -80,11 +80,23 @@ span.port {
 display: none;
 }
 
-#setup-sidebar.affix {
-/* This overrides a case where going to the bottom of the page,
-* then scrolling up, causes `position: relative` to be added to the style
-*/
-position: fixed !important;
+@media (min-width: 768px) {
+#setup-sidebar.affix {
+/* This overrides a case where going to the bottom of the page,
+* then scrolling up, causes `position: relative` to be added to the style
+*/
+position: fixed !important;
+}
 }
+
+@media (max-width: 767px) {
+#setup-sidebar.affix {
+position: static !important;
+}
+
+#setup-sidebar {
+margin-bottom: 20px;
+}
+}
 
 #setup-sidebar button {
@@ -302,6 +314,7 @@ table .headers + .headers td {
 }
 
 .account-connected-header {
+vertical-align: middle;
 color: #6FCF97;
 font-size: 30px;
 margin-right: 20px;
@ -62,26 +62,25 @@ var Strings = {
|
|||
// dialog with new path still set, allowing them to retry immediately, and without
|
||||
// having to type the new path in again.
|
||||
EDIT_PLACE_TITLE: "Modify Viewpoint or Path",
|
||||
EDIT_PLACE_ERROR: "Failed to update place path. Please try again.",
|
||||
EDIT_PLACE_ERROR: "Failed to update Viewpoint or Path for this Place Name. Please try again.",
|
||||
EDIT_PLACE_CONFIRM_BUTTON: "Save",
|
||||
EDIT_PLACE_CONFIRM_BUTTON_PENDING: "Saving...",
|
||||
EDIT_PLACE_CANCEL_BUTTON: "Cancel",
|
||||
|
||||
REMOVE_PLACE_TITLE: "Are you sure you want to remove <strong>{{place}}</strong>?",
|
||||
REMOVE_PLACE_ERROR: "Failed to remove place. Please try again.",
|
||||
REMOVE_PLACE_DELETE_BUTTON: "Delete",
|
||||
REMOVE_PLACE_TITLE: "Are you sure you want to remove <strong>{{place}}</strong> and its path information?",
|
||||
REMOVE_PLACE_ERROR: "Failed to remove Place Name and its Path information.",
|
||||
REMOVE_PLACE_DELETE_BUTTON: "This action removes your Place Name",
|
||||
REMOVE_PLACE_DELETE_BUTTON_PENDING: "Deleting...",
|
||||
REMOVE_PLACE_CANCEL_BUTTON: "Cancel",
|
||||
|
||||
ADD_PLACE_TITLE: "Choose a place",
|
||||
ADD_PLACE_MESSAGE: "Choose the High Fidelity place to point at this domain server.",
|
||||
ADD_PLACE_CONFIRM_BUTTON: "Choose place",
|
||||
ADD_PLACE_MESSAGE: "Choose a Place Name that you own or register a new Place Name.",
|
||||
ADD_PLACE_CONFIRM_BUTTON: "Save",
|
||||
ADD_PLACE_CONFIRM_BUTTON_PENDING: "Saving...",
|
||||
ADD_PLACE_CANCEL_BUTTON: "Cancel",
|
||||
ADD_PLACE_UNKNOWN_ERROR: "There was an error adding this place name.",
|
||||
ADD_PLACE_UNKNOWN_ERROR: "There was an error adding this Place Name. Try saving again",
|
||||
|
||||
ADD_PLACE_NO_PLACES_MESSAGE: "<p>You do not have any places in your High Fidelity account."
|
||||
+ "<br/><br/>Go to your <a href='https://metaverse.highfidelity.com/user/places/new'>places page</a> to create a new one. Once your place is created re-open this dialog to select it.</p>",
|
||||
ADD_PLACE_NO_PLACES_MESSAGE: "You don't have any Place Names registered. Once you have a Place Name, reopen this window to select it.",
|
||||
ADD_PLACE_NO_PLACES_BUTTON: "Create new place",
|
||||
ADD_PLACE_UNABLE_TO_LOAD_ERROR: "We were unable to load your place names. Please try again later.",
|
||||
ADD_PLACE_LOADING_DIALOG: "Loading your places...",
|
||||
|
@ -236,7 +235,7 @@ function chooseFromHighFidelityPlaces(accessToken, forcePathTo, onSuccessfullyAd
|
|||
|
||||
if (forcePathTo === undefined || forcePathTo === null) {
|
||||
var path = "<div class='form-group'>";
|
||||
path += "<label for='place-path-input' class='control-label'>Path</label>";
|
||||
path += "<label for='place-path-input' class='control-label'>Path or Viewpoint</label>";
|
||||
path += "<input type='text' id='place-path-input' class='form-control' value='/'>";
|
||||
path += "</div>";
|
||||
modal_body.append($(path));
|
||||
|
@ -339,7 +338,6 @@ function chooseFromHighFidelityPlaces(accessToken, forcePathTo, onSuccessfullyAd
|
|||
$('.add-place-confirm-button').html(Strings.ADD_PLACE_CONFIRM_BUTTON);
|
||||
$('.add-place-cancel-button').removeAttr('disabled');
|
||||
bootbox.alert(Strings.ADD_PLACE_UNKNOWN_ERROR);
|
||||
bootbox.alert("FAIL");
|
||||
});
|
||||
}
|
||||
}
|
||||
|
@ -363,7 +361,8 @@ function chooseFromHighFidelityPlaces(accessToken, forcePathTo, onSuccessfullyAd
|
|||
title: Strings.ADD_PLACE_TITLE,
|
||||
message: modal_body,
|
||||
closeButton: false,
|
||||
buttons: modal_buttons
|
||||
buttons: modal_buttons,
|
||||
onEscape: true
|
||||
});
|
||||
} else {
|
||||
bootbox.alert(Strings.ADD_PLACE_UNABLE_TO_LOAD_ERROR);
|
||||
|
|
|
@ -36,11 +36,6 @@
|
|||
</div>
|
||||
|
||||
<div class="col-md-9 col-sm-9 col-xs-12">
|
||||
|
||||
<div id="xs-advanced-container" class="col-xs-12 hidden-sm hidden-md hidden-lg">
|
||||
<button id="advanced-toggle-button-xs" class="btn btn-info advanced-toggle">Show advanced</button>
|
||||
</div>
|
||||
|
||||
<div class="col-xs-12">
|
||||
|
||||
<div id="cloud-domains-alert" class="alert alert-info alert-dismissible" role="alert" style="display: none;">
|
||||
|
|
|
@ -503,7 +503,7 @@ function showDomainCreationAlert(justConnected) {
|
|||
swal({
|
||||
title: 'Create new domain ID',
|
||||
type: 'input',
|
||||
text: 'Enter a short description for this machine.</br></br>This will help you identify which domain ID belongs to which machine.</br></br>',
|
||||
text: 'Enter a label this machine.</br></br>This will help you identify which domain ID belongs to which machine.</br></br>',
|
||||
showCancelButton: true,
|
||||
confirmButtonText: "Create",
|
||||
closeOnConfirm: false,
|
||||
|
@ -527,13 +527,12 @@ function showDomainCreationAlert(justConnected) {
|
|||
function createNewDomainID(label, justConnected) {
|
||||
// get the JSON object ready that we'll use to create a new domain
|
||||
var domainJSON = {
|
||||
"label": label
|
||||
//"access_token": $(Settings.ACCESS_TOKEN_SELECTOR).val()
|
||||
"label": label
|
||||
}
|
||||
|
||||
$.post("/api/domains", domainJSON, function(data){
|
||||
// we successfully created a domain ID, set it on that field
|
||||
var domainID = data.domain_id;
|
||||
var domainID = data.domain.id;
|
||||
console.log("Setting domain id to ", data, domainID);
|
||||
$(Settings.DOMAIN_ID_SELECTOR).val(domainID).change();
|
||||
|
||||
|
@ -620,18 +619,14 @@ function parseJSONResponse(xhr) {
|
|||
|
||||
function showOrHideLabel() {
|
||||
var type = getCurrentDomainIDType();
|
||||
if (!accessTokenIsSet() || (type !== DOMAIN_ID_TYPE_FULL && type !== DOMAIN_ID_TYPE_UNKNOWN)) {
|
||||
$(".panel#label").hide();
|
||||
return false;
|
||||
}
|
||||
$(".panel#label").show();
|
||||
return true;
|
||||
var shouldShow = accessTokenIsSet() && (type === DOMAIN_ID_TYPE_FULL || type === DOMAIN_ID_TYPE_UNKNOWN);
|
||||
$(".panel#label").toggle(shouldShow);
|
||||
$("li a[href='#label']").parent().toggle(shouldShow);
|
||||
return shouldShow;
|
||||
}
|
||||
|
||||
function setupDomainLabelSetting() {
|
||||
if (!showOrHideLabel()) {
|
||||
return;
|
||||
}
|
||||
showOrHideLabel();
|
||||
|
||||
var html = "<div>"
|
||||
html += "<label class='control-label'>Specify a label for your domain</label> <a class='domain-loading-hide' href='#'>Edit</a>";
|
||||
|
@ -654,6 +649,7 @@ function setupDomainLabelSetting() {
|
|||
title: 'Edit Label',
|
||||
message: modal_body,
|
||||
closeButton: false,
|
||||
onEscape: true,
|
||||
buttons: [
|
||||
{
|
||||
label: 'Cancel',
|
||||
|
@ -742,7 +738,7 @@ function setupDomainNetworkingSettings() {
|
|||
var includeAddress = autoNetworkingSetting === 'disabled';
|
||||
|
||||
if (includeAddress) {
|
||||
var label = "Network Address and Port";
|
||||
var label = "Network Address:Port";
|
||||
} else {
|
||||
var label = "Network Port";
|
||||
}
|
||||
|
@ -777,6 +773,7 @@ function setupDomainNetworkingSettings() {
|
|||
title: 'Edit Network',
|
||||
message: modal_body,
|
||||
closeButton: false,
|
||||
onEscape: true,
|
||||
buttons: [
|
||||
{
|
||||
label: 'Cancel',
|
||||
|
@ -924,6 +921,7 @@ function placeTableRow(name, path, isTemporary, placeID) {
|
|||
var dialog = bootbox.dialog({
|
||||
message: confirmString,
|
||||
closeButton: false,
|
||||
onEscape: true,
|
||||
buttons: [
|
||||
{
|
||||
label: Strings.REMOVE_PLACE_CANCEL_BUTTON,
|
||||
|
@ -1025,7 +1023,9 @@ function reloadDomainInfo() {
|
|||
}
|
||||
}
|
||||
|
||||
appendAddButtonToPlacesTable();
|
||||
if (accessTokenIsSet()) {
|
||||
appendAddButtonToPlacesTable();
|
||||
}
|
||||
|
||||
} else {
|
||||
$('.domain-loading-error').show();
|
||||
|
@ -1098,6 +1098,7 @@ function editHighFidelityPlace(placeID, name, path) {
|
|||
dialog = bootbox.dialog({
|
||||
title: Strings.EDIT_PLACE_TITLE,
|
||||
closeButton: false,
|
||||
onEscape: true,
|
||||
message: modal_body,
|
||||
buttons: modal_buttons
|
||||
})
|
||||
|
@ -1180,6 +1181,7 @@ function chooseFromHighFidelityDomains(clickedButton) {
|
|||
|
||||
bootbox.dialog({
|
||||
title: "Choose matching domain",
|
||||
onEscape: true,
|
||||
message: modal_body,
|
||||
buttons: modal_buttons
|
||||
})
|
||||
|
|
|
@ -1,532 +0,0 @@
|
|||
//
|
||||
// AddressBarDialog.qml
|
||||
//
|
||||
// Created by Austin Davis on 2015/04/14
|
||||
// Copyright 2015 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import Hifi 1.0
|
||||
import QtQuick 2.4
|
||||
import "controls"
|
||||
import "styles"
|
||||
import "windows"
|
||||
import "hifi"
|
||||
import "hifi/toolbars"
|
||||
import "styles-uit" as HifiStyles
|
||||
import "controls-uit" as HifiControls
|
||||
|
||||
Window {
|
||||
id: root
|
||||
HifiConstants { id: hifi }
|
||||
HifiStyles.HifiConstants { id: hifiStyleConstants }
|
||||
|
||||
objectName: "AddressBarDialog"
|
||||
title: "Go To:"
|
||||
|
||||
shown: false
|
||||
destroyOnHidden: false
|
||||
resizable: false
|
||||
pinnable: false;
|
||||
|
||||
width: addressBarDialog.implicitWidth
|
||||
height: addressBarDialog.implicitHeight
|
||||
property int gap: 14
|
||||
|
||||
onShownChanged: {
|
||||
addressBarDialog.keyboardEnabled = HMD.active;
|
||||
addressBarDialog.observeShownChanged(shown);
|
||||
}
|
||||
Component.onCompleted: {
|
||||
root.parentChanged.connect(center);
|
||||
center();
|
||||
}
|
||||
Component.onDestruction: {
|
||||
root.parentChanged.disconnect(center);
|
||||
}
|
||||
|
||||
function center() {
|
||||
// Explicitly center in order to avoid warnings at shutdown
|
||||
anchors.centerIn = parent;
|
||||
}
|
||||
|
||||
function resetAfterTeleport() {
|
||||
storyCardFrame.shown = root.shown = false;
|
||||
}
|
||||
function goCard(targetString) {
|
||||
if (0 !== targetString.indexOf('hifi://')) {
|
||||
storyCardHTML.url = addressBarDialog.metaverseServerUrl + targetString;
|
||||
storyCardFrame.shown = true;
|
||||
return;
|
||||
}
|
||||
addressLine.text = targetString;
|
||||
toggleOrGo(true);
|
||||
clearAddressLineTimer.start();
|
||||
}
|
||||
property var allStories: [];
|
||||
property int cardWidth: 212;
|
||||
property int cardHeight: 152;
|
||||
property string metaverseBase: addressBarDialog.metaverseServerUrl + "/api/v1/";
|
||||
property bool isCursorVisible: false // Override default cursor visibility.
|
||||
|
||||
AddressBarDialog {
|
||||
id: addressBarDialog
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
implicitWidth: backgroundImage.width
|
||||
implicitHeight: scroll.height + gap + backgroundImage.height + (keyboardEnabled ? keyboard.height : 0);
|
||||
|
||||
// The buttons have their button state changed on hover, so we have to manually fix them up here
|
||||
onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0;
|
||||
onForwardEnabledChanged: forwardArrow.buttonState = addressBarDialog.forwardEnabled ? 1 : 0;
|
||||
onReceivedHifiSchemeURL: resetAfterTeleport();
|
||||
|
||||
// Update location after using back and forward buttons.
|
||||
onHostChanged: updateLocationTextTimer.start();
|
||||
|
||||
ListModel { id: suggestions }
|
||||
|
||||
ListView {
|
||||
id: scroll
|
||||
height: cardHeight + scroll.stackedCardShadowHeight
|
||||
property int stackedCardShadowHeight: 10;
|
||||
spacing: gap;
|
||||
clip: true;
|
||||
anchors {
|
||||
left: backgroundImage.left
|
||||
right: swipe.left
|
||||
bottom: backgroundImage.top
|
||||
}
|
||||
model: suggestions;
|
||||
orientation: ListView.Horizontal;
|
||||
delegate: Card {
|
||||
width: cardWidth;
|
||||
height: cardHeight;
|
||||
goFunction: goCard;
|
||||
userName: model.username;
|
||||
placeName: model.place_name;
|
||||
hifiUrl: model.place_name + model.path;
|
||||
thumbnail: model.thumbnail_url;
|
||||
imageUrl: model.image_url;
|
||||
action: model.action;
|
||||
timestamp: model.created_at;
|
||||
onlineUsers: model.online_users;
|
||||
storyId: model.metaverseId;
|
||||
drillDownToPlace: model.drillDownToPlace;
|
||||
shadowHeight: scroll.stackedCardShadowHeight;
|
||||
hoverThunk: function () { ListView.view.currentIndex = index; }
|
||||
unhoverThunk: function () { ListView.view.currentIndex = -1; }
|
||||
}
|
||||
highlightMoveDuration: -1;
|
||||
highlightMoveVelocity: -1;
|
||||
highlight: Rectangle { color: "transparent"; border.width: 4; border.color: hifiStyleConstants.colors.blueHighlight; z: 1; }
|
||||
}
|
||||
Image { // Just a visual indicator that the user can swipe the cards over to see more.
|
||||
id: swipe;
|
||||
source: "../images/swipe-chevron.svg";
|
||||
width: 72;
|
||||
visible: suggestions.count > 3;
|
||||
anchors {
|
||||
right: backgroundImage.right;
|
||||
top: scroll.top;
|
||||
}
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: scroll.currentIndex = (scroll.currentIndex < 0) ? 3 : (scroll.currentIndex + 3)
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
spacing: 2 * hifi.layout.spacing;
|
||||
anchors {
|
||||
top: parent.top;
|
||||
left: parent.left;
|
||||
leftMargin: 150;
|
||||
topMargin: -30;
|
||||
}
|
||||
property var selected: allTab;
|
||||
TextButton {
|
||||
id: allTab;
|
||||
text: "ALL";
|
||||
property string includeActions: 'snapshot,concurrency';
|
||||
selected: allTab === selectedTab;
|
||||
action: tabSelect;
|
||||
}
|
||||
TextButton {
|
||||
id: placeTab;
|
||||
text: "PLACES";
|
||||
property string includeActions: 'concurrency';
|
||||
selected: placeTab === selectedTab;
|
||||
action: tabSelect;
|
||||
}
|
||||
TextButton {
|
||||
id: snapsTab;
|
||||
text: "SNAPS";
|
||||
property string includeActions: 'snapshot';
|
||||
selected: snapsTab === selectedTab;
|
||||
action: tabSelect;
|
||||
}
|
||||
}
|
||||
|
||||
Image {
|
||||
id: backgroundImage
|
||||
source: "../images/address-bar-856.svg"
|
||||
width: 856
|
||||
height: 100
|
||||
anchors {
|
||||
bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom;
|
||||
}
|
||||
property int inputAreaHeight: 70
|
||||
property int inputAreaStep: (height - inputAreaHeight) / 2
|
||||
|
||||
ToolbarButton {
|
||||
id: homeButton
|
||||
imageURL: "../images/home.svg"
|
||||
onClicked: {
|
||||
addressBarDialog.loadHome();
|
||||
root.shown = false;
|
||||
}
|
||||
anchors {
|
||||
left: parent.left
|
||||
leftMargin: homeButton.width / 2
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
}
|
||||
|
||||
ToolbarButton {
|
||||
id: backArrow;
|
||||
imageURL: "../images/backward.svg";
|
||||
onClicked: addressBarDialog.loadBack();
|
||||
anchors {
|
||||
left: homeButton.right
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
}
|
||||
ToolbarButton {
|
||||
id: forwardArrow;
|
||||
imageURL: "../images/forward.svg";
|
||||
onClicked: addressBarDialog.loadForward();
|
||||
anchors {
|
||||
left: backArrow.right
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
}
|
||||
|
||||
HifiStyles.RalewayLight {
|
||||
id: notice;
|
||||
font.pixelSize: hifi.fonts.pixelSize * 0.50;
|
||||
anchors {
|
||||
top: parent.top
|
||||
topMargin: parent.inputAreaStep + 12
|
||||
left: addressLine.left
|
||||
right: addressLine.right
|
||||
}
|
||||
}
|
||||
HifiStyles.FiraSansRegular {
|
||||
id: location;
|
||||
font.pixelSize: addressLine.font.pixelSize;
|
||||
color: "gray";
|
||||
clip: true;
|
||||
anchors.fill: addressLine;
|
||||
visible: addressLine.text.length === 0
|
||||
}
|
||||
TextInput {
|
||||
id: addressLine
|
||||
focus: true
|
||||
anchors {
|
||||
top: parent.top
|
||||
bottom: parent.bottom
|
||||
left: forwardArrow.right
|
||||
right: parent.right
|
||||
leftMargin: forwardArrow.width
|
||||
rightMargin: forwardArrow.width / 2
|
||||
topMargin: parent.inputAreaStep + (2 * hifi.layout.spacing)
|
||||
bottomMargin: parent.inputAreaStep
|
||||
}
|
||||
font.pixelSize: hifi.fonts.pixelSize * 0.75
|
||||
cursorVisible: false
|
||||
onTextChanged: {
|
||||
filterChoicesByText();
|
||||
updateLocationText(text.length > 0);
|
||||
if (!isCursorVisible && text.length > 0) {
|
||||
isCursorVisible = true;
|
||||
cursorVisible = true;
|
||||
}
|
||||
}
|
||||
onActiveFocusChanged: {
|
||||
cursorVisible = isCursorVisible && focus;
|
||||
}
|
||||
MouseArea {
|
||||
// If user clicks in address bar show cursor to indicate ability to enter address.
|
||||
anchors.fill: parent
|
||||
onClicked: {
|
||||
isCursorVisible = true;
|
||||
parent.cursorVisible = true;
|
||||
parent.forceActiveFocus();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Timer {
|
||||
// Delay updating location text a bit to avoid flicker of content and so that connection status is valid.
|
||||
id: updateLocationTextTimer
|
||||
running: false
|
||||
interval: 500 // ms
|
||||
repeat: false
|
||||
onTriggered: updateLocationText(false);
|
||||
}
|
||||
|
||||
Timer {
|
||||
// Delay clearing address line so as to avoid flicker of "not connected" being displayed after entering an address.
|
||||
id: clearAddressLineTimer
|
||||
running: false
|
||||
interval: 100 // ms
|
||||
repeat: false
|
||||
onTriggered: {
|
||||
addressLine.text = "";
|
||||
isCursorVisible = false;
|
||||
}
|
||||
}
|
||||
|
||||
Window {
|
||||
width: 938
|
||||
height: 625
|
||||
HifiControls.WebView {
|
||||
anchors.fill: parent;
|
||||
id: storyCardHTML;
|
||||
}
|
||||
id: storyCardFrame;
|
||||
|
||||
shown: false;
|
||||
destroyOnCloseButton: false;
|
||||
pinnable: false;
|
||||
|
||||
anchors {
|
||||
verticalCenter: backgroundImage.verticalCenter;
|
||||
horizontalCenter: scroll.horizontalCenter;
|
||||
}
|
||||
z: 100
|
||||
}
|
||||
|
||||
HifiControls.Keyboard {
|
||||
id: keyboard
|
||||
raised: parent.keyboardEnabled // Ignore keyboardRaised; keep keyboard raised if enabled (i.e., in HMD).
|
||||
numeric: parent.punctuationMode
|
||||
anchors {
|
||||
bottom: parent.bottom
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function getRequest(url, cb) { // cb(error, responseOfCorrectContentType) of url. General for 'get' text/html/json, but without redirects.
|
||||
// TODO: make available to other .qml.
|
||||
var request = new XMLHttpRequest();
|
||||
// QT bug: apparently doesn't handle onload. Workaround using readyState.
|
||||
request.onreadystatechange = function () {
|
||||
var READY_STATE_DONE = 4;
|
||||
var HTTP_OK = 200;
|
||||
if (request.readyState >= READY_STATE_DONE) {
|
||||
var error = (request.status !== HTTP_OK) && request.status.toString() + ':' + request.statusText,
|
||||
response = !error && request.responseText,
|
||||
contentType = !error && request.getResponseHeader('content-type');
|
||||
if (!error && contentType.indexOf('application/json') === 0) {
|
||||
try {
|
||||
response = JSON.parse(response);
|
||||
} catch (e) {
|
||||
error = e;
|
||||
}
|
||||
}
|
||||
cb(error, response);
|
||||
}
|
||||
};
|
||||
request.open("GET", url, true);
|
||||
request.send();
|
||||
}
|
||||
|
||||
function identity(x) {
|
||||
return x;
|
||||
}
|
||||
|
||||
function handleError(url, error, data, cb) { // cb(error) and answer truthy if needed, else falsey
|
||||
if (!error && (data.status === 'success')) {
|
||||
return;
|
||||
}
|
||||
if (!error) { // Create a message from the data
|
||||
error = data.status + ': ' + data.error;
|
||||
}
|
||||
if (typeof(error) === 'string') { // Make a proper Error object
|
||||
error = new Error(error);
|
||||
}
|
||||
error.message += ' in ' + url; // Include the url.
|
||||
cb(error);
|
||||
return true;
|
||||
}
|
||||
function resolveUrl(url) {
|
||||
return (url.indexOf('/') === 0) ? (addressBarDialog.metaverseServerUrl + url) : url;
|
||||
}
|
||||
|
||||
function makeModelData(data) { // create a new obj from data
|
||||
// ListModel elements will only ever have those properties that are defined by the first obj that is added.
|
||||
// So here we make sure that we have all the properties we need, regardless of whether it is a place data or user story.
|
||||
var name = data.place_name,
|
||||
tags = data.tags || [data.action, data.username],
|
||||
description = data.description || "",
|
||||
thumbnail_url = data.thumbnail_url || "";
|
||||
return {
|
||||
place_name: name,
|
||||
username: data.username || "",
|
||||
path: data.path || "",
|
||||
created_at: data.created_at || "",
|
||||
action: data.action || "",
|
||||
thumbnail_url: resolveUrl(thumbnail_url),
|
||||
image_url: resolveUrl(data.details.image_url),
|
||||
|
||||
metaverseId: (data.id || "").toString(), // Some are strings from server while others are numbers. Model objects require uniformity.
|
||||
|
||||
tags: tags,
|
||||
description: description,
|
||||
online_users: data.details.concurrency || 0,
|
||||
drillDownToPlace: false,
|
||||
|
||||
searchText: [name].concat(tags, description || []).join(' ').toUpperCase()
|
||||
}
|
||||
}
|
||||
function suggestable(place) {
|
||||
if (place.action === 'snapshot') {
|
||||
return true;
|
||||
}
|
||||
return (place.place_name !== AddressManager.placename); // Not our entry, but do show other entry points to current domain.
|
||||
}
|
||||
property var selectedTab: allTab;
|
||||
function tabSelect(textButton) {
|
||||
selectedTab = textButton;
|
||||
fillDestinations();
|
||||
}
|
||||
property var placeMap: ({});
|
||||
function addToSuggestions(place) {
|
||||
var collapse = allTab.selected && (place.action !== 'concurrency');
|
||||
if (collapse) {
|
||||
var existing = placeMap[place.place_name];
|
||||
if (existing) {
|
||||
existing.drillDownToPlace = true;
|
||||
return;
|
||||
}
|
||||
}
|
||||
suggestions.append(place);
|
||||
if (collapse) {
|
||||
placeMap[place.place_name] = suggestions.get(suggestions.count - 1);
|
||||
} else if (place.action === 'concurrency') {
|
||||
suggestions.get(suggestions.count - 1).drillDownToPlace = true; // Don't change raw place object (in allStories).
|
||||
}
|
||||
}
|
||||
property int requestId: 0;
|
||||
function getUserStoryPage(pageNumber, cb) { // cb(error) after all pages of domain data have been added to model
|
||||
var options = [
|
||||
'now=' + new Date().toISOString(),
|
||||
'include_actions=' + selectedTab.includeActions,
|
||||
'restriction=' + (Account.isLoggedIn() ? 'open,hifi' : 'open'),
|
||||
'require_online=true',
|
||||
'protocol=' + encodeURIComponent(AddressManager.protocolVersion()),
|
||||
'page=' + pageNumber
|
||||
];
|
||||
var url = metaverseBase + 'user_stories?' + options.join('&');
|
||||
var thisRequestId = ++requestId;
|
||||
getRequest(url, function (error, data) {
|
||||
if ((thisRequestId !== requestId) || handleError(url, error, data, cb)) {
|
||||
return;
|
||||
}
|
||||
var stories = data.user_stories.map(function (story) { // explicit single-argument function
|
||||
return makeModelData(story, url);
|
||||
});
|
||||
allStories = allStories.concat(stories);
|
||||
stories.forEach(makeFilteredPlaceProcessor());
|
||||
if ((data.current_page < data.total_pages) && (data.current_page <= 10)) { // just 10 pages = 100 stories for now
|
||||
return getUserStoryPage(pageNumber + 1, cb);
|
||||
}
|
||||
cb();
|
||||
});
|
||||
}
|
||||
function makeFilteredPlaceProcessor() { // answer a function(placeData) that adds it to suggestions if it matches
|
||||
var words = addressLine.text.toUpperCase().split(/\s+/).filter(identity),
|
||||
data = allStories;
|
||||
function matches(place) {
|
||||
if (!words.length) {
|
||||
return suggestable(place);
|
||||
}
|
||||
return words.every(function (word) {
|
||||
return place.searchText.indexOf(word) >= 0;
|
||||
});
|
||||
}
|
||||
return function (place) {
|
||||
if (matches(place)) {
|
||||
addToSuggestions(place);
|
||||
}
|
||||
};
|
||||
}
|
||||
function filterChoicesByText() {
|
||||
suggestions.clear();
|
||||
placeMap = {};
|
||||
allStories.forEach(makeFilteredPlaceProcessor());
|
||||
}
|
||||
|
||||
function fillDestinations() {
|
||||
allStories = [];
|
||||
suggestions.clear();
|
||||
placeMap = {};
|
||||
getUserStoryPage(1, function (error) {
|
||||
console.log('user stories query', error || 'ok', allStories.length);
|
||||
});
|
||||
}
|
||||
|
||||
function updateLocationText(enteringAddress) {
|
||||
if (enteringAddress) {
|
||||
notice.text = "Go to a place, @user, path or network address";
|
||||
notice.color = hifiStyleConstants.colors.baseGrayHighlight;
|
||||
} else {
|
||||
notice.text = AddressManager.isConnected ? "Your location:" : "Not Connected";
|
||||
notice.color = AddressManager.isConnected ? hifiStyleConstants.colors.baseGrayHighlight : hifiStyleConstants.colors.redHighlight;
|
||||
// Display hostname, which includes ip address, localhost, and other non-placenames.
|
||||
location.text = (AddressManager.placename || AddressManager.hostname || '') + (AddressManager.pathname ? AddressManager.pathname.match(/\/[^\/]+/)[0] : '');
|
||||
}
|
||||
}
|
||||
|
||||
onVisibleChanged: {
|
||||
updateLocationText(false);
|
||||
if (visible) {
|
||||
addressLine.forceActiveFocus();
|
||||
fillDestinations();
|
||||
}
|
||||
}
|
||||
|
||||
function toggleOrGo(fromSuggestions) {
|
||||
if (addressLine.text !== "") {
|
||||
addressBarDialog.loadAddress(addressLine.text, fromSuggestions)
|
||||
}
|
||||
root.shown = false;
|
||||
}
|
||||
|
||||
Keys.onPressed: {
|
||||
switch (event.key) {
|
||||
case Qt.Key_Escape:
|
||||
case Qt.Key_Back:
|
||||
root.shown = false
|
||||
clearAddressLineTimer.start();
|
||||
event.accepted = true
|
||||
break
|
||||
case Qt.Key_Enter:
|
||||
case Qt.Key_Return:
|
||||
toggleOrGo()
|
||||
clearAddressLineTimer.start();
|
||||
event.accepted = true
|
||||
break
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -135,4 +135,10 @@ Item {
 playing: visible
 z: 10000
 }
+
+Keys.onPressed: {
+if ((event.modifiers & Qt.ShiftModifier) && (event.modifiers & Qt.ControlModifier)) {
+webViewCore.focus = false;
+}
+}
 }
@ -442,7 +442,7 @@ Item {
|
|||
Rectangle {
|
||||
id: nameCardVUMeter
|
||||
// Size
|
||||
width: isMyCard ? myDisplayName.width - 20 : ((gainSlider.value - gainSlider.minimumValue)/(gainSlider.maximumValue - gainSlider.minimumValue)) * (gainSlider.width);
|
||||
width: ((gainSlider.value - gainSlider.minimumValue)/(gainSlider.maximumValue - gainSlider.minimumValue)) * (gainSlider.width);
|
||||
height: 8
|
||||
// Anchors
|
||||
anchors.bottom: isMyCard ? avatarImage.bottom : parent.bottom;
|
||||
|
@ -526,16 +526,14 @@ Item {
|
|||
anchors.verticalCenter: nameCardVUMeter.verticalCenter;
|
||||
anchors.left: nameCardVUMeter.left;
|
||||
// Properties
|
||||
visible: !isMyCard && selected && pal.activeTab == "nearbyTab" && isPresent;
|
||||
visible: (isMyCard || (selected && pal.activeTab == "nearbyTab")) && isPresent;
|
||||
value: Users.getAvatarGain(uuid)
|
||||
minimumValue: -60.0
|
||||
maximumValue: 20.0
|
||||
stepSize: 5
|
||||
updateValueWhileDragging: true
|
||||
onValueChanged: {
|
||||
if (uuid !== "") {
|
||||
updateGainFromQML(uuid, value, false);
|
||||
}
|
||||
updateGainFromQML(uuid, value, false);
|
||||
}
|
||||
onPressedChanged: {
|
||||
if (!pressed) {
|
||||
|
@ -575,7 +573,19 @@ Item {
|
|||
implicitHeight: 16
|
||||
}
|
||||
}
|
||||
}
|
||||
RalewayRegular {
|
||||
// The slider for my card is special, it controls the master gain
|
||||
id: gainSliderText;
|
||||
visible: isMyCard;
|
||||
text: "master volume";
|
||||
size: hifi.fontSizes.tabularData;
|
||||
anchors.left: parent.right;
|
||||
anchors.leftMargin: 8;
|
||||
color: hifi.colors.baseGrayHighlight;
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignTop;
|
||||
}
|
||||
}
|
||||
|
||||
function updateGainFromQML(avatarUuid, sliderValue, isReleased) {
|
||||
Users.setAvatarGain(avatarUuid, sliderValue);
|
||||
|
|
|
@@ -640,7 +640,8 @@ Rectangle {
 if (purchasesModel.get(i).title.toLowerCase().indexOf(filterBar.text.toLowerCase()) !== -1) {
 if (purchasesModel.get(i).status !== "confirmed" && !root.isShowingMyItems) {
 filteredPurchasesModel.insert(0, purchasesModel.get(i));
-} else if ((root.isShowingMyItems && purchasesModel.get(i).edition_number === -1) || !root.isShowingMyItems) {
+} else if ((root.isShowingMyItems && purchasesModel.get(i).edition_number === "0") ||
+(!root.isShowingMyItems && purchasesModel.get(i).edition_number !== "0")) {
 filteredPurchasesModel.append(purchasesModel.get(i));
 }
 }
@ -11,6 +11,7 @@
|
|||
import Hifi 1.0
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import QtQuick.Controls.Styles 1.4
|
||||
import QtGraphicalEffects 1.0
|
||||
import "../../controls"
|
||||
import "../../styles"
|
||||
|
@ -83,7 +84,6 @@ StackView {
|
|||
anchors.centerIn = parent;
|
||||
}
|
||||
|
||||
|
||||
function resetAfterTeleport() {
|
||||
//storyCardFrame.shown = root.shown = false;
|
||||
}
|
||||
|
@ -134,7 +134,8 @@ StackView {
|
|||
bottom: parent.bottom
|
||||
}
|
||||
|
||||
onHostChanged: updateLocationTextTimer.start();
|
||||
onHostChanged: updateLocationTextTimer.restart();
|
||||
|
||||
Rectangle {
|
||||
id: navBar
|
||||
width: parent.width
|
||||
|
@ -205,16 +206,16 @@ StackView {
|
|||
anchors {
|
||||
top: parent.top;
|
||||
left: addressLineContainer.left;
|
||||
right: addressLineContainer.right;
|
||||
}
|
||||
}
|
||||
|
||||
HifiStyles.FiraSansRegular {
|
||||
id: location;
|
||||
anchors {
|
||||
left: addressLineContainer.left;
|
||||
leftMargin: 8;
|
||||
verticalCenter: addressLineContainer.verticalCenter;
|
||||
left: notice.right
|
||||
leftMargin: 8
|
||||
right: addressLineContainer.right
|
||||
verticalCenter: notice.verticalCenter
|
||||
}
|
||||
font.pixelSize: addressLine.font.pixelSize;
|
||||
color: "gray";
|
||||
|
@ -222,7 +223,7 @@ StackView {
|
|||
visible: addressLine.text.length === 0
|
||||
}
|
||||
|
||||
TextInput {
|
||||
TextField {
|
||||
id: addressLine
|
||||
width: addressLineContainer.width - addressLineContainer.anchors.leftMargin - addressLineContainer.anchors.rightMargin;
|
||||
anchors {
|
||||
|
@ -230,7 +231,6 @@ StackView {
|
|||
leftMargin: 8;
|
||||
verticalCenter: addressLineContainer.verticalCenter;
|
||||
}
|
||||
font.pixelSize: hifi.fonts.pixelSize * 0.75
|
||||
onTextChanged: {
|
||||
updateLocationText(text.length > 0);
|
||||
}
|
||||
|
@ -238,6 +238,17 @@ StackView {
|
|||
addressBarDialog.keyboardEnabled = false;
|
||||
toggleOrGo();
|
||||
}
|
||||
placeholderText: "Type domain address here"
|
||||
verticalAlignment: TextInput.AlignBottom
|
||||
style: TextFieldStyle {
|
||||
textColor: hifi.colors.text
|
||||
placeholderTextColor: "gray"
|
||||
font {
|
||||
family: hifi.fonts.fontFamily
|
||||
pixelSize: hifi.fonts.pixelSize * 0.75
|
||||
}
|
||||
background: Item {}
|
||||
}
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
|
@ -347,7 +358,7 @@ StackView {
|
|||
// Delay updating location text a bit to avoid flicker of content and so that connection status is valid.
|
||||
id: updateLocationTextTimer
|
||||
running: false
|
||||
interval: 500 // ms
|
||||
interval: 1000 // ms
|
||||
repeat: false
|
||||
onTriggered: updateLocationText(false);
|
||||
}
|
||||
|
|
|
@@ -1392,7 +1392,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
 // Make sure we don't time out during slow operations at startup
 updateHeartbeat();
 
-
 QTimer* settingsTimer = new QTimer();
 moveToNewNamedThread(settingsTimer, "Settings Thread", [this, settingsTimer]{
 connect(qApp, &Application::beforeAboutToQuit, [this, settingsTimer]{
@@ -4482,8 +4481,11 @@ void Application::resetPhysicsReadyInformation() {
 
 void Application::reloadResourceCaches() {
 resetPhysicsReadyInformation();
 
 // Query the octree to refresh everything in view
 _lastQueriedTime = 0;
+_octreeQuery.incrementConnectionID();
 
 queryOctree(NodeType::EntityServer, PacketType::EntityQuery, _entityServerJurisdictions);
 
 DependencyManager::get<AssetClient>()->clearCache();
@@ -5544,6 +5546,7 @@ void Application::nodeActivated(SharedNodePointer node) {
 // so we will do a proper query during update
 if (node->getType() == NodeType::EntityServer) {
 _lastQueriedTime = 0;
+_octreeQuery.incrementConnectionID();
 }
 
 if (node->getType() == NodeType::AudioMixer) {
@@ -543,7 +543,7 @@ private:
 ViewFrustum _displayViewFrustum;
 quint64 _lastQueriedTime;
 
-OctreeQuery _octreeQuery; // NodeData derived class for querying octee cells from octree servers
+OctreeQuery _octreeQuery { true }; // NodeData derived class for querying octee cells from octree servers
 
 std::shared_ptr<controller::StateController> _applicationStateDevice; // Default ApplicationDevice reflecting the state of different properties of the session
 std::shared_ptr<KeyboardMouseDevice> _keyboardMouseDevice; // Default input device, the good old keyboard mouse and maybe touchpad
@@ -40,6 +40,10 @@ AddressBarDialog::AddressBarDialog(QQuickItem* parent) : OffscreenQmlDialog(pare
 _backEnabled = !(DependencyManager::get<AddressManager>()->getBackStack().isEmpty());
 _forwardEnabled = !(DependencyManager::get<AddressManager>()->getForwardStack().isEmpty());
 connect(addressManager.data(), &AddressManager::hostChanged, this, &AddressBarDialog::hostChanged);
+auto nodeList = DependencyManager::get<NodeList>();
+const DomainHandler& domainHandler = nodeList->getDomainHandler();
+connect(&domainHandler, &DomainHandler::connectedToDomain, this, &AddressBarDialog::hostChanged);
+connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &AddressBarDialog::hostChanged);
 connect(DependencyManager::get<DialogsManager>().data(), &DialogsManager::setUseFeed, this, &AddressBarDialog::setUseFeed);
 connect(qApp, &Application::receivedHifiSchemeURL, this, &AddressBarDialog::receivedHifiSchemeURL);
 }
@@ -57,7 +57,8 @@ ContextOverlayInterface::ContextOverlayInterface() {
 auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
 glm::quat cameraOrientation = qApp->getCamera().getOrientation();
 QVariantMap props;
-props.insert("position", vec3toVariant(myAvatar->getEyePosition() + glm::quat(glm::radians(glm::vec3(0.0f, CONTEXT_OVERLAY_TABLET_OFFSET, 0.0f))) * (CONTEXT_OVERLAY_TABLET_DISTANCE * (cameraOrientation * Vectors::FRONT))));
+float sensorToWorldScale = myAvatar->getSensorToWorldScale();
+props.insert("position", vec3toVariant(myAvatar->getEyePosition() + glm::quat(glm::radians(glm::vec3(0.0f, CONTEXT_OVERLAY_TABLET_OFFSET, 0.0f))) * ((CONTEXT_OVERLAY_TABLET_DISTANCE * sensorToWorldScale) * (cameraOrientation * Vectors::FRONT))));
 props.insert("orientation", quatToVariant(cameraOrientation * glm::quat(glm::radians(glm::vec3(0.0f, CONTEXT_OVERLAY_TABLET_ORIENTATION, 0.0f)))));
 qApp->getOverlays().editOverlay(tabletFrameID, props);
 _contextOverlayJustClicked = false;
@@ -673,8 +673,8 @@ static void crossfade_4x2(float* src, float* dst, const float* win, int numFrame
 // linear interpolation with gain
 static void interpolate(float* dst, const float* src0, const float* src1, float frac, float gain) {
 
-float f0 = HRTF_GAIN * gain * (1.0f - frac);
-float f1 = HRTF_GAIN * gain * frac;
+float f0 = gain * (1.0f - frac);
+float f1 = gain * frac;
 
 for (int k = 0; k < HRTF_TAPS; k++) {
 dst[k] = f0 * src0[k] + f1 * src1[k];
@@ -9,7 +9,7 @@
 // See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
 //
 
-#if defined(__AVX512F__)
+#ifdef __AVX512F__
 
 #include <assert.h>
 #include <immintrin.h>
@@ -87,15 +87,4 @@ void FIR_1x4_AVX512(float* src, float* dst0, float* dst1, float* dst2, float* ds
 _mm256_zeroupper();
 }
 
-// FIXME: this fallback can be removed, once we require VS2017
-#elif defined(_M_IX86) || defined(_M_X64) || defined(__i386__) || defined(__x86_64__)
-
-#include "../AudioHRTF.h"
-
-void FIR_1x4_AVX2(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames);
-
-void FIR_1x4_AVX512(float* src, float* dst0, float* dst1, float* dst2, float* dst3, float coef[4][HRTF_TAPS], int numFrames) {
-FIR_1x4_AVX2(src, dst0, dst1, dst2, dst3, coef, numFrames);
-}
-
 #endif
@ -65,7 +65,9 @@ QHash<QString, QString> HTTPConnection::parseUrlEncodedForm() {
QUrlQuery form { _requestContent };
QHash<QString, QString> pairs;
for (auto pair : form.queryItems()) {
    pairs[QUrl::fromPercentEncoding(pair.first.toLatin1())] = QUrl::fromPercentEncoding(pair.second.toLatin1());
    auto key = QUrl::fromPercentEncoding(pair.first.toLatin1().replace('+', ' '));
    auto value = QUrl::fromPercentEncoding(pair.second.toLatin1().replace('+', ' '));
    pairs[key] = value;
}

return pairs;
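For context on the fix above: in application/x-www-form-urlencoded data a '+' encodes a space, and QUrl::fromPercentEncoding does not perform that substitution on its own, so the plus signs must be replaced before decoding. A small self-contained sketch with a made-up form value:

```cpp
#include <QByteArray>
#include <QUrl>
#include <QDebug>

int main() {
    QByteArray raw = "first+name%3A+A%2BB";  // hypothetical form-encoded value

    // Decoding directly leaves the '+' characters in place: "first+name:+A+B"
    qDebug() << QUrl::fromPercentEncoding(raw);

    // Replacing '+' with ' ' before decoding yields the intended "first name: A+B"
    qDebug() << QUrl::fromPercentEncoding(QByteArray(raw).replace('+', ' '));
    return 0;
}
```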
@ -73,6 +73,8 @@ public:
void setScanCallback(std::function<void (VisibleElement&)> cb);
void traverse(uint64_t timeBudget);

void reset() { _path.clear(); _completedView.startTime = 0; } // resets our state to force a new "First" traversal

private:
void getNextVisibleElement(VisibleElement& next);
@ -97,7 +97,8 @@ bool operator==(const Properties& a, const Properties& b) {
(a.maxParticles == b.maxParticles) &&
(a.emission == b.emission) &&
(a.polar == b.polar) &&
(a.azimuth == b.azimuth);
(a.azimuth == b.azimuth) &&
(a.textures == b.textures);
}

bool operator!=(const Properties& a, const Properties& b) {
1380  libraries/fbx/src/GLTFReader.cpp  (Normal file)
File diff suppressed because it is too large
786  libraries/fbx/src/GLTFReader.h  (Normal file)
@ -0,0 +1,786 @@
|
|||
//
|
||||
// GLTFReader.h
|
||||
// libraries/fbx/src
|
||||
//
|
||||
// Created by Luis Cuenca on 8/30/17.
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_GLTFReader_h
|
||||
#define hifi_GLTFReader_h
|
||||
|
||||
#include <memory.h>
|
||||
#include <QtNetwork/QNetworkReply>
|
||||
#include "ModelFormatLogging.h"
|
||||
#include "FBXReader.h"
|
||||
|
||||
|
||||
struct GLTFAsset {
|
||||
QString generator;
|
||||
QString version; //required
|
||||
QString copyright;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["generator"]) {
|
||||
qCDebug(modelformat) << "generator: " << generator;
|
||||
}
|
||||
if (defined["version"]) {
|
||||
qCDebug(modelformat) << "version: " << version;
|
||||
}
|
||||
if (defined["copyright"]) {
|
||||
qCDebug(modelformat) << "copyright: " << copyright;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFNode {
|
||||
QString name;
|
||||
int camera;
|
||||
int mesh;
|
||||
QVector<int> children;
|
||||
QVector<double> translation;
|
||||
QVector<double> rotation;
|
||||
QVector<double> scale;
|
||||
QVector<double> matrix;
|
||||
QVector<glm::mat4> transforms;
|
||||
int skin;
|
||||
QVector<int> skeletons;
|
||||
QString jointName;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["name"]) {
|
||||
qCDebug(modelformat) << "name: " << name;
|
||||
}
|
||||
if (defined["camera"]) {
|
||||
qCDebug(modelformat) << "camera: " << camera;
|
||||
}
|
||||
if (defined["mesh"]) {
|
||||
qCDebug(modelformat) << "mesh: " << mesh;
|
||||
}
|
||||
if (defined["skin"]) {
|
||||
qCDebug(modelformat) << "skin: " << skin;
|
||||
}
|
||||
if (defined["jointName"]) {
|
||||
qCDebug(modelformat) << "jointName: " << jointName;
|
||||
}
|
||||
if (defined["children"]) {
|
||||
qCDebug(modelformat) << "children: " << children;
|
||||
}
|
||||
if (defined["translation"]) {
|
||||
qCDebug(modelformat) << "translation: " << translation;
|
||||
}
|
||||
if (defined["rotation"]) {
|
||||
qCDebug(modelformat) << "rotation: " << rotation;
|
||||
}
|
||||
if (defined["scale"]) {
|
||||
qCDebug(modelformat) << "scale: " << scale;
|
||||
}
|
||||
if (defined["matrix"]) {
|
||||
qCDebug(modelformat) << "matrix: " << matrix;
|
||||
}
|
||||
if (defined["skeletons"]) {
|
||||
qCDebug(modelformat) << "skeletons: " << skeletons;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Meshes
|
||||
|
||||
struct GLTFMeshPrimitivesTarget {
|
||||
int normal;
|
||||
int position;
|
||||
int tangent;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["normal"]) {
|
||||
qCDebug(modelformat) << "normal: " << normal;
|
||||
}
|
||||
if (defined["position"]) {
|
||||
qCDebug(modelformat) << "position: " << position;
|
||||
}
|
||||
if (defined["tangent"]) {
|
||||
qCDebug(modelformat) << "tangent: " << tangent;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
namespace GLTFMeshPrimitivesRenderingMode {
|
||||
enum Values {
|
||||
POINTS = 0,
|
||||
LINES,
|
||||
LINE_LOOP,
|
||||
LINE_STRIP,
|
||||
TRIANGLES,
|
||||
TRIANGLE_STRIP,
|
||||
TRIANGLE_FAN
|
||||
};
|
||||
}
|
||||
|
||||
struct GLTFMeshPrimitiveAttr {
|
||||
QMap<QString, int> values;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
QList<QString> keys = values.keys();
|
||||
qCDebug(modelformat) << "values: ";
|
||||
foreach(auto k, keys) {
|
||||
qCDebug(modelformat) << k << ": " << values[k];
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFMeshPrimitive {
|
||||
GLTFMeshPrimitiveAttr attributes;
|
||||
int indices;
|
||||
int material;
|
||||
int mode{ GLTFMeshPrimitivesRenderingMode::TRIANGLES };
|
||||
QVector<GLTFMeshPrimitiveAttr> targets;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["attributes"]) {
|
||||
qCDebug(modelformat) << "attributes: ";
|
||||
attributes.dump();
|
||||
}
|
||||
if (defined["indices"]) {
|
||||
qCDebug(modelformat) << "indices: " << indices;
|
||||
}
|
||||
if (defined["material"]) {
|
||||
qCDebug(modelformat) << "material: " << material;
|
||||
}
|
||||
if (defined["mode"]) {
|
||||
qCDebug(modelformat) << "mode: " << mode;
|
||||
}
|
||||
if (defined["targets"]) {
|
||||
qCDebug(modelformat) << "targets: ";
|
||||
foreach(auto t, targets) t.dump();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFMesh {
|
||||
QString name;
|
||||
QVector<GLTFMeshPrimitive> primitives;
|
||||
QVector<double> weights;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["name"]) {
|
||||
qCDebug(modelformat) << "name: " << name;
|
||||
}
|
||||
if (defined["primitives"]) {
|
||||
qCDebug(modelformat) << "primitives: ";
|
||||
foreach(auto prim, primitives) prim.dump();
|
||||
}
|
||||
if (defined["weights"]) {
|
||||
qCDebug(modelformat) << "weights: " << weights;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// BufferViews
|
||||
|
||||
namespace GLTFBufferViewTarget {
|
||||
enum Values {
|
||||
ARRAY_BUFFER = 34962,
|
||||
ELEMENT_ARRAY_BUFFER = 34963
|
||||
};
|
||||
}
|
||||
|
||||
struct GLTFBufferView {
|
||||
int buffer; //required
|
||||
int byteLength; //required
|
||||
int byteOffset;
|
||||
int target;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["buffer"]) {
|
||||
qCDebug(modelformat) << "buffer: " << buffer;
|
||||
}
|
||||
if (defined["byteLength"]) {
|
||||
qCDebug(modelformat) << "byteLength: " << byteLength;
|
||||
}
|
||||
if (defined["byteOffset"]) {
|
||||
qCDebug(modelformat) << "byteOffset: " << byteOffset;
|
||||
}
|
||||
if (defined["target"]) {
|
||||
qCDebug(modelformat) << "target: " << target;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Buffers
|
||||
|
||||
struct GLTFBuffer {
|
||||
int byteLength; //required
|
||||
QString uri;
|
||||
QByteArray blob;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["byteLength"]) {
|
||||
qCDebug(modelformat) << "byteLength: " << byteLength;
|
||||
}
|
||||
if (defined["uri"]) {
|
||||
qCDebug(modelformat) << "uri: " << uri;
|
||||
}
|
||||
if (defined["blob"]) {
|
||||
qCDebug(modelformat) << "blob: " << "DEFINED";
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Samplers
|
||||
namespace GLTFSamplerFilterType {
|
||||
enum Values {
|
||||
NEAREST = 9728,
|
||||
LINEAR = 9729,
|
||||
NEAREST_MIPMAP_NEAREST = 9984,
|
||||
LINEAR_MIPMAP_NEAREST = 9985,
|
||||
NEAREST_MIPMAP_LINEAR = 9986,
|
||||
LINEAR_MIPMAP_LINEAR = 9987
|
||||
};
|
||||
}
|
||||
|
||||
namespace GLTFSamplerWrapType {
|
||||
enum Values {
|
||||
CLAMP_TO_EDGE = 33071,
|
||||
MIRRORED_REPEAT = 33648,
|
||||
REPEAT = 10497
|
||||
};
|
||||
}
|
||||
|
||||
struct GLTFSampler {
|
||||
int magFilter;
|
||||
int minFilter;
|
||||
int wrapS;
|
||||
int wrapT;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["magFilter"]) {
|
||||
qCDebug(modelformat) << "magFilter: " << magFilter;
|
||||
}
|
||||
if (defined["minFilter"]) {
|
||||
qCDebug(modelformat) << "minFilter: " << minFilter;
|
||||
}
|
||||
if (defined["wrapS"]) {
|
||||
qCDebug(modelformat) << "wrapS: " << wrapS;
|
||||
}
|
||||
if (defined["wrapT"]) {
|
||||
qCDebug(modelformat) << "wrapT: " << wrapT;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Cameras
|
||||
|
||||
struct GLTFCameraPerspective {
|
||||
double aspectRatio;
|
||||
double yfov; //required
|
||||
double zfar;
|
||||
double znear; //required
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["zfar"]) {
|
||||
qCDebug(modelformat) << "zfar: " << zfar;
|
||||
}
|
||||
if (defined["znear"]) {
|
||||
qCDebug(modelformat) << "znear: " << znear;
|
||||
}
|
||||
if (defined["aspectRatio"]) {
|
||||
qCDebug(modelformat) << "aspectRatio: " << aspectRatio;
|
||||
}
|
||||
if (defined["yfov"]) {
|
||||
qCDebug(modelformat) << "yfov: " << yfov;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFCameraOrthographic {
|
||||
double zfar; //required
|
||||
double znear; //required
|
||||
double xmag; //required
|
||||
double ymag; //required
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["zfar"]) {
|
||||
qCDebug(modelformat) << "zfar: " << zfar;
|
||||
}
|
||||
if (defined["znear"]) {
|
||||
qCDebug(modelformat) << "znear: " << znear;
|
||||
}
|
||||
if (defined["xmag"]) {
|
||||
qCDebug(modelformat) << "xmag: " << xmag;
|
||||
}
|
||||
if (defined["ymag"]) {
|
||||
qCDebug(modelformat) << "ymag: " << ymag;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
namespace GLTFCameraTypes {
|
||||
enum Values {
|
||||
ORTHOGRAPHIC = 0,
|
||||
PERSPECTIVE
|
||||
};
|
||||
}
|
||||
|
||||
struct GLTFCamera {
|
||||
QString name;
|
||||
GLTFCameraPerspective perspective; //required (or)
|
||||
GLTFCameraOrthographic orthographic; //required (or)
|
||||
int type;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["name"]) {
|
||||
qCDebug(modelformat) << "name: " << name;
|
||||
}
|
||||
if (defined["type"]) {
|
||||
qCDebug(modelformat) << "type: " << type;
|
||||
}
|
||||
if (defined["perspective"]) {
|
||||
perspective.dump();
|
||||
}
|
||||
if (defined["orthographic"]) {
|
||||
orthographic.dump();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Images
|
||||
|
||||
namespace GLTFImageMimetype {
|
||||
enum Values {
|
||||
JPEG = 0,
|
||||
PNG
|
||||
};
|
||||
};
|
||||
|
||||
struct GLTFImage {
|
||||
QString uri; //required (or)
|
||||
int mimeType;
|
||||
int bufferView; //required (or)
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["uri"]) {
|
||||
qCDebug(modelformat) << "uri: " << uri;
|
||||
}
|
||||
if (defined["mimeType"]) {
|
||||
qCDebug(modelformat) << "mimeType: " << mimeType;
|
||||
}
|
||||
if (defined["bufferView"]) {
|
||||
qCDebug(modelformat) << "bufferView: " << bufferView;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Materials
|
||||
|
||||
struct GLTFpbrMetallicRoughness {
|
||||
QVector<double> baseColorFactor;
|
||||
int baseColorTexture;
|
||||
int metallicRoughnessTexture;
|
||||
double metallicFactor;
|
||||
double roughnessFactor;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["baseColorFactor"]) {
|
||||
qCDebug(modelformat) << "baseColorFactor: " << baseColorFactor;
|
||||
}
|
||||
if (defined["baseColorTexture"]) {
|
||||
qCDebug(modelformat) << "baseColorTexture: " << baseColorTexture;
|
||||
}
|
||||
if (defined["metallicRoughnessTexture"]) {
|
||||
qCDebug(modelformat) << "metallicRoughnessTexture: " << metallicRoughnessTexture;
|
||||
}
|
||||
if (defined["metallicFactor"]) {
|
||||
qCDebug(modelformat) << "metallicFactor: " << metallicFactor;
|
||||
}
|
||||
if (defined["roughnessFactor"]) {
|
||||
qCDebug(modelformat) << "roughnessFactor: " << roughnessFactor;
|
||||
}
|
||||
if (defined["baseColorFactor"]) {
|
||||
qCDebug(modelformat) << "baseColorFactor: " << baseColorFactor;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
namespace GLTFMaterialAlphaMode {
|
||||
enum Values {
|
||||
OPAQUE = 0,
|
||||
MASK,
|
||||
BLEND
|
||||
};
|
||||
};
|
||||
|
||||
struct GLTFMaterial {
|
||||
QString name;
|
||||
QVector<double> emissiveFactor;
|
||||
int emissiveTexture;
|
||||
int normalTexture;
|
||||
int occlusionTexture;
|
||||
int alphaMode;
|
||||
double alphaCutoff;
|
||||
bool doubleSided;
|
||||
GLTFpbrMetallicRoughness pbrMetallicRoughness;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["name"]) {
|
||||
qCDebug(modelformat) << "name: " << name;
|
||||
}
|
||||
if (defined["emissiveTexture"]) {
|
||||
qCDebug(modelformat) << "emissiveTexture: " << emissiveTexture;
|
||||
}
|
||||
if (defined["normalTexture"]) {
|
||||
qCDebug(modelformat) << "normalTexture: " << normalTexture;
|
||||
}
|
||||
if (defined["occlusionTexture"]) {
|
||||
qCDebug(modelformat) << "occlusionTexture: " << occlusionTexture;
|
||||
}
|
||||
if (defined["emissiveFactor"]) {
|
||||
qCDebug(modelformat) << "emissiveFactor: " << emissiveFactor;
|
||||
}
|
||||
if (defined["pbrMetallicRoughness"]) {
|
||||
pbrMetallicRoughness.dump();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Accessors
|
||||
|
||||
namespace GLTFAccessorType {
|
||||
enum Values {
|
||||
SCALAR = 0,
|
||||
VEC2,
|
||||
VEC3,
|
||||
VEC4,
|
||||
MAT2,
|
||||
MAT3,
|
||||
MAT4
|
||||
};
|
||||
}
|
||||
namespace GLTFAccessorComponentType {
|
||||
enum Values {
|
||||
BYTE = 5120,
|
||||
UNSIGNED_BYTE = 5121,
|
||||
SHORT = 5122,
|
||||
UNSIGNED_SHORT = 5123,
|
||||
UNSIGNED_INT = 5125,
|
||||
FLOAT = 5126
|
||||
};
|
||||
}
|
||||
struct GLTFAccessor {
|
||||
int bufferView;
|
||||
int byteOffset;
|
||||
int componentType; //required
|
||||
int count; //required
|
||||
int type; //required
|
||||
bool normalized{ false };
|
||||
QVector<double> max;
|
||||
QVector<double> min;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["bufferView"]) {
|
||||
qCDebug(modelformat) << "bufferView: " << bufferView;
|
||||
}
|
||||
if (defined["byteOffset"]) {
|
||||
qCDebug(modelformat) << "byteOffset: " << byteOffset;
|
||||
}
|
||||
if (defined["componentType"]) {
|
||||
qCDebug(modelformat) << "componentType: " << componentType;
|
||||
}
|
||||
if (defined["count"]) {
|
||||
qCDebug(modelformat) << "count: " << count;
|
||||
}
|
||||
if (defined["type"]) {
|
||||
qCDebug(modelformat) << "type: " << type;
|
||||
}
|
||||
if (defined["normalized"]) {
|
||||
qCDebug(modelformat) << "normalized: " << (normalized ? "TRUE" : "FALSE");
|
||||
}
|
||||
if (defined["max"]) {
|
||||
qCDebug(modelformat) << "max: ";
|
||||
foreach(float m, max) {
|
||||
qCDebug(modelformat) << m;
|
||||
}
|
||||
}
|
||||
if (defined["min"]) {
|
||||
qCDebug(modelformat) << "min: ";
|
||||
foreach(float m, min) {
|
||||
qCDebug(modelformat) << m;
|
||||
}
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Animation
|
||||
|
||||
namespace GLTFChannelTargetPath {
|
||||
enum Values {
|
||||
TRANSLATION = 0,
|
||||
ROTATION,
|
||||
SCALE
|
||||
};
|
||||
}
|
||||
|
||||
struct GLTFChannelTarget {
|
||||
int node;
|
||||
int path;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["node"]) {
|
||||
qCDebug(modelformat) << "node: " << node;
|
||||
}
|
||||
if (defined["path"]) {
|
||||
qCDebug(modelformat) << "path: " << path;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFChannel {
|
||||
int sampler;
|
||||
GLTFChannelTarget target;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["sampler"]) {
|
||||
qCDebug(modelformat) << "sampler: " << sampler;
|
||||
}
|
||||
if (defined["target"]) {
|
||||
target.dump();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
namespace GLTFAnimationSamplerInterpolation {
|
||||
enum Values{
|
||||
LINEAR = 0
|
||||
};
|
||||
}
|
||||
|
||||
struct GLTFAnimationSampler {
|
||||
int input;
|
||||
int output;
|
||||
int interpolation;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["input"]) {
|
||||
qCDebug(modelformat) << "input: " << input;
|
||||
}
|
||||
if (defined["output"]) {
|
||||
qCDebug(modelformat) << "output: " << output;
|
||||
}
|
||||
if (defined["interpolation"]) {
|
||||
qCDebug(modelformat) << "interpolation: " << interpolation;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFAnimation {
|
||||
QVector<GLTFChannel> channels;
|
||||
QVector<GLTFAnimationSampler> samplers;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["channels"]) {
|
||||
foreach(auto channel, channels) channel.dump();
|
||||
}
|
||||
if (defined["samplers"]) {
|
||||
foreach(auto sampler, samplers) sampler.dump();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFScene {
|
||||
QString name;
|
||||
QVector<int> nodes;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["name"]) {
|
||||
qCDebug(modelformat) << "name: " << name;
|
||||
}
|
||||
if (defined["nodes"]) {
|
||||
qCDebug(modelformat) << "nodes: ";
|
||||
foreach(int node, nodes) qCDebug(modelformat) << node;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFSkin {
|
||||
int inverseBindMatrices;
|
||||
QVector<int> joints;
|
||||
int skeleton;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["inverseBindMatrices"]) {
|
||||
qCDebug(modelformat) << "inverseBindMatrices: " << inverseBindMatrices;
|
||||
}
|
||||
if (defined["skeleton"]) {
|
||||
qCDebug(modelformat) << "skeleton: " << skeleton;
|
||||
}
|
||||
if (defined["joints"]) {
|
||||
qCDebug(modelformat) << "joints: ";
|
||||
foreach(int joint, joints) qCDebug(modelformat) << joint;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFTexture {
|
||||
int sampler;
|
||||
int source;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["sampler"]) {
|
||||
qCDebug(modelformat) << "sampler: " << sampler;
|
||||
}
|
||||
if (defined["source"]) {
|
||||
qCDebug(modelformat) << "source: " << sampler;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
struct GLTFFile {
|
||||
GLTFAsset asset;
|
||||
int scene = 0;
|
||||
QVector<GLTFAccessor> accessors;
|
||||
QVector<GLTFAnimation> animations;
|
||||
QVector<GLTFBufferView> bufferviews;
|
||||
QVector<GLTFBuffer> buffers;
|
||||
QVector<GLTFCamera> cameras;
|
||||
QVector<GLTFImage> images;
|
||||
QVector<GLTFMaterial> materials;
|
||||
QVector<GLTFMesh> meshes;
|
||||
QVector<GLTFNode> nodes;
|
||||
QVector<GLTFSampler> samplers;
|
||||
QVector<GLTFScene> scenes;
|
||||
QVector<GLTFSkin> skins;
|
||||
QVector<GLTFTexture> textures;
|
||||
QMap<QString, bool> defined;
|
||||
void dump() {
|
||||
if (defined["asset"]) {
|
||||
asset.dump();
|
||||
}
|
||||
if (defined["scene"]) {
|
||||
qCDebug(modelformat) << "scene: " << scene;
|
||||
}
|
||||
if (defined["accessors"]) {
|
||||
foreach(auto acc, accessors) acc.dump();
|
||||
}
|
||||
if (defined["animations"]) {
|
||||
foreach(auto ani, animations) ani.dump();
|
||||
}
|
||||
if (defined["bufferviews"]) {
|
||||
foreach(auto bv, bufferviews) bv.dump();
|
||||
}
|
||||
if (defined["buffers"]) {
|
||||
foreach(auto b, buffers) b.dump();
|
||||
}
|
||||
if (defined["cameras"]) {
|
||||
foreach(auto c, cameras) c.dump();
|
||||
}
|
||||
if (defined["images"]) {
|
||||
foreach(auto i, images) i.dump();
|
||||
}
|
||||
if (defined["materials"]) {
|
||||
foreach(auto mat, materials) mat.dump();
|
||||
}
|
||||
if (defined["meshes"]) {
|
||||
foreach(auto mes, meshes) mes.dump();
|
||||
}
|
||||
if (defined["nodes"]) {
|
||||
foreach(auto nod, nodes) nod.dump();
|
||||
}
|
||||
if (defined["samplers"]) {
|
||||
foreach(auto sa, samplers) sa.dump();
|
||||
}
|
||||
if (defined["scenes"]) {
|
||||
foreach(auto sc, scenes) sc.dump();
|
||||
}
|
||||
if (defined["skins"]) {
|
||||
foreach(auto sk, skins) sk.dump();
|
||||
}
|
||||
if (defined["textures"]) {
|
||||
foreach(auto tex, textures) tex.dump();
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
class GLTFReader : public QObject {
|
||||
Q_OBJECT
|
||||
public:
|
||||
GLTFReader();
|
||||
FBXGeometry* readGLTF(QByteArray& model, const QVariantHash& mapping,
|
||||
const QUrl& url, bool loadLightmaps = true, float lightmapLevel = 1.0f);
|
||||
private:
|
||||
GLTFFile _file;
|
||||
QUrl _url;
|
||||
|
||||
glm::mat4 getModelTransform(const GLTFNode& node);
|
||||
|
||||
bool buildGeometry(FBXGeometry& geometry, const QUrl& url);
|
||||
bool parseGLTF(const QByteArray& model);
|
||||
|
||||
bool getStringVal(const QJsonObject& object, const QString& fieldname,
|
||||
QString& value, QMap<QString, bool>& defined);
|
||||
bool getBoolVal(const QJsonObject& object, const QString& fieldname,
|
||||
bool& value, QMap<QString, bool>& defined);
|
||||
bool getIntVal(const QJsonObject& object, const QString& fieldname,
|
||||
int& value, QMap<QString, bool>& defined);
|
||||
bool getDoubleVal(const QJsonObject& object, const QString& fieldname,
|
||||
double& value, QMap<QString, bool>& defined);
|
||||
bool getObjectVal(const QJsonObject& object, const QString& fieldname,
|
||||
QJsonObject& value, QMap<QString, bool>& defined);
|
||||
bool getIntArrayVal(const QJsonObject& object, const QString& fieldname,
|
||||
QVector<int>& values, QMap<QString, bool>& defined);
|
||||
bool getDoubleArrayVal(const QJsonObject& object, const QString& fieldname,
|
||||
QVector<double>& values, QMap<QString, bool>& defined);
|
||||
bool getObjectArrayVal(const QJsonObject& object, const QString& fieldname,
|
||||
QJsonArray& objects, QMap<QString, bool>& defined);
|
||||
|
||||
int getMaterialAlphaMode(const QString& type);
|
||||
int getAccessorType(const QString& type);
|
||||
int getAnimationSamplerInterpolation(const QString& interpolation);
|
||||
int getCameraType(const QString& type);
|
||||
int getImageMimeType(const QString& mime);
|
||||
int getMeshPrimitiveRenderingMode(const QString& type);
|
||||
|
||||
bool getIndexFromObject(const QJsonObject& object, const QString& field,
|
||||
int& outidx, QMap<QString, bool>& defined);
|
||||
|
||||
bool setAsset(const QJsonObject& object);
|
||||
bool addAccessor(const QJsonObject& object);
|
||||
bool addAnimation(const QJsonObject& object);
|
||||
bool addBufferView(const QJsonObject& object);
|
||||
bool addBuffer(const QJsonObject& object);
|
||||
bool addCamera(const QJsonObject& object);
|
||||
bool addImage(const QJsonObject& object);
|
||||
bool addMaterial(const QJsonObject& object);
|
||||
bool addMesh(const QJsonObject& object);
|
||||
bool addNode(const QJsonObject& object);
|
||||
bool addSampler(const QJsonObject& object);
|
||||
bool addScene(const QJsonObject& object);
|
||||
bool addSkin(const QJsonObject& object);
|
||||
bool addTexture(const QJsonObject& object);
|
||||
|
||||
bool readBinary(const QString& url, QByteArray& outdata);
|
||||
|
||||
template<typename T, typename L>
|
||||
bool readArray(const QByteArray& bin, int byteOffset, int byteLength,
|
||||
QVector<L>& outarray, int accessorType);
|
||||
|
||||
template<typename T>
|
||||
bool addArrayOfType(const QByteArray& bin, int byteOffset, int byteLength,
|
||||
QVector<T>& outarray, int accessorType, int componentType);
|
||||
|
||||
void retriangulate(const QVector<int>& in_indices, const QVector<glm::vec3>& in_vertices,
|
||||
const QVector<glm::vec3>& in_normals, QVector<int>& out_indices,
|
||||
QVector<glm::vec3>& out_vertices, QVector<glm::vec3>& out_normals);
|
||||
|
||||
std::tuple<bool, QByteArray> requestData(QUrl& url);
|
||||
QNetworkReply* request(QUrl& url, bool isTest);
|
||||
bool doesResourceExist(const QString& url);
|
||||
|
||||
|
||||
void setFBXMaterial(FBXMaterial& fbxmat, const GLTFMaterial& material);
|
||||
FBXTexture getFBXTexture(const GLTFTexture& texture);
|
||||
void fbxDebugDump(const FBXGeometry& fbxgeo);
|
||||
};
|
||||
|
||||
#endif // hifi_GLTFReader_h
|
|
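A minimal usage sketch for the reader declared above, assuming only the public readGLTF signature shown in this header; the actual call site appears in the GeometryReader hunk further down in this commit.

```cpp
#include <memory>
#include <QByteArray>
#include <QUrl>
#include <QVariantHash>
#include "GLTFReader.h"

// Parse glTF data into the engine's FBXGeometry representation.
FBXGeometry* loadGLTF(QByteArray& data, const QUrl& url) {
    QVariantHash mapping;  // no FST mapping in this sketch
    auto reader = std::make_shared<GLTFReader>();
    FBXGeometry* geometry = reader->readGLTF(data, mapping, url);
    // An empty mesh/joint set typically means an unsupported glTF version.
    return geometry;
}
```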
@ -14,6 +14,7 @@
#include <FSTReader.h>
#include "FBXReader.h"
#include "OBJReader.h"
#include "GLTFReader.h"

#include <gpu/Batch.h>
#include <gpu/Stream.h>
@ -175,9 +176,12 @@ void GeometryReader::run() {
    QString urlname = _url.path().toLower();
    if (!urlname.isEmpty() && !_url.path().isEmpty() &&
        (_url.path().toLower().endsWith(".fbx") ||
         _url.path().toLower().endsWith(".obj") ||
         _url.path().toLower().endsWith(".obj.gz"))) {
        (_url.path().toLower().endsWith(".fbx") ||
         _url.path().toLower().endsWith(".obj") ||
         _url.path().toLower().endsWith(".obj.gz") ||
         _url.path().toLower().endsWith(".gltf"))) {

        FBXGeometry::Pointer fbxGeometry;

        if (_url.path().toLower().endsWith(".fbx")) {
@ -189,12 +193,18 @@ void GeometryReader::run() {
        fbxGeometry.reset(OBJReader().readOBJ(_data, _mapping, _combineParts, _url));
    } else if (_url.path().toLower().endsWith(".obj.gz")) {
        QByteArray uncompressedData;
        if (gunzip(_data, uncompressedData)){
        if (gunzip(_data, uncompressedData)) {
            fbxGeometry.reset(OBJReader().readOBJ(uncompressedData, _mapping, _combineParts, _url));
        } else {
            throw QString("failed to decompress .obj.gz" );
            throw QString("failed to decompress .obj.gz");
        }

    } else if (_url.path().toLower().endsWith(".gltf")) {
        std::shared_ptr<GLTFReader> glreader = std::make_shared<GLTFReader>();
        fbxGeometry.reset(glreader->readGLTF(_data, _mapping, _url));
        if (fbxGeometry->meshes.size() == 0 && fbxGeometry->joints.size() == 0) {
            throw QString("empty geometry, possibly due to an unsupported GLTF version");
        }
    } else {
        throw QString("unsupported format");
    }
@ -429,7 +429,7 @@ qint64 LimitedNodeList::sendPacket(std::unique_ptr<NLPacket> packet, const HifiS
|
|||
}
|
||||
}
|
||||
|
||||
qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& destinationNode) {
|
||||
qint64 LimitedNodeList::sendUnreliableUnorderedPacketList(NLPacketList& packetList, const Node& destinationNode) {
|
||||
auto activeSocket = destinationNode.getActiveSocket();
|
||||
|
||||
if (activeSocket) {
|
||||
|
@ -452,8 +452,8 @@ qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const Node& des
|
|||
}
|
||||
}
|
||||
|
||||
qint64 LimitedNodeList::sendPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
|
||||
const QUuid& connectionSecret) {
|
||||
qint64 LimitedNodeList::sendUnreliableUnorderedPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
|
||||
const QUuid& connectionSecret) {
|
||||
qint64 bytesSent = 0;
|
||||
|
||||
// close the last packet in the list
|
||||
|
|
|
@ -124,17 +124,25 @@ public:
|
|||
|
||||
PacketReceiver& getPacketReceiver() { return *_packetReceiver; }
|
||||
|
||||
// use sendUnreliablePacket to send an unreliable packet (that you do not need to move)
|
||||
// either to a node (via its active socket) or to a manual sockaddr
|
||||
qint64 sendUnreliablePacket(const NLPacket& packet, const Node& destinationNode);
|
||||
qint64 sendUnreliablePacket(const NLPacket& packet, const HifiSockAddr& sockAddr,
|
||||
const QUuid& connectionSecret = QUuid());
|
||||
|
||||
// use sendPacket to send a moved unreliable or reliable NL packet to a node's active socket or manual sockaddr
|
||||
qint64 sendPacket(std::unique_ptr<NLPacket> packet, const Node& destinationNode);
|
||||
qint64 sendPacket(std::unique_ptr<NLPacket> packet, const HifiSockAddr& sockAddr,
|
||||
const QUuid& connectionSecret = QUuid());
|
||||
|
||||
qint64 sendPacketList(NLPacketList& packetList, const Node& destinationNode);
|
||||
qint64 sendPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
|
||||
// use sendUnreliableUnorderedPacketList to unreliably send separate packets from the packet list
|
||||
// either to a node's active socket or to a manual sockaddr
|
||||
qint64 sendUnreliableUnorderedPacketList(NLPacketList& packetList, const Node& destinationNode);
|
||||
qint64 sendUnreliableUnorderedPacketList(NLPacketList& packetList, const HifiSockAddr& sockAddr,
|
||||
const QUuid& connectionSecret = QUuid());
|
||||
|
||||
// use sendPacketList to send reliable packet lists (ordered or unordered) to a node's active socket
|
||||
// or to a manual sock addr
|
||||
qint64 sendPacketList(std::unique_ptr<NLPacketList> packetList, const HifiSockAddr& sockAddr);
|
||||
qint64 sendPacketList(std::unique_ptr<NLPacketList> packetList, const Node& destinationNode);
|
||||
|
||||
|
|
|
@ -979,8 +979,8 @@ void NodeList::maybeSendIgnoreSetToNode(SharedNodePointer newNode) {
}

void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {
    // cannot set gain of yourself or nobody
    if (!nodeID.isNull() && _sessionUUID != nodeID) {
    // cannot set gain of yourself
    if (_sessionUUID != nodeID) {
        auto audioMixer = soloNodeOfType(NodeType::AudioMixer);
        if (audioMixer) {
            // setup the packet
@ -988,10 +988,15 @@ void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {

            // write the node ID to the packet
            setAvatarGainPacket->write(nodeID.toRfc4122());
            // We need to convert the gain in dB (from the script) to an amplitude before packing it.
            setAvatarGainPacket->writePrimitive(packFloatGainToByte(fastExp2f(gain / 6.0206f)));

            qCDebug(networking) << "Sending Set Avatar Gain packet UUID: " << uuidStringWithoutCurlyBraces(nodeID) << "Gain:" << gain;
            // We need to convert the gain in dB (from the script) to an amplitude before packing it.
            setAvatarGainPacket->writePrimitive(packFloatGainToByte(fastExp2f(gain / 6.02059991f)));

            if (nodeID.isNull()) {
                qCDebug(networking) << "Sending Set MASTER Avatar Gain packet with Gain:" << gain;
            } else {
                qCDebug(networking) << "Sending Set Avatar Gain packet with UUID: " << uuidStringWithoutCurlyBraces(nodeID) << "Gain:" << gain;
            }

            sendPacket(std::move(setAvatarGainPacket), *audioMixer);
            QWriteLocker{ &_avatarGainMapLock };
@ -1001,7 +1006,7 @@ void NodeList::setAvatarGain(const QUuid& nodeID, float gain) {
            qWarning() << "Couldn't find audio mixer to send set gain request";
        }
    } else {
        qWarning() << "NodeList::setAvatarGain called with an invalid ID or an ID which matches the current session ID:" << nodeID;
        qWarning() << "NodeList::setAvatarGain called with an ID which matches the current session ID:" << nodeID;
    }
}
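The constant in the conversion above comes from 20·log10(2) ≈ 6.02059991, so 2^(dB / 6.02059991) equals 10^(dB / 20). A small stand-alone sketch of that dB-to-amplitude mapping (illustrative only; it does not go through packFloatGainToByte):

```cpp
#include <cmath>
#include <cstdio>

int main() {
    const float levels[] = { -20.0f, -6.0f, 0.0f, 6.0f, 20.0f };
    for (float dB : levels) {
        // 2^(dB / 6.02059991) == 10^(dB / 20): +6 dB roughly doubles the amplitude.
        float amplitude = std::exp2(dB / 6.02059991f);
        std::printf("%6.1f dB -> amplitude %.4f\n", dB, amplitude);
    }
    return 0;
}
```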
@ -33,7 +33,7 @@ PacketVersion versionForPacketType(PacketType packetType) {
|
|||
return static_cast<PacketVersion>(EntityVersion::HazeEffect);
|
||||
|
||||
case PacketType::EntityQuery:
|
||||
return static_cast<PacketVersion>(EntityQueryPacketVersion::JSONFilterWithFamilyTree);
|
||||
return static_cast<PacketVersion>(EntityQueryPacketVersion::ConnectionIdentifier);
|
||||
case PacketType::AvatarIdentity:
|
||||
case PacketType::AvatarData:
|
||||
case PacketType::BulkAvatarData:
|
||||
|
|
|
@ -209,7 +209,8 @@ enum class EntityScriptCallMethodVersion : PacketVersion {
|
|||
|
||||
enum class EntityQueryPacketVersion: PacketVersion {
|
||||
JSONFilter = 18,
|
||||
JSONFilterWithFamilyTree = 19
|
||||
JSONFilterWithFamilyTree = 19,
|
||||
ConnectionIdentifier = 20
|
||||
};
|
||||
|
||||
enum class AssetServerPacketVersion: PacketVersion {
|
||||
|
|
|
@ -9,6 +9,8 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include <random>
|
||||
|
||||
#include <QtCore/QJsonDocument>
|
||||
|
||||
#include <GLMHelpers.h>
|
||||
|
@ -22,7 +24,7 @@ const float DEFAULT_ASPECT_RATIO = 1.0f;
|
|||
const float DEFAULT_NEAR_CLIP = 0.1f;
|
||||
const float DEFAULT_FAR_CLIP = 3.0f;
|
||||
|
||||
OctreeQuery::OctreeQuery() :
|
||||
OctreeQuery::OctreeQuery(bool randomizeConnectionID) :
|
||||
_cameraFov(DEFAULT_FOV),
|
||||
_cameraAspectRatio(DEFAULT_ASPECT_RATIO),
|
||||
_cameraNearClip(DEFAULT_NEAR_CLIP),
|
||||
|
@ -30,10 +32,21 @@ OctreeQuery::OctreeQuery() :
    _cameraCenterRadius(DEFAULT_FAR_CLIP)
{
    _maxQueryPPS = DEFAULT_MAX_OCTREE_PPS;

    if (randomizeConnectionID) {
        // randomize our initial octree query connection ID using random_device
        // the connection ID is 16 bits so we take a generated 32 bit value from random device and chop off the top
        std::random_device randomDevice;
        _connectionID = randomDevice();
    }
}

int OctreeQuery::getBroadcastData(unsigned char* destinationBuffer) {
    unsigned char* bufferStart = destinationBuffer;

    // pack the connection ID so the server can detect when we start a new connection
    memcpy(destinationBuffer, &_connectionID, sizeof(_connectionID));
    destinationBuffer += sizeof(_connectionID);

    // pack a boolean (cut to 1 byte) to designate if this query uses the sent view frustum
    memcpy(destinationBuffer, &_usesFrustum, sizeof(_usesFrustum));
@ -98,7 +111,27 @@ int OctreeQuery::parseData(ReceivedMessage& message) {
    const unsigned char* startPosition = reinterpret_cast<const unsigned char*>(message.getRawMessage());
    const unsigned char* sourceBuffer = startPosition;

    // unpack the connection ID
    uint16_t newConnectionID;
    memcpy(&newConnectionID, sourceBuffer, sizeof(newConnectionID));
    sourceBuffer += sizeof(newConnectionID);

    if (!_hasReceivedFirstQuery) {
        // set our flag to indicate that we've parsed for this query at least once
        _hasReceivedFirstQuery = true;

        // set the incoming connection ID as the current
        _connectionID = newConnectionID;
    } else {
        if (newConnectionID != _connectionID) {
            // the connection ID has changed - emit our signal so the server
            // knows that the client is starting a new session
            _connectionID = newConnectionID;
            emit incomingConnectionIDChanged();
        }
    }

    // check if this query uses a view frustum
    memcpy(&_usesFrustum, sourceBuffer, sizeof(_usesFrustum));
    sourceBuffer += sizeof(_usesFrustum);
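A stand-alone sketch of the connection-ID scheme described in the hunks above: the 32-bit value from std::random_device is simply truncated to 16 bits when stored, and starting a new connection increments it so the server can detect a fresh session. Names here are illustrative, not the engine's.

```cpp
#include <cstdint>
#include <cstdio>
#include <random>

int main() {
    std::random_device randomDevice;
    // keep only the low 16 bits of the 32-bit random value
    uint16_t connectionID = static_cast<uint16_t>(randomDevice());
    std::printf("initial connection ID: %u\n", connectionID);

    ++connectionID;  // a new connection to the server bumps the ID
    std::printf("after reconnect:       %u\n", connectionID);
    return 0;
}
```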
@ -27,11 +27,11 @@ class OctreeQuery : public NodeData {
|
|||
Q_OBJECT
|
||||
|
||||
public:
|
||||
OctreeQuery();
|
||||
OctreeQuery(bool randomizeConnectionID = false);
|
||||
virtual ~OctreeQuery() {}
|
||||
|
||||
int getBroadcastData(unsigned char* destinationBuffer);
|
||||
virtual int parseData(ReceivedMessage& message) override;
|
||||
int parseData(ReceivedMessage& message) override;
|
||||
|
||||
// getters for camera details
|
||||
const glm::vec3& getCameraPosition() const { return _cameraPosition; }
|
||||
|
@ -68,6 +68,13 @@ public:
|
|||
bool getUsesFrustum() { return _usesFrustum; }
|
||||
void setUsesFrustum(bool usesFrustum) { _usesFrustum = usesFrustum; }
|
||||
|
||||
void incrementConnectionID() { ++_connectionID; }
|
||||
|
||||
bool hasReceivedFirstQuery() const { return _hasReceivedFirstQuery; }
|
||||
|
||||
signals:
|
||||
void incomingConnectionIDChanged();
|
||||
|
||||
public slots:
|
||||
void setMaxQueryPacketsPerSecond(int maxQueryPPS) { _maxQueryPPS = maxQueryPPS; }
|
||||
void setOctreeSizeScale(float octreeSizeScale) { _octreeElementSizeScale = octreeSizeScale; }
|
||||
|
@ -90,9 +97,12 @@ protected:
|
|||
int _boundaryLevelAdjust = 0; /// used for LOD calculations
|
||||
|
||||
uint8_t _usesFrustum = true;
|
||||
uint16_t _connectionID; // query connection ID, randomized to start, increments with each new connection to server
|
||||
|
||||
QJsonObject _jsonParameters;
|
||||
QReadWriteLock _jsonParametersLock;
|
||||
|
||||
bool _hasReceivedFirstQuery { false };
|
||||
|
||||
private:
|
||||
// privatize the copy constructor and assignment operator so they cannot be called
|
||||
|
|
|
@ -18,13 +18,6 @@
|
|||
#include <SharedUtil.h>
|
||||
#include <UUID.h>
|
||||
|
||||
int OctreeQueryNode::parseData(ReceivedMessage& message) {
|
||||
// set our flag to indicate that we've parsed for this query at least once
|
||||
_hasReceivedFirstQuery = true;
|
||||
|
||||
return OctreeQuery::parseData(message);
|
||||
}
|
||||
|
||||
void OctreeQueryNode::nodeKilled() {
|
||||
_isShuttingDown = true;
|
||||
}
|
||||
|
|
|
@ -35,8 +35,6 @@ public:
|
|||
void init(); // called after creation to set up some virtual items
|
||||
virtual PacketType getMyPacketType() const = 0;
|
||||
|
||||
virtual int parseData(ReceivedMessage& message) override;
|
||||
|
||||
void resetOctreePacket(); // resets octree packet to after "V" header
|
||||
|
||||
void writeToPacket(const unsigned char* buffer, unsigned int bytes); // writes to end of packet
|
||||
|
@ -108,8 +106,6 @@ public:
|
|||
bool shouldForceFullScene() const { return _shouldForceFullScene; }
|
||||
void setShouldForceFullScene(bool shouldForceFullScene) { _shouldForceFullScene = shouldForceFullScene; }
|
||||
|
||||
bool hasReceivedFirstQuery() const { return _hasReceivedFirstQuery; }
|
||||
|
||||
private:
|
||||
OctreeQueryNode(const OctreeQueryNode &);
|
||||
OctreeQueryNode& operator= (const OctreeQueryNode&);
|
||||
|
@ -157,8 +153,6 @@ private:
|
|||
QJsonObject _lastCheckJSONParameters;
|
||||
|
||||
bool _shouldForceFullScene { false };
|
||||
|
||||
bool _hasReceivedFirstQuery { false };
|
||||
};
|
||||
|
||||
#endif // hifi_OctreeQueryNode_h
|
||||
|
|
|
@ -65,7 +65,7 @@ public slots:
|
|||
* Sets an avatar's gain for you and you only.
|
||||
* Units are Decibels (dB)
|
||||
* @function Users.setAvatarGain
|
||||
* @param {nodeID} nodeID The node or session ID of the user whose gain you want to modify.
|
||||
* @param {nodeID} nodeID The node or session ID of the user whose gain you want to modify, or null to set the master gain.
|
||||
* @param {float} gain The gain of the avatar you'd like to set. Units are dB.
|
||||
*/
|
||||
void setAvatarGain(const QUuid& nodeID, float gain);
|
||||
|
@ -73,7 +73,7 @@ public slots:
|
|||
/**jsdoc
|
||||
* Gets an avatar's gain for you and you only.
|
||||
* @function Users.getAvatarGain
|
||||
* @param {nodeID} nodeID The node or session ID of the user whose gain you want to get.
|
||||
* @param {nodeID} nodeID The node or session ID of the user whose gain you want to get, or null to get the master gain.
|
||||
* @return {float} gain (in dB)
|
||||
*/
|
||||
float getAvatarGain(const QUuid& nodeID);
|
||||
|
|
|
@ -63,6 +63,7 @@
|
|||
|
||||
var newAvatarScale = (scalingCurrentDistance / this.scalingStartDistance) * this.scalingStartAvatarScale;
|
||||
MyAvatar.scale = newAvatarScale;
|
||||
MyAvatar.scaleChanged();
|
||||
}
|
||||
return dispatcherUtils.makeRunningValues(true, [], []);
|
||||
}
|
||||
|
|
|
@ -270,11 +270,14 @@ Script.include("/~/system/libraries/controllers.js");
|
|||
this.otherModuleNeedsToRun = function(controllerData) {
|
||||
var grabOverlayModuleName = this.hand === RIGHT_HAND ? "RightNearParentingGrabOverlay" : "LeftNearParentingGrabOverlay";
|
||||
var grabOverlayModule = getEnabledModuleByName(grabOverlayModuleName);
|
||||
var grabEntityModuleName = this.hand === RIGHT_HAND ? "RightNearParentingGrabEntity" : "LeftNearParentingGrabEntity";
|
||||
var grabEntityModule = getEnabledModuleByName(grabEntityModuleName);
|
||||
var grabOverlayModuleReady = grabOverlayModule ? grabOverlayModule.isReady(controllerData) : makeRunningValues(false, [], []);
|
||||
var grabEntityModuleReady = grabEntityModule ? grabEntityModule.isReady(controllerData) : makeRunningValues(false, [], []);
|
||||
var farGrabModuleName = this.hand === RIGHT_HAND ? "RightFarActionGrabEntity" : "LeftFarActionGrabEntity";
|
||||
var farGrabModule = getEnabledModuleByName(farGrabModuleName);
|
||||
var farGrabModuleReady = farGrabModule ? farGrabModule.isReady(controllerData) : makeRunningValues(false, [], []);
|
||||
return grabOverlayModuleReady.active || farGrabModuleReady.active;
|
||||
return grabOverlayModuleReady.active || farGrabModuleReady.active || grabEntityModuleReady.active;
|
||||
};
|
||||
|
||||
this.processStylus = function(controllerData) {
|
||||
|
|
|
@ -1424,24 +1424,29 @@ function deleteSelectedEntities() {
|
|||
for (var i = 0; i < newSortedSelection.length; i++) {
|
||||
var entityID = newSortedSelection[i];
|
||||
var initialProperties = SelectionManager.savedProperties[entityID];
|
||||
var children = Entities.getChildrenIDs(entityID);
|
||||
var childList = [];
|
||||
recursiveDelete(children, childList, deletedIDs);
|
||||
savedProperties.push({
|
||||
entityID: entityID,
|
||||
properties: initialProperties,
|
||||
children: childList
|
||||
});
|
||||
deletedIDs.push(entityID);
|
||||
Entities.deleteEntity(entityID);
|
||||
if (!initialProperties.locked) {
|
||||
var children = Entities.getChildrenIDs(entityID);
|
||||
var childList = [];
|
||||
recursiveDelete(children, childList, deletedIDs);
|
||||
savedProperties.push({
|
||||
entityID: entityID,
|
||||
properties: initialProperties,
|
||||
children: childList
|
||||
});
|
||||
deletedIDs.push(entityID);
|
||||
Entities.deleteEntity(entityID);
|
||||
}
|
||||
}
|
||||
SelectionManager.clearSelections();
|
||||
pushCommandForSelections([], savedProperties);
|
||||
|
||||
entityListTool.webView.emitScriptEvent(JSON.stringify({
|
||||
type: "deleted",
|
||||
ids: deletedIDs
|
||||
}));
|
||||
if (savedProperties.length > 0) {
|
||||
SelectionManager.clearSelections();
|
||||
pushCommandForSelections([], savedProperties);
|
||||
|
||||
entityListTool.webView.emitScriptEvent(JSON.stringify({
|
||||
type: "deleted",
|
||||
ids: deletedIDs
|
||||
}));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -210,11 +210,11 @@
|
|||
notificationOrientation,
|
||||
notificationPosition,
|
||||
buttonPosition;
|
||||
var sensorScaleFactor = MyAvatar.sensorToWorldScale;
|
||||
var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
|
||||
// Notification plane positions
|
||||
noticeY = -y * NOTIFICATION_3D_SCALE - noticeHeight / 2;
|
||||
noticeY = -sensorScaleFactor * (y * NOTIFICATION_3D_SCALE + 0.5 * noticeHeight);
|
||||
notificationPosition = { x: 0, y: noticeY, z: 0 };
|
||||
buttonPosition = { x: (noticeWidth - NOTIFICATION_3D_BUTTON_WIDTH) / 2, y: noticeY, z: 0.001 };
|
||||
buttonPosition = { x: 0.5 * sensorScaleFactor * (noticeWidth - NOTIFICATION_3D_BUTTON_WIDTH), y: noticeY, z: 0.001 };
|
||||
|
||||
// Rotate plane
|
||||
notificationOrientation = Quat.fromPitchYawRollDegrees(NOTIFICATIONS_3D_PITCH,
|
||||
|
@ -245,7 +245,7 @@
|
|||
noticeHeight,
|
||||
positions,
|
||||
last;
|
||||
var sensorScaleFactor = MyAvatar.sensorToWorldScale;
|
||||
var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
|
||||
if (isOnHMD) {
|
||||
// Calculate 3D values from 2D overlay properties.
|
||||
|
||||
|
@ -369,7 +369,7 @@
|
|||
buttonProperties,
|
||||
i;
|
||||
|
||||
var sensorScaleFactor = MyAvatar.sensorToWorldScale;
|
||||
var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
|
||||
if (text.length >= breakPoint) {
|
||||
breaks = count;
|
||||
}
|
||||
|
@ -453,7 +453,7 @@
|
|||
}
|
||||
|
||||
function updateNotificationsTexts() {
|
||||
var sensorScaleFactor = MyAvatar.sensorToWorldScale;
|
||||
var sensorScaleFactor = isOnHMD ? MyAvatar.sensorToWorldScale : 1.0;
|
||||
for (var i = 0; i < notifications.length; i++) {
|
||||
var overlayType = Overlays.getOverlayType(notifications[i]);
|
||||
|
||||
|
|
|
@ -482,14 +482,23 @@ HifiEntityUI.prototype = {
|
|||
textureImage.className = "texture-image no-texture";
|
||||
var image = document.createElement("img");
|
||||
var imageLoad = _.debounce(function (url) {
|
||||
if (url.length > 0) {
|
||||
if (url.slice(0, 5).toLowerCase() === "atp:/") {
|
||||
image.src = "";
|
||||
image.style.display = "none";
|
||||
textureImage.classList.remove("with-texture");
|
||||
textureImage.classList.remove("no-texture");
|
||||
textureImage.classList.add("no-preview");
|
||||
} else if (url.length > 0) {
|
||||
textureImage.classList.remove("no-texture");
|
||||
textureImage.classList.remove("no-preview");
|
||||
textureImage.classList.add("with-texture");
|
||||
image.src = url;
|
||||
image.style.display = "block";
|
||||
} else {
|
||||
image.src = "";
|
||||
image.style.display = "none";
|
||||
textureImage.classList.remove("with-texture");
|
||||
textureImage.classList.remove("no-preview");
|
||||
textureImage.classList.add("no-texture");
|
||||
}
|
||||
self.webBridgeSync(group.id, url);
|
||||
|
|
File diff suppressed because one or more lines are too long