Merge with upstream
|
@ -15,6 +15,7 @@
|
|||
#include <QtNetwork/QNetworkDiskCache>
|
||||
#include <QtNetwork/QNetworkRequest>
|
||||
#include <QtNetwork/QNetworkReply>
|
||||
#include <QThread>
|
||||
|
||||
#include <AssetClient.h>
|
||||
#include <AvatarHashMap.h>
|
||||
|
@ -27,12 +28,16 @@
|
|||
#include <ResourceCache.h>
|
||||
#include <ScriptCache.h>
|
||||
#include <SoundCache.h>
|
||||
#include <ScriptEngines.h>
|
||||
#include <UUID.h>
|
||||
|
||||
#include <recording/Deck.h>
|
||||
#include <recording/Recorder.h>
|
||||
#include <recording/Frame.h>
|
||||
|
||||
#include <plugins/CodecPlugin.h>
|
||||
#include <plugins/PluginManager.h>
|
||||
|
||||
#include <WebSocketServerClass.h>
|
||||
#include <EntityScriptingInterface.h> // TODO: consider moving to scriptengine.h
|
||||
|
||||
|
@ -42,6 +47,7 @@
|
|||
#include "AbstractAudioInterface.h"
|
||||
|
||||
#include "Agent.h"
|
||||
#include "AvatarAudioTimer.h"
|
||||
|
||||
static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;
|
||||
|
||||
|
@ -62,6 +68,7 @@ Agent::Agent(ReceivedMessage& message) :
|
|||
DependencyManager::set<recording::Recorder>();
|
||||
DependencyManager::set<RecordingScriptingInterface>();
|
||||
DependencyManager::set<ScriptCache>();
|
||||
DependencyManager::set<ScriptEngines>();
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
|
||||
|
@ -72,6 +79,17 @@ Agent::Agent(ReceivedMessage& message) :
|
|||
{ PacketType::OctreeStats, PacketType::EntityData, PacketType::EntityErase },
|
||||
this, "handleOctreePacket");
|
||||
packetReceiver.registerListener(PacketType::Jurisdiction, this, "handleJurisdictionPacket");
|
||||
packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");
|
||||
}
|
||||
|
||||
void Agent::playAvatarSound(SharedSoundPointer sound) {
|
||||
// this must happen on Agent's main thread
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QMetaObject::invokeMethod(this, "playAvatarSound", Q_ARG(SharedSoundPointer, sound));
|
||||
return;
|
||||
} else {
|
||||
setAvatarSound(sound);
|
||||
}
|
||||
}
|
||||
|
||||
void Agent::handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
|
@ -118,7 +136,6 @@ void Agent::handleAudioPacket(QSharedPointer<ReceivedMessage> message) {
|
|||
_receivedAudioStream.parseData(*message);
|
||||
|
||||
_lastReceivedAudioLoudness = _receivedAudioStream.getNextOutputFrameLoudness();
|
||||
|
||||
_receivedAudioStream.clearBuffer();
|
||||
}
|
||||
|
||||
|
@ -214,6 +231,59 @@ void Agent::nodeActivated(SharedNodePointer activatedNode) {
|
|||
|
||||
_pendingScriptRequest = nullptr;
|
||||
}
|
||||
if (activatedNode->getType() == NodeType::AudioMixer) {
|
||||
negotiateAudioFormat();
|
||||
}
|
||||
}
|
||||
|
||||
void Agent::negotiateAudioFormat() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto negotiateFormatPacket = NLPacket::create(PacketType::NegotiateAudioFormat);
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
quint8 numberOfCodecs = (quint8)codecPlugins.size();
|
||||
negotiateFormatPacket->writePrimitive(numberOfCodecs);
|
||||
for (auto& plugin : codecPlugins) {
|
||||
auto codecName = plugin->getName();
|
||||
negotiateFormatPacket->writeString(codecName);
|
||||
}
|
||||
|
||||
// grab our audio mixer from the NodeList, if it exists
|
||||
SharedNodePointer audioMixer = nodeList->soloNodeOfType(NodeType::AudioMixer);
|
||||
|
||||
if (audioMixer) {
|
||||
// send off this mute packet
|
||||
nodeList->sendPacket(std::move(negotiateFormatPacket), *audioMixer);
|
||||
}
|
||||
}
|
||||
|
||||
void Agent::handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message) {
|
||||
QString selectedCodecName = message->readString();
|
||||
selectAudioFormat(selectedCodecName);
|
||||
}
|
||||
|
||||
void Agent::selectAudioFormat(const QString& selectedCodecName) {
|
||||
_selectedCodecName = selectedCodecName;
|
||||
|
||||
qDebug() << "Selected Codec:" << _selectedCodecName;
|
||||
|
||||
// release any old codec encoder/decoder first...
|
||||
if (_codec && _encoder) {
|
||||
_codec->releaseEncoder(_encoder);
|
||||
_encoder = nullptr;
|
||||
_codec = nullptr;
|
||||
}
|
||||
_receivedAudioStream.cleanupCodec();
|
||||
|
||||
auto codecPlugins = PluginManager::getInstance()->getCodecPlugins();
|
||||
for (auto& plugin : codecPlugins) {
|
||||
if (_selectedCodecName == plugin->getName()) {
|
||||
_codec = plugin;
|
||||
_receivedAudioStream.setupCodec(plugin, _selectedCodecName, AudioConstants::STEREO);
|
||||
_encoder = plugin->createEncoder(AudioConstants::SAMPLE_RATE, AudioConstants::MONO);
|
||||
qDebug() << "Selected Codec Plugin:" << _codec.get();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void Agent::scriptRequestFinished() {
|
||||
|
@ -291,10 +361,6 @@ void Agent::executeScript() {
|
|||
// register ourselves to the script engine
|
||||
_scriptEngine->registerGlobalObject("Agent", this);
|
||||
|
||||
// FIXME -we shouldn't be calling this directly, it's normally called by run(), not sure why
|
||||
// viewers would need this called.
|
||||
//_scriptEngine->init(); // must be done before we set up the viewers
|
||||
|
||||
_scriptEngine->registerGlobalObject("SoundCache", DependencyManager::get<SoundCache>().data());
|
||||
|
||||
QScriptValue webSocketServerConstructorValue = _scriptEngine->newFunction(WebSocketServerClass::constructor);
|
||||
|
@ -314,10 +380,18 @@ void Agent::executeScript() {
|
|||
entityScriptingInterface->setEntityTree(_entityViewer.getTree());
|
||||
|
||||
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
|
||||
|
||||
// wire up our additional agent related processing to the update signal
|
||||
QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatarAndAudio);
|
||||
|
||||
|
||||
// 100Hz timer for audio
|
||||
AvatarAudioTimer* audioTimerWorker = new AvatarAudioTimer();
|
||||
audioTimerWorker->moveToThread(&_avatarAudioTimerThread);
|
||||
connect(audioTimerWorker, &AvatarAudioTimer::avatarTick, this, &Agent::processAgentAvatarAudio);
|
||||
connect(this, &Agent::startAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::start);
|
||||
connect(this, &Agent::stopAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::stop);
|
||||
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
|
||||
_avatarAudioTimerThread.start();
|
||||
|
||||
// 60Hz timer for avatar
|
||||
QObject::connect(_scriptEngine.get(), &ScriptEngine::update, this, &Agent::processAgentAvatar);
|
||||
_scriptEngine->run();
|
||||
|
||||
Frame::clearFrameHandler(AUDIO_FRAME_TYPE);
|
||||
|
@ -343,6 +417,10 @@ void Agent::setIsAvatar(bool isAvatar) {
|
|||
|
||||
// start the timers
|
||||
_avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS);
|
||||
|
||||
// tell the avatarAudioTimer to start ticking
|
||||
emit startAvatarAudioTimer();
|
||||
|
||||
}
|
||||
|
||||
if (!_isAvatar) {
|
||||
|
@ -367,6 +445,7 @@ void Agent::setIsAvatar(bool isAvatar) {
|
|||
nodeList->sendPacketList(std::move(packetList), *node);
|
||||
});
|
||||
}
|
||||
emit stopAvatarAudioTimer();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -377,11 +456,9 @@ void Agent::sendAvatarIdentityPacket() {
|
|||
}
|
||||
}
|
||||
|
||||
void Agent::processAgentAvatarAndAudio(float deltaTime) {
|
||||
void Agent::processAgentAvatar() {
|
||||
if (!_scriptEngine->isFinished() && _isAvatar) {
|
||||
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
|
||||
const int SCRIPT_AUDIO_BUFFER_SAMPLES = AudioConstants::SAMPLE_RATE / SCRIPT_FPS + 0.5;
|
||||
const int SCRIPT_AUDIO_BUFFER_BYTES = SCRIPT_AUDIO_BUFFER_SAMPLES * sizeof(int16_t);
|
||||
|
||||
QByteArray avatarByteArray = scriptedAvatar->toByteArray(true, randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO);
|
||||
scriptedAvatar->doneEncoding(true);
|
||||
|
@ -395,95 +472,120 @@ void Agent::processAgentAvatarAndAudio(float deltaTime) {
|
|||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
nodeList->broadcastToNodes(std::move(avatarPacket), NodeSet() << NodeType::AvatarMixer);
|
||||
}
|
||||
}
|
||||
void Agent::encodeFrameOfZeros(QByteArray& encodedZeros) {
|
||||
_flushEncoder = false;
|
||||
static const QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL, 0);
|
||||
if (_encoder) {
|
||||
_encoder->encode(zeros, encodedZeros);
|
||||
} else {
|
||||
encodedZeros = zeros;
|
||||
}
|
||||
}
|
||||
|
||||
if (_isListeningToAudioStream || _avatarSound) {
|
||||
// if we have an avatar audio stream then send it out to our audio-mixer
|
||||
bool silentFrame = true;
|
||||
void Agent::processAgentAvatarAudio() {
|
||||
if (_isAvatar && (_isListeningToAudioStream || _avatarSound)) {
|
||||
// if we have an avatar audio stream then send it out to our audio-mixer
|
||||
auto scriptedAvatar = DependencyManager::get<ScriptableAvatar>();
|
||||
bool silentFrame = true;
|
||||
|
||||
int16_t numAvailableSamples = SCRIPT_AUDIO_BUFFER_SAMPLES;
|
||||
const int16_t* nextSoundOutput = NULL;
|
||||
int16_t numAvailableSamples = AudioConstants::NETWORK_FRAME_SAMPLES_PER_CHANNEL;
|
||||
const int16_t* nextSoundOutput = NULL;
|
||||
|
||||
if (_avatarSound) {
|
||||
const QByteArray& soundByteArray = _avatarSound->getByteArray();
|
||||
nextSoundOutput = reinterpret_cast<const int16_t*>(soundByteArray.data()
|
||||
if (_avatarSound) {
|
||||
const QByteArray& soundByteArray = _avatarSound->getByteArray();
|
||||
nextSoundOutput = reinterpret_cast<const int16_t*>(soundByteArray.data()
|
||||
+ _numAvatarSoundSentBytes);
|
||||
|
||||
int numAvailableBytes = (soundByteArray.size() - _numAvatarSoundSentBytes) > SCRIPT_AUDIO_BUFFER_BYTES
|
||||
? SCRIPT_AUDIO_BUFFER_BYTES
|
||||
: soundByteArray.size() - _numAvatarSoundSentBytes;
|
||||
numAvailableSamples = (int16_t)numAvailableBytes / sizeof(int16_t);
|
||||
int numAvailableBytes = (soundByteArray.size() - _numAvatarSoundSentBytes) > AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
|
||||
? AudioConstants::NETWORK_FRAME_BYTES_PER_CHANNEL
|
||||
: soundByteArray.size() - _numAvatarSoundSentBytes;
|
||||
numAvailableSamples = (int16_t)numAvailableBytes / sizeof(int16_t);
|
||||
|
||||
|
||||
// check if the all of the _numAvatarAudioBufferSamples to be sent are silence
|
||||
for (int i = 0; i < numAvailableSamples; ++i) {
|
||||
if (nextSoundOutput[i] != 0) {
|
||||
silentFrame = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
_numAvatarSoundSentBytes += numAvailableBytes;
|
||||
if (_numAvatarSoundSentBytes == soundByteArray.size()) {
|
||||
// we're done with this sound object - so set our pointer back to NULL
|
||||
// and our sent bytes back to zero
|
||||
_avatarSound.clear();
|
||||
_numAvatarSoundSentBytes = 0;
|
||||
// check if the all of the _numAvatarAudioBufferSamples to be sent are silence
|
||||
for (int i = 0; i < numAvailableSamples; ++i) {
|
||||
if (nextSoundOutput[i] != 0) {
|
||||
silentFrame = false;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
auto audioPacket = NLPacket::create(silentFrame
|
||||
_numAvatarSoundSentBytes += numAvailableBytes;
|
||||
if (_numAvatarSoundSentBytes == soundByteArray.size()) {
|
||||
// we're done with this sound object - so set our pointer back to NULL
|
||||
// and our sent bytes back to zero
|
||||
_avatarSound.clear();
|
||||
_numAvatarSoundSentBytes = 0;
|
||||
_flushEncoder = true;
|
||||
}
|
||||
}
|
||||
|
||||
auto audioPacket = NLPacket::create(silentFrame && !_flushEncoder
|
||||
? PacketType::SilentAudioFrame
|
||||
: PacketType::MicrophoneAudioNoEcho);
|
||||
|
||||
// seek past the sequence number, will be packed when destination node is known
|
||||
audioPacket->seek(sizeof(quint16));
|
||||
// seek past the sequence number, will be packed when destination node is known
|
||||
audioPacket->seek(sizeof(quint16));
|
||||
|
||||
if (silentFrame) {
|
||||
if (!_isListeningToAudioStream) {
|
||||
// if we have a silent frame and we're not listening then just send nothing and break out of here
|
||||
return;
|
||||
}
|
||||
|
||||
// write the number of silent samples so the audio-mixer can uphold timing
|
||||
audioPacket->writePrimitive(SCRIPT_AUDIO_BUFFER_SAMPLES);
|
||||
|
||||
// use the orientation and position of this avatar for the source of this audio
|
||||
audioPacket->writePrimitive(scriptedAvatar->getPosition());
|
||||
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
|
||||
audioPacket->writePrimitive(headOrientation);
|
||||
|
||||
} else if (nextSoundOutput) {
|
||||
// write the codec
|
||||
QString codecName;
|
||||
audioPacket->writeString(codecName);
|
||||
|
||||
// assume scripted avatar audio is mono and set channel flag to zero
|
||||
audioPacket->writePrimitive((quint8)0);
|
||||
|
||||
// use the orientation and position of this avatar for the source of this audio
|
||||
audioPacket->writePrimitive(scriptedAvatar->getPosition());
|
||||
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
|
||||
audioPacket->writePrimitive(headOrientation);
|
||||
|
||||
// write the raw audio data
|
||||
audioPacket->write(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples * sizeof(int16_t));
|
||||
if (silentFrame) {
|
||||
if (!_isListeningToAudioStream) {
|
||||
// if we have a silent frame and we're not listening then just send nothing and break out of here
|
||||
return;
|
||||
}
|
||||
|
||||
// write audio packet to AudioMixer nodes
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node){
|
||||
// only send to nodes of type AudioMixer
|
||||
if (node->getType() == NodeType::AudioMixer) {
|
||||
// pack sequence number
|
||||
quint16 sequence = _outgoingScriptAudioSequenceNumbers[node->getUUID()]++;
|
||||
audioPacket->seek(0);
|
||||
audioPacket->writePrimitive(sequence);
|
||||
// write the codec
|
||||
audioPacket->writeString(_selectedCodecName);
|
||||
|
||||
// write the number of silent samples so the audio-mixer can uphold timing
|
||||
audioPacket->writePrimitive(numAvailableSamples);
|
||||
|
||||
// send audio packet
|
||||
nodeList->sendUnreliablePacket(*audioPacket, *node);
|
||||
// use the orientation and position of this avatar for the source of this audio
|
||||
audioPacket->writePrimitive(scriptedAvatar->getPosition());
|
||||
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
|
||||
audioPacket->writePrimitive(headOrientation);
|
||||
} else if (nextSoundOutput) {
|
||||
|
||||
// write the codec
|
||||
audioPacket->writeString(_selectedCodecName);
|
||||
|
||||
// assume scripted avatar audio is mono and set channel flag to zero
|
||||
audioPacket->writePrimitive((quint8)0);
|
||||
|
||||
// use the orientation and position of this avatar for the source of this audio
|
||||
audioPacket->writePrimitive(scriptedAvatar->getPosition());
|
||||
glm::quat headOrientation = scriptedAvatar->getHeadOrientation();
|
||||
audioPacket->writePrimitive(headOrientation);
|
||||
|
||||
QByteArray encodedBuffer;
|
||||
if (_flushEncoder) {
|
||||
encodeFrameOfZeros(encodedBuffer);
|
||||
} else {
|
||||
QByteArray decodedBuffer(reinterpret_cast<const char*>(nextSoundOutput), numAvailableSamples*sizeof(int16_t));
|
||||
if (_encoder) {
|
||||
// encode it
|
||||
_encoder->encode(decodedBuffer, encodedBuffer);
|
||||
} else {
|
||||
encodedBuffer = decodedBuffer;
|
||||
}
|
||||
});
|
||||
}
|
||||
audioPacket->write(encodedBuffer.constData(), encodedBuffer.size());
|
||||
}
|
||||
|
||||
// write audio packet to AudioMixer nodes
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
nodeList->eachNode([this, &nodeList, &audioPacket](const SharedNodePointer& node) {
|
||||
// only send to nodes of type AudioMixer
|
||||
if (node->getType() == NodeType::AudioMixer) {
|
||||
// pack sequence number
|
||||
quint16 sequence = _outgoingScriptAudioSequenceNumbers[node->getUUID()]++;
|
||||
audioPacket->seek(0);
|
||||
audioPacket->writePrimitive(sequence);
|
||||
// send audio packet
|
||||
nodeList->sendUnreliablePacket(*audioPacket, *node);
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -498,7 +600,17 @@ void Agent::aboutToFinish() {
|
|||
DependencyManager::get<EntityScriptingInterface>()->setEntityTree(nullptr);
|
||||
|
||||
ResourceManager::cleanup();
|
||||
|
||||
|
||||
// cleanup the AudioInjectorManager (and any still running injectors)
|
||||
DependencyManager::destroy<AudioInjectorManager>();
|
||||
DependencyManager::destroy<ScriptEngines>();
|
||||
|
||||
emit stopAvatarAudioTimer();
|
||||
_avatarAudioTimerThread.quit();
|
||||
|
||||
// cleanup codec & encoder
|
||||
if (_codec && _encoder) {
|
||||
_codec->releaseEncoder(_encoder);
|
||||
_encoder = nullptr;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -18,6 +18,7 @@
|
|||
#include <QtScript/QScriptEngine>
|
||||
#include <QtCore/QObject>
|
||||
#include <QtCore/QUrl>
|
||||
#include <QtCore/QTimer>
|
||||
#include <QUuid>
|
||||
|
||||
#include <EntityEditPacketSender.h>
|
||||
|
@ -26,8 +27,9 @@
|
|||
#include <ScriptEngine.h>
|
||||
#include <ThreadedAssignment.h>
|
||||
|
||||
#include "MixedAudioStream.h"
|
||||
#include <plugins/CodecPlugin.h>
|
||||
|
||||
#include "MixedAudioStream.h"
|
||||
|
||||
class Agent : public ThreadedAssignment {
|
||||
Q_OBJECT
|
||||
|
@ -56,7 +58,7 @@ public:
|
|||
|
||||
public slots:
|
||||
void run() override;
|
||||
void playAvatarSound(SharedSoundPointer avatarSound) { setAvatarSound(avatarSound); }
|
||||
void playAvatarSound(SharedSoundPointer avatarSound);
|
||||
|
||||
private slots:
|
||||
void requestScript();
|
||||
|
@ -66,12 +68,21 @@ private slots:
|
|||
void handleAudioPacket(QSharedPointer<ReceivedMessage> message);
|
||||
void handleOctreePacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
|
||||
void handleJurisdictionPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode);
|
||||
|
||||
void processAgentAvatarAndAudio(float deltaTime);
|
||||
void handleSelectedAudioFormat(QSharedPointer<ReceivedMessage> message);
|
||||
|
||||
void nodeActivated(SharedNodePointer activatedNode);
|
||||
|
||||
void processAgentAvatar();
|
||||
void processAgentAvatarAudio();
|
||||
|
||||
signals:
|
||||
void startAvatarAudioTimer();
|
||||
void stopAvatarAudioTimer();
|
||||
private:
|
||||
void negotiateAudioFormat();
|
||||
void selectAudioFormat(const QString& selectedCodecName);
|
||||
void encodeFrameOfZeros(QByteArray& encodedZeros);
|
||||
|
||||
std::unique_ptr<ScriptEngine> _scriptEngine;
|
||||
EntityEditPacketSender _entityEditSender;
|
||||
EntityTreeHeadlessViewer _entityViewer;
|
||||
|
@ -92,7 +103,12 @@ private:
|
|||
bool _isAvatar = false;
|
||||
QTimer* _avatarIdentityTimer = nullptr;
|
||||
QHash<QUuid, quint16> _outgoingScriptAudioSequenceNumbers;
|
||||
|
||||
|
||||
CodecPluginPointer _codec;
|
||||
QString _selectedCodecName;
|
||||
Encoder* _encoder { nullptr };
|
||||
QThread _avatarAudioTimerThread;
|
||||
bool _flushEncoder { false };
|
||||
};
|
||||
|
||||
#endif // hifi_Agent_h
|
||||
|
|
|
@ -31,7 +31,6 @@
|
|||
#include <ShutdownEventListener.h>
|
||||
#include <SoundCache.h>
|
||||
#include <ResourceScriptingInterface.h>
|
||||
#include <ScriptEngines.h>
|
||||
|
||||
#include "AssignmentFactory.h"
|
||||
#include "AssignmentActionFactory.h"
|
||||
|
@ -53,10 +52,9 @@ AssignmentClient::AssignmentClient(Assignment::Type requestAssignmentType, QStri
|
|||
QSettings::setDefaultFormat(QSettings::IniFormat);
|
||||
|
||||
DependencyManager::set<AccountManager>();
|
||||
|
||||
|
||||
auto scriptableAvatar = DependencyManager::set<ScriptableAvatar>();
|
||||
auto addressManager = DependencyManager::set<AddressManager>();
|
||||
auto scriptEngines = DependencyManager::set<ScriptEngines>();
|
||||
|
||||
// create a NodeList as an unassigned client, must be after addressManager
|
||||
auto nodeList = DependencyManager::set<NodeList>(NodeType::Unassigned, listenPort);
|
||||
|
@ -178,8 +176,6 @@ AssignmentClient::~AssignmentClient() {
|
|||
void AssignmentClient::aboutToQuit() {
|
||||
stopAssignmentClient();
|
||||
|
||||
DependencyManager::destroy<ScriptEngines>();
|
||||
|
||||
// clear the log handler so that Qt doesn't call the destructor on LogHandler
|
||||
qInstallMessageHandler(0);
|
||||
}
|
||||
|
|
37
assignment-client/src/AvatarAudioTimer.cpp
Normal file
|
@ -0,0 +1,37 @@
|
|||
//
|
||||
// AvatarAudioTimer.cpp
|
||||
// assignment-client/src
|
||||
//
|
||||
// Created by David Kelly on 10/12/13.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include <QDebug>
|
||||
#include <SharedUtil.h>
|
||||
#include "AvatarAudioTimer.h"
|
||||
|
||||
// this should send a signal every 10ms, with pretty good precision. Hardcoding
|
||||
// to 10ms since that's what you'd want for audio.
|
||||
void AvatarAudioTimer::start() {
|
||||
qDebug() << __FUNCTION__;
|
||||
auto startTime = usecTimestampNow();
|
||||
quint64 frameCounter = 0;
|
||||
const int TARGET_INTERVAL_USEC = 10000; // 10ms
|
||||
while (!_quit) {
|
||||
++frameCounter;
|
||||
|
||||
// tick every 10ms from startTime
|
||||
quint64 targetTime = startTime + frameCounter * TARGET_INTERVAL_USEC;
|
||||
quint64 now = usecTimestampNow();
|
||||
|
||||
// avoid quint64 underflow
|
||||
if (now < targetTime) {
|
||||
usleep(targetTime - now);
|
||||
}
|
||||
|
||||
emit avatarTick();
|
||||
}
|
||||
qDebug() << "AvatarAudioTimer is finished";
|
||||
}
|
31
assignment-client/src/AvatarAudioTimer.h
Normal file
|
@ -0,0 +1,31 @@
|
|||
//
|
||||
// AvatarAudioTimer.h
|
||||
// assignment-client/src
|
||||
//
|
||||
// Created by David Kelly on 10/12/13.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_AvatarAudioTimer_h
|
||||
#define hifi_AvatarAudioTimer_h
|
||||
|
||||
#include <QtCore/QObject>
|
||||
|
||||
class AvatarAudioTimer : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
signals:
|
||||
void avatarTick();
|
||||
|
||||
public slots:
|
||||
void start();
|
||||
void stop() { _quit = true; }
|
||||
|
||||
private:
|
||||
bool _quit { false };
|
||||
};
|
||||
|
||||
#endif //hifi_AvatarAudioTimer_h
|
|
@ -453,7 +453,7 @@ bool AssetServer::loadMappingsFromFile() {
|
|||
while (it != _fileMappings.end()) {
|
||||
bool shouldDrop = false;
|
||||
|
||||
if (!isValidPath(it.key())) {
|
||||
if (!isValidFilePath(it.key())) {
|
||||
qWarning() << "Will not keep mapping for" << it.key() << "since it is not a valid path.";
|
||||
shouldDrop = true;
|
||||
}
|
||||
|
@ -508,7 +508,7 @@ bool AssetServer::writeMappingsToFile() {
|
|||
bool AssetServer::setMapping(AssetPath path, AssetHash hash) {
|
||||
path = path.trimmed();
|
||||
|
||||
if (!isValidPath(path)) {
|
||||
if (!isValidFilePath(path)) {
|
||||
qWarning() << "Cannot set a mapping for invalid path:" << path << "=>" << hash;
|
||||
return false;
|
||||
}
|
||||
|
@ -637,8 +637,8 @@ bool AssetServer::renameMapping(AssetPath oldPath, AssetPath newPath) {
|
|||
oldPath = oldPath.trimmed();
|
||||
newPath = newPath.trimmed();
|
||||
|
||||
if (!isValidPath(oldPath) || !isValidPath(newPath)) {
|
||||
qWarning() << "Cannot perform rename with invalid paths - both should have leading forward slashes:"
|
||||
if (!isValidFilePath(oldPath) || !isValidFilePath(newPath)) {
|
||||
qWarning() << "Cannot perform rename with invalid paths - both should have leading forward and no ending slashes:"
|
||||
<< oldPath << "=>" << newPath;
|
||||
|
||||
return false;
|
||||
|
|
|
@ -90,8 +90,8 @@ AudioMixer::AudioMixer(ReceivedMessage& message) :
|
|||
PacketType::InjectAudio, PacketType::SilentAudioFrame,
|
||||
PacketType::AudioStreamStats },
|
||||
this, "handleNodeAudioPacket");
|
||||
packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
|
||||
packetReceiver.registerListener(PacketType::NegotiateAudioFormat, this, "handleNegotiateAudioFormat");
|
||||
packetReceiver.registerListener(PacketType::MuteEnvironment, this, "handleMuteEnvironmentPacket");
|
||||
packetReceiver.registerListener(PacketType::NodeIgnoreRequest, this, "handleNodeIgnoreRequestPacket");
|
||||
|
||||
connect(nodeList.data(), &NodeList::nodeKilled, this, &AudioMixer::handleNodeKilled);
|
||||
|
@ -481,6 +481,7 @@ void AudioMixer::sendAudioEnvironmentPacket(SharedNodePointer node) {
|
|||
}
|
||||
|
||||
void AudioMixer::handleNodeAudioPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer sendingNode) {
|
||||
getOrCreateClientData(sendingNode.data());
|
||||
DependencyManager::get<NodeList>()->updateNodeWithDataFromPacket(message, sendingNode);
|
||||
}
|
||||
|
||||
|
@ -579,18 +580,8 @@ void AudioMixer::handleNegotiateAudioFormat(QSharedPointer<ReceivedMessage> mess
|
|||
}
|
||||
}
|
||||
|
||||
auto clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
|
||||
|
||||
// FIXME - why would we not have client data at this point??
|
||||
if (!clientData) {
|
||||
qDebug() << "UNEXPECTED -- didn't have node linked data in " << __FUNCTION__;
|
||||
sendingNode->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(sendingNode->getUUID()) });
|
||||
clientData = dynamic_cast<AudioMixerClientData*>(sendingNode->getLinkedData());
|
||||
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
|
||||
}
|
||||
|
||||
auto clientData = getOrCreateClientData(sendingNode.data());
|
||||
clientData->setupCodec(selectedCodec, selectedCodecName);
|
||||
|
||||
qDebug() << "selectedCodecName:" << selectedCodecName;
|
||||
clientData->sendSelectAudioFormat(sendingNode, selectedCodecName);
|
||||
}
|
||||
|
@ -636,13 +627,18 @@ QString AudioMixer::percentageForMixStats(int counter) {
|
|||
}
|
||||
|
||||
void AudioMixer::sendStatsPacket() {
|
||||
static QJsonObject statsObject;
|
||||
QJsonObject statsObject;
|
||||
|
||||
if (_numStatFrames == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
statsObject["useDynamicJitterBuffers"] = _numStaticJitterFrames == -1;
|
||||
statsObject["trailing_sleep_percentage"] = _trailingSleepRatio * 100.0f;
|
||||
statsObject["performance_throttling_ratio"] = _performanceThrottlingRatio;
|
||||
|
||||
statsObject["avg_listeners_per_frame"] = (float) _sumListeners / (float) _numStatFrames;
|
||||
statsObject["avg_streams_per_frame"] = (float)_sumStreams / (float)_numStatFrames;
|
||||
statsObject["avg_listeners_per_frame"] = (float)_sumListeners / (float)_numStatFrames;
|
||||
|
||||
QJsonObject mixStats;
|
||||
mixStats["%_hrtf_mixes"] = percentageForMixStats(_hrtfRenders);
|
||||
|
@ -656,6 +652,7 @@ void AudioMixer::sendStatsPacket() {
|
|||
|
||||
statsObject["mix_stats"] = mixStats;
|
||||
|
||||
_sumStreams = 0;
|
||||
_sumListeners = 0;
|
||||
_hrtfRenders = 0;
|
||||
_hrtfSilentRenders = 0;
|
||||
|
@ -703,17 +700,24 @@ void AudioMixer::run() {
|
|||
ThreadedAssignment::commonInit(AUDIO_MIXER_LOGGING_TARGET_NAME, NodeType::AudioMixer);
|
||||
}
|
||||
|
||||
AudioMixerClientData* AudioMixer::getOrCreateClientData(Node* node) {
|
||||
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
|
||||
if (!clientData) {
|
||||
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
|
||||
clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
|
||||
}
|
||||
|
||||
return clientData;
|
||||
}
|
||||
|
||||
void AudioMixer::domainSettingsRequestComplete() {
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
nodeList->addNodeTypeToInterestSet(NodeType::Agent);
|
||||
|
||||
nodeList->linkedDataCreateCallback = [&](Node* node) {
|
||||
node->setLinkedData(std::unique_ptr<NodeData> { new AudioMixerClientData(node->getUUID()) });
|
||||
auto clientData = dynamic_cast<AudioMixerClientData*>(node->getLinkedData());
|
||||
|
||||
connect(clientData, &AudioMixerClientData::injectorStreamFinished, this, &AudioMixer::removeHRTFsForFinishedInjector);
|
||||
};
|
||||
nodeList->linkedDataCreateCallback = [&](Node* node) { getOrCreateClientData(node); };
|
||||
|
||||
DomainHandler& domainHandler = nodeList->getDomainHandler();
|
||||
const QJsonObject& settingsObject = domainHandler.getSettingsObject();
|
||||
|
@ -726,79 +730,71 @@ void AudioMixer::domainSettingsRequestComplete() {
|
|||
}
|
||||
|
||||
void AudioMixer::broadcastMixes() {
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
auto nextFrameTimestamp = p_high_resolution_clock::now();
|
||||
auto timeToSleep = std::chrono::microseconds(0);
|
||||
|
||||
const int TRAILING_AVERAGE_FRAMES = 100;
|
||||
int currentFrame = 1;
|
||||
int numFramesPerSecond = (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC);
|
||||
int framesSinceCutoffEvent = TRAILING_AVERAGE_FRAMES;
|
||||
|
||||
int currentFrame { 1 };
|
||||
int numFramesPerSecond { (int) ceil(AudioConstants::NETWORK_FRAMES_PER_SEC) };
|
||||
|
||||
while (!_isFinished) {
|
||||
const float STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.10f;
|
||||
const float BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD = 0.20f;
|
||||
// manage mixer load
|
||||
{
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio) +
|
||||
// ratio of frame spent sleeping / total frame time
|
||||
((CURRENT_FRAME_RATIO * timeToSleep.count()) / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
const float RATIO_BACK_OFF = 0.02f;
|
||||
bool hasRatioChanged = false;
|
||||
|
||||
const float CURRENT_FRAME_RATIO = 1.0f / TRAILING_AVERAGE_FRAMES;
|
||||
const float PREVIOUS_FRAMES_RATIO = 1.0f - CURRENT_FRAME_RATIO;
|
||||
|
||||
if (timeToSleep.count() < 0) {
|
||||
timeToSleep = std::chrono::microseconds(0);
|
||||
}
|
||||
|
||||
_trailingSleepRatio = (PREVIOUS_FRAMES_RATIO * _trailingSleepRatio)
|
||||
+ (timeToSleep.count() * CURRENT_FRAME_RATIO / (float) AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
float lastCutoffRatio = _performanceThrottlingRatio;
|
||||
bool hasRatioChanged = false;
|
||||
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
// we're struggling - change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
|
||||
qDebug() << "Mixer is struggling, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
// we've recovered and can back off the required loudness
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio - RATIO_BACK_OFF;
|
||||
|
||||
if (_performanceThrottlingRatio < 0) {
|
||||
_performanceThrottlingRatio = 0;
|
||||
if (framesSinceCutoffEvent >= TRAILING_AVERAGE_FRAMES) {
|
||||
if (_trailingSleepRatio <= STRUGGLE_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD) {
|
||||
qDebug() << "Mixer is struggling";
|
||||
// change our min required loudness to reduce some load
|
||||
_performanceThrottlingRatio = _performanceThrottlingRatio + (0.5f * (1.0f - _performanceThrottlingRatio));
|
||||
hasRatioChanged = true;
|
||||
} else if (_trailingSleepRatio >= BACK_OFF_TRIGGER_SLEEP_PERCENTAGE_THRESHOLD && _performanceThrottlingRatio != 0) {
|
||||
qDebug() << "Mixer is recovering";
|
||||
// back off the required loudness
|
||||
_performanceThrottlingRatio = std::max(0.0f, _performanceThrottlingRatio - RATIO_BACK_OFF);
|
||||
hasRatioChanged = true;
|
||||
}
|
||||
|
||||
qDebug() << "Mixer is recovering, sleeping" << _trailingSleepRatio * 100 << "% of frame time. Old cutoff was"
|
||||
<< lastCutoffRatio << "and is now" << _performanceThrottlingRatio;
|
||||
hasRatioChanged = true;
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
framesSinceCutoffEvent = 0;
|
||||
|
||||
qDebug() << "Sleeping" << _trailingSleepRatio << "of frame";
|
||||
qDebug() << "Cutoff is" << _performanceThrottlingRatio;
|
||||
qDebug() << "Minimum audibility to be mixed is" << _minAudibilityThreshold;
|
||||
}
|
||||
}
|
||||
|
||||
if (hasRatioChanged) {
|
||||
// set out min audability threshold from the new ratio
|
||||
_minAudibilityThreshold = LOUDNESS_TO_DISTANCE_RATIO / (2.0f * (1.0f - _performanceThrottlingRatio));
|
||||
qDebug() << "Minimum audability required to be mixed is now" << _minAudibilityThreshold;
|
||||
|
||||
framesSinceCutoffEvent = 0;
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
}
|
||||
|
||||
if (!hasRatioChanged) {
|
||||
++framesSinceCutoffEvent;
|
||||
}
|
||||
|
||||
// mix
|
||||
nodeList->eachNode([&](const SharedNodePointer& node) {
|
||||
|
||||
if (node->getLinkedData()) {
|
||||
AudioMixerClientData* nodeData = (AudioMixerClientData*)node->getLinkedData();
|
||||
|
||||
// this function will attempt to pop a frame from each audio stream.
|
||||
// a pointer to the popped data is stored as a member in InboundAudioStream.
|
||||
// That's how the popped audio data will be read for mixing (but only if the pop was successful)
|
||||
nodeData->checkBuffersBeforeFrameSend();
|
||||
_sumStreams += nodeData->checkBuffersBeforeFrameSend();
|
||||
|
||||
// if the stream should be muted, send mute packet
|
||||
if (nodeData->getAvatarAudioStream()
|
||||
|
@ -814,7 +810,8 @@ void AudioMixer::broadcastMixes() {
|
|||
|
||||
std::unique_ptr<NLPacket> mixPacket;
|
||||
|
||||
if (mixHasAudio) {
|
||||
if (mixHasAudio || nodeData->shouldFlushEncoder()) {
|
||||
|
||||
int mixPacketBytes = sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE
|
||||
+ AudioConstants::NETWORK_FRAME_BYTES_STEREO;
|
||||
mixPacket = NLPacket::create(PacketType::MixedAudio, mixPacketBytes);
|
||||
|
@ -827,12 +824,17 @@ void AudioMixer::broadcastMixes() {
|
|||
QString codecInPacket = nodeData->getCodecName();
|
||||
mixPacket->writeString(codecInPacket);
|
||||
|
||||
QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
QByteArray encodedBuffer;
|
||||
nodeData->encode(decodedBuffer, encodedBuffer);
|
||||
|
||||
if (mixHasAudio) {
|
||||
QByteArray decodedBuffer(reinterpret_cast<char*>(_clampedSamples), AudioConstants::NETWORK_FRAME_BYTES_STEREO);
|
||||
nodeData->encode(decodedBuffer, encodedBuffer);
|
||||
} else {
|
||||
// time to flush, which resets the shouldFlush until next time we encode something
|
||||
nodeData->encodeFrameOfZeros(encodedBuffer);
|
||||
}
|
||||
// pack mixed audio samples
|
||||
mixPacket->write(encodedBuffer.constData(), encodedBuffer.size());
|
||||
|
||||
} else {
|
||||
int silentPacketBytes = sizeof(quint16) + sizeof(quint16) + AudioConstants::MAX_CODEC_NAME_LENGTH_ON_WIRE;
|
||||
mixPacket = NLPacket::create(PacketType::SilentAudioFrame, silentPacketBytes);
|
||||
|
@ -872,24 +874,32 @@ void AudioMixer::broadcastMixes() {
|
|||
|
||||
++_numStatFrames;
|
||||
|
||||
// since we're a while loop we need to help Qt's event processing
|
||||
QCoreApplication::processEvents();
|
||||
// play nice with qt event-looping
|
||||
{
|
||||
// since we're a while loop we need to help qt's event processing
|
||||
QCoreApplication::processEvents();
|
||||
|
||||
if (_isFinished) {
|
||||
// at this point the audio-mixer is done
|
||||
// check if we have a deferred delete event to process (which we should once finished)
|
||||
QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
|
||||
break;
|
||||
if (_isFinished) {
|
||||
// alert qt that this is finished
|
||||
QCoreApplication::sendPostedEvents(this, QEvent::DeferredDelete);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// push the next frame timestamp to when we should send the next
|
||||
nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
|
||||
// sleep until the next frame, if necessary
|
||||
{
|
||||
nextFrameTimestamp += std::chrono::microseconds(AudioConstants::NETWORK_FRAME_USECS);
|
||||
|
||||
// sleep as long as we need until next frame, if we can
|
||||
auto now = p_high_resolution_clock::now();
|
||||
timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);
|
||||
auto now = p_high_resolution_clock::now();
|
||||
timeToSleep = std::chrono::duration_cast<std::chrono::microseconds>(nextFrameTimestamp - now);
|
||||
|
||||
std::this_thread::sleep_for(timeToSleep);
|
||||
if (timeToSleep.count() < 0) {
|
||||
nextFrameTimestamp = now;
|
||||
timeToSleep = std::chrono::microseconds(0);
|
||||
}
|
||||
|
||||
std::this_thread::sleep_for(timeToSleep);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -52,6 +52,7 @@ private slots:
|
|||
void removeHRTFsForFinishedInjector(const QUuid& streamID);
|
||||
|
||||
private:
|
||||
AudioMixerClientData* getOrCreateClientData(Node* node);
|
||||
void domainSettingsRequestComplete();
|
||||
|
||||
/// adds one stream to the mix for a listening node
|
||||
|
@ -85,6 +86,7 @@ private:
|
|||
float _attenuationPerDoublingInDistance;
|
||||
float _noiseMutingThreshold;
|
||||
int _numStatFrames { 0 };
|
||||
int _sumStreams { 0 };
|
||||
int _sumListeners { 0 };
|
||||
int _hrtfRenders { 0 };
|
||||
int _hrtfSilentRenders { 0 };
|
||||
|
|
|
@ -49,7 +49,7 @@ AudioMixerClientData::~AudioMixerClientData() {
|
|||
|
||||
AvatarAudioStream* AudioMixerClientData::getAvatarAudioStream() {
|
||||
QReadLocker readLocker { &_streamsLock };
|
||||
|
||||
|
||||
auto it = _audioStreams.find(QUuid());
|
||||
if (it != _audioStreams.end()) {
|
||||
return dynamic_cast<AvatarAudioStream*>(it->second.get());
|
||||
|
@ -75,7 +75,7 @@ void AudioMixerClientData::removeHRTFForStream(const QUuid& nodeID, const QUuid&
|
|||
|
||||
int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
||||
PacketType packetType = message.getType();
|
||||
|
||||
|
||||
if (packetType == PacketType::AudioStreamStats) {
|
||||
|
||||
// skip over header, appendFlag, and num stats packed
|
||||
|
@ -180,7 +180,7 @@ int AudioMixerClientData::parseData(ReceivedMessage& message) {
|
|||
return 0;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
int AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
||||
QWriteLocker writeLocker { &_streamsLock };
|
||||
|
||||
auto it = _audioStreams.begin();
|
||||
|
@ -208,6 +208,8 @@ void AudioMixerClientData::checkBuffersBeforeFrameSend() {
|
|||
++it;
|
||||
}
|
||||
}
|
||||
|
||||
return (int)_audioStreams.size();
|
||||
}
|
||||
|
||||
bool AudioMixerClientData::shouldSendStats(int frameNumber) {
|
||||
|
@ -218,11 +220,10 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
|
|||
|
||||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
|
||||
// The append flag is a boolean value that will be packed right after the header. The first packet sent
|
||||
// inside this method will have 0 for this flag, while every subsequent packet will have 1 for this flag.
|
||||
// The sole purpose of this flag is so the client can clear its map of injected audio stream stats when
|
||||
// it receives a packet with an appendFlag of 0. This prevents the buildup of dead audio stream stats in the client.
|
||||
quint8 appendFlag = 0;
|
||||
// The append flag is a boolean value that will be packed right after the header.
|
||||
// This flag allows the client to know when it has received all stats packets, so it can group any downstream effects,
|
||||
// and clear its cache of injector stream stats; it helps to prevent buildup of dead audio stream stats in the client.
|
||||
quint8 appendFlag = AudioStreamStats::START;
|
||||
|
||||
auto streamsCopy = getAudioStreams();
|
||||
|
||||
|
@ -233,14 +234,21 @@ void AudioMixerClientData::sendAudioStreamStatsPackets(const SharedNodePointer&
|
|||
while (numStreamStatsRemaining > 0) {
|
||||
auto statsPacket = NLPacket::create(PacketType::AudioStreamStats);
|
||||
|
||||
// pack the append flag in this packet
|
||||
statsPacket->writePrimitive(appendFlag);
|
||||
appendFlag = 1;
|
||||
|
||||
int numStreamStatsRoomFor = (int)(statsPacket->size() - sizeof(quint8) - sizeof(quint16)) / sizeof(AudioStreamStats);
|
||||
|
||||
// calculate and pack the number of stream stats to follow
|
||||
// calculate the number of stream stats to follow
|
||||
quint16 numStreamStatsToPack = std::min(numStreamStatsRemaining, numStreamStatsRoomFor);
|
||||
|
||||
// is this the terminal packet?
|
||||
if (numStreamStatsRemaining <= numStreamStatsToPack) {
|
||||
appendFlag |= AudioStreamStats::END;
|
||||
}
|
||||
|
||||
// pack the append flag in this packet
|
||||
statsPacket->writePrimitive(appendFlag);
|
||||
appendFlag = 0;
|
||||
|
||||
// pack the number of stream stats to follow
|
||||
statsPacket->writePrimitive(numStreamStatsToPack);
|
||||
|
||||
// pack the calculated number of stream stats
|
||||
|
@ -349,7 +357,10 @@ QJsonObject AudioMixerClientData::getAudioStreamStats() {
|
|||
}
|
||||
|
||||
void AudioMixerClientData::handleMismatchAudioFormat(SharedNodePointer node, const QString& currentCodec, const QString& recievedCodec) {
|
||||
qDebug() << __FUNCTION__ << "sendingNode:" << *node << "currentCodec:" << currentCodec << "recievedCodec:" << recievedCodec;
|
||||
qDebug() << __FUNCTION__ <<
|
||||
"sendingNode:" << *node <<
|
||||
"currentCodec:" << currentCodec <<
|
||||
"receivedCodec:" << recievedCodec;
|
||||
sendSelectAudioFormat(node, currentCodec);
|
||||
}
|
||||
|
||||
|
@ -360,6 +371,17 @@ void AudioMixerClientData::sendSelectAudioFormat(SharedNodePointer node, const Q
|
|||
nodeList->sendPacket(std::move(replyPacket), *node);
|
||||
}
|
||||
|
||||
void AudioMixerClientData::encodeFrameOfZeros(QByteArray& encodedZeros) {
|
||||
static QByteArray zeros(AudioConstants::NETWORK_FRAME_BYTES_STEREO, 0);
|
||||
if (_shouldFlushEncoder) {
|
||||
if (_encoder) {
|
||||
_encoder->encode(zeros, encodedZeros);
|
||||
} else {
|
||||
encodedZeros = zeros;
|
||||
}
|
||||
}
|
||||
_shouldFlushEncoder = false;
|
||||
}
|
||||
|
||||
void AudioMixerClientData::setupCodec(CodecPluginPointer codec, const QString& codecName) {
|
||||
cleanupCodec(); // cleanup any previously allocated coders first
|
||||
|
|
|
@ -52,7 +52,8 @@ public:
|
|||
|
||||
int parseData(ReceivedMessage& message) override;
|
||||
|
||||
void checkBuffersBeforeFrameSend();
|
||||
// attempt to pop a frame from each audio stream, and return the number of streams from this client
|
||||
int checkBuffersBeforeFrameSend();
|
||||
|
||||
void removeDeadInjectedStreams();
|
||||
|
||||
|
@ -76,7 +77,11 @@ public:
|
|||
} else {
|
||||
encodedBuffer = decodedBuffer;
|
||||
}
|
||||
// once you have encoded, you need to flush eventually.
|
||||
_shouldFlushEncoder = true;
|
||||
}
|
||||
void encodeFrameOfZeros(QByteArray& encodedZeros);
|
||||
bool shouldFlushEncoder() { return _shouldFlushEncoder; }
|
||||
|
||||
QString getCodecName() { return _selectedCodecName; }
|
||||
|
||||
|
@ -105,6 +110,8 @@ private:
|
|||
QString _selectedCodecName;
|
||||
Encoder* _encoder{ nullptr }; // for outbound mixed stream
|
||||
Decoder* _decoder{ nullptr }; // for mic stream
|
||||
|
||||
bool _shouldFlushEncoder { false };
|
||||
};
|
||||
|
||||
#endif // hifi_AudioMixerClientData_h
|
||||
|
|
|
@ -46,14 +46,21 @@ AnimationDetails ScriptableAvatar::getAnimationDetails() {
|
|||
return _animationDetails;
|
||||
}
|
||||
|
||||
void ScriptableAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
|
||||
_bind.reset();
|
||||
_animSkeleton.reset();
|
||||
AvatarData::setSkeletonModelURL(skeletonModelURL);
|
||||
}
|
||||
void ScriptableAvatar::update(float deltatime) {
|
||||
if (_bind.isNull() && !_skeletonFBXURL.isEmpty()) { // AvatarData will parse the .fst, but not get the .fbx skeleton.
|
||||
_bind = DependencyManager::get<AnimationCache>()->getAnimation(_skeletonFBXURL);
|
||||
}
|
||||
|
||||
// Run animation
|
||||
if (_animation && _animation->isLoaded() && _animation->getFrames().size() > 0 && _bind->isLoaded()) {
|
||||
|
||||
if (_animation && _animation->isLoaded() && _animation->getFrames().size() > 0 && !_bind.isNull() && _bind->isLoaded()) {
|
||||
if (!_animSkeleton) {
|
||||
_animSkeleton = std::make_shared<AnimSkeleton>(_bind->getGeometry());
|
||||
}
|
||||
float currentFrame = _animationDetails.currentFrame + deltatime * _animationDetails.fps;
|
||||
if (_animationDetails.loop || currentFrame < _animationDetails.lastFrame) {
|
||||
while (currentFrame >= _animationDetails.lastFrame) {
|
||||
|
@ -64,14 +71,16 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
const QVector<FBXJoint>& modelJoints = _bind->getGeometry().joints;
|
||||
QStringList animationJointNames = _animation->getJointNames();
|
||||
|
||||
if (_jointData.size() != modelJoints.size()) {
|
||||
_jointData.resize(modelJoints.size());
|
||||
const int nJoints = modelJoints.size();
|
||||
if (_jointData.size() != nJoints) {
|
||||
_jointData.resize(nJoints);
|
||||
}
|
||||
|
||||
const int frameCount = _animation->getFrames().size();
|
||||
const FBXAnimationFrame& floorFrame = _animation->getFrames().at((int)glm::floor(currentFrame) % frameCount);
|
||||
const FBXAnimationFrame& ceilFrame = _animation->getFrames().at((int)glm::ceil(currentFrame) % frameCount);
|
||||
const float frameFraction = glm::fract(currentFrame);
|
||||
std::vector<AnimPose> poses = _animSkeleton->getRelativeDefaultPoses();
|
||||
|
||||
for (int i = 0; i < animationJointNames.size(); i++) {
|
||||
const QString& name = animationJointNames[i];
|
||||
|
@ -79,18 +88,21 @@ void ScriptableAvatar::update(float deltatime) {
|
|||
// trusting the .fst (which is sometimes not updated to match changes to .fbx).
|
||||
int mapping = _bind->getGeometry().getJointIndex(name);
|
||||
if (mapping != -1 && !_maskedJoints.contains(name)) {
|
||||
JointData& data = _jointData[mapping];
|
||||
|
||||
auto newRotation = modelJoints[mapping].preRotation *
|
||||
safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction);
|
||||
// We could probably do translations as in interpolation in model space (rather than the parent space that each frame is in),
|
||||
// but we don't do so for MyAvatar yet, so let's not be different here.
|
||||
if (data.rotation != newRotation) {
|
||||
data.rotation = newRotation;
|
||||
data.rotationSet = true;
|
||||
}
|
||||
// Eventually, this should probably deal with post rotations and translations, too.
|
||||
poses[mapping].rot = modelJoints[mapping].preRotation *
|
||||
safeMix(floorFrame.rotations.at(i), ceilFrame.rotations.at(i), frameFraction);;
|
||||
}
|
||||
}
|
||||
_animSkeleton->convertRelativePosesToAbsolute(poses);
|
||||
for (int i = 0; i < nJoints; i++) {
|
||||
JointData& data = _jointData[i];
|
||||
AnimPose& pose = poses[i];
|
||||
if (data.rotation != pose.rot) {
|
||||
data.rotation = pose.rot;
|
||||
data.rotationSet = true;
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
_animation.clear();
|
||||
}
|
||||
|
|
|
@ -13,6 +13,7 @@
|
|||
#define hifi_ScriptableAvatar_h
|
||||
|
||||
#include <AnimationCache.h>
|
||||
#include <AnimSkeleton.h>
|
||||
#include <AvatarData.h>
|
||||
#include <ScriptEngine.h>
|
||||
|
||||
|
@ -25,6 +26,7 @@ public:
|
|||
bool hold = false, float firstFrame = 0.0f, float lastFrame = FLT_MAX, const QStringList& maskedJoints = QStringList());
|
||||
Q_INVOKABLE void stopAnimation();
|
||||
Q_INVOKABLE AnimationDetails getAnimationDetails();
|
||||
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
|
||||
|
||||
private slots:
|
||||
void update(float deltatime);
|
||||
|
@ -34,6 +36,7 @@ private:
|
|||
AnimationDetails _animationDetails;
|
||||
QStringList _maskedJoints;
|
||||
AnimationPointer _bind; // a sleazy way to get the skeleton, given the various library/cmake dependencies
|
||||
std::shared_ptr<AnimSkeleton> _animSkeleton;
|
||||
};
|
||||
|
||||
#endif // hifi_ScriptableAvatar_h
|
4
cmake/externals/openvr/CMakeLists.txt
vendored
|
@ -7,8 +7,8 @@ string(TOUPPER ${EXTERNAL_NAME} EXTERNAL_NAME_UPPER)
|
|||
|
||||
ExternalProject_Add(
|
||||
${EXTERNAL_NAME}
|
||||
URL https://github.com/ValveSoftware/openvr/archive/v1.0.2.zip
|
||||
URL_MD5 0d1cf5f579cf092e33f34759967b7046
|
||||
URL https://github.com/ValveSoftware/openvr/archive/v1.0.3.zip
|
||||
URL_MD5 b484b12901917cc739e40389583c8b0d
|
||||
CONFIGURE_COMMAND ""
|
||||
BUILD_COMMAND ""
|
||||
INSTALL_COMMAND ""
|
||||
|
|
|
@ -14,7 +14,7 @@ endif ()
|
|||
|
||||
if (HIFI_MEMORY_DEBUGGING)
|
||||
if (UNIX)
|
||||
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -fno-omit-frame-pointer")
|
||||
SET(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fsanitize=address -U_FORTIFY_SOURCE -fno-stack-protector -fno-omit-frame-pointer")
|
||||
SET(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} -static-libasan -static-libstdc++ -fsanitize=address")
|
||||
endif (UNIX)
|
||||
endif ()
|
||||
|
|
|
@ -20,7 +20,7 @@ macro(SET_PACKAGING_PARAMETERS)
|
|||
set(RELEASE_NUMBER $ENV{RELEASE_NUMBER})
|
||||
string(TOLOWER "$ENV{BRANCH}" BUILD_BRANCH)
|
||||
set(BUILD_GLOBAL_SERVICES "DEVELOPMENT")
|
||||
set(USE_STABLE_GLOBAL_SERVICES FALSE)
|
||||
set(USE_STABLE_GLOBAL_SERVICES 0)
|
||||
|
||||
message(STATUS "The BUILD_BRANCH variable is: ${BUILD_BRANCH}")
|
||||
message(STATUS "The BRANCH environment variable is: $ENV{BRANCH}")
|
||||
|
@ -43,7 +43,7 @@ macro(SET_PACKAGING_PARAMETERS)
|
|||
if (BUILD_BRANCH STREQUAL "stable")
|
||||
message(STATUS "The RELEASE_TYPE is PRODUCTION and the BUILD_BRANCH is stable...")
|
||||
set(BUILD_GLOBAL_SERVICES "STABLE")
|
||||
set(USE_STABLE_GLOBAL_SERVICES TRUE)
|
||||
set(USE_STABLE_GLOBAL_SERVICES 1)
|
||||
endif()
|
||||
|
||||
elseif (RELEASE_TYPE STREQUAL "PR")
|
||||
|
|
|
@ -17,6 +17,12 @@ macro(SETUP_HIFI_PLUGIN)
|
|||
set(PLUGIN_PATH "plugins")
|
||||
endif()
|
||||
|
||||
if (WIN32)
|
||||
# produce PDB files for plugins as well
|
||||
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /Zi")
|
||||
set(CMAKE_SHARED_LINKER_FLAGS "${CMAKE_SHARED_LINKER_FLAGS} /DEBUG")
|
||||
endif()
|
||||
|
||||
if (CMAKE_SYSTEM_NAME MATCHES "Linux" OR CMAKE_GENERATOR STREQUAL "Unix Makefiles")
|
||||
set(PLUGIN_FULL_PATH "${CMAKE_BINARY_DIR}/interface/${PLUGIN_PATH}/")
|
||||
else()
|
||||
|
|
|
@ -863,15 +863,6 @@
|
|||
"help": "The path to the directory assets are stored in.<br/>If this path is relative, it will be relative to the application data directory.<br/>If you change this path you will need to manually copy any existing assets from the previous directory.",
|
||||
"default": "",
|
||||
"advanced": true
|
||||
},
|
||||
{
|
||||
"name": "max_bandwidth",
|
||||
"type": "double",
|
||||
"label": "Max Bandwidth Per User",
|
||||
"help": "The maximum upstream bandwidth each user can use (in Mb/s).",
|
||||
"placeholder": "10.0",
|
||||
"default": "",
|
||||
"advanced": true
|
||||
}
|
||||
]
|
||||
},
|
||||
|
|
|
@ -519,7 +519,7 @@ void DomainServer::setupNodeListAndAssignments() {
|
|||
// add whatever static assignments that have been parsed to the queue
|
||||
addStaticAssignmentsToQueue();
|
||||
|
||||
// set a custum packetVersionMatch as the verify packet operator for the udt::Socket
|
||||
// set a custom packetVersionMatch as the verify packet operator for the udt::Socket
|
||||
nodeList->setPacketFilterOperator(&DomainServer::packetVersionMatch);
|
||||
}
|
||||
|
||||
|
@ -2267,7 +2267,7 @@ void DomainServer::processPathQueryPacket(QSharedPointer<ReceivedMessage> messag
|
|||
QByteArray viewpointUTF8 = responseViewpoint.toUtf8();
|
||||
|
||||
// prepare a packet for the response
|
||||
auto pathResponsePacket = NLPacket::create(PacketType::DomainServerPathResponse);
|
||||
auto pathResponsePacket = NLPacket::create(PacketType::DomainServerPathResponse, -1, true);
|
||||
|
||||
// check the number of bytes the viewpoint is
|
||||
quint16 numViewpointBytes = viewpointUTF8.size();
|
||||
|
|
|
@ -30,7 +30,7 @@ const int PEER_SILENCE_THRESHOLD_MSECS = 5 * 1000;
|
|||
IceServer::IceServer(int argc, char* argv[]) :
|
||||
QCoreApplication(argc, argv),
|
||||
_id(QUuid::createUuid()),
|
||||
_serverSocket(),
|
||||
_serverSocket(0, false),
|
||||
_activePeers()
|
||||
{
|
||||
// start the ice-server socket
|
||||
|
|
|
@ -41,8 +41,10 @@ endif ()
|
|||
|
||||
if (ANDROID)
|
||||
set(PLATFORM_QT_COMPONENTS AndroidExtras)
|
||||
set(PLATFORM_QT_LIBRARIES Qt5::AndroidExtras)
|
||||
else ()
|
||||
set(PLATFORM_QT_COMPONENTS WebEngine WebEngineWidgets)
|
||||
set(PLATFORM_QT_LIBRARIES Qt5::WebEngine Qt5::WebEngineWidgets)
|
||||
endif ()
|
||||
|
||||
find_package(
|
||||
|
@ -244,7 +246,8 @@ target_link_libraries(
|
|||
${TARGET_NAME}
|
||||
Qt5::Gui Qt5::Network Qt5::Multimedia Qt5::OpenGL
|
||||
Qt5::Qml Qt5::Quick Qt5::Script Qt5::Svg
|
||||
Qt5::WebChannel Qt5::WebEngine
|
||||
Qt5::WebChannel Qt5::WebEngine
|
||||
${PLATFORM_QT_LIBRARIES}
|
||||
)
|
||||
|
||||
if (UNIX)
|
||||
|
|
|
@ -11,43 +11,35 @@
{ "from": "OculusTouch.LY", "to": "Standard.LY",
"filters": [
{ "type": "deadZone", "min": 0.05 },
{ "type": "deadZone", "min": 0.3 },
"invert"
]
},
{ "from": "OculusTouch.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.LX" },
{ "from": "OculusTouch.LT", "to": "Standard.LTClick",
{ "from": "OculusTouch.LX", "filters": { "type": "deadZone", "min": 0.3 }, "to": "Standard.LX" },
{ "from": "OculusTouch.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "OculusTouch.LT", "to": "Standard.LT" },
{ "from": "OculusTouch.LS", "to": "Standard.LS" },
{ "from": "OculusTouch.LeftGrip", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "OculusTouch.LeftGrip", "to": "Standard.LT" },
{ "from": "OculusTouch.LeftHand", "to": "Standard.LeftHand" },
{ "from": "OculusTouch.LeftGrip", "filters": { "type": "deadZone", "min": 0.5 }, "to": "Standard.LeftGrip" },
{ "from": "OculusTouch.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },

{ "from": "OculusTouch.RY", "to": "Standard.RY",
"filters": [
{ "type": "deadZone", "min": 0.05 },
{ "type": "deadZone", "min": 0.3 },
"invert"
]
},
{ "from": "OculusTouch.RX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Standard.RX" },
{ "from": "OculusTouch.RT", "to": "Standard.RTClick",
{ "from": "OculusTouch.RX", "filters": { "type": "deadZone", "min": 0.3 }, "to": "Standard.RX" },
{ "from": "OculusTouch.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "OculusTouch.RT", "to": "Standard.RT" },
{ "from": "OculusTouch.RS", "to": "Standard.RS" },
{ "from": "OculusTouch.RightGrip", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "OculusTouch.RightGrip", "to": "Standard.RT" },
{ "from": "OculusTouch.RightHand", "to": "Standard.RightHand" },
{ "from": "OculusTouch.RightGrip", "filters": { "type": "deadZone", "min": 0.5 }, "to": "Standard.RightGrip" },
{ "from": "OculusTouch.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] },

{ "from": "OculusTouch.LeftApplicationMenu", "to": "Standard.Back" },
{ "from": "OculusTouch.RightApplicationMenu", "to": "Standard.Start" },

@ -66,4 +58,3 @@
{ "from": "OculusTouch.RightIndexPoint", "to": "Standard.RightIndexPoint" }
]
}

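The oculus_touch.json hunk above raises the stick dead zones from 0.05 to 0.3 and keeps the "invert" filter on the Y axes. As a rough illustration of what a deadZone filter does to an axis value, here is a minimal JavaScript sketch; the function names are illustrative and this is not the engine's actual filter code (some implementations also rescale the remaining range, which is not assumed here).

    // Illustrative sketch only, not the engine's filter implementation.
    // A deadZone filter suppresses small stick deflections below `min`.
    function deadZone(value, min) {
        return Math.abs(value) < min ? 0 : value;
    }

    // "invert" simply flips the sign, as used on the LY/RY channels above.
    function invert(value) {
        return -value;
    }

    console.log(deadZone(0.2, 0.3));          // 0, inside the new, larger dead zone
    console.log(invert(deadZone(0.6, 0.3)));  // -0.6, passes through and is inverted
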
@ -34,7 +34,7 @@
{ "from": "Vive.RSCenter", "to": "Standard.RightPrimaryThumb" },
{ "from": "Vive.RightApplicationMenu", "to": "Standard.RightSecondaryThumb" },

{ "from": "Vive.LeftHand", "to": "Standard.LeftHand" },
{ "from": "Vive.RightHand", "to": "Standard.RightHand" }
{ "from": "Vive.LeftHand", "to": "Standard.LeftHand", "when": [ "Application.InHMD" ] },
{ "from": "Vive.RightHand", "to": "Standard.RightHand", "when": [ "Application.InHMD" ] }
]
}
}

@ -3,6 +3,11 @@
"channels": [
{ "from": "GamePad.LY", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateZ" },
{ "from": "GamePad.LX", "filters": { "type": "deadZone", "min": 0.05 }, "to": "Actions.TranslateX" },

{ "from": "GamePad.LT", "to": "Standard.LTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "GamePad.LT", "to": "Standard.LT" },
{ "from": "GamePad.LB", "to": "Standard.LB" },
{ "from": "GamePad.LS", "to": "Standard.LS" },

@ -31,6 +36,10 @@
]
},

{ "from": "GamePad.RT", "to": "Standard.RTClick",
"peek": true,
"filters": [ { "type": "hysteresis", "min": 0.85, "max": 0.9 } ]
},
{ "from": "GamePad.RT", "to": "Standard.RT" },
{ "from": "GamePad.RB", "to": "Standard.RB" },
{ "from": "GamePad.RS", "to": "Standard.RS" },

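Both the Oculus Touch and gamepad mappings above route an analog trigger into a click channel (for example GamePad.LT into Standard.LTClick) through a hysteresis filter with min 0.85 and max 0.9, so the click engages only near full pull and does not flicker around a single threshold. Below is a minimal JavaScript sketch of that idea, with illustrative names rather than the engine's real filter class.

    // Illustrative sketch only. A hysteresis filter turns an analog value into a
    // stable boolean: it switches on above `max` and only switches off below `min`.
    function makeHysteresis(min, max) {
        var engaged = false;
        return function (value) {
            if (!engaged && value >= max) {
                engaged = true;
            } else if (engaged && value <= min) {
                engaged = false;
            }
            return engaged ? 1 : 0;
        };
    }

    var ltClick = makeHysteresis(0.85, 0.9);
    console.log(ltClick(0.87)); // 0, not yet past the 0.9 "on" threshold
    console.log(ltClick(0.95)); // 1, click engages
    console.log(ltClick(0.87)); // 1, still engaged; must drop below 0.85 to release
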
Image diff: Before Size: 94 KiB | After Size: 106 KiB
@ -11,6 +11,7 @@
var POLL_FREQUENCY = 500; // ms
var MAX_WARNINGS = 3;
var numWarnings = 0;
var isWindowFocused = true;
var isKeyboardRaised = false;
var isNumericKeyboard = false;
var KEYBOARD_HEIGHT = 200;

@ -38,15 +39,15 @@
var keyboardRaised = shouldRaiseKeyboard();
var numericKeyboard = shouldSetNumeric();

if (keyboardRaised !== isKeyboardRaised || numericKeyboard !== isNumericKeyboard) {
if (isWindowFocused && (keyboardRaised !== isKeyboardRaised || numericKeyboard !== isNumericKeyboard)) {

if (typeof EventBridge !== "undefined") {
if (typeof EventBridge !== "undefined" && EventBridge !== null) {
EventBridge.emitWebEvent(
keyboardRaised ? ("_RAISE_KEYBOARD" + (numericKeyboard ? "_NUMERIC" : "")) : "_LOWER_KEYBOARD"
);
} else {
if (numWarnings < MAX_WARNINGS) {
console.log("WARNING: no global EventBridge object found");
console.log("WARNING: No global EventBridge object found");
numWarnings++;
}
}

@ -65,4 +66,14 @@
isNumericKeyboard = numericKeyboard;
}
}, POLL_FREQUENCY);

window.addEventListener("focus", function () {
isWindowFocused = true;
});

window.addEventListener("blur", function () {
isWindowFocused = false;
isKeyboardRaised = false;
isNumericKeyboard = false;
});
})();

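Taken together, the changes in this script make the in-page keyboard poller ignore state changes while the window is unfocused and tolerate a missing or null EventBridge. A condensed sketch of the resulting loop, assuming the shouldRaiseKeyboard() and shouldSetNumeric() helpers defined elsewhere in the same script:

    // Condensed sketch of the polling loop after this change (not the verbatim script).
    var POLL_FREQUENCY = 500; // ms
    var isWindowFocused = true;
    var isKeyboardRaised = false;
    var isNumericKeyboard = false;

    setInterval(function () {
        var keyboardRaised = shouldRaiseKeyboard();   // helper defined elsewhere in this script
        var numericKeyboard = shouldSetNumeric();     // helper defined elsewhere in this script

        // Only react while the window is focused, and only when the state actually changed.
        if (isWindowFocused && (keyboardRaised !== isKeyboardRaised || numericKeyboard !== isNumericKeyboard)) {
            if (typeof EventBridge !== "undefined" && EventBridge !== null) {
                EventBridge.emitWebEvent(
                    keyboardRaised ? ("_RAISE_KEYBOARD" + (numericKeyboard ? "_NUMERIC" : "")) : "_LOWER_KEYBOARD"
                );
            }
            isKeyboardRaised = keyboardRaised;
            isNumericKeyboard = numericKeyboard;
        }
    }, POLL_FREQUENCY);

    // Losing focus lowers the logical keyboard state so it re-raises cleanly on return.
    window.addEventListener("blur", function () {
        isWindowFocused = false;
        isKeyboardRaised = false;
        isNumericKeyboard = false;
    });
    window.addEventListener("focus", function () { isWindowFocused = true; });
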
BIN
interface/resources/images/steam-min-spec-failed.png
Normal file
After Width: | Height: | Size: 82 KiB |
After Width: | Height: | Size: 788 KiB |
After Width: | Height: | Size: 558 KiB |
BIN
interface/resources/meshes/controller/vive_body.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_button.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_l_grip.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_r_grip.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_sys_button.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_tips.fbm/Blank.png
Normal file
After Width: | Height: | Size: 1 KiB |
BIN
interface/resources/meshes/controller/vive_tips.fbm/Grip.png
Normal file
After Width: | Height: | Size: 46 KiB |
BIN
interface/resources/meshes/controller/vive_tips.fbm/Rotate.png
Normal file
After Width: | Height: | Size: 52 KiB |
BIN
interface/resources/meshes/controller/vive_tips.fbm/Teleport.png
Normal file
After Width: | Height: | Size: 19 KiB |
BIN
interface/resources/meshes/controller/vive_tips.fbm/Trigger.png
Normal file
After Width: | Height: | Size: 10 KiB |
BIN
interface/resources/meshes/controller/vive_tips.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_trackpad.fbx
Normal file
BIN
interface/resources/meshes/controller/vive_trigger.fbx
Normal file
|
@ -21,20 +21,23 @@ import "controls-uit" as HifiControls
|
|||
Window {
|
||||
id: root
|
||||
HifiConstants { id: hifi }
|
||||
HifiStyles.HifiConstants { id: hifiStyleConstants }
|
||||
|
||||
objectName: "AddressBarDialog"
|
||||
frame: HiddenFrame {}
|
||||
hideBackground: true
|
||||
title: "Go To"
|
||||
|
||||
shown: false
|
||||
destroyOnHidden: false
|
||||
resizable: false
|
||||
scale: 1.25 // Make this dialog a little larger than normal
|
||||
pinnable: false;
|
||||
|
||||
width: addressBarDialog.implicitWidth
|
||||
height: addressBarDialog.implicitHeight
|
||||
|
||||
onShownChanged: addressBarDialog.observeShownChanged(shown);
|
||||
onShownChanged: {
|
||||
addressBarDialog.keyboardEnabled = HMD.active;
|
||||
addressBarDialog.observeShownChanged(shown);
|
||||
}
|
||||
Component.onCompleted: {
|
||||
root.parentChanged.connect(center);
|
||||
center();
|
||||
|
@ -70,11 +73,12 @@ Window {
|
|||
AddressBarDialog {
|
||||
id: addressBarDialog
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
implicitWidth: backgroundImage.width
|
||||
implicitHeight: backgroundImage.height + (keyboardRaised ? 200 : 0)
|
||||
implicitHeight: backgroundImage.height + (keyboardEnabled ? keyboard.height : 0) + cardHeight;
|
||||
|
||||
// The buttons have their button state changed on hover, so we have to manually fix them up here
|
||||
onBackEnabledChanged: backArrow.buttonState = addressBarDialog.backEnabled ? 1 : 0;
|
||||
|
@ -93,8 +97,7 @@ Window {
|
|||
spacing: hifi.layout.spacing;
|
||||
clip: true;
|
||||
anchors {
|
||||
bottom: backgroundImage.top;
|
||||
bottomMargin: 2 * hifi.layout.spacing;
|
||||
bottom: backgroundImage.top
|
||||
horizontalCenter: backgroundImage.horizontalCenter
|
||||
}
|
||||
model: suggestions;
|
||||
|
@ -129,12 +132,16 @@ Window {
|
|||
verticalCenter: scroll.verticalCenter;
|
||||
}
|
||||
}
|
||||
|
||||
Image {
|
||||
id: backgroundImage
|
||||
source: "../images/address-bar.svg"
|
||||
width: 576 * root.scale
|
||||
height: 80 * root.scale
|
||||
property int inputAreaHeight: 56.0 * root.scale // Height of the background's input area
|
||||
width: 720
|
||||
height: 100
|
||||
anchors {
|
||||
bottom: parent.keyboardEnabled ? keyboard.top : parent.bottom;
|
||||
}
|
||||
property int inputAreaHeight: 70
|
||||
property int inputAreaStep: (height - inputAreaHeight) / 2
|
||||
|
||||
ToolbarButton {
|
||||
|
@ -181,7 +188,7 @@ Window {
|
|||
|
||||
HifiStyles.RalewayLight {
|
||||
id: notice;
|
||||
font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.50;
|
||||
font.pixelSize: hifi.fonts.pixelSize * 0.50;
|
||||
anchors {
|
||||
top: parent.top
|
||||
topMargin: parent.inputAreaStep + 12
|
||||
|
@ -210,7 +217,7 @@ Window {
|
|||
topMargin: parent.inputAreaStep + (2 * hifi.layout.spacing)
|
||||
bottomMargin: parent.inputAreaStep
|
||||
}
|
||||
font.pixelSize: hifi.fonts.pixelSize * root.scale * 0.75
|
||||
font.pixelSize: hifi.fonts.pixelSize * 0.75
|
||||
cursorVisible: false
|
||||
onTextChanged: {
|
||||
filterChoicesByText();
|
||||
|
@ -259,7 +266,6 @@ Window {
|
|||
Window {
|
||||
width: 938
|
||||
height: 625
|
||||
scale: 0.8 // Reset scale of Window to 1.0 (counteract address bar's scale value of 1.25)
|
||||
HifiControls.WebView {
|
||||
anchors.fill: parent;
|
||||
id: storyCardHTML;
|
||||
|
@ -274,35 +280,18 @@ Window {
|
|||
verticalCenter: backgroundImage.verticalCenter;
|
||||
horizontalCenter: scroll.horizontalCenter;
|
||||
}
|
||||
z: 100
|
||||
}
|
||||
|
||||
// virtual keyboard, letters
|
||||
HifiControls.Keyboard {
|
||||
id: keyboard1
|
||||
y: parent.keyboardRaised ? parent.height : 0
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && !parent.punctuationMode
|
||||
enabled: parent.keyboardRaised && !parent.punctuationMode
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
}
|
||||
|
||||
HifiControls.KeyboardPunctuation {
|
||||
id: keyboard2
|
||||
y: parent.keyboardRaised ? parent.height : 0
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && parent.punctuationMode
|
||||
enabled: parent.keyboardRaised && parent.punctuationMode
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
id: keyboard
|
||||
raised: parent.keyboardEnabled // Ignore keyboardRaised; keep keyboard raised if enabled (i.e., in HMD).
|
||||
numeric: parent.punctuationMode
|
||||
anchors {
|
||||
bottom: parent.bottom
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -442,10 +431,10 @@ Window {
|
|||
function updateLocationText(enteringAddress) {
|
||||
if (enteringAddress) {
|
||||
notice.text = "Go to a place, @user, path or network address";
|
||||
notice.color = "gray";
|
||||
notice.color = hifiStyleConstants.colors.baseGrayHighlight;
|
||||
} else {
|
||||
notice.text = AddressManager.isConnected ? "Your location:" : "Not Connected";
|
||||
notice.color = AddressManager.isConnected ? "gray" : "crimson";
|
||||
notice.color = AddressManager.isConnected ? hifiStyleConstants.colors.baseGrayHighlight : hifiStyleConstants.colors.redHighlight;
|
||||
// Display hostname, which includes ip address, localhost, and other non-placenames.
|
||||
location.text = (AddressManager.hostname || '') + (AddressManager.pathname ? AddressManager.pathname.match(/\/[^\/]+/)[0] : '');
|
||||
}
|
||||
|
|
|
@ -184,7 +184,7 @@ ScrollingWindow {
prompt.selected.connect(function (jsonResult) {
if (jsonResult) {
var result = JSON.parse(jsonResult);
var url = result.textInput;
var url = result.textInput.trim();
var shapeType;
switch (result.comboBox) {
case SHAPE_TYPE_SIMPLE_HULL:

@ -349,7 +349,7 @@ ScrollingWindow {
},
function(err, path) {
print(err, path);
if (!err) {
if (err === "") {
uploadProgressLabel.text = "Upload Complete";
timer.interval = 1000;
timer.repeat = false;

@ -362,14 +362,15 @@ ScrollingWindow {
console.log("Asset Browser - finished uploading: ", fileUrl);
reload();
} else {
if (err > 0) {
console.log("Asset Browser - error uploading: ", fileUrl, " - error ", err);
var box = errorMessageBox("There was an error uploading:\n" + fileUrl + "\n" + Assets.getErrorString(err));
box.selected.connect(reload);
}
uploadSpinner.visible = false;
uploadButton.enabled = true;
uploadOpen = false;

if (err !== -1) {
console.log("Asset Browser - error uploading: ", fileUrl, " - error ", err);
var box = errorMessageBox("There was an error uploading:\n" + fileUrl + "\n" + err);
box.selected.connect(reload);
}
}
}, dropping);
}

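The AssetServer.qml hunk above reworks the upload callback's error convention: success is keyed on err === "" rather than a falsy check, and the error dialog is guarded by a sentinel comparison instead of err > 0. A minimal sketch of a callback written against that convention; showErrorDialog and refreshAssetList are placeholder names, and treating -1 as "no dialog needed" is an assumption read off the surrounding lines rather than something this diff states.

    // Illustrative sketch of the upload callback convention after this change.
    // `showErrorDialog` and `refreshAssetList` are placeholder names, not the dialog's real helpers.
    function onUploadFinished(err, path) {
        if (err === "") {
            // An empty string now signals success.
            console.log("upload finished:", path);
            refreshAssetList();
        } else if (err !== -1) {
            // Anything other than the -1 sentinel is reported to the user (assumption).
            showErrorDialog("There was an error uploading:\n" + path + "\n" + err);
        }
    }
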
@ -95,46 +95,10 @@ Hifi.AvatarInputs {
|
|||
anchors.fill: parent
|
||||
color: root.mirrorVisible ? (root.audioClipping ? "red" : "#696969") : "#00000000"
|
||||
|
||||
Image {
|
||||
id: faceMute
|
||||
width: root.iconSize
|
||||
height: root.iconSize
|
||||
visible: root.cameraEnabled
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: root.iconPadding
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
source: root.cameraMuted ? "../images/face-mute.svg" : "../images/face.svg"
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: {
|
||||
root.toggleCameraMute()
|
||||
}
|
||||
onDoubleClicked: {
|
||||
root.resetSensors();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Image {
|
||||
id: micMute
|
||||
width: root.iconSize
|
||||
height: root.iconSize
|
||||
anchors.left: root.cameraEnabled ? faceMute.right : parent.left
|
||||
anchors.leftMargin: root.iconPadding
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
source: root.audioMuted ? "../images/mic-mute.svg" : "../images/mic.svg"
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
onClicked: {
|
||||
root.toggleAudioMute()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Item {
|
||||
id: audioMeter
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.left: micMute.right
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: root.iconPadding
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: root.iconPadding
|
||||
|
|
|
@ -31,13 +31,6 @@ ScrollingWindow {
addressBar.text = webview.url
}

onParentChanged: {
if (visible) {
addressBar.forceActiveFocus();
addressBar.selectAll()
}
}

function showPermissionsBar(){
permissionsContainer.visible=true;
}

interface/resources/qml/ConnectionFailureDialog.qml (new file, 14 lines)

@ -0,0 +1,14 @@
import QtQuick.Dialogs 1.2 as OriginalDialogs

import "dialogs"

MessageDialog {
id: root
objectName: "ConnectionFailureDialog"

title: "No Connection"
text: "Unable to connect to this domain. Click the 'GO TO' button on the toolbar to visit another domain."
buttons: OriginalDialogs.StandardButton.Ok
icon: OriginalDialogs.StandardIcon.Warning
defaultButton: OriginalDialogs.StandardButton.NoButton;
}

@ -33,6 +33,8 @@ ModalWindow {
property string title: ""
property int titleWidth: 0

keyboardOverride: true // Disable ModalWindow's keyboard.

LoginDialog {
id: loginDialog

@ -29,8 +29,9 @@ Item {
|
|||
readonly property int maxHeight: 720
|
||||
|
||||
function resize() {
|
||||
var targetWidth = Math.max(titleWidth, additionalTextContainer.contentWidth)
|
||||
var targetHeight = 4 * hifi.dimensions.contentSpacing.y + buttons.height + additionalTextContainer.height
|
||||
var targetWidth = Math.max(titleWidth, Math.max(additionalTextContainer.contentWidth,
|
||||
termsContainer.contentWidth))
|
||||
var targetHeight = 5 * hifi.dimensions.contentSpacing.y + buttons.height + additionalTextContainer.height + termsContainer.height
|
||||
|
||||
root.width = Math.max(d.minWidth, Math.min(d.maxWidth, targetWidth))
|
||||
root.height = Math.max(d.minHeight, Math.min(d.maxHeight, targetHeight))
|
||||
|
@ -43,7 +44,7 @@ Item {
|
|||
top: parent.top
|
||||
horizontalCenter: parent.horizontalCenter
|
||||
margins: 0
|
||||
topMargin: 3 * hifi.dimensions.contentSpacing.y
|
||||
topMargin: 2 * hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
spacing: hifi.dimensions.contentSpacing.x
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
|
@ -91,6 +92,25 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
InfoItem {
|
||||
id: termsContainer
|
||||
anchors {
|
||||
top: additionalTextContainer.bottom
|
||||
left: parent.left
|
||||
margins: 0
|
||||
topMargin: 2 * hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
|
||||
text: qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
|
||||
wrapMode: Text.WordWrap
|
||||
color: hifi.colors.baseGrayHighlight
|
||||
lineHeight: 1
|
||||
lineHeightMode: Text.ProportionalHeight
|
||||
horizontalAlignment: Text.AlignHCenter
|
||||
|
||||
onLinkActivated: loginDialog.openUrl(link)
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
root.title = qsTr("Complete Your Profile")
|
||||
root.iconText = "<"
|
||||
|
|
|
@ -27,6 +27,7 @@ Item {
|
|||
loginDialog.login(usernameField.text, passwordField.text)
|
||||
}
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
|
@ -41,12 +42,18 @@ Item {
|
|||
|
||||
function resize() {
|
||||
var targetWidth = Math.max(titleWidth, form.contentWidth);
|
||||
var targetHeight = hifi.dimensions.contentSpacing.y + mainTextContainer.height
|
||||
+ 4 * hifi.dimensions.contentSpacing.y + form.height + hifi.dimensions.contentSpacing.y + buttons.height;
|
||||
var targetHeight = hifi.dimensions.contentSpacing.y + mainTextContainer.height +
|
||||
4 * hifi.dimensions.contentSpacing.y + form.height +
|
||||
hifi.dimensions.contentSpacing.y + buttons.height;
|
||||
|
||||
if (additionalInformation.visible) {
|
||||
targetWidth = Math.max(targetWidth, additionalInformation.width);
|
||||
targetHeight += hifi.dimensions.contentSpacing.y + additionalInformation.height
|
||||
}
|
||||
|
||||
root.width = Math.max(d.minWidth, Math.min(d.maxWidth, targetWidth));
|
||||
root.height = Math.max(d.minHeight, Math.min(d.maxHeight, targetHeight))
|
||||
+ (linkAccountBody.keyboardRaised ? (200 + 2 * hifi.dimensions.contentSpacing.y) : hifi.dimensions.contentSpacing.y);
|
||||
+ (keyboardEnabled && keyboardRaised ? (200 + 2 * hifi.dimensions.contentSpacing.y) : hifi.dimensions.contentSpacing.y);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -135,30 +142,34 @@ Item {
|
|||
|
||||
}
|
||||
|
||||
// Override ScrollingWindow's keyboard that would be at very bottom of dialog.
|
||||
Keyboard {
|
||||
y: parent.keyboardRaised ? parent.height : 0
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && !parent.punctuationMode
|
||||
enabled: parent.keyboardRaised && !parent.punctuationMode
|
||||
InfoItem {
|
||||
id: additionalInformation
|
||||
anchors {
|
||||
top: form.bottom
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: buttons.top
|
||||
bottomMargin: parent.keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
margins: 0
|
||||
topMargin: hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
|
||||
visible: loginDialog.isSteamRunning()
|
||||
|
||||
text: qsTr("Your steam account informations will not be exposed to other users.")
|
||||
wrapMode: Text.WordWrap
|
||||
color: hifi.colors.baseGrayHighlight
|
||||
lineHeight: 1
|
||||
lineHeightMode: Text.ProportionalHeight
|
||||
horizontalAlignment: Text.AlignHCenter
|
||||
}
|
||||
|
||||
KeyboardPunctuation {
|
||||
y: parent.keyboardRaised ? parent.height : 0
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && parent.punctuationMode
|
||||
enabled: parent.keyboardRaised && parent.punctuationMode
|
||||
// Override ScrollingWindow's keyboard that would be at very bottom of dialog.
|
||||
Keyboard {
|
||||
raised: keyboardEnabled && keyboardRaised
|
||||
numeric: punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: buttons.top
|
||||
bottomMargin: parent.keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
bottomMargin: keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -195,9 +206,10 @@ Item {
|
|||
Component.onCompleted: {
|
||||
root.title = qsTr("Sign Into High Fidelity")
|
||||
root.iconText = "<"
|
||||
keyboardEnabled = HMD.active;
|
||||
d.resize();
|
||||
|
||||
usernameField.forceActiveFocus()
|
||||
usernameField.forceActiveFocus();
|
||||
}
|
||||
|
||||
Connections {
|
||||
|
|
|
@ -27,6 +27,13 @@ Item {
|
|||
loginDialog.createAccountFromStream(textField.text)
|
||||
}
|
||||
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
onKeyboardRaisedChanged: d.resize();
|
||||
|
||||
QtObject {
|
||||
id: d
|
||||
readonly property int minWidth: 480
|
||||
|
@ -35,15 +42,16 @@ Item {
|
|||
readonly property int maxHeight: 720
|
||||
|
||||
function resize() {
|
||||
var targetWidth = Math.max(titleWidth, Math.max(mainTextContainer.contentWidth,
|
||||
termsContainer.contentWidth))
|
||||
var targetWidth = Math.max(titleWidth, mainTextContainer.contentWidth)
|
||||
var targetHeight = mainTextContainer.height +
|
||||
2 * hifi.dimensions.contentSpacing.y + textField.height +
|
||||
5 * hifi.dimensions.contentSpacing.y + termsContainer.height +
|
||||
1 * hifi.dimensions.contentSpacing.y + buttons.height
|
||||
hifi.dimensions.contentSpacing.y + textField.height +
|
||||
hifi.dimensions.contentSpacing.y + buttons.height
|
||||
|
||||
root.width = Math.max(d.minWidth, Math.min(d.maxWidth, targetWidth))
|
||||
root.height = Math.max(d.minHeight, Math.min(d.maxHeight, targetHeight))
|
||||
+ (keyboardEnabled && keyboardRaised ? (200 + 2 * hifi.dimensions.contentSpacing.y) : hifi.dimensions.contentSpacing.y)
|
||||
|
||||
height = root.height
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -71,39 +79,32 @@ Item {
|
|||
top: mainTextContainer.bottom
|
||||
left: parent.left
|
||||
margins: 0
|
||||
topMargin: 2 * hifi.dimensions.contentSpacing.y
|
||||
topMargin: hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
width: 250
|
||||
|
||||
placeholderText: "Choose your own"
|
||||
}
|
||||
|
||||
InfoItem {
|
||||
id: termsContainer
|
||||
// Override ScrollingWindow's keyboard that would be at very bottom of dialog.
|
||||
Keyboard {
|
||||
raised: keyboardEnabled && keyboardRaised
|
||||
numeric: punctuationMode
|
||||
anchors {
|
||||
top: textField.bottom
|
||||
left: parent.left
|
||||
margins: 0
|
||||
topMargin: 3 * hifi.dimensions.contentSpacing.y
|
||||
right: parent.right
|
||||
bottom: buttons.top
|
||||
bottomMargin: keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
|
||||
text: qsTr("By creating this user profile, you agree to <a href='https://highfidelity.com/terms'>High Fidelity's Terms of Service</a>")
|
||||
wrapMode: Text.WordWrap
|
||||
color: hifi.colors.baseGrayHighlight
|
||||
lineHeight: 1
|
||||
lineHeightMode: Text.ProportionalHeight
|
||||
horizontalAlignment: Text.AlignHCenter
|
||||
|
||||
onLinkActivated: loginDialog.openUrl(link)
|
||||
}
|
||||
|
||||
Row {
|
||||
id: buttons
|
||||
anchors {
|
||||
top: termsContainer.bottom
|
||||
bottom: parent.bottom
|
||||
right: parent.right
|
||||
margins: 0
|
||||
topMargin: 1 * hifi.dimensions.contentSpacing.y
|
||||
topMargin: hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
spacing: hifi.dimensions.contentSpacing.x
|
||||
onHeightChanged: d.resize(); onWidthChanged: d.resize();
|
||||
|
@ -130,8 +131,10 @@ Item {
|
|||
Component.onCompleted: {
|
||||
root.title = qsTr("Complete Your Profile")
|
||||
root.iconText = "<"
|
||||
keyboardEnabled = HMD.active;
|
||||
d.resize();
|
||||
}
|
||||
|
||||
Connections {
|
||||
target: loginDialog
|
||||
onHandleCreateCompleted: {
|
||||
|
|
|
@ -56,6 +56,10 @@ Windows.ScrollingWindow {
onWidthChanged: notifyResized();
onHeightChanged: notifyResized();

onShownChanged: {
keyboardEnabled = HMD.active;
}

Item {
width: pane.contentWidth
implicitHeight: pane.scrollHeight

interface/resources/qml/StatText.qml (new file, 9 lines)

@ -0,0 +1,9 @@
import QtQuick 2.3
import QtQuick.Controls 1.2

Text {
color: "white";
style: Text.Outline;
styleColor: "black";
font.pixelSize: 12;
}

@ -1,6 +1,7 @@
|
|||
import Hifi 1.0 as Hifi
|
||||
import QtQuick 2.3
|
||||
import QtQuick.Controls 1.2
|
||||
import '.'
|
||||
|
||||
Item {
|
||||
id: stats
|
||||
|
@ -28,9 +29,7 @@ Item {
|
|||
implicitWidth: row.width
|
||||
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
readonly property int fontSize: 12
|
||||
readonly property string fontColor: "white"
|
||||
readonly property string bgColor: "#99333333"
|
||||
readonly property string bgColor: "#AA111111"
|
||||
|
||||
Row {
|
||||
id: row
|
||||
|
@ -49,64 +48,40 @@ Item {
|
|||
Column {
|
||||
id: generalCol
|
||||
spacing: 4; x: 4; y: 4;
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Servers: " + root.serverCount
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Avatars: " + root.avatarCount
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Frame Rate: " + root.framerate.toFixed(2);
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Render Rate: " + root.renderrate.toFixed(2);
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Present Rate: " + root.presentrate.toFixed(2);
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Present New Rate: " + root.presentnewrate.toFixed(2);
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Present Drop Rate: " + root.presentdroprate.toFixed(2);
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Simrate: " + root.simrate
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Avatar Simrate: " + root.avatarSimrate
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Packets In/Out: " + root.packetInCount + "/" + root.packetOutCount
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Mbps In/Out: " + root.mbpsIn.toFixed(2) + "/" + root.mbpsOut.toFixed(2)
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "Asset Mbps In/Out: " + root.assetMbpsIn.toFixed(2) + "/" + root.assetMbpsOut.toFixed(2)
|
||||
}
|
||||
|
@ -126,29 +101,19 @@ Item {
|
|||
Column {
|
||||
id: pingCol
|
||||
spacing: 4; x: 4; y: 4;
|
||||
Text {
|
||||
color: root.fontColor
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Audio ping: " + root.audioPing
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Avatar ping: " + root.avatarPing
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Entities avg ping: " + root.entitiesPing
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Asset ping: " + root.assetPing
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Messages max ping: " + root.messagePing
|
||||
}
|
||||
|
@ -167,46 +132,32 @@ Item {
|
|||
Column {
|
||||
id: geoCol
|
||||
spacing: 4; x: 4; y: 4;
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Position: " + root.position.x.toFixed(1) + ", " +
|
||||
root.position.y.toFixed(1) + ", " + root.position.z.toFixed(1)
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Speed: " + root.speed.toFixed(1)
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Yaw: " + root.yaw.toFixed(1)
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Avatar Mixer In: " + root.avatarMixerInKbps + " kbps, " +
|
||||
root.avatarMixerInPps + "pps";
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Avatar Mixer Out: " + root.avatarMixerOutKbps + " kbps, " +
|
||||
root.avatarMixerOutPps + "pps";
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Downloads: " + root.downloads + "/" + root.downloadLimit +
|
||||
", Pending: " + root.downloadsPending;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded && root.downloadUrls.length > 0;
|
||||
text: "Download URLs:"
|
||||
}
|
||||
|
@ -217,9 +168,7 @@ Item {
|
|||
visible: root.expanded && root.downloadUrls.length > 0;
|
||||
|
||||
model: root.downloadUrls
|
||||
delegate: Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
delegate: StatText {
|
||||
visible: root.expanded;
|
||||
text: modelData.length > 30
|
||||
? modelData.substring(0, 5) + "..." + modelData.substring(modelData.length - 22)
|
||||
|
@ -240,84 +189,110 @@ Item {
|
|||
Column {
|
||||
id: octreeCol
|
||||
spacing: 4; x: 4; y: 4;
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "Triangles: " + root.triangles +
|
||||
" / Material Switches: " + root.materialSwitches
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
text: "GPU Free Memory: " + root.gpuFreeMemory + " MB";
|
||||
}
|
||||
StatText {
|
||||
text: "GPU Textures: ";
|
||||
}
|
||||
StatText {
|
||||
text: " Sparse Enabled: " + (0 == root.gpuSparseTextureEnabled ? "false" : "true");
|
||||
}
|
||||
StatText {
|
||||
text: " Count: " + root.gpuTextures;
|
||||
}
|
||||
StatText {
|
||||
text: " Rectified: " + root.rectifiedTextureCount;
|
||||
}
|
||||
StatText {
|
||||
text: " Decimated: " + root.decimatedTextureCount;
|
||||
}
|
||||
StatText {
|
||||
text: " Sparse Count: " + root.gpuTexturesSparse;
|
||||
visible: 0 != root.gpuSparseTextureEnabled;
|
||||
}
|
||||
StatText {
|
||||
text: " Virtual Memory: " + root.gpuTextureVirtualMemory + " MB";
|
||||
}
|
||||
StatText {
|
||||
text: " Commited Memory: " + root.gpuTextureMemory + " MB";
|
||||
}
|
||||
StatText {
|
||||
text: " Framebuffer Memory: " + root.gpuTextureFramebufferMemory + " MB";
|
||||
}
|
||||
StatText {
|
||||
text: " Sparse Memory: " + root.gpuTextureSparseMemory + " MB";
|
||||
visible: 0 != root.gpuSparseTextureEnabled;
|
||||
}
|
||||
StatText {
|
||||
text: "GPU Buffers: "
|
||||
}
|
||||
StatText {
|
||||
text: " Count: " + root.gpuTextures;
|
||||
}
|
||||
StatText {
|
||||
text: " Memory: " + root.gpuBufferMemory;
|
||||
}
|
||||
StatText {
|
||||
text: "GL Swapchain Memory: " + root.glContextSwapchainMemory + " MB";
|
||||
}
|
||||
StatText {
|
||||
text: "QML Texture Memory: " + root.qmlTextureMemory + " MB";
|
||||
}
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Items rendered / considered: " +
|
||||
root.itemRendered + " / " + root.itemConsidered;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: " out of view: " + root.itemOutOfView +
|
||||
" too small: " + root.itemTooSmall;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: "Shadows rendered / considered: " +
|
||||
root.shadowRendered + " / " + root.shadowConsidered;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded;
|
||||
text: " out of view: " + root.shadowOutOfView +
|
||||
" too small: " + root.shadowTooSmall;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: !root.expanded
|
||||
text: "Octree Elements Server: " + root.serverElements +
|
||||
" Local: " + root.localElements;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "Octree Sending Mode: " + root.sendingMode;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "Octree Packets to Process: " + root.packetStats;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "Octree Elements - ";
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "\tServer: " + root.serverElements +
|
||||
" Internal: " + root.serverInternal +
|
||||
" Leaves: " + root.serverLeaves;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "\tLocal: " + root.localElements +
|
||||
" Internal: " + root.localInternal +
|
||||
" Leaves: " + root.localLeaves;
|
||||
}
|
||||
Text {
|
||||
color: root.fontColor;
|
||||
font.pixelSize: root.fontSize
|
||||
StatText {
|
||||
visible: root.expanded
|
||||
text: "LOD: " + root.lodStatus;
|
||||
}
|
||||
|
@ -331,12 +306,10 @@ Item {
|
|||
width: perfText.width + 8
|
||||
height: perfText.height + 8
|
||||
color: root.bgColor;
|
||||
Text {
|
||||
StatText {
|
||||
x: 4; y: 4
|
||||
id: perfText
|
||||
color: root.fontColor
|
||||
font.family: root.monospaceFont
|
||||
font.pixelSize: 12
|
||||
text: "------------------------------------------ Function " +
|
||||
"--------------------------------------- --msecs- -calls--\n" +
|
||||
root.timingStats;
|
||||
|
|
|
@ -42,6 +42,10 @@ ScrollingWindow {
}
}

onShownChanged: {
keyboardEnabled = HMD.active;
}

Settings {
category: "ToolWindow.Position"
property alias x: toolWindow.x

@ -10,10 +10,10 @@
|
|||
|
||||
import QtQuick 2.5
|
||||
import QtWebEngine 1.2
|
||||
import HFWebEngineProfile 1.0
|
||||
|
||||
WebEngineView {
|
||||
id: root
|
||||
property var newUrl;
|
||||
|
||||
profile: desktop.browserProfile
|
||||
|
||||
|
@ -25,30 +25,6 @@ WebEngineView {
|
|||
});
|
||||
}
|
||||
|
||||
|
||||
|
||||
// FIXME hack to get the URL with the auth token included. Remove when we move to Qt 5.6
|
||||
Timer {
|
||||
id: urlReplacementTimer
|
||||
running: false
|
||||
repeat: false
|
||||
interval: 50
|
||||
onTriggered: url = newUrl;
|
||||
}
|
||||
|
||||
onUrlChanged: {
|
||||
var originalUrl = url.toString();
|
||||
newUrl = urlHandler.fixupUrl(originalUrl).toString();
|
||||
if (newUrl !== originalUrl) {
|
||||
root.stop();
|
||||
if (urlReplacementTimer.running) {
|
||||
console.warn("Replacement timer already running");
|
||||
return;
|
||||
}
|
||||
urlReplacementTimer.start();
|
||||
}
|
||||
}
|
||||
|
||||
onLoadingChanged: {
|
||||
// Required to support clicking on "hifi://" links
|
||||
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
|
||||
|
|
|
@ -32,6 +32,8 @@ FocusScope {

readonly property ComboBox control: comboBox

signal accepted();

implicitHeight: comboBox.height;
focus: true

@ -134,6 +136,7 @@ FocusScope {
function hideList() {
popup.visible = false;
scrollView.hoverEnabled = false;
root.accepted();
}

FocusScope {

@ -1,10 +1,28 @@
//
// FileDialog.qml
//
// Created by Anthony Thibault on 31 Oct 2016
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

import QtQuick 2.0

Item {
id: keyboardBase
height: 200
property alias shiftKey: key27

property bool raised: false
property bool numeric: false

readonly property int raisedHeight: 200

height: enabled && raised ? raisedHeight : 0
visible: enabled && raised

property bool shiftMode: false
property bool numericShiftMode: false

function resetShiftMode(mode) {
shiftMode = mode;

@ -37,8 +55,8 @@ Item {

function forEachKey(func) {
var i, j;
for (i = 0; i < column1.children.length; i++) {
var row = column1.children[i];
for (i = 0; i < columnAlpha.children.length; i++) {
var row = columnAlpha.children[i];
for (j = 0; j < row.children.length; j++) {
var key = row.children[j];
func(key);

@ -48,10 +66,12 @@ Item {

onShiftModeChanged: {
forEachKey(function (key) {
if (shiftMode) {
key.glyph = keyboardBase.toUpper(key.glyph);
} else {
key.glyph = keyboardBase.toLower(key.glyph);
if (/[a-z]/i.test(key.glyph)) {
if (shiftMode) {
key.glyph = keyboardBase.toUpper(key.glyph);
} else {
key.glyph = keyboardBase.toLower(key.glyph);
}
}
});
}

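The onShiftModeChanged hunk above adds a /[a-z]/i guard so only letter keys change case when shift toggles, leaving symbol glyphs such as "⏎" and "←" untouched. A small standalone JavaScript sketch of that guard; keyboardBase.toUpper and toLower in the component are assumed to behave like the string methods used here.

    // Standalone sketch of the guarded case toggle (not the QML component itself).
    function toggleGlyphCase(glyph, shiftMode) {
        if (/[a-z]/i.test(glyph)) {
            return shiftMode ? glyph.toUpperCase() : glyph.toLowerCase();
        }
        return glyph; // punctuation and symbol keys keep their glyph
    }

    console.log(toggleGlyphCase("q", true));  // "Q"
    console.log(toggleGlyphCase("⏎", true));  // "⏎", unchanged
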
@ -97,265 +117,177 @@ Item {
|
|||
anchors.bottomMargin: 0
|
||||
|
||||
Column {
|
||||
id: column1
|
||||
id: columnAlpha
|
||||
width: 480
|
||||
height: 200
|
||||
visible: !numeric
|
||||
|
||||
Row {
|
||||
id: row1
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key {
|
||||
id: key1
|
||||
width: 44
|
||||
glyph: "q"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key2
|
||||
width: 44
|
||||
glyph: "w"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key3
|
||||
width: 44
|
||||
glyph: "e"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key4
|
||||
width: 43
|
||||
glyph: "r"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key5
|
||||
width: 43
|
||||
glyph: "t"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key6
|
||||
width: 44
|
||||
glyph: "y"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key7
|
||||
width: 44
|
||||
glyph: "u"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key8
|
||||
width: 43
|
||||
glyph: "i"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key9
|
||||
width: 42
|
||||
glyph: "o"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key10
|
||||
width: 44
|
||||
glyph: "p"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key28
|
||||
width: 45
|
||||
glyph: "←"
|
||||
}
|
||||
Key { width: 43; glyph: "q"; }
|
||||
Key { width: 43; glyph: "w"; }
|
||||
Key { width: 43; glyph: "e"; }
|
||||
Key { width: 43; glyph: "r"; }
|
||||
Key { width: 43; glyph: "t"; }
|
||||
Key { width: 43; glyph: "y"; }
|
||||
Key { width: 43; glyph: "u"; }
|
||||
Key { width: 43; glyph: "i"; }
|
||||
Key { width: 43; glyph: "o"; }
|
||||
Key { width: 43; glyph: "p"; }
|
||||
Key { width: 43; glyph: "←"; }
|
||||
}
|
||||
|
||||
Row {
|
||||
id: row2
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 18
|
||||
anchors.leftMargin: 20
|
||||
|
||||
Key {
|
||||
id: key11
|
||||
width: 43
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key12
|
||||
width: 43
|
||||
glyph: "s"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key13
|
||||
width: 43
|
||||
glyph: "d"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key14
|
||||
width: 43
|
||||
glyph: "f"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key15
|
||||
width: 43
|
||||
glyph: "g"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key16
|
||||
width: 43
|
||||
glyph: "h"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key17
|
||||
width: 43
|
||||
glyph: "j"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key18
|
||||
width: 43
|
||||
glyph: "k"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key19
|
||||
width: 43
|
||||
glyph: "l"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key32
|
||||
width: 75
|
||||
glyph: "⏎"
|
||||
}
|
||||
Key { width: 43; glyph: "a"; }
|
||||
Key { width: 43; glyph: "s"; }
|
||||
Key { width: 43; glyph: "d"; }
|
||||
Key { width: 43; glyph: "f"; }
|
||||
Key { width: 43; glyph: "g"; }
|
||||
Key { width: 43; glyph: "h"; }
|
||||
Key { width: 43; glyph: "j"; }
|
||||
Key { width: 43; glyph: "k"; }
|
||||
Key { width: 43; glyph: "l"; }
|
||||
Key { width: 70; glyph: "⏎"; }
|
||||
}
|
||||
|
||||
Row {
|
||||
id: row3
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key {
|
||||
id: key27
|
||||
width: 46
|
||||
id: shiftKey
|
||||
width: 43
|
||||
glyph: "⇪"
|
||||
toggle: true
|
||||
onToggledChanged: {
|
||||
shiftMode = toggled;
|
||||
}
|
||||
onToggledChanged: shiftMode = toggled
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key20
|
||||
width: 43
|
||||
glyph: "z"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key21
|
||||
width: 43
|
||||
glyph: "x"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key22
|
||||
width: 43
|
||||
glyph: "c"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key23
|
||||
width: 43
|
||||
glyph: "v"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key24
|
||||
width: 43
|
||||
glyph: "b"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key25
|
||||
width: 43
|
||||
glyph: "n"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key26
|
||||
width: 44
|
||||
glyph: "m"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key31
|
||||
width: 43
|
||||
glyph: "_"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key33
|
||||
width: 43
|
||||
glyph: "?"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key36
|
||||
width: 46
|
||||
glyph: "/"
|
||||
}
|
||||
|
||||
Key { width: 43; glyph: "z"; }
|
||||
Key { width: 43; glyph: "x"; }
|
||||
Key { width: 43; glyph: "c"; }
|
||||
Key { width: 43; glyph: "v"; }
|
||||
Key { width: 43; glyph: "b"; }
|
||||
Key { width: 43; glyph: "n"; }
|
||||
Key { width: 43; glyph: "m"; }
|
||||
Key { width: 43; glyph: "_"; }
|
||||
Key { width: 43; glyph: "/"; }
|
||||
Key { width: 43; glyph: "?"; }
|
||||
}
|
||||
|
||||
Row {
|
||||
id: row4
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 19
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key {
|
||||
id: key30
|
||||
width: 89
|
||||
glyph: "&123"
|
||||
mouseArea.onClicked: {
|
||||
keyboardBase.parent.punctuationMode = true;
|
||||
}
|
||||
width: 70
|
||||
glyph: "123"
|
||||
mouseArea.onClicked: keyboardBase.parent.punctuationMode = true
|
||||
}
|
||||
Key { width: 231; glyph: " "; }
|
||||
Key { width: 43; glyph: ","; }
|
||||
Key { width: 43; glyph: "."; }
|
||||
Key { width: 43; glyph: "\u276C"; }
|
||||
Key { width: 43; glyph: "\u276D"; }
|
||||
}
|
||||
}
|
||||
|
||||
Column {
|
||||
id: columnNumeric
|
||||
width: 480
|
||||
height: 200
|
||||
visible: numeric
|
||||
|
||||
Row {
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key { width: 43; glyph: "1"; }
|
||||
Key { width: 43; glyph: "2"; }
|
||||
Key { width: 43; glyph: "3"; }
|
||||
Key { width: 43; glyph: "4"; }
|
||||
Key { width: 43; glyph: "5"; }
|
||||
Key { width: 43; glyph: "6"; }
|
||||
Key { width: 43; glyph: "7"; }
|
||||
Key { width: 43; glyph: "8"; }
|
||||
Key { width: 43; glyph: "9"; }
|
||||
Key { width: 43; glyph: "0"; }
|
||||
Key { width: 43; glyph: "←"; }
|
||||
}
|
||||
|
||||
Row {
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key { width: 43; glyph: "!"; }
|
||||
Key { width: 43; glyph: "@"; }
|
||||
Key { width: 43; glyph: "#"; }
|
||||
Key { width: 43; glyph: "$"; }
|
||||
Key { width: 43; glyph: "%"; }
|
||||
Key { width: 43; glyph: "^"; }
|
||||
Key { width: 43; glyph: "&"; }
|
||||
Key { width: 43; glyph: "*"; }
|
||||
Key { width: 43; glyph: "("; }
|
||||
Key { width: 43; glyph: ")"; }
|
||||
Key { width: 43; glyph: "⏎"; }
|
||||
}
|
||||
|
||||
Row {
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key {
|
||||
id: key29
|
||||
width: 285
|
||||
glyph: " "
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key34
|
||||
id: numericShiftKey
|
||||
width: 43
|
||||
glyph: "⇦"
|
||||
glyph: "\u21E8"
|
||||
toggle: true
|
||||
onToggledChanged: numericShiftMode = toggled
|
||||
}
|
||||
Key { width: 43; glyph: numericShiftMode ? "`" : "+"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "~" : "-"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "\u00A3" : "="; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "\u20AC" : ";"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "\u00A5" : ":"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "<" : "'"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? ">" : "\""; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "[" : "{"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "]" : "}"; }
|
||||
Key { width: 43; glyph: numericShiftMode ? "\\" : "|"; }
|
||||
}
|
||||
|
||||
Row {
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key {
|
||||
id: key35
|
||||
x: 343
|
||||
width: 43
|
||||
glyph: "⇨"
|
||||
width: 70
|
||||
glyph: "abc"
|
||||
mouseArea.onClicked: keyboardBase.parent.punctuationMode = false
|
||||
}
|
||||
|
||||
Key { width: 231; glyph: " "; }
|
||||
Key { width: 43; glyph: ","; }
|
||||
Key { width: 43; glyph: "."; }
|
||||
Key { width: 43; glyph: "\u276C"; }
|
||||
Key { width: 43; glyph: "\u276D"; }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -386,5 +318,4 @@ Item {
|
|||
anchors.top: parent.top
|
||||
anchors.topMargin: 0
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -1,324 +0,0 @@
|
|||
import QtQuick 2.0
|
||||
|
||||
Item {
|
||||
id: keyboardBase
|
||||
height: 200
|
||||
Rectangle {
|
||||
id: leftRect
|
||||
y: 0
|
||||
height: 200
|
||||
color: "#252525"
|
||||
anchors.right: keyboardRect.left
|
||||
anchors.rightMargin: 0
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: keyboardRect
|
||||
x: 206
|
||||
y: 0
|
||||
width: 480
|
||||
height: 200
|
||||
color: "#252525"
|
||||
anchors.horizontalCenter: parent.horizontalCenter
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
|
||||
Column {
|
||||
id: column1
|
||||
width: 480
|
||||
height: 200
|
||||
|
||||
Row {
|
||||
id: row1
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
|
||||
Key {
|
||||
id: key1
|
||||
width: 43
|
||||
glyph: "1"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key2
|
||||
width: 43
|
||||
glyph: "2"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key3
|
||||
width: 43
|
||||
glyph: "3"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key4
|
||||
width: 43
|
||||
glyph: "4"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key5
|
||||
width: 43
|
||||
glyph: "5"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key6
|
||||
width: 43
|
||||
glyph: "6"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key7
|
||||
width: 43
|
||||
glyph: "7"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key8
|
||||
width: 43
|
||||
glyph: "8"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key9
|
||||
width: 43
|
||||
glyph: "9"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key10
|
||||
width: 43
|
||||
glyph: "0"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key28
|
||||
width: 50
|
||||
glyph: "←"
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
id: row2
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
|
||||
Key {
|
||||
id: key11
|
||||
width: 43
|
||||
glyph: "!"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key12
|
||||
width: 43
|
||||
glyph: "@"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key13
|
||||
width: 43
|
||||
glyph: "#"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key14
|
||||
width: 43
|
||||
glyph: "$"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key15
|
||||
width: 43
|
||||
glyph: "%"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key16
|
||||
width: 43
|
||||
glyph: "^"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key17
|
||||
width: 43
|
||||
glyph: "&"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key18
|
||||
width: 43
|
||||
glyph: "*"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key19
|
||||
width: 43
|
||||
glyph: "("
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key32
|
||||
width: 43
|
||||
glyph: ")"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key37
|
||||
width: 50
|
||||
glyph: "⏎"
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
id: row3
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 4
|
||||
|
||||
Key {
|
||||
id: key27
|
||||
width: 43
|
||||
glyph: "="
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key20
|
||||
width: 43
|
||||
glyph: "+"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key21
|
||||
width: 43
|
||||
glyph: "-"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key22
|
||||
width: 43
|
||||
glyph: ","
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key23
|
||||
width: 43
|
||||
glyph: "."
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key24
|
||||
width: 43
|
||||
glyph: ";"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key25
|
||||
width: 43
|
||||
glyph: ":"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key26
|
||||
width: 43
|
||||
glyph: "'"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key31
|
||||
width: 43
|
||||
glyph: "\""
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key33
|
||||
width: 43
|
||||
glyph: "<"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key36
|
||||
width: 43
|
||||
glyph: ">"
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
Row {
|
||||
id: row4
|
||||
width: 480
|
||||
height: 50
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 19
|
||||
|
||||
Key {
|
||||
id: key30
|
||||
width: 65
|
||||
glyph: "abc"
|
||||
mouseArea.onClicked: {
|
||||
keyboardBase.parent.punctuationMode = false
|
||||
}
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key29
|
||||
width: 285
|
||||
glyph: " "
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key34
|
||||
width: 43
|
||||
glyph: "⇦"
|
||||
}
|
||||
|
||||
Key {
|
||||
id: key35
|
||||
x: 343
|
||||
width: 43
|
||||
glyph: "⇨"
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: rightRect
|
||||
y: 280
|
||||
height: 200
|
||||
color: "#252525"
|
||||
border.width: 0
|
||||
anchors.left: keyboardRect.right
|
||||
anchors.leftMargin: 0
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: rectangle1
|
||||
color: "#ffffff"
|
||||
anchors.bottom: keyboardRect.top
|
||||
anchors.bottomMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.top: parent.top
|
||||
anchors.topMargin: 0
|
||||
}
|
||||
|
||||
}
|
|
@ -199,6 +199,11 @@ TreeView {
unfocusHelper.forceActiveFocus();
}
}

onReadOnlyChanged: {
// Have to explicitly set keyboardRaised because automatic setting fails because readOnly is true at the time.
keyboardRaised = activeFocus;
}
}
}
}

@ -13,8 +13,9 @@ import "."
|
|||
|
||||
BaseWebView {
|
||||
onNewViewRequested: {
|
||||
var component = Qt.createComponent("../Browser.qml");
|
||||
var newWindow = component.createObject(desktop);
|
||||
request.openIn(newWindow.webView)
|
||||
// Load dialog via OffscreenUi so that JavaScript EventBridge is available.
|
||||
var browser = OffscreenUi.load("Browser.qml");
|
||||
request.openIn(browser.webView);
|
||||
browser.webView.forceActiveFocus();
|
||||
}
|
||||
}
|
||||
|
|
|
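Loading Browser.qml through OffscreenUi, as in the hunk above, is what makes the injected EventBridge object available to pages opened in the new window. A rough page-side sketch of talking to it follows; emitWebEvent appears elsewhere in this change set, while the scriptEventReceived signal name is an assumption rather than something this diff confirms.

    // Page-side sketch, assuming the injected EventBridge exposes emitWebEvent()
    // (used elsewhere in this change set) and a scriptEventReceived signal (an assumption here).
    if (typeof EventBridge !== "undefined" && EventBridge !== null) {
        EventBridge.emitWebEvent("page-ready");

        EventBridge.scriptEventReceived.connect(function (message) {
            console.log("message from Interface:", message);
        });
    }
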
@ -2,13 +2,23 @@ import QtQuick 2.5
|
|||
import QtWebEngine 1.1
|
||||
import QtWebChannel 1.0
|
||||
import "../controls-uit" as HiFiControls
|
||||
import HFWebEngineProfile 1.0
|
||||
|
||||
Item {
|
||||
property alias url: root.url
|
||||
property alias eventBridge: eventBridgeWrapper.eventBridge
|
||||
property bool keyboardEnabled: true // FIXME - Keyboard HMD only: Default to false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
// FIXME - Keyboard HMD only: Make Interface either set keyboardRaised property directly in OffscreenQmlSurface
|
||||
// or provide HMDinfo object to QML in RenderableWebEntityItem and do the following.
|
||||
/*
|
||||
onKeyboardRaisedChanged: {
|
||||
keyboardEnabled = HMDinfo.active;
|
||||
}
|
||||
*/
|
||||
|
||||
QtObject {
|
||||
id: eventBridgeWrapper
|
||||
WebChannel.id: "eventBridgeWrapper"
|
||||
|
@ -17,10 +27,16 @@ Item {
|
|||
|
||||
WebEngineView {
|
||||
id: root
|
||||
objectName: "webEngineView"
|
||||
x: 0
|
||||
y: 0
|
||||
width: parent.width
|
||||
height: keyboardRaised ? parent.height - keyboard1.height : parent.height
|
||||
height: keyboardEnabled && keyboardRaised ? parent.height - keyboard.height : parent.height
|
||||
|
||||
profile: HFWebEngineProfile {
|
||||
id: webviewProfile
|
||||
storageName: "qmlWebEngine"
|
||||
}
|
||||
|
||||
// creates a global EventBridge object.
|
||||
WebEngineScript {
|
||||
|
@ -53,28 +69,6 @@ Item {
|
|||
root.profile.httpUserAgent = "Mozilla/5.0 Chrome (HighFidelityInterface)";
|
||||
}
|
||||
|
||||
// FIXME hack to get the URL with the auth token included. Remove when we move to Qt 5.6
|
||||
Timer {
|
||||
id: urlReplacementTimer
|
||||
running: false
|
||||
repeat: false
|
||||
interval: 50
|
||||
onTriggered: url = root.newUrl;
|
||||
}
|
||||
|
||||
onUrlChanged: {
|
||||
var originalUrl = url.toString();
|
||||
root.newUrl = urlHandler.fixupUrl(originalUrl).toString();
|
||||
if (root.newUrl !== originalUrl) {
|
||||
root.stop();
|
||||
if (urlReplacementTimer.running) {
|
||||
console.warn("Replacement timer already running");
|
||||
return;
|
||||
}
|
||||
urlReplacementTimer.start();
|
||||
}
|
||||
}
|
||||
|
||||
onFeaturePermissionRequested: {
|
||||
grantFeaturePermission(securityOrigin, feature, true);
|
||||
}
|
||||
|
@ -82,7 +76,7 @@ Item {
|
|||
onLoadingChanged: {
|
||||
keyboardRaised = false;
|
||||
punctuationMode = false;
|
||||
keyboard1.resetShiftMode(false);
|
||||
keyboard.resetShiftMode(false);
|
||||
|
||||
// Required to support clicking on "hifi://" links
|
||||
if (WebEngineView.LoadStartedStatus == loadRequest.status) {
|
||||
|
@ -105,32 +99,15 @@ Item {
|
|||
}
|
||||
}
|
||||
|
||||
// virtual keyboard, letters
|
||||
HiFiControls.Keyboard {
|
||||
id: keyboard1
|
||||
y: keyboardRaised ? parent.height : 0
|
||||
height: keyboardRaised ? 200 : 0
|
||||
visible: keyboardRaised && !punctuationMode
|
||||
enabled: keyboardRaised && !punctuationMode
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
id: keyboard
|
||||
raised: parent.keyboardEnabled && parent.keyboardRaised
|
||||
numeric: parent.punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
}
|
||||
|
||||
HiFiControls.KeyboardPunctuation {
|
||||
id: keyboard2
|
||||
y: keyboardRaised ? parent.height : 0
|
||||
height: keyboardRaised ? 200 : 0
|
||||
visible: keyboardRaised && punctuationMode
|
||||
enabled: keyboardRaised && punctuationMode
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.bottom: parent.bottom
|
||||
anchors.bottomMargin: 0
|
||||
}
|
||||
}
|
||||
|
|
|
@ -33,7 +33,7 @@ FocusScope {
}

}

onHeightChanged: d.handleSizeChanged();

onWidthChanged: d.handleSizeChanged();

@ -22,7 +22,7 @@ ModalWindow {
|
|||
implicitWidth: 640;
|
||||
implicitHeight: 320;
|
||||
visible: true;
|
||||
keyboardEnabled: false // Disable ModalWindow's keyboard.
|
||||
keyboardOverride: true // Disable ModalWindow's keyboard.
|
||||
|
||||
signal selected(var result);
|
||||
signal canceled();
|
||||
|
@ -51,6 +51,7 @@ ModalWindow {
|
|||
}
|
||||
}
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
onKeyboardRaisedChanged: d.resize();
|
||||
|
@ -116,7 +117,7 @@ ModalWindow {
|
|||
var targetHeight = (textField.visible ? textField.controlHeight + hifi.dimensions.contentSpacing.y : 0) +
|
||||
(extraInputs.visible ? extraInputs.height + hifi.dimensions.contentSpacing.y : 0) +
|
||||
(buttons.height + 3 * hifi.dimensions.contentSpacing.y) +
|
||||
(root.keyboardRaised ? (200 + hifi.dimensions.contentSpacing.y) : 0);
|
||||
((keyboardEnabled && keyboardRaised) ? (keyboard.raisedHeight + hifi.dimensions.contentSpacing.y) : 0);
|
||||
|
||||
root.width = (targetWidth < d.minWidth) ? d.minWidth : ((targetWidth > d.maxWdith) ? d.maxWidth : targetWidth);
|
||||
root.height = (targetHeight < d.minHeight) ? d.minHeight : ((targetHeight > d.maxHeight) ?
|
||||
|
@ -153,38 +154,15 @@ ModalWindow {
|
|||
}
|
||||
}
|
||||
|
||||
Item {
|
||||
Keyboard {
|
||||
id: keyboard
|
||||
|
||||
height: keyboardRaised ? 200 : 0
|
||||
|
||||
raised: keyboardEnabled && keyboardRaised
|
||||
numeric: punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
bottomMargin: keyboardRaised ? hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
|
||||
Keyboard {
|
||||
id: keyboard1
|
||||
visible: keyboardRaised && !punctuationMode
|
||||
enabled: keyboardRaised && !punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
}
|
||||
|
||||
KeyboardPunctuation {
|
||||
id: keyboard2
|
||||
visible: keyboardRaised && punctuationMode
|
||||
enabled: keyboardRaised && punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
bottomMargin: raised ? hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -211,6 +189,7 @@ ModalWindow {
|
|||
left: parent.left;
|
||||
bottom: parent.bottom;
|
||||
leftMargin: 6; // Magic number to align with warning icon
|
||||
bottomMargin: 6;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -224,7 +203,10 @@ ModalWindow {
|
|||
bottom: parent.bottom;
|
||||
}
|
||||
model: root.comboBox ? root.comboBox.items : [];
|
||||
onCurrentTextChanged: updateCheckbox();
|
||||
onAccepted: {
|
||||
updateCheckbox();
|
||||
focus = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -335,7 +317,9 @@ ModalWindow {
|
|||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
keyboardEnabled = HMD.active;
|
||||
updateIcon();
|
||||
updateCheckbox();
|
||||
d.resize();
|
||||
textField.forceActiveFocus();
|
||||
}
|
||||
@ -27,7 +27,7 @@ ModalWindow {
|
|||
id: root
|
||||
resizable: true
|
||||
implicitWidth: 480
|
||||
implicitHeight: 360 + (fileDialogItem.keyboardRaised ? 200 + hifi.dimensions.contentSpacing.y : 0)
|
||||
implicitHeight: 360 + (fileDialogItem.keyboardEnabled && fileDialogItem.keyboardRaised ? keyboard.raisedHeight + hifi.dimensions.contentSpacing.y : 0)
|
||||
|
||||
minSize: Qt.vector2d(360, 240)
|
||||
draggable: true
|
||||
|
@ -70,7 +70,9 @@ ModalWindow {
|
|||
signal canceled();
|
||||
|
||||
Component.onCompleted: {
|
||||
console.log("Helper " + helper + " drives " + drives)
|
||||
console.log("Helper " + helper + " drives " + drives);
|
||||
|
||||
fileDialogItem.keyboardEnabled = HMD.active;
|
||||
|
||||
// HACK: The following lines force the model to initialize properly such that the go-up button
|
||||
// works properly from the initial screen.
|
||||
|
@ -85,6 +87,8 @@ ModalWindow {
|
|||
|
||||
if (selectDirectory) {
|
||||
currentSelection.text = d.capitalizeDrive(helper.urlToPath(initialFolder));
|
||||
d.currentSelectionIsFolder = true;
|
||||
d.currentSelectionUrl = initialFolder;
|
||||
}
|
||||
|
||||
helper.contentsChanged.connect(function() {
|
||||
|
@ -106,6 +110,7 @@ ModalWindow {
|
|||
height: pane.height
|
||||
anchors.margins: 0
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
|
@ -626,7 +631,7 @@ ModalWindow {
|
|||
left: parent.left
|
||||
right: selectionType.visible ? selectionType.left: parent.right
|
||||
rightMargin: selectionType.visible ? hifi.dimensions.contentSpacing.x : 0
|
||||
bottom: keyboard1.top
|
||||
bottom: keyboard.top
|
||||
bottomMargin: hifi.dimensions.contentSpacing.y
|
||||
}
|
||||
readOnly: !root.saveDialog
|
||||
|
@ -648,25 +653,15 @@ ModalWindow {
|
|||
}
|
||||
|
||||
Keyboard {
|
||||
id: keyboard1
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && !parent.punctuationMode
|
||||
enabled: visible
|
||||
anchors.right: parent.right
|
||||
anchors.left: parent.left
|
||||
anchors.bottom: buttonRow.top
|
||||
anchors.bottomMargin: visible ? hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
|
||||
KeyboardPunctuation {
|
||||
id: keyboard2
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && parent.punctuationMode
|
||||
enabled: visible
|
||||
anchors.right: parent.right
|
||||
anchors.left: parent.left
|
||||
anchors.bottom: buttonRow.top
|
||||
anchors.bottomMargin: visible ? hifi.dimensions.contentSpacing.y : 0
|
||||
id: keyboard
|
||||
raised: parent.keyboardEnabled && parent.keyboardRaised
|
||||
numeric: parent.punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: buttonRow.top
|
||||
bottomMargin: visible ? hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
@ -22,6 +22,7 @@ ModalWindow {
|
|||
implicitWidth: 640
|
||||
implicitHeight: 320
|
||||
visible: true
|
||||
keyboardOverride: true // Disable ModalWindow's keyboard.
|
||||
|
||||
signal selected(var result);
|
||||
signal canceled();
|
||||
|
@ -45,6 +46,12 @@ ModalWindow {
|
|||
property int titleWidth: 0
|
||||
onTitleWidthChanged: d.resize();
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
onKeyboardRaisedChanged: d.resize();
|
||||
|
||||
function updateIcon() {
|
||||
if (!root) {
|
||||
return;
|
||||
|
@ -59,11 +66,6 @@ ModalWindow {
|
|||
height: pane.height
|
||||
anchors.margins: 0
|
||||
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
onKeyboardRaisedChanged: d.resize();
|
||||
|
||||
QtObject {
|
||||
id: d
|
||||
readonly property int minWidth: 480
|
||||
|
@ -74,15 +76,15 @@ ModalWindow {
|
|||
function resize() {
|
||||
var targetWidth = Math.max(titleWidth, pane.width)
|
||||
var targetHeight = (items ? comboBox.controlHeight : textResult.controlHeight) + 5 * hifi.dimensions.contentSpacing.y + buttons.height
|
||||
root.width = (targetWidth < d.minWidth) ? d.minWidth : ((targetWidth > d.maxWdith) ? d.maxWidth : targetWidth)
|
||||
root.height = ((targetHeight < d.minHeight) ? d.minHeight: ((targetHeight > d.maxHeight) ? d.maxHeight : targetHeight)) + (modalWindowItem.keyboardRaised ? (200 + 2 * hifi.dimensions.contentSpacing.y) : 0)
|
||||
root.width = (targetWidth < d.minWidth) ? d.minWidth : ((targetWidth > d.maxWdith) ? d.maxWidth : targetWidth);
|
||||
root.height = ((targetHeight < d.minHeight) ? d.minHeight : ((targetHeight > d.maxHeight) ? d.maxHeight : targetHeight)) + ((keyboardEnabled && keyboardRaised) ? (keyboard.raisedHeight + 2 * hifi.dimensions.contentSpacing.y) : 0)
|
||||
}
|
||||
}
|
||||
|
||||
Item {
|
||||
anchors {
|
||||
top: parent.top
|
||||
bottom: keyboard1.top;
|
||||
bottom: keyboard.top;
|
||||
left: parent.left;
|
||||
right: parent.right;
|
||||
margins: 0
|
||||
|
@ -116,33 +118,16 @@ ModalWindow {
|
|||
}
|
||||
}
|
||||
|
||||
// virtual keyboard, letters
|
||||
Keyboard {
|
||||
id: keyboard1
|
||||
y: parent.keyboardRaised ? parent.height : 0
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && !parent.punctuationMode
|
||||
enabled: parent.keyboardRaised && !parent.punctuationMode
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.bottom: buttons.top
|
||||
anchors.bottomMargin: parent.keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
|
||||
KeyboardPunctuation {
|
||||
id: keyboard2
|
||||
y: parent.keyboardRaised ? parent.height : 0
|
||||
height: parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardRaised && parent.punctuationMode
|
||||
enabled: parent.keyboardRaised && parent.punctuationMode
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 0
|
||||
anchors.left: parent.left
|
||||
anchors.leftMargin: 0
|
||||
anchors.bottom: buttons.top
|
||||
anchors.bottomMargin: parent.keyboardRaised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
id: keyboard
|
||||
raised: keyboardEnabled && keyboardRaised
|
||||
numeric: punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: buttons.top
|
||||
bottomMargin: raised ? 2 * hifi.dimensions.contentSpacing.y : 0
|
||||
}
|
||||
}
|
||||
|
||||
Flow {
|
||||
|
@ -203,6 +188,7 @@ ModalWindow {
|
|||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
keyboardEnabled = HMD.active;
|
||||
updateIcon();
|
||||
d.resize();
|
||||
textResult.forceActiveFocus();
|
||||
@ -10,23 +10,84 @@
|
|||
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import QtWebEngine 1.1
|
||||
import QtWebChannel 1.0
|
||||
import QtWebEngine 1.2
|
||||
|
||||
import "../../windows" as Windows
|
||||
import "../../controls-uit" as Controls
|
||||
import "../../windows"
|
||||
import "../../controls-uit"
|
||||
import "../../styles-uit"
|
||||
|
||||
Windows.Window {
|
||||
Window {
|
||||
id: root
|
||||
HifiConstants { id: hifi }
|
||||
width: 900; height: 700
|
||||
resizable: true
|
||||
modality: Qt.ApplicationModal
|
||||
|
||||
Controls.WebView {
|
||||
id: webview
|
||||
property alias eventBridge: eventBridgeWrapper.eventBridge
|
||||
|
||||
Item {
|
||||
anchors.fill: parent
|
||||
url: "https://metaverse.highfidelity.com/marketplace?category=avatars"
|
||||
focus: true
|
||||
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: true
|
||||
property bool punctuationMode: false
|
||||
|
||||
BaseWebView {
|
||||
id: webview
|
||||
url: "https://metaverse.highfidelity.com/marketplace?category=avatars"
|
||||
focus: true
|
||||
|
||||
anchors {
|
||||
top: parent.top
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: keyboard.top
|
||||
}
|
||||
|
||||
property alias eventBridgeWrapper: eventBridgeWrapper
|
||||
|
||||
QtObject {
|
||||
id: eventBridgeWrapper
|
||||
WebChannel.id: "eventBridgeWrapper"
|
||||
property var eventBridge;
|
||||
}
|
||||
|
||||
webChannel.registeredObjects: [eventBridgeWrapper]
|
||||
|
||||
// Create a global EventBridge object for raiseAndLowerKeyboard.
|
||||
WebEngineScript {
|
||||
id: createGlobalEventBridge
|
||||
sourceCode: eventBridgeJavaScriptToInject
|
||||
injectionPoint: WebEngineScript.DocumentCreation
|
||||
worldId: WebEngineScript.MainWorld
|
||||
}
|
||||
|
||||
// Detect when may want to raise and lower keyboard.
|
||||
WebEngineScript {
|
||||
id: raiseAndLowerKeyboard
|
||||
injectionPoint: WebEngineScript.Deferred
|
||||
sourceUrl: resourceDirectoryUrl + "html/raiseAndLowerKeyboard.js"
|
||||
worldId: WebEngineScript.MainWorld
|
||||
}
|
||||
|
||||
userScripts: [ createGlobalEventBridge, raiseAndLowerKeyboard ]
|
||||
|
||||
}
|
||||
|
||||
Keyboard {
|
||||
id: keyboard
|
||||
raised: parent.keyboardEnabled && parent.keyboardRaised
|
||||
numeric: parent.punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
keyboardEnabled = HMD.active;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -74,11 +74,6 @@ Preference {
|
|||
colorScheme: hifi.colorSchemes.dark
|
||||
}
|
||||
|
||||
Component {
|
||||
id: avatarBrowserBuilder;
|
||||
AvatarBrowser { }
|
||||
}
|
||||
|
||||
Button {
|
||||
id: button
|
||||
text: "Browse"
|
||||
|
@ -87,12 +82,12 @@ Preference {
|
|||
verticalCenter: dataTextField.verticalCenter
|
||||
}
|
||||
onClicked: {
|
||||
root.browser = avatarBrowserBuilder.createObject(desktop);
|
||||
// Load dialog via OffscreenUi so that JavaScript EventBridge is available.
|
||||
root.browser = OffscreenUi.load("dialogs/preferences/AvatarBrowser.qml");
|
||||
root.browser.windowDestroyed.connect(function(){
|
||||
root.browser = null;
|
||||
})
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
@ -2,6 +2,7 @@ import QtQuick 2.5
|
|||
import QtQuick.Controls 1.4
|
||||
import QtWebEngine 1.1;
|
||||
import Qt.labs.settings 1.0
|
||||
import HFWebEngineProfile 1.0
|
||||
|
||||
import "../desktop" as OriginalDesktop
|
||||
import ".."
|
||||
|
@ -20,17 +21,14 @@ OriginalDesktop.Desktop {
|
|||
onEntered: ApplicationCompositor.reticleOverDesktop = true
|
||||
onExited: ApplicationCompositor.reticleOverDesktop = false
|
||||
acceptedButtons: Qt.NoButton
|
||||
|
||||
|
||||
}
|
||||
|
||||
// The tool window, one instance
|
||||
property alias toolWindow: toolWindow
|
||||
ToolWindow { id: toolWindow }
|
||||
|
||||
property var browserProfile: WebEngineProfile {
|
||||
property var browserProfile: HFWebEngineProfile {
|
||||
id: webviewProfile
|
||||
httpUserAgent: "Chrome/48.0 (HighFidelityInterface)"
|
||||
storageName: "qmlWebEngine"
|
||||
}
|
||||
|
||||
|
@ -53,9 +51,10 @@ OriginalDesktop.Desktop {
|
|||
Toolbar {
|
||||
id: sysToolbar;
|
||||
objectName: "com.highfidelity.interface.toolbar.system";
|
||||
// Magic: sysToolbar.x and y come from settings, and are bound before the properties specified here are applied.
|
||||
x: sysToolbar.x;
|
||||
y: sysToolbar.y;
|
||||
// These values will be overridden by sysToolbar.x/y if there is a saved position in Settings
|
||||
// On exit, the sysToolbar position is saved to settings
|
||||
x: 30
|
||||
y: 50
|
||||
}
|
||||
property var toolbars: (function (map) { // answer dictionary preloaded with sysToolbar
|
||||
map[sysToolbar.objectName] = sysToolbar;
|
||||
|
@ -127,5 +126,3 @@ OriginalDesktop.Desktop {
|
|||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@ -45,7 +45,7 @@ ScrollingWindow {
|
|||
|
||||
Rectangle {
|
||||
width: parent.width
|
||||
height: root.height - (keyboardRaised ? 200 : 0)
|
||||
height: root.height - (keyboardEnabled && keyboardRaised ? 200 : 0)
|
||||
radius: 4
|
||||
color: hifi.colors.baseGray
|
||||
|
||||
|
@ -210,7 +210,7 @@ ScrollingWindow {
|
|||
}
|
||||
|
||||
onKeyboardRaisedChanged: {
|
||||
if (keyboardRaised) {
|
||||
if (keyboardEnabled && keyboardRaised) {
|
||||
// Scroll to item with focus if necessary.
|
||||
var footerHeight = newAttachmentButton.height + buttonRow.height + 3 * hifi.dimensions.contentSpacing.y;
|
||||
var delta = activator.mouseY
|
||||
@ -30,7 +30,7 @@ ScrollingWindow {
|
|||
|
||||
Rectangle {
|
||||
width: parent.width
|
||||
height: root.height - (keyboardRaised ? 200 : 0)
|
||||
height: root.height - (keyboardEnabled && keyboardRaised ? 200 : 0)
|
||||
radius: 4
|
||||
color: hifi.colors.baseGray
|
||||
|
||||
@ -34,7 +34,8 @@ Window {
|
|||
property var footer: Item { } // Optional static footer at the bottom of the dialog.
|
||||
readonly property var footerContentHeight: footer.height > 0 ? (footer.height + 2 * hifi.dimensions.contentSpacing.y + 3) : 0
|
||||
|
||||
property bool keyboardEnabled: true // Set false if derived control implements its own keyboard.
|
||||
property bool keyboardOverride: false // Set true in derived control if it implements its own keyboard.
|
||||
property bool keyboardEnabled: false
|
||||
property bool keyboardRaised: false
|
||||
property bool punctuationMode: false
|
||||
|
||||
|
@ -132,7 +133,7 @@ Window {
|
|||
// Optional non-scrolling footer.
|
||||
id: footerPane
|
||||
|
||||
property alias keyboardEnabled: window.keyboardEnabled
|
||||
property alias keyboardOverride: window.keyboardOverride
|
||||
property alias keyboardRaised: window.keyboardRaised
|
||||
property alias punctuationMode: window.punctuationMode
|
||||
|
||||
|
@ -141,9 +142,9 @@ Window {
|
|||
bottom: parent.bottom
|
||||
}
|
||||
width: parent.contentWidth
|
||||
height: footerContentHeight + (keyboardEnabled && keyboardRaised ? 200 : 0)
|
||||
height: footerContentHeight + (keyboard.enabled && keyboard.raised ? keyboard.height : 0)
|
||||
color: hifi.colors.baseGray
|
||||
visible: footer.height > 0 || keyboardEnabled && keyboardRaised
|
||||
visible: footer.height > 0 || keyboard.enabled && keyboard.raised
|
||||
|
||||
Item {
|
||||
// Horizontal rule.
|
||||
|
@ -181,22 +182,10 @@ Window {
|
|||
}
|
||||
|
||||
HiFiControls.Keyboard {
|
||||
id: keyboard1
|
||||
height: parent.keyboardEnabled && parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardEnabled && parent.keyboardRaised && !parent.punctuationMode
|
||||
enabled: parent.keyboardEnabled && parent.keyboardRaised && !parent.punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
}
|
||||
|
||||
HiFiControls.KeyboardPunctuation {
|
||||
id: keyboard2
|
||||
height: parent.keyboardEnabled && parent.keyboardRaised ? 200 : 0
|
||||
visible: parent.keyboardEnabled && parent.keyboardRaised && parent.punctuationMode
|
||||
enabled: parent.keyboardEnabled && parent.keyboardRaised && parent.punctuationMode
|
||||
id: keyboard
|
||||
enabled: !keyboardOverride
|
||||
raised: keyboardEnabled && keyboardRaised
|
||||
numeric: punctuationMode
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
|
@ -207,9 +196,9 @@ Window {
|
|||
}
|
||||
|
||||
onKeyboardRaisedChanged: {
|
||||
if (keyboardEnabled && keyboardRaised) {
|
||||
if (!keyboardOverride && keyboardEnabled && keyboardRaised) {
|
||||
var delta = activator.mouseY
|
||||
- (activator.height + activator.y - 200 - footerContentHeight - hifi.dimensions.controlLineHeight);
|
||||
- (activator.height + activator.y - keyboard.raisedHeight - footerContentHeight - hifi.dimensions.controlLineHeight);
|
||||
|
||||
if (delta > 0) {
|
||||
pane.scrollBy(delta);
|
||||
|
@ -220,4 +209,10 @@ Window {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
Component.onCompleted: {
|
||||
if (typeof HMD !== "undefined") {
|
||||
keyboardEnabled = HMD.active;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -13,6 +13,9 @@ struct OverlayData {
|
|||
vec4 glowPoints;
|
||||
vec4 glowColors[2];
|
||||
vec4 resolutionRadiusAlpha;
|
||||
|
||||
vec4 extraGlowColor;
|
||||
vec2 extraGlowPoint;
|
||||
};
|
||||
|
||||
layout(std140) uniform overlayBuffer {
|
||||
|
@ -25,6 +28,9 @@ float alpha = overlay.resolutionRadiusAlpha.w;
|
|||
vec4 glowPoints = overlay.glowPoints;
|
||||
vec4 glowColors[2] = overlay.glowColors;
|
||||
|
||||
vec2 extraGlowPoint = overlay.extraGlowPoint;
|
||||
vec4 extraGlowColor = overlay.extraGlowColor;
|
||||
|
||||
in vec3 vPosition;
|
||||
in vec2 vTexCoord;
|
||||
|
||||
|
@ -48,11 +54,16 @@ void main() {
|
|||
float glowIntensity = 0.0;
|
||||
float dist1 = distance(vTexCoord * aspect, glowPoints.xy * aspect);
|
||||
float dist2 = distance(vTexCoord * aspect, glowPoints.zw * aspect);
|
||||
float dist = min(dist1, dist2);
|
||||
float dist3 = distance(vTexCoord * aspect, extraGlowPoint * aspect);
|
||||
float distX = min(dist1, dist2);
|
||||
float dist = min(distX, dist3);
|
||||
vec3 glowColor = glowColors[0].rgb;
|
||||
if (dist2 < dist1) {
|
||||
glowColor = glowColors[1].rgb;
|
||||
}
|
||||
if (dist3 < dist2) {
|
||||
glowColor = extraGlowColor.rgb;
|
||||
}
|
||||
|
||||
if (dist <= radius) {
|
||||
glowIntensity = 1.0 - (dist / radius);
|
||||
@ -11,6 +11,9 @@ struct OverlayData {
    vec4 glowPoints;
    vec4 glowColors[2];
    vec4 resolutionRadiusAlpha;

    vec4 extraGlowColor;
    vec2 extraGlowPoint;
};

layout(std140) uniform overlayBuffer {

@ -34,6 +34,8 @@
|
|||
#include <QtQml/QQmlEngine>
|
||||
#include <QtQuick/QQuickWindow>
|
||||
|
||||
#include <QtWebEngineWidgets/QWebEngineProfile>
|
||||
|
||||
#include <QtWidgets/QDesktopWidget>
|
||||
#include <QtWidgets/QMessageBox>
|
||||
|
||||
|
@ -94,6 +96,7 @@
|
|||
#include <RenderShadowTask.h>
|
||||
#include <RenderDeferredTask.h>
|
||||
#include <ResourceCache.h>
|
||||
#include <SandboxUtils.h>
|
||||
#include <SceneScriptingInterface.h>
|
||||
#include <ScriptEngines.h>
|
||||
#include <ScriptCache.h>
|
||||
|
@ -125,6 +128,7 @@
|
|||
#include "InterfaceLogging.h"
|
||||
#include "LODManager.h"
|
||||
#include "ModelPackager.h"
|
||||
#include "networking/HFWebEngineProfile.h"
|
||||
#include "scripting/AccountScriptingInterface.h"
|
||||
#include "scripting/AssetMappingsScriptingInterface.h"
|
||||
#include "scripting/AudioDeviceScriptingInterface.h"
|
||||
|
@ -415,8 +419,6 @@ bool setupEssentials(int& argc, char** argv) {
|
|||
static const auto SUPPRESS_SETTINGS_RESET = "--suppress-settings-reset";
|
||||
bool suppressPrompt = cmdOptionExists(argc, const_cast<const char**>(argv), SUPPRESS_SETTINGS_RESET);
|
||||
bool previousSessionCrashed = CrashHandler::checkForResetSettings(suppressPrompt);
|
||||
CrashHandler::writeRunningMarkerFiler();
|
||||
qAddPostRoutine(CrashHandler::deleteRunningMarkerFile);
|
||||
|
||||
DependencyManager::registerInheritance<LimitedNodeList, NodeList>();
|
||||
DependencyManager::registerInheritance<AvatarHashMap, AvatarManager>();
|
||||
|
@ -504,8 +506,11 @@ Q_GUI_EXPORT void qt_gl_set_global_share_context(QOpenGLContext *context);
|
|||
|
||||
Setting::Handle<int> sessionRunTime{ "sessionRunTime", 0 };
|
||||
|
||||
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
||||
Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bool runServer, QString runServerPathOption) :
|
||||
QApplication(argc, argv),
|
||||
_shouldRunServer(runServer),
|
||||
_runServerPath(runServerPathOption),
|
||||
_runningMarker(this, RUNNING_MARKER_FILENAME),
|
||||
_window(new MainWindow(desktop())),
|
||||
_sessionRunTimer(startupTimer),
|
||||
_previousSessionCrashed(setupEssentials(argc, argv)),
|
||||
|
@ -529,7 +534,9 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
_maxOctreePPS(maxOctreePacketsPerSecond.get()),
|
||||
_lastFaceTrackerUpdate(0)
|
||||
{
|
||||
setProperty("com.highfidelity.launchedFromSteam", SteamClient::isRunning());
|
||||
|
||||
_runningMarker.startRunningMarker();
|
||||
|
||||
PluginContainer* pluginContainer = dynamic_cast<PluginContainer*>(this); // set the container for any plugins that care
|
||||
PluginManager::getInstance()->setContainer(pluginContainer);
|
||||
|
@ -564,6 +571,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
_deadlockWatchdogThread = new DeadlockWatchdogThread();
|
||||
_deadlockWatchdogThread->start();
|
||||
|
||||
qCDebug(interfaceapp) << "[VERSION] SteamVR buildID:" << SteamClient::getSteamVRBuildID();
|
||||
qCDebug(interfaceapp) << "[VERSION] Build sequence:" << qPrintable(applicationVersion());
|
||||
qCDebug(interfaceapp) << "[VERSION] MODIFIED_ORGANIZATION:" << BuildInfo::MODIFIED_ORGANIZATION;
|
||||
qCDebug(interfaceapp) << "[VERSION] VERSION:" << BuildInfo::VERSION;
|
||||
|
@ -575,6 +583,37 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
qCDebug(interfaceapp) << "[VERSION] We will use DEVELOPMENT global services.";
|
||||
#endif
|
||||
|
||||
|
||||
bool wantsSandboxRunning = shouldRunServer();
|
||||
static bool determinedSandboxState = false;
|
||||
static bool sandboxIsRunning = false;
|
||||
SandboxUtils sandboxUtils;
|
||||
// updateHeartbeat() because we are going to poll shortly...
|
||||
updateHeartbeat();
|
||||
sandboxUtils.ifLocalSandboxRunningElse([&]() {
|
||||
qCDebug(interfaceapp) << "Home sandbox appears to be running.....";
|
||||
determinedSandboxState = true;
|
||||
sandboxIsRunning = true;
|
||||
}, [&, wantsSandboxRunning]() {
|
||||
qCDebug(interfaceapp) << "Home sandbox does not appear to be running....";
|
||||
if (wantsSandboxRunning) {
|
||||
QString contentPath = getRunServerPath();
|
||||
SandboxUtils::runLocalSandbox(contentPath, true, RUNNING_MARKER_FILENAME);
|
||||
sandboxIsRunning = true;
|
||||
}
|
||||
determinedSandboxState = true;
|
||||
});
|
||||
|
||||
// SandboxUtils::runLocalSandbox currently has 2 sec delay after spawning sandbox, so 4
|
||||
// sec here is ok I guess. TODO: ping sandbox so we know it is up, perhaps?
|
||||
quint64 MAX_WAIT_TIME = USECS_PER_SECOND * 4;
|
||||
auto startWaiting = usecTimestampNow();
|
||||
while (!determinedSandboxState && (usecTimestampNow() - startWaiting <= MAX_WAIT_TIME)) {
|
||||
QCoreApplication::processEvents();
|
||||
// updateHeartbeat() while polling so we don't scare the deadlock watchdog
|
||||
updateHeartbeat();
|
||||
usleep(USECS_PER_MSEC * 50); // 20hz
|
||||
}
|
||||
|
||||
_bookmarks = new Bookmarks(); // Before setting up the menu
|
||||
|
||||
|
@ -668,6 +707,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
connect(&domainHandler, SIGNAL(disconnectedFromDomain()), SLOT(clearDomainOctreeDetails()));
|
||||
connect(&domainHandler, &DomainHandler::domainConnectionRefused, this, &Application::domainConnectionRefused);
|
||||
|
||||
// We could clear ATP assets only when changing domains, but it's possible that the domain you are connected
|
||||
// to has gone down and switched to a new content set, so when you reconnect the cached ATP assets will no longer be valid.
|
||||
connect(&domainHandler, &DomainHandler::disconnectedFromDomain, DependencyManager::get<ScriptCache>().data(), &ScriptCache::clearATPScriptsFromCache);
|
||||
|
||||
// update our location every 5 seconds in the metaverse server, assuming that we are authenticated with one
|
||||
const qint64 DATA_SERVER_LOCATION_CHANGE_UPDATE_MSECS = 5 * MSECS_PER_SECOND;
|
||||
|
||||
|
@ -818,7 +861,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
|
||||
{ "gl_version", glContextData["version"] },
|
||||
{ "gl_vender", glContextData["vendor"] },
|
||||
{ "gl_sl_version", glContextData["slVersion"] },
|
||||
{ "gl_sl_version", glContextData["sl_version"] },
|
||||
{ "gl_renderer", glContextData["renderer"] },
|
||||
{ "ideal_thread_count", QThread::idealThreadCount() }
|
||||
};
|
||||
|
@ -842,8 +885,6 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
|
||||
UserActivityLogger::getInstance().logAction("launch", properties);
|
||||
|
||||
_connectionMonitor.init();
|
||||
|
||||
// Tell our entity edit sender about our known jurisdictions
|
||||
_entityEditSender.setServerJurisdictions(&_entityServerJurisdictions);
|
||||
_entityEditSender.setMyAvatar(myAvatar.get());
|
||||
|
@ -1104,7 +1145,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
});
|
||||
|
||||
// If the user clicks somewhere where there is NO entity at all, we will release focus
|
||||
connect(getEntities(), &EntityTreeRenderer::mousePressOffEntity, [=]() {
|
||||
connect(getEntities().data(), &EntityTreeRenderer::mousePressOffEntity, [=]() {
|
||||
setKeyboardFocusEntity(UNKNOWN_ENTITY_ID);
|
||||
});
|
||||
|
||||
|
@ -1135,15 +1176,25 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
properties["process_memory_used"] = static_cast<qint64>(memInfo.processUsedMemoryBytes);
|
||||
}
|
||||
|
||||
// content location and build info - useful for filtering stats
|
||||
auto addressManager = DependencyManager::get<AddressManager>();
|
||||
auto currentDomain = addressManager->currentShareableAddress(true).toString(); // domain only
|
||||
auto currentPath = addressManager->currentPath(true); // with orientation
|
||||
properties["current_domain"] = currentDomain;
|
||||
properties["current_path"] = currentPath;
|
||||
properties["build_version"] = BuildInfo::VERSION;
|
||||
|
||||
auto displayPlugin = qApp->getActiveDisplayPlugin();
|
||||
|
||||
properties["fps"] = _frameCounter.rate();
|
||||
properties["target_frame_rate"] = getTargetFrameRate();
|
||||
properties["render_rate"] = displayPlugin->renderRate();
|
||||
properties["present_rate"] = displayPlugin->presentRate();
|
||||
properties["new_frame_present_rate"] = displayPlugin->newFramePresentRate();
|
||||
properties["dropped_frame_rate"] = displayPlugin->droppedFrameRate();
|
||||
properties["sim_rate"] = getAverageSimsPerSecond();
|
||||
properties["avatar_sim_rate"] = getAvatarSimrate();
|
||||
properties["has_async_reprojection"] = displayPlugin->hasAsyncReprojection();
|
||||
|
||||
auto bandwidthRecorder = DependencyManager::get<BandwidthRecorder>();
|
||||
properties["packet_rate_in"] = bandwidthRecorder->getCachedTotalAverageInputPacketsPerSecond();
|
||||
|
@ -1184,6 +1235,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
properties["active_display_plugin"] = getActiveDisplayPlugin()->getName();
|
||||
properties["using_hmd"] = isHMDMode();
|
||||
|
||||
auto glInfo = getGLContextData();
|
||||
properties["gl_info"] = glInfo;
|
||||
properties["gpu_free_memory"] = (int)BYTES_TO_MB(gpu::Context::getFreeGPUMemory());
|
||||
properties["ideal_thread_count"] = QThread::idealThreadCount();
|
||||
|
||||
auto hmdHeadPose = getHMDSensorPose();
|
||||
properties["hmd_head_pose_changed"] = isHMDMode() && (hmdHeadPose != lastHMDHeadPose);
|
||||
lastHMDHeadPose = hmdHeadPose;
|
||||
|
@ -1261,7 +1317,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
// Get sandbox content set version, if available
|
||||
auto acDirPath = PathUtils::getRootDataDirectory() + BuildInfo::MODIFIED_ORGANIZATION + "/assignment-client/";
|
||||
auto contentVersionPath = acDirPath + "content-version.txt";
|
||||
qDebug() << "Checking " << contentVersionPath << " for content version";
|
||||
qCDebug(interfaceapp) << "Checking " << contentVersionPath << " for content version";
|
||||
auto contentVersion = 0;
|
||||
QFile contentVersionFile(contentVersionPath);
|
||||
if (contentVersionFile.open(QIODevice::ReadOnly | QIODevice::Text)) {
|
||||
|
@ -1269,7 +1325,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
// toInt() returns 0 if the conversion fails, so we don't need to specifically check for failure
|
||||
contentVersion = line.toInt();
|
||||
}
|
||||
qDebug() << "Server content version: " << contentVersion;
|
||||
qCDebug(interfaceapp) << "Server content version: " << contentVersion;
|
||||
|
||||
bool hasTutorialContent = contentVersion >= 1;
|
||||
|
||||
|
@ -1279,10 +1335,10 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
|
||||
bool shouldGoToTutorial = hasHMDAndHandControllers && hasTutorialContent && !tutorialComplete.get();
|
||||
|
||||
qDebug() << "Has HMD + Hand Controllers: " << hasHMDAndHandControllers << ", current plugin: " << _displayPlugin->getName();
|
||||
qDebug() << "Has tutorial content: " << hasTutorialContent;
|
||||
qDebug() << "Tutorial complete: " << tutorialComplete.get();
|
||||
qDebug() << "Should go to tutorial: " << shouldGoToTutorial;
|
||||
qCDebug(interfaceapp) << "Has HMD + Hand Controllers: " << hasHMDAndHandControllers << ", current plugin: " << _displayPlugin->getName();
|
||||
qCDebug(interfaceapp) << "Has tutorial content: " << hasTutorialContent;
|
||||
qCDebug(interfaceapp) << "Tutorial complete: " << tutorialComplete.get();
|
||||
qCDebug(interfaceapp) << "Should go to tutorial: " << shouldGoToTutorial;
|
||||
|
||||
// when --url in command line, teleport to location
|
||||
const QString HIFI_URL_COMMAND_LINE_KEY = "--url";
|
||||
|
@ -1295,11 +1351,11 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
const QString TUTORIAL_PATH = "/tutorial_begin";
|
||||
|
||||
if (shouldGoToTutorial) {
|
||||
DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([=]() {
|
||||
qDebug() << "Home sandbox appears to be running, going to Home.";
|
||||
if(sandboxIsRunning) {
|
||||
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
|
||||
DependencyManager::get<AddressManager>()->goToLocalSandbox(TUTORIAL_PATH);
|
||||
}, [=]() {
|
||||
qDebug() << "Home sandbox does not appear to be running, going to Entry.";
|
||||
} else {
|
||||
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
|
||||
if (firstRun.get()) {
|
||||
showHelp();
|
||||
}
|
||||
|
@ -1308,7 +1364,7 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
} else {
|
||||
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
|
||||
}
|
||||
});
|
||||
}
|
||||
} else {
|
||||
|
||||
bool isFirstRun = firstRun.get();
|
||||
|
@ -1320,22 +1376,24 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer) :
|
|||
// If this is a first run we short-circuit the address passed in
|
||||
if (isFirstRun) {
|
||||
if (hasHMDAndHandControllers) {
|
||||
DependencyManager::get<AddressManager>()->ifLocalSandboxRunningElse([=]() {
|
||||
qDebug() << "Home sandbox appears to be running, going to Home.";
|
||||
if(sandboxIsRunning) {
|
||||
qCDebug(interfaceapp) << "Home sandbox appears to be running, going to Home.";
|
||||
DependencyManager::get<AddressManager>()->goToLocalSandbox();
|
||||
}, [=]() {
|
||||
qDebug() << "Home sandbox does not appear to be running, going to Entry.";
|
||||
} else {
|
||||
qCDebug(interfaceapp) << "Home sandbox does not appear to be running, going to Entry.";
|
||||
DependencyManager::get<AddressManager>()->goToEntry();
|
||||
});
|
||||
}
|
||||
} else {
|
||||
DependencyManager::get<AddressManager>()->goToEntry();
|
||||
}
|
||||
} else {
|
||||
qDebug() << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
|
||||
qCDebug(interfaceapp) << "Not first run... going to" << qPrintable(addressLookupString.isEmpty() ? QString("previous location") : addressLookupString);
|
||||
DependencyManager::get<AddressManager>()->loadSettings(addressLookupString);
|
||||
}
|
||||
}
|
||||
|
||||
_connectionMonitor.init();
|
||||
|
||||
// After all of the constructor is completed, then set firstRun to false.
|
||||
firstRun.set(false);
|
||||
}
|
||||
|
@ -1438,6 +1496,7 @@ void Application::updateHeartbeat() const {
|
|||
|
||||
void Application::aboutToQuit() {
|
||||
emit beforeAboutToQuit();
|
||||
DependencyManager::get<AudioClient>()->beforeAboutToQuit();
|
||||
|
||||
foreach(auto inputPlugin, PluginManager::getInstance()->getInputPlugins()) {
|
||||
if (inputPlugin->isActive()) {
|
||||
|
@ -1498,6 +1557,9 @@ void Application::cleanupBeforeQuit() {
|
|||
DependencyManager::get<ScriptEngines>()->shutdownScripting(); // stop all currently running global scripts
|
||||
DependencyManager::destroy<ScriptEngines>();
|
||||
|
||||
_displayPlugin.reset();
|
||||
PluginManager::getInstance()->shutdown();
|
||||
|
||||
// Cleanup all overlays after the scripts, as scripts might add more
|
||||
_overlays.cleanupAllOverlays();
|
||||
|
||||
|
@ -1513,17 +1575,6 @@ void Application::cleanupBeforeQuit() {
|
|||
saveSettings();
|
||||
_window->saveGeometry();
|
||||
|
||||
// stop the AudioClient
|
||||
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
|
||||
"stop", Qt::BlockingQueuedConnection);
|
||||
|
||||
// destroy the AudioClient so it and its thread have a chance to go down safely
|
||||
DependencyManager::destroy<AudioClient>();
|
||||
|
||||
// destroy the AudioInjectorManager so it and its thread have a chance to go down safely
|
||||
// this will also stop any ongoing network injectors
|
||||
DependencyManager::destroy<AudioInjectorManager>();
|
||||
|
||||
// Destroy third party processes after scripts have finished using them.
|
||||
#ifdef HAVE_DDE
|
||||
DependencyManager::destroy<DdeFaceTracker>();
|
||||
|
@ -1532,10 +1583,29 @@ void Application::cleanupBeforeQuit() {
|
|||
DependencyManager::destroy<EyeTracker>();
|
||||
#endif
|
||||
|
||||
// stop QML
|
||||
DependencyManager::destroy<OffscreenUi>();
|
||||
|
||||
// stop audio after QML, as there are unexplained audio crashes originating in qtwebengine
|
||||
|
||||
// stop the AudioClient, synchronously
|
||||
QMetaObject::invokeMethod(DependencyManager::get<AudioClient>().data(),
|
||||
"stop", Qt::BlockingQueuedConnection);
|
||||
|
||||
// destroy Audio so it and its threads have a chance to go down safely
|
||||
DependencyManager::destroy<AudioClient>();
|
||||
DependencyManager::destroy<AudioInjectorManager>();
|
||||
|
||||
// shutdown render engine
|
||||
_main3DScene = nullptr;
|
||||
_renderEngine = nullptr;
|
||||
|
||||
qCDebug(interfaceapp) << "Application::cleanupBeforeQuit() complete";
|
||||
}
|
||||
|
||||
Application::~Application() {
|
||||
DependencyManager::destroy<Preferences>();
|
||||
|
||||
_entityClipboard->eraseAllOctreeElements();
|
||||
_entityClipboard.reset();
|
||||
|
||||
|
@ -1553,7 +1623,6 @@ Application::~Application() {
|
|||
DependencyManager::get<AvatarManager>()->getObjectsToRemoveFromPhysics(motionStates);
|
||||
_physicsEngine->removeObjects(motionStates);
|
||||
|
||||
DependencyManager::destroy<OffscreenUi>();
|
||||
DependencyManager::destroy<AvatarManager>();
|
||||
DependencyManager::destroy<AnimationCache>();
|
||||
DependencyManager::destroy<FramebufferCache>();
|
||||
|
@ -1667,6 +1736,7 @@ void Application::initializeUi() {
|
|||
UpdateDialog::registerType();
|
||||
qmlRegisterType<Preference>("Hifi", 1, 0, "Preference");
|
||||
|
||||
qmlRegisterType<HFWebEngineProfile>("HFWebEngineProfile", 1, 0, "HFWebEngineProfile");
|
||||
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
offscreenUi->create(_glWidget->qglContext());
|
||||
|
@ -1694,6 +1764,7 @@ void Application::initializeUi() {
|
|||
// though I can't find it. Hence, "ApplicationInterface"
|
||||
rootContext->setContextProperty("ApplicationInterface", this);
|
||||
rootContext->setContextProperty("Audio", &AudioScriptingInterface::getInstance());
|
||||
rootContext->setContextProperty("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
rootContext->setContextProperty("Controller", DependencyManager::get<controller::ScriptingInterface>().data());
|
||||
rootContext->setContextProperty("Entities", DependencyManager::get<EntityScriptingInterface>().data());
|
||||
FileScriptingInterface* fileDownload = new FileScriptingInterface(engine);
|
||||
|
@ -2120,13 +2191,10 @@ void Application::resizeGL() {
|
|||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
auto uiSize = displayPlugin->getRecommendedUiSize();
|
||||
// Bit of a hack since there's no device pixel ratio change event I can find.
|
||||
static qreal lastDevicePixelRatio = 0;
|
||||
qreal devicePixelRatio = _window->devicePixelRatio();
|
||||
if (offscreenUi->size() != fromGlm(uiSize) || devicePixelRatio != lastDevicePixelRatio) {
|
||||
if (offscreenUi->size() != fromGlm(uiSize)) {
|
||||
qCDebug(interfaceapp) << "Device pixel ratio changed, triggering resize to " << uiSize;
|
||||
offscreenUi->resize(fromGlm(uiSize), true);
|
||||
_offscreenContext->makeCurrent();
|
||||
lastDevicePixelRatio = devicePixelRatio;
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3404,7 +3472,7 @@ void Application::init() {
|
|||
|
||||
// connect the _entityCollisionSystem to our EntityTreeRenderer since that's what handles running entity scripts
|
||||
connect(_entitySimulation.get(), &EntitySimulation::entityCollisionWithEntity,
|
||||
getEntities(), &EntityTreeRenderer::entityCollisionWithEntity);
|
||||
getEntities().data(), &EntityTreeRenderer::entityCollisionWithEntity);
|
||||
|
||||
// connect the _entities (EntityTreeRenderer) to our script engine's EntityScriptingInterface for firing
|
||||
// of events related clicking, hovering over, and entering entities
|
||||
|
@ -3670,7 +3738,18 @@ void Application::setKeyboardFocusEntity(QUuid id) {
|
|||
void Application::setKeyboardFocusEntity(EntityItemID entityItemID) {
|
||||
auto entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
|
||||
if (_keyboardFocusedItem.get() != entityItemID) {
|
||||
// reset focused entity
|
||||
_keyboardFocusedItem.set(UNKNOWN_ENTITY_ID);
|
||||
if (_keyboardFocusHighlight) {
|
||||
_keyboardFocusHighlight->setVisible(false);
|
||||
}
|
||||
|
||||
// if invalid, return without expensive (locking) operations
|
||||
if (entityItemID == UNKNOWN_ENTITY_ID) {
|
||||
return;
|
||||
}
|
||||
|
||||
// if valid, query properties
|
||||
auto properties = entityScriptingInterface->getEntityProperties(entityItemID);
|
||||
if (!properties.getLocked() && properties.getVisible()) {
|
||||
auto entity = getEntities()->getTree()->findEntityByID(entityItemID);
|
||||
|
@ -3681,6 +3760,8 @@ void Application::setKeyboardFocusEntity(EntityItemID entityItemID) {
|
|||
}
|
||||
_keyboardFocusedItem.set(entityItemID);
|
||||
_lastAcceptedKeyPress = usecTimestampNow();
|
||||
|
||||
// create a focus
|
||||
if (_keyboardFocusHighlightID < 0 || !getOverlays().isAddedOverlay(_keyboardFocusHighlightID)) {
|
||||
_keyboardFocusHighlight = std::make_shared<Cube3DOverlay>();
|
||||
_keyboardFocusHighlight->setAlpha(1.0f);
|
||||
|
@ -3692,17 +3773,16 @@ void Application::setKeyboardFocusEntity(EntityItemID entityItemID) {
|
|||
_keyboardFocusHighlight->setColorPulse(1.0);
|
||||
_keyboardFocusHighlight->setIgnoreRayIntersection(true);
|
||||
_keyboardFocusHighlight->setDrawInFront(false);
|
||||
_keyboardFocusHighlightID = getOverlays().addOverlay(_keyboardFocusHighlight);
|
||||
}
|
||||
|
||||
// position the focus
|
||||
_keyboardFocusHighlight->setRotation(entity->getRotation());
|
||||
_keyboardFocusHighlight->setPosition(entity->getPosition());
|
||||
_keyboardFocusHighlight->setDimensions(entity->getDimensions() * 1.05f);
|
||||
_keyboardFocusHighlight->setVisible(true);
|
||||
_keyboardFocusHighlightID = getOverlays().addOverlay(_keyboardFocusHighlight);
|
||||
}
|
||||
}
|
||||
if (_keyboardFocusedItem.get() == UNKNOWN_ENTITY_ID && _keyboardFocusHighlight) {
|
||||
_keyboardFocusHighlight->setVisible(false);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -3712,12 +3792,6 @@ void Application::updateDialogs(float deltaTime) const {
|
|||
PerformanceWarning warn(showWarnings, "Application::updateDialogs()");
|
||||
auto dialogsManager = DependencyManager::get<DialogsManager>();
|
||||
|
||||
// Update audio stats dialog, if any
|
||||
AudioStatsDialog* audioStatsDialog = dialogsManager->getAudioStatsDialog();
|
||||
if(audioStatsDialog) {
|
||||
audioStatsDialog->update();
|
||||
}
|
||||
|
||||
// Update bandwidth dialog, if any
|
||||
BandwidthDialog* bandwidthDialog = dialogsManager->getBandwidthDialog();
|
||||
if (bandwidthDialog) {
|
||||
|
@ -3921,8 +3995,6 @@ void Application::update(float deltaTime) {
|
|||
auto collisionEvents = _physicsEngine->getCollisionEvents();
|
||||
avatarManager->handleCollisionEvents(collisionEvents);
|
||||
|
||||
_physicsEngine->dumpStatsIfNecessary();
|
||||
|
||||
if (!_aboutToQuit) {
|
||||
PerformanceTimer perfTimer("entities");
|
||||
// Collision events (and their scripts) must not be handled when we're locked, above. (That would risk
|
||||
|
@ -3935,6 +4007,13 @@ void Application::update(float deltaTime) {
|
|||
}
|
||||
|
||||
myAvatar->harvestResultsFromPhysicsSimulation(deltaTime);
|
||||
|
||||
if (Menu::getInstance()->isOptionChecked(MenuOption::DisplayDebugTimingDetails) &&
|
||||
Menu::getInstance()->isOptionChecked(MenuOption::ExpandPhysicsSimulationTiming)) {
|
||||
_physicsEngine->harvestPerformanceStats();
|
||||
}
|
||||
// NOTE: the PhysicsEngine stats are written to stdout NOT to Qt log framework
|
||||
_physicsEngine->dumpStatsIfNecessary();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -4966,6 +5045,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
scriptEngine->registerGlobalObject("Stats", Stats::getInstance());
|
||||
scriptEngine->registerGlobalObject("Settings", SettingsScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioDevice", AudioDeviceScriptingInterface::getInstance());
|
||||
scriptEngine->registerGlobalObject("AudioStats", DependencyManager::get<AudioClient>()->getStats().data());
|
||||
|
||||
// Caches
|
||||
scriptEngine->registerGlobalObject("AnimationCache", DependencyManager::get<AnimationCache>().data());
|
||||
|
@ -5634,6 +5714,9 @@ void Application::updateDisplayMode() {
|
|||
// Make the switch atomic from the perspective of other threads
|
||||
{
|
||||
std::unique_lock<std::mutex> lock(_displayPluginLock);
|
||||
// Tell the desktop not to reposition (which requires plugin info), until we have set the new plugin, below.
|
||||
bool wasRepositionLocked = offscreenUi->getDesktop()->property("repositionLocked").toBool();
|
||||
offscreenUi->getDesktop()->setProperty("repositionLocked", true);
|
||||
|
||||
auto oldDisplayPlugin = _displayPlugin;
|
||||
if (_displayPlugin) {
|
||||
|
@ -5670,6 +5753,7 @@ void Application::updateDisplayMode() {
|
|||
_offscreenContext->makeCurrent();
|
||||
getApplicationCompositor().setDisplayPlugin(newDisplayPlugin);
|
||||
_displayPlugin = newDisplayPlugin;
|
||||
offscreenUi->getDesktop()->setProperty("repositionLocked", wasRepositionLocked);
|
||||
}
|
||||
|
||||
emit activeDisplayPluginChanged();
|
||||
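The display-mode switch above manually saves repositionLocked, forces it on, and restores the saved value at the end of the locked block. As a hedged alternative, not what the diff does, the restore could be bound to scope exit with a small guard so an early return cannot leave the desktop locked; all names below are illustrative.

    #include <functional>
    #include <utility>

    // Minimal scope guard: runs the supplied callable when it goes out of scope.
    class ScopeGuard {
    public:
        explicit ScopeGuard(std::function<void()> onExit) : _onExit(std::move(onExit)) {}
        ~ScopeGuard() { if (_onExit) { _onExit(); } }
        ScopeGuard(const ScopeGuard&) = delete;
        ScopeGuard& operator=(const ScopeGuard&) = delete;
    private:
        std::function<void()> _onExit;
    };

    // Usage sketch, assuming 'desktop' is the item returned by offscreenUi->getDesktop():
    //   bool wasRepositionLocked = desktop->property("repositionLocked").toBool();
    //   desktop->setProperty("repositionLocked", true);
    //   ScopeGuard restoreLock([desktop, wasRepositionLocked] {
    //       desktop->setProperty("repositionLocked", wasRepositionLocked);
    //   });
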
@ -46,6 +46,8 @@
|
|||
#include <ThreadSafeValueCache.h>
|
||||
#include <shared/FileLogger.h>
|
||||
|
||||
#include <RunningMarker.h>
|
||||
|
||||
#include "avatar/MyAvatar.h"
|
||||
#include "Bookmarks.h"
|
||||
#include "Camera.h"
|
||||
|
@ -57,7 +59,6 @@
|
|||
#include "scripting/ControllerScriptingInterface.h"
|
||||
#include "scripting/DialogsManagerScriptingInterface.h"
|
||||
#include "ui/ApplicationOverlay.h"
|
||||
#include "ui/AudioStatsDialog.h"
|
||||
#include "ui/BandwidthDialog.h"
|
||||
#include "ui/LodToolsDialog.h"
|
||||
#include "ui/LogDialog.h"
|
||||
|
@ -87,6 +88,8 @@ static const UINT UWM_SHOW_APPLICATION =
|
|||
RegisterWindowMessage("UWM_SHOW_APPLICATION_{71123FD6-3DA8-4DC1-9C27-8A12A6250CBA}_" + qgetenv("USERNAME"));
|
||||
#endif
|
||||
|
||||
static const QString RUNNING_MARKER_FILENAME = "Interface.running";
|
||||
|
||||
class Application;
|
||||
#if defined(qApp)
|
||||
#undef qApp
|
||||
|
@ -103,7 +106,16 @@ class Application : public QApplication,
|
|||
// TODO? Get rid of those
|
||||
friend class OctreePacketProcessor;
|
||||
|
||||
private:
|
||||
bool _shouldRunServer { false };
|
||||
QString _runServerPath;
|
||||
RunningMarker _runningMarker;
|
||||
|
||||
public:
|
||||
// startup related getter/setters
|
||||
bool shouldRunServer() const { return _shouldRunServer; }
|
||||
bool hasRunServerPath() const { return !_runServerPath.isEmpty(); }
|
||||
QString getRunServerPath() const { return _runServerPath; }
|
||||
|
||||
// virtual functions required for PluginContainer
|
||||
virtual ui::Menu* getPrimaryMenu() override;
|
||||
|
@ -127,7 +139,7 @@ public:
|
|||
static void initPlugins(const QStringList& arguments);
|
||||
static void shutdownPlugins();
|
||||
|
||||
Application(int& argc, char** argv, QElapsedTimer& startup_time);
|
||||
Application(int& argc, char** argv, QElapsedTimer& startup_time, bool runServer, QString runServerPathOption);
|
||||
~Application();
|
||||
|
||||
void postLambdaEvent(std::function<void()> f) override;
|
||||
|
@ -168,7 +180,7 @@ public:
|
|||
void copyDisplayViewFrustum(ViewFrustum& viewOut) const;
|
||||
void copyShadowViewFrustum(ViewFrustum& viewOut) const override;
|
||||
const OctreePacketProcessor& getOctreePacketProcessor() const { return _octreeProcessor; }
|
||||
EntityTreeRenderer* getEntities() const { return DependencyManager::get<EntityTreeRenderer>().data(); }
|
||||
QSharedPointer<EntityTreeRenderer> getEntities() const { return DependencyManager::get<EntityTreeRenderer>(); }
|
||||
QUndoStack* getUndoStack() { return &_undoStack; }
|
||||
MainWindow* getWindow() const { return _window; }
|
||||
EntityTreePointer getEntityClipboard() const { return _entityClipboard; }
|
||||
|
@ -217,7 +229,7 @@ public:
|
|||
|
||||
qint64 getCurrentSessionRuntime() const { return _sessionRunTimer.elapsed(); }
|
||||
|
||||
bool isAboutToQuit() const { return _aboutToQuit; }
|
||||
bool isAboutToQuit() const override { return _aboutToQuit; }
|
||||
bool isPhysicsEnabled() const { return _physicsEnabled; }
|
||||
|
||||
// the isHMDMode is true whenever we use the interface from an HMD and not a standard flat display
|
||||
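The widened constructor above takes the run-server flag and content path from whoever builds the Application; the matching main.cpp change is not shown in this diff. A minimal, hypothetical call site follows; the option names and parsing flow are assumptions, not taken from the diff.

    // Hedged sketch only: option names and parsing are illustrative.
    #include <QtCore/QCommandLineOption>
    #include <QtCore/QCommandLineParser>
    #include <QtCore/QElapsedTimer>
    #include <QtCore/QStringList>
    #include "Application.h"

    int main(int argc, char** argv) {
        QElapsedTimer startupTimer;
        startupTimer.start();

        // collect the raw arguments so they can be parsed before the QApplication exists
        QStringList arguments;
        for (int i = 0; i < argc; ++i) {
            arguments << QString::fromLocal8Bit(argv[i]);
        }

        QCommandLineParser parser;
        QCommandLineOption runServerOption("runServer", "Also run a local sandbox server.");
        QCommandLineOption serverContentPathOption("serverContentPath", "Content path for the sandbox server.", "path");
        parser.addOption(runServerOption);
        parser.addOption(serverContentPathOption);
        parser.parse(arguments);

        Application app(argc, argv, startupTimer,
                        parser.isSet(runServerOption),
                        parser.value(serverContentPathOption));
        return app.exec();
    }
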
@ -13,34 +13,42 @@

#include "ui/DialogsManager.h"

#include <NodeList.h>
#include <DependencyManager.h>
#include <DomainHandler.h>
#include <AddressManager.h>
#include <NodeList.h>

// Because the connection monitor is created at startup, the time we wait on initial load
// should be longer to allow the application to initialize.
static const int ON_INITIAL_LOAD_DISPLAY_AFTER_DISCONNECTED_FOR_X_MS = 10000;
static const int DISPLAY_AFTER_DISCONNECTED_FOR_X_MS = 5000;

void ConnectionMonitor::init() {
    // Connect to domain disconnected message
    auto nodeList = DependencyManager::get<NodeList>();
    const DomainHandler& domainHandler = nodeList->getDomainHandler();
    connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &ConnectionMonitor::disconnectedFromDomain);
    connect(&domainHandler, &DomainHandler::connectedToDomain, this, &ConnectionMonitor::connectedToDomain);
    connect(&domainHandler, &DomainHandler::resetting, this, &ConnectionMonitor::startTimer);
    connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &ConnectionMonitor::startTimer);
    connect(&domainHandler, &DomainHandler::connectedToDomain, this, &ConnectionMonitor::stopTimer);
    connect(&domainHandler, &DomainHandler::domainConnectionRefused, this, &ConnectionMonitor::stopTimer);

    _timer.setSingleShot(true);
    _timer.setInterval(DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
    if (!domainHandler.isConnected()) {
        _timer.start();
        _timer.start(ON_INITIAL_LOAD_DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
    }

    auto dialogsManager = DependencyManager::get<DialogsManager>();
    connect(&_timer, &QTimer::timeout, dialogsManager.data(), &DialogsManager::showAddressBar);
    connect(&_timer, &QTimer::timeout, this, []() {
        qDebug() << "ConnectionMonitor: Showing connection failure window";
        DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(true);
    });
}

void ConnectionMonitor::disconnectedFromDomain() {
    _timer.start();
void ConnectionMonitor::startTimer() {
    qDebug() << "ConnectionMonitor: Starting timer";
    _timer.start(DISPLAY_AFTER_DISCONNECTED_FOR_X_MS);
}

void ConnectionMonitor::connectedToDomain(const QString& name) {
void ConnectionMonitor::stopTimer() {
    qDebug() << "ConnectionMonitor: Stopping timer";
    _timer.stop();
    DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(false);
}
|
||||
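To summarize the ConnectionMonitor change above in one place: instead of reacting only to disconnectedFromDomain, the monitor now arms a single-shot QTimer and shows the failure dialog only if no connection is established before it fires. A condensed sketch of the wiring, assembled from the hunk above rather than copied verbatim:

    // single-shot timer: armed on disconnect/resetting, disarmed on connect/refusal
    _timer.setSingleShot(true);
    connect(&domainHandler, &DomainHandler::resetting, this, &ConnectionMonitor::startTimer);
    connect(&domainHandler, &DomainHandler::disconnectedFromDomain, this, &ConnectionMonitor::startTimer);
    connect(&domainHandler, &DomainHandler::connectedToDomain, this, &ConnectionMonitor::stopTimer);
    connect(&domainHandler, &DomainHandler::domainConnectionRefused, this, &ConnectionMonitor::stopTimer);
    connect(&_timer, &QTimer::timeout, this, [] {
        // only fires if no connection was (re)established within the interval
        DependencyManager::get<DialogsManager>()->setDomainConnectionFailureVisibility(true);
    });
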
@ -23,8 +23,8 @@ public:
    void init();

private slots:
    void disconnectedFromDomain();
    void connectedToDomain(const QString& name);
    void startTimer();
    void stopTimer();

private:
    QTimer _timer;

@ -23,9 +23,10 @@
|
|||
#include <QVBoxLayout>
|
||||
#include <QtCore/QUrl>
|
||||
|
||||
#include "Application.h"
|
||||
#include "Menu.h"
|
||||
|
||||
static const QString RUNNING_MARKER_FILENAME = "Interface.running";
|
||||
#include <RunningMarker.h>
|
||||
|
||||
bool CrashHandler::checkForResetSettings(bool suppressPrompt) {
|
||||
QSettings::setDefaultFormat(QSettings::IniFormat);
|
||||
|
@ -39,7 +40,7 @@ bool CrashHandler::checkForResetSettings(bool suppressPrompt) {
|
|||
// If option does not exist in Interface.ini so assume default behavior.
|
||||
bool displaySettingsResetOnCrash = !displayCrashOptions.isValid() || displayCrashOptions.toBool();
|
||||
|
||||
QFile runningMarkerFile(runningMarkerFilePath());
|
||||
QFile runningMarkerFile(RunningMarker::getMarkerFilePath(RUNNING_MARKER_FILENAME));
|
||||
bool wasLikelyCrash = runningMarkerFile.exists();
|
||||
|
||||
if (suppressPrompt) {
|
||||
|
@ -161,20 +162,3 @@ void CrashHandler::handleCrash(CrashHandler::Action action) {
|
|||
}
|
||||
}
|
||||
|
||||
void CrashHandler::writeRunningMarkerFiler() {
|
||||
QFile runningMarkerFile(runningMarkerFilePath());
|
||||
if (!runningMarkerFile.exists()) {
|
||||
runningMarkerFile.open(QIODevice::WriteOnly);
|
||||
runningMarkerFile.close();
|
||||
}
|
||||
}
|
||||
void CrashHandler::deleteRunningMarkerFile() {
|
||||
QFile runningMarkerFile(runningMarkerFilePath());
|
||||
if (runningMarkerFile.exists()) {
|
||||
runningMarkerFile.remove();
|
||||
}
|
||||
}
|
||||
|
||||
const QString CrashHandler::runningMarkerFilePath() {
|
||||
return QStandardPaths::writableLocation(QStandardPaths::DataLocation) + "/" + RUNNING_MARKER_FILENAME;
|
||||
}
|
||||
|
|
|
@ -19,9 +19,6 @@ class CrashHandler {
public:
    static bool checkForResetSettings(bool suppressPrompt = false);

    static void writeRunningMarkerFiler();
    static void deleteRunningMarkerFile();

private:
    enum Action {
        DELETE_INTERFACE_INI,

@ -31,8 +28,6 @@ private:

    static Action promptUserForAction(bool showCrashMessage);
    static void handleCrash(Action action);

    static const QString runningMarkerFilePath();
};

#endif // hifi_CrashHandler_h
|
||||
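The marker-file helpers deleted from CrashHandler above are replaced by the shared RunningMarker class: Application now owns a RunningMarker member and CrashHandler only checks whether a stale marker from the previous run exists. A short sketch of how the pieces shown across this diff fit together (paraphrased from the hunks, not verbatim project code):

    static const QString RUNNING_MARKER_FILENAME = "Interface.running";

    // Application constructor (per the Application.cpp/.h hunks above):
    //   _runningMarker(this, RUNNING_MARKER_FILENAME)   // member initializer
    //   _runningMarker.startRunningMarker();            // writes the marker; removed again on clean shutdown

    // CrashHandler::checkForResetSettings (per the CrashHandler.cpp hunk above):
    QFile runningMarkerFile(RunningMarker::getMarkerFilePath(RUNNING_MARKER_FILENAME));
    bool wasLikelyCrash = runningMarkerFile.exists();   // leftover marker means the previous run likely crashed
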
@ -29,7 +29,7 @@ SpatiallyNestableWeakPointer InterfaceParentFinder::find(QUuid parentID, bool& s
    if (entityTree) {
        parent = entityTree->findByID(parentID);
    } else {
        EntityTreeRenderer* treeRenderer = qApp->getEntities();
        auto treeRenderer = qApp->getEntities();
        EntityTreePointer tree = treeRenderer ? treeRenderer->getTree() : nullptr;
        parent = tree ? tree->findEntityByEntityItemID(parentID) : nullptr;
    }

@ -338,6 +338,9 @@ Menu::Menu() {
|
|||
// Developer > Render > Throttle FPS If Not Focus
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::ThrottleFPSIfNotFocus, 0, true);
|
||||
|
||||
// Developer > Render > OpenVR threaded submit
|
||||
addCheckableActionToQMenuAndActionHash(renderOptionsMenu, MenuOption::OpenVrThreadedSubmit, 0, true);
|
||||
|
||||
// Developer > Render > Resolution
|
||||
MenuWrapper* resolutionMenu = renderOptionsMenu->addMenu(MenuOption::RenderResolution);
|
||||
QActionGroup* resolutionGroup = new QActionGroup(resolutionMenu);
|
||||
|
@ -605,6 +608,7 @@ Menu::Menu() {
|
|||
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandMyAvatarSimulateTiming, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandOtherAvatarTiming, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandPaintGLTiming, 0, false);
|
||||
addCheckableActionToQMenuAndActionHash(perfTimerMenu, MenuOption::ExpandPhysicsSimulationTiming, 0, false);
|
||||
|
||||
addCheckableActionToQMenuAndActionHash(timingMenu, MenuOption::FrameTimer);
|
||||
addActionToQMenuAndActionHash(timingMenu, MenuOption::RunTimingTests, 0, qApp, SLOT(runTests()));
|
||||
|
@ -616,6 +620,14 @@ Menu::Menu() {
|
|||
// Developer > Audio >>>
|
||||
MenuWrapper* audioDebugMenu = developerMenu->addMenu("Audio");
|
||||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Stats...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
auto scriptEngines = DependencyManager::get<ScriptEngines>();
|
||||
QUrl defaultScriptsLoc = defaultScriptsLocation();
|
||||
defaultScriptsLoc.setPath(defaultScriptsLoc.path() + "developer/utilities/audio/stats.js");
|
||||
scriptEngines->loadScript(defaultScriptsLoc.toString());
|
||||
});
|
||||
|
||||
action = addActionToQMenuAndActionHash(audioDebugMenu, "Buffers...");
|
||||
connect(action, &QAction::triggered, [] {
|
||||
DependencyManager::get<OffscreenUi>()->toggle(QString("hifi/dialogs/AudioPreferencesDialog.qml"), "AudioPreferencesDialog");
|
||||
|
@ -654,10 +666,6 @@ Menu::Menu() {
|
|||
audioScopeFramesGroup->addAction(fiftyFrames);
|
||||
}
|
||||
|
||||
// Developer > Audio > Audio Network Stats...
|
||||
addActionToQMenuAndActionHash(audioDebugMenu, MenuOption::AudioNetworkStats, 0,
|
||||
dialogsManager.data(), SLOT(audioStatsDetails()));
|
||||
|
||||
// Developer > Physics >>>
|
||||
MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
|
||||
{
|
||||
@ -37,7 +37,6 @@ namespace MenuOption {
    const QString AssetMigration = "ATP Asset Migration";
    const QString AssetServer = "Asset Browser";
    const QString Attachments = "Attachments...";
    const QString AudioNetworkStats = "Audio Network Stats";
    const QString AudioNoiseReduction = "Audio Noise Reduction";
    const QString AudioScope = "Show Scope";
    const QString AudioScopeFiftyFrames = "Fifty";

@ -105,6 +104,7 @@ namespace MenuOption {
    const QString ExpandMyAvatarTiming = "Expand /myAvatar";
    const QString ExpandOtherAvatarTiming = "Expand /otherAvatar";
    const QString ExpandPaintGLTiming = "Expand /paintGL";
    const QString ExpandPhysicsSimulationTiming = "Expand /physics";
    const QString ExpandUpdateTiming = "Expand /update";
    const QString Faceshift = "Faceshift";
    const QString FirstPerson = "First Person";

@ -136,6 +136,7 @@ namespace MenuOption {
    const QString OctreeStats = "Entity Statistics";
    const QString OnePointCalibration = "1 Point Calibration";
    const QString OnlyDisplayTopTen = "Only Display Top Ten";
    const QString OpenVrThreadedSubmit = "OpenVR Threaded Submit";
    const QString OutputMenu = "Display";
    const QString Overlays = "Overlays";
    const QString PackageModel = "Package Model...";

@ -50,50 +50,69 @@ void renderWorldBox(gpu::Batch& batch) {
static const float DASH_LENGTH = 1.0f;
static const float GAP_LENGTH = 1.0f;
auto transform = Transform{};
static std::array<int, 18> geometryIds;
static std::once_flag initGeometryIds;
std::call_once(initGeometryIds, [&] {
for (size_t i = 0; i < geometryIds.size(); ++i) {
geometryIds[i] = geometryCache->allocateID();
}
});

batch.setModelTransform(transform);

geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(HALF_TREE_SCALE, 0.0f, 0.0f), RED, geometryIds[0]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(-HALF_TREE_SCALE, 0.0f, 0.0f), DASHED_RED,
DASH_LENGTH, GAP_LENGTH);
DASH_LENGTH, GAP_LENGTH, geometryIds[1]);

geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, HALF_TREE_SCALE, 0.0f), GREEN, geometryIds[2]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, -HALF_TREE_SCALE, 0.0f), DASHED_GREEN,
DASH_LENGTH, GAP_LENGTH);
DASH_LENGTH, GAP_LENGTH, geometryIds[3]);

geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, HALF_TREE_SCALE), BLUE, geometryIds[4]);
geometryCache->renderDashedLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, 0.0f, -HALF_TREE_SCALE), DASHED_BLUE,
DASH_LENGTH, GAP_LENGTH);
DASH_LENGTH, GAP_LENGTH, geometryIds[5]);

// X center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY);
glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[6]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY);
glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[7]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY);
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[8]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, -HALF_TREE_SCALE, 0.0f),
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY);
glm::vec3(HALF_TREE_SCALE, HALF_TREE_SCALE, 0.0f), GREY,
geometryIds[9]);

// Z center boundaries
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY);
glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[10]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY);
glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE), GREY,
geometryIds[11]);
geometryCache->renderLine(batch, glm::vec3(0.0f, HALF_TREE_SCALE, -HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY);
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[12]);
geometryCache->renderLine(batch, glm::vec3(0.0f, -HALF_TREE_SCALE, HALF_TREE_SCALE),
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY);
glm::vec3(0.0f, HALF_TREE_SCALE, HALF_TREE_SCALE), GREY,
geometryIds[13]);

// Center boundaries
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY);
glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[14]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY);
glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE), GREY,
geometryIds[15]);
geometryCache->renderLine(batch, glm::vec3(HALF_TREE_SCALE, 0.0f, -HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY);
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[16]);
geometryCache->renderLine(batch, glm::vec3(-HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE),
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY);
glm::vec3(HALF_TREE_SCALE, 0.0f, HALF_TREE_SCALE), GREY,
geometryIds[17]);

geometryCache->renderWireCubeInstance(batch, GREY4);
@ -99,12 +99,17 @@ Avatar::Avatar(RigPointer rig) :

_skeletonModel = std::make_shared<SkeletonModel>(this, nullptr, rig);
connect(_skeletonModel.get(), &Model::setURLFinished, this, &Avatar::setModelURLFinished);

auto geometryCache = DependencyManager::get<GeometryCache>();
_nameRectGeometryID = geometryCache->allocateID();
_leftPointerGeometryID = geometryCache->allocateID();
_rightPointerGeometryID = geometryCache->allocateID();
}

Avatar::~Avatar() {
assert(isDead()); // mark dead before calling the dtor

EntityTreeRenderer* treeRenderer = qApp->getEntities();
auto treeRenderer = qApp->getEntities();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (entityTree) {
entityTree->withWriteLock([&] {

@ -119,6 +124,13 @@ Avatar::~Avatar() {
delete _motionState;
_motionState = nullptr;
}

auto geometryCache = DependencyManager::get<GeometryCache>();
if (geometryCache) {
geometryCache->releaseID(_nameRectGeometryID);
geometryCache->releaseID(_leftPointerGeometryID);
geometryCache->releaseID(_rightPointerGeometryID);
}
}

void Avatar::init() {

@ -187,7 +199,7 @@ void Avatar::updateAvatarEntities() {
return; // wait until MyAvatar gets an ID before doing this.
}

EntityTreeRenderer* treeRenderer = qApp->getEntities();
auto treeRenderer = qApp->getEntities();
EntityTreePointer entityTree = treeRenderer ? treeRenderer->getTree() : nullptr;
if (!entityTree) {
return;

@ -492,7 +504,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
pointerTransform.setRotation(rotation);
batch.setModelTransform(pointerTransform);
geometryCache->bindSimpleProgram(batch);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor, _leftPointerGeometryID);
}
}

@ -516,7 +528,7 @@ void Avatar::render(RenderArgs* renderArgs, const glm::vec3& cameraPosition) {
pointerTransform.setRotation(rotation);
batch.setModelTransform(pointerTransform);
geometryCache->bindSimpleProgram(batch);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor);
geometryCache->renderLine(batch, glm::vec3(0.0f, 0.0f, 0.0f), glm::vec3(0.0f, laserLength, 0.0f), laserColor, _rightPointerGeometryID);
}
}
}

@ -603,14 +615,14 @@ void Avatar::fixupModelsInScene() {
_skeletonModel->removeFromScene(scene, pendingChanges);
_skeletonModel->addToScene(scene, pendingChanges);
}
for (auto& attachmentModel : _attachmentModels) {
for (auto attachmentModel : _attachmentModels) {
if (attachmentModel->isRenderable() && attachmentModel->needsFixupInScene()) {
attachmentModel->removeFromScene(scene, pendingChanges);
attachmentModel->addToScene(scene, pendingChanges);
}
}

for (auto& attachmentModelToRemove : _attachmentsToRemove) {
for (auto attachmentModelToRemove : _attachmentsToRemove) {
attachmentModelToRemove->removeFromScene(scene, pendingChanges);
}
_attachmentsToDelete.insert(_attachmentsToDelete.end(), _attachmentsToRemove.begin(), _attachmentsToRemove.end());

@ -782,7 +794,7 @@ void Avatar::renderDisplayName(gpu::Batch& batch, const ViewFrustum& view, const
PROFILE_RANGE_BATCH(batch, __FUNCTION__":renderBevelCornersRect");
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch, false, false, true, true, true);
DependencyManager::get<GeometryCache>()->renderBevelCornersRect(batch, left, bottom, width, height,
bevelDistance, backgroundColor);
bevelDistance, backgroundColor, _nameRectGeometryID);
}

// Render actual name
@ -248,6 +248,9 @@ protected:
ThreadSafeValueCache<glm::quat> _rightPalmRotationCache { glm::quat() };

private:
int _leftPointerGeometryID { 0 };
int _rightPointerGeometryID { 0 };
int _nameRectGeometryID { 0 };
bool _initialized;
bool _shouldAnimate { true };
bool _shouldSkipRender { false };
@ -37,9 +37,13 @@ AvatarActionHold::AvatarActionHold(const QUuid& id, EntityItemPointer ownerEntit
}

AvatarActionHold::~AvatarActionHold() {
auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
if (myAvatar) {
myAvatar->removeHoldAction(this);
// Sometimes actions are destroyed after the AvatarManager is destroyed by the Application.
auto avatarManager = DependencyManager::get<AvatarManager>();
if (avatarManager) {
auto myAvatar = avatarManager->getMyAvatar();
if (myAvatar) {
myAvatar->removeHoldAction(this);
}
}

#if WANT_DEBUG
@ -36,6 +36,7 @@
#include "Application.h"
#include "Avatar.h"
#include "AvatarManager.h"
#include "InterfaceLogging.h"
#include "Menu.h"
#include "MyAvatar.h"
#include "SceneScriptingInterface.h"

@ -208,11 +209,15 @@ AvatarSharedPointer AvatarManager::addAvatar(const QUuid& sessionUUID, const QWe
auto rawRenderableAvatar = std::static_pointer_cast<Avatar>(newAvatar);

render::ScenePointer scene = qApp->getMain3DScene();
render::PendingChanges pendingChanges;
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
if (scene) {
render::PendingChanges pendingChanges;
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderAvatars()) {
rawRenderableAvatar->addToScene(rawRenderableAvatar, scene, pendingChanges);
}
scene->enqueuePendingChanges(pendingChanges);
} else {
qCWarning(interfaceapp) << "AvatarManager::addAvatar() : Unexpected null scene, possibly during application shutdown";
}
scene->enqueuePendingChanges(pendingChanges);

return newAvatar;
}
@ -247,6 +247,15 @@ void MyAvatar::centerBody() {
auto worldBodyPos = extractTranslation(worldBodyMatrix);
auto worldBodyRot = glm::normalize(glm::quat_cast(worldBodyMatrix));

if (_characterController.getState() == CharacterController::State::Ground) {
// the avatar's physical aspect thinks it is standing on something
// therefore need to be careful to not "center" the body below the floor
float downStep = glm::dot(worldBodyPos - getPosition(), _worldUpDirection);
if (downStep < -0.5f * _characterController.getCapsuleHalfHeight() + _characterController.getCapsuleRadius()) {
worldBodyPos -= downStep * _worldUpDirection;
}
}

// this will become our new position.
setPosition(worldBodyPos);
setOrientation(worldBodyRot);

@ -463,7 +472,7 @@ void MyAvatar::simulate(float deltaTime) {

locationChanged();
// if a entity-child of this avatar has moved outside of its queryAACube, update the cube and tell the entity server.
EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
auto entityTreeRenderer = qApp->getEntities();
EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
if (entityTree) {
bool flyingAllowed = true;

@ -1929,7 +1938,7 @@ void MyAvatar::setCharacterControllerEnabled(bool enabled) {
}

bool ghostingAllowed = true;
EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
auto entityTreeRenderer = qApp->getEntities();
if (entityTreeRenderer) {
std::shared_ptr<ZoneEntityItem> zone = entityTreeRenderer->myAvatarZone();
if (zone) {

@ -2280,15 +2289,16 @@ void MyAvatar::removeHoldAction(AvatarActionHold* holdAction) {
}

void MyAvatar::updateHoldActions(const AnimPose& prePhysicsPose, const AnimPose& postUpdatePose) {
EntityTreeRenderer* entityTreeRenderer = qApp->getEntities();
auto entityTreeRenderer = qApp->getEntities();
EntityTreePointer entityTree = entityTreeRenderer ? entityTreeRenderer->getTree() : nullptr;
if (entityTree) {
// to prevent actions from adding or removing themselves from the _holdActions vector
// while we are iterating, we need to enter a critical section.
std::lock_guard<std::mutex> guard(_holdActionsMutex);

// lateAvatarUpdate will modify entity position & orientation, so we need an entity write lock
entityTree->withWriteLock([&] {

// to prevent actions from adding or removing themselves from the _holdActions vector
// while we are iterating, we need to enter a critical section.
std::lock_guard<std::mutex> guard(_holdActionsMutex);

for (auto& holdAction : _holdActions) {
holdAction->lateAvatarUpdate(prePhysicsPose, postUpdatePose);
}
@ -37,6 +37,12 @@
#include <CrashReporter.h>
#endif

#ifdef Q_OS_WIN
extern "C" {
typedef int(__stdcall * CHECKMINSPECPROC) ();
}
#endif

int main(int argc, const char* argv[]) {
#if HAS_BUGSPLAT
static QString BUG_SPLAT_DATABASE = "interface_alpha";

@ -128,22 +134,9 @@ int main(int argc, const char* argv[]) {
parser.addOption(runServerOption);
parser.addOption(serverContentPathOption);
parser.parse(arguments);
if (parser.isSet(runServerOption)) {
QString applicationDirPath = QFileInfo(arguments[0]).path();
QString serverPath = applicationDirPath + "/server-console/server-console.exe";
qDebug() << "Application dir path is: " << applicationDirPath;
qDebug() << "Server path is: " << serverPath;
QStringList args;
if (parser.isSet(serverContentPathOption)) {
QString serverContentPath = QFileInfo(arguments[0]).path() + "/" + parser.value(serverContentPathOption);
args << "--" << "--contentPath" << serverContentPath;
}
qDebug() << QFileInfo(arguments[0]).path();
qDebug() << QProcess::startDetached(serverPath, args);

// Sleep a short amount of time to give the server a chance to start
usleep(2000000);
}
bool runServer = parser.isSet(runServerOption);
bool serverContentPathOptionIsSet = parser.isSet(serverContentPathOption);
QString serverContentPathOptionValue = serverContentPathOptionIsSet ? parser.value(serverContentPathOption) : QString();

QElapsedTimer startupTime;
startupTime.start();

@ -166,10 +159,32 @@ int main(int argc, const char* argv[]) {

SteamClient::init();

#ifdef Q_OS_WIN
// If we're running in steam mode, we need to do an explicit check to ensure we're up to the required min spec
if (SteamClient::isRunning()) {
QString appPath;
{
char filename[MAX_PATH];
GetModuleFileName(NULL, filename, MAX_PATH);
QFileInfo appInfo(filename);
appPath = appInfo.absolutePath();
}
QString openvrDllPath = appPath + "/plugins/openvr.dll";
HMODULE openvrDll;
CHECKMINSPECPROC checkMinSpecPtr;
if ((openvrDll = LoadLibrary(openvrDllPath.toLocal8Bit().data())) &&
(checkMinSpecPtr = (CHECKMINSPECPROC)GetProcAddress(openvrDll, "CheckMinSpec"))) {
if (!checkMinSpecPtr()) {
return -1;
}
}
}
#endif

int exitCode;
{
QSettings::setDefaultFormat(QSettings::IniFormat);
Application app(argc, const_cast<char**>(argv), startupTime);
Application app(argc, const_cast<char**>(argv), startupTime, runServer, serverContentPathOptionValue);

// If we failed the OpenGLVersion check, log it.
if (override) {

@ -223,7 +238,6 @@ int main(int argc, const char* argv[]) {
QTranslator translator;
translator.load("i18n/interface_en");
app.installTranslator(&translator);

qCDebug(interfaceapp, "Created QT Application.");
exitCode = app.exec();
server.close();
interface/src/networking/HFWebEngineProfile.cpp

@ -0,0 +1,27 @@
//
// HFWebEngineProfile.cpp
// interface/src/networking
//
// Created by Stephen Birarda on 2016-10-17.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "HFWebEngineProfile.h"

#include "HFWebEngineRequestInterceptor.h"

static const QString QML_WEB_ENGINE_STORAGE_NAME = "qmlWebEngine";

HFWebEngineProfile::HFWebEngineProfile(QObject* parent) :
QQuickWebEngineProfile(parent)
{
static const QString WEB_ENGINE_USER_AGENT = "Chrome/48.0 (HighFidelityInterface)";
setHttpUserAgent(WEB_ENGINE_USER_AGENT);

// we use the HFWebEngineRequestInterceptor to make sure that web requests are authenticated for the interface user
auto requestInterceptor = new HFWebEngineRequestInterceptor(this);
setRequestInterceptor(requestInterceptor);
}
interface/src/networking/HFWebEngineProfile.h

@ -0,0 +1,25 @@
//
// HFWebEngineProfile.h
// interface/src/networking
//
// Created by Stephen Birarda on 2016-10-17.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#pragma once

#ifndef hifi_HFWebEngineProfile_h
#define hifi_HFWebEngineProfile_h

#include <QtWebEngine/QQuickWebEngineProfile>

class HFWebEngineProfile : public QQuickWebEngineProfile {
public:
HFWebEngineProfile(QObject* parent = Q_NULLPTR);
};

#endif // hifi_HFWebEngineProfile_h
interface/src/networking/HFWebEngineRequestInterceptor.cpp

@ -0,0 +1,40 @@
//
// HFWebEngineRequestInterceptor.cpp
// interface/src/networking
//
// Created by Stephen Birarda on 2016-10-14.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "HFWebEngineRequestInterceptor.h"

#include <QtCore/QDebug>

#include <AccountManager.h>

bool isAuthableHighFidelityURL(const QUrl& url) {
static const QStringList HF_HOSTS = {
"highfidelity.com", "highfidelity.io",
"metaverse.highfidelity.com", "metaverse.highfidelity.io"
};

return url.scheme() == "https" && HF_HOSTS.contains(url.host());
}

void HFWebEngineRequestInterceptor::interceptRequest(QWebEngineUrlRequestInfo& info) {
// check if this is a request to a highfidelity URL
if (isAuthableHighFidelityURL(info.requestUrl())) {
// if we have an access token, add it to the right HTTP header for authorization
auto accountManager = DependencyManager::get<AccountManager>();

if (accountManager->hasValidAccessToken()) {
static const QString OAUTH_AUTHORIZATION_HEADER = "Authorization";

QString bearerTokenString = "Bearer " + accountManager->getAccountInfo().getAccessToken().token;
info.setHttpHeader(OAUTH_AUTHORIZATION_HEADER.toLocal8Bit(), bearerTokenString.toLocal8Bit());
}
}
}
interface/src/networking/HFWebEngineRequestInterceptor.h

@ -0,0 +1,26 @@
//
// HFWebEngineRequestInterceptor.h
// interface/src/networking
//
// Created by Stephen Birarda on 2016-10-14.
// Copyright 2016 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#pragma once

#ifndef hifi_HFWebEngineRequestInterceptor_h
#define hifi_HFWebEngineRequestInterceptor_h

#include <QWebEngineUrlRequestInterceptor>

class HFWebEngineRequestInterceptor : public QWebEngineUrlRequestInterceptor {
public:
HFWebEngineRequestInterceptor(QObject* parent) : QWebEngineUrlRequestInterceptor(parent) {};

virtual void interceptRequest(QWebEngineUrlRequestInfo& info) override;
};

#endif // hifi_HFWebEngineRequestInterceptor_h
@ -92,13 +92,19 @@ void OctreePacketProcessor::processPacket(QSharedPointer<ReceivedMessage> messag
switch(packetType) {
case PacketType::EntityErase: {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
qApp->getEntities()->processEraseMessage(*message, sendingNode);
auto renderer = qApp->getEntities();
if (renderer) {
renderer->processEraseMessage(*message, sendingNode);
}
}
} break;

case PacketType::EntityData: {
if (DependencyManager::get<SceneScriptingInterface>()->shouldRenderEntities()) {
qApp->getEntities()->processDatagram(*message, sendingNode);
auto renderer = qApp->getEntities();
if (renderer) {
renderer->processDatagram(*message, sendingNode);
}
}
} break;
@ -57,6 +57,20 @@ bool HMDScriptingInterface::isHandControllerAvailable() {
return PluginUtils::isHandControllerAvailable();
}

void HMDScriptingInterface::requestShowHandControllers() {
_showHandControllersCount++;
emit shouldShowHandControllersChanged();
}

void HMDScriptingInterface::requestHideHandControllers() {
_showHandControllersCount--;
emit shouldShowHandControllersChanged();
}

bool HMDScriptingInterface::shouldShowHandControllers() const {
return _showHandControllersCount > 0;
}

QScriptValue HMDScriptingInterface::getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine) {
glm::vec3 hudIntersection;
auto instance = DependencyManager::get<HMDScriptingInterface>();

@ -130,6 +144,27 @@ bool HMDScriptingInterface::setHandLasers(int hands, bool enabled, const glm::ve
color, direction);
}

bool HMDScriptingInterface::setExtraLaser(const glm::vec3& worldStart, bool enabled, const glm::vec4& color, const glm::vec3& direction) const {
auto offscreenUi = DependencyManager::get<OffscreenUi>();
offscreenUi->executeOnUiThread([offscreenUi, enabled] {
offscreenUi->getDesktop()->setProperty("hmdHandMouseActive", enabled);
});

auto myAvatar = DependencyManager::get<AvatarManager>()->getMyAvatar();
auto sensorToWorld = myAvatar->getSensorToWorldMatrix();
auto worldToSensor = glm::inverse(sensorToWorld);
auto sensorStart = ::transformPoint(worldToSensor, worldStart);
auto sensorDirection = ::transformVectorFast(worldToSensor, direction);

return qApp->getActiveDisplayPlugin()->setExtraLaser(enabled ? DisplayPlugin::HandLaserMode::Overlay : DisplayPlugin::HandLaserMode::None,
color, sensorStart, sensorDirection);
}

void HMDScriptingInterface::disableExtraLaser() const {
setExtraLaser(vec3(0), false, vec4(0), vec3(0));
}

void HMDScriptingInterface::disableHandLasers(int hands) const {
setHandLasers(hands, false, vec4(0), vec3(0));
}
@ -41,9 +41,17 @@ public:
Q_INVOKABLE bool isHMDAvailable();
Q_INVOKABLE bool isHandControllerAvailable();

Q_INVOKABLE bool setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;
Q_INVOKABLE void requestShowHandControllers();
Q_INVOKABLE void requestHideHandControllers();
Q_INVOKABLE bool shouldShowHandControllers() const;

Q_INVOKABLE bool setHandLasers(int hands, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;
Q_INVOKABLE void disableHandLasers(int hands) const;

Q_INVOKABLE bool setExtraLaser(const glm::vec3& worldStart, bool enabled, const glm::vec4& color, const glm::vec3& direction) const;
Q_INVOKABLE void disableExtraLaser() const;

/// Suppress the activation of any on-screen keyboard so that a script operation will
/// not be interrupted by a keyboard popup
/// Returns false if there is already an active keyboard displayed.

@ -61,6 +69,9 @@ public:
// rotate the overlay UI sphere so that it is centered about the the current HMD position and orientation
Q_INVOKABLE void centerUI();

signals:
bool shouldShowHandControllersChanged();

public:
HMDScriptingInterface();
static QScriptValue getHUDLookAtPosition2D(QScriptContext* context, QScriptEngine* engine);

@ -77,6 +88,7 @@ private:

bool getHUDLookAtPosition3D(glm::vec3& result) const;
glm::mat4 getWorldHMDMatrix() const;
std::atomic<int> _showHandControllersCount { 0 };
};

#endif // hifi_HMDScriptingInterface_h
@ -40,9 +40,18 @@ ApplicationOverlay::ApplicationOverlay()
auto geometryCache = DependencyManager::get<GeometryCache>();
_domainStatusBorder = geometryCache->allocateID();
_magnifierBorder = geometryCache->allocateID();
_qmlGeometryId = geometryCache->allocateID();
_rearViewGeometryId = geometryCache->allocateID();
}

ApplicationOverlay::~ApplicationOverlay() {
auto geometryCache = DependencyManager::get<GeometryCache>();
if (geometryCache) {
geometryCache->releaseID(_domainStatusBorder);
geometryCache->releaseID(_magnifierBorder);
geometryCache->releaseID(_qmlGeometryId);
geometryCache->releaseID(_rearViewGeometryId);
}
}

// Renders the overlays either to a texture or to the screen

@ -89,9 +98,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
PROFILE_RANGE(__FUNCTION__);

if (!_uiTexture) {
_uiTexture = gpu::TexturePointer(gpu::Texture::createExternal2D([](uint32_t recycleTexture, void* recycleFence){
DependencyManager::get<OffscreenUi>()->releaseTexture({ recycleTexture, recycleFence });
}));
_uiTexture = gpu::TexturePointer(gpu::Texture::createExternal2D(OffscreenQmlSurface::getDiscardLambda()));
_uiTexture->setSource(__FUNCTION__);
}
// Once we move UI rendering and screen rendering to different

@ -112,7 +119,7 @@ void ApplicationOverlay::renderQmlUi(RenderArgs* renderArgs) {
batch.setModelTransform(Transform());
batch.resetViewTransform();
batch.setResourceTexture(0, _uiTexture);
geometryCache->renderUnitQuad(batch, glm::vec4(1));
geometryCache->renderUnitQuad(batch, glm::vec4(1), _qmlGeometryId);
}

void ApplicationOverlay::renderAudioScope(RenderArgs* renderArgs) {

@ -188,7 +195,7 @@ void ApplicationOverlay::renderRearView(RenderArgs* renderArgs) {

batch.setResourceTexture(0, selfieTexture);
float alpha = DependencyManager::get<OffscreenUi>()->getDesktop()->property("unpinnedAlpha").toFloat();
geometryCache->renderQuad(batch, bottomLeft, topRight, texCoordMinCorner, texCoordMaxCorner, glm::vec4(1.0f, 1.0f, 1.0f, alpha));
geometryCache->renderQuad(batch, bottomLeft, topRight, texCoordMinCorner, texCoordMaxCorner, glm::vec4(1.0f, 1.0f, 1.0f, alpha), _rearViewGeometryId);

batch.setResourceTexture(0, renderArgs->_whiteTexture);
}

@ -258,7 +265,7 @@ void ApplicationOverlay::buildFramebufferObject() {

auto uiSize = qApp->getUiSize();
if (!_overlayFramebuffer || uiSize != _overlayFramebuffer->getSize()) {
_overlayFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create());
_overlayFramebuffer = gpu::FramebufferPointer(gpu::Framebuffer::create("ApplicationOverlay"));
}

auto width = uiSize.x;
@ -50,6 +50,8 @@ private:
gpu::TexturePointer _overlayDepthTexture;
gpu::TexturePointer _overlayColorTexture;
gpu::FramebufferPointer _overlayFramebuffer;
int _qmlGeometryId { 0 };
int _rearViewGeometryId { 0 };
};

#endif // hifi_ApplicationOverlay_h
@ -1,296 +0,0 @@
//
// AudioStatsDialog.cpp
// interface/src/ui
//
// Created by Bridget Went on 7/9/15.
// Copyright 2015 High Fidelity, Inc.
//
// Distributed under the Apache License, Version 2.0.
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
//

#include "AudioStatsDialog.h"

#include <cstdio>

#include <AudioClient.h>
#include <AudioConstants.h>
#include <AudioIOStats.h>
#include <DependencyManager.h>
#include <GeometryCache.h>
#include <NodeList.h>
#include <Util.h>

const unsigned COLOR0 = 0x33cc99ff;
const unsigned COLOR1 = 0xffef40c0;
const unsigned COLOR2 = 0xd0d0d0a0;
const unsigned COLOR3 = 0x01DD7880;

AudioStatsDisplay::AudioStatsDisplay(QFormLayout* form,
QString text, unsigned colorRGBA) :
_text(text),
_colorRGBA(colorRGBA)
{
_label = new QLabel();
_label->setAlignment(Qt::AlignCenter);

QPalette palette = _label->palette();
unsigned rgb = colorRGBA >> 8;
rgb = ((rgb & 0xfefefeu) >> 1) + ((rgb & 0xf8f8f8) >> 3);
palette.setColor(QPalette::WindowText, QColor::fromRgb(rgb));
_label->setPalette(palette);

form->addRow(_label);
}

void AudioStatsDisplay::paint() {
_label->setText(_strBuf);
}

void AudioStatsDisplay::updatedDisplay(QString str) {
_strBuf = str;
}

AudioStatsDialog::AudioStatsDialog(QWidget* parent) :
QDialog(parent, Qt::Window | Qt::WindowCloseButtonHint | Qt::WindowStaysOnTopHint) {

setWindowTitle("Audio Network Statistics");

// Get statistics from the Audio Client
_stats = &DependencyManager::get<AudioClient>()->getStats();

// Create layout
_form = new QFormLayout();
_form->setSizeConstraint(QLayout::SetFixedSize);

// Initialize channels' content (needed to correctly size channels)
updateStats();

// Create channels
_audioDisplayChannels = QVector<QVector<AudioStatsDisplay*>>(1);

_audioMixerID = addChannel(_form, _audioMixerStats, COLOR0);
_upstreamClientID = addChannel(_form, _upstreamClientStats, COLOR1);
_upstreamMixerID = addChannel(_form, _upstreamMixerStats, COLOR2);
_downstreamID = addChannel(_form, _downstreamStats, COLOR3);
_upstreamInjectedID = addChannel(_form, _upstreamInjectedStats, COLOR0);

// Initialize channels
updateChannels();

// Future renders
connect(averageUpdateTimer, SIGNAL(timeout()), this, SLOT(renderStats()));
averageUpdateTimer->start(200);

// Initial render
QDialog::setLayout(_form);
}

int AudioStatsDialog::addChannel(QFormLayout* form, QVector<QString>& stats, const unsigned color) {

int channelID = _audioDisplayChannels.size() - 1;

for (int i = 0; i < stats.size(); i++)
// Create new display label
_audioDisplayChannels[channelID].push_back(new AudioStatsDisplay(form, stats.at(i), color));

// Expand vector to fit next channel
_audioDisplayChannels.resize(_audioDisplayChannels.size() + 1);

return channelID;
}

void AudioStatsDialog::renderStats() {
updateStats();
updateChannels();
}

void AudioStatsDialog::updateChannels() {
updateChannel(_audioMixerStats, _audioMixerID);
updateChannel(_upstreamClientStats, _upstreamClientID);
updateChannel(_upstreamMixerStats, _upstreamMixerID);
updateChannel(_downstreamStats, _downstreamID);
updateChannel(_upstreamInjectedStats, _upstreamInjectedID);
}

void AudioStatsDialog::updateChannel(QVector<QString>& stats, int channelID) {
// Update all stat displays at specified channel
for (int i = 0; i < stats.size(); i++)
_audioDisplayChannels[channelID].at(i)->updatedDisplay(stats.at(i));
}

void AudioStatsDialog::updateStats() {

// Clear current stats from all vectors
clearAllChannels();

double audioInputBufferLatency{ 0.0 };
double inputRingBufferLatency{ 0.0 };
double networkRoundtripLatency{ 0.0 };
double mixerRingBufferLatency{ 0.0 };
double outputRingBufferLatency{ 0.0 };
double audioOutputBufferLatency{ 0.0 };

if (SharedNodePointer audioMixerNodePointer = DependencyManager::get<NodeList>()->soloNodeOfType(NodeType::AudioMixer)) {
audioInputBufferLatency = (double)_stats->getInputMsRead().getWindowMax();
inputRingBufferLatency = (double)_stats->getInputMsUnplayed().getWindowMax();
networkRoundtripLatency = (double)audioMixerNodePointer->getPingMs();
mixerRingBufferLatency = (double)_stats->getMixerAvatarStreamStats()._unplayedMs;
outputRingBufferLatency = (double)_stats->getMixerDownstreamStats()._unplayedMs;
audioOutputBufferLatency = (double)_stats->getOutputMsUnplayed().getWindowMax();
}

double totalLatency = audioInputBufferLatency + inputRingBufferLatency + mixerRingBufferLatency
+ outputRingBufferLatency + audioOutputBufferLatency + networkRoundtripLatency;

QString stats;
_audioMixerStats.push_back("PIPELINE (averaged over the past 10s)");
stats = "Input Read:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(audioInputBufferLatency, 'f', 0)));
stats = "Input Ring:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(inputRingBufferLatency, 'f', 0)));
stats = "Network (client->mixer):\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(networkRoundtripLatency / 2, 'f', 0)));
stats = "Mixer Ring:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(mixerRingBufferLatency, 'f', 0)));
stats = "Network (mixer->client):\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(networkRoundtripLatency / 2, 'f', 0)));
stats = "Output Ring:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(outputRingBufferLatency, 'f', 0)));
stats = "Output Read:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(audioOutputBufferLatency, 'f', 0)));
stats = "TOTAL:\t%1 ms";
_audioMixerStats.push_back(stats.arg(QString::number(totalLatency, 'f', 0)));

const MovingMinMaxAvg<quint64>& packetSentTimeGaps = _stats->getPacketTimegaps();

_upstreamClientStats.push_back("\nUpstream Mic Audio Packets Sent Gaps (by client):");

stats = "Inter-packet timegaps";
_upstreamClientStats.push_back(stats);
stats = "overall min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(packetSentTimeGaps.getMin()),
formatUsecTime(packetSentTimeGaps.getMax()),
formatUsecTime(packetSentTimeGaps.getAverage()));
_upstreamClientStats.push_back(stats);

stats = "last window min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(packetSentTimeGaps.getWindowMin()),
formatUsecTime(packetSentTimeGaps.getWindowMax()),
formatUsecTime(packetSentTimeGaps.getWindowAverage()));
_upstreamClientStats.push_back(stats);

_upstreamMixerStats.push_back("\nMIXER STREAM");
_upstreamMixerStats.push_back("(this client's remote mixer stream performance)");

renderAudioStreamStats(&_stats->getMixerAvatarStreamStats(), &_upstreamMixerStats);

_downstreamStats.push_back("\nCLIENT STREAM");

AudioStreamStats downstreamStats = _stats->getMixerDownstreamStats();

renderAudioStreamStats(&downstreamStats, &_downstreamStats);

if (_shouldShowInjectedStreams) {

foreach(const AudioStreamStats& injectedStreamAudioStats, _stats->getMixerInjectedStreamStatsMap()) {
stats = "\nINJECTED STREAM (ID: %1)";
stats = stats.arg(injectedStreamAudioStats._streamIdentifier.toString());
_upstreamInjectedStats.push_back(stats);

renderAudioStreamStats(&injectedStreamAudioStats, &_upstreamInjectedStats);
}

}
}

void AudioStatsDialog::renderAudioStreamStats(const AudioStreamStats* streamStats, QVector<QString>* audioStreamStats) {

QString stats = "Packet Loss";
audioStreamStats->push_back(stats);
stats = "overall:\t%1%\t(%2 lost), window:\t%3%\t(%4 lost)";
stats = stats.arg(QString::number((int)(streamStats->_packetStreamStats.getLostRate() * 100.0f)),
QString::number((int)(streamStats->_packetStreamStats._lost)),
QString::number((int)(streamStats->_packetStreamWindowStats.getLostRate() * 100.0f)),
QString::number((int)(streamStats->_packetStreamWindowStats._lost)));
audioStreamStats->push_back(stats);

stats = "Ringbuffer";
audioStreamStats->push_back(stats);
stats = "available frames (avg):\t%1\t(%2), desired:\t%3";
stats = stats.arg(QString::number(streamStats->_framesAvailable),
QString::number(streamStats->_framesAvailableAverage),
QString::number(streamStats->_desiredJitterBufferFrames));
audioStreamStats->push_back(stats);
stats = "starves:\t%1, last starve duration:\t%2, drops:\t%3, overflows:\t%4";
stats = stats.arg(QString::number(streamStats->_starveCount),
QString::number(streamStats->_consecutiveNotMixedCount),
QString::number(streamStats->_framesDropped),
QString::number(streamStats->_overflowCount));
audioStreamStats->push_back(stats);

stats = "Inter-packet timegaps";
audioStreamStats->push_back(stats);

stats = "overall min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(streamStats->_timeGapMin),
formatUsecTime(streamStats->_timeGapMax),
formatUsecTime(streamStats->_timeGapAverage));
audioStreamStats->push_back(stats);

stats = "last window min:\t%1, max:\t%2, avg:\t%3";
stats = stats.arg(formatUsecTime(streamStats->_timeGapWindowMin),
formatUsecTime(streamStats->_timeGapWindowMax),
formatUsecTime(streamStats->_timeGapWindowAverage));
audioStreamStats->push_back(stats);
}

void AudioStatsDialog::clearAllChannels() {
_audioMixerStats.clear();
_upstreamClientStats.clear();
_upstreamMixerStats.clear();
_downstreamStats.clear();
_upstreamInjectedStats.clear();
}

void AudioStatsDialog::paintEvent(QPaintEvent* event) {

// Repaint each stat in each channel
for (int i = 0; i < _audioDisplayChannels.size(); i++) {
for(int j = 0; j < _audioDisplayChannels[i].size(); j++) {
_audioDisplayChannels[i].at(j)->paint();
}
}

QDialog::paintEvent(event);
}

void AudioStatsDialog::reject() {
// Just regularly close upon ESC
QDialog::close();
}

void AudioStatsDialog::closeEvent(QCloseEvent* event) {
QDialog::closeEvent(event);
emit closed();
}

AudioStatsDialog::~AudioStatsDialog() {
clearAllChannels();
for (int i = 0; i < _audioDisplayChannels.size(); i++) {
_audioDisplayChannels[i].clear();
for(int j = 0; j < _audioDisplayChannels[i].size(); j++) {
delete _audioDisplayChannels[i].at(j);
}
}

}