Merge branch 'master' of git://github.com/highfidelity/hifi into 21374
Commit a4585b9400
253 changed files with 12608 additions and 2490 deletions
@@ -1,7 +1,96 @@

# Linux build guide

Please read the [general build guide](BUILD.md) for information on dependencies required for all platforms. Only Linux-specific instructions are found in this file.

## Qt5 Dependencies

Should you choose not to install Qt5 via a package manager that handles dependencies for you, you may be missing some Qt5 dependencies. On Ubuntu, for example, the following additional packages are required:

```
libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev
```

## Ubuntu 16.04 specific build guide

### Prepare environment

Install Qt:
```bash
wget http://debian.highfidelity.com/pool/h/hi/hifi-qt5.6.1_5.6.1_amd64.deb
sudo dpkg -i hifi-qt5.6.1_5.6.1_amd64.deb
```

Install build dependencies:
```bash
sudo apt-get install libasound2 libxmu-dev libxi-dev freeglut3-dev libasound2-dev libjack0 libjack-dev libxrandr-dev libudev-dev libssl-dev
```

To compile Interface on a server, you must also install:
```bash
sudo apt -y install libpulse0 libnss3 libnspr4 libfontconfig1 libxcursor1 libxcomposite1 libxtst6 libxslt1.1
```

Install build tools:
```bash
sudo apt install cmake
```

### Get the code and check out the tag you need

Clone this repository:
```bash
git clone https://github.com/highfidelity/hifi.git
```

To compile a RELEASE version, check out the tag you need. Get a list of all tags:
```bash
git fetch -a
git tag
```

Then check out the latest tag with:
```bash
git checkout tags/RELEASE-6819
```

Or go to the High Fidelity download page (https://highfidelity.com/download) to see the current release version. For example, if the current release is BETA 6731, type:
```bash
git checkout tags/RELEASE-6731
```

### Compiling

Create the build directory:
```bash
mkdir -p hifi/build
cd hifi/build
```

Prepare makefiles:
```bash
cmake -DQT_CMAKE_PREFIX_PATH=/usr/local/Qt5.6.1/5.6/gcc_64/lib/cmake ..
```

Start compilation and get a cup of coffee:
```bash
make domain-server assignment-client interface
```

On a server it does not make sense to compile Interface.

### Running the software

Run the domain server:
```bash
./domain-server/domain-server
```

Run an assignment client:
```bash
./assignment-client/assignment-client -n 6
```

Run Interface:
```bash
./interface/interface
```

In the running Interface, go to `localhost`.
@@ -16,7 +16,7 @@ Contributing
      git checkout -b new_branch_name
4. Code
   * Follow the [coding standard](https://wiki.highfidelity.com/wiki/Coding_Standards)
   * Follow the [coding standard](https://docs.highfidelity.com/build-guide/coding-standards)
5. Commit
   * Use [well formed commit messages](http://tbaggery.com/2008/04/19/a-note-about-git-commit-messages.html)
6. Update your branch
@@ -23,6 +23,7 @@
#include <AvatarHashMap.h>
#include <AudioInjectorManager.h>
#include <AssetClient.h>
#include <DebugDraw.h>
#include <LocationScriptingInterface.h>
#include <MessagesClient.h>
#include <NetworkAccessManager.h>

@@ -50,14 +51,14 @@
#include "RecordingScriptingInterface.h"
#include "AbstractAudioInterface.h"

#include "AvatarAudioTimer.h"

static const int RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES = 10;

Agent::Agent(ReceivedMessage& message) :
    ThreadedAssignment(message),
    _receivedAudioStream(RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES, RECEIVED_AUDIO_STREAM_CAPACITY_FRAMES),
    _audioGate(AudioConstants::SAMPLE_RATE, AudioConstants::MONO)
    _audioGate(AudioConstants::SAMPLE_RATE, AudioConstants::MONO),
    _avatarAudioTimer(this)
{
    _entityEditSender.setPacketsPerSecond(DEFAULT_ENTITY_PPS_PER_SCRIPT);
    DependencyManager::get<EntityScriptingInterface>()->setPacketSender(&_entityEditSender);

@@ -81,6 +82,9 @@ Agent::Agent(ReceivedMessage& message) :
    DependencyManager::set<RecordingScriptingInterface>();
    DependencyManager::set<UsersScriptingInterface>();

    // Needed to ensure the creation of the DebugDraw instance on the main thread
    DebugDraw::getInstance();

    auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();

@@ -92,6 +96,14 @@ Agent::Agent(ReceivedMessage& message) :
        this, "handleOctreePacket");
    packetReceiver.registerListener(PacketType::Jurisdiction, this, "handleJurisdictionPacket");
    packetReceiver.registerListener(PacketType::SelectedAudioFormat, this, "handleSelectedAudioFormat");

    // 100Hz timer for audio
    const int TARGET_INTERVAL_MSEC = 10; // 10ms
    connect(&_avatarAudioTimer, &QTimer::timeout, this, &Agent::processAgentAvatarAudio);
    _avatarAudioTimer.setSingleShot(false);
    _avatarAudioTimer.setInterval(TARGET_INTERVAL_MSEC);
    _avatarAudioTimer.setTimerType(Qt::PreciseTimer);
}

void Agent::playAvatarSound(SharedSoundPointer sound) {
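This commit drops the dedicated AvatarAudioTimer worker thread (the deleted files appear further down) in favour of a plain `QTimer` member ticking every 10 ms on the agent's own thread. As an aside, a minimal self-contained sketch of that pattern follows; `audioTimer` and the lambda are illustrative stand-ins for `_avatarAudioTimer` and `processAgentAvatarAudio()`, not code from this repository.

```cpp
// Minimal sketch (hypothetical example, not part of this commit): a repeating
// 10 ms QTimer -- i.e. 100 Hz -- driving a tick handler on the current thread.
#include <QCoreApplication>
#include <QDebug>
#include <QTimer>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    QTimer audioTimer;
    audioTimer.setSingleShot(false);            // keep firing, don't stop after one shot
    audioTimer.setInterval(10);                 // 10 ms -> 100 Hz
    audioTimer.setTimerType(Qt::PreciseTimer);  // ask Qt to keep millisecond accuracy
    QObject::connect(&audioTimer, &QTimer::timeout, [] {
        qDebug() << "audio tick";               // stands in for processAgentAvatarAudio()
    });
    audioTimer.start();

    return app.exec();                          // runs until interrupted, printing ticks
}
```

`Qt::PreciseTimer` matters here because the default coarse timer type is allowed to fire up to about 5% off the requested interval, which is too sloppy for a 10 ms audio tick.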
@ -471,14 +483,7 @@ void Agent::executeScript() {
|
|||
|
||||
DependencyManager::set<AssignmentParentFinder>(_entityViewer.getTree());
|
||||
|
||||
// 100Hz timer for audio
|
||||
AvatarAudioTimer* audioTimerWorker = new AvatarAudioTimer();
|
||||
audioTimerWorker->moveToThread(&_avatarAudioTimerThread);
|
||||
connect(audioTimerWorker, &AvatarAudioTimer::avatarTick, this, &Agent::processAgentAvatarAudio);
|
||||
connect(this, &Agent::startAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::start);
|
||||
connect(this, &Agent::stopAvatarAudioTimer, audioTimerWorker, &AvatarAudioTimer::stop);
|
||||
connect(&_avatarAudioTimerThread, &QThread::finished, audioTimerWorker, &QObject::deleteLater);
|
||||
_avatarAudioTimerThread.start();
|
||||
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
|
||||
|
||||
// Agents should run at 45hz
|
||||
static const int AVATAR_DATA_HZ = 45;
|
||||
|
@ -557,7 +562,7 @@ void Agent::setIsAvatar(bool isAvatar) {
|
|||
_avatarIdentityTimer->start(AVATAR_IDENTITY_PACKET_SEND_INTERVAL_MSECS); // FIXME - we shouldn't really need to constantly send identity packets
|
||||
|
||||
// tell the avatarAudioTimer to start ticking
|
||||
emit startAvatarAudioTimer();
|
||||
QMetaObject::invokeMethod(&_avatarAudioTimer, "start");
|
||||
|
||||
}
|
||||
|
||||
|
@ -586,7 +591,7 @@ void Agent::setIsAvatar(bool isAvatar) {
|
|||
nodeList->sendPacket(std::move(packet), *node);
|
||||
});
|
||||
}
|
||||
emit stopAvatarAudioTimer();
|
||||
QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -604,6 +609,24 @@ void Agent::processAgentAvatar() {
|
|||
|
||||
AvatarData::AvatarDataDetail dataDetail = (randFloat() < AVATAR_SEND_FULL_UPDATE_RATIO) ? AvatarData::SendAllData : AvatarData::CullSmallData;
|
||||
QByteArray avatarByteArray = scriptedAvatar->toByteArrayStateful(dataDetail);
|
||||
|
||||
int maximumByteArraySize = NLPacket::maxPayloadSize(PacketType::AvatarData) - sizeof(AvatarDataSequenceNumber);
|
||||
|
||||
if (avatarByteArray.size() > maximumByteArraySize) {
|
||||
qWarning() << " scriptedAvatar->toByteArrayStateful() resulted in very large buffer:" << avatarByteArray.size() << "... attempt to drop facial data";
|
||||
avatarByteArray = scriptedAvatar->toByteArrayStateful(dataDetail, true);
|
||||
|
||||
if (avatarByteArray.size() > maximumByteArraySize) {
|
||||
qWarning() << " scriptedAvatar->toByteArrayStateful() without facial data resulted in very large buffer:" << avatarByteArray.size() << "... reduce to MinimumData";
|
||||
avatarByteArray = scriptedAvatar->toByteArrayStateful(AvatarData::MinimumData, true);
|
||||
|
||||
if (avatarByteArray.size() > maximumByteArraySize) {
|
||||
qWarning() << " scriptedAvatar->toByteArrayStateful() MinimumData resulted in very large buffer:" << avatarByteArray.size() << "... FAIL!!";
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
scriptedAvatar->doneEncoding(true);
|
||||
|
||||
static AvatarDataSequenceNumber sequenceNumber = 0;
|
||||
|
@ -796,8 +819,7 @@ void Agent::aboutToFinish() {
|
|||
DependencyManager::destroy<recording::Recorder>();
|
||||
DependencyManager::destroy<recording::ClipCache>();
|
||||
|
||||
emit stopAvatarAudioTimer();
|
||||
_avatarAudioTimerThread.quit();
|
||||
QMetaObject::invokeMethod(&_avatarAudioTimer, "stop");
|
||||
|
||||
// cleanup codec & encoder
|
||||
if (_codec && _encoder) {
|
||||
|
|
|
@ -81,9 +81,6 @@ private slots:
|
|||
void processAgentAvatar();
|
||||
void processAgentAvatarAudio();
|
||||
|
||||
signals:
|
||||
void startAvatarAudioTimer();
|
||||
void stopAvatarAudioTimer();
|
||||
private:
|
||||
void negotiateAudioFormat();
|
||||
void selectAudioFormat(const QString& selectedCodecName);
|
||||
|
@ -118,7 +115,7 @@ private:
|
|||
CodecPluginPointer _codec;
|
||||
QString _selectedCodecName;
|
||||
Encoder* _encoder { nullptr };
|
||||
QThread _avatarAudioTimerThread;
|
||||
QTimer _avatarAudioTimer;
|
||||
bool _flushEncoder { false };
|
||||
};
|
||||
|
||||
|
|
|
@ -17,8 +17,8 @@
|
|||
#include <QtCore/QThread>
|
||||
|
||||
#include <LogHandler.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <HifiConfigVariantMap.h>
|
||||
#include <SharedUtil.h>
|
||||
#include <ShutdownEventListener.h>
|
||||
|
||||
#include "Assignment.h"
|
||||
|
|
|
@ -91,9 +91,22 @@ void AssignmentClientMonitor::simultaneousWaitOnChildren(int waitMsecs) {
|
|||
}
|
||||
}
|
||||
|
||||
void AssignmentClientMonitor::childProcessFinished(qint64 pid) {
|
||||
void AssignmentClientMonitor::childProcessFinished(qint64 pid, int exitCode, QProcess::ExitStatus exitStatus) {
|
||||
auto message = "Child process " + QString::number(pid) + " has %1 with exit code " + QString::number(exitCode) + ".";
|
||||
|
||||
if (_childProcesses.remove(pid)) {
|
||||
qDebug() << "Child process" << pid << "has finished. Removed from internal map.";
|
||||
message.append(" Removed from internal map.");
|
||||
} else {
|
||||
message.append(" Could not find process in internal map.");
|
||||
}
|
||||
|
||||
switch (exitStatus) {
|
||||
case QProcess::NormalExit:
|
||||
qDebug() << qPrintable(message.arg("returned"));
|
||||
break;
|
||||
case QProcess::CrashExit:
|
||||
qCritical() << qPrintable(message.arg("crashed"));
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -221,7 +234,9 @@ void AssignmentClientMonitor::spawnChildClient() {
    auto pid = assignmentClient->processId();
    // make sure we hear that this process has finished when it does
    connect(assignmentClient, static_cast<void(QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished),
            this, [this, pid]() { childProcessFinished(pid); });
            this, [this, pid](int exitCode, QProcess::ExitStatus exitStatus) {
                childProcessFinished(pid, exitCode, exitStatus);
            });

    qDebug() << "Spawned a child client with PID" << assignmentClient->processId();
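The replacement lambda now forwards the child's exit code and exit status, so `childProcessFinished()` can tell a normal return from a crash. Because `QProcess::finished` is overloaded in Qt 5, the connect needs the explicit member-function cast shown above (`qOverload<int, QProcess::ExitStatus>(&QProcess::finished)` would also work on Qt 5.7+). Below is a small standalone sketch of the same pattern, using a hypothetical throwaway program rather than the assignment-client binary.

```cpp
// Minimal sketch (hypothetical example): connect to the two-argument overload of
// QProcess::finished and inspect both the exit code and the exit status.
#include <QCoreApplication>
#include <QDebug>
#include <QProcess>

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);

    auto process = new QProcess(&app);
    QObject::connect(process,
                     static_cast<void (QProcess::*)(int, QProcess::ExitStatus)>(&QProcess::finished),
                     [&app](int exitCode, QProcess::ExitStatus exitStatus) {
        if (exitStatus == QProcess::CrashExit) {
            qCritical() << "child crashed with exit code" << exitCode;
        } else {
            qDebug() << "child returned with exit code" << exitCode;
        }
        app.quit();
    });

    process->start("true", QStringList());  // any short-lived command will do
    return app.exec();
}
```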
@ -44,7 +44,7 @@ public:
|
|||
void stopChildProcesses();
|
||||
private slots:
|
||||
void checkSpares();
|
||||
void childProcessFinished(qint64 pid);
|
||||
void childProcessFinished(qint64 pid, int exitCode, QProcess::ExitStatus exitStatus);
|
||||
void handleChildStatusPacket(QSharedPointer<ReceivedMessage> message);
|
||||
|
||||
bool handleHTTPRequest(HTTPConnection* connection, const QUrl& url, bool skipSubHandler = false) override;
|
||||
|
|
|
@ -1,36 +0,0 @@
|
|||
//
|
||||
// AvatarAudioTimer.cpp
|
||||
// assignment-client/src
|
||||
//
|
||||
// Created by David Kelly on 10/12/13.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
#include <QDebug>
|
||||
#include <SharedUtil.h>
|
||||
#include "AvatarAudioTimer.h"
|
||||
|
||||
// this should send a signal every 10ms, with pretty good precision. Hardcoding
|
||||
// to 10ms since that's what you'd want for audio.
|
||||
void AvatarAudioTimer::start() {
|
||||
auto startTime = usecTimestampNow();
|
||||
quint64 frameCounter = 0;
|
||||
const int TARGET_INTERVAL_USEC = 10000; // 10ms
|
||||
while (!_quit) {
|
||||
++frameCounter;
|
||||
|
||||
// tick every 10ms from startTime
|
||||
quint64 targetTime = startTime + frameCounter * TARGET_INTERVAL_USEC;
|
||||
quint64 now = usecTimestampNow();
|
||||
|
||||
// avoid quint64 underflow
|
||||
if (now < targetTime) {
|
||||
usleep(targetTime - now);
|
||||
}
|
||||
|
||||
emit avatarTick();
|
||||
}
|
||||
qDebug() << "AvatarAudioTimer is finished";
|
||||
}
|
|
@ -1,31 +0,0 @@
|
|||
//
|
||||
// AvatarAudioTimer.h
|
||||
// assignment-client/src
|
||||
//
|
||||
// Created by David Kelly on 10/12/13.
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#ifndef hifi_AvatarAudioTimer_h
|
||||
#define hifi_AvatarAudioTimer_h
|
||||
|
||||
#include <QtCore/QObject>
|
||||
|
||||
class AvatarAudioTimer : public QObject {
|
||||
Q_OBJECT
|
||||
|
||||
signals:
|
||||
void avatarTick();
|
||||
|
||||
public slots:
|
||||
void start();
|
||||
void stop() { _quit = true; }
|
||||
|
||||
private:
|
||||
bool _quit { false };
|
||||
};
|
||||
|
||||
#endif //hifi_AvatarAudioTimer_h
|
|
@@ -76,7 +76,7 @@ void AudioMixerSlavePool::processPackets(ConstIter begin, ConstIter end) {

void AudioMixerSlavePool::mix(ConstIter begin, ConstIter end, unsigned int frame, float throttlingRatio) {
    _function = &AudioMixerSlave::mix;
    _configure = [&](AudioMixerSlave& slave) {
    _configure = [=](AudioMixerSlave& slave) {
        slave.configureMix(_begin, _end, _frame, _throttlingRatio);
    };
    _frame = frame;
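Changing the stored `_configure` lambdas from `[&]` to `[=]` (here and in the two `AvatarMixerSlavePool` hunks below) matters because the pools keep these lambdas around and invoke them later from worker threads: capturing by reference would leave the callback holding references to locals such as the `begin`/`end` parameters after the enclosing function has returned. A minimal standalone sketch of the by-reference hazard, with hypothetical names rather than this repository's types:

```cpp
// Minimal sketch (hypothetical example): a callback that is stored now and run
// later, after the function that created it has returned.
#include <functional>
#include <iostream>

std::function<void()> gDeferred;  // stands in for the pool's stored _configure

void scheduleByValue() {
    int frame = 42;
    gDeferred = [=] { std::cout << "frame " << frame << "\n"; };  // copies frame; safe later
}

void scheduleByReference() {
    int frame = 42;
    gDeferred = [&] { std::cout << "frame " << frame << "\n"; };  // refers to a dead local
}

int main() {
    scheduleByValue();
    gDeferred();     // OK: prints "frame 42"

    scheduleByReference();
    // gDeferred();  // undefined behaviour: 'frame' went out of scope when the function returned
    return 0;
}
```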
@ -85,7 +85,22 @@ void AvatarMixer::handleReplicatedPacket(QSharedPointer<ReceivedMessage> message
|
|||
auto nodeList = DependencyManager::get<NodeList>();
|
||||
auto nodeID = QUuid::fromRfc4122(message->peek(NUM_BYTES_RFC4122_UUID));
|
||||
|
||||
auto replicatedNode = addOrUpdateReplicatedNode(nodeID, message->getSenderSockAddr());
|
||||
SharedNodePointer replicatedNode;
|
||||
|
||||
if (message->getType() == PacketType::ReplicatedKillAvatar) {
|
||||
// this is a kill packet, which we should only process if we already have the node in our list
|
||||
// since it of course does not make sense to add a node just to remove it an instant later
|
||||
replicatedNode = nodeList->nodeWithUUID(nodeID);
|
||||
|
||||
if (!replicatedNode) {
|
||||
return;
|
||||
}
|
||||
} else {
|
||||
replicatedNode = addOrUpdateReplicatedNode(nodeID, message->getSenderSockAddr());
|
||||
}
|
||||
|
||||
// we better have a node to work with at this point
|
||||
assert(replicatedNode);
|
||||
|
||||
if (message->getType() == PacketType::ReplicatedAvatarIdentity) {
|
||||
handleAvatarIdentityPacket(message, replicatedNode);
|
||||
|
|
|
@ -108,9 +108,6 @@ void AvatarMixerClientData::ignoreOther(SharedNodePointer self, SharedNodePointe
|
|||
void AvatarMixerClientData::removeFromRadiusIgnoringSet(SharedNodePointer self, const QUuid& other) {
|
||||
if (isRadiusIgnoring(other)) {
|
||||
_radiusIgnoredOthers.erase(other);
|
||||
auto exitingSpaceBubblePacket = NLPacket::create(PacketType::ExitingSpaceBubble, NUM_BYTES_RFC4122_UUID);
|
||||
exitingSpaceBubblePacket->write(other.toRfc4122());
|
||||
DependencyManager::get<NodeList>()->sendUnreliablePacket(*exitingSpaceBubblePacket, *self);
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -383,11 +383,11 @@ void AvatarMixerSlave::broadcastAvatarDataToAgent(const SharedNodePointer& node)
|
|||
qCWarning(avatars) << "otherAvatar.toByteArray() without facial data resulted in very large buffer:" << bytes.size() << "... reduce to MinimumData";
|
||||
bytes = otherAvatar->toByteArray(AvatarData::MinimumData, lastEncodeForOther, lastSentJointsForOther,
|
||||
hasFlagsOut, dropFaceTracking, distanceAdjust, viewerPosition, &lastSentJointsForOther);
|
||||
}
|
||||
|
||||
if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
|
||||
qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
|
||||
includeThisAvatar = false;
|
||||
if (bytes.size() > MAX_ALLOWED_AVATAR_DATA) {
|
||||
qCWarning(avatars) << "otherAvatar.toByteArray() MinimumData resulted in very large buffer:" << bytes.size() << "... FAIL!!";
|
||||
includeThisAvatar = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -69,7 +69,7 @@ static AvatarMixerSlave slave;
|
|||
|
||||
void AvatarMixerSlavePool::processIncomingPackets(ConstIter begin, ConstIter end) {
|
||||
_function = &AvatarMixerSlave::processIncomingPackets;
|
||||
_configure = [&](AvatarMixerSlave& slave) {
|
||||
_configure = [=](AvatarMixerSlave& slave) {
|
||||
slave.configure(begin, end);
|
||||
};
|
||||
run(begin, end);
|
||||
|
@ -79,7 +79,7 @@ void AvatarMixerSlavePool::broadcastAvatarData(ConstIter begin, ConstIter end,
|
|||
p_high_resolution_clock::time_point lastFrameTimestamp,
|
||||
float maxKbpsPerNode, float throttlingRatio) {
|
||||
_function = &AvatarMixerSlave::broadcastAvatarData;
|
||||
_configure = [&](AvatarMixerSlave& slave) {
|
||||
_configure = [=](AvatarMixerSlave& slave) {
|
||||
slave.configureBroadcast(begin, end, lastFrameTimestamp, maxKbpsPerNode, throttlingRatio);
|
||||
};
|
||||
run(begin, end);
|
||||
|
|
|
@ -19,7 +19,7 @@
|
|||
#include "ScriptableAvatar.h"
|
||||
|
||||
|
||||
QByteArray ScriptableAvatar::toByteArrayStateful(AvatarDataDetail dataDetail) {
|
||||
QByteArray ScriptableAvatar::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
|
||||
_globalPosition = getPosition();
|
||||
return AvatarData::toByteArrayStateful(dataDetail);
|
||||
}
|
||||
|
|
|
@ -28,7 +28,7 @@ public:
|
|||
Q_INVOKABLE AnimationDetails getAnimationDetails();
|
||||
virtual void setSkeletonModelURL(const QUrl& skeletonModelURL) override;
|
||||
|
||||
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail) override;
|
||||
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking = false) override;
|
||||
|
||||
|
||||
private slots:
|
||||
|
|
|
@ -50,6 +50,12 @@ EntityServer::~EntityServer() {
|
|||
tree->removeNewlyCreatedHook(this);
|
||||
}
|
||||
|
||||
void EntityServer::aboutToFinish() {
|
||||
DependencyManager::get<ResourceManager>()->cleanup();
|
||||
|
||||
OctreeServer::aboutToFinish();
|
||||
}
|
||||
|
||||
void EntityServer::handleEntityPacket(QSharedPointer<ReceivedMessage> message, SharedNodePointer senderNode) {
|
||||
if (_octreeInboundPacketProcessor) {
|
||||
_octreeInboundPacketProcessor->queueReceivedPacket(message, senderNode);
|
||||
|
|
|
@ -59,6 +59,8 @@ public:
|
|||
virtual void trackSend(const QUuid& dataID, quint64 dataLastEdited, const QUuid& sessionID) override;
|
||||
virtual void trackViewerGone(const QUuid& sessionID) override;
|
||||
|
||||
virtual void aboutToFinish() override;
|
||||
|
||||
public slots:
|
||||
virtual void nodeAdded(SharedNodePointer node) override;
|
||||
virtual void nodeKilled(SharedNodePointer node) override;
|
||||
|
|
|
@ -81,7 +81,6 @@ bool OctreeSendThread::process() {
|
|||
// don't do any send processing until the initial load of the octree is complete...
|
||||
if (_myServer->isInitialLoadComplete()) {
|
||||
if (auto node = _node.lock()) {
|
||||
_nodeMissingCount = 0;
|
||||
OctreeQueryNode* nodeData = static_cast<OctreeQueryNode*>(node->getLinkedData());
|
||||
|
||||
// Sometimes the node data has not yet been linked, in which case we can't really do anything
|
||||
|
@ -129,8 +128,7 @@ AtomicUIntStat OctreeSendThread::_totalSpecialBytes { 0 };
|
|||
AtomicUIntStat OctreeSendThread::_totalSpecialPackets { 0 };
|
||||
|
||||
|
||||
int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, int& trueBytesSent,
|
||||
int& truePacketsSent, bool dontSuppressDuplicate) {
|
||||
int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, bool dontSuppressDuplicate) {
|
||||
OctreeServer::didHandlePacketSend(this);
|
||||
|
||||
// if we're shutting down, then exit early
|
||||
|
@ -141,15 +139,14 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
|
|||
bool debug = _myServer->wantsDebugSending();
|
||||
quint64 now = usecTimestampNow();
|
||||
|
||||
bool packetSent = false; // did we send a packet?
|
||||
int packetsSent = 0;
|
||||
int numPackets = 0;
|
||||
|
||||
// Here's where we check to see if this packet is a duplicate of the last packet. If it is, we will silently
|
||||
// obscure the packet and not send it. This allows the callers and upper level logic to not need to know about
|
||||
// this rate control savings.
|
||||
if (!dontSuppressDuplicate && nodeData->shouldSuppressDuplicatePacket()) {
|
||||
nodeData->resetOctreePacket(); // we still need to reset it though!
|
||||
return packetsSent; // without sending...
|
||||
return numPackets; // without sending...
|
||||
}
|
||||
|
||||
// If we've got a stats message ready to send, then see if we can piggyback them together
|
||||
|
@ -163,12 +160,15 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
|
|||
// copy octree message to back of stats message
|
||||
statsPacket.write(nodeData->getPacket().getData(), nodeData->getPacket().getDataSize());
|
||||
|
||||
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted", since
|
||||
int numBytes = statsPacket.getDataSize();
|
||||
_totalBytes += numBytes;
|
||||
_totalPackets++;
|
||||
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted"
|
||||
// there was nothing else to send.
|
||||
int thisWastedBytes = 0;
|
||||
_totalWastedBytes += thisWastedBytes;
|
||||
_totalBytes += statsPacket.getDataSize();
|
||||
_totalPackets++;
|
||||
//_totalWastedBytes += 0;
|
||||
_trueBytesSent += numBytes;
|
||||
numPackets++;
|
||||
|
||||
if (debug) {
|
||||
NLPacket& sentPacket = nodeData->getPacket();
|
||||
|
@ -191,18 +191,22 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
|
|||
// actually send it
|
||||
OctreeServer::didCallWriteDatagram(this);
|
||||
DependencyManager::get<NodeList>()->sendUnreliablePacket(statsPacket, *node);
|
||||
packetSent = true;
|
||||
} else {
|
||||
// not enough room in the packet, send two packets
|
||||
|
||||
// first packet
|
||||
OctreeServer::didCallWriteDatagram(this);
|
||||
DependencyManager::get<NodeList>()->sendUnreliablePacket(statsPacket, *node);
|
||||
|
||||
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted", since
|
||||
int numBytes = statsPacket.getDataSize();
|
||||
_totalBytes += numBytes;
|
||||
_totalPackets++;
|
||||
// since a stats message is only included on end of scene, don't consider any of these bytes "wasted"
|
||||
// there was nothing else to send.
|
||||
int thisWastedBytes = 0;
|
||||
_totalWastedBytes += thisWastedBytes;
|
||||
_totalBytes += statsPacket.getDataSize();
|
||||
_totalPackets++;
|
||||
//_totalWastedBytes += 0;
|
||||
_trueBytesSent += numBytes;
|
||||
numPackets++;
|
||||
|
||||
if (debug) {
|
||||
NLPacket& sentPacket = nodeData->getPacket();
|
||||
|
@ -221,19 +225,18 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
|
|||
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
|
||||
}
|
||||
|
||||
trueBytesSent += statsPacket.getDataSize();
|
||||
truePacketsSent++;
|
||||
packetsSent++;
|
||||
|
||||
// second packet
|
||||
OctreeServer::didCallWriteDatagram(this);
|
||||
DependencyManager::get<NodeList>()->sendUnreliablePacket(nodeData->getPacket(), *node);
|
||||
packetSent = true;
|
||||
|
||||
int packetSizeWithHeader = nodeData->getPacket().getDataSize();
|
||||
thisWastedBytes = udt::MAX_PACKET_SIZE - packetSizeWithHeader;
|
||||
_totalWastedBytes += thisWastedBytes;
|
||||
_totalBytes += nodeData->getPacket().getDataSize();
|
||||
numBytes = nodeData->getPacket().getDataSize();
|
||||
_totalBytes += numBytes;
|
||||
_totalPackets++;
|
||||
// we count wasted bytes here because we were unable to fit the stats packet
|
||||
thisWastedBytes = udt::MAX_PACKET_SIZE - numBytes;
|
||||
_totalWastedBytes += thisWastedBytes;
|
||||
_trueBytesSent += numBytes;
|
||||
numPackets++;
|
||||
|
||||
if (debug) {
|
||||
NLPacket& sentPacket = nodeData->getPacket();
|
||||
|
@ -259,13 +262,14 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
|
|||
// just send the octree packet
|
||||
OctreeServer::didCallWriteDatagram(this);
|
||||
DependencyManager::get<NodeList>()->sendUnreliablePacket(nodeData->getPacket(), *node);
|
||||
packetSent = true;
|
||||
|
||||
int packetSizeWithHeader = nodeData->getPacket().getDataSize();
|
||||
int thisWastedBytes = udt::MAX_PACKET_SIZE - packetSizeWithHeader;
|
||||
_totalWastedBytes += thisWastedBytes;
|
||||
_totalBytes += packetSizeWithHeader;
|
||||
int numBytes = nodeData->getPacket().getDataSize();
|
||||
_totalBytes += numBytes;
|
||||
_totalPackets++;
|
||||
int thisWastedBytes = udt::MAX_PACKET_SIZE - numBytes;
|
||||
_totalWastedBytes += thisWastedBytes;
|
||||
numPackets++;
|
||||
_trueBytesSent += numBytes;
|
||||
|
||||
if (debug) {
|
||||
NLPacket& sentPacket = nodeData->getPacket();
|
||||
|
@ -280,23 +284,21 @@ int OctreeSendThread::handlePacketSend(SharedNodePointer node, OctreeQueryNode*
|
|||
|
||||
qDebug() << "Sending packet at " << now << " [" << _totalPackets <<"]: sequence: " << sequence <<
|
||||
" timestamp: " << timestamp <<
|
||||
" size: " << packetSizeWithHeader << " [" << _totalBytes <<
|
||||
" size: " << numBytes << " [" << _totalBytes <<
|
||||
"] wasted bytes:" << thisWastedBytes << " [" << _totalWastedBytes << "]";
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// remember to track our stats
|
||||
if (packetSent) {
|
||||
if (numPackets > 0) {
|
||||
nodeData->stats.packetSent(nodeData->getPacket().getPayloadSize());
|
||||
trueBytesSent += nodeData->getPacket().getPayloadSize();
|
||||
truePacketsSent++;
|
||||
packetsSent++;
|
||||
nodeData->octreePacketSent();
|
||||
nodeData->resetOctreePacket();
|
||||
}
|
||||
|
||||
return packetsSent;
|
||||
_truePacketsSent += numPackets;
|
||||
return numPackets;
|
||||
}
|
||||
|
||||
/// Version of octree element distributor that sends the deepest LOD level at once
|
||||
|
@ -315,13 +317,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
preDistributionProcessing();
|
||||
}
|
||||
|
||||
// calculate max number of packets that can be sent during this interval
|
||||
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
|
||||
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
|
||||
|
||||
int truePacketsSent = 0;
|
||||
int trueBytesSent = 0;
|
||||
int packetsSentThisInterval = 0;
|
||||
_truePacketsSent = 0;
|
||||
_trueBytesSent = 0;
|
||||
_packetsSentThisInterval = 0;
|
||||
|
||||
bool isFullScene = nodeData->shouldForceFullScene();
|
||||
if (isFullScene) {
|
||||
|
@ -334,17 +332,9 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
&& ((!viewFrustumChanged && nodeData->getViewFrustumJustStoppedChanging()) || nodeData->hasLodChanged()));
|
||||
}
|
||||
|
||||
bool somethingToSend = true; // assume we have something
|
||||
|
||||
// If our packet already has content in it, then we must use the color choice of the waiting packet.
|
||||
// If we're starting a fresh packet, then...
|
||||
// If we're moving, and the client asked for low res, then we force monochrome, otherwise, use
|
||||
// the clients requested color state.
|
||||
|
||||
// If we have a packet waiting, and our desired want color, doesn't match the current waiting packets color
|
||||
// then let's just send that waiting packet.
|
||||
if (nodeData->isPacketWaiting()) {
|
||||
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
|
||||
// send the waiting packet
|
||||
_packetsSentThisInterval += handlePacketSend(node, nodeData);
|
||||
} else {
|
||||
nodeData->resetOctreePacket();
|
||||
}
|
||||
|
@ -375,8 +365,7 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
//unsigned long encodeTime = nodeData->stats.getTotalEncodeTime();
|
||||
//unsigned long elapsedTime = nodeData->stats.getElapsedTime();
|
||||
|
||||
int packetsJustSent = handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent, isFullScene);
|
||||
packetsSentThisInterval += packetsJustSent;
|
||||
_packetsSentThisInterval += handlePacketSend(node, nodeData, isFullScene);
|
||||
|
||||
// If we're starting a full scene, then definitely we want to empty the elementBag
|
||||
if (isFullScene) {
|
||||
|
@ -404,185 +393,44 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
|
||||
// If we have something in our elementBag, then turn them into packets and send them out...
|
||||
if (!nodeData->elementBag.isEmpty()) {
|
||||
int bytesWritten = 0;
|
||||
quint64 start = usecTimestampNow();
|
||||
|
||||
// TODO: add these to stats page
|
||||
//quint64 startCompressTimeMsecs = OctreePacketData::getCompressContentTime() / 1000;
|
||||
//quint64 startCompressCalls = OctreePacketData::getCompressContentCalls();
|
||||
|
||||
int extraPackingAttempts = 0;
|
||||
bool completedScene = false;
|
||||
|
||||
while (somethingToSend && packetsSentThisInterval < maxPacketsPerInterval && !nodeData->isShuttingDown()) {
|
||||
float lockWaitElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
float encodeElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
float compressAndWriteElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
float packetSendingElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
|
||||
quint64 startInside = usecTimestampNow();
|
||||
|
||||
bool lastNodeDidntFit = false; // assume each node fits
|
||||
if (!nodeData->elementBag.isEmpty()) {
|
||||
|
||||
quint64 lockWaitStart = usecTimestampNow();
|
||||
_myServer->getOctree()->withReadLock([&]{
|
||||
quint64 lockWaitEnd = usecTimestampNow();
|
||||
lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
|
||||
quint64 encodeStart = usecTimestampNow();
|
||||
|
||||
OctreeElementPointer subTree = nodeData->elementBag.extract();
|
||||
if (!subTree) {
|
||||
return;
|
||||
}
|
||||
|
||||
float octreeSizeScale = nodeData->getOctreeSizeScale();
|
||||
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
|
||||
|
||||
int boundaryLevelAdjust = boundaryLevelAdjustClient +
|
||||
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
|
||||
|
||||
EncodeBitstreamParams params(INT_MAX, WANT_EXISTS_BITS, DONT_CHOP,
|
||||
viewFrustumChanged, boundaryLevelAdjust, octreeSizeScale,
|
||||
isFullScene, _myServer->getJurisdiction(), nodeData);
|
||||
nodeData->copyCurrentViewFrustum(params.viewFrustum);
|
||||
if (viewFrustumChanged) {
|
||||
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
|
||||
}
|
||||
|
||||
// Our trackSend() function is implemented by the server subclass, and will be called back
|
||||
// during the encodeTreeBitstream() as new entities/data elements are sent
|
||||
params.trackSend = [this, node](const QUuid& dataID, quint64 dataEdited) {
|
||||
_myServer->trackSend(dataID, dataEdited, node->getUUID());
|
||||
};
|
||||
|
||||
// TODO: should this include the lock time or not? This stat is sent down to the client,
|
||||
// it seems like it may be a good idea to include the lock time as part of the encode time
|
||||
// are reported to client. Since you can encode without the lock
|
||||
nodeData->stats.encodeStarted();
|
||||
|
||||
bytesWritten = _myServer->getOctree()->encodeTreeBitstream(subTree, &_packetData, nodeData->elementBag, params);
|
||||
|
||||
quint64 encodeEnd = usecTimestampNow();
|
||||
encodeElapsedUsec = (float)(encodeEnd - encodeStart);
|
||||
|
||||
// If after calling encodeTreeBitstream() there are no nodes left to send, then we know we've
|
||||
// sent the entire scene. We want to know this below so we'll actually write this content into
|
||||
// the packet and send it
|
||||
completedScene = nodeData->elementBag.isEmpty();
|
||||
|
||||
if (params.stopReason == EncodeBitstreamParams::DIDNT_FIT) {
|
||||
lastNodeDidntFit = true;
|
||||
extraPackingAttempts++;
|
||||
}
|
||||
|
||||
nodeData->stats.encodeStopped();
|
||||
});
|
||||
} else {
|
||||
// If the bag was empty then we didn't even attempt to encode, and so we know the bytesWritten were 0
|
||||
bytesWritten = 0;
|
||||
somethingToSend = false; // this will cause us to drop out of the loop...
|
||||
}
|
||||
|
||||
// If the last node didn't fit, but we're in compressed mode, then we actually want to see if we can fit a
|
||||
// little bit more in this packet. To do this we write into the packet, but don't send it yet, we'll
|
||||
// keep attempting to write in compressed mode to add more compressed segments
|
||||
|
||||
// We only consider sending anything if there is something in the _packetData to send... But
|
||||
// if bytesWritten == 0 it means either the subTree couldn't fit or we had an empty bag... Both cases
|
||||
// mean we should send the previous packet contents and reset it.
|
||||
if (completedScene || lastNodeDidntFit) {
|
||||
|
||||
if (_packetData.hasContent()) {
|
||||
quint64 compressAndWriteStart = usecTimestampNow();
|
||||
|
||||
// if for some reason the finalized size is greater than our available size, then probably the "compressed"
|
||||
// form actually inflated beyond our padding, and in this case we will send the current packet, then
|
||||
// write to out new packet...
|
||||
unsigned int writtenSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
|
||||
|
||||
if (writtenSize > nodeData->getAvailable()) {
|
||||
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
|
||||
}
|
||||
|
||||
nodeData->writeToPacket(_packetData.getFinalizedData(), _packetData.getFinalizedSize());
|
||||
quint64 compressAndWriteEnd = usecTimestampNow();
|
||||
compressAndWriteElapsedUsec = (float)(compressAndWriteEnd - compressAndWriteStart);
|
||||
}
|
||||
|
||||
// If we're not running compressed, then we know we can just send now. Or if we're running compressed, but
|
||||
// the packet doesn't have enough space to bother attempting to pack more...
|
||||
bool sendNow = true;
|
||||
|
||||
if (!completedScene && (nodeData->getAvailable() >= MINIMUM_ATTEMPT_MORE_PACKING &&
|
||||
extraPackingAttempts <= REASONABLE_NUMBER_OF_PACKING_ATTEMPTS)) {
|
||||
sendNow = false; // try to pack more
|
||||
}
|
||||
|
||||
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
|
||||
if (sendNow) {
|
||||
quint64 packetSendingStart = usecTimestampNow();
|
||||
packetsSentThisInterval += handlePacketSend(node, nodeData, trueBytesSent, truePacketsSent);
|
||||
quint64 packetSendingEnd = usecTimestampNow();
|
||||
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
|
||||
|
||||
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
|
||||
extraPackingAttempts = 0;
|
||||
} else {
|
||||
// If we're in compressed mode, then we want to see if we have room for more in this wire packet.
|
||||
// but we've finalized the _packetData, so we want to start a new section, we will do that by
|
||||
// resetting the packet settings with the max uncompressed size of our current available space
|
||||
// in the wire packet. We also include room for our section header, and a little bit of padding
|
||||
// to account for the fact that whenc compressing small amounts of data, we sometimes end up with
|
||||
// a larger compressed size then uncompressed size
|
||||
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
|
||||
}
|
||||
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
|
||||
|
||||
}
|
||||
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);
|
||||
OctreeServer::trackEncodeTime(encodeElapsedUsec);
|
||||
OctreeServer::trackCompressAndWriteTime(compressAndWriteElapsedUsec);
|
||||
OctreeServer::trackPacketSendingTime(packetSendingElapsedUsec);
|
||||
|
||||
quint64 endInside = usecTimestampNow();
|
||||
quint64 elapsedInsideUsecs = endInside - startInside;
|
||||
OctreeServer::trackInsideTime((float)elapsedInsideUsecs);
|
||||
}
|
||||
|
||||
if (somethingToSend && _myServer->wantsVerboseDebug()) {
|
||||
qCDebug(octree) << "Hit PPS Limit, packetsSentThisInterval =" << packetsSentThisInterval
|
||||
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
|
||||
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;
|
||||
}
|
||||
traverseTreeAndSendContents(node, nodeData, viewFrustumChanged, isFullScene);
|
||||
|
||||
// Here's where we can/should allow the server to send other data...
|
||||
// send the environment packet
|
||||
// TODO: should we turn this into a while loop to better handle sending multiple special packets
|
||||
if (_myServer->hasSpecialPacketsToSend(node) && !nodeData->isShuttingDown()) {
|
||||
int specialPacketsSent = 0;
|
||||
trueBytesSent += _myServer->sendSpecialPackets(node, nodeData, specialPacketsSent);
|
||||
int specialBytesSent = _myServer->sendSpecialPackets(node, nodeData, specialPacketsSent);
|
||||
nodeData->resetOctreePacket(); // because nodeData's _sequenceNumber has changed
|
||||
truePacketsSent += specialPacketsSent;
|
||||
packetsSentThisInterval += specialPacketsSent;
|
||||
_truePacketsSent += specialPacketsSent;
|
||||
_trueBytesSent += specialBytesSent;
|
||||
_packetsSentThisInterval += specialPacketsSent;
|
||||
|
||||
_totalPackets += specialPacketsSent;
|
||||
_totalBytes += trueBytesSent;
|
||||
_totalBytes += specialBytesSent;
|
||||
|
||||
_totalSpecialPackets += specialPacketsSent;
|
||||
_totalSpecialBytes += trueBytesSent;
|
||||
_totalSpecialBytes += specialBytesSent;
|
||||
}
|
||||
|
||||
// calculate max number of packets that can be sent during this interval
|
||||
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
|
||||
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
|
||||
|
||||
// Re-send packets that were nacked by the client
|
||||
while (nodeData->hasNextNackedPacket() && packetsSentThisInterval < maxPacketsPerInterval) {
|
||||
while (nodeData->hasNextNackedPacket() && _packetsSentThisInterval < maxPacketsPerInterval) {
|
||||
const NLPacket* packet = nodeData->getNextNackedPacket();
|
||||
if (packet) {
|
||||
DependencyManager::get<NodeList>()->sendUnreliablePacket(*packet, *node);
|
||||
truePacketsSent++;
|
||||
packetsSentThisInterval++;
|
||||
int numBytes = packet->getDataSize();
|
||||
_truePacketsSent++;
|
||||
_trueBytesSent += numBytes;
|
||||
_packetsSentThisInterval++;
|
||||
|
||||
_totalBytes += packet->getDataSize();
|
||||
_totalPackets++;
|
||||
_totalBytes += numBytes;
|
||||
_totalWastedBytes += udt::MAX_PACKET_SIZE - packet->getDataSize();
|
||||
}
|
||||
}
|
||||
|
@ -591,12 +439,6 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
int elapsedmsec = (end - start) / USECS_PER_MSEC;
|
||||
OctreeServer::trackLoopTime(elapsedmsec);
|
||||
|
||||
// TODO: add these to stats page
|
||||
//quint64 endCompressCalls = OctreePacketData::getCompressContentCalls();
|
||||
//int elapsedCompressCalls = endCompressCalls - startCompressCalls;
|
||||
//quint64 endCompressTimeMsecs = OctreePacketData::getCompressContentTime() / 1000;
|
||||
//int elapsedCompressTimeMsecs = endCompressTimeMsecs - startCompressTimeMsecs;
|
||||
|
||||
// if after sending packets we've emptied our bag, then we want to remember that we've sent all
|
||||
// the octree elements from the current view frustum
|
||||
if (nodeData->elementBag.isEmpty()) {
|
||||
|
@ -606,17 +448,147 @@ int OctreeSendThread::packetDistributor(SharedNodePointer node, OctreeQueryNode*
|
|||
// If this was a full scene then make sure we really send out a stats packet at this point so that
|
||||
// the clients will know the scene is stable
|
||||
if (isFullScene) {
|
||||
int thisTrueBytesSent = 0;
|
||||
int thisTruePacketsSent = 0;
|
||||
nodeData->stats.sceneCompleted();
|
||||
int packetsJustSent = handlePacketSend(node, nodeData, thisTrueBytesSent, thisTruePacketsSent, true);
|
||||
_totalBytes += thisTrueBytesSent;
|
||||
_totalPackets += thisTruePacketsSent;
|
||||
truePacketsSent += packetsJustSent;
|
||||
handlePacketSend(node, nodeData, true);
|
||||
}
|
||||
}
|
||||
|
||||
} // end if bag wasn't empty, and so we sent stuff...
|
||||
|
||||
return truePacketsSent;
|
||||
return _truePacketsSent;
|
||||
}
|
||||
|
||||
void OctreeSendThread::traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene) {
|
||||
// calculate max number of packets that can be sent during this interval
|
||||
int clientMaxPacketsPerInterval = std::max(1, (nodeData->getMaxQueryPacketsPerSecond() / INTERVALS_PER_SECOND));
|
||||
int maxPacketsPerInterval = std::min(clientMaxPacketsPerInterval, _myServer->getPacketsPerClientPerInterval());
|
||||
|
||||
int extraPackingAttempts = 0;
|
||||
bool completedScene = false;
|
||||
|
||||
bool somethingToSend = true; // assume we have something
|
||||
while (somethingToSend && _packetsSentThisInterval < maxPacketsPerInterval && !nodeData->isShuttingDown()) {
|
||||
float lockWaitElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
float encodeElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
float compressAndWriteElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
float packetSendingElapsedUsec = OctreeServer::SKIP_TIME;
|
||||
|
||||
quint64 startInside = usecTimestampNow();
|
||||
|
||||
bool lastNodeDidntFit = false; // assume each node fits
|
||||
if (!nodeData->elementBag.isEmpty()) {
|
||||
|
||||
quint64 lockWaitStart = usecTimestampNow();
|
||||
_myServer->getOctree()->withReadLock([&]{
|
||||
quint64 lockWaitEnd = usecTimestampNow();
|
||||
lockWaitElapsedUsec = (float)(lockWaitEnd - lockWaitStart);
|
||||
quint64 encodeStart = usecTimestampNow();
|
||||
|
||||
OctreeElementPointer subTree = nodeData->elementBag.extract();
|
||||
if (!subTree) {
|
||||
return;
|
||||
}
|
||||
|
||||
float octreeSizeScale = nodeData->getOctreeSizeScale();
|
||||
int boundaryLevelAdjustClient = nodeData->getBoundaryLevelAdjust();
|
||||
|
||||
int boundaryLevelAdjust = boundaryLevelAdjustClient +
|
||||
(viewFrustumChanged ? LOW_RES_MOVING_ADJUST : NO_BOUNDARY_ADJUST);
|
||||
|
||||
EncodeBitstreamParams params(INT_MAX, WANT_EXISTS_BITS, DONT_CHOP,
|
||||
viewFrustumChanged, boundaryLevelAdjust, octreeSizeScale,
|
||||
isFullScene, _myServer->getJurisdiction(), nodeData);
|
||||
nodeData->copyCurrentViewFrustum(params.viewFrustum);
|
||||
if (viewFrustumChanged) {
|
||||
nodeData->copyLastKnownViewFrustum(params.lastViewFrustum);
|
||||
}
|
||||
|
||||
// Our trackSend() function is implemented by the server subclass, and will be called back
|
||||
// during the encodeTreeBitstream() as new entities/data elements are sent
|
||||
params.trackSend = [this](const QUuid& dataID, quint64 dataEdited) {
|
||||
_myServer->trackSend(dataID, dataEdited, _nodeUuid);
|
||||
};
|
||||
|
||||
// TODO: should this include the lock time or not? This stat is sent down to the client,
|
||||
// it seems like it may be a good idea to include the lock time as part of the encode time
|
||||
// are reported to client. Since you can encode without the lock
|
||||
nodeData->stats.encodeStarted();
|
||||
|
||||
// NOTE: this is where the tree "contents" are actually packed
|
||||
_myServer->getOctree()->encodeTreeBitstream(subTree, &_packetData, nodeData->elementBag, params);
|
||||
|
||||
quint64 encodeEnd = usecTimestampNow();
|
||||
encodeElapsedUsec = (float)(encodeEnd - encodeStart);
|
||||
|
||||
// If after calling encodeTreeBitstream() there are no nodes left to send, then we know we've
|
||||
// sent the entire scene. We want to know this below so we'll actually write this content into
|
||||
// the packet and send it
|
||||
completedScene = nodeData->elementBag.isEmpty();
|
||||
|
||||
if (params.stopReason == EncodeBitstreamParams::DIDNT_FIT) {
|
||||
lastNodeDidntFit = true;
|
||||
extraPackingAttempts++;
|
||||
}
|
||||
|
||||
nodeData->stats.encodeStopped();
|
||||
});
|
||||
} else {
|
||||
somethingToSend = false; // this will cause us to drop out of the loop...
|
||||
}
|
||||
|
||||
if (completedScene || lastNodeDidntFit) {
|
||||
// we probably want to flush what has accumulated in nodeData but:
|
||||
// do we have more data to send? and is there room?
|
||||
if (_packetData.hasContent()) {
|
||||
// yes, more data to send
|
||||
quint64 compressAndWriteStart = usecTimestampNow();
|
||||
unsigned int additionalSize = _packetData.getFinalizedSize() + sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
|
||||
if (additionalSize > nodeData->getAvailable()) {
|
||||
// no room --> flush what we've got
|
||||
_packetsSentThisInterval += handlePacketSend(node, nodeData);
|
||||
}
|
||||
|
||||
// either there is room, or we've flushed and reset nodeData's data buffer
|
||||
// so we can transfer whatever is in _packetData to nodeData
|
||||
nodeData->writeToPacket(_packetData.getFinalizedData(), _packetData.getFinalizedSize());
|
||||
compressAndWriteElapsedUsec = (float)(usecTimestampNow()- compressAndWriteStart);
|
||||
}
|
||||
|
||||
bool sendNow = completedScene ||
|
||||
nodeData->getAvailable() < MINIMUM_ATTEMPT_MORE_PACKING ||
|
||||
extraPackingAttempts > REASONABLE_NUMBER_OF_PACKING_ATTEMPTS;
|
||||
|
||||
int targetSize = MAX_OCTREE_PACKET_DATA_SIZE;
|
||||
if (sendNow) {
|
||||
quint64 packetSendingStart = usecTimestampNow();
|
||||
_packetsSentThisInterval += handlePacketSend(node, nodeData);
|
||||
quint64 packetSendingEnd = usecTimestampNow();
|
||||
packetSendingElapsedUsec = (float)(packetSendingEnd - packetSendingStart);
|
||||
|
||||
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE);
|
||||
extraPackingAttempts = 0;
|
||||
} else {
|
||||
// We want to see if we have room for more in this wire packet but we've copied the _packetData,
|
||||
// so we want to start a new section. We will do that by resetting the packet settings with the max
|
||||
// size of our current available space in the wire packet plus room for our section header and a
|
||||
// little bit of padding.
|
||||
targetSize = nodeData->getAvailable() - sizeof(OCTREE_PACKET_INTERNAL_SECTION_SIZE) - COMPRESS_PADDING;
|
||||
}
|
||||
_packetData.changeSettings(true, targetSize); // will do reset - NOTE: Always compressed
|
||||
}
|
||||
OctreeServer::trackTreeWaitTime(lockWaitElapsedUsec);
|
||||
OctreeServer::trackEncodeTime(encodeElapsedUsec);
|
||||
OctreeServer::trackCompressAndWriteTime(compressAndWriteElapsedUsec);
|
||||
OctreeServer::trackPacketSendingTime(packetSendingElapsedUsec);
|
||||
|
||||
quint64 endInside = usecTimestampNow();
|
||||
quint64 elapsedInsideUsecs = endInside - startInside;
|
||||
OctreeServer::trackInsideTime((float)elapsedInsideUsecs);
|
||||
}
|
||||
|
||||
if (somethingToSend && _myServer->wantsVerboseDebug()) {
|
||||
qCDebug(octree) << "Hit PPS Limit, packetsSentThisInterval =" << _packetsSentThisInterval
|
||||
<< " maxPacketsPerInterval = " << maxPacketsPerInterval
|
||||
<< " clientMaxPacketsPerInterval = " << clientMaxPacketsPerInterval;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -34,7 +34,7 @@ public:
|
|||
|
||||
void setIsShuttingDown();
|
||||
bool isShuttingDown() { return _isShuttingDown; }
|
||||
|
||||
|
||||
QUuid getNodeUuid() const { return _nodeUuid; }
|
||||
|
||||
static AtomicUIntStat _totalBytes;
|
||||
|
@ -53,20 +53,23 @@ protected:
|
|||
|
||||
/// Called before a packetDistributor pass to allow for pre-distribution processing
|
||||
virtual void preDistributionProcessing() {};
|
||||
virtual void traverseTreeAndSendContents(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged, bool isFullScene);
|
||||
|
||||
OctreeServer* _myServer { nullptr };
|
||||
QWeakPointer<Node> _node;
|
||||
|
||||
private:
|
||||
int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, int& trueBytesSent, int& truePacketsSent, bool dontSuppressDuplicate = false);
|
||||
int handlePacketSend(SharedNodePointer node, OctreeQueryNode* nodeData, bool dontSuppressDuplicate = false);
|
||||
int packetDistributor(SharedNodePointer node, OctreeQueryNode* nodeData, bool viewFrustumChanged);
|
||||
|
||||
|
||||
|
||||
QUuid _nodeUuid;
|
||||
|
||||
OctreePacketData _packetData;
|
||||
|
||||
int _nodeMissingCount { 0 };
|
||||
int _truePacketsSent { 0 }; // available for debug stats
|
||||
int _trueBytesSent { 0 }; // available for debug stats
|
||||
int _packetsSentThisInterval { 0 }; // used for bandwidth throttle condition
|
||||
bool _isShuttingDown { false };
|
||||
};
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@
|
|||
#include <AudioConstants.h>
|
||||
#include <AudioInjectorManager.h>
|
||||
#include <ClientServerUtils.h>
|
||||
#include <DebugDraw.h>
|
||||
#include <EntityNodeData.h>
|
||||
#include <EntityScriptingInterface.h>
|
||||
#include <LogHandler.h>
|
||||
|
@ -67,6 +68,9 @@ EntityScriptServer::EntityScriptServer(ReceivedMessage& message) : ThreadedAssig
|
|||
DependencyManager::set<ScriptCache>();
|
||||
DependencyManager::set<ScriptEngines>(ScriptEngine::ENTITY_SERVER_SCRIPT);
|
||||
|
||||
// Needed to ensure the creation of the DebugDraw instance on the main thread
|
||||
DebugDraw::getInstance();
|
||||
|
||||
auto& packetReceiver = DependencyManager::get<NodeList>()->getPacketReceiver();
|
||||
packetReceiver.registerListenerForTypes({ PacketType::OctreeStats, PacketType::EntityData, PacketType::EntityErase },
|
||||
this, "handleOctreePacket");
|
||||
|
|
|
@ -49,7 +49,7 @@
|
|||
Var STR_CONTAINS_VAR_3
|
||||
Var STR_CONTAINS_VAR_4
|
||||
Var STR_RETURN_VAR
|
||||
|
||||
|
||||
Function StrContains
|
||||
Exch $STR_NEEDLE
|
||||
Exch 1
|
||||
|
@ -438,6 +438,7 @@ Var DesktopServerCheckbox
|
|||
Var ServerStartupCheckbox
|
||||
Var LaunchServerNowCheckbox
|
||||
Var LaunchClientNowCheckbox
|
||||
Var CleanInstallCheckbox
|
||||
Var CurrentOffset
|
||||
Var OffsetUnits
|
||||
Var CopyFromProductionCheckbox
|
||||
|
@ -475,27 +476,18 @@ Function PostInstallOptionsPage
|
|||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Create a desktop shortcut for @INTERFACE_HF_SHORTCUT_NAME@"
|
||||
Pop $DesktopClientCheckbox
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $DesktopClientCheckbox @CLIENT_DESKTOP_SHORTCUT_REG_KEY@ ${BST_CHECKED}
|
||||
${EndIf}
|
||||
|
||||
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Create a desktop shortcut for @CONSOLE_HF_SHORTCUT_NAME@"
|
||||
Pop $DesktopServerCheckbox
|
||||
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $DesktopServerCheckbox @CONSOLE_DESKTOP_SHORTCUT_REG_KEY@ ${BST_UNCHECKED}
|
||||
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @CONSOLE_HF_SHORTCUT_NAME@ on startup"
|
||||
Pop $ServerStartupCheckbox
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $ServerStartupCheckbox @CONSOLE_STARTUP_REG_KEY@ ${BST_CHECKED}
|
||||
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
|
@ -511,17 +503,33 @@ Function PostInstallOptionsPage
|
|||
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
${EndIf}
|
||||
|
||||
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @INTERFACE_HF_SHORTCUT_NAME@ after install"
|
||||
Pop $LaunchClientNowCheckbox
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $LaunchClientNowCheckbox @CLIENT_LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
|
||||
${StrContains} $substringResult "/forceNoLaunchClient" $CMDLINE
|
||||
${IfNot} $substringResult == ""
|
||||
${NSD_SetState} $LaunchClientNowCheckbox ${BST_UNCHECKED}
|
||||
${EndIf}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @INTERFACE_HF_SHORTCUT_NAME@ after install"
|
||||
Pop $LaunchClientNowCheckbox
|
||||
IntOp $CurrentOffset $CurrentOffset + 30
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $LaunchClientNowCheckbox @CLIENT_LAUNCH_NOW_REG_KEY@ ${BST_CHECKED}
|
||||
${StrContains} $substringResult "/forceNoLaunchClient" $CMDLINE
|
||||
${IfNot} $substringResult == ""
|
||||
${NSD_SetState} $LaunchClientNowCheckbox ${BST_UNCHECKED}
|
||||
${EndIf}
|
||||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Launch @CONSOLE_HF_SHORTCUT_NAME@ on startup"
|
||||
Pop $ServerStartupCheckbox
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
|
||||
; set the checkbox state depending on what is present in the registry
|
||||
!insertmacro SetPostInstallOption $ServerStartupCheckbox @CONSOLE_STARTUP_REG_KEY@ ${BST_CHECKED}
|
||||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
${NSD_CreateCheckbox} 0 $CurrentOffset$OffsetUnits 100% 10u "&Perform a clean install (Delete older settings and content)"
|
||||
Pop $CleanInstallCheckbox
|
||||
IntOp $CurrentOffset $CurrentOffset + 15
|
||||
${EndIf}
|
||||
|
||||
${If} @PR_BUILD@ == 1
|
||||
|
@ -543,7 +551,7 @@ Function PostInstallOptionsPage
|
|||
|
||||
${NSD_SetState} $CopyFromProductionCheckbox ${BST_CHECKED}
|
||||
${EndIf}
|
||||
|
||||
|
||||
nsDialogs::Show
|
||||
FunctionEnd
|
||||
|
||||
|
@ -558,6 +566,7 @@ Var ServerStartupState
|
|||
Var LaunchServerNowState
|
||||
Var LaunchClientNowState
|
||||
Var CopyFromProductionState
|
||||
Var CleanInstallState
|
||||
|
||||
Function ReadPostInstallOptions
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
|
@ -579,13 +588,18 @@ Function ReadPostInstallOptions
|
|||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@SERVER_COMPONENT_NAME@}
|
||||
; check if we need to launch the server post-install
|
||||
${NSD_GetState} $LaunchServerNowCheckbox $LaunchServerNowState
|
||||
; check if we need to launch the server post-install
|
||||
${NSD_GetState} $LaunchServerNowCheckbox $LaunchServerNowState
|
||||
${EndIf}
|
||||
|
||||
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
; check if we need to launch the client post-install
|
||||
${NSD_GetState} $LaunchClientNowCheckbox $LaunchClientNowState
|
||||
; check if we need to launch the client post-install
|
||||
${NSD_GetState} $LaunchClientNowCheckbox $LaunchClientNowState
|
||||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
; check if the user asked for a clean install
|
||||
${NSD_GetState} $CleanInstallCheckbox $CleanInstallState
|
||||
${EndIf}
|
||||
FunctionEnd
|
||||
|
||||
|
@ -628,6 +642,15 @@ Function HandlePostInstallOptions
|
|||
!insertmacro WritePostInstallOption @CONSOLE_STARTUP_REG_KEY@ NO
|
||||
${EndIf}
|
||||
${EndIf}
|
||||
|
||||
${If} ${SectionIsSelected} ${@CLIENT_COMPONENT_NAME@}
|
||||
; check if the user asked for a clean install
|
||||
${If} $CleanInstallState == ${BST_CHECKED}
|
||||
SetShellVarContext current
|
||||
RMDir /r "$APPDATA\@BUILD_ORGANIZATION@"
|
||||
RMDir /r "$LOCALAPPDATA\@BUILD_ORGANIZATION@"
|
||||
${EndIf}
|
||||
${EndIf}
|
||||
|
||||
${If} @PR_BUILD@ == 1
|
||||
|
||||
|
|
|
@ -50,6 +50,7 @@
|
|||
{
|
||||
"label": "Places / Paths",
|
||||
"html_id": "places_paths",
|
||||
"restart": false,
|
||||
"settings": [
|
||||
{
|
||||
"name": "paths",
|
||||
|
@ -75,6 +76,7 @@
|
|||
{
|
||||
"name": "descriptors",
|
||||
"label": "Description",
|
||||
"restart": false,
|
||||
"help": "This data will be queryable from your server. It may be collected by High Fidelity and used to share your domain with others.",
|
||||
"settings": [
|
||||
{
|
||||
|
|
|
@ -2,11 +2,11 @@ $(document).ready(function(){
|
|||
// setup the underscore templates
|
||||
var nodeTemplate = _.template($('#nodes-template').html());
|
||||
var queuedTemplate = _.template($('#queued-template').html());
|
||||
|
||||
|
||||
// setup a function to grab the assignments
|
||||
function getNodesAndAssignments() {
|
||||
$.getJSON("nodes.json", function(json){
|
||||
|
||||
|
||||
json.nodes.sort(function(a, b){
|
||||
if (a.type === b.type) {
|
||||
if (a.uptime < b.uptime) {
|
||||
|
@ -16,36 +16,50 @@ $(document).ready(function(){
|
|||
} else {
|
||||
return 0;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if (a.type === "agent" && b.type !== "agent") {
|
||||
return 1;
|
||||
} else if (b.type === "agent" && a.type !== "agent") {
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
if (a.type > b.type) {
|
||||
return 1;
|
||||
}
|
||||
|
||||
|
||||
if (a.type < b.type) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
|
||||
$('#nodes-table tbody').html(nodeTemplate(json));
|
||||
}).fail(function(jqXHR, textStatus, errorThrown) {
|
||||
// we assume a 401 means the DS has restarted
|
||||
// and no longer has our OAuth produced uuid
|
||||
// so just reload and re-auth
|
||||
if (jqXHR.status == 401) {
|
||||
location.reload();
|
||||
}
|
||||
});
|
||||
|
||||
$.getJSON("assignments.json", function(json){
|
||||
|
||||
$.getJSON("assignments.json", function(json){
|
||||
$('#assignments-table tbody').html(queuedTemplate(json));
|
||||
}).fail(function(jqXHR, textStatus, errorThrown) {
|
||||
// we assume a 401 means the DS has restarted
|
||||
// and no longer has our OAuth produced uuid
|
||||
// so just reload and re-auth
|
||||
if (jqXHR.status == 401) {
|
||||
location.reload();
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
|
||||
// do the first GET on page load
|
||||
getNodesAndAssignments();
|
||||
// grab the new assignments JSON every two seconds
|
||||
var getNodesAndAssignmentsInterval = setInterval(getNodesAndAssignments, 2000);
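Taken together, the dashboard logic above is a poll-and-reauthenticate loop: fetch nodes.json and assignments.json, redraw the tables, and treat a 401 as a stale OAuth session that is cured by reloading the page. As a minimal illustrative sketch (not part of this change, assuming the same jQuery environment the dashboard already uses), the pattern could be factored into one hypothetical helper:

```javascript
// Hypothetical helper, shown only to illustrate the polling pattern above.
function pollJSON(url, onData, intervalMs) {
    function fetchOnce() {
        $.getJSON(url, onData).fail(function(jqXHR) {
            // A 401 is taken to mean the domain-server restarted and no longer
            // knows our OAuth-produced session UUID, so reload to re-auth.
            if (jqXHR.status == 401) {
                location.reload();
            }
        });
    }
    fetchOnce();                               // first GET on page load
    return setInterval(fetchOnce, intervalMs); // then poll on a fixed interval
}

// Usage in the spirit of the code above:
// pollJSON("nodes.json", function (json) { /* re-render #nodes-table */ }, 2000);
```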
|
||||
|
||||
|
||||
// hook the node delete to the X button
|
||||
$(document.body).on('click', '.glyphicon-remove', function(){
|
||||
// fire off a delete for this node
|
||||
|
@ -57,10 +71,10 @@ $(document).ready(function(){
|
|||
}
|
||||
});
|
||||
});
|
||||
|
||||
|
||||
$(document.body).on('click', '#kill-all-btn', function() {
|
||||
var confirmed_kill = confirm("Are you sure?");
|
||||
|
||||
|
||||
if (confirmed_kill == true) {
|
||||
$.ajax({
|
||||
url: "/nodes/",
|
||||
|
|
|
@ -40,11 +40,11 @@
|
|||
#include <LogHandler.h>
|
||||
#include <PathUtils.h>
|
||||
#include <NumericalConstants.h>
|
||||
#include <Trace.h>
|
||||
#include <StatTracker.h>
|
||||
|
||||
#include "DomainServerNodeData.h"
|
||||
#include "NodeConnectionData.h"
|
||||
#include <Trace.h>
|
||||
#include <StatTracker.h>
|
||||
|
||||
int const DomainServer::EXIT_CODE_REBOOT = 234923;
|
||||
|
||||
|
@ -162,8 +162,10 @@ DomainServer::DomainServer(int argc, char* argv[]) :
|
|||
|
||||
_gatekeeper.preloadAllowedUserPublicKeys(); // so they can connect on first request
|
||||
|
||||
//send signal to DomainMetadata when descriptors changed
|
||||
_metadata = new DomainMetadata(this);
|
||||
|
||||
connect(&_settingsManager, &DomainServerSettingsManager::settingsUpdated,
|
||||
_metadata, &DomainMetadata::descriptorsChanged);
|
||||
|
||||
qDebug() << "domain-server is running";
|
||||
static const QString AC_SUBNET_WHITELIST_SETTING_PATH = "security.ac_subnet_whitelist";
|
||||
|
@ -1972,7 +1974,8 @@ bool DomainServer::handleHTTPRequest(HTTPConnection* connection, const QUrl& url
|
|||
return _settingsManager.handleAuthenticatedHTTPRequest(connection, url);
|
||||
}
|
||||
|
||||
const QString HIFI_SESSION_COOKIE_KEY = "DS_WEB_SESSION_UUID";
|
||||
static const QString HIFI_SESSION_COOKIE_KEY = "DS_WEB_SESSION_UUID";
|
||||
static const QString STATE_QUERY_KEY = "state";
|
||||
|
||||
bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &url, bool skipSubHandler) {
|
||||
qDebug() << "HTTPS request received at" << url.toString();
|
||||
|
@ -1983,10 +1986,9 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
|
|||
const QString CODE_QUERY_KEY = "code";
|
||||
QString authorizationCode = codeURLQuery.queryItemValue(CODE_QUERY_KEY);
|
||||
|
||||
const QString STATE_QUERY_KEY = "state";
|
||||
QUuid stateUUID = QUuid(codeURLQuery.queryItemValue(STATE_QUERY_KEY));
|
||||
|
||||
if (!authorizationCode.isEmpty() && !stateUUID.isNull()) {
|
||||
if (!authorizationCode.isEmpty() && !stateUUID.isNull() && _webAuthenticationStateSet.remove(stateUUID)) {
|
||||
// fire off a request with this code and state to get an access token for the user
|
||||
|
||||
const QString OAUTH_TOKEN_REQUEST_PATH = "/oauth/token";
|
||||
|
@ -2004,47 +2006,83 @@ bool DomainServer::handleHTTPSRequest(HTTPSConnection* connection, const QUrl &u
|
|||
tokenRequest.setHeader(QNetworkRequest::ContentTypeHeader, "application/x-www-form-urlencoded");
|
||||
|
||||
QNetworkReply* tokenReply = NetworkAccessManager::getInstance().post(tokenRequest, tokenPostBody.toLocal8Bit());
|
||||
connect(tokenReply, &QNetworkReply::finished, this, &DomainServer::tokenGrantFinished);
|
||||
|
||||
if (_webAuthenticationStateSet.remove(stateUUID)) {
|
||||
// this is a web user who wants to auth to access web interface
|
||||
// we hold the response back to them until we get their profile information
|
||||
// and can decide if they are let in or not
|
||||
// add this connection to our list of pending connections so that we can hold the response
|
||||
_pendingOAuthConnections.insert(stateUUID, connection);
|
||||
|
||||
QEventLoop loop;
|
||||
connect(tokenReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
|
||||
// set the state UUID on the reply so that we can associate the response with the connection later
|
||||
tokenReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), stateUUID);
|
||||
|
||||
// start the loop for the token request
|
||||
loop.exec();
|
||||
return true;
|
||||
} else {
|
||||
connection->respond(HTTPConnection::StatusCode400);
|
||||
|
||||
QNetworkReply* profileReply = profileRequestGivenTokenReply(tokenReply);
|
||||
return true;
|
||||
}
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
// stop the loop once the profileReply is complete
|
||||
connect(profileReply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
|
||||
HTTPSConnection* DomainServer::connectionFromReplyWithState(QNetworkReply* reply) {
|
||||
// grab the UUID state property from the reply
|
||||
QUuid stateUUID = reply->property(STATE_QUERY_KEY.toLocal8Bit()).toUuid();
|
||||
|
||||
// restart the loop for the profile request
|
||||
loop.exec();
|
||||
if (!stateUUID.isNull()) {
|
||||
return _pendingOAuthConnections.take(stateUUID);
|
||||
} else {
|
||||
return nullptr;
|
||||
}
|
||||
}
|
||||
|
||||
void DomainServer::tokenGrantFinished() {
|
||||
auto tokenReply = qobject_cast<QNetworkReply*>(sender());
|
||||
|
||||
if (tokenReply) {
|
||||
if (tokenReply->error() == QNetworkReply::NoError) {
|
||||
// now that we have a token for this profile, send off a profile request
|
||||
QNetworkReply* profileReply = profileRequestGivenTokenReply(tokenReply);
|
||||
|
||||
// forward along the state UUID that we kept with the token request
|
||||
profileReply->setProperty(STATE_QUERY_KEY.toLocal8Bit(), tokenReply->property(STATE_QUERY_KEY.toLocal8Bit()));
|
||||
|
||||
connect(profileReply, &QNetworkReply::finished, this, &DomainServer::profileRequestFinished);
|
||||
} else {
|
||||
// the token grant failed, send back a 500 (assuming the connection is still around)
|
||||
auto connection = connectionFromReplyWithState(tokenReply);
|
||||
|
||||
if (connection) {
|
||||
connection->respond(HTTPConnection::StatusCode500);
|
||||
}
|
||||
}
|
||||
|
||||
tokenReply->deleteLater();
|
||||
}
|
||||
}
|
||||
|
||||
void DomainServer::profileRequestFinished() {
|
||||
|
||||
auto profileReply = qobject_cast<QNetworkReply*>(sender());
|
||||
|
||||
if (profileReply) {
|
||||
auto connection = connectionFromReplyWithState(profileReply);
|
||||
|
||||
if (connection) {
|
||||
if (profileReply->error() == QNetworkReply::NoError) {
|
||||
// call helper method to get cookieHeaders
|
||||
Headers cookieHeaders = setupCookieHeadersFromProfileReply(profileReply);
|
||||
|
||||
connection->respond(HTTPConnection::StatusCode302, QByteArray(),
|
||||
HTTPConnection::DefaultContentType, cookieHeaders);
|
||||
|
||||
delete tokenReply;
|
||||
delete profileReply;
|
||||
|
||||
// we've redirected the user back to our homepage
|
||||
return true;
|
||||
|
||||
} else {
|
||||
// the profile request failed, send back a 500 (assuming the connection is still around)
|
||||
connection->respond(HTTPConnection::StatusCode500);
|
||||
}
|
||||
}
|
||||
|
||||
// respond with a 200 code indicating that login is complete
|
||||
connection->respond(HTTPConnection::StatusCode200);
|
||||
|
||||
return true;
|
||||
} else {
|
||||
return false;
|
||||
profileReply->deleteLater();
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2104,22 +2142,31 @@ bool DomainServer::isAuthenticatedRequest(HTTPConnection* connection, const QUrl
|
|||
// the user does not have allowed username or role, return 401
|
||||
return false;
|
||||
} else {
|
||||
// re-direct this user to OAuth page
|
||||
static const QByteArray REQUESTED_WITH_HEADER = "X-Requested-With";
|
||||
static const QString XML_REQUESTED_WITH = "XMLHttpRequest";
|
||||
|
||||
// generate a random state UUID to use
|
||||
QUuid stateUUID = QUuid::createUuid();
|
||||
if (connection->requestHeaders().value(REQUESTED_WITH_HEADER) == XML_REQUESTED_WITH) {
|
||||
// unauthorized XHR requests get a 401 and not a 302, since there isn't an XHR
|
||||
// path to OAuth authorize
|
||||
connection->respond(HTTPConnection::StatusCode401, UNAUTHENTICATED_BODY);
|
||||
} else {
|
||||
// re-direct this user to OAuth page
|
||||
|
||||
// add it to the set so we can handle the callback from the OAuth provider
|
||||
_webAuthenticationStateSet.insert(stateUUID);
|
||||
// generate a random state UUID to use
|
||||
QUuid stateUUID = QUuid::createUuid();
|
||||
|
||||
QUrl authURL = oauthAuthorizationURL(stateUUID);
|
||||
// add it to the set so we can handle the callback from the OAuth provider
|
||||
_webAuthenticationStateSet.insert(stateUUID);
|
||||
|
||||
Headers redirectHeaders;
|
||||
QUrl authURL = oauthAuthorizationURL(stateUUID);
|
||||
|
||||
redirectHeaders.insert("Location", authURL.toEncoded());
|
||||
Headers redirectHeaders;
|
||||
|
||||
connection->respond(HTTPConnection::StatusCode302,
|
||||
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
|
||||
redirectHeaders.insert("Location", authURL.toEncoded());
|
||||
|
||||
connection->respond(HTTPConnection::StatusCode302,
|
||||
QByteArray(), HTTPConnection::DefaultContentType, redirectHeaders);
|
||||
}
|
||||
|
||||
// we don't know about this user yet, so they are not yet authenticated
|
||||
return false;
|
||||
|
|
|
@ -111,6 +111,9 @@ private slots:
|
|||
void updateDownstreamNodes();
|
||||
void updateUpstreamNodes();
|
||||
|
||||
void tokenGrantFinished();
|
||||
void profileRequestFinished();
|
||||
|
||||
signals:
|
||||
void iceServerChanged();
|
||||
void userConnected();
|
||||
|
@ -178,6 +181,8 @@ private:
|
|||
|
||||
void updateReplicationNodes(ReplicationServerDirection direction);
|
||||
|
||||
HTTPSConnection* connectionFromReplyWithState(QNetworkReply* reply);
|
||||
|
||||
SubnetList _acSubnetWhitelist;
|
||||
|
||||
std::vector<QString> _replicatedUsernames;
|
||||
|
@ -235,6 +240,8 @@ private:
|
|||
|
||||
bool _sendICEServerAddressToMetaverseAPIInProgress { false };
|
||||
bool _sendICEServerAddressToMetaverseAPIRedo { false };
|
||||
|
||||
QHash<QUuid, QPointer<HTTPSConnection>> _pendingOAuthConnections;
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -1198,6 +1198,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
|
|||
static const QString SECURITY_ROOT_KEY = "security";
|
||||
static const QString AC_SUBNET_WHITELIST_KEY = "ac_subnet_whitelist";
|
||||
static const QString BROADCASTING_KEY = "broadcasting";
|
||||
static const QString DESCRIPTION_ROOT_KEY = "descriptors";
|
||||
|
||||
auto& settingsVariant = _configMap.getConfig();
|
||||
bool needRestart = false;
|
||||
|
@ -1249,7 +1250,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
|
|||
|
||||
if (!matchingDescriptionObject.isEmpty()) {
|
||||
updateSetting(rootKey, rootValue, *thisMap, matchingDescriptionObject);
|
||||
if (rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY) {
|
||||
if (rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY && rootKey != SETTINGS_PATHS_KEY ) {
|
||||
needRestart = true;
|
||||
}
|
||||
} else {
|
||||
|
@ -1265,7 +1266,7 @@ bool DomainServerSettingsManager::recurseJSONObjectAndOverwriteSettings(const QJ
|
|||
if (!matchingDescriptionObject.isEmpty()) {
|
||||
const QJsonValue& settingValue = rootValue.toObject()[settingKey];
|
||||
updateSetting(settingKey, settingValue, *thisMap, matchingDescriptionObject);
|
||||
if ((rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY)
|
||||
if ((rootKey != SECURITY_ROOT_KEY && rootKey != BROADCASTING_KEY && rootKey != DESCRIPTION_ROOT_KEY)
|
||||
|| settingKey == AC_SUBNET_WHITELIST_KEY) {
|
||||
needRestart = true;
|
||||
}
|
||||
|
|
|
@ -81,7 +81,11 @@
|
|||
|
||||
{ "from": { "makeAxis" : ["Keyboard.MouseMoveLeft", "Keyboard.MouseMoveRight"] },
|
||||
"when": "Keyboard.RightMouseButton",
|
||||
"to": "Actions.Yaw"
|
||||
"to": "Actions.Yaw",
|
||||
"filters":
|
||||
[
|
||||
{ "type": "scale", "scale": 0.6 }
|
||||
]
|
||||
},
|
||||
|
||||
{ "from": "Keyboard.W", "to": "Actions.LONGITUDINAL_FORWARD" },
|
||||
|
@ -102,8 +106,19 @@
|
|||
{ "from": "Keyboard.PgDown", "to": "Actions.VERTICAL_DOWN" },
|
||||
{ "from": "Keyboard.PgUp", "to": "Actions.VERTICAL_UP" },
|
||||
|
||||
{ "from": "Keyboard.MouseMoveUp", "when": "Keyboard.RightMouseButton", "to": "Actions.PITCH_UP" },
|
||||
{ "from": "Keyboard.MouseMoveDown", "when": "Keyboard.RightMouseButton", "to": "Actions.PITCH_DOWN" },
|
||||
{ "from": "Keyboard.MouseMoveUp", "when": "Keyboard.RightMouseButton", "to": "Actions.PITCH_UP",
|
||||
"filters":
|
||||
[
|
||||
{ "type": "scale", "scale": 0.6 }
|
||||
]
|
||||
|
||||
},
|
||||
{ "from": "Keyboard.MouseMoveDown", "when": "Keyboard.RightMouseButton", "to": "Actions.PITCH_DOWN",
|
||||
"filters":
|
||||
[
|
||||
{ "type": "scale", "scale": 0.6 }
|
||||
]
|
||||
},
|
||||
|
||||
{ "from": "Keyboard.TouchpadDown", "to": "Actions.PITCH_DOWN" },
|
||||
{ "from": "Keyboard.TouchpadUp", "to": "Actions.PITCH_UP" },
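For reference, each route in this mapping is a small JSON object; the new entries above add a "filters" array so right-button mouse look is scaled to 60% of the raw mouse delta. The same route shape, written as a JavaScript object literal purely for illustration (not part of the change itself):

```javascript
// Illustration only: the shape of one scaled mouse-look route from the mapping above.
var pitchUpRoute = {
    from: "Keyboard.MouseMoveUp",          // raw input channel
    when: "Keyboard.RightMouseButton",     // route is active only while right mouse is held
    to: "Actions.PITCH_UP",                // action the input drives
    filters: [
        { type: "scale", scale: 0.6 }      // damp the input to 60% before it reaches the action
    ]
};
```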
|
||||
|
|
Binary file not shown.
25
interface/resources/icons/tablet-icons/edit-disabled.svg
Normal file
|
@ -0,0 +1,25 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 19.2.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve" opacity="0.33">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<g id="Layer_2">
|
||||
</g>
|
||||
<g>
|
||||
<path class="st0" d="M20.7,29.7c-2.2,2.2-4.4,4.4-6.7,6.7c-0.5-0.5-1.1-1.1-1.6-1.6c2.2-2.2,4.4-4.4,6.7-6.7l-1.8-1.8
|
||||
c-2.6,2.5-5.1,5.1-7.7,7.6c-0.5,0.5-0.9,1.1-1,1.8C8.3,37.8,8,39.8,7.7,42c0.2,0,0.4,0,0.5,0c2-0.4,4-0.8,5.9-1.2
|
||||
c0.4-0.1,0.8-0.3,1.1-0.6c2.7-2.6,5.3-5.3,8-8L20.7,29.7z"/>
|
||||
<path class="st0" d="M31.1,11c0.8-0.8,1.8-1.8,2.7-2.7C34.2,8,34.6,8,34.9,8.4c1.6,1.6,3.1,3.1,4.7,4.7c0.4,0.4,0.4,0.8,0,1.2
|
||||
c-0.9,0.9-1.8,1.8-2.7,2.7C35,15,33.1,13,31.1,11z"/>
|
||||
<path class="st0" d="M33,25.9c-0.4,0.1-0.6,0-0.9-0.2c-0.6-0.6-1.3-1.3-1.9-1.9c1.5-1.5,3.1-3.1,4.6-4.6c0.1-0.1,0.3-0.3,0.3-0.3
|
||||
c-2-2-3.9-4-5.9-6.1c-0.1,0.2-0.2,0.3-0.4,0.5c-1.5,1.5-3,3-4.6,4.6c-2.8-2.8-5.6-5.6-8.4-8.4c-0.2-0.2-0.4-0.4-0.6-0.6
|
||||
c-1.5-1.2-3.5-1-4.8,0.4c-1.2,1.4-1.1,3.5,0.2,4.8c4.2,4.2,8.3,8.3,12.5,12.5c1.4,1.4,2.7,2.7,4.1,4.1c0.2,0.2,0.2,0.4,0.2,0.7
|
||||
c-0.2,0.6-0.3,1.2-0.3,1.9c-0.3,4,2.3,7.5,6.1,8.5c1.6,0.4,3.2,0.3,4.8-0.3c-0.1-0.2-0.3-0.2-0.4-0.4c-1.2-1.2-2.3-2.3-3.5-3.5
|
||||
c-0.8-0.9-0.9-2.1-0.1-3c0.6-0.7,1.3-1.3,2-2c0.9-0.8,2-0.8,2.9,0c0.2,0.2,0.3,0.3,0.5,0.5c1.2,1.2,2.3,2.3,3.5,3.5
|
||||
c0.1,0,0.1,0,0.2,0c0.1-0.7,0.3-1.3,0.3-2C43.9,28.7,38.5,24.3,33,25.9z M12.9,12.6c-0.6,0-1.2-0.5-1.2-1.2s0.5-1.2,1.2-1.2
|
||||
c0.6,0,1.2,0.6,1.2,1.2C14.1,12,13.6,12.6,12.9,12.6z M29.3,16.3c0.5,0.5,1,1.1,1.6,1.6c-1.1,1.1-2.2,2.2-3.3,3.3
|
||||
c-0.5-0.5-1.1-1.1-1.6-1.6C27.1,18.5,28.2,17.4,29.3,16.3z"/>
|
||||
</g>
|
||||
</svg>
|
19
interface/resources/icons/tablet-icons/spectator-a.svg
Normal file
|
@ -0,0 +1,19 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<path d="M43.9,13.3c-1.3-0.6-2.4-0.3-3.5,0.4c-1.6,1.2-3.3,2.4-5,3.5c-1.4-3.4-3.3-5-6.3-5c-5.9-0.1-11.9-0.1-17.9,0
|
||||
c-3.8,0.1-6.4,3.1-6.4,7.3c0,3.7-0.1,7.6,0,11.4c0,1.1,0.2,2.1,0.6,3.1c1.2,2.7,3.3,3.8,6,3.8c5.6,0,11-0.1,16.5,0
|
||||
c3.5,0.1,6-1.5,7.4-5.1c1.7,1.2,3.4,2.4,5.1,3.5c1.1,0.7,2.2,1.1,3.5,0.3c1.2-0.7,1.6-1.9,1.6-3.3c0-5.6,0-11,0-16.6
|
||||
C45.5,15.3,45.2,14.1,43.9,13.3z M32.2,30.5c0,2.5-1,3.6-3.4,3.6c-2.9,0-5.8,0-8.7,0s-5.6,0-8.5,0.1c-2.4-0.1-3.4-1.2-3.4-3.7
|
||||
c0-3.7,0-7.5,0-11.2c0-2.2,1.1-3.4,3.1-3.4c5.9,0,11.9,0,17.8,0c2,0,3.1,1.2,3.1,3.4C32.2,23,32.2,26.8,32.2,30.5z M41.9,32.8
|
||||
c-2.1-1.4-4.2-2.9-6.3-4.3c-0.1-0.1-0.2-0.4-0.2-0.7c0-1.9,0-3.7,0-5.5c0-0.3,0.1-0.7,0.3-0.8c2-1.4,4-2.8,6.2-4.3
|
||||
C41.9,22.3,41.9,27.4,41.9,32.8z"/>
|
||||
<path d="M27.4,25C27.4,24.7,27.4,25.2,27.4,25c0-1.1-0.1-2-0.2-2.7c-0.2-1.4-0.7-2.7-1.6-4c-0.2-0.3-0.5-0.5-1-0.6
|
||||
c-0.4-0.1-0.9,0-1.3,0.2c-0.5,0.3-0.7,1.3-0.3,1.8c1.2,1.6,1.4,3,1.4,4.8c0.1,2.1-0.2,3.4-1.5,5.2c-0.2,0.3-0.2,1.1,0.1,1.6
|
||||
c0.1,0.2,0.3,0.4,0.6,0.6c0.2,0.1,0.3,0.1,0.5,0.1c0.5,0,1-0.3,1.3-0.9C27,29.3,27.3,27.3,27.4,25L27.4,25z"/>
|
||||
<ellipse cx="15.2" cy="24.7" rx="2.1" ry="2.4"/>
|
||||
<path d="M22.3,24.8C22.3,24.7,22.3,25,22.3,24.8c0-0.7-0.1-1.5-0.1-1.9c-0.2-1-0.6-2.1-1.3-3c-0.1-0.2-0.4-0.5-0.9-0.5
|
||||
c-0.7,0-0.9,0.2-1.2,0.4c-0.4,0.2-0.5,0.9-0.2,1.3c0.9,1.2,1,2.1,1.1,3.5c0,1.6-0.2,2.5-1.1,3.8c-0.1,0.2-0.1,0.7,0,1.2
|
||||
c0.1,0.2,0.2,0.3,0.5,0.4c0.1,0,0.2,0.1,0.3,0.1c0.5,0.2,1.2,0.1,1.5-0.5C21.7,28,22.2,26.5,22.3,24.8L22.3,24.8z"/>
|
||||
</svg>
|
22
interface/resources/icons/tablet-icons/spectator-i.svg
Normal file
|
@ -0,0 +1,22 @@
|
|||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<!-- Generator: Adobe Illustrator 21.1.0, SVG Export Plug-In . SVG Version: 6.00 Build 0) -->
|
||||
<svg version="1.1" id="Layer_1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" y="0px"
|
||||
viewBox="0 0 50 50" style="enable-background:new 0 0 50 50;" xml:space="preserve">
|
||||
<style type="text/css">
|
||||
.st0{fill:#FFFFFF;}
|
||||
</style>
|
||||
<path class="st0" d="M43.9,13.3c-1.3-0.6-2.4-0.3-3.5,0.4c-1.6,1.2-3.3,2.4-5,3.5c-1.4-3.4-3.3-5-6.3-5c-5.9-0.1-11.9-0.1-17.9,0
|
||||
c-3.8,0.1-6.4,3.1-6.4,7.3c0,3.7-0.1,7.6,0,11.4c0,1.1,0.2,2.1,0.6,3.1c1.2,2.7,3.3,3.8,6,3.8c5.6,0,11-0.1,16.5,0
|
||||
c3.5,0.1,6-1.5,7.4-5.1c1.7,1.2,3.4,2.4,5.1,3.5c1.1,0.7,2.2,1.1,3.5,0.3c1.2-0.7,1.6-1.9,1.6-3.3c0-5.6,0-11,0-16.6
|
||||
C45.5,15.3,45.2,14.1,43.9,13.3z M32.2,30.5c0,2.5-1,3.6-3.4,3.6c-2.9,0-5.8,0-8.7,0s-5.6,0-8.5,0.1c-2.4-0.1-3.4-1.2-3.4-3.7
|
||||
c0-3.7,0-7.5,0-11.2c0-2.2,1.1-3.4,3.1-3.4c5.9,0,11.9,0,17.8,0c2,0,3.1,1.2,3.1,3.4C32.2,23,32.2,26.8,32.2,30.5z M41.9,32.8
|
||||
c-2.1-1.4-4.2-2.9-6.3-4.3c-0.1-0.1-0.2-0.4-0.2-0.7c0-1.9,0-3.7,0-5.5c0-0.3,0.1-0.7,0.3-0.8c2-1.4,4-2.8,6.2-4.3
|
||||
C41.9,22.3,41.9,27.4,41.9,32.8z"/>
|
||||
<path class="st0" d="M27.4,25C27.4,24.7,27.4,25.2,27.4,25c0-1.1-0.1-2-0.2-2.7c-0.2-1.4-0.7-2.7-1.6-4c-0.2-0.3-0.5-0.5-1-0.6
|
||||
c-0.4-0.1-0.9,0-1.3,0.2c-0.5,0.3-0.7,1.3-0.3,1.8c1.2,1.6,1.4,3,1.4,4.8c0.1,2.1-0.2,3.4-1.5,5.2c-0.2,0.3-0.2,1.1,0.1,1.6
|
||||
c0.1,0.2,0.3,0.4,0.6,0.6c0.2,0.1,0.3,0.1,0.5,0.1c0.5,0,1-0.3,1.3-0.9C27,29.3,27.3,27.3,27.4,25L27.4,25z"/>
|
||||
<ellipse class="st0" cx="15.2" cy="24.7" rx="2.1" ry="2.4"/>
|
||||
<path class="st0" d="M22.3,24.8C22.3,24.7,22.3,25,22.3,24.8c0-0.7-0.1-1.5-0.1-1.9c-0.2-1-0.6-2.1-1.3-3c-0.1-0.2-0.4-0.5-0.9-0.5
|
||||
c-0.7,0-0.9,0.2-1.2,0.4c-0.4,0.2-0.5,0.9-0.2,1.3c0.9,1.2,1,2.1,1.1,3.5c0,1.6-0.2,2.5-1.1,3.8c-0.1,0.2-0.1,0.7,0,1.2
|
||||
c0.1,0.2,0.2,0.3,0.5,0.4c0.1,0,0.2,0.1,0.3,0.1c0.5,0.2,1.2,0.1,1.5-0.5C21.7,28,22.2,26.5,22.3,24.8L22.3,24.8z"/>
|
||||
</svg>
|
BIN
interface/resources/images/calibration-help.png
Normal file
Binary file not shown.
BIN
interface/resources/images/static.gif
Normal file
Binary file not shown.
|
@ -18,7 +18,7 @@ Original.CheckBox {
|
|||
id: checkBox
|
||||
|
||||
property int colorScheme: hifi.colorSchemes.light
|
||||
property string color: hifi.colors.lightGray
|
||||
property string color: hifi.colors.lightGrayText
|
||||
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light
|
||||
property bool isRedCheck: false
|
||||
property int boxSize: 14
|
||||
|
|
64
interface/resources/qml/controls-uit/ImageMessageBox.qml
Normal file
|
@ -0,0 +1,64 @@
|
|||
//
|
||||
// ImageMessageBox.qml
|
||||
//
|
||||
// Created by Dante Ruiz on 7/5/2017
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import "../styles-uit"
|
||||
|
||||
Item {
|
||||
id: imageBox
|
||||
visible: false
|
||||
anchors.fill: parent
|
||||
property alias source: image.source
|
||||
property alias imageWidth: image.width
|
||||
property alias imageHeight: image.height
|
||||
|
||||
Rectangle {
|
||||
anchors.fill: parent
|
||||
color: "black"
|
||||
opacity: 0.3
|
||||
}
|
||||
|
||||
Image {
|
||||
id: image
|
||||
anchors.centerIn: parent
|
||||
|
||||
HiFiGlyphs {
|
||||
id: closeGlyphButton
|
||||
text: hifi.glyphs.close
|
||||
size: 25
|
||||
|
||||
anchors {
|
||||
top: parent.top
|
||||
topMargin: 15
|
||||
right: parent.right
|
||||
rightMargin: 15
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
hoverEnabled: true
|
||||
|
||||
onEntered: {
|
||||
parent.text = hifi.glyphs.closeInverted;
|
||||
}
|
||||
|
||||
onExited: {
|
||||
parent.text = hifi.glyphs.close;
|
||||
}
|
||||
|
||||
onClicked: {
|
||||
imageBox.visible = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
38
interface/resources/qml/controls-uit/Separator.qml
Normal file
|
@ -0,0 +1,38 @@
|
|||
//
|
||||
// Separator.qml
|
||||
//
|
||||
// Created by Zach Fox on 2017-06-06
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import "../styles-uit"
|
||||
|
||||
Item {
|
||||
// Size
|
||||
height: 2;
|
||||
Rectangle {
|
||||
// Size
|
||||
width: parent.width;
|
||||
height: 1;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.bottomMargin: height;
|
||||
// Style
|
||||
color: hifi.colors.baseGrayShadow;
|
||||
}
|
||||
Rectangle {
|
||||
// Size
|
||||
width: parent.width;
|
||||
height: 1;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.bottom: parent.bottom;
|
||||
// Style
|
||||
color: hifi.colors.baseGrayHighlight;
|
||||
}
|
||||
}
|
|
@ -36,7 +36,7 @@ Slider {
|
|||
|
||||
Rectangle {
|
||||
width: parent.height - 2
|
||||
height: slider.value * (slider.width/(slider.maximumValue - slider.minimumValue)) - 1
|
||||
height: slider.width * (slider.value - slider.minimumValue) / (slider.maximumValue - slider.minimumValue) - 1
|
||||
radius: height / 2
|
||||
anchors {
|
||||
top: parent.top
|
||||
|
|
156
interface/resources/qml/controls-uit/Switch.qml
Normal file
|
@ -0,0 +1,156 @@
|
|||
//
|
||||
// Switch.qml
|
||||
//
|
||||
// Created by Zach Fox on 2017-06-06
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4 as Original
|
||||
import QtQuick.Controls.Styles 1.4
|
||||
|
||||
import "../styles-uit"
|
||||
|
||||
Item {
|
||||
id: rootSwitch;
|
||||
|
||||
property int colorScheme: hifi.colorSchemes.light;
|
||||
readonly property bool isLightColorScheme: colorScheme == hifi.colorSchemes.light;
|
||||
property int switchWidth: 70;
|
||||
readonly property int switchRadius: height/2;
|
||||
property string labelTextOff: "";
|
||||
property string labelGlyphOffText: "";
|
||||
property int labelGlyphOffSize: 32;
|
||||
property string labelTextOn: "";
|
||||
property string labelGlyphOnText: "";
|
||||
property int labelGlyphOnSize: 32;
|
||||
property alias checked: originalSwitch.checked;
|
||||
signal onCheckedChanged;
|
||||
signal clicked;
|
||||
|
||||
Original.Switch {
|
||||
id: originalSwitch;
|
||||
activeFocusOnPress: true;
|
||||
anchors.top: rootSwitch.top;
|
||||
anchors.left: rootSwitch.left;
|
||||
anchors.leftMargin: rootSwitch.width/2 - rootSwitch.switchWidth/2;
|
||||
onCheckedChanged: rootSwitch.onCheckedChanged();
|
||||
onClicked: rootSwitch.clicked();
|
||||
|
||||
style: SwitchStyle {
|
||||
|
||||
padding {
|
||||
top: 3;
|
||||
left: 3;
|
||||
right: 3;
|
||||
bottom: 3;
|
||||
}
|
||||
|
||||
groove: Rectangle {
|
||||
color: "#252525";
|
||||
implicitWidth: rootSwitch.switchWidth;
|
||||
implicitHeight: rootSwitch.height;
|
||||
radius: rootSwitch.switchRadius;
|
||||
}
|
||||
|
||||
handle: Rectangle {
|
||||
id: switchHandle;
|
||||
implicitWidth: rootSwitch.height - padding.top - padding.bottom;
|
||||
implicitHeight: implicitWidth;
|
||||
radius: implicitWidth/2;
|
||||
border.color: hifi.colors.lightGrayText;
|
||||
color: hifi.colors.lightGray;
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent;
|
||||
hoverEnabled: true;
|
||||
onEntered: parent.color = hifi.colors.blueHighlight;
|
||||
onExited: parent.color = hifi.colors.lightGray;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// OFF Label
|
||||
Item {
|
||||
anchors.right: originalSwitch.left;
|
||||
anchors.rightMargin: 10;
|
||||
anchors.top: rootSwitch.top;
|
||||
height: rootSwitch.height;
|
||||
|
||||
RalewaySemiBold {
|
||||
id: labelOff;
|
||||
text: labelTextOff;
|
||||
size: hifi.fontSizes.inputLabel;
|
||||
color: originalSwitch.checked ? hifi.colors.lightGrayText : "#FFFFFF";
|
||||
anchors.top: parent.top;
|
||||
anchors.right: parent.right;
|
||||
width: paintedWidth;
|
||||
height: parent.height;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
|
||||
HiFiGlyphs {
|
||||
id: labelGlyphOff;
|
||||
text: labelGlyphOffText;
|
||||
size: labelGlyphOffSize;
|
||||
color: labelOff.color;
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 2;
|
||||
anchors.right: labelOff.left;
|
||||
anchors.rightMargin: 4;
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
anchors.top: parent.top;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.left: labelGlyphOff.left;
|
||||
anchors.right: labelOff.right;
|
||||
onClicked: {
|
||||
originalSwitch.checked = false;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ON Label
|
||||
Item {
|
||||
anchors.left: originalSwitch.right;
|
||||
anchors.leftMargin: 10;
|
||||
anchors.top: rootSwitch.top;
|
||||
height: rootSwitch.height;
|
||||
|
||||
RalewaySemiBold {
|
||||
id: labelOn;
|
||||
text: labelTextOn;
|
||||
size: hifi.fontSizes.inputLabel;
|
||||
color: originalSwitch.checked ? "#FFFFFF" : hifi.colors.lightGrayText;
|
||||
anchors.top: parent.top;
|
||||
anchors.left: parent.left;
|
||||
width: paintedWidth;
|
||||
height: parent.height;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
|
||||
HiFiGlyphs {
|
||||
id: labelGlyphOn;
|
||||
text: labelGlyphOnText;
|
||||
size: labelGlyphOnSize;
|
||||
color: labelOn.color;
|
||||
anchors.top: parent.top;
|
||||
anchors.left: labelOn.right;
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
anchors.top: parent.top;
|
||||
anchors.bottom: parent.bottom;
|
||||
anchors.left: labelOn.left;
|
||||
anchors.right: labelGlyphOn.right;
|
||||
onClicked: {
|
||||
originalSwitch.checked = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -72,6 +72,7 @@ Preference {
|
|||
property var avatarBuilder: Component { AvatarPreference { } }
|
||||
property var buttonBuilder: Component { ButtonPreference { } }
|
||||
property var comboBoxBuilder: Component { ComboBoxPreference { } }
|
||||
property var spinnerSliderBuilder: Component { SpinnerSliderPreference { } }
|
||||
property var preferences: []
|
||||
property int checkBoxCount: 0
|
||||
|
||||
|
@ -86,7 +87,7 @@ Preference {
|
|||
}
|
||||
|
||||
function buildPreference(preference) {
|
||||
console.log("\tPreference type " + preference.type + " name " + preference.name)
|
||||
console.log("\tPreference type " + preference.type + " name " + preference.name);
|
||||
var builder;
|
||||
switch (preference.type) {
|
||||
case Preference.Editable:
|
||||
|
@ -128,6 +129,11 @@ Preference {
|
|||
checkBoxCount = 0;
|
||||
builder = comboBoxBuilder;
|
||||
break;
|
||||
|
||||
case Preference.SpinnerSlider:
|
||||
checkBoxCount = 0;
|
||||
builder = spinnerSliderBuilder;
|
||||
break;
|
||||
};
|
||||
|
||||
if (builder) {
|
||||
|
|
|
@ -0,0 +1,111 @@
|
|||
//
|
||||
// SpinnerSliderPreference.qml
|
||||
//
|
||||
// Created by Cain Kilgore on 11th July 2017
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import QtQuick 2.5
|
||||
|
||||
import "../../dialogs"
|
||||
import "../../controls-uit"
|
||||
|
||||
Preference {
|
||||
id: root
|
||||
property alias slider: slider
|
||||
property alias spinner: spinner
|
||||
height: control.height + hifi.dimensions.controlInterlineHeight
|
||||
|
||||
Component.onCompleted: {
|
||||
slider.value = preference.value;
|
||||
spinner.value = preference.value;
|
||||
}
|
||||
|
||||
function save() {
|
||||
preference.value = slider.value;
|
||||
preference.save();
|
||||
}
|
||||
|
||||
Item {
|
||||
id: control
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: parent.right
|
||||
bottom: parent.bottom
|
||||
}
|
||||
height: Math.max(labelText.height, slider.height, spinner.height, button.height)
|
||||
|
||||
Label {
|
||||
id: labelText
|
||||
text: root.label + ":"
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
anchors {
|
||||
left: parent.left
|
||||
right: slider.left
|
||||
rightMargin: hifi.dimensions.labelPadding
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
horizontalAlignment: Text.AlignRight
|
||||
wrapMode: Text.Wrap
|
||||
}
|
||||
|
||||
Slider {
|
||||
id: slider
|
||||
value: preference.value
|
||||
width: 100
|
||||
minimumValue: MyAvatar.getDomainMinScale()
|
||||
maximumValue: MyAvatar.getDomainMaxScale()
|
||||
stepSize: preference.step
|
||||
onValueChanged: {
|
||||
spinner.value = value
|
||||
}
|
||||
anchors {
|
||||
right: spinner.left
|
||||
rightMargin: 10
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
}
|
||||
|
||||
SpinBox {
|
||||
id: spinner
|
||||
decimals: preference.decimals
|
||||
value: preference.value
|
||||
minimumValue: MyAvatar.getDomainMinScale()
|
||||
maximumValue: MyAvatar.getDomainMaxScale()
|
||||
width: 100
|
||||
onValueChanged: {
|
||||
slider.value = value;
|
||||
}
|
||||
anchors {
|
||||
right: button.left
|
||||
rightMargin: 10
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
}
|
||||
|
||||
GlyphButton {
|
||||
id: button
|
||||
onClicked: {
|
||||
if (spinner.maximumValue >= 1) {
|
||||
spinner.value = 1
|
||||
slider.value = 1
|
||||
} else {
|
||||
spinner.value = spinner.maximumValue
|
||||
slider.value = spinner.maximumValue
|
||||
}
|
||||
}
|
||||
width: 30
|
||||
glyph: hifi.glyphs.reload
|
||||
anchors {
|
||||
right: parent.right
|
||||
verticalCenter: parent.verticalCenter
|
||||
}
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
}
|
||||
}
|
||||
}
|
|
@ -32,14 +32,15 @@ Item {
|
|||
radius: popupRadius
|
||||
}
|
||||
Rectangle {
|
||||
width: Math.max(parent.width * 0.75, 400)
|
||||
id: textContainer;
|
||||
width: Math.max(parent.width * 0.8, 400)
|
||||
height: contentContainer.height + 50
|
||||
anchors.centerIn: parent
|
||||
radius: popupRadius
|
||||
color: "white"
|
||||
Item {
|
||||
id: contentContainer
|
||||
width: parent.width - 60
|
||||
width: parent.width - 50
|
||||
height: childrenRect.height
|
||||
anchors.centerIn: parent
|
||||
Item {
|
||||
|
@ -92,7 +93,7 @@ Item {
|
|||
anchors.top: parent.top
|
||||
anchors.topMargin: -20
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: -25
|
||||
anchors.rightMargin: -20
|
||||
MouseArea {
|
||||
anchors.fill: closeGlyphButton
|
||||
hoverEnabled: true
|
||||
|
@ -127,11 +128,51 @@ Item {
|
|||
color: hifi.colors.darkGray
|
||||
wrapMode: Text.WordWrap
|
||||
textFormat: Text.StyledText
|
||||
onLinkActivated: {
|
||||
Qt.openUrlExternally(link)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
// Left gray MouseArea
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
anchors.left: parent.left;
|
||||
anchors.right: textContainer.left;
|
||||
anchors.top: textContainer.top;
|
||||
anchors.bottom: textContainer.bottom;
|
||||
acceptedButtons: Qt.LeftButton
|
||||
onClicked: {
|
||||
letterbox.visible = false
|
||||
}
|
||||
}
|
||||
// Right gray MouseArea
|
||||
MouseArea {
|
||||
anchors.left: textContainer.right;
|
||||
anchors.right: parent.right;
|
||||
anchors.top: textContainer.top;
|
||||
anchors.bottom: textContainer.bottom;
|
||||
acceptedButtons: Qt.LeftButton
|
||||
onClicked: {
|
||||
letterbox.visible = false
|
||||
}
|
||||
}
|
||||
// Top gray MouseArea
|
||||
MouseArea {
|
||||
anchors.left: parent.left;
|
||||
anchors.right: parent.right;
|
||||
anchors.top: parent.top;
|
||||
anchors.bottom: textContainer.top;
|
||||
acceptedButtons: Qt.LeftButton
|
||||
onClicked: {
|
||||
letterbox.visible = false
|
||||
}
|
||||
}
|
||||
// Bottom gray MouseArea
|
||||
MouseArea {
|
||||
anchors.left: parent.left;
|
||||
anchors.right: parent.right;
|
||||
anchors.top: textContainer.bottom;
|
||||
anchors.bottom: parent.bottom;
|
||||
acceptedButtons: Qt.LeftButton
|
||||
onClicked: {
|
||||
letterbox.visible = false
|
||||
|
|
|
@ -1101,9 +1101,9 @@ Rectangle {
|
|||
case 'nearbyUsers':
|
||||
var data = message.params;
|
||||
var index = -1;
|
||||
iAmAdmin = Users.canKick;
|
||||
index = findNearbySessionIndex('', data);
|
||||
if (index !== -1) {
|
||||
iAmAdmin = Users.canKick;
|
||||
myData = data[index];
|
||||
data.splice(index, 1);
|
||||
} else {
|
||||
|
|
374
interface/resources/qml/hifi/SpectatorCamera.qml
Normal file
|
@ -0,0 +1,374 @@
|
|||
//
|
||||
// SpectatorCamera.qml
|
||||
// qml/hifi
|
||||
//
|
||||
// Spectator Camera
|
||||
//
|
||||
// Created by Zach Fox on 2017-06-05
|
||||
// Copyright 2016 High Fidelity, Inc.
|
||||
//
|
||||
// Distributed under the Apache License, Version 2.0.
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
import Hifi 1.0 as Hifi
|
||||
import QtQuick 2.5
|
||||
import QtQuick.Controls 1.4
|
||||
import "../styles-uit"
|
||||
import "../controls-uit" as HifiControlsUit
|
||||
import "../controls" as HifiControls
|
||||
|
||||
// references HMD, XXX from root context
|
||||
|
||||
Rectangle {
|
||||
HifiConstants { id: hifi; }
|
||||
|
||||
id: spectatorCamera;
|
||||
// Style
|
||||
color: hifi.colors.baseGray;
|
||||
|
||||
// The letterbox used for popup messages
|
||||
LetterboxMessage {
|
||||
id: letterboxMessage;
|
||||
z: 999; // Force the popup on top of everything else
|
||||
}
|
||||
function letterbox(headerGlyph, headerText, message) {
|
||||
letterboxMessage.headerGlyph = headerGlyph;
|
||||
letterboxMessage.headerText = headerText;
|
||||
letterboxMessage.text = message;
|
||||
letterboxMessage.visible = true;
|
||||
letterboxMessage.popupRadius = 0;
|
||||
}
|
||||
|
||||
//
|
||||
// TITLE BAR START
|
||||
//
|
||||
Item {
|
||||
id: titleBarContainer;
|
||||
// Size
|
||||
width: spectatorCamera.width;
|
||||
height: 50;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.top: parent.top;
|
||||
|
||||
// "Spectator" text
|
||||
RalewaySemiBold {
|
||||
id: titleBarText;
|
||||
text: "Spectator";
|
||||
// Text size
|
||||
size: hifi.fontSizes.overlayTitle;
|
||||
// Anchors
|
||||
anchors.fill: parent;
|
||||
anchors.leftMargin: 16;
|
||||
// Style
|
||||
color: hifi.colors.lightGrayText;
|
||||
// Alignment
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
|
||||
// Separator
|
||||
HifiControlsUit.Separator {
|
||||
anchors.left: parent.left;
|
||||
anchors.right: parent.right;
|
||||
anchors.bottom: parent.bottom;
|
||||
}
|
||||
}
|
||||
//
|
||||
// TITLE BAR END
|
||||
//
|
||||
|
||||
//
|
||||
// SPECTATOR APP DESCRIPTION START
|
||||
//
|
||||
Item {
|
||||
id: spectatorDescriptionContainer;
|
||||
// Size
|
||||
width: spectatorCamera.width;
|
||||
height: childrenRect.height;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.top: titleBarContainer.bottom;
|
||||
|
||||
// (i) Glyph
|
||||
HiFiGlyphs {
|
||||
id: spectatorDescriptionGlyph;
|
||||
text: hifi.glyphs.info;
|
||||
// Size
|
||||
width: 20;
|
||||
height: parent.height;
|
||||
size: 60;
|
||||
// Anchors
|
||||
anchors.left: parent.left;
|
||||
anchors.leftMargin: 20;
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 0;
|
||||
// Style
|
||||
color: hifi.colors.lightGrayText;
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignTop;
|
||||
}
|
||||
|
||||
// "Spectator" app description text
|
||||
RalewayLight {
|
||||
id: spectatorDescriptionText;
|
||||
text: "Spectator lets you change what your monitor displays while you're using a VR headset. Use Spectator when streaming and recording video.";
|
||||
// Text size
|
||||
size: 14;
|
||||
// Size
|
||||
width: 350;
|
||||
height: paintedHeight;
|
||||
// Anchors
|
||||
anchors.top: parent.top;
|
||||
anchors.topMargin: 15;
|
||||
anchors.left: spectatorDescriptionGlyph.right;
|
||||
anchors.leftMargin: 40;
|
||||
// Style
|
||||
color: hifi.colors.lightGrayText;
|
||||
wrapMode: Text.WordWrap;
|
||||
// Alignment
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
|
||||
// "Learn More" text
|
||||
RalewayRegular {
|
||||
id: spectatorLearnMoreText;
|
||||
text: "Learn More About Spectator";
|
||||
// Text size
|
||||
size: 14;
|
||||
// Size
|
||||
width: paintedWidth;
|
||||
height: paintedHeight;
|
||||
// Anchors
|
||||
anchors.top: spectatorDescriptionText.bottom;
|
||||
anchors.topMargin: 10;
|
||||
anchors.left: spectatorDescriptionText.anchors.left;
|
||||
anchors.leftMargin: spectatorDescriptionText.anchors.leftMargin;
|
||||
// Style
|
||||
color: hifi.colors.blueAccent;
|
||||
wrapMode: Text.WordWrap;
|
||||
font.underline: true;
|
||||
// Alignment
|
||||
horizontalAlignment: Text.AlignLeft;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent;
|
||||
hoverEnabled: enabled;
|
||||
onClicked: {
|
||||
letterbox(hifi.glyphs.question,
|
||||
"Spectator Camera",
|
||||
"By default, your monitor shows a preview of what you're seeing in VR. " +
|
||||
"Using the Spectator Camera app, your monitor can display the view " +
|
||||
"from a virtual hand-held camera - perfect for taking selfies or filming " +
|
||||
"your friends!<br>" +
|
||||
"<h3>Streaming and Recording</h3>" +
|
||||
"We recommend OBS for streaming and recording the contents of your monitor to services like " +
|
||||
"Twitch, YouTube Live, and Facebook Live.<br><br>" +
|
||||
"To get started using OBS, click this link now. The page will open in an external browser:<br>" +
|
||||
'<font size="4"><a href="https://obsproject.com/forum/threads/official-overview-guide.402/">OBS Official Overview Guide</a></font>');
|
||||
}
|
||||
onEntered: parent.color = hifi.colors.blueHighlight;
|
||||
onExited: parent.color = hifi.colors.blueAccent;
|
||||
}
|
||||
}
|
||||
|
||||
// Separator
|
||||
HifiControlsUit.Separator {
|
||||
anchors.left: parent.left;
|
||||
anchors.right: parent.right;
|
||||
anchors.top: spectatorLearnMoreText.bottom;
|
||||
anchors.topMargin: spectatorDescriptionText.anchors.topMargin;
|
||||
}
|
||||
}
|
||||
//
|
||||
// SPECTATOR APP DESCRIPTION END
|
||||
//
|
||||
|
||||
|
||||
//
|
||||
// SPECTATOR CONTROLS START
|
||||
//
|
||||
Item {
|
||||
id: spectatorControlsContainer;
|
||||
// Size
|
||||
height: spectatorCamera.height - spectatorDescriptionContainer.height - titleBarContainer.height;
|
||||
// Anchors
|
||||
anchors.top: spectatorDescriptionContainer.bottom;
|
||||
anchors.topMargin: 20;
|
||||
anchors.left: parent.left;
|
||||
anchors.leftMargin: 25;
|
||||
anchors.right: parent.right;
|
||||
anchors.rightMargin: anchors.leftMargin;
|
||||
|
||||
// "Camera On" Checkbox
|
||||
HifiControlsUit.CheckBox {
|
||||
id: cameraToggleCheckBox;
|
||||
colorScheme: hifi.colorSchemes.dark;
|
||||
anchors.left: parent.left;
|
||||
anchors.top: parent.top;
|
||||
text: "Spectator Camera On";
|
||||
boxSize: 24;
|
||||
onClicked: {
|
||||
sendToScript({method: (checked ? 'spectatorCameraOn' : 'spectatorCameraOff')});
|
||||
spectatorCameraPreview.ready = checked;
|
||||
}
|
||||
}
|
||||
|
||||
// Instructions or Preview
|
||||
Rectangle {
|
||||
id: spectatorCameraImageContainer;
|
||||
anchors.left: parent.left;
|
||||
anchors.top: cameraToggleCheckBox.bottom;
|
||||
anchors.topMargin: 20;
|
||||
anchors.right: parent.right;
|
||||
height: 250;
|
||||
color: cameraToggleCheckBox.checked ? "transparent" : "black";
|
||||
|
||||
AnimatedImage {
|
||||
source: "../../images/static.gif"
|
||||
visible: !cameraToggleCheckBox.checked;
|
||||
anchors.fill: parent;
|
||||
opacity: 0.15;
|
||||
}
|
||||
|
||||
// Instructions (visible when display texture isn't set)
|
||||
FiraSansRegular {
|
||||
id: spectatorCameraInstructions;
|
||||
text: "Turn on Spectator Camera for a preview\nof what your monitor shows.";
|
||||
size: 16;
|
||||
color: hifi.colors.lightGrayText;
|
||||
visible: !cameraToggleCheckBox.checked;
|
||||
anchors.fill: parent;
|
||||
horizontalAlignment: Text.AlignHCenter;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
|
||||
// Spectator Camera Preview
|
||||
Hifi.ResourceImageItem {
|
||||
id: spectatorCameraPreview;
|
||||
visible: cameraToggleCheckBox.checked;
|
||||
url: monitorShowsSwitch.checked ? "resource://spectatorCameraFrame" : "resource://hmdPreviewFrame";
|
||||
ready: cameraToggleCheckBox.checked;
|
||||
mirrorVertically: true;
|
||||
anchors.fill: parent;
|
||||
onVisibleChanged: {
|
||||
ready = cameraToggleCheckBox.checked;
|
||||
update();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// "Monitor Shows" Switch Label Glyph
|
||||
HiFiGlyphs {
|
||||
id: monitorShowsSwitchLabelGlyph;
|
||||
text: hifi.glyphs.screen;
|
||||
size: 32;
|
||||
color: hifi.colors.blueHighlight;
|
||||
anchors.top: spectatorCameraImageContainer.bottom;
|
||||
anchors.topMargin: 13;
|
||||
anchors.left: parent.left;
|
||||
}
|
||||
// "Monitor Shows" Switch Label
|
||||
RalewayLight {
|
||||
id: monitorShowsSwitchLabel;
|
||||
text: "MONITOR SHOWS:";
|
||||
anchors.top: spectatorCameraImageContainer.bottom;
|
||||
anchors.topMargin: 20;
|
||||
anchors.left: monitorShowsSwitchLabelGlyph.right;
|
||||
anchors.leftMargin: 6;
|
||||
size: 16;
|
||||
width: paintedWidth;
|
||||
height: paintedHeight;
|
||||
color: hifi.colors.lightGrayText;
|
||||
verticalAlignment: Text.AlignVCenter;
|
||||
}
|
||||
// "Monitor Shows" Switch
|
||||
HifiControlsUit.Switch {
|
||||
id: monitorShowsSwitch;
|
||||
height: 30;
|
||||
anchors.left: parent.left;
|
||||
anchors.right: parent.right;
|
||||
anchors.top: monitorShowsSwitchLabel.bottom;
|
||||
anchors.topMargin: 10;
|
||||
labelTextOff: "HMD Preview";
|
||||
labelTextOn: "Camera View";
|
||||
labelGlyphOnText: hifi.glyphs.alert;
|
||||
onCheckedChanged: {
|
||||
sendToScript({method: 'setMonitorShowsCameraView', params: checked});
|
||||
}
|
||||
}
|
||||
|
||||
// "Switch View From Controller" Checkbox
|
||||
HifiControlsUit.CheckBox {
|
||||
id: switchViewFromControllerCheckBox;
|
||||
colorScheme: hifi.colorSchemes.dark;
|
||||
anchors.left: parent.left;
|
||||
anchors.top: monitorShowsSwitch.bottom;
|
||||
anchors.topMargin: 25;
|
||||
text: "";
|
||||
boxSize: 24;
|
||||
onClicked: {
|
||||
sendToScript({method: 'changeSwitchViewFromControllerPreference', params: checked});
|
||||
}
|
||||
}
|
||||
}
|
||||
//
|
||||
// SPECTATOR CONTROLS END
|
||||
//
|
||||
|
||||
//
|
||||
// FUNCTION DEFINITIONS START
|
||||
//
|
||||
//
|
||||
// Function Name: fromScript()
|
||||
//
|
||||
// Relevant Variables:
|
||||
// None
|
||||
//
|
||||
// Arguments:
|
||||
// message: The message sent from the SpectatorCamera JavaScript.
|
||||
// Messages are in format "{method, params}", like json-rpc.
|
||||
//
|
||||
// Description:
|
||||
// Called when a message is received from spectatorCamera.js.
|
||||
//
|
||||
function fromScript(message) {
|
||||
switch (message.method) {
|
||||
case 'updateSpectatorCameraCheckbox':
|
||||
cameraToggleCheckBox.checked = message.params;
|
||||
break;
|
||||
case 'updateMonitorShowsSwitch':
|
||||
monitorShowsSwitch.checked = message.params;
|
||||
break;
|
||||
case 'updateControllerMappingCheckbox':
|
||||
switchViewFromControllerCheckBox.checked = message.setting;
|
||||
switchViewFromControllerCheckBox.enabled = true;
|
||||
if (message.controller === "OculusTouch") {
|
||||
switchViewFromControllerCheckBox.text = "Clicking Touch's Left Thumbstick Switches Monitor View";
|
||||
} else if (message.controller === "Vive") {
|
||||
switchViewFromControllerCheckBox.text = "Clicking Left Thumb Pad Switches Monitor View";
|
||||
} else {
|
||||
switchViewFromControllerCheckBox.text = "Pressing Ctrl+0 Switches Monitor View";
|
||||
switchViewFromControllerCheckBox.checked = true;
|
||||
switchViewFromControllerCheckBox.enabled = false;
|
||||
}
|
||||
break;
|
||||
case 'showPreviewTextureNotInstructions':
|
||||
console.log('showPreviewTextureNotInstructions recvd', JSON.stringify(message));
|
||||
spectatorCameraPreview.url = message.url;
|
||||
spectatorCameraPreview.visible = message.setting;
|
||||
break;
|
||||
default:
|
||||
console.log('Unrecognized message from spectatorCamera.js:', JSON.stringify(message));
|
||||
}
|
||||
}
|
||||
signal sendToScript(var message);
|
||||
|
||||
//
|
||||
// FUNCTION DEFINITIONS END
|
||||
//
|
||||
}
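The fromScript()/sendToScript pair above defines a small json-rpc-style protocol between this QML and its companion script (spectatorCamera.js). A sketch of what the script side of that exchange might look like, assuming the usual tablet scripting pattern (Tablet.getTablet, fromQml, sendToQml); it is illustrative only and not part of this commit:

```javascript
// Illustrative script-side counterpart of the {method, params} protocol above.
var tablet = Tablet.getTablet("com.highfidelity.interface.tablet.system");

// QML -> script: delivered via the QML sendToScript signal.
function onMessageFromQml(message) {
    switch (message.method) {
        case "spectatorCameraOn":
        case "spectatorCameraOff":
            // enable or disable the spectator camera here
            break;
        case "setMonitorShowsCameraView":
            // message.params carries the switch state (true = camera view)
            break;
    }
}
tablet.fromQml.connect(onMessageFromQml);

// Script -> QML: drives the fromScript() cases shown above.
tablet.sendToQml({ method: "updateSpectatorCameraCheckbox", params: true });
```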
|
|
@ -117,26 +117,28 @@ Rectangle {
|
|||
delegate: Item {
|
||||
width: parent.width;
|
||||
height: 36;
|
||||
|
||||
AudioControls.CheckBox {
|
||||
id: checkbox
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.left: parent.left
|
||||
text: display;
|
||||
wrap: false;
|
||||
checked: selected;
|
||||
enabled: false;
|
||||
}
|
||||
|
||||
RowLayout {
|
||||
width: parent.width;
|
||||
MouseArea {
|
||||
anchors.fill: checkbox
|
||||
onClicked: Audio.setInputDevice(info);
|
||||
}
|
||||
|
||||
AudioControls.CheckBox {
|
||||
Layout.maximumWidth: parent.width - level.width - 40;
|
||||
text: display;
|
||||
wrap: false;
|
||||
checked: selected;
|
||||
onClicked: {
|
||||
selected = checked;
|
||||
checked = Qt.binding(function() { return selected; }); // restore binding
|
||||
}
|
||||
}
|
||||
InputLevel {
|
||||
id: level;
|
||||
Layout.alignment: Qt.AlignRight;
|
||||
Layout.rightMargin: 30;
|
||||
visible: selected;
|
||||
}
|
||||
InputLevel {
|
||||
id: level;
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.right: parent.right
|
||||
anchors.rightMargin: 30
|
||||
visible: selected;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -166,7 +168,7 @@ Rectangle {
|
|||
|
||||
ListView {
|
||||
anchors { left: parent.left; right: parent.right; leftMargin: 70 }
|
||||
height: 125;
|
||||
height: Math.min(250, contentHeight);
|
||||
spacing: 0;
|
||||
snapMode: ListView.SnapToItem;
|
||||
clip: true;
|
||||
|
@ -174,13 +176,19 @@ Rectangle {
|
|||
delegate: Item {
|
||||
width: parent.width;
|
||||
height: 36;
|
||||
|
||||
AudioControls.CheckBox {
|
||||
id: checkbox
|
||||
anchors.verticalCenter: parent.verticalCenter
|
||||
anchors.left: parent.left
|
||||
text: display;
|
||||
checked: selected;
|
||||
onClicked: {
|
||||
selected = checked;
|
||||
checked = Qt.binding(function() { return selected; }); // restore binding
|
||||
}
|
||||
enabled: false;
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: checkbox
|
||||
onClicked: Audio.setOutputDevice(info);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -506,7 +506,7 @@ Rectangle {
|
|||
}
|
||||
HifiControls.Tree {
|
||||
id: treeView
|
||||
height: 430
|
||||
height: 290
|
||||
anchors.leftMargin: hifi.dimensions.contentMargin.x + 2 // Extra for border
|
||||
anchors.rightMargin: hifi.dimensions.contentMargin.x + 2 // Extra for border
|
||||
anchors.left: parent.left
|
||||
|
|
|
@ -65,7 +65,7 @@ Rectangle {
|
|||
|
||||
HiFiGlyphs {
|
||||
id: image
|
||||
text: hifi.glyphs.avatar1
|
||||
text: hifi.glyphs.avatarTPose
|
||||
size: 190
|
||||
color: hifi.colors.white
|
||||
|
||||
|
|
|
@ -16,6 +16,7 @@ import "../../controls-uit" as HifiControls
|
|||
StackView {
|
||||
id: stack
|
||||
initialItem: inputConfiguration
|
||||
property alias messageVisible: imageMessageBox.visible
|
||||
Rectangle {
|
||||
id: inputConfiguration
|
||||
anchors.fill: parent
|
||||
|
@ -26,6 +27,15 @@ StackView {
|
|||
|
||||
property var pluginSettings: null
|
||||
|
||||
HifiControls.ImageMessageBox {
|
||||
id: imageMessageBox
|
||||
anchors.fill: parent
|
||||
z: 2000
|
||||
imageWidth: 442
|
||||
imageHeight: 670
|
||||
source: "../../../images/calibration-help.png"
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
width: inputConfiguration.width
|
||||
height: 1
|
||||
|
@ -167,7 +177,7 @@ StackView {
|
|||
loader.item.pluginName = box.currentText;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
if (loader.item.hasOwnProperty("displayInformation")) {
|
||||
loader.item.displayConfiguration();
|
||||
}
|
||||
|
@ -183,20 +193,20 @@ StackView {
|
|||
return InputConfiguration.activeInputPlugins();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
function initialize() {
|
||||
changeSource();
|
||||
}
|
||||
|
||||
|
||||
function changeSource() {
|
||||
loader.source = "";
|
||||
var source = "";
|
||||
if (box.currentText == "Vive") {
|
||||
source = InputConfiguration.configurationLayout("OpenVR");
|
||||
} else {
|
||||
} else {
|
||||
source = InputConfiguration.configurationLayout(box.currentText);
|
||||
}
|
||||
|
||||
|
||||
loader.source = source;
|
||||
if (source === "") {
|
||||
box.label = "(not configurable)";
|
||||
|
@ -204,14 +214,14 @@ StackView {
|
|||
box.label = "";
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
Timer {
|
||||
id: timer
|
||||
repeat: false
|
||||
interval: 300
|
||||
onTriggered: initialize()
|
||||
}
|
||||
|
||||
|
||||
Component.onCompleted: {
|
||||
timer.start();
|
||||
}
|
||||
|
|
|
@ -21,7 +21,7 @@ TabView {
|
|||
enabled: true
|
||||
property string originalUrl: ""
|
||||
|
||||
Rectangle {
|
||||
Rectangle {
|
||||
color: "#404040"
|
||||
|
||||
Text {
|
||||
|
@ -180,7 +180,7 @@ TabView {
|
|||
|
||||
WebView {
|
||||
id: entityListToolWebView
|
||||
url: "../../../../../scripts/system/html/entityList.html"
|
||||
url: Paths.defaultScripts + "/system/html/entityList.html"
|
||||
anchors.fill: parent
|
||||
enabled: true
|
||||
}
|
||||
|
@ -194,7 +194,7 @@ TabView {
|
|||
|
||||
WebView {
|
||||
id: entityPropertiesWebView
|
||||
url: "../../../../../scripts/system/html/entityProperties.html"
|
||||
url: Paths.defaultScripts + "/system/html/entityProperties.html"
|
||||
anchors.fill: parent
|
||||
enabled: true
|
||||
}
|
||||
|
@ -208,7 +208,7 @@ TabView {
|
|||
|
||||
WebView {
|
||||
id: gridControlsWebView
|
||||
url: "../../../../../scripts/system/html/gridControls.html"
|
||||
url: Paths.defaultScripts + "/system/html/gridControls.html"
|
||||
anchors.fill: parent
|
||||
enabled: true
|
||||
}
|
||||
|
@ -222,7 +222,7 @@ TabView {
|
|||
|
||||
WebView {
|
||||
id: particleExplorerWebView
|
||||
url: "../../../../../scripts/system/particle_explorer/particleExplorer.html"
|
||||
url: Paths.defaultScripts + "/system/particle_explorer/particleExplorer.html"
|
||||
anchors.fill: parent
|
||||
enabled: true
|
||||
}
|
||||
|
@ -293,16 +293,16 @@ TabView {
|
|||
break;
|
||||
case 'list':
|
||||
editTabView.currentIndex = 1;
|
||||
break;
|
||||
break;
|
||||
case 'properties':
|
||||
editTabView.currentIndex = 2;
|
||||
break;
|
||||
break;
|
||||
case 'grid':
|
||||
editTabView.currentIndex = 3;
|
||||
break;
|
||||
break;
|
||||
case 'particle':
|
||||
editTabView.currentIndex = 4;
|
||||
break;
|
||||
break;
|
||||
default:
|
||||
console.warn('Attempt to switch to invalid tab:', id);
|
||||
}
|
||||
|
@ -310,4 +310,4 @@ TabView {
|
|||
console.warn('Attempt to switch tabs with invalid input:', JSON.stringify(id));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -65,7 +65,8 @@ Rectangle {
|
|||
onClicked: {
|
||||
newModelDialog.keyboardEnabled = HMD.active
|
||||
parent.focus = true;
|
||||
parent.forceActiveFocus()
|
||||
parent.forceActiveFocus();
|
||||
modelURL.cursorPosition = modelURL.positionAt(mouseX, mouseY, TextInput.CursorBetweenCharacters);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -50,9 +50,12 @@ Rectangle {
|
|||
readonly property int apply: 1
|
||||
readonly property int applyAndCalibrate: 2
|
||||
readonly property int calibrate: 3
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
MouseArea {
|
||||
id: mouseArea
|
||||
|
@ -64,6 +67,7 @@ Rectangle {
|
|||
mouse.accepted = false;
|
||||
}
|
||||
}
|
||||
|
||||
color: hifi.colors.baseGray
|
||||
|
||||
RalewayBold {
|
||||
|
@ -146,6 +150,7 @@ Rectangle {
|
|||
label: "Y: offset"
|
||||
minimumValue: -10
|
||||
stepSize: 0.0254
|
||||
value: -0.05
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
||||
onEditingFinished: {
|
||||
|
@ -161,15 +166,16 @@ Rectangle {
|
|||
minimumValue: -10
|
||||
stepSize: 0.0254
|
||||
decimals: 4
|
||||
value: -0.05
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
||||
|
||||
onEditingFinished: {
|
||||
sendConfigurationSettings();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
RalewayBold {
|
||||
id: hands
|
||||
|
||||
|
@ -245,7 +251,7 @@ Rectangle {
|
|||
anchors.left: openVrConfiguration.left
|
||||
anchors.leftMargin: leftMargin + 10
|
||||
spacing: 10
|
||||
|
||||
|
||||
HifiControls.SpinBox {
|
||||
id: handYOffset
|
||||
decimals: 4
|
||||
|
@ -269,7 +275,7 @@ Rectangle {
|
|||
stepSize: 0.0254
|
||||
decimals: 4
|
||||
colorScheme: hifi.colorSchemes.dark
|
||||
|
||||
|
||||
onEditingFinished: {
|
||||
sendConfigurationSettings();
|
||||
}
|
||||
|
@ -290,6 +296,52 @@ Rectangle {
|
|||
anchors.leftMargin: leftMargin
|
||||
}
|
||||
|
||||
RalewayRegular {
|
||||
id: info
|
||||
|
||||
text: "See Recommended Tracker Placement"
|
||||
color: hifi.colors.blueHighlight
|
||||
size: 10
|
||||
anchors {
|
||||
left: additional.right
|
||||
leftMargin: 10
|
||||
verticalCenter: additional.verticalCenter
|
||||
}
|
||||
|
||||
Rectangle {
|
||||
id: selected
|
||||
color: hifi.colors.blueHighlight
|
||||
|
||||
width: info.width
|
||||
height: 1
|
||||
|
||||
anchors {
|
||||
top: info.bottom
|
||||
topMargin: 1
|
||||
left: info.left
|
||||
right: info.right
|
||||
}
|
||||
|
||||
visible: false
|
||||
}
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent;
|
||||
hoverEnabled: true
|
||||
|
||||
onEntered: {
|
||||
selected.visible = true;
|
||||
}
|
||||
|
||||
onExited: {
|
||||
selected.visible = false;
|
||||
}
|
||||
onClicked: {
|
||||
stack.messageVisible = true;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Row {
|
||||
id: feetConfig
|
||||
anchors.top: additional.bottom
|
||||
|
@ -379,6 +431,7 @@ Rectangle {
|
|||
if (checked) {
|
||||
hipBox.checked = true;
|
||||
feetBox.checked = true;
|
||||
shoulderBox.checked = false;
|
||||
}
|
||||
sendConfigurationSettings();
|
||||
}
|
||||
|
@ -416,6 +469,7 @@ Rectangle {
|
|||
if (checked) {
|
||||
hipBox.checked = true;
|
||||
feetBox.checked = true;
|
||||
chestBox.checked = false;
|
||||
}
|
||||
sendConfigurationSettings();
|
||||
}
|
||||
|
@ -463,7 +517,7 @@ Rectangle {
|
|||
anchors.leftMargin: leftMargin
|
||||
|
||||
radius: hifi.buttons.radius
|
||||
|
||||
|
||||
gradient: Gradient {
|
||||
GradientStop {
|
||||
position: 0.2
|
||||
|
@ -479,7 +533,7 @@ Rectangle {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
GradientStop {
|
||||
position: 1.0
|
||||
color: {
|
||||
|
@ -495,10 +549,10 @@ Rectangle {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
HiFiGlyphs {
|
||||
id: glyphButton
|
||||
color: enabled ? hifi.buttons.textColor[calibrationButton.color]
|
||||
|
@ -512,7 +566,7 @@ Rectangle {
|
|||
bottomMargin: 1
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
RalewayBold {
|
||||
id: calibrationText
|
||||
font.capitalization: Font.AllUppercase
|
||||
|
@ -527,7 +581,7 @@ Rectangle {
|
|||
topMargin: 7
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
MouseArea {
|
||||
anchors.fill: parent
|
||||
|
@ -549,19 +603,19 @@ Rectangle {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
onPressed: {
|
||||
calibrationButton.pressed = true;
|
||||
}
|
||||
|
||||
|
||||
onReleased: {
|
||||
calibrationButton.pressed = false;
|
||||
}
|
||||
|
||||
|
||||
onEntered: {
|
||||
calibrationButton.hovered = true;
|
||||
}
|
||||
|
||||
|
||||
onExited: {
|
||||
calibrationButton.hovered = false;
|
||||
}
|
||||
|
@@ -652,7 +706,7 @@ Rectangle {
    RalewayBold {
        id: advanceSettings

        text: "Advance Settings"
        text: "Advanced Settings"
        size: 12

        color: hifi.colors.white

@@ -683,7 +737,7 @@ Rectangle {
    RalewayBold {
        id: viveDesktopText
        size: 10
        text: "Use vive devices in desktop mode"
        text: "Use Vive devices in desktop mode"
        color: hifi.colors.white

        anchors {
@ -718,14 +772,14 @@ Rectangle {
|
|||
calibratingScreen = screen.createObject();
|
||||
stack.push(calibratingScreen);
|
||||
}
|
||||
|
||||
|
||||
if (status["calibrated"]) {
|
||||
calibrationScreen.success();
|
||||
|
||||
if (status["UI"]) {
|
||||
logAction("mocap_ui_success", status);
|
||||
}
|
||||
|
||||
|
||||
} else if (!status["calibrated"]) {
|
||||
calibrationScreen.failure();
|
||||
|
||||
|
@ -840,11 +894,11 @@ Rectangle {
|
|||
var handOverride = handSetting["override"];
|
||||
|
||||
var settingsChanged = false;
|
||||
|
||||
|
||||
if (lastConfiguration["bodyConfiguration"] !== bodySetting) {
|
||||
settingsChanged = true;
|
||||
}
|
||||
|
||||
|
||||
var lastHead = lastConfiguration["headConfiguration"];
|
||||
if (lastHead["override"] !== headOverride) {
|
||||
settingsChanged = true;
|
||||
|
@ -854,13 +908,13 @@ Rectangle {
|
|||
if (lastHand["override"] !== handOverride) {
|
||||
settingsChanged = true;
|
||||
}
|
||||
|
||||
|
||||
if (settingsChanged) {
|
||||
if ((!handOverride) && (!headOverride) && (bodySetting === "None")) {
|
||||
state = buttonState.apply;
|
||||
} else {
|
||||
state = buttonState.applyAndCalibrate;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if (state == buttonState.apply) {
|
||||
state = buttonState.disabled;
|
||||
|
@ -868,7 +922,7 @@ Rectangle {
|
|||
state = buttonState.calibrate;
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
lastConfiguration = settings;
|
||||
}
|
||||
|
||||
|
@ -885,7 +939,7 @@ Rectangle {
|
|||
state = buttonState.disabled;
|
||||
} else {
|
||||
state = buttonState.calibrate;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function updateCalibrationButton() {
|
||||
|
@ -951,7 +1005,7 @@ Rectangle {
|
|||
"Y": handYOffset.value,
|
||||
"Z": handZOffset.value
|
||||
}
|
||||
|
||||
|
||||
var settingsObject = {
|
||||
"bodyConfiguration": trackerConfiguration,
|
||||
"headConfiguration": headObject,
|
||||
|
|
|
@@ -94,10 +94,26 @@ StackView {
    property bool keyboardEnabled: false
    property bool keyboardRaised: false
    property bool punctuationMode: false

    width: parent.width
    height: parent.height

    MouseArea {
        anchors {
            top: parent.top
            left: parent.left
            right: parent.right
            bottom: keyboard.top
        }

        propagateComposedEvents: true
        onPressed: {
            parent.forceActiveFocus();
            addressBarDialog.keyboardEnabled = false;
            mouse.accepted = false;
        }
    }

    anchors {
        right: parent.right
        left: parent.left

@@ -227,9 +243,9 @@ StackView {
    MouseArea {
        anchors.fill: parent;
        onClicked: {
            if (!addressLine.focus || !HMD.active) {
            addressLine.focus = true;
            addressLine.forceActiveFocus();
            if (HMD.active) {
                addressBarDialog.keyboardEnabled = HMD.active;
            }
            tabletRoot.playButtonClickSound();

@@ -52,8 +52,10 @@ Windows.ScrollingWindow {

    // used to receive messages from interface script
    function fromScript(message) {
        if (loader.item.hasOwnProperty("fromScript")) {
            loader.item.fromScript(message);
        if (loader.item !== null) {
            if (loader.item.hasOwnProperty("fromScript")) {
                loader.item.fromScript(message);
            }
        }
    }
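The change above only forwards a message when the loader has an item and that item actually defines the handler. A hedged C++ sketch of the same guard using Qt's meta-object system; LoadedItem and forwardMessage are illustrative names, not from the repository:

```cpp
#include <QCoreApplication>
#include <QDebug>
#include <QMetaObject>
#include <QVariant>

// Hypothetical loaded item: only some items implement fromScript().
class LoadedItem : public QObject {
    Q_OBJECT
public slots:
    void fromScript(const QVariant& message) {
        qDebug() << "fromScript received:" << message;
    }
};

// Forward a message only if the target exists and actually has the handler,
// mirroring the null / hasOwnProperty guard in the QML above.
void forwardMessage(QObject* item, const QVariant& message) {
    if (!item) {
        return;
    }
    const QMetaObject* meta = item->metaObject();
    if (meta->indexOfMethod(QMetaObject::normalizedSignature("fromScript(QVariant)").constData()) != -1) {
        QMetaObject::invokeMethod(item, "fromScript", Q_ARG(QVariant, message));
    }
}

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    LoadedItem item;
    forwardMessage(&item, QStringLiteral("hello"));      // dispatched
    forwardMessage(nullptr, QStringLiteral("ignored"));  // safely skipped
    return 0;
}

#include "main.moc"
```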
|
@ -81,6 +81,7 @@ Preference {
|
|||
property var avatarBuilder: Component { AvatarPreference { } }
|
||||
property var buttonBuilder: Component { ButtonPreference { } }
|
||||
property var comboBoxBuilder: Component { ComboBoxPreference { } }
|
||||
property var spinnerSliderBuilder: Component { SpinnerSliderPreference { } }
|
||||
property var preferences: []
|
||||
property int checkBoxCount: 0
|
||||
|
||||
|
@ -143,6 +144,10 @@ Preference {
|
|||
//to be not overlapped when drop down is active
|
||||
zpos = root.z + 1000 - itemNum
|
||||
break;
|
||||
case Preference.SpinnerSlider:
|
||||
checkBoxCount = 0;
|
||||
builder = spinnerSliderBuilder;
|
||||
break;
|
||||
};
|
||||
|
||||
if (builder) {
|
||||
|
|
|
@@ -50,7 +50,7 @@ Item {
    id: colors

    // Base colors
    readonly property color baseGray: "#404040"
    readonly property color baseGray: "#393939"
    readonly property color darkGray: "#121212"
    readonly property color baseGrayShadow: "#252525"
    readonly property color baseGrayHighlight: "#575757"

@@ -336,5 +336,6 @@ Item {
    readonly property string source: "\ue01c"
    readonly property string playback_play: "\ue01d"
    readonly property string stop_square: "\ue01e"
    readonly property string avatarTPose: "\ue01f"
}
}

|
@ -69,6 +69,7 @@
|
|||
#include <EntityScriptClient.h>
|
||||
#include <EntityScriptServerLogClient.h>
|
||||
#include <EntityScriptingInterface.h>
|
||||
#include <HoverOverlayInterface.h>
|
||||
#include <ErrorDialog.h>
|
||||
#include <FileScriptingInterface.h>
|
||||
#include <Finally.h>
|
||||
|
@ -112,10 +113,7 @@
|
|||
#include <plugins/InputConfiguration.h>
|
||||
#include <RecordingScriptingInterface.h>
|
||||
#include <RenderableWebEntityItem.h>
|
||||
#include <RenderShadowTask.h>
|
||||
#include <render/RenderFetchCullSortTask.h>
|
||||
#include <RenderDeferredTask.h>
|
||||
#include <RenderForwardTask.h>
|
||||
#include <UpdateSceneTask.h>
|
||||
#include <RenderViewTask.h>
|
||||
#include <SecondaryCamera.h>
|
||||
#include <ResourceCache.h>
|
||||
|
@ -170,6 +168,7 @@
|
|||
#if defined(Q_OS_MAC) || defined(Q_OS_WIN)
|
||||
#include "SpeechRecognizer.h"
|
||||
#endif
|
||||
#include "ui/ResourceImageItem.h"
|
||||
#include "ui/AddressBarDialog.h"
|
||||
#include "ui/AvatarInputs.h"
|
||||
#include "ui/DialogsManager.h"
|
||||
|
@ -590,6 +589,7 @@ bool setupEssentials(int& argc, char** argv, bool runningMarkerExisted) {
|
|||
DependencyManager::set<Snapshot>();
|
||||
DependencyManager::set<CloseEventSender>();
|
||||
DependencyManager::set<ResourceManager>();
|
||||
DependencyManager::set<HoverOverlayInterface>();
|
||||
|
||||
return previousSessionCrashed;
|
||||
}
|
||||
|
@ -952,58 +952,68 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
// Make sure we don't time out during slow operations at startup
|
||||
updateHeartbeat();
|
||||
|
||||
|
||||
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
|
||||
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
|
||||
static const QString TESTER = "HIFI_TESTER";
|
||||
auto gpuIdent = GPUIdent::getInstance();
|
||||
auto glContextData = getGLContextData();
|
||||
QJsonObject properties = {
|
||||
{ "version", applicationVersion() },
|
||||
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
|
||||
{ "previousSessionCrashed", _previousSessionCrashed },
|
||||
{ "previousSessionRuntime", sessionRunTime.get() },
|
||||
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },
|
||||
{ "kernel_type", QSysInfo::kernelType() },
|
||||
{ "kernel_version", QSysInfo::kernelVersion() },
|
||||
{ "os_type", QSysInfo::productType() },
|
||||
{ "os_version", QSysInfo::productVersion() },
|
||||
{ "gpu_name", gpuIdent->getName() },
|
||||
{ "gpu_driver", gpuIdent->getDriver() },
|
||||
{ "gpu_memory", static_cast<qint64>(gpuIdent->getMemory()) },
|
||||
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
|
||||
{ "gl_version", glContextData["version"] },
|
||||
{ "gl_vender", glContextData["vendor"] },
|
||||
{ "gl_sl_version", glContextData["sl_version"] },
|
||||
{ "gl_renderer", glContextData["renderer"] },
|
||||
{ "ideal_thread_count", QThread::idealThreadCount() }
|
||||
};
|
||||
auto macVersion = QSysInfo::macVersion();
|
||||
if (macVersion != QSysInfo::MV_None) {
|
||||
properties["os_osx_version"] = QSysInfo::macVersion();
|
||||
}
|
||||
auto windowsVersion = QSysInfo::windowsVersion();
|
||||
if (windowsVersion != QSysInfo::WV_None) {
|
||||
properties["os_win_version"] = QSysInfo::windowsVersion();
|
||||
}
|
||||
|
||||
ProcessorInfo procInfo;
|
||||
if (getProcessorInfo(procInfo)) {
|
||||
properties["processor_core_count"] = procInfo.numProcessorCores;
|
||||
properties["logical_processor_count"] = procInfo.numLogicalProcessors;
|
||||
properties["processor_l1_cache_count"] = procInfo.numProcessorCachesL1;
|
||||
properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
|
||||
properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
|
||||
}
|
||||
|
||||
// add firstRun flag from settings to launch event
|
||||
Setting::Handle<bool> firstRun { Settings::firstRun, true };
|
||||
properties["first_run"] = firstRun.get();
|
||||
|
||||
// add the user's machine ID to the launch event
|
||||
properties["machine_fingerprint"] = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
|
||||
// once the settings have been loaded, check if we need to flip the default for UserActivityLogger
|
||||
auto& userActivityLogger = UserActivityLogger::getInstance();
|
||||
if (!userActivityLogger.isDisabledSettingSet()) {
|
||||
// the user activity logger is opt-out for Interface
|
||||
// but it's defaulted to disabled for other targets
|
||||
// so we need to enable it here if it has never been disabled by the user
|
||||
userActivityLogger.disable(false);
|
||||
}
|
||||
|
||||
UserActivityLogger::getInstance().logAction("launch", properties);
|
||||
if (userActivityLogger.isEnabled()) {
|
||||
// sessionRunTime will be reset soon by loadSettings. Grab it now to get previous session value.
|
||||
// The value will be 0 if the user blew away settings this session, which is both a feature and a bug.
|
||||
static const QString TESTER = "HIFI_TESTER";
|
||||
auto gpuIdent = GPUIdent::getInstance();
|
||||
auto glContextData = getGLContextData();
|
||||
QJsonObject properties = {
|
||||
{ "version", applicationVersion() },
|
||||
{ "tester", QProcessEnvironment::systemEnvironment().contains(TESTER) },
|
||||
{ "previousSessionCrashed", _previousSessionCrashed },
|
||||
{ "previousSessionRuntime", sessionRunTime.get() },
|
||||
{ "cpu_architecture", QSysInfo::currentCpuArchitecture() },
|
||||
{ "kernel_type", QSysInfo::kernelType() },
|
||||
{ "kernel_version", QSysInfo::kernelVersion() },
|
||||
{ "os_type", QSysInfo::productType() },
|
||||
{ "os_version", QSysInfo::productVersion() },
|
||||
{ "gpu_name", gpuIdent->getName() },
|
||||
{ "gpu_driver", gpuIdent->getDriver() },
|
||||
{ "gpu_memory", static_cast<qint64>(gpuIdent->getMemory()) },
|
||||
{ "gl_version_int", glVersionToInteger(glContextData.value("version").toString()) },
|
||||
{ "gl_version", glContextData["version"] },
|
||||
{ "gl_vender", glContextData["vendor"] },
|
||||
{ "gl_sl_version", glContextData["sl_version"] },
|
||||
{ "gl_renderer", glContextData["renderer"] },
|
||||
{ "ideal_thread_count", QThread::idealThreadCount() }
|
||||
};
|
||||
auto macVersion = QSysInfo::macVersion();
|
||||
if (macVersion != QSysInfo::MV_None) {
|
||||
properties["os_osx_version"] = QSysInfo::macVersion();
|
||||
}
|
||||
auto windowsVersion = QSysInfo::windowsVersion();
|
||||
if (windowsVersion != QSysInfo::WV_None) {
|
||||
properties["os_win_version"] = QSysInfo::windowsVersion();
|
||||
}
|
||||
|
||||
ProcessorInfo procInfo;
|
||||
if (getProcessorInfo(procInfo)) {
|
||||
properties["processor_core_count"] = procInfo.numProcessorCores;
|
||||
properties["logical_processor_count"] = procInfo.numLogicalProcessors;
|
||||
properties["processor_l1_cache_count"] = procInfo.numProcessorCachesL1;
|
||||
properties["processor_l2_cache_count"] = procInfo.numProcessorCachesL2;
|
||||
properties["processor_l3_cache_count"] = procInfo.numProcessorCachesL3;
|
||||
}
|
||||
|
||||
properties["first_run"] = firstRun.get();
|
||||
|
||||
// add the user's machine ID to the launch event
|
||||
properties["machine_fingerprint"] = uuidStringWithoutCurlyBraces(FingerprintUtils::getMachineFingerprint());
|
||||
|
||||
userActivityLogger.logAction("launch", properties);
|
||||
}
|
||||
|
||||
// Tell our entity edit sender about our known jurisdictions
|
||||
_entityEditSender.setServerJurisdictions(&_entityServerJurisdictions);
|
||||
|
@ -1459,6 +1469,14 @@ Application::Application(int& argc, char** argv, QElapsedTimer& startupTimer, bo
|
|||
properties["atp_mapping_requests"] = atpMappingRequests;
|
||||
|
||||
properties["throttled"] = _displayPlugin ? _displayPlugin->isThrottled() : false;
|
||||
|
||||
QJsonObject bytesDownloaded;
|
||||
bytesDownloaded["atp"] = statTracker->getStat(STAT_ATP_RESOURCE_TOTAL_BYTES).toInt();
|
||||
bytesDownloaded["http"] = statTracker->getStat(STAT_HTTP_RESOURCE_TOTAL_BYTES).toInt();
|
||||
bytesDownloaded["file"] = statTracker->getStat(STAT_FILE_RESOURCE_TOTAL_BYTES).toInt();
|
||||
bytesDownloaded["total"] = bytesDownloaded["atp"].toInt() + bytesDownloaded["http"].toInt()
|
||||
+ bytesDownloaded["file"].toInt();
|
||||
properties["bytesDownloaded"] = bytesDownloaded;
|
||||
|
||||
auto myAvatar = getMyAvatar();
|
||||
glm::vec3 avatarPosition = myAvatar->getPosition();
|
||||
|
@ -1697,9 +1715,7 @@ QString Application::getUserAgent() {
|
|||
void Application::toggleTabletUI(bool shouldOpen) const {
|
||||
auto tabletScriptingInterface = DependencyManager::get<TabletScriptingInterface>();
|
||||
auto hmd = DependencyManager::get<HMDScriptingInterface>();
|
||||
TabletProxy* tablet = dynamic_cast<TabletProxy*>(tabletScriptingInterface->getTablet(SYSTEM_TABLET));
|
||||
bool messageOpen = tablet->isMessageDialogOpen();
|
||||
if ((!messageOpen || (messageOpen && !hmd->getShouldShowTablet())) && !(shouldOpen && hmd->getShouldShowTablet())) {
|
||||
if (!(shouldOpen && hmd->getShouldShowTablet())) {
|
||||
auto HMD = DependencyManager::get<HMDScriptingInterface>();
|
||||
HMD->toggleShouldShowTablet();
|
||||
}
|
||||
|
@ -1963,7 +1979,8 @@ void Application::initializeGL() {
|
|||
render::CullFunctor cullFunctor = LODManager::shouldRender;
|
||||
static const QString RENDER_FORWARD = "HIFI_RENDER_FORWARD";
|
||||
bool isDeferred = !QProcessEnvironment::systemEnvironment().contains(RENDER_FORWARD);
|
||||
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraFrame", cullFunctor);
|
||||
_renderEngine->addJob<UpdateSceneTask>("UpdateScene");
|
||||
_renderEngine->addJob<SecondaryCameraRenderTask>("SecondaryCameraJob", cullFunctor);
|
||||
_renderEngine->addJob<RenderViewTask>("RenderMainView", cullFunctor, isDeferred);
|
||||
_renderEngine->load();
|
||||
_renderEngine->registerScene(_main3DScene);
|
||||
|
@ -2011,6 +2028,7 @@ void Application::initializeUi() {
|
|||
LoginDialog::registerType();
|
||||
Tooltip::registerType();
|
||||
UpdateDialog::registerType();
|
||||
qmlRegisterType<ResourceImageItem>("Hifi", 1, 0, "ResourceImageItem");
|
||||
qmlRegisterType<Preference>("Hifi", 1, 0, "Preference");
|
||||
|
||||
auto offscreenUi = DependencyManager::get<OffscreenUi>();
|
||||
|
@ -2106,6 +2124,7 @@ void Application::initializeUi() {
|
|||
surfaceContext->setContextProperty("ApplicationCompositor", &getApplicationCompositor());
|
||||
|
||||
surfaceContext->setContextProperty("AvatarInputs", AvatarInputs::getInstance());
|
||||
surfaceContext->setContextProperty("HoverOverlay", DependencyManager::get<HoverOverlayInterface>().data());
|
||||
|
||||
if (auto steamClient = PluginManager::getInstance()->getSteamClientPlugin()) {
|
||||
surfaceContext->setContextProperty("Steam", new SteamScriptingInterface(engine, steamClient.get()));
|
||||
|
@@ -2719,56 +2738,53 @@ bool Application::importSVOFromURL(const QString& urlString) {
    return true;
}

bool _renderRequested { false };

bool Application::event(QEvent* event) {
    if (!Menu::getInstance()) {
        return false;
    }

    // Presentation/painting logic
    // TODO: Decouple presentation and painting loops
    static bool isPaintingThrottled = false;
    if ((int)event->type() == (int)Present) {
        if (isPaintingThrottled) {
            // If painting (triggered by presentation) is hogging the main thread,
            // repost as low priority to avoid hanging the GUI.
            // This has the effect of allowing presentation to exceed the paint budget by X times and
            // only dropping every (1/X) frames, instead of every ceil(X) frames
            // (e.g. at a 60FPS target, painting for 17us would fall to 58.82FPS instead of 30FPS).
            removePostedEvents(this, Present);
            postEvent(this, new QEvent(static_cast<QEvent::Type>(Present)), Qt::LowEventPriority);
            isPaintingThrottled = false;
    int type = event->type();
    switch (type) {
        case Event::Lambda:
            static_cast<LambdaEvent*>(event)->call();
            return true;
        }

        float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
        if (shouldPaint(nsecsElapsed)) {
            _lastTimeUpdated.start();
            idle(nsecsElapsed);
            postEvent(this, new QEvent(static_cast<QEvent::Type>(Paint)), Qt::HighEventPriority);
        }
        isPaintingThrottled = true;
        // Explicit idle keeps the idle running at a lower interval, but without any rendering
        // see (windowMinimizedChanged)
        case Event::Idle:
        {
            float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
            _lastTimeUpdated.start();
            idle(nsecsElapsed);
        }
        return true;

        return true;
    } else if ((int)event->type() == (int)Paint) {
        // NOTE: This must be updated as close to painting as possible,
        // or AvatarInputs will mysteriously move to the bottom-right
        AvatarInputs::getInstance()->update();
        case Event::Present:
            if (!_renderRequested) {
                float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
                if (shouldPaint(nsecsElapsed)) {
                    _renderRequested = true;
                    _lastTimeUpdated.start();
                    idle(nsecsElapsed);
                    postEvent(this, new QEvent(static_cast<QEvent::Type>(Paint)), Qt::HighEventPriority);
                }
            }
            return true;

        paintGL();
        case Event::Paint:
            // NOTE: This must be updated as close to painting as possible,
            // or AvatarInputs will mysteriously move to the bottom-right
            AvatarInputs::getInstance()->update();
            paintGL();
            // wait for the next present event before starting idle / paint again
            removePostedEvents(this, Present);
            _renderRequested = false;
            return true;

        isPaintingThrottled = false;

        return true;
    } else if ((int)event->type() == (int)Idle) {
        float nsecsElapsed = (float)_lastTimeUpdated.nsecsElapsed();
        idle(nsecsElapsed);

        return true;
    }

    if ((int)event->type() == (int)Lambda) {
        static_cast<LambdaEvent*>(event)->call();
        return true;
        default:
            break;
    }

    {
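The rewritten Application::event above replaces the old if/else chain with a switch and gates painting on a _renderRequested flag, so presentation can outrun painting without piling up work. A self-contained sketch of that gating pattern with Qt's event queue; the event ids, Throttler class, and timer here are illustrative stand-ins, not the application's real types:

```cpp
#include <QCoreApplication>
#include <QEvent>
#include <QTimer>
#include <QDebug>

// Illustrative custom event types; the real application defines its own Present/Paint enum.
static const QEvent::Type PresentEvent = static_cast<QEvent::Type>(QEvent::registerEventType());
static const QEvent::Type PaintEvent   = static_cast<QEvent::Type>(QEvent::registerEventType());

class Throttler : public QObject {
public:
    bool event(QEvent* e) override {
        if (e->type() == PresentEvent) {
            // Only schedule a paint if the previous one has finished.
            if (!_renderRequested) {
                _renderRequested = true;
                QCoreApplication::postEvent(this, new QEvent(PaintEvent), Qt::HighEventPriority);
            }
            return true;
        }
        if (e->type() == PaintEvent) {
            qDebug() << "paint" << ++_frames;
            // Drop presents that piled up while painting, then accept new ones.
            QCoreApplication::removePostedEvents(this, PresentEvent);
            _renderRequested = false;
            return true;
        }
        return QObject::event(e);
    }
private:
    bool _renderRequested { false };
    int _frames { 0 };
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    Throttler throttler;

    // Simulate a presentation source firing faster than we want to paint.
    QTimer presentSource;
    QObject::connect(&presentSource, &QTimer::timeout, [&] {
        QCoreApplication::postEvent(&throttler, new QEvent(PresentEvent), Qt::LowEventPriority);
    });
    presentSource.start(5);

    QTimer::singleShot(200, &app, &QCoreApplication::quit);
    return app.exec();
}
```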
@ -3156,59 +3172,6 @@ void Application::keyPressEvent(QKeyEvent* event) {
|
|||
break;
|
||||
#endif
|
||||
|
||||
case Qt::Key_H: {
|
||||
// whenever switching to/from full screen mirror from the keyboard, remember
|
||||
// the state you were in before full screen mirror, and return to that.
|
||||
auto previousMode = _myCamera.getMode();
|
||||
if (previousMode != CAMERA_MODE_MIRROR) {
|
||||
switch (previousMode) {
|
||||
case CAMERA_MODE_FIRST_PERSON:
|
||||
_returnFromFullScreenMirrorTo = MenuOption::FirstPerson;
|
||||
break;
|
||||
case CAMERA_MODE_THIRD_PERSON:
|
||||
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
|
||||
break;
|
||||
|
||||
// FIXME - it's not clear that these modes make sense to return to...
|
||||
case CAMERA_MODE_INDEPENDENT:
|
||||
_returnFromFullScreenMirrorTo = MenuOption::IndependentMode;
|
||||
break;
|
||||
case CAMERA_MODE_ENTITY:
|
||||
_returnFromFullScreenMirrorTo = MenuOption::CameraEntityMode;
|
||||
break;
|
||||
|
||||
default:
|
||||
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
bool isMirrorChecked = Menu::getInstance()->isOptionChecked(MenuOption::FullscreenMirror);
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::FullscreenMirror, !isMirrorChecked);
|
||||
if (isMirrorChecked) {
|
||||
|
||||
// if we got here without coming in from a non-Full Screen mirror case, then our
|
||||
// _returnFromFullScreenMirrorTo is unknown. In that case we'll go to the old
|
||||
// behavior of returning to ThirdPerson
|
||||
if (_returnFromFullScreenMirrorTo.isEmpty()) {
|
||||
_returnFromFullScreenMirrorTo = MenuOption::ThirdPerson;
|
||||
}
|
||||
Menu::getInstance()->setIsOptionChecked(_returnFromFullScreenMirrorTo, true);
|
||||
}
|
||||
cameraMenuChanged();
|
||||
break;
|
||||
}
|
||||
|
||||
case Qt::Key_P: {
|
||||
if (!(isShifted || isMeta || isOption)) {
|
||||
bool isFirstPersonChecked = Menu::getInstance()->isOptionChecked(MenuOption::FirstPerson);
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::FirstPerson, !isFirstPersonChecked);
|
||||
Menu::getInstance()->setIsOptionChecked(MenuOption::ThirdPerson, isFirstPersonChecked);
|
||||
cameraMenuChanged();
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
case Qt::Key_Slash:
|
||||
Menu::getInstance()->triggerOption(MenuOption::Stats);
|
||||
break;
|
||||
|
@ -3783,8 +3746,8 @@ void updateCpuInformation() {
|
|||
// Update friendly structure
|
||||
auto& myCpuInfo = myCpuInfos[i];
|
||||
myCpuInfo.update(cpuInfo);
|
||||
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
|
||||
{ "kernel", myCpuInfo.kernelUsage },
|
||||
PROFILE_COUNTER(app, myCpuInfo.name.c_str(), {
|
||||
{ "kernel", myCpuInfo.kernelUsage },
|
||||
{ "user", myCpuInfo.userUsage }
|
||||
});
|
||||
}
|
||||
|
@ -3851,7 +3814,7 @@ void getCpuUsage(vec3& systemAndUser) {
|
|||
void setupCpuMonitorThread() {
|
||||
initCpuUsage();
|
||||
auto cpuMonitorThread = QThread::currentThread();
|
||||
|
||||
|
||||
QTimer* timer = new QTimer();
|
||||
timer->setInterval(50);
|
||||
QObject::connect(timer, &QTimer::timeout, [] {
|
||||
|
@ -5368,7 +5331,7 @@ namespace render {
|
|||
|
||||
auto& batch = *args->_batch;
|
||||
DependencyManager::get<GeometryCache>()->bindSimpleProgram(batch);
|
||||
renderWorldBox(batch);
|
||||
renderWorldBox(args, batch);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -5431,10 +5394,7 @@ void Application::displaySide(RenderArgs* renderArgs, Camera& theCamera, bool se
|
|||
}
|
||||
|
||||
{
|
||||
PerformanceTimer perfTimer("SceneProcessTransaction");
|
||||
_main3DScene->enqueueTransaction(transaction);
|
||||
|
||||
_main3DScene->processTransactionQueue();
|
||||
}
|
||||
|
||||
// For now every frame pass the renderContext
|
||||
|
@ -5488,6 +5448,10 @@ void Application::updateWindowTitle() const {
|
|||
qCDebug(interfaceapp, "Application title set to: %s", title.toStdString().c_str());
|
||||
#endif
|
||||
_window->setWindowTitle(title);
|
||||
|
||||
// updateTitleWindow gets called whenever there's a change regarding the domain, so rather
|
||||
// than placing this within domainChanged, it's placed here to cover the other potential cases.
|
||||
DependencyManager::get< MessagesClient >()->sendLocalMessage("Toolbar-DomainChanged", "");
|
||||
}
|
||||
|
||||
void Application::clearDomainOctreeDetails() {
|
||||
|
@ -5873,6 +5837,7 @@ void Application::registerScriptEngineWithApplicationServices(ScriptEngine* scri
|
|||
auto entityScriptServerLog = DependencyManager::get<EntityScriptServerLogClient>();
|
||||
scriptEngine->registerGlobalObject("EntityScriptServerLog", entityScriptServerLog.data());
|
||||
scriptEngine->registerGlobalObject("AvatarInputs", AvatarInputs::getInstance());
|
||||
scriptEngine->registerGlobalObject("HoverOverlay", DependencyManager::get<HoverOverlayInterface>().data());
|
||||
|
||||
qScriptRegisterMetaType(scriptEngine, OverlayIDtoScriptValue, OverlayIDfromScriptValue);
|
||||
|
||||
|
@@ -6644,11 +6609,11 @@ void Application::setPreviousScriptLocation(const QString& location) {
}

void Application::loadScriptURLDialog() const {
    auto newScript = OffscreenUi::getText(OffscreenUi::ICON_NONE, "Open and Run Script", "Script URL");
    QString newScript = OffscreenUi::getText(OffscreenUi::ICON_NONE, "Open and Run Script", "Script URL");
    if (QUrl(newScript).scheme() == "atp") {
        OffscreenUi::warning("Error Loading Script", "Cannot load client script over ATP");
    } else if (!newScript.isEmpty()) {
        DependencyManager::get<ScriptEngines>()->loadScript(newScript);
        DependencyManager::get<ScriptEngines>()->loadScript(newScript.trimmed());
    }
}

@@ -7125,6 +7090,12 @@ void Application::updateDisplayMode() {
    // reset the avatar, to set head and hand palms back to a reasonable default pose.
    getMyAvatar()->reset(false);

    // switch to first person if entering hmd and setting is checked
    if (isHmd && menu->isOptionChecked(MenuOption::FirstPersonHMD)) {
        menu->setIsOptionChecked(MenuOption::FirstPerson, true);
        cameraMenuChanged();
    }

    Q_ASSERT_X(_displayPlugin, "Application::updateDisplayMode", "could not find an activated display plugin");
}

@ -678,7 +678,7 @@ private:
|
|||
QTimer _addAssetToWorldErrorTimer;
|
||||
|
||||
FileScriptingInterface* _fileDownload;
|
||||
AudioInjector* _snapshotSoundInjector { nullptr };
|
||||
AudioInjectorPointer _snapshotSoundInjector;
|
||||
SharedSoundPointer _snapshotSound;
|
||||
|
||||
DisplayPluginPointer _autoSwitchDisplayModeSupportedHMDPlugin;
|
||||
|
|
|
@@ -223,7 +223,7 @@ Menu::Menu() {

    // View > First Person
    cameraModeGroup->addAction(addCheckableActionToQMenuAndActionHash(viewMenu,
        MenuOption::FirstPerson, 0, // QML Qt:: Key_P
        MenuOption::FirstPerson, 0,
        true, qApp, SLOT(cameraMenuChanged())));

    // View > Third Person

@@ -233,7 +233,7 @@ Menu::Menu() {

    // View > Mirror
    cameraModeGroup->addAction(addCheckableActionToQMenuAndActionHash(viewMenu,
        MenuOption::FullscreenMirror, 0, // QML Qt::Key_H,
        MenuOption::FullscreenMirror, 0,
        false, qApp, SLOT(cameraMenuChanged())));

    // View > Independent [advanced]

@@ -258,6 +258,9 @@ Menu::Menu() {
    // View > Overlays
    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::Overlays, 0, true);

    // View > Enter First Person Mode in HMD
    addCheckableActionToQMenuAndActionHash(viewMenu, MenuOption::FirstPersonHMD, 0, true);

    // Navigate menu ----------------------------------
    MenuWrapper* navigateMenu = addMenu("Navigate");

@@ -319,7 +322,7 @@ Menu::Menu() {
        QString("../../hifi/tablet/TabletLodPreferences.qml"), "LodPreferencesDialog");
    });

    action = addActionToQMenuAndActionHash(settingsMenu, "Controller Settings");
    action = addActionToQMenuAndActionHash(settingsMenu, "Controller Settings...");
    connect(action, &QAction::triggered, [] {
        auto tablet = DependencyManager::get<TabletScriptingInterface>()->getTablet("com.highfidelity.interface.tablet.system");
        auto hmd = DependencyManager::get<HMDScriptingInterface>();

@@ -677,7 +680,7 @@ Menu::Menu() {
    // Developer > Physics >>>
    MenuWrapper* physicsOptionsMenu = developerMenu->addMenu("Physics");
    {
        auto drawStatusConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<render::DrawStatus>();
        auto drawStatusConfig = qApp->getRenderEngine()->getConfiguration()->getConfig<render::DrawStatus>("RenderMainView.DrawStatus");
        addCheckableActionToQMenuAndActionHash(physicsOptionsMenu, MenuOption::PhysicsShowOwned,
            0, false, drawStatusConfig, SLOT(setShowNetwork(bool)));
    }

@@ -105,6 +105,7 @@ namespace MenuOption {
    const QString ExpandPhysicsSimulationTiming = "Expand /physics";
    const QString ExpandUpdateTiming = "Expand /update";
    const QString FirstPerson = "First Person";
    const QString FirstPersonHMD = "Enter First Person Mode in HMD";
    const QString FivePointCalibration = "5 Point Calibration";
    const QString FixGaze = "Fix Gaze (no saccade)";
    const QString Forward = "Forward";
|
|
|
@ -9,9 +9,11 @@
|
|||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#include "Application.h"
|
||||
#include "SecondaryCamera.h"
|
||||
#include <TextureCache.h>
|
||||
#include <gpu/Context.h>
|
||||
#include <EntityScriptingInterface.h>
|
||||
|
||||
using RenderArgsPointer = std::shared_ptr<RenderArgs>;
|
||||
|
||||
|
@ -27,39 +29,32 @@ void MainRenderTask::build(JobModel& task, const render::Varying& inputs, render
|
|||
}
|
||||
}
|
||||
|
||||
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
|
||||
bool wasEnabled = isEnabled();
|
||||
setEnabled(false);
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
|
||||
setEnabled(wasEnabled);
|
||||
}
|
||||
|
||||
void SecondaryCameraRenderTaskConfig::resetSizeSpectatorCamera(int width, int height) { // Carefully adjust the framebuffer / texture.
|
||||
resetSize(width, height);
|
||||
}
|
||||
|
||||
class BeginSecondaryCameraFrame { // Changes renderContext for our framebuffer and and view.
|
||||
class SecondaryCameraJob { // Changes renderContext for our framebuffer and view.
|
||||
QUuid _attachedEntityId{};
|
||||
glm::vec3 _position{};
|
||||
glm::quat _orientation{};
|
||||
float _vFoV{};
|
||||
float _nearClipPlaneDistance{};
|
||||
float _farClipPlaneDistance{};
|
||||
EntityPropertyFlags _attachedEntityPropertyFlags;
|
||||
QSharedPointer<EntityScriptingInterface> _entityScriptingInterface;
|
||||
public:
|
||||
using Config = BeginSecondaryCameraFrameConfig;
|
||||
using JobModel = render::Job::ModelO<BeginSecondaryCameraFrame, RenderArgsPointer, Config>;
|
||||
BeginSecondaryCameraFrame() {
|
||||
using Config = SecondaryCameraJobConfig;
|
||||
using JobModel = render::Job::ModelO<SecondaryCameraJob, RenderArgsPointer, Config>;
|
||||
SecondaryCameraJob() {
|
||||
_cachedArgsPointer = std::make_shared<RenderArgs>(_cachedArgs);
|
||||
_entityScriptingInterface = DependencyManager::get<EntityScriptingInterface>();
|
||||
_attachedEntityPropertyFlags += PROP_POSITION;
|
||||
_attachedEntityPropertyFlags += PROP_ROTATION;
|
||||
}
|
||||
|
||||
void configure(const Config& config) {
|
||||
if (config.enabled || config.alwaysEnabled) {
|
||||
_position = config.position;
|
||||
_orientation = config.orientation;
|
||||
_vFoV = config.vFoV;
|
||||
_nearClipPlaneDistance = config.nearClipPlaneDistance;
|
||||
_farClipPlaneDistance = config.farClipPlaneDistance;
|
||||
}
|
||||
_attachedEntityId = config.attachedEntityId;
|
||||
_position = config.position;
|
||||
_orientation = config.orientation;
|
||||
_vFoV = config.vFoV;
|
||||
_nearClipPlaneDistance = config.nearClipPlaneDistance;
|
||||
_farClipPlaneDistance = config.farClipPlaneDistance;
|
||||
}
|
||||
|
||||
void run(const render::RenderContextPointer& renderContext, RenderArgsPointer& cachedArgs) {
|
||||
|
@ -83,8 +78,14 @@ public:
|
|||
});
|
||||
|
||||
auto srcViewFrustum = args->getViewFrustum();
|
||||
srcViewFrustum.setPosition(_position);
|
||||
srcViewFrustum.setOrientation(_orientation);
|
||||
if (!_attachedEntityId.isNull()) {
|
||||
EntityItemProperties entityProperties = _entityScriptingInterface->getEntityProperties(_attachedEntityId, _attachedEntityPropertyFlags);
|
||||
srcViewFrustum.setPosition(entityProperties.getPosition());
|
||||
srcViewFrustum.setOrientation(entityProperties.getRotation());
|
||||
} else {
|
||||
srcViewFrustum.setPosition(_position);
|
||||
srcViewFrustum.setOrientation(_orientation);
|
||||
}
|
||||
srcViewFrustum.setProjection(glm::perspective(glm::radians(_vFoV), ((float)args->_viewport.z / (float)args->_viewport.w), _nearClipPlaneDistance, _farClipPlaneDistance));
|
||||
// Without calculating the bound planes, the secondary camera will use the same culling frustum as the main camera,
|
||||
// which is not what we want here.
|
||||
|
@ -99,6 +100,41 @@ protected:
|
|||
RenderArgsPointer _cachedArgsPointer;
|
||||
};
|
||||
|
||||
void SecondaryCameraJobConfig::setPosition(glm::vec3 pos) {
|
||||
if (attachedEntityId.isNull()) {
|
||||
position = pos;
|
||||
emit dirty();
|
||||
} else {
|
||||
qDebug() << "ERROR: Cannot set position of SecondaryCamera while attachedEntityId is set.";
|
||||
}
|
||||
}
|
||||
|
||||
void SecondaryCameraJobConfig::setOrientation(glm::quat orient) {
|
||||
if (attachedEntityId.isNull()) {
|
||||
orientation = orient;
|
||||
emit dirty();
|
||||
} else {
|
||||
qDebug() << "ERROR: Cannot set orientation of SecondaryCamera while attachedEntityId is set.";
|
||||
}
|
||||
}
|
||||
|
||||
void SecondaryCameraJobConfig::enableSecondaryCameraRenderConfigs(bool enabled) {
|
||||
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->setEnabled(enabled);
|
||||
setEnabled(enabled);
|
||||
}
|
||||
|
||||
void SecondaryCameraJobConfig::resetSizeSpectatorCamera(int width, int height) { // Carefully adjust the framebuffer / texture.
|
||||
qApp->getRenderEngine()->getConfiguration()->getConfig<SecondaryCameraRenderTask>()->resetSize(width, height);
|
||||
}
|
||||
|
||||
void SecondaryCameraRenderTaskConfig::resetSize(int width, int height) { // FIXME: Add an arg here for "destinationFramebuffer"
|
||||
bool wasEnabled = isEnabled();
|
||||
setEnabled(false);
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
textureCache->resetSpectatorCameraFramebuffer(width, height); // FIXME: Call the correct reset function based on the "destinationFramebuffer" arg
|
||||
setEnabled(wasEnabled);
|
||||
}
|
||||
|
||||
class EndSecondaryCameraFrame { // Restores renderContext.
|
||||
public:
|
||||
using JobModel = render::Job::ModelI<EndSecondaryCameraFrame, RenderArgsPointer>;
|
||||
|
@ -119,7 +155,7 @@ public:
|
|||
};
|
||||
|
||||
void SecondaryCameraRenderTask::build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor) {
|
||||
const auto cachedArg = task.addJob<BeginSecondaryCameraFrame>("BeginSecondaryCamera");
|
||||
const auto cachedArg = task.addJob<SecondaryCameraJob>("SecondaryCamera");
|
||||
const auto items = task.addJob<RenderFetchCullSortTask>("FetchCullSort", cullFunctor);
|
||||
assert(items.canCast<RenderFetchCullSortTask::Output>());
|
||||
task.addJob<RenderDeferredTask>("RenderDeferredTask", items);
|
||||
|
|
|
@@ -28,34 +28,40 @@ public:
    void build(JobModel& task, const render::Varying& inputs, render::Varying& outputs, render::CullFunctor cullFunctor, bool isDeferred = true);
};

class BeginSecondaryCameraFrameConfig : public render::Task::Config { // Exposes secondary camera parameters to JavaScript.
class SecondaryCameraJobConfig : public render::Task::Config { // Exposes secondary camera parameters to JavaScript.
    Q_OBJECT
    Q_PROPERTY(glm::vec3 position MEMBER position NOTIFY dirty) // of viewpoint to render from
    Q_PROPERTY(glm::quat orientation MEMBER orientation NOTIFY dirty) // of viewpoint to render from
    Q_PROPERTY(QUuid attachedEntityId MEMBER attachedEntityId NOTIFY dirty) // entity whose properties define camera position and orientation
    Q_PROPERTY(glm::vec3 position READ getPosition WRITE setPosition) // of viewpoint to render from
    Q_PROPERTY(glm::quat orientation READ getOrientation WRITE setOrientation) // of viewpoint to render from
    Q_PROPERTY(float vFoV MEMBER vFoV NOTIFY dirty) // Secondary camera's vertical field of view. In degrees.
    Q_PROPERTY(float nearClipPlaneDistance MEMBER nearClipPlaneDistance NOTIFY dirty) // Secondary camera's near clip plane distance. In meters.
    Q_PROPERTY(float farClipPlaneDistance MEMBER farClipPlaneDistance NOTIFY dirty) // Secondary camera's far clip plane distance. In meters.
public:
    QUuid attachedEntityId{};
    glm::vec3 position{};
    glm::quat orientation{};
    float vFoV{ 45.0f };
    float nearClipPlaneDistance{ 0.1f };
    float farClipPlaneDistance{ 100.0f };
    BeginSecondaryCameraFrameConfig() : render::Task::Config(false) {}
    float vFoV{ DEFAULT_FIELD_OF_VIEW_DEGREES };
    float nearClipPlaneDistance{ DEFAULT_NEAR_CLIP };
    float farClipPlaneDistance{ DEFAULT_FAR_CLIP };
    SecondaryCameraJobConfig() : render::Task::Config(false) {}
signals:
    void dirty();
public slots:
    glm::vec3 getPosition() { return position; }
    void setPosition(glm::vec3 pos);
    glm::quat getOrientation() { return orientation; }
    void setOrientation(glm::quat orient);
    void enableSecondaryCameraRenderConfigs(bool enabled);
    void resetSizeSpectatorCamera(int width, int height);
};

class SecondaryCameraRenderTaskConfig : public render::Task::Config {
    Q_OBJECT
public:
    SecondaryCameraRenderTaskConfig() : render::Task::Config(false) {}
private:
    void resetSize(int width, int height);
signals:
    void dirty();
public slots:
    void resetSizeSpectatorCamera(int width, int height);
};

class SecondaryCameraRenderTask {
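SecondaryCameraJobConfig switches position and orientation from MEMBER properties to READ/WRITE accessors so a write can be rejected while attachedEntityId is set. A small stand-alone sketch of that guard; QVector3D stands in for glm::vec3 and all names here are illustrative, not the repository's types:

```cpp
#include <QCoreApplication>
#include <QDebug>
#include <QUuid>
#include <QVector3D>

// Stand-in for the job config: position is only writable while no entity is attached.
class CameraConfigSketch : public QObject {
    Q_OBJECT
    Q_PROPERTY(QVector3D position READ position WRITE setPosition NOTIFY dirty)
    Q_PROPERTY(QUuid attachedEntityId MEMBER _attachedEntityId NOTIFY dirty)
public:
    QVector3D position() const { return _position; }
    void setPosition(const QVector3D& pos) {
        if (_attachedEntityId.isNull()) {
            _position = pos;
            emit dirty();
        } else {
            qDebug() << "ERROR: Cannot set position while attachedEntityId is set.";
        }
    }
signals:
    void dirty();
private:
    QUuid _attachedEntityId;
    QVector3D _position;
};

int main(int argc, char** argv) {
    QCoreApplication app(argc, argv);
    CameraConfigSketch config;

    config.setProperty("position", QVariant::fromValue(QVector3D(0, 1, -2)));  // accepted
    config.setProperty("attachedEntityId", QUuid::createUuid());
    config.setProperty("position", QVariant::fromValue(QVector3D(5, 5, 5)));   // rejected with an error
    qDebug() << "final position:" << config.property("position");
    return 0;
}

#include "main.moc"
```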
|
|
@ -34,7 +34,7 @@
|
|||
|
||||
using namespace std;
|
||||
|
||||
void renderWorldBox(gpu::Batch& batch) {
|
||||
void renderWorldBox(RenderArgs* args, gpu::Batch& batch) {
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
|
||||
// Show center of world
|
||||
|
@ -115,7 +115,7 @@ void renderWorldBox(gpu::Batch& batch) {
|
|||
geometryIds[17]);
|
||||
|
||||
|
||||
geometryCache->renderWireCubeInstance(batch, GREY4);
|
||||
geometryCache->renderWireCubeInstance(args, batch, GREY4);
|
||||
|
||||
// Draw meter markers along the 3 axis to help with measuring things
|
||||
const float MARKER_DISTANCE = 1.0f;
|
||||
|
@ -123,23 +123,23 @@ void renderWorldBox(gpu::Batch& batch) {
|
|||
|
||||
transform = Transform().setScale(MARKER_RADIUS);
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSolidSphereInstance(batch, RED);
|
||||
geometryCache->renderSolidSphereInstance(args, batch, RED);
|
||||
|
||||
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, 0.0f)).setScale(MARKER_RADIUS);
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSolidSphereInstance(batch, RED);
|
||||
geometryCache->renderSolidSphereInstance(args, batch, RED);
|
||||
|
||||
transform = Transform().setTranslation(glm::vec3(0.0f, MARKER_DISTANCE, 0.0f)).setScale(MARKER_RADIUS);
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSolidSphereInstance(batch, GREEN);
|
||||
geometryCache->renderSolidSphereInstance(args, batch, GREEN);
|
||||
|
||||
transform = Transform().setTranslation(glm::vec3(0.0f, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSolidSphereInstance(batch, BLUE);
|
||||
geometryCache->renderSolidSphereInstance(args, batch, BLUE);
|
||||
|
||||
transform = Transform().setTranslation(glm::vec3(MARKER_DISTANCE, 0.0f, MARKER_DISTANCE)).setScale(MARKER_RADIUS);
|
||||
batch.setModelTransform(transform);
|
||||
geometryCache->renderSolidSphereInstance(batch, GREY);
|
||||
geometryCache->renderSolidSphereInstance(args, batch, GREY);
|
||||
}
|
||||
|
||||
// Do some basic timing tests and report the results
|
||||
|
|
|
@ -16,8 +16,9 @@
|
|||
#include <glm/gtc/quaternion.hpp>
|
||||
|
||||
#include <gpu/Batch.h>
|
||||
#include <render/Forward.h>
|
||||
|
||||
void renderWorldBox(gpu::Batch& batch);
|
||||
void renderWorldBox(RenderArgs* args, gpu::Batch& batch);
|
||||
|
||||
void runTimingTests();
|
||||
void runUnitTests();
|
||||
|
|
|
@ -63,7 +63,6 @@ AvatarManager::AvatarManager(QObject* parent) :
|
|||
packetReceiver.registerListener(PacketType::BulkAvatarData, this, "processAvatarDataPacket");
|
||||
packetReceiver.registerListener(PacketType::KillAvatar, this, "processKillAvatar");
|
||||
packetReceiver.registerListener(PacketType::AvatarIdentity, this, "processAvatarIdentityPacket");
|
||||
packetReceiver.registerListener(PacketType::ExitingSpaceBubble, this, "processExitingSpaceBubble");
|
||||
|
||||
// when we hear that the user has ignored an avatar by session UUID
|
||||
// immediately remove that avatar instead of waiting for the absence of packets from avatar mixer
|
||||
|
@ -320,9 +319,6 @@ void AvatarManager::handleRemovedAvatar(const AvatarSharedPointer& removedAvatar
|
|||
|
||||
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble) {
|
||||
emit DependencyManager::get<UsersScriptingInterface>()->enteredIgnoreRadius();
|
||||
}
|
||||
if (removalReason == KillAvatarReason::TheirAvatarEnteredYourBubble || removalReason == YourAvatarEnteredTheirBubble) {
|
||||
DependencyManager::get<NodeList>()->radiusIgnoreNodeBySessionID(avatar->getSessionUUID(), true);
|
||||
} else if (removalReason == KillAvatarReason::AvatarDisconnected) {
|
||||
// remove from node sets, if present
|
||||
DependencyManager::get<NodeList>()->removeFromIgnoreMuteSets(avatar->getSessionUUID());
|
||||
|
@ -434,8 +430,7 @@ void AvatarManager::handleCollisionEvents(const CollisionEvents& collisionEvents
|
|||
// but most avatars are roughly the same size, so let's not be so fancy yet.
|
||||
const float AVATAR_STRETCH_FACTOR = 1.0f;
|
||||
|
||||
|
||||
_collisionInjectors.remove_if([](QPointer<AudioInjector>& injector) {
|
||||
_collisionInjectors.remove_if([](const AudioInjectorPointer& injector) {
|
||||
return !injector || injector->isFinished();
|
||||
});
|
||||
|
||||
|
|
|
@ -22,11 +22,11 @@
|
|||
#include <SimpleMovingAverage.h>
|
||||
#include <shared/RateCounter.h>
|
||||
#include <avatars-renderer/ScriptAvatar.h>
|
||||
#include <AudioInjector.h>
|
||||
|
||||
#include "AvatarMotionState.h"
|
||||
#include "MyAvatar.h"
|
||||
|
||||
class AudioInjector;
|
||||
|
||||
class AvatarManager : public AvatarHashMap {
|
||||
Q_OBJECT
|
||||
|
@ -104,7 +104,7 @@ private:
|
|||
std::shared_ptr<MyAvatar> _myAvatar;
|
||||
quint64 _lastSendAvatarDataTime = 0; // Controls MyAvatar send data rate.
|
||||
|
||||
std::list<QPointer<AudioInjector>> _collisionInjectors;
|
||||
std::list<AudioInjectorPointer> _collisionInjectors;
|
||||
|
||||
RateCounter<> _myAvatarSendRate;
|
||||
int _numAvatarsUpdated { 0 };
|
||||
|
|
|
@@ -69,8 +69,8 @@ const float MAX_BOOST_SPEED = 0.5f * MAX_WALKING_SPEED; // action motor gets add
const float MIN_AVATAR_SPEED = 0.05f;
const float MIN_AVATAR_SPEED_SQUARED = MIN_AVATAR_SPEED * MIN_AVATAR_SPEED; // speed is set to zero below this

const float YAW_SPEED_DEFAULT = 120.0f; // degrees/sec
const float PITCH_SPEED_DEFAULT = 90.0f; // degrees/sec
const float YAW_SPEED_DEFAULT = 100.0f; // degrees/sec
const float PITCH_SPEED_DEFAULT = 75.0f; // degrees/sec

// TODO: normalize avatar speed for standard avatar size, then scale all motion logic
// to properly follow avatar size.
|
@ -295,7 +295,7 @@ void MyAvatar::simulateAttachments(float deltaTime) {
|
|||
// don't update attachments here, do it in harvestResultsFromPhysicsSimulation()
|
||||
}
|
||||
|
||||
QByteArray MyAvatar::toByteArrayStateful(AvatarDataDetail dataDetail) {
|
||||
QByteArray MyAvatar::toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) {
|
||||
CameraMode mode = qApp->getCamera().getMode();
|
||||
_globalPosition = getPosition();
|
||||
// This might not be right! Isn't the capsule local offset in avatar space, and don't we need to add the radius to the y as well? -HRS 5/26/17
|
||||
|
@ -1075,9 +1075,6 @@ void MyAvatar::loadData() {
|
|||
|
||||
getHead()->setBasePitch(loadSetting(settings, "headPitch", 0.0f));
|
||||
|
||||
_targetScale = loadSetting(settings, "scale", 1.0f);
|
||||
setScale(glm::vec3(_targetScale));
|
||||
|
||||
_prefOverrideAnimGraphUrl.set(QUrl(settings.value("animGraphURL", "").toString()));
|
||||
_fullAvatarURLFromPreferences = settings.value("fullAvatarURL", AvatarData::defaultFullAvatarModelUrl()).toUrl();
|
||||
_fullAvatarModelName = settings.value("fullAvatarModelName", DEFAULT_FULL_AVATAR_MODEL_NAME).toString();
|
||||
|
@ -1356,6 +1353,7 @@ void MyAvatar::setSkeletonModelURL(const QUrl& skeletonModelURL) {
|
|||
Avatar::setSkeletonModelURL(skeletonModelURL);
|
||||
_skeletonModel->setVisibleInScene(true, qApp->getMain3DScene());
|
||||
_headBoneSet.clear();
|
||||
emit skeletonChanged();
|
||||
}
|
||||
|
||||
|
||||
|
@ -2226,6 +2224,14 @@ void MyAvatar::clampScaleChangeToDomainLimits(float desiredScale) {
|
|||
qCDebug(interfaceapp, "Changed scale to %f", (double)_targetScale);
|
||||
}
|
||||
|
||||
float MyAvatar::getDomainMinScale() {
|
||||
return _domainMinimumScale;
|
||||
}
|
||||
|
||||
float MyAvatar::getDomainMaxScale() {
|
||||
return _domainMaximumScale;
|
||||
}
|
||||
|
||||
void MyAvatar::increaseSize() {
|
||||
// make sure we're starting from an allowable scale
|
||||
clampTargetScaleToDomainLimits();
|
||||
|
@ -2273,17 +2279,27 @@ void MyAvatar::restrictScaleFromDomainSettings(const QJsonObject& domainSettings
|
|||
if (_domainMinimumScale > _domainMaximumScale) {
|
||||
std::swap(_domainMinimumScale, _domainMaximumScale);
|
||||
}
|
||||
// Set avatar current scale
|
||||
Settings settings;
|
||||
settings.beginGroup("Avatar");
|
||||
_targetScale = loadSetting(settings, "scale", 1.0f);
|
||||
|
||||
qCDebug(interfaceapp, "This domain requires a minimum avatar scale of %f and a maximum avatar scale of %f",
|
||||
(double)_domainMinimumScale, (double)_domainMaximumScale);
|
||||
qCDebug(interfaceapp) << "This domain requires a minimum avatar scale of " << _domainMinimumScale
|
||||
<< " and a maximum avatar scale of " << _domainMaximumScale
|
||||
<< ". Current avatar scale is " << _targetScale;
|
||||
|
||||
// debug to log if this avatar's scale in this domain will be clamped
|
||||
auto clampedScale = glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale);
|
||||
float clampedScale = glm::clamp(_targetScale, _domainMinimumScale, _domainMaximumScale);
|
||||
|
||||
if (_targetScale != clampedScale) {
|
||||
qCDebug(interfaceapp, "Avatar scale will be clamped to %f because %f is not allowed by current domain",
|
||||
(double)clampedScale, (double)_targetScale);
|
||||
qCDebug(interfaceapp) << "Current avatar scale is clamped to " << clampedScale
|
||||
<< " because " << _targetScale << " is not allowed by current domain";
|
||||
// The current scale of avatar should not be more than domain's max_avatar_scale and not less than domain's min_avatar_scale .
|
||||
_targetScale = clampedScale;
|
||||
}
|
||||
|
||||
setScale(glm::vec3(_targetScale));
|
||||
settings.endGroup();
|
||||
}
|
||||
|
||||
void MyAvatar::clearScaleRestriction() {
|
||||
|
|
|
@ -556,6 +556,8 @@ public slots:
|
|||
void increaseSize();
|
||||
void decreaseSize();
|
||||
void resetSize();
|
||||
float getDomainMinScale();
|
||||
float getDomainMaxScale();
|
||||
|
||||
void goToLocation(const glm::vec3& newPosition,
|
||||
bool hasOrientation = false, const glm::quat& newOrientation = glm::quat(),
|
||||
|
@ -606,12 +608,13 @@ signals:
|
|||
void onLoadComplete();
|
||||
void wentAway();
|
||||
void wentActive();
|
||||
void skeletonChanged();
|
||||
|
||||
private:
|
||||
|
||||
bool requiresSafeLanding(const glm::vec3& positionIn, glm::vec3& positionOut);
|
||||
|
||||
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail) override;
|
||||
virtual QByteArray toByteArrayStateful(AvatarDataDetail dataDetail, bool dropFaceTracking) override;
|
||||
|
||||
void simulate(float deltaTime);
|
||||
void updateFromTrackers(float deltaTime);
|
||||
|
|
|
@@ -133,4 +133,12 @@ void Audio::setReverb(bool enable) {

void Audio::setReverbOptions(const AudioEffectOptions* options) {
    DependencyManager::get<AudioClient>()->setReverbOptions(options);
}

void Audio::setInputDevice(const QAudioDeviceInfo& device) {
    _devices.chooseInputDevice(device);
}

void Audio::setOutputDevice(const QAudioDeviceInfo& device) {
    _devices.chooseOutputDevice(device);
}

@@ -50,6 +50,8 @@ public:
    void showMicMeter(bool show);
    void setInputVolume(float volume);

    Q_INVOKABLE void setInputDevice(const QAudioDeviceInfo& device);
    Q_INVOKABLE void setOutputDevice(const QAudioDeviceInfo& device);
    Q_INVOKABLE void setReverb(bool enable);
    Q_INVOKABLE void setReverbOptions(const AudioEffectOptions* options);
|
@ -79,7 +81,7 @@ private:
|
|||
float _inputVolume { 1.0f };
|
||||
float _inputLevel { 0.0f };
|
||||
bool _isMuted { false };
|
||||
bool _enableNoiseReduction;
|
||||
bool _enableNoiseReduction { true }; // Match default value of AudioClient::_isNoiseGateEnabled.
|
||||
bool _contextIsHMD { false };
|
||||
|
||||
AudioDevices* getDevices() { return &_devices; }
|
||||
|
|
|
@ -38,7 +38,8 @@ Setting::Handle<QString>& getSetting(bool contextIsHMD, QAudio::Mode mode) {
|
|||
|
||||
QHash<int, QByteArray> AudioDeviceList::_roles {
|
||||
{ Qt::DisplayRole, "display" },
|
||||
{ Qt::CheckStateRole, "selected" }
|
||||
{ Qt::CheckStateRole, "selected" },
|
||||
{ Qt::UserRole, "info" }
|
||||
};
|
||||
Qt::ItemFlags AudioDeviceList::_flags { Qt::ItemIsSelectable | Qt::ItemIsEnabled };
|
||||
|
||||
|
@ -51,66 +52,24 @@ QVariant AudioDeviceList::data(const QModelIndex& index, int role) const {
|
|||
return _devices.at(index.row()).display;
|
||||
} else if (role == Qt::CheckStateRole) {
|
||||
return _devices.at(index.row()).selected;
|
||||
} else if (role == Qt::UserRole) {
|
||||
return QVariant::fromValue<QAudioDeviceInfo>(_devices.at(index.row()).info);
|
||||
} else {
|
||||
return QVariant();
|
||||
}
|
||||
}
|
||||
|
||||
bool AudioDeviceList::setData(const QModelIndex& index, const QVariant& value, int role) {
|
||||
if (!index.isValid() || index.row() >= _devices.size() || role != Qt::CheckStateRole) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// only allow switching to a new device, not deactivating an in-use device
|
||||
auto selected = value.toBool();
|
||||
if (!selected) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return setDevice(index.row(), true);
|
||||
}
|
||||
|
||||
bool AudioDeviceList::setDevice(int row, bool fromUser) {
|
||||
bool success = false;
|
||||
auto& device = _devices[row];
|
||||
_userSelection = fromUser;
|
||||
|
||||
// skip if already selected
|
||||
if (!device.selected) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
|
||||
Q_ARG(QAudio::Mode, _mode),
|
||||
Q_ARG(const QAudioDeviceInfo&, device.info));
|
||||
}
|
||||
|
||||
emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
|
||||
return success;
|
||||
}
|
||||
|
||||
void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
|
||||
bool success { false };
|
||||
|
||||
// try to set the last selected device
|
||||
if (!device.isNull()) {
|
||||
auto i = 0;
|
||||
for (; i < rowCount(); ++i) {
|
||||
if (device == _devices[i].info.deviceName()) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (i < rowCount()) {
|
||||
success = setDevice(i, false);
|
||||
}
|
||||
|
||||
// the selection failed - reset it
|
||||
if (!success) {
|
||||
emit deviceSelected();
|
||||
}
|
||||
}
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
auto deviceName = getSetting(contextIsHMD, _mode).get();
|
||||
bool switchResult = false;
|
||||
QMetaObject::invokeMethod(client, "switchAudioDevice", Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(bool, switchResult),
|
||||
Q_ARG(QAudio::Mode, _mode), Q_ARG(QString, deviceName));
|
||||
|
||||
// try to set to the default device for this mode
|
||||
if (!success) {
|
||||
auto client = DependencyManager::get<AudioClient>().data();
|
||||
if (!switchResult) {
|
||||
if (contextIsHMD) {
|
||||
QString deviceName;
|
||||
if (_mode == QAudio::AudioInput) {
|
||||
|
@ -131,7 +90,6 @@ void AudioDeviceList::resetDevice(bool contextIsHMD, const QString& device) {
|
|||
void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
|
||||
auto oldDevice = _selectedDevice;
|
||||
_selectedDevice = device;
|
||||
QModelIndex index;
|
||||
|
||||
for (auto i = 0; i < _devices.size(); ++i) {
|
||||
AudioDevice& device = _devices[i];
|
||||
|
@ -139,15 +97,9 @@ void AudioDeviceList::onDeviceChanged(const QAudioDeviceInfo& device) {
|
|||
device.selected = false;
|
||||
} else if (device.info == _selectedDevice) {
|
||||
device.selected = true;
|
||||
index = createIndex(i, 0);
|
||||
}
|
||||
}
|
||||
|
||||
if (_userSelection) {
|
||||
_userSelection = false;
|
||||
emit deviceSelected(_selectedDevice, oldDevice);
|
||||
}
|
||||
|
||||
emit deviceChanged(_selectedDevice);
|
||||
emit dataChanged(createIndex(0, 0), createIndex(rowCount() - 1, 0));
|
||||
}
|
||||
|
@ -182,13 +134,6 @@ AudioDevices::AudioDevices(bool& contextIsHMD) : _contextIsHMD(contextIsHMD) {
|
|||
_outputs.onDeviceChanged(client->getActiveAudioDevice(QAudio::AudioOutput));
|
||||
_inputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioInput));
|
||||
_outputs.onDevicesChanged(client->getAudioDevices(QAudio::AudioOutput));
|
||||
|
||||
connect(&_inputs, &AudioDeviceList::deviceSelected, [&](const QAudioDeviceInfo& device, const QAudioDeviceInfo& previousDevice) {
|
||||
onDeviceSelected(QAudio::AudioInput, device, previousDevice);
|
||||
});
|
||||
connect(&_outputs, &AudioDeviceList::deviceSelected, [&](const QAudioDeviceInfo& device, const QAudioDeviceInfo& previousDevice) {
|
||||
onDeviceSelected(QAudio::AudioOutput, device, previousDevice);
|
||||
});
|
||||
}
|
||||
|
||||
void AudioDevices::onContextChanged(const QString& context) {
|
||||
|
@ -244,22 +189,40 @@ void AudioDevices::onDeviceChanged(QAudio::Mode mode, const QAudioDeviceInfo& de
|
|||
}
|
||||
|
||||
void AudioDevices::onDevicesChanged(QAudio::Mode mode, const QList<QAudioDeviceInfo>& devices) {
|
||||
static bool initialized { false };
|
||||
auto initialize = [&]{
|
||||
if (initialized) {
|
||||
onContextChanged(QString());
|
||||
} else {
|
||||
initialized = true;
|
||||
}
|
||||
};
|
||||
|
||||
static std::once_flag once;
|
||||
if (mode == QAudio::AudioInput) {
|
||||
_inputs.onDevicesChanged(devices);
|
||||
static std::once_flag inputFlag;
|
||||
std::call_once(inputFlag, initialize);
|
||||
} else { // if (mode == QAudio::AudioOutput)
|
||||
_outputs.onDevicesChanged(devices);
|
||||
static std::once_flag outputFlag;
|
||||
std::call_once(outputFlag, initialize);
|
||||
}
|
||||
std::call_once(once, [&] { onContextChanged(QString()); });
|
||||
}
|
||||
|
||||
|
||||
void AudioDevices::chooseInputDevice(const QAudioDeviceInfo& device) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
bool success = false;
|
||||
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
|
||||
Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(bool, success),
|
||||
Q_ARG(QAudio::Mode, QAudio::AudioInput),
|
||||
Q_ARG(const QAudioDeviceInfo&, device));
|
||||
|
||||
if (success) {
|
||||
onDeviceSelected(QAudio::AudioInput, device, _inputs._selectedDevice);
|
||||
}
|
||||
}
|
||||
|
||||
void AudioDevices::chooseOutputDevice(const QAudioDeviceInfo& device) {
|
||||
auto client = DependencyManager::get<AudioClient>();
|
||||
bool success = false;
|
||||
QMetaObject::invokeMethod(client.data(), "switchAudioDevice",
|
||||
Qt::BlockingQueuedConnection,
|
||||
Q_RETURN_ARG(bool, success),
|
||||
Q_ARG(QAudio::Mode, QAudio::AudioOutput),
|
||||
Q_ARG(const QAudioDeviceInfo&, device));
|
||||
|
||||
if (success) {
|
||||
onDeviceSelected(QAudio::AudioOutput, device, _outputs._selectedDevice);
|
||||
}
|
||||
}
|
||||
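The one-time context refresh in onDevicesChanged() above leans on std::call_once. As a standalone illustration (plain standard C++, not the engine's AudioDevices class; the function names are only illustrative), this sketch shows the guarantee being relied on: the wrapped callable runs exactly once per once_flag, however many times and from however many threads the surrounding function is entered.

```cpp
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>

std::once_flag g_onceFlag;

void initialize() {
    std::cout << "initialized exactly once\n";
}

void onDevicesChanged() {
    // Safe to call repeatedly and concurrently; initialize() still runs only once.
    std::call_once(g_onceFlag, initialize);
}

int main() {
    std::vector<std::thread> threads;
    for (int i = 0; i < 4; ++i) {
        threads.emplace_back(onDevicesChanged);
    }
    for (auto& t : threads) {
        t.join();
    }
}
```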
|
|
|
@ -37,14 +37,11 @@ public:
|
|||
|
||||
// get/set devices through a QML ListView
|
||||
QVariant data(const QModelIndex& index, int role) const override;
|
||||
bool setData(const QModelIndex& index, const QVariant &value, int role) override;
|
||||
|
||||
// reset device to the last selected device in this context, or the default
|
||||
void resetDevice(bool contextIsHMD, const QString& device);
|
||||
|
||||
signals:
|
||||
void deviceSelected(const QAudioDeviceInfo& device = QAudioDeviceInfo(),
|
||||
const QAudioDeviceInfo& previousDevice = QAudioDeviceInfo());
|
||||
void deviceChanged(const QAudioDeviceInfo& device);
|
||||
|
||||
private slots:
|
||||
|
@ -54,12 +51,9 @@ private slots:
|
|||
private:
|
||||
friend class AudioDevices;
|
||||
|
||||
bool setDevice(int index, bool fromUser);
|
||||
|
||||
static QHash<int, QByteArray> _roles;
|
||||
static Qt::ItemFlags _flags;
|
||||
bool _userSelection { false };
|
||||
QAudio::Mode _mode;
|
||||
const QAudio::Mode _mode;
|
||||
QAudioDeviceInfo _selectedDevice;
|
||||
QList<AudioDevice> _devices;
|
||||
};
|
||||
|
@ -73,6 +67,8 @@ class AudioDevices : public QObject {
|
|||
|
||||
public:
|
||||
AudioDevices(bool& contextIsHMD);
|
||||
void chooseInputDevice(const QAudioDeviceInfo& device);
|
||||
void chooseOutputDevice(const QAudioDeviceInfo& device);
|
||||
|
||||
signals:
|
||||
void nop();
|
||||
|
|
|
@ -24,13 +24,14 @@
|
|||
#include "ScriptHighlighting.h"
|
||||
|
||||
const int NO_CURRENT_HISTORY_COMMAND = -1;
|
||||
const int MAX_HISTORY_SIZE = 64;
|
||||
const int MAX_HISTORY_SIZE = 256;
|
||||
const QString HISTORY_FILENAME = "JSConsole.history.json";
|
||||
|
||||
const QString COMMAND_STYLE = "color: #266a9b;";
|
||||
|
||||
const QString RESULT_SUCCESS_STYLE = "color: #677373;";
|
||||
const QString RESULT_INFO_STYLE = "color: #223bd1;";
|
||||
const QString RESULT_WARNING_STYLE = "color: #d13b22;";
|
||||
const QString RESULT_WARNING_STYLE = "color: #999922;";
|
||||
const QString RESULT_ERROR_STYLE = "color: #d13b22;";
|
||||
|
||||
const QString GUTTER_PREVIOUS_COMMAND = "<span style=\"color: #57b8bb;\"><</span>";
|
||||
|
@ -38,14 +39,35 @@ const QString GUTTER_ERROR = "<span style=\"color: #d13b22;\">X</span>";
|
|||
|
||||
const QString JSConsole::_consoleFileName { "about:console" };
|
||||
|
||||
const QString JSON_KEY = "entries";
|
||||
QList<QString> _readLines(const QString& filename) {
|
||||
QFile file(filename);
|
||||
file.open(QFile::ReadOnly);
|
||||
auto json = QTextStream(&file).readAll().toUtf8();
|
||||
auto root = QJsonDocument::fromJson(json).object();
|
||||
// TODO: check root["version"]
|
||||
return root[JSON_KEY].toVariant().toStringList();
|
||||
}
|
||||
|
||||
void _writeLines(const QString& filename, const QList<QString>& lines) {
|
||||
QFile file(filename);
|
||||
file.open(QFile::WriteOnly);
|
||||
auto root = QJsonObject();
|
||||
root["version"] = 1.0;
|
||||
root["last-modified"] = QDateTime::currentDateTime().toTimeSpec(Qt::OffsetFromUTC).toString(Qt::ISODate);
|
||||
root[JSON_KEY] = QJsonArray::fromStringList(lines);
|
||||
auto json = QJsonDocument(root).toJson();
|
||||
QTextStream(&file) << json;
|
||||
}
|
||||
|
||||
JSConsole::JSConsole(QWidget* parent, ScriptEngine* scriptEngine) :
|
||||
QWidget(parent),
|
||||
_ui(new Ui::Console),
|
||||
_currentCommandInHistory(NO_CURRENT_HISTORY_COMMAND),
|
||||
_commandHistory(),
|
||||
_savedHistoryFilename(QStandardPaths::writableLocation(QStandardPaths::DataLocation) + "/" + HISTORY_FILENAME),
|
||||
_commandHistory(_readLines(_savedHistoryFilename)),
|
||||
_ownScriptEngine(scriptEngine == NULL),
|
||||
_scriptEngine(NULL) {
|
||||
|
||||
_ui->setupUi(this);
|
||||
_ui->promptTextEdit->setLineWrapMode(QTextEdit::NoWrap);
|
||||
_ui->promptTextEdit->setWordWrapMode(QTextOption::NoWrap);
|
||||
|
@ -101,9 +123,12 @@ void JSConsole::setScriptEngine(ScriptEngine* scriptEngine) {
|
|||
}
|
||||
|
||||
void JSConsole::executeCommand(const QString& command) {
|
||||
_commandHistory.prepend(command);
|
||||
if (_commandHistory.length() > MAX_HISTORY_SIZE) {
|
||||
_commandHistory.removeLast();
|
||||
if (_commandHistory.isEmpty() || _commandHistory.constFirst() != command) {
|
||||
_commandHistory.prepend(command);
|
||||
if (_commandHistory.length() > MAX_HISTORY_SIZE) {
|
||||
_commandHistory.removeLast();
|
||||
}
|
||||
_writeLines(_savedHistoryFilename, _commandHistory);
|
||||
}
|
||||
|
||||
_ui->promptTextEdit->setDisabled(true);
|
||||
|
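For reference, here is a self-contained sketch of the history policy the executeCommand() change above implements (toy types, not the JSConsole API): skip a command that matches the most recent entry, keep newest-first order, and cap the list at MAX_HISTORY_SIZE before it would be persisted.

```cpp
#include <cassert>
#include <cstddef>
#include <deque>
#include <string>

constexpr std::size_t MAX_HISTORY_SIZE = 256;

void recordCommand(std::deque<std::string>& history, const std::string& command) {
    if (!history.empty() && history.front() == command) {
        return; // identical to the most recent entry, nothing new to store
    }
    history.push_front(command);          // newest entry first
    if (history.size() > MAX_HISTORY_SIZE) {
        history.pop_back();               // drop the oldest entry
    }
    // a real implementation would serialize `history` to disk at this point
}

int main() {
    std::deque<std::string> history;
    recordCommand(history, "print(1)");
    recordCommand(history, "print(1)");   // ignored: consecutive duplicate
    recordCommand(history, "print(2)");
    assert(history.size() == 2 && history.front() == "print(2)");
}
```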
@ -182,7 +207,7 @@ bool JSConsole::eventFilter(QObject* sender, QEvent* event) {
|
|||
// a new QTextBlock isn't created.
|
||||
keyEvent->setModifiers(keyEvent->modifiers() & ~Qt::ShiftModifier);
|
||||
} else {
|
||||
QString command = _ui->promptTextEdit->toPlainText().trimmed();
|
||||
QString command = _ui->promptTextEdit->toPlainText().replace("\r\n","\n").trimmed();
|
||||
|
||||
if (!command.isEmpty()) {
|
||||
QTextCursor cursor = _ui->promptTextEdit->textCursor();
|
||||
|
|
|
@ -63,6 +63,7 @@ private:
|
|||
QFutureWatcher<QScriptValue> _executeWatcher;
|
||||
Ui::Console* _ui;
|
||||
int _currentCommandInHistory;
|
||||
QString _savedHistoryFilename;
|
||||
QList<QString> _commandHistory;
|
||||
// Keeps track of whether the script engine was created inside the JSConsole
|
||||
bool _ownScriptEngine;
|
||||
|
|
|
@ -184,12 +184,15 @@ void setupPreferences() {
|
|||
{
|
||||
auto getter = [=]()->float { return myAvatar->getUniformScale(); };
|
||||
auto setter = [=](float value) { myAvatar->setTargetScale(value); };
|
||||
auto preference = new SpinnerPreference(AVATAR_TUNING, "Avatar scale (default is 1.0)", getter, setter);
|
||||
preference->setMin(0.01f);
|
||||
preference->setMax(99.9f);
|
||||
auto preference = new SpinnerSliderPreference(AVATAR_TUNING, "Avatar Scale", getter, setter);
|
||||
preference->setStep(0.05f);
|
||||
preference->setDecimals(2);
|
||||
preference->setStep(1);
|
||||
preferences->addPreference(preference);
|
||||
|
||||
// When the Interface is first loaded, setupPreferences() runs once, which causes
|
||||
// myAvatar->getDomainMinScale() and myAvatar->getDomainMaxScale() to be captured with incorrect values
|
||||
// that cannot be updated across domain switches. Loading these values each time the Dialog is opened
|
||||
// works around this, which is why they are specified in the QML rather than here.
|
||||
}
|
||||
{
|
||||
auto getter = []()->float { return DependencyManager::get<DdeFaceTracker>()->getEyeClosingThreshold(); };
|
||||
|
|
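The preferences above wire UI widgets to the avatar through getter/setter lambdas. A minimal stand-in (plain C++; the Preference struct here is hypothetical, unlike the engine's SpinnerSliderPreference) shows the pattern: the dialog never touches the avatar object directly, it only calls the closures it was given.

```cpp
#include <cstdio>
#include <functional>

// Hypothetical preference type: just a pair of closures plus widget limits.
struct Preference {
    std::function<float()> get;
    std::function<void(float)> set;
    float min = 0.0f;
    float max = 1.0f;
    float step = 0.01f;
};

int main() {
    float avatarScale = 1.0f; // stands in for the avatar's target scale

    Preference scalePref {
        [&]() -> float { return avatarScale; },
        [&](float value) { avatarScale = value; },
        0.01f, 99.9f, 0.05f
    };

    scalePref.set(1.5f);                            // the spinner/slider would call this
    std::printf("scale is now %.2f\n", scalePref.get());
}
```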
114
interface/src/ui/ResourceImageItem.cpp
Normal file
|
@ -0,0 +1,114 @@
|
|||
//
|
||||
// ResourceImageItem.cpp
|
||||
//
|
||||
// Created by David Kelly and Howard Stearns on 2017/06/08
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
|
||||
// Distributed under the Apache License, Version 2.0
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
//#include "Application.h"
|
||||
#include "ResourceImageItem.h"
|
||||
|
||||
#include <QOpenGLFramebufferObjectFormat>
|
||||
#include <QOpenGLFunctions>
|
||||
#include <QOpenGLExtraFunctions>
|
||||
#include <QOpenGLContext>
|
||||
|
||||
ResourceImageItem::ResourceImageItem() : QQuickFramebufferObject() {
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
connect(textureCache.data(), SIGNAL(spectatorCameraFramebufferReset()), this, SLOT(update()));
|
||||
}
|
||||
|
||||
void ResourceImageItem::setUrl(const QString& url) {
|
||||
if (url != m_url) {
|
||||
m_url = url;
|
||||
update();
|
||||
}
|
||||
}
|
||||
|
||||
void ResourceImageItem::setReady(bool ready) {
|
||||
if (ready != m_ready) {
|
||||
m_ready = ready;
|
||||
update();
|
||||
}
|
||||
}
|
||||
|
||||
void ResourceImageItemRenderer::onUpdateTimer() {
|
||||
if (_ready) {
|
||||
if (_networkTexture && _networkTexture->isLoaded()) {
|
||||
if(_fboMutex.tryLock()) {
|
||||
invalidateFramebufferObject();
|
||||
qApp->getActiveDisplayPlugin()->copyTextureToQuickFramebuffer(_networkTexture, _copyFbo, &_fenceSync);
|
||||
_fboMutex.unlock();
|
||||
} else {
|
||||
qDebug() << "couldn't get a lock, using last frame";
|
||||
}
|
||||
} else {
|
||||
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
|
||||
}
|
||||
}
|
||||
update();
|
||||
}
|
||||
|
||||
ResourceImageItemRenderer::ResourceImageItemRenderer() : QQuickFramebufferObject::Renderer() {
|
||||
connect(&_updateTimer, SIGNAL(timeout()), this, SLOT(onUpdateTimer()));
|
||||
auto textureCache = DependencyManager::get<TextureCache>();
|
||||
}
|
||||
|
||||
void ResourceImageItemRenderer::synchronize(QQuickFramebufferObject* item) {
|
||||
ResourceImageItem* resourceImageItem = static_cast<ResourceImageItem*>(item);
|
||||
|
||||
resourceImageItem->setFlag(QQuickItem::ItemHasContents);
|
||||
|
||||
_url = resourceImageItem->getUrl();
|
||||
_ready = resourceImageItem->getReady();
|
||||
_visible = resourceImageItem->isVisible();
|
||||
_window = resourceImageItem->window();
|
||||
|
||||
_networkTexture = DependencyManager::get<TextureCache>()->getTexture(_url);
|
||||
static const int UPDATE_TIMER_DELAY_IN_MS = 100; // 100 ms = 10 hz for now
|
||||
if (_ready && _visible && !_updateTimer.isActive()) {
|
||||
_updateTimer.start(UPDATE_TIMER_DELAY_IN_MS);
|
||||
} else if (!(_ready && _visible) && _updateTimer.isActive()) {
|
||||
_updateTimer.stop();
|
||||
}
|
||||
}
|
||||
|
||||
QOpenGLFramebufferObject* ResourceImageItemRenderer::createFramebufferObject(const QSize& size) {
|
||||
if (_copyFbo) {
|
||||
delete _copyFbo;
|
||||
}
|
||||
QOpenGLFramebufferObjectFormat format;
|
||||
format.setAttachment(QOpenGLFramebufferObject::CombinedDepthStencil);
|
||||
_copyFbo = new QOpenGLFramebufferObject(size, format);
|
||||
_copyFbo->bind();
|
||||
return new QOpenGLFramebufferObject(size, format);
|
||||
}
|
||||
|
||||
void ResourceImageItemRenderer::render() {
|
||||
auto f = QOpenGLContext::currentContext()->extraFunctions();
|
||||
|
||||
if (_fenceSync) {
|
||||
f->glWaitSync(_fenceSync, 0, GL_TIMEOUT_IGNORED);
|
||||
f->glDeleteSync(_fenceSync);
|
||||
_fenceSync = 0;
|
||||
}
|
||||
if (_ready) {
|
||||
_fboMutex.lock();
|
||||
_copyFbo->bind();
|
||||
QOpenGLFramebufferObject::blitFramebuffer(framebufferObject(), _copyFbo, GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT, GL_NEAREST);
|
||||
|
||||
// this clears the copyFbo texture
|
||||
// so next frame starts fresh - helps
|
||||
// when aspect ratio changes
|
||||
_copyFbo->takeTexture();
|
||||
_copyFbo->bind();
|
||||
_copyFbo->release();
|
||||
|
||||
_fboMutex.unlock();
|
||||
}
|
||||
glFlush();
|
||||
_window->resetOpenGLState();
|
||||
}
|
63
interface/src/ui/ResourceImageItem.h
Normal file
|
@ -0,0 +1,63 @@
|
|||
//
|
||||
// ResourceImageItem.h
|
||||
//
|
||||
// Created by David Kelly and Howard Stearns on 2017/06/08
|
||||
// Copyright 2017 High Fidelity, Inc.
|
||||
|
||||
// Distributed under the Apache License, Version 2.0
|
||||
// See the accompanying file LICENSE or http://www.apache.org/licenses/LICENSE-2.0.html
|
||||
//
|
||||
|
||||
#pragma once
|
||||
#ifndef hifi_ResourceImageItem_h
|
||||
#define hifi_ResourceImageItem_h
|
||||
|
||||
#include "Application.h"
|
||||
|
||||
#include <QQuickFramebufferObject>
|
||||
#include <QQuickWindow>
|
||||
#include <QTimer>
|
||||
|
||||
#include <TextureCache.h>
|
||||
|
||||
class ResourceImageItemRenderer : public QObject, public QQuickFramebufferObject::Renderer {
|
||||
Q_OBJECT
|
||||
public:
|
||||
ResourceImageItemRenderer();
|
||||
QOpenGLFramebufferObject* createFramebufferObject(const QSize& size) override;
|
||||
void synchronize(QQuickFramebufferObject* item) override;
|
||||
void render() override;
|
||||
private:
|
||||
bool _ready;
|
||||
QString _url;
|
||||
bool _visible;
|
||||
|
||||
NetworkTexturePointer _networkTexture;
|
||||
QQuickWindow* _window;
|
||||
QMutex _fboMutex;
|
||||
QOpenGLFramebufferObject* _copyFbo { nullptr };
|
||||
GLsync _fenceSync { 0 };
|
||||
QTimer _updateTimer;
|
||||
public slots:
|
||||
void onUpdateTimer();
|
||||
};
|
||||
|
||||
class ResourceImageItem : public QQuickFramebufferObject {
|
||||
Q_OBJECT
|
||||
Q_PROPERTY(QString url READ getUrl WRITE setUrl)
|
||||
Q_PROPERTY(bool ready READ getReady WRITE setReady)
|
||||
public:
|
||||
ResourceImageItem();
|
||||
QString getUrl() const { return m_url; }
|
||||
void setUrl(const QString& url);
|
||||
bool getReady() const { return m_ready; }
|
||||
void setReady(bool ready);
|
||||
QQuickFramebufferObject::Renderer* createRenderer() const override { return new ResourceImageItemRenderer; }
|
||||
|
||||
private:
|
||||
QString m_url;
|
||||
bool m_ready { false };
|
||||
|
||||
};
|
||||
|
||||
#endif // hifi_ResourceImageItem_h
|
|
@ -37,9 +37,11 @@ QVariant Billboard3DOverlay::getProperty(const QString &property) {
|
|||
return Planar3DOverlay::getProperty(property);
|
||||
}
|
||||
|
||||
void Billboard3DOverlay::applyTransformTo(Transform& transform, bool force) {
|
||||
bool Billboard3DOverlay::applyTransformTo(Transform& transform, bool force) {
|
||||
bool transformChanged = false;
|
||||
if (force || usecTimestampNow() > _transformExpiry) {
|
||||
PanelAttachable::applyTransformTo(transform, true);
|
||||
pointTransformAtCamera(transform, getOffsetRotation());
|
||||
transformChanged = PanelAttachable::applyTransformTo(transform, true);
|
||||
transformChanged |= pointTransformAtCamera(transform, getOffsetRotation());
|
||||
}
|
||||
return transformChanged;
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ public:
|
|||
QVariant getProperty(const QString& property) override;
|
||||
|
||||
protected:
|
||||
virtual void applyTransformTo(Transform& transform, bool force = false) override;
|
||||
virtual bool applyTransformTo(Transform& transform, bool force = false) override;
|
||||
};
|
||||
|
||||
#endif // hifi_Billboard3DOverlay_h
|
||||
|
|
|
@ -28,7 +28,7 @@ QVariant Billboardable::getProperty(const QString &property) {
|
|||
return QVariant();
|
||||
}
|
||||
|
||||
void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
|
||||
bool Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offsetRotation) {
|
||||
if (isFacingAvatar()) {
|
||||
glm::vec3 billboardPos = transform.getTranslation();
|
||||
glm::vec3 cameraPos = qApp->getCamera().getPosition();
|
||||
|
@ -38,5 +38,7 @@ void Billboardable::pointTransformAtCamera(Transform& transform, glm::quat offse
|
|||
glm::quat rotation(glm::vec3(elevation, azimuth, 0));
|
||||
transform.setRotation(rotation);
|
||||
transform.postRotate(offsetRotation);
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -27,7 +27,7 @@ protected:
|
|||
void setProperties(const QVariantMap& properties);
|
||||
QVariant getProperty(const QString& property);
|
||||
|
||||
void pointTransformAtCamera(Transform& transform, glm::quat offsetRotation = {1, 0, 0, 0});
|
||||
bool pointTransformAtCamera(Transform& transform, glm::quat offsetRotation = {1, 0, 0, 0});
|
||||
|
||||
private:
|
||||
bool _isFacingAvatar = false;
|
||||
|
|
|
@ -80,8 +80,8 @@ void Circle3DOverlay::render(RenderArgs* args) {
|
|||
|
||||
Q_ASSERT(args->_batch);
|
||||
auto& batch = *args->_batch;
|
||||
if (args->_pipeline) {
|
||||
batch.setPipeline(args->_pipeline->pipeline);
|
||||
if (args->_shapePipeline) {
|
||||
batch.setPipeline(args->_shapePipeline->pipeline);
|
||||
}
|
||||
|
||||
// FIXME: The line width of _lineWidth is not supported anymore, we'll need a workaround
|
||||
|
|
|
@ -65,15 +65,15 @@ void Cube3DOverlay::render(RenderArgs* args) {
|
|||
transform.setTranslation(position);
|
||||
transform.setRotation(rotation);
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto pipeline = args->_pipeline;
|
||||
if (!pipeline) {
|
||||
pipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
auto shapePipeline = args->_shapePipeline;
|
||||
if (!shapePipeline) {
|
||||
shapePipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
}
|
||||
|
||||
if (_isSolid) {
|
||||
transform.setScale(dimensions);
|
||||
batch->setModelTransform(transform);
|
||||
geometryCache->renderSolidCubeInstance(*batch, cubeColor, pipeline);
|
||||
geometryCache->renderSolidCubeInstance(args, *batch, cubeColor, shapePipeline);
|
||||
} else {
|
||||
geometryCache->bindSimpleProgram(*batch, false, false, false, true, true);
|
||||
if (getIsDashedLine()) {
|
||||
|
@ -109,7 +109,7 @@ void Cube3DOverlay::render(RenderArgs* args) {
|
|||
} else {
|
||||
transform.setScale(dimensions);
|
||||
batch->setModelTransform(transform);
|
||||
geometryCache->renderWireCubeInstance(*batch, cubeColor, pipeline);
|
||||
geometryCache->renderWireCubeInstance(args, *batch, cubeColor, shapePipeline);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -99,10 +99,14 @@ void Image3DOverlay::render(RenderArgs* args) {
|
|||
const float MAX_COLOR = 255.0f;
|
||||
xColor color = getColor();
|
||||
float alpha = getAlpha();
|
||||
|
||||
|
||||
Transform transform = getTransform();
|
||||
applyTransformTo(transform, true);
|
||||
setTransform(transform);
|
||||
bool transformChanged = applyTransformTo(transform, true);
|
||||
// If the transform is not modified, setting the transform to
|
||||
// itself will cause drift over time due to floating point errors.
|
||||
if (transformChanged) {
|
||||
setTransform(transform);
|
||||
}
|
||||
transform.postScale(glm::vec3(getDimensions(), 1.0f));
|
||||
|
||||
batch->setModelTransform(transform);
|
||||
|
|
|
@ -152,6 +152,7 @@ Overlay::Pointer Overlays::getOverlay(OverlayID id) const {
|
|||
OverlayID Overlays::addOverlay(const QString& type, const QVariant& properties) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
OverlayID result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "addOverlay", Q_RETURN_ARG(OverlayID, result), Q_ARG(QString, type), Q_ARG(QVariant, properties));
|
||||
return result;
|
||||
}
|
||||
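Most of the script-facing Overlays methods below open with the same guard: if the caller is not on the Overlays object's thread, marshal the call over and block on the result (BLOCKING_INVOKE_METHOD presumably wraps Qt's blocking queued invocation, which appears directly elsewhere in this change). The plain-C++ sketch below is only a conceptual analogue of that pattern, not the engine's implementation: the work runs on the owning thread while the caller waits on a future.

```cpp
#include <condition_variable>
#include <functional>
#include <future>
#include <iostream>
#include <mutex>
#include <queue>
#include <thread>

std::queue<std::function<void()>> g_tasks; // drained by the "owning" thread
std::mutex g_mutex;
std::condition_variable g_wake;

float widthOnOwnerThread() { return 1920.0f; } // must only run on the owner thread

// Called from any other thread: post the call, then block until the owner ran it.
float widthBlocking() {
    std::promise<float> promise;
    std::future<float> result = promise.get_future();
    {
        std::lock_guard<std::mutex> lock(g_mutex);
        g_tasks.push([&promise] { promise.set_value(widthOnOwnerThread()); });
    }
    g_wake.notify_one();
    return result.get(); // blocks, much like Qt::BlockingQueuedConnection
}

int main() {
    std::thread owner([] {
        std::unique_lock<std::mutex> lock(g_mutex);
        g_wake.wait(lock, [] { return !g_tasks.empty(); });
        while (!g_tasks.empty()) {
            g_tasks.front()();
            g_tasks.pop();
        }
    });
    std::cout << widthBlocking() << "\n"; // 1920, computed on the owner thread
    owner.join();
}
```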
|
@ -220,6 +221,7 @@ OverlayID Overlays::addOverlay(const Overlay::Pointer& overlay) {
|
|||
OverlayID Overlays::cloneOverlay(OverlayID id) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
OverlayID result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "cloneOverlay", Q_RETURN_ARG(OverlayID, result), Q_ARG(OverlayID, id));
|
||||
return result;
|
||||
}
|
||||
|
@ -307,6 +309,7 @@ void Overlays::deleteOverlay(OverlayID id) {
|
|||
}
|
||||
#endif
|
||||
|
||||
|
||||
_overlaysToDelete.push_back(overlayToDelete);
|
||||
emit overlayDeleted(id);
|
||||
}
|
||||
|
@ -314,6 +317,7 @@ void Overlays::deleteOverlay(OverlayID id) {
|
|||
QString Overlays::getOverlayType(OverlayID overlayId) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QString result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "getOverlayType", Q_RETURN_ARG(QString, result), Q_ARG(OverlayID, overlayId));
|
||||
return result;
|
||||
}
|
||||
|
@ -328,6 +332,7 @@ QString Overlays::getOverlayType(OverlayID overlayId) {
|
|||
QObject* Overlays::getOverlayObject(OverlayID id) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QObject* result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "getOverlayObject", Q_RETURN_ARG(QObject*, result), Q_ARG(OverlayID, id));
|
||||
return result;
|
||||
}
|
||||
|
@ -383,12 +388,6 @@ void Overlays::setParentPanel(OverlayID childId, OverlayID panelId) {
|
|||
#endif
|
||||
|
||||
OverlayID Overlays::getOverlayAtPoint(const glm::vec2& point) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
OverlayID result;
|
||||
BLOCKING_INVOKE_METHOD(this, "getOverlayAtPoint", Q_RETURN_ARG(OverlayID, result), Q_ARG(glm::vec2, point));
|
||||
return result;
|
||||
}
|
||||
|
||||
if (!_enabled) {
|
||||
return UNKNOWN_OVERLAY_ID;
|
||||
}
|
||||
|
@ -413,20 +412,47 @@ OverlayID Overlays::getOverlayAtPoint(const glm::vec2& point) {
|
|||
}
|
||||
|
||||
OverlayPropertyResult Overlays::getProperty(OverlayID id, const QString& property) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
OverlayPropertyResult result;
|
||||
BLOCKING_INVOKE_METHOD(this, "getProperty", Q_RETURN_ARG(OverlayPropertyResult, result), Q_ARG(OverlayID, id), Q_ARG(QString, property));
|
||||
return result;
|
||||
}
|
||||
|
||||
OverlayPropertyResult result;
|
||||
Overlay::Pointer thisOverlay = getOverlay(id);
|
||||
OverlayPropertyResult result;
|
||||
if (thisOverlay && thisOverlay->supportsGetProperty()) {
|
||||
result.value = thisOverlay->getProperty(property);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
OverlayPropertyResult Overlays::getProperties(const OverlayID& id, const QStringList& properties) {
|
||||
Overlay::Pointer thisOverlay = getOverlay(id);
|
||||
OverlayPropertyResult result;
|
||||
if (thisOverlay && thisOverlay->supportsGetProperty()) {
|
||||
QVariantMap mapResult;
|
||||
for (const auto& property : properties) {
|
||||
mapResult.insert(property, thisOverlay->getProperty(property));
|
||||
}
|
||||
result.value = mapResult;
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
OverlayPropertyResult Overlays::getOverlaysProperties(const QVariant& propertiesById) {
|
||||
QVariantMap map = propertiesById.toMap();
|
||||
OverlayPropertyResult result;
|
||||
QVariantMap resultMap;
|
||||
for (const auto& key : map.keys()) {
|
||||
OverlayID id = OverlayID(key);
|
||||
QVariantMap overlayResult;
|
||||
Overlay::Pointer thisOverlay = getOverlay(id);
|
||||
if (thisOverlay && thisOverlay->supportsGetProperty()) {
|
||||
QStringList propertiesToFetch = map[key].toStringList();
|
||||
for (const auto& property : propertiesToFetch) {
|
||||
overlayResult[property] = thisOverlay->getProperty(property);
|
||||
}
|
||||
}
|
||||
resultMap[key] = overlayResult;
|
||||
}
|
||||
result.value = resultMap;
|
||||
return result;
|
||||
}
|
||||
|
||||
OverlayPropertyResult::OverlayPropertyResult() {
|
||||
}
|
||||
|
||||
|
@ -458,18 +484,6 @@ RayToOverlayIntersectionResult Overlays::findRayIntersectionInternal(const PickR
|
|||
const QVector<OverlayID>& overlaysToInclude,
|
||||
const QVector<OverlayID>& overlaysToDiscard,
|
||||
bool visibleOnly, bool collidableOnly) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
RayToOverlayIntersectionResult result;
|
||||
BLOCKING_INVOKE_METHOD(this, "findRayIntersectionInternal", Q_RETURN_ARG(RayToOverlayIntersectionResult, result),
|
||||
Q_ARG(PickRay, ray),
|
||||
Q_ARG(bool, precisionPicking),
|
||||
Q_ARG(QVector<OverlayID>, overlaysToInclude),
|
||||
Q_ARG(QVector<OverlayID>, overlaysToDiscard),
|
||||
Q_ARG(bool, visibleOnly),
|
||||
Q_ARG(bool, collidableOnly));
|
||||
return result;
|
||||
}
|
||||
|
||||
float bestDistance = std::numeric_limits<float>::max();
|
||||
bool bestIsFront = false;
|
||||
|
||||
|
@ -588,6 +602,7 @@ void RayToOverlayIntersectionResultFromScriptValue(const QScriptValue& objectVar
|
|||
bool Overlays::isLoaded(OverlayID id) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
bool result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "isLoaded", Q_RETURN_ARG(bool, result), Q_ARG(OverlayID, id));
|
||||
return result;
|
||||
}
|
||||
|
@ -602,26 +617,21 @@ bool Overlays::isLoaded(OverlayID id) {
|
|||
QSizeF Overlays::textSize(OverlayID id, const QString& text) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
QSizeF result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "textSize", Q_RETURN_ARG(QSizeF, result), Q_ARG(OverlayID, id), Q_ARG(QString, text));
|
||||
return result;
|
||||
}
|
||||
|
||||
Overlay::Pointer thisOverlay;
|
||||
{
|
||||
QMutexLocker locker(&_mutex);
|
||||
thisOverlay = _overlaysHUD[id];
|
||||
}
|
||||
Overlay::Pointer thisOverlay = getOverlay(id);
|
||||
if (thisOverlay) {
|
||||
if (auto textOverlay = std::dynamic_pointer_cast<TextOverlay>(thisOverlay)) {
|
||||
return textOverlay->textSize(text);
|
||||
}
|
||||
} else {
|
||||
{
|
||||
QMutexLocker locker(&_mutex);
|
||||
thisOverlay = _overlaysWorld[id];
|
||||
}
|
||||
if (auto text3dOverlay = std::dynamic_pointer_cast<Text3DOverlay>(thisOverlay)) {
|
||||
return text3dOverlay->textSize(text);
|
||||
if (thisOverlay->is3D()) {
|
||||
if (auto text3dOverlay = std::dynamic_pointer_cast<Text3DOverlay>(thisOverlay)) {
|
||||
return text3dOverlay->textSize(text);
|
||||
}
|
||||
} else {
|
||||
if (auto textOverlay = std::dynamic_pointer_cast<TextOverlay>(thisOverlay)) {
|
||||
return textOverlay->textSize(text);
|
||||
}
|
||||
}
|
||||
}
|
||||
return QSizeF(0.0f, 0.0f);
|
||||
|
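The reworked textSize() above dispatches on is3D() and then downcasts with std::dynamic_pointer_cast. A toy version of that dispatch (hypothetical types, not the engine's Overlay hierarchy) for anyone unfamiliar with the idiom: the cast yields a non-null pointer only when the object really is of the requested type.

```cpp
#include <iostream>
#include <memory>

struct Overlay {
    virtual ~Overlay() = default;
    virtual bool is3D() const = 0;
};
struct TextOverlay : Overlay {
    bool is3D() const override { return false; }
    float textWidth() const { return 24.0f; }  // e.g. pixels
};
struct Text3DOverlay : Overlay {
    bool is3D() const override { return true; }
    float textWidth() const { return 0.5f; }   // e.g. meters
};

float textWidth(const std::shared_ptr<Overlay>& overlay) {
    if (overlay->is3D()) {
        if (auto text3d = std::dynamic_pointer_cast<Text3DOverlay>(overlay)) {
            return text3d->textWidth();
        }
    } else if (auto text = std::dynamic_pointer_cast<TextOverlay>(overlay)) {
        return text->textWidth();
    }
    return 0.0f; // not a text overlay
}

int main() {
    std::cout << textWidth(std::make_shared<TextOverlay>()) << "\n";   // 24
    std::cout << textWidth(std::make_shared<Text3DOverlay>()) << "\n"; // 0.5
}
```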
@ -686,6 +696,7 @@ void Overlays::deletePanel(OverlayID panelId) {
|
|||
bool Overlays::isAddedOverlay(OverlayID id) {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
bool result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "isAddedOverlay", Q_RETURN_ARG(bool, result), Q_ARG(OverlayID, id));
|
||||
return result;
|
||||
}
|
||||
|
@ -721,6 +732,7 @@ void Overlays::sendHoverLeaveOverlay(OverlayID id, PointerEvent event) {
|
|||
OverlayID Overlays::getKeyboardFocusOverlay() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
OverlayID result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "getKeyboardFocusOverlay", Q_RETURN_ARG(OverlayID, result));
|
||||
return result;
|
||||
}
|
||||
|
@ -740,6 +752,7 @@ void Overlays::setKeyboardFocusOverlay(OverlayID id) {
|
|||
float Overlays::width() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
float result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "width", Q_RETURN_ARG(float, result));
|
||||
return result;
|
||||
}
|
||||
|
@ -751,6 +764,7 @@ float Overlays::width() {
|
|||
float Overlays::height() {
|
||||
if (QThread::currentThread() != thread()) {
|
||||
float result;
|
||||
PROFILE_RANGE(script, __FUNCTION__);
|
||||
BLOCKING_INVOKE_METHOD(this, "height", Q_RETURN_ARG(float, result));
|
||||
return result;
|
||||
}
|
||||
|
@ -960,10 +974,11 @@ bool Overlays::mouseMoveEvent(QMouseEvent* event) {
|
|||
|
||||
QVector<QUuid> Overlays::findOverlays(const glm::vec3& center, float radius) {
|
||||
QVector<QUuid> result;
|
||||
if (QThread::currentThread() != thread()) {
|
||||
BLOCKING_INVOKE_METHOD(this, "findOverlays", Q_RETURN_ARG(QVector<QUuid>, result), Q_ARG(glm::vec3, center), Q_ARG(float, radius));
|
||||
return result;
|
||||
}
|
||||
//if (QThread::currentThread() != thread()) {
|
||||
// PROFILE_RANGE(script, __FUNCTION__);
|
||||
// BLOCKING_INVOKE_METHOD(this, "findOverlays", Q_RETURN_ARG(QVector<QUuid>, result), Q_ARG(glm::vec3, center), Q_ARG(float, radius));
|
||||
// return result;
|
||||
//}
|
||||
|
||||
QMutexLocker locker(&_mutex);
|
||||
QMapIterator<OverlayID, Overlay::Pointer> i(_overlaysWorld);
|
||||
|
|
|
@ -190,6 +190,10 @@ public slots:
|
|||
*/
|
||||
OverlayPropertyResult getProperty(OverlayID id, const QString& property);
|
||||
|
||||
OverlayPropertyResult getProperties(const OverlayID& id, const QStringList& properties);
|
||||
|
||||
OverlayPropertyResult getOverlaysProperties(const QVariant& overlaysProperties);
|
||||
|
||||
/*jsdoc
|
||||
* Find the closest 3D overlay hit by a pick ray.
|
||||
*
|
||||
|
|
|
@ -61,7 +61,7 @@ void PanelAttachable::setProperties(const QVariantMap& properties) {
|
|||
}
|
||||
}
|
||||
|
||||
void PanelAttachable::applyTransformTo(Transform& transform, bool force) {
|
||||
bool PanelAttachable::applyTransformTo(Transform& transform, bool force) {
|
||||
if (force || usecTimestampNow() > _transformExpiry) {
|
||||
const quint64 TRANSFORM_UPDATE_PERIOD = 100000; // frequency is 10 Hz
|
||||
_transformExpiry = usecTimestampNow() + TRANSFORM_UPDATE_PERIOD;
|
||||
|
@ -71,7 +71,9 @@ void PanelAttachable::applyTransformTo(Transform& transform, bool force) {
|
|||
transform.postTranslate(getOffsetPosition());
|
||||
transform.postRotate(getOffsetRotation());
|
||||
transform.postScale(getOffsetScale());
|
||||
return true;
|
||||
}
|
||||
#endif
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
|
|
@ -67,7 +67,7 @@ protected:
|
|||
|
||||
/// set position, rotation and scale on transform based on offsets, and parent panel offsets
|
||||
/// if force is false, only apply transform if it hasn't been applied in the last .1 seconds
|
||||
virtual void applyTransformTo(Transform& transform, bool force = false);
|
||||
virtual bool applyTransformTo(Transform& transform, bool force = false);
|
||||
quint64 _transformExpiry = 0;
|
||||
|
||||
private:
|
||||
|
|
|
@ -45,17 +45,17 @@ void Shape3DOverlay::render(RenderArgs* args) {
|
|||
transform.setTranslation(position);
|
||||
transform.setRotation(rotation);
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto pipeline = args->_pipeline;
|
||||
if (!pipeline) {
|
||||
pipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
auto shapePipeline = args->_shapePipeline;
|
||||
if (!shapePipeline) {
|
||||
shapePipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
}
|
||||
|
||||
transform.setScale(dimensions);
|
||||
batch->setModelTransform(transform);
|
||||
if (_isSolid) {
|
||||
geometryCache->renderSolidShapeInstance(*batch, _shape, cubeColor, pipeline);
|
||||
geometryCache->renderSolidShapeInstance(args, *batch, _shape, cubeColor, shapePipeline);
|
||||
} else {
|
||||
geometryCache->renderWireShapeInstance(*batch, _shape, cubeColor, pipeline);
|
||||
geometryCache->renderWireShapeInstance(args, *batch, _shape, cubeColor, shapePipeline);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -44,15 +44,15 @@ void Sphere3DOverlay::render(RenderArgs* args) {
|
|||
batch->setModelTransform(transform);
|
||||
|
||||
auto geometryCache = DependencyManager::get<GeometryCache>();
|
||||
auto pipeline = args->_pipeline;
|
||||
if (!pipeline) {
|
||||
pipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
auto shapePipeline = args->_shapePipeline;
|
||||
if (!shapePipeline) {
|
||||
shapePipeline = _isSolid ? geometryCache->getOpaqueShapePipeline() : geometryCache->getWireShapePipeline();
|
||||
}
|
||||
|
||||
if (_isSolid) {
|
||||
geometryCache->renderSolidSphereInstance(*batch, sphereColor, pipeline);
|
||||
geometryCache->renderSolidSphereInstance(args, *batch, sphereColor, shapePipeline);
|
||||
} else {
|
||||
geometryCache->renderWireSphereInstance(*batch, sphereColor, pipeline);
|
||||
geometryCache->renderWireSphereInstance(args, *batch, sphereColor, shapePipeline);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
@ -137,8 +137,8 @@ void Text3DOverlay::render(RenderArgs* args) {
|
|||
// Text renderer sets its own pipeline,
|
||||
_textRenderer->draw(batch, 0, 0, getText(), textColor, glm::vec2(-1.0f), getDrawInFront());
|
||||
// so before we continue, we must reset the pipeline
|
||||
batch.setPipeline(args->_pipeline->pipeline);
|
||||
args->_pipeline->prepare(batch);
|
||||
batch.setPipeline(args->_shapePipeline->pipeline);
|
||||
args->_shapePipeline->prepare(batch, args);
|
||||
}
|
||||
|
||||
const render::ShapeKey Text3DOverlay::getShapeKey() {
|
||||
|
|
|
@ -451,7 +451,7 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
|
|||
// In Qt 5.9 mouse events must be sent before touch events to make sure some QtQuick components will
|
||||
// receive mouse events
|
||||
#if QT_VERSION >= QT_VERSION_CHECK(5, 9, 0)
|
||||
if (!(this->_pressed && event.getType() == PointerEvent::Move)) {
|
||||
if (event.getType() == PointerEvent::Move) {
|
||||
QMouseEvent* mouseEvent = new QMouseEvent(mouseType, windowPoint, windowPoint, windowPoint, button, buttons, Qt::NoModifier);
|
||||
QCoreApplication::postEvent(_webSurface->getWindow(), mouseEvent);
|
||||
}
|
||||
|
@ -459,11 +459,10 @@ void Web3DOverlay::handlePointerEventAsTouch(const PointerEvent& event) {
|
|||
QCoreApplication::postEvent(_webSurface->getWindow(), touchEvent);
|
||||
|
||||
#if QT_VERSION < QT_VERSION_CHECK(5, 9, 0)
|
||||
if (this->_pressed && event.getType() == PointerEvent::Move) {
|
||||
return;
|
||||
if (event.getType() == PointerEvent::Move) {
|
||||
QMouseEvent* mouseEvent = new QMouseEvent(mouseType, windowPoint, windowPoint, windowPoint, button, buttons, Qt::NoModifier);
|
||||
QCoreApplication::postEvent(_webSurface->getWindow(), mouseEvent);
|
||||
}
|
||||
QMouseEvent* mouseEvent = new QMouseEvent(mouseType, windowPoint, windowPoint, windowPoint, button, buttons, Qt::NoModifier);
|
||||
QCoreApplication::postEvent(_webSurface->getWindow(), mouseEvent);
|
||||
#endif
|
||||
}
|
||||
|
||||
|
|
|
@ -23,20 +23,23 @@
|
|||
#include "CubicHermiteSpline.h"
|
||||
#include "AnimUtil.h"
|
||||
|
||||
static void lookupJointChainInfo(AnimInverseKinematics::JointChainInfo* jointChainInfos, size_t numJointChainInfos,
|
||||
static const float JOINT_CHAIN_INTERP_TIME = 0.25f;
|
||||
|
||||
static void lookupJointInfo(const AnimInverseKinematics::JointChainInfo& jointChainInfo,
|
||||
int indexA, int indexB,
|
||||
AnimInverseKinematics::JointChainInfo** jointChainInfoA,
|
||||
AnimInverseKinematics::JointChainInfo** jointChainInfoB) {
|
||||
*jointChainInfoA = nullptr;
|
||||
*jointChainInfoB = nullptr;
|
||||
for (size_t i = 0; i < numJointChainInfos; i++) {
|
||||
if (jointChainInfos[i].jointIndex == indexA) {
|
||||
*jointChainInfoA = jointChainInfos + i;
|
||||
const AnimInverseKinematics::JointInfo** jointInfoA,
|
||||
const AnimInverseKinematics::JointInfo** jointInfoB) {
|
||||
*jointInfoA = nullptr;
|
||||
*jointInfoB = nullptr;
|
||||
for (size_t i = 0; i < jointChainInfo.jointInfoVec.size(); i++) {
|
||||
const AnimInverseKinematics::JointInfo* jointInfo = &jointChainInfo.jointInfoVec[i];
|
||||
if (jointInfo->jointIndex == indexA) {
|
||||
*jointInfoA = jointInfo;
|
||||
}
|
||||
if (jointChainInfos[i].jointIndex == indexB) {
|
||||
*jointChainInfoB = jointChainInfos + i;
|
||||
if (jointInfo->jointIndex == indexB) {
|
||||
*jointInfoB = jointInfo;
|
||||
}
|
||||
if (*jointChainInfoA && *jointChainInfoB) {
|
||||
if (*jointInfoA && *jointInfoB) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -149,25 +152,28 @@ void AnimInverseKinematics::setTargetVars(const QString& jointName, const QStrin
|
|||
}
|
||||
|
||||
void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses) {
|
||||
// build a list of valid targets from _targetVarVec and animVars
|
||||
_maxTargetIndex = -1;
|
||||
|
||||
_hipsTargetIndex = -1;
|
||||
bool removeUnfoundJoints = false;
|
||||
|
||||
targets.reserve(_targetVarVec.size());
|
||||
|
||||
for (auto& targetVar : _targetVarVec) {
|
||||
|
||||
// update targetVar jointIndex cache
|
||||
if (targetVar.jointIndex == -1) {
|
||||
// this targetVar hasn't been validated yet...
|
||||
int jointIndex = _skeleton->nameToJointIndex(targetVar.jointName);
|
||||
if (jointIndex >= 0) {
|
||||
// this targetVar has a valid joint --> cache the indices
|
||||
targetVar.jointIndex = jointIndex;
|
||||
} else {
|
||||
qCWarning(animation) << "AnimInverseKinematics could not find jointName" << targetVar.jointName << "in skeleton";
|
||||
removeUnfoundJoints = true;
|
||||
}
|
||||
} else {
|
||||
IKTarget target;
|
||||
}
|
||||
|
||||
IKTarget target;
|
||||
if (targetVar.jointIndex != -1) {
|
||||
target.setType(animVars.lookup(targetVar.typeVar, (int)IKTarget::Type::RotationAndPosition));
|
||||
target.setIndex(targetVar.jointIndex);
|
||||
if (target.getType() != IKTarget::Type::Unknown) {
|
||||
AnimPose absPose = _skeleton->getAbsolutePose(targetVar.jointIndex, underPoses);
|
||||
glm::quat rotation = animVars.lookupRigToGeometry(targetVar.rotationVar, absPose.rot());
|
||||
|
@ -175,7 +181,6 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
|
|||
float weight = animVars.lookup(targetVar.weightVar, targetVar.weight);
|
||||
|
||||
target.setPose(rotation, translation);
|
||||
target.setIndex(targetVar.jointIndex);
|
||||
target.setWeight(weight);
|
||||
target.setFlexCoefficients(targetVar.numFlexCoefficients, targetVar.flexCoefficients);
|
||||
|
||||
|
@ -188,39 +193,20 @@ void AnimInverseKinematics::computeTargets(const AnimVariantMap& animVars, std::
|
|||
glm::vec3 poleReferenceVector = animVars.lookupRigToGeometryVector(targetVar.poleReferenceVectorVar, Vectors::UNIT_Z);
|
||||
target.setPoleReferenceVector(glm::normalize(poleReferenceVector));
|
||||
|
||||
targets.push_back(target);
|
||||
|
||||
if (targetVar.jointIndex > _maxTargetIndex) {
|
||||
_maxTargetIndex = targetVar.jointIndex;
|
||||
}
|
||||
|
||||
// record the index of the hips ik target.
|
||||
if (target.getIndex() == _hipsIndex) {
|
||||
_hipsTargetIndex = (int)targets.size() - 1;
|
||||
_hipsTargetIndex = (int)targets.size();
|
||||
}
|
||||
}
|
||||
} else {
|
||||
target.setType((int)IKTarget::Type::Unknown);
|
||||
}
|
||||
}
|
||||
|
||||
if (removeUnfoundJoints) {
|
||||
int numVars = (int)_targetVarVec.size();
|
||||
int i = 0;
|
||||
while (i < numVars) {
|
||||
if (_targetVarVec[i].jointIndex == -1) {
|
||||
if (numVars > 1) {
|
||||
// swap i for last element
|
||||
_targetVarVec[i] = _targetVarVec[numVars - 1];
|
||||
}
|
||||
_targetVarVec.pop_back();
|
||||
--numVars;
|
||||
} else {
|
||||
++i;
|
||||
}
|
||||
}
|
||||
targets.push_back(target);
|
||||
}
|
||||
}
|
||||
|
||||
void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<IKTarget>& targets) {
|
||||
void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<IKTarget>& targets, float dt, JointChainInfoVec& jointChainInfoVec) {
|
||||
// compute absolute poses that correspond to relative target poses
|
||||
AnimPoseVec absolutePoses;
|
||||
absolutePoses.resize(_relativePoses.size());
|
||||
|
@ -234,26 +220,75 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
|
|||
accumulator.clearAndClean();
|
||||
}
|
||||
|
||||
float maxError = FLT_MAX;
|
||||
float maxError = 0.0f;
|
||||
int numLoops = 0;
|
||||
const int MAX_IK_LOOPS = 16;
|
||||
const float MAX_ERROR_TOLERANCE = 0.1f; // cm
|
||||
while (maxError > MAX_ERROR_TOLERANCE && numLoops < MAX_IK_LOOPS) {
|
||||
while (numLoops < MAX_IK_LOOPS) {
|
||||
++numLoops;
|
||||
|
||||
bool debug = context.getEnableDebugDrawIKChains() && numLoops == MAX_IK_LOOPS;
|
||||
|
||||
// solve all targets
|
||||
for (auto& target: targets) {
|
||||
if (target.getType() == IKTarget::Type::Spline) {
|
||||
solveTargetWithSpline(context, target, absolutePoses, debug);
|
||||
} else {
|
||||
solveTargetWithCCD(context, target, absolutePoses, debug);
|
||||
for (size_t i = 0; i < targets.size(); i++) {
|
||||
switch (targets[i].getType()) {
|
||||
case IKTarget::Type::Unknown:
|
||||
break;
|
||||
case IKTarget::Type::Spline:
|
||||
solveTargetWithSpline(context, targets[i], absolutePoses, debug, jointChainInfoVec[i]);
|
||||
break;
|
||||
default:
|
||||
solveTargetWithCCD(context, targets[i], absolutePoses, debug, jointChainInfoVec[i]);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
// on last iteration, interpolate jointChains, if necessary
|
||||
if (numLoops == MAX_IK_LOOPS) {
|
||||
for (size_t i = 0; i < _prevJointChainInfoVec.size(); i++) {
|
||||
if (_prevJointChainInfoVec[i].timer > 0.0f) {
|
||||
float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[i].timer) / JOINT_CHAIN_INTERP_TIME;
|
||||
size_t chainSize = std::min(_prevJointChainInfoVec[i].jointInfoVec.size(), jointChainInfoVec[i].jointInfoVec.size());
|
||||
for (size_t j = 0; j < chainSize; j++) {
|
||||
jointChainInfoVec[i].jointInfoVec[j].rot = safeMix(_prevJointChainInfoVec[i].jointInfoVec[j].rot, jointChainInfoVec[i].jointInfoVec[j].rot, alpha);
|
||||
jointChainInfoVec[i].jointInfoVec[j].trans = lerp(_prevJointChainInfoVec[i].jointInfoVec[j].trans, jointChainInfoVec[i].jointInfoVec[j].trans, alpha);
|
||||
}
|
||||
|
||||
// if joint chain was just disabled, ramp the weight toward zero.
|
||||
if (_prevJointChainInfoVec[i].target.getType() != IKTarget::Type::Unknown &&
|
||||
jointChainInfoVec[i].target.getType() == IKTarget::Type::Unknown) {
|
||||
IKTarget newTarget = _prevJointChainInfoVec[i].target;
|
||||
newTarget.setWeight((1.0f - alpha) * _prevJointChainInfoVec[i].target.getWeight());
|
||||
jointChainInfoVec[i].target = newTarget;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// copy jointChainInfoVecs into accumulators
|
||||
for (size_t i = 0; i < targets.size(); i++) {
|
||||
const std::vector<JointInfo>& jointInfoVec = jointChainInfoVec[i].jointInfoVec;
|
||||
|
||||
// don't accumulate disabled or rotation only ik targets.
|
||||
IKTarget::Type type = jointChainInfoVec[i].target.getType();
|
||||
if (type != IKTarget::Type::Unknown && type != IKTarget::Type::RotationOnly) {
|
||||
float weight = jointChainInfoVec[i].target.getWeight();
|
||||
if (weight > 0.0f) {
|
||||
for (size_t j = 0; j < jointInfoVec.size(); j++) {
|
||||
const JointInfo& info = jointInfoVec[j];
|
||||
if (info.jointIndex >= 0) {
|
||||
_rotationAccumulators[info.jointIndex].add(info.rot, weight);
|
||||
_translationAccumulators[info.jointIndex].add(info.trans, weight);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// harvest accumulated rotations and apply the average
|
||||
for (int i = 0; i < (int)_relativePoses.size(); ++i) {
|
||||
if (i == _hipsIndex) {
|
||||
continue; // don't apply accumulators to hips
|
||||
}
|
||||
if (_rotationAccumulators[i].size() > 0) {
|
||||
_relativePoses[i].rot() = _rotationAccumulators[i].getAverage();
|
||||
_rotationAccumulators[i].clear();
|
||||
|
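On the last solver iteration above, any chain whose timer is still counting down from JOINT_CHAIN_INTERP_TIME is blended between its previous result and the freshly solved one. The standalone sketch below uses plain floats in place of the quaternion and translation blends (safeMix and lerp) to show how the countdown timer maps to the blend factor alpha: just after a change alpha is near 0 (keep the old pose), and as the timer expires alpha reaches 1 (use the new pose).

```cpp
#include <algorithm>
#include <cstdio>

constexpr float JOINT_CHAIN_INTERP_TIME = 0.25f; // seconds

// timer starts at JOINT_CHAIN_INTERP_TIME and decreases by dt every frame
float blendAlpha(float timer) {
    return std::clamp((JOINT_CHAIN_INTERP_TIME - timer) / JOINT_CHAIN_INTERP_TIME, 0.0f, 1.0f);
}

float lerp(float a, float b, float alpha) { return a + alpha * (b - a); }

int main() {
    float previous = 10.0f; // e.g. a joint angle from the previous chain result
    float solved = 30.0f;   // the value the IK solver produced this frame
    for (float timer = JOINT_CHAIN_INTERP_TIME; timer > 0.0f; timer -= 0.05f) {
        std::printf("timer=%.2f blended=%.1f\n", timer, lerp(previous, solved, blendAlpha(timer)));
    }
}
```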
@ -289,7 +324,7 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
|
|||
// finally set the relative rotation of each tip to agree with absolute target rotation
|
||||
for (auto& target: targets) {
|
||||
int tipIndex = target.getIndex();
|
||||
int parentIndex = _skeleton->getParentIndex(tipIndex);
|
||||
int parentIndex = (tipIndex >= 0) ? _skeleton->getParentIndex(tipIndex) : -1;
|
||||
|
||||
// update rotationOnly targets that don't lie on the ik chain of other ik targets.
|
||||
if (parentIndex != -1 && !_rotationAccumulators[tipIndex].isDirty() && target.getType() == IKTarget::Type::RotationOnly) {
|
||||
|
@ -308,9 +343,34 @@ void AnimInverseKinematics::solve(const AnimContext& context, const std::vector<
|
|||
absolutePoses[tipIndex].rot() = targetRotation;
|
||||
}
|
||||
}
|
||||
|
||||
// copy jointChainInfoVec into _prevJointChainInfoVec, and update timers
|
||||
for (size_t i = 0; i < jointChainInfoVec.size(); i++) {
|
||||
_prevJointChainInfoVec[i].timer = _prevJointChainInfoVec[i].timer - dt;
|
||||
if (_prevJointChainInfoVec[i].timer <= 0.0f) {
|
||||
_prevJointChainInfoVec[i] = jointChainInfoVec[i];
|
||||
_prevJointChainInfoVec[i].target = targets[i];
|
||||
// store relative poses into unknown/rotation-only joint chains,
|
||||
// so we have something to interpolate from if this chain is activated.
|
||||
IKTarget::Type type = _prevJointChainInfoVec[i].target.getType();
|
||||
if (type == IKTarget::Type::Unknown || type == IKTarget::Type::RotationOnly) {
|
||||
for (size_t j = 0; j < _prevJointChainInfoVec[i].jointInfoVec.size(); j++) {
|
||||
auto& info = _prevJointChainInfoVec[i].jointInfoVec[j];
|
||||
if (info.jointIndex >= 0) {
|
||||
info.rot = _relativePoses[info.jointIndex].rot();
|
||||
info.trans = _relativePoses[info.jointIndex].trans();
|
||||
} else {
|
||||
info.rot = Quaternions::IDENTITY;
|
||||
info.trans = glm::vec3(0.0f);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug) {
|
||||
void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
|
||||
bool debug, JointChainInfo& jointChainInfoOut) const {
|
||||
size_t chainDepth = 0;
|
||||
|
||||
IKTarget::Type targetType = target.getType();
|
||||
|
@ -338,9 +398,6 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
// the tip's parent-relative as we proceed up the chain
|
||||
glm::quat tipParentOrientation = absolutePoses[pivotIndex].rot();
|
||||
|
||||
const size_t MAX_CHAIN_DEPTH = 30;
|
||||
JointChainInfo jointChainInfos[MAX_CHAIN_DEPTH];
|
||||
|
||||
// NOTE: if this code is removed, the head will remain rigid, causing the spine/hips to thrust forward backward
|
||||
// as the head is nodded.
|
||||
if (targetType == IKTarget::Type::HmdHead ||
|
||||
|
@ -368,7 +425,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
}
|
||||
|
||||
glm::vec3 tipRelativeTranslation = _relativePoses[target.getIndex()].trans();
|
||||
jointChainInfos[chainDepth] = { tipRelativeRotation, tipRelativeTranslation, target.getWeight(), tipIndex, constrained };
|
||||
jointChainInfoOut.jointInfoVec[chainDepth] = { tipRelativeRotation, tipRelativeTranslation, tipIndex, constrained };
|
||||
}
|
||||
|
||||
// cache tip absolute position
|
||||
|
@ -379,7 +436,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
// descend toward root, pivoting each joint to get tip closer to target position
|
||||
while (pivotIndex != _hipsIndex && pivotsParentIndex != -1) {
|
||||
|
||||
assert(chainDepth < MAX_CHAIN_DEPTH);
|
||||
assert(chainDepth < jointChainInfoOut.jointInfoVec.size());
|
||||
|
||||
// compute the two lines that should be aligned
|
||||
glm::vec3 jointPosition = absolutePoses[pivotIndex].trans();
|
||||
|
@ -444,9 +501,8 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
glm::quat twistPart;
|
||||
glm::vec3 axis = glm::normalize(deltaRotation * leverArm);
|
||||
swingTwistDecomposition(missingRotation, axis, swingPart, twistPart);
|
||||
float dotSign = copysignf(1.0f, twistPart.w);
|
||||
const float LIMIT_LEAK_FRACTION = 0.1f;
|
||||
deltaRotation = glm::normalize(glm::lerp(glm::quat(), dotSign * twistPart, LIMIT_LEAK_FRACTION)) * deltaRotation;
|
||||
deltaRotation = safeLerp(glm::quat(), twistPart, LIMIT_LEAK_FRACTION);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -455,9 +511,8 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
// An HmdHead target slaves the orientation of the end-effector by distributing rotation
|
||||
// deltas up the hierarchy. Its target position is enforced later (by shifting the hips).
|
||||
deltaRotation = target.getRotation() * glm::inverse(tipOrientation);
|
||||
float dotSign = copysignf(1.0f, deltaRotation.w);
|
||||
const float ANGLE_DISTRIBUTION_FACTOR = 0.45f;
|
||||
deltaRotation = glm::normalize(glm::lerp(glm::quat(), dotSign * deltaRotation, ANGLE_DISTRIBUTION_FACTOR));
|
||||
deltaRotation = safeLerp(glm::quat(), deltaRotation, ANGLE_DISTRIBUTION_FACTOR);
|
||||
}
|
||||
|
||||
// compute joint's new parent-relative rotation after swing
|
||||
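The dotSign handling that safeLerp replaces in the two hunks above exists because a quaternion and its negation describe the same rotation, so a naive lerp can blend the long way around. The sketch below shows what such a helper typically does; the exact behavior of the engine's safeLerp is an assumption here (hemisphere flip followed by a normalized lerp), written with a bare struct rather than glm.

```cpp
#include <cmath>
#include <cstdio>

struct Quat { float x, y, z, w; };

Quat safeLerp(Quat a, Quat b, float t) {
    float dot = a.x * b.x + a.y * b.y + a.z * b.z + a.w * b.w;
    if (dot < 0.0f) {
        b = { -b.x, -b.y, -b.z, -b.w }; // flip onto the same hemisphere: shorter arc
    }
    Quat q { a.x + t * (b.x - a.x), a.y + t * (b.y - a.y),
             a.z + t * (b.z - a.z), a.w + t * (b.w - a.w) };
    float len = std::sqrt(q.x * q.x + q.y * q.y + q.z * q.z + q.w * q.w);
    return { q.x / len, q.y / len, q.z / len, q.w / len };
}

int main() {
    Quat identity { 0.0f, 0.0f, 0.0f, 1.0f };
    Quat halfTurnZ { 0.0f, 0.0f, 1.0f, 0.0f };        // 180 degrees about Z
    Quat q = safeLerp(identity, halfTurnZ, 0.1f);      // small step toward the target
    std::printf("%.3f %.3f %.3f %.3f\n", q.x, q.y, q.z, q.w);
}
```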
|
@ -480,7 +535,7 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
}
|
||||
|
||||
glm::vec3 newTrans = _relativePoses[pivotIndex].trans();
|
||||
jointChainInfos[chainDepth] = { newRot, newTrans, target.getWeight(), pivotIndex, constrained };
|
||||
jointChainInfoOut.jointInfoVec[chainDepth] = { newRot, newTrans, pivotIndex, constrained };
|
||||
|
||||
// keep track of tip's new transform as we descend towards root
|
||||
tipPosition = jointPosition + deltaRotation * (tipPosition - jointPosition);
|
||||
|
@ -502,24 +557,25 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
int baseParentJointIndex = _skeleton->getParentIndex(baseJointIndex);
|
||||
AnimPose topPose, midPose, basePose;
|
||||
int topChainIndex = -1, baseChainIndex = -1;
|
||||
const size_t MAX_CHAIN_DEPTH = 30;
|
||||
AnimPose postAbsPoses[MAX_CHAIN_DEPTH];
|
||||
AnimPose accum = absolutePoses[_hipsIndex];
|
||||
AnimPose baseParentPose = absolutePoses[_hipsIndex];
|
||||
for (int i = (int)chainDepth - 1; i >= 0; i--) {
|
||||
accum = accum * AnimPose(glm::vec3(1.0f), jointChainInfos[i].relRot, jointChainInfos[i].relTrans);
|
||||
accum = accum * AnimPose(glm::vec3(1.0f), jointChainInfoOut.jointInfoVec[i].rot, jointChainInfoOut.jointInfoVec[i].trans);
|
||||
postAbsPoses[i] = accum;
|
||||
if (jointChainInfos[i].jointIndex == topJointIndex) {
|
||||
if (jointChainInfoOut.jointInfoVec[i].jointIndex == topJointIndex) {
|
||||
topChainIndex = i;
|
||||
topPose = accum;
|
||||
}
|
||||
if (jointChainInfos[i].jointIndex == midJointIndex) {
|
||||
if (jointChainInfoOut.jointInfoVec[i].jointIndex == midJointIndex) {
|
||||
midPose = accum;
|
||||
}
|
||||
if (jointChainInfos[i].jointIndex == baseJointIndex) {
|
||||
if (jointChainInfoOut.jointInfoVec[i].jointIndex == baseJointIndex) {
|
||||
baseChainIndex = i;
|
||||
basePose = accum;
|
||||
}
|
||||
if (jointChainInfos[i].jointIndex == baseParentJointIndex) {
|
||||
if (jointChainInfoOut.jointInfoVec[i].jointIndex == baseParentJointIndex) {
|
||||
baseParentPose = accum;
|
||||
}
|
||||
}
|
||||
|
@ -599,21 +655,16 @@ void AnimInverseKinematics::solveTargetWithCCD(const AnimContext& context, const
|
|||
}
|
||||
|
||||
glm::quat newBaseRelRot = glm::inverse(baseParentPose.rot()) * poleRot * basePose.rot();
|
||||
jointChainInfos[baseChainIndex].relRot = newBaseRelRot;
|
||||
jointChainInfoOut.jointInfoVec[baseChainIndex].rot = newBaseRelRot;
|
||||
|
||||
glm::quat newTopRelRot = glm::inverse(midPose.rot()) * glm::inverse(poleRot) * topPose.rot();
|
||||
jointChainInfos[topChainIndex].relRot = newTopRelRot;
|
||||
jointChainInfoOut.jointInfoVec[topChainIndex].rot = newTopRelRot;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for (size_t i = 0; i < chainDepth; i++) {
|
||||
_rotationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relRot, jointChainInfos[i].weight);
|
||||
_translationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relTrans, jointChainInfos[i].weight);
|
||||
}
|
||||
|
||||
if (debug) {
|
||||
debugDrawIKChain(jointChainInfos, chainDepth, context);
|
||||
debugDrawIKChain(jointChainInfoOut, context);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -628,7 +679,7 @@ static CubicHermiteSplineFunctorWithArcLength computeSplineFromTipAndBase(const
|
|||
}
|
||||
|
||||
// pre-compute information about each joint influenced by this spline IK target.
|
||||
void AnimInverseKinematics::computeSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target) {
|
||||
void AnimInverseKinematics::computeAndCacheSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target) const {
|
||||
std::vector<SplineJointInfo> splineJointInfoVec;
|
||||
|
||||
// build spline between the default poses.
|
||||
|
@ -681,13 +732,13 @@ void AnimInverseKinematics::computeSplineJointInfosForIKTarget(const AnimContext
|
|||
_splineJointInfoMap[target.getIndex()] = splineJointInfoVec;
|
||||
}
|
||||
|
||||
const std::vector<AnimInverseKinematics::SplineJointInfo>* AnimInverseKinematics::findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target) {
|
||||
const std::vector<AnimInverseKinematics::SplineJointInfo>* AnimInverseKinematics::findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target) const {
|
||||
// find or create splineJointInfo for this target
|
||||
auto iter = _splineJointInfoMap.find(target.getIndex());
|
||||
if (iter != _splineJointInfoMap.end()) {
|
||||
return &(iter->second);
|
||||
} else {
|
||||
computeSplineJointInfosForIKTarget(context, target);
|
||||
computeAndCacheSplineJointInfosForIKTarget(context, target);
|
||||
auto iter = _splineJointInfoMap.find(target.getIndex());
|
||||
if (iter != _splineJointInfoMap.end()) {
|
||||
return &(iter->second);
|
||||
|
@ -697,10 +748,8 @@ const std::vector<AnimInverseKinematics::SplineJointInfo>* AnimInverseKinematics
|
|||
return nullptr;
|
||||
}
-void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug) {
-
-    const size_t MAX_CHAIN_DEPTH = 30;
-    JointChainInfo jointChainInfos[MAX_CHAIN_DEPTH];
+void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
+                                                  bool debug, JointChainInfo& jointChainInfoOut) const {

     const int baseIndex = _hipsIndex;

@@ -720,7 +769,7 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co

     // This prevents the rotation interpolation from rotating the wrong physical way (but correct mathematical way)
     // when the head is arched backwards very far.
-    glm::quat halfRot = glm::normalize(glm::lerp(basePose.rot(), tipPose.rot(), 0.5f));
+    glm::quat halfRot = safeLerp(basePose.rot(), tipPose.rot(), 0.5f);
     if (glm::dot(halfRot * Vectors::UNIT_Z, basePose.rot() * Vectors::UNIT_Z) < 0.0f) {
         tipPose.rot() = -tipPose.rot();
     }

@@ -743,7 +792,7 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
         if (target.getIndex() == _headIndex) {
             rotT = t * t;
         }
-        glm::quat twistRot = glm::normalize(glm::lerp(basePose.rot(), tipPose.rot(), rotT));
+        glm::quat twistRot = safeLerp(basePose.rot(), tipPose.rot(), rotT);

         // compute the rotation by using the derivative of the spline as the y-axis, and the twistRot x-axis
         glm::vec3 y = glm::normalize(spline.d(t));

@@ -783,19 +832,14 @@ void AnimInverseKinematics::solveTargetWithSpline(const AnimContext& context, co
                 }
             }

-            jointChainInfos[i] = { relPose.rot(), relPose.trans(), target.getWeight(), splineJointInfo.jointIndex, constrained };
+            jointChainInfoOut.jointInfoVec[i] = { relPose.rot(), relPose.trans(), splineJointInfo.jointIndex, constrained };

             parentAbsPose = flexedAbsPose;
         }
     }

-    for (size_t i = 0; i < splineJointInfoVec->size(); i++) {
-        _rotationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relRot, jointChainInfos[i].weight);
-        _translationAccumulators[jointChainInfos[i].jointIndex].add(jointChainInfos[i].relTrans, jointChainInfos[i].weight);
-    }
-
     if (debug) {
-        debugDrawIKChain(jointChainInfos, splineJointInfoVec->size(), context);
+        debugDrawIKChain(jointChainInfoOut, context);
     }
 }
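The comment above describes building each joint's rotation from the spline derivative (used as the y-axis) and the x-axis of `twistRot`. A sketch of constructing such an orthonormal frame with glm, using made-up input vectors; this only illustrates the general technique named in the comment, not the solver's exact code path:

```cpp
// Build a rotation whose y-axis follows a given direction and whose x-axis is
// taken from a reference twist frame, then re-orthogonalized.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <iostream>

int main() {
    glm::vec3 y = glm::normalize(glm::vec3(0.1f, 1.0f, 0.0f));   // e.g. spline tangent
    glm::vec3 xRef(1.0f, 0.0f, 0.0f);                            // e.g. x-axis of twistRot

    glm::vec3 z = glm::normalize(glm::cross(xRef, y));           // complete the frame
    glm::vec3 x = glm::normalize(glm::cross(y, z));              // re-orthogonalize x

    glm::quat rot = glm::quat_cast(glm::mat3(x, y, z));          // columns are the axes
    std::cout << rot.w << " " << rot.x << " " << rot.y << " " << rot.z << "\n";
    return 0;
}
```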
@@ -806,6 +850,24 @@ const AnimPoseVec& AnimInverseKinematics::evaluate(const AnimVariantMap& animVar
     return _relativePoses;
 }

+AnimPose AnimInverseKinematics::applyHipsOffset() const {
+    glm::vec3 hipsOffset = _hipsOffset;
+    AnimPose relHipsPose = _relativePoses[_hipsIndex];
+    float offsetLength = glm::length(hipsOffset);
+    const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
+    if (offsetLength > MIN_HIPS_OFFSET_LENGTH) {
+        float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
+        glm::vec3 scaledHipsOffset = scaleFactor * hipsOffset;
+        if (_hipsParentIndex == -1) {
+            relHipsPose.trans() = _relativePoses[_hipsIndex].trans() + scaledHipsOffset;
+        } else {
+            AnimPose absHipsPose = _skeleton->getAbsolutePose(_hipsIndex, _relativePoses);
+            absHipsPose.trans() += scaledHipsOffset;
+            relHipsPose = _skeleton->getAbsolutePose(_hipsParentIndex, _relativePoses).inverse() * absHipsPose;
+        }
+    }
+    return relHipsPose;
+}
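`applyHipsOffset()` applies a small dead zone to the accumulated offset: anything under `MIN_HIPS_OFFSET_LENGTH` is ignored, and a larger offset is shortened by that amount while keeping its direction, so the shift ramps in smoothly. A standalone sketch of just that scaling step:

```cpp
// Dead-zone scaling of the hips offset, as in applyHipsOffset() above.
#include <glm/glm.hpp>
#include <iostream>

glm::vec3 scaleHipsOffset(const glm::vec3& hipsOffset) {
    const float MIN_HIPS_OFFSET_LENGTH = 0.03f;   // same constant as the diff
    float offsetLength = glm::length(hipsOffset);
    if (offsetLength <= MIN_HIPS_OFFSET_LENGTH) {
        return glm::vec3(0.0f);                   // inside the dead zone: no shift
    }
    float scaleFactor = (offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength;
    return scaleFactor * hipsOffset;
}

int main() {
    std::cout << glm::length(scaleHipsOffset(glm::vec3(0.02f, 0.0f, 0.0f))) << "\n";  // 0
    std::cout << glm::length(scaleHipsOffset(glm::vec3(0.10f, 0.0f, 0.0f))) << "\n";  // 0.07
    return 0;
}
```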
 //virtual
 const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars, const AnimContext& context, float dt, Triggers& triggersOut, const AnimPoseVec& underPoses) {

@@ -850,33 +912,88 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
         _relativePoses = underPoses;
     } else {

+        JointChainInfoVec jointChainInfoVec(targets.size());
+        {
+            PROFILE_RANGE_EX(simulation_animation, "ik/jointChainInfo", 0xffff00ff, 0);
+
+            // initialize a new jointChainInfoVec, this will hold the results for solving each ik chain.
+            JointInfo defaultJointInfo = { glm::quat(), glm::vec3(), -1, false };
+            for (size_t i = 0; i < targets.size(); i++) {
+                size_t chainDepth = (size_t)_skeleton->getChainDepth(targets[i].getIndex());
+                jointChainInfoVec[i].jointInfoVec.reserve(chainDepth);
+                jointChainInfoVec[i].target = targets[i];
+                int index = targets[i].getIndex();
+                for (size_t j = 0; j < chainDepth; j++) {
+                    jointChainInfoVec[i].jointInfoVec.push_back(defaultJointInfo);
+                    jointChainInfoVec[i].jointInfoVec[j].jointIndex = index;
+                    index = _skeleton->getParentIndex(index);
+                }
+            }
+
+            // identify joint chains that have changed types this frame.
+            _prevJointChainInfoVec.resize(jointChainInfoVec.size());
+            for (size_t i = 0; i < _prevJointChainInfoVec.size(); i++) {
+                if (_prevJointChainInfoVec[i].timer <= 0.0f &&
+                    (jointChainInfoVec[i].target.getType() != _prevJointChainInfoVec[i].target.getType() ||
+                     jointChainInfoVec[i].target.getPoleVectorEnabled() != _prevJointChainInfoVec[i].target.getPoleVectorEnabled())) {
+                    _prevJointChainInfoVec[i].timer = JOINT_CHAIN_INTERP_TIME;
+                }
+            }
+        }
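The second loop above arms a per-chain timer whenever a target changes type or toggles its pole vector, and that timer later drives a blend between the previous and current IK solutions. A reduced sketch of the detection step; `TargetMode` and `INTERP_TIME` are stand-ins for `IKTarget::Type` and `JOINT_CHAIN_INTERP_TIME` (whose actual value is not shown in this diff):

```cpp
// Detect a per-target mode change and arm an interpolation timer.
#include <vector>
#include <iostream>

enum class TargetMode { RotationAndPosition, Spline, Unknown };

struct ChainState {
    TargetMode mode { TargetMode::Unknown };
    bool poleVectorEnabled { false };
    float timer { 0.0f };
};

const float INTERP_TIME = 0.25f;   // illustrative value only

void detectModeChanges(std::vector<ChainState>& prev, const std::vector<ChainState>& current) {
    prev.resize(current.size());
    for (size_t i = 0; i < prev.size(); i++) {
        bool changed = current[i].mode != prev[i].mode ||
                       current[i].poleVectorEnabled != prev[i].poleVectorEnabled;
        if (prev[i].timer <= 0.0f && changed) {
            prev[i].timer = INTERP_TIME;   // start a new blend toward the new mode
        }
    }
}

int main() {
    std::vector<ChainState> prev(1);
    std::vector<ChainState> current { { TargetMode::Spline, false, 0.0f } };
    detectModeChanges(prev, current);
    std::cout << "timer = " << prev[0].timer << "\n";   // 0.25
    return 0;
}
```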
         {
             PROFILE_RANGE_EX(simulation_animation, "ik/shiftHips", 0xffff00ff, 0);

-            if (_hipsTargetIndex >= 0 && _hipsTargetIndex < (int)targets.size()) {
+            if (_hipsTargetIndex >= 0) {
+                assert(_hipsTargetIndex < (int)targets.size());
+
                 // slam the hips to match the _hipsTarget
                 AnimPose absPose = targets[_hipsTargetIndex].getPose();
+
                 int parentIndex = _skeleton->getParentIndex(targets[_hipsTargetIndex].getIndex());
-                if (parentIndex != -1) {
-                    _relativePoses[_hipsIndex] = _skeleton->getAbsolutePose(parentIndex, _relativePoses).inverse() * absPose;
-                } else {
-                    _relativePoses[_hipsIndex] = absPose;
+                AnimPose parentAbsPose = _skeleton->getAbsolutePose(parentIndex, _relativePoses);
+
+                // do smooth interpolation of hips, if necessary.
+                if (_prevJointChainInfoVec[_hipsTargetIndex].timer > 0.0f && _prevJointChainInfoVec[_hipsTargetIndex].jointInfoVec.size() > 0) {
+                    float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[_hipsTargetIndex].timer) / JOINT_CHAIN_INTERP_TIME;
+
+                    auto& info = _prevJointChainInfoVec[_hipsTargetIndex].jointInfoVec[0];
+                    AnimPose prevHipsRelPose(info.rot, info.trans);
+                    AnimPose prevHipsAbsPose = parentAbsPose * prevHipsRelPose;
+                    ::blend(1, &prevHipsAbsPose, &absPose, alpha, &absPose);
                 }
-            } else {
+
+                _relativePoses[_hipsIndex] = parentAbsPose.inverse() * absPose;
+                _relativePoses[_hipsIndex].scale() = glm::vec3(1.0f);
+                _hipsOffset = Vectors::ZERO;
+
+            } else if (_hipsIndex >= 0) {
+
                 // if there is no hips target, shift hips according to the _hipsOffset from the previous frame
-                float offsetLength = glm::length(_hipsOffset);
-                const float MIN_HIPS_OFFSET_LENGTH = 0.03f;
-                if (offsetLength > MIN_HIPS_OFFSET_LENGTH && _hipsIndex >= 0) {
-                    float scaleFactor = ((offsetLength - MIN_HIPS_OFFSET_LENGTH) / offsetLength);
-                    glm::vec3 hipsOffset = scaleFactor * _hipsOffset;
-                    if (_hipsParentIndex == -1) {
-                        _relativePoses[_hipsIndex].trans() = _relativePoses[_hipsIndex].trans() + hipsOffset;
-                    } else {
-                        auto absHipsPose = _skeleton->getAbsolutePose(_hipsIndex, _relativePoses);
-                        absHipsPose.trans() += hipsOffset;
-                        _relativePoses[_hipsIndex] = _skeleton->getAbsolutePose(_hipsParentIndex, _relativePoses).inverse() * absHipsPose;
-                    }
+                AnimPose relHipsPose = applyHipsOffset();
+
+                // determine if we should begin interpolating the hips.
+                for (size_t i = 0; i < targets.size(); i++) {
+                    if (_prevJointChainInfoVec[i].target.getIndex() == _hipsIndex) {
+                        if (_prevJointChainInfoVec[i].timer > 0.0f) {
+                            // smoothly lerp in hipsOffset
+                            float alpha = (JOINT_CHAIN_INTERP_TIME - _prevJointChainInfoVec[i].timer) / JOINT_CHAIN_INTERP_TIME;
+                            AnimPose prevRelHipsPose(_prevJointChainInfoVec[i].jointInfoVec[0].rot, _prevJointChainInfoVec[i].jointInfoVec[0].trans);
+                            ::blend(1, &prevRelHipsPose, &relHipsPose, alpha, &relHipsPose);
+                        }
+                        break;
                     }
                 }

+                _relativePoses[_hipsIndex] = relHipsPose;
             }

+            // if there is an active jointChainInfo for the hips store the post shifted hips into it.
+            // This is so we have a valid pose to interplate from when the hips target is disabled.
+            if (_hipsTargetIndex >= 0) {
+                jointChainInfoVec[_hipsTargetIndex].jointInfoVec[0].rot = _relativePoses[_hipsIndex].rot();
+                jointChainInfoVec[_hipsTargetIndex].jointInfoVec[0].trans = _relativePoses[_hipsIndex].trans();
+            }
+
             // update all HipsRelative targets to account for the hips shift/ik target.
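Both branches above end by converting an absolute (rig-space) hips pose into a pose relative to the hips' parent, e.g. `parentAbsPose.inverse() * absPose`. The same conversion written with plain glm matrices and made-up transforms:

```cpp
// Absolute-to-parent-relative conversion: parentAbs * childRel == childAbs.
#include <glm/glm.hpp>
#include <glm/gtc/matrix_transform.hpp>
#include <iostream>

int main() {
    // parent's absolute transform and the desired absolute transform for the child
    glm::mat4 parentAbs = glm::translate(glm::mat4(1.0f), glm::vec3(0.0f, 1.0f, 0.0f));
    glm::mat4 childAbs  = glm::translate(glm::mat4(1.0f), glm::vec3(0.5f, 1.2f, 0.0f));

    // local transform of the child relative to its parent
    glm::mat4 childRel = glm::inverse(parentAbs) * childAbs;

    glm::vec4 localPos = childRel * glm::vec4(0.0f, 0.0f, 0.0f, 1.0f);
    std::cout << localPos.x << " " << localPos.y << " " << localPos.z << "\n";   // 0.5 0.2 0
    return 0;
}
```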
@@ -920,15 +1037,14 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars

         {
             PROFILE_RANGE_EX(simulation_animation, "ik/ccd", 0xffff00ff, 0);

             preconditionRelativePosesToAvoidLimbLock(context, targets);
-            solve(context, targets);
+            solve(context, targets, dt, jointChainInfoVec);
         }

         if (_hipsTargetIndex < 0) {
             PROFILE_RANGE_EX(simulation_animation, "ik/measureHipsOffset", 0xffff00ff, 0);
-            computeHipsOffset(targets, underPoses, dt);
-        } else {
-            _hipsOffset = Vectors::ZERO;
+            _hipsOffset = computeHipsOffset(targets, underPoses, dt, _hipsOffset);
         }
     }

@@ -937,23 +1053,15 @@ const AnimPoseVec& AnimInverseKinematics::overlay(const AnimVariantMap& animVars
         }
     }

-    if (_leftHandIndex > -1) {
-        _uncontrolledLeftHandPose = _skeleton->getAbsolutePose(_leftHandIndex, underPoses);
-    }
-    if (_rightHandIndex > -1) {
-        _uncontrolledRightHandPose = _skeleton->getAbsolutePose(_rightHandIndex, underPoses);
-    }
-    if (_hipsIndex > -1) {
-        _uncontrolledHipsPose = _skeleton->getAbsolutePose(_hipsIndex, underPoses);
-    }

     return _relativePoses;
 }
-void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt) {
+glm::vec3 AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt, glm::vec3 prevHipsOffset) const {

     // measure new _hipsOffset for next frame
     // by looking for discrepancies between where a targeted endEffector is
     // and where it wants to be (after IK solutions are done)
+    glm::vec3 hipsOffset = prevHipsOffset;
     glm::vec3 newHipsOffset = Vectors::ZERO;
     for (auto& target: targets) {
         int targetIndex = target.getIndex();

@@ -969,9 +1077,9 @@ void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targe
         } else if (target.getType() == IKTarget::Type::HmdHead) {
             // we want to shift the hips to bring the head to its designated position
             glm::vec3 actual = _skeleton->getAbsolutePose(_headIndex, _relativePoses).trans();
-            _hipsOffset += target.getTranslation() - actual;
+            hipsOffset += target.getTranslation() - actual;
             // and ignore all other targets
-            newHipsOffset = _hipsOffset;
+            newHipsOffset = hipsOffset;
             break;
         } else if (target.getType() == IKTarget::Type::RotationAndPosition) {
             glm::vec3 actualPosition = _skeleton->getAbsolutePose(targetIndex, _relativePoses).trans();

@@ -991,16 +1099,18 @@ void AnimInverseKinematics::computeHipsOffset(const std::vector<IKTarget>& targe
         }
     }

-    // smooth transitions by relaxing _hipsOffset toward the new value
+    // smooth transitions by relaxing hipsOffset toward the new value
     const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;
     float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
-    _hipsOffset += (newHipsOffset - _hipsOffset) * tau;
+    hipsOffset += (newHipsOffset - hipsOffset) * tau;

     // clamp the hips offset
-    float hipsOffsetLength = glm::length(_hipsOffset);
+    float hipsOffsetLength = glm::length(hipsOffset);
     if (hipsOffsetLength > _maxHipsOffsetLength) {
-        _hipsOffset *= _maxHipsOffsetLength / hipsOffsetLength;
+        hipsOffset *= _maxHipsOffsetLength / hipsOffsetLength;
     }

+    return hipsOffset;
 }
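`computeHipsOffset()` relaxes the offset toward its new target by a fraction `tau = dt / timescale` per frame (clamped to 1) and then clamps the result to `_maxHipsOffsetLength`. A self-contained sketch of that relax-and-clamp step; the max length passed in `main` is made up:

```cpp
// Relax a vector toward a target value with a fixed timescale, then clamp its length.
#include <glm/glm.hpp>
#include <iostream>

glm::vec3 relaxHipsOffset(glm::vec3 hipsOffset, const glm::vec3& newHipsOffset,
                          float dt, float maxLength) {
    const float HIPS_OFFSET_SLAVE_TIMESCALE = 0.10f;   // same constant as the diff
    float tau = dt < HIPS_OFFSET_SLAVE_TIMESCALE ? dt / HIPS_OFFSET_SLAVE_TIMESCALE : 1.0f;
    hipsOffset += (newHipsOffset - hipsOffset) * tau;   // move a fraction of the remaining distance

    float len = glm::length(hipsOffset);
    if (len > maxLength) {
        hipsOffset *= maxLength / len;                  // clamp, preserving direction
    }
    return hipsOffset;
}

int main() {
    glm::vec3 offset(0.0f);
    for (int i = 0; i < 3; i++) {
        offset = relaxHipsOffset(offset, glm::vec3(0.2f, 0.0f, 0.0f), 1.0f / 60.0f, 0.15f);
        std::cout << offset.x << "\n";   // approaches 0.2 but is capped at 0.15
    }
    return 0;
}
```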
 void AnimInverseKinematics::setMaxHipsOffsetLength(float maxLength) {

@@ -1414,8 +1524,6 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
         targetVar.jointIndex = -1;
     }

-    _maxTargetIndex = -1;
-
     for (auto& accumulator: _rotationAccumulators) {
         accumulator.clearAndClean();
     }

@@ -1446,10 +1554,6 @@ void AnimInverseKinematics::setSkeletonInternal(AnimSkeleton::ConstPointer skele
         _leftHandIndex = -1;
         _rightHandIndex = -1;
     }
-
-    _uncontrolledLeftHandPose = AnimPose();
-    _uncontrolledRightHandPose = AnimPose();
-    _uncontrolledHipsPose = AnimPose();
 }

 static glm::vec3 sphericalToCartesian(float phi, float theta) {

@@ -1495,14 +1599,14 @@ void AnimInverseKinematics::debugDrawRelativePoses(const AnimContext& context) c
     }
 }

-void AnimInverseKinematics::debugDrawIKChain(JointChainInfo* jointChainInfos, size_t numJointChainInfos, const AnimContext& context) const {
+void AnimInverseKinematics::debugDrawIKChain(const JointChainInfo& jointChainInfo, const AnimContext& context) const {
     AnimPoseVec poses = _relativePoses;

     // copy debug joint rotations into the relative poses
-    for (size_t i = 0; i < numJointChainInfos; i++) {
-        const JointChainInfo& info = jointChainInfos[i];
-        poses[info.jointIndex].rot() = info.relRot;
-        poses[info.jointIndex].trans() = info.relTrans;
+    for (size_t i = 0; i < jointChainInfo.jointInfoVec.size(); i++) {
+        const JointInfo& info = jointChainInfo.jointInfoVec[i];
+        poses[info.jointIndex].rot() = info.rot;
+        poses[info.jointIndex].trans() = info.trans;
     }

     // convert relative poses to absolute

@@ -1519,9 +1623,9 @@ void AnimInverseKinematics::debugDrawIKChain(JointChainInfo* jointChainInfos, si
     // draw each pose
     for (int i = 0; i < (int)poses.size(); i++) {
         int parentIndex = _skeleton->getParentIndex(i);
-        JointChainInfo* jointInfo = nullptr;
-        JointChainInfo* parentJointInfo = nullptr;
-        lookupJointChainInfo(jointChainInfos, numJointChainInfos, i, parentIndex, &jointInfo, &parentJointInfo);
+        const JointInfo* jointInfo = nullptr;
+        const JointInfo* parentJointInfo = nullptr;
+        lookupJointInfo(jointChainInfo, i, parentIndex, &jointInfo, &parentJointInfo);
         if (jointInfo && parentJointInfo) {

             // transform local axes into world space.

@@ -1608,7 +1712,7 @@ void AnimInverseKinematics::debugDrawConstraints(const AnimContext& context) con

     const int NUM_SWING_STEPS = 10;
     for (int i = 0; i < NUM_SWING_STEPS + 1; i++) {
-        glm::quat rot = glm::normalize(glm::lerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS)));
+        glm::quat rot = safeLerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS));
         glm::vec3 axis = transformVectorFast(geomToWorldMatrix, parentAbsRot * rot * refRot * Vectors::UNIT_Y);
         DebugDraw::getInstance().drawRay(pos, pos + TWIST_LENGTH * axis, CYAN);
     }

@@ -1626,7 +1730,7 @@ void AnimInverseKinematics::debugDrawConstraints(const AnimContext& context) con

     const int NUM_SWING_STEPS = 10;
     for (int i = 0; i < NUM_SWING_STEPS + 1; i++) {
-        glm::quat rot = glm::normalize(glm::lerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS)));
+        glm::quat rot = safeLerp(minRot, maxRot, i * (1.0f / NUM_SWING_STEPS));
         glm::vec3 axis = transformVectorFast(geomToWorldMatrix, parentAbsRot * rot * refRot * Vectors::UNIT_X);
         DebugDraw::getInstance().drawRay(pos, pos + TWIST_LENGTH * axis, CYAN);
     }

@@ -1666,10 +1770,9 @@ void AnimInverseKinematics::blendToPoses(const AnimPoseVec& targetPoses, const A
     // relax toward poses
     int numJoints = (int)_relativePoses.size();
     for (int i = 0; i < numJoints; ++i) {
-        float dotSign = copysignf(1.0f, glm::dot(_relativePoses[i].rot(), targetPoses[i].rot()));
         if (_rotationAccumulators[i].isDirty()) {
             // this joint is affected by IK --> blend toward the targetPoses rotation
-            _relativePoses[i].rot() = glm::normalize(glm::lerp(_relativePoses[i].rot(), dotSign * targetPoses[i].rot(), blendFactor));
+            _relativePoses[i].rot() = safeLerp(_relativePoses[i].rot(), targetPoses[i].rot(), blendFactor);
         } else {
             // this joint is NOT affected by IK --> slam to underPoses rotation
             _relativePoses[i].rot() = underPoses[i].rot();
@@ -26,14 +26,21 @@ class RotationConstraint;
 class AnimInverseKinematics : public AnimNode {
 public:

-    struct JointChainInfo {
-        glm::quat relRot;
-        glm::vec3 relTrans;
-        float weight;
+    struct JointInfo {
+        glm::quat rot;
+        glm::vec3 trans;
         int jointIndex;
         bool constrained;
     };

+    struct JointChainInfo {
+        std::vector<JointInfo> jointInfoVec;
+        IKTarget target;
+        float timer { 0.0f };
+    };
+
+    using JointChainInfoVec = std::vector<JointChainInfo>;
+
     explicit AnimInverseKinematics(const QString& id);
     virtual ~AnimInverseKinematics() override;

@@ -66,23 +73,22 @@ public:
     void setSolutionSource(SolutionSource solutionSource) { _solutionSource = solutionSource; }
     void setSolutionSourceVar(const QString& solutionSourceVar) { _solutionSourceVar = solutionSourceVar; }

-    const AnimPose& getUncontrolledLeftHandPose() { return _uncontrolledLeftHandPose; }
-    const AnimPose& getUncontrolledRightHandPose() { return _uncontrolledRightHandPose; }
-    const AnimPose& getUncontrolledHipPose() { return _uncontrolledHipsPose; }
-
 protected:
     void computeTargets(const AnimVariantMap& animVars, std::vector<IKTarget>& targets, const AnimPoseVec& underPoses);
-    void solve(const AnimContext& context, const std::vector<IKTarget>& targets);
-    void solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug);
-    void solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses, bool debug);
+    void solve(const AnimContext& context, const std::vector<IKTarget>& targets, float dt, JointChainInfoVec& jointChainInfoVec);
+    void solveTargetWithCCD(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
+                            bool debug, JointChainInfo& jointChainInfoOut) const;
+    void solveTargetWithSpline(const AnimContext& context, const IKTarget& target, const AnimPoseVec& absolutePoses,
+                               bool debug, JointChainInfo& jointChainInfoOut) const;
     virtual void setSkeletonInternal(AnimSkeleton::ConstPointer skeleton) override;
-    void debugDrawIKChain(JointChainInfo* jointChainInfos, size_t numJointChainInfos, const AnimContext& context) const;
+    void debugDrawIKChain(const JointChainInfo& jointChainInfo, const AnimContext& context) const;
     void debugDrawRelativePoses(const AnimContext& context) const;
     void debugDrawConstraints(const AnimContext& context) const;
     void debugDrawSpineSplines(const AnimContext& context, const std::vector<IKTarget>& targets) const;
     void initRelativePosesFromSolutionSource(SolutionSource solutionSource, const AnimPoseVec& underPose);
     void blendToPoses(const AnimPoseVec& targetPoses, const AnimPoseVec& underPose, float blendFactor);
     void preconditionRelativePosesToAvoidLimbLock(const AnimContext& context, const std::vector<IKTarget>& targets);
+    AnimPose applyHipsOffset() const;

     // used to pre-compute information about each joint influeced by a spline IK target.
     struct SplineJointInfo {

@@ -91,8 +97,8 @@ protected:
         AnimPose offsetPose; // local offset from the spline to the joint.
     };

-    void computeSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target);
-    const std::vector<SplineJointInfo>* findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target);
+    void computeAndCacheSplineJointInfosForIKTarget(const AnimContext& context, const IKTarget& target) const;
+    const std::vector<SplineJointInfo>* findOrCreateSplineJointInfo(const AnimContext& context, const IKTarget& target) const;

     // for AnimDebugDraw rendering
     virtual const AnimPoseVec& getPosesInternal() const override { return _relativePoses; }

@@ -101,7 +107,7 @@ protected:
     void clearConstraints();
     void initConstraints();
     void initLimitCenterPoses();
-    void computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt);
+    glm::vec3 computeHipsOffset(const std::vector<IKTarget>& targets, const AnimPoseVec& underPoses, float dt, glm::vec3 prevHipsOffset) const;

     // no copies
     AnimInverseKinematics(const AnimInverseKinematics&) = delete;

@@ -136,7 +142,7 @@ protected:
     AnimPoseVec _relativePoses; // current relative poses
     AnimPoseVec _limitCenterPoses; // relative

-    std::map<int, std::vector<SplineJointInfo>> _splineJointInfoMap;
+    mutable std::map<int, std::vector<SplineJointInfo>> _splineJointInfoMap;

     // experimental data for moving hips during IK
     glm::vec3 _hipsOffset { Vectors::ZERO };

@@ -148,18 +154,12 @@ protected:
     int _leftHandIndex { -1 };
     int _rightHandIndex { -1 };

-    // _maxTargetIndex is tracked to help optimize the recalculation of absolute poses
-    // during the the cyclic coordinate descent algorithm
-    int _maxTargetIndex { 0 };
-
     float _maxErrorOnLastSolve { FLT_MAX };
     bool _previousEnableDebugIKTargets { false };
     SolutionSource _solutionSource { SolutionSource::RelaxToUnderPoses };
     QString _solutionSourceVar;

-    AnimPose _uncontrolledLeftHandPose { AnimPose() };
-    AnimPose _uncontrolledRightHandPose { AnimPose() };
-    AnimPose _uncontrolledHipsPose { AnimPose() };
+    JointChainInfoVec _prevJointChainInfoVec;
 };

 #endif // hifi_AnimInverseKinematics_h
@@ -42,6 +42,20 @@ int AnimSkeleton::getNumJoints() const {
     return _jointsSize;
 }

+int AnimSkeleton::getChainDepth(int jointIndex) const {
+    if (jointIndex >= 0) {
+        int chainDepth = 0;
+        int index = jointIndex;
+        do {
+            chainDepth++;
+            index = _joints[index].parentIndex;
+        } while (index != -1);
+        return chainDepth;
+    } else {
+        return 0;
+    }
+}
+
 const AnimPose& AnimSkeleton::getAbsoluteBindPose(int jointIndex) const {
     return _absoluteBindPoses[jointIndex];
 }
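`getChainDepth()` counts the joints from the given index up to the root, inclusive, by following `parentIndex` links. The same walk over a tiny hypothetical skeleton:

```cpp
// Chain depth by walking parent indices, as in AnimSkeleton::getChainDepth() above.
#include <vector>
#include <iostream>

struct Joint { int parentIndex; };

int chainDepth(const std::vector<Joint>& joints, int jointIndex) {
    if (jointIndex < 0) {
        return 0;
    }
    int depth = 0;
    int index = jointIndex;
    do {
        depth++;
        index = joints[index].parentIndex;
    } while (index != -1);
    return depth;
}

int main() {
    // root(0) -> hips(1) -> spine(2) -> head(3)
    std::vector<Joint> joints = { { -1 }, { 0 }, { 1 }, { 2 } };
    std::cout << chainDepth(joints, 3) << "\n";   // 4
    return 0;
}
```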
@@ -28,6 +28,7 @@ public:
     int nameToJointIndex(const QString& jointName) const;
     const QString& getJointName(int jointIndex) const;
     int getNumJoints() const;
+    int getChainDepth(int jointIndex) const;

     // absolute pose, not relative to parent
     const AnimPose& getAbsoluteBindPose(int jointIndex) const;
@@ -28,7 +28,7 @@ void blend(size_t numPoses, const AnimPose* a, const AnimPose* b, float alpha, A
         }

         result[i].scale() = lerp(aPose.scale(), bPose.scale(), alpha);
-        result[i].rot() = glm::normalize(glm::lerp(aPose.rot(), q2, alpha));
+        result[i].rot() = safeLerp(aPose.rot(), bPose.rot(), alpha);
         result[i].trans() = lerp(aPose.trans(), bPose.trans(), alpha);
     }
 }
@@ -21,4 +21,14 @@ glm::quat averageQuats(size_t numQuats, const glm::quat* quats);
 float accumulateTime(float startFrame, float endFrame, float timeScale, float currentFrame, float dt, bool loopFlag,
                      const QString& id, AnimNode::Triggers& triggersOut);

+inline glm::quat safeLerp(const glm::quat& a, const glm::quat& b, float alpha) {
+    // adjust signs if necessary
+    glm::quat bTemp = b;
+    float dot = glm::dot(a, bTemp);
+    if (dot < 0.0f) {
+        bTemp = -bTemp;
+    }
+    return glm::normalize(glm::lerp(a, bTemp, alpha));
+}
+
 #endif
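`safeLerp()` exists because `q` and `-q` describe the same rotation: if the two quaternions lie in opposite hemispheres (negative dot product), a plain component-wise lerp travels the long way around and can produce visible spinning artifacts. A small demonstration of the difference, using arbitrary test rotations:

```cpp
// Why safeLerp flips the second quaternion before lerping.
#include <glm/glm.hpp>
#include <glm/gtc/quaternion.hpp>
#include <iostream>

glm::quat safeLerpSketch(const glm::quat& a, const glm::quat& b, float alpha) {
    glm::quat bTemp = b;
    if (glm::dot(a, bTemp) < 0.0f) {
        bTemp = -bTemp;   // move b into the same hemisphere as a
    }
    return glm::normalize(glm::lerp(a, bTemp, alpha));
}

int main() {
    glm::quat a = glm::angleAxis(glm::radians(10.0f), glm::vec3(0, 1, 0));
    glm::quat b = -glm::angleAxis(glm::radians(20.0f), glm::vec3(0, 1, 0));   // negated, same rotation

    glm::quat naive = glm::normalize(glm::lerp(a, b, 0.5f));   // blends toward the far hemisphere
    glm::quat safe  = safeLerpSketch(a, b, 0.5f);               // stays near the short path
    std::cout << "naive w = " << naive.w << ", safe w = " << safe.w << "\n";
    return 0;
}
```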
@@ -56,8 +56,8 @@ private:
     glm::vec3 _poleReferenceVector;
     bool _poleVectorEnabled { false };
     int _index { -1 };
-    Type _type { Type::RotationAndPosition };
-    float _weight;
+    Type _type { Type::Unknown };
+    float _weight { 0.0f };
     float _flexCoefficients[MAX_FLEX_COEFFICIENTS];
     size_t _numFlexCoefficients;
 };
@@ -404,8 +404,18 @@ void Rig::setJointRotation(int index, bool valid, const glm::quat& rotation, flo
 }

 bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm::vec3 translation, glm::quat rotation) const {
-    if (isIndexValid(jointIndex)) {
-        position = (rotation * _internalPoseSet._absolutePoses[jointIndex].trans()) + translation;
+    if (QThread::currentThread() == thread()) {
+        if (isIndexValid(jointIndex)) {
+            position = (rotation * _internalPoseSet._absolutePoses[jointIndex].trans()) + translation;
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    QReadLocker readLock(&_externalPoseSetLock);
+    if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._absolutePoses.size()) {
+        position = (rotation * _externalPoseSet._absolutePoses[jointIndex].trans()) + translation;
         return true;
     } else {
         return false;

@@ -413,17 +423,31 @@ bool Rig::getJointPositionInWorldFrame(int jointIndex, glm::vec3& position, glm:
 }

 bool Rig::getJointPosition(int jointIndex, glm::vec3& position) const {
-    if (isIndexValid(jointIndex)) {
-        position = _internalPoseSet._absolutePoses[jointIndex].trans();
-        return true;
+    if (QThread::currentThread() == thread()) {
+        if (isIndexValid(jointIndex)) {
+            position = _internalPoseSet._absolutePoses[jointIndex].trans();
+            return true;
+        } else {
+            return false;
+        }
     } else {
-        return false;
+        return getAbsoluteJointTranslationInRigFrame(jointIndex, position);
     }
 }

 bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const glm::quat& rotation) const {
-    if (isIndexValid(jointIndex)) {
-        result = rotation * _internalPoseSet._absolutePoses[jointIndex].rot();
+    if (QThread::currentThread() == thread()) {
+        if (isIndexValid(jointIndex)) {
+            result = rotation * _internalPoseSet._absolutePoses[jointIndex].rot();
+            return true;
+        } else {
+            return false;
+        }
+    }
+
+    QReadLocker readLock(&_externalPoseSetLock);
+    if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._absolutePoses.size()) {
+        result = rotation * _externalPoseSet._absolutePoses[jointIndex].rot();
         return true;
     } else {
         return false;

@@ -431,6 +455,15 @@ bool Rig::getJointRotationInWorldFrame(int jointIndex, glm::quat& result, const
 }

 bool Rig::getJointRotation(int jointIndex, glm::quat& rotation) const {
+    if (QThread::currentThread() == thread()) {
+        if (isIndexValid(jointIndex)) {
+            rotation = _internalPoseSet._relativePoses[jointIndex].rot();
+            return true;
+        } else {
+            return false;
+        }
+    }
+
     QReadLocker readLock(&_externalPoseSetLock);
     if (jointIndex >= 0 && jointIndex < (int)_externalPoseSet._relativePoses.size()) {
         rotation = _externalPoseSet._relativePoses[jointIndex].rot();
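The Rig getters above now answer from `_internalPoseSet` when called on the Rig's own thread and fall back to the lock-protected `_externalPoseSet` otherwise. A sketch of that owner-thread fast path using the standard library in place of Qt's `QThread`/`QReadLocker`, with a single float standing in for the pose data, purely to illustrate the pattern:

```cpp
// Owner-thread fast path with a published, lock-protected copy for other threads.
#include <mutex>
#include <thread>
#include <iostream>

class PoseStore {
public:
    PoseStore() : _ownerId(std::this_thread::get_id()) {}

    // called every frame on the owner thread
    void setInternal(float value) {
        _internalValue = value;
        std::lock_guard<std::mutex> lock(_externalLock);
        _externalValue = value;            // publish a copy for other threads
    }

    float get() const {
        if (std::this_thread::get_id() == _ownerId) {
            return _internalValue;         // no lock needed on the owner thread
        }
        std::lock_guard<std::mutex> lock(_externalLock);
        return _externalValue;             // other threads read the published copy
    }

private:
    std::thread::id _ownerId;
    float _internalValue { 0.0f };
    mutable std::mutex _externalLock;
    float _externalValue { 0.0f };
};

int main() {
    PoseStore store;
    store.setInternal(1.0f);
    std::thread reader([&store] { std::cout << "reader sees " << store.get() << "\n"; });
    reader.join();
    std::cout << "owner sees " << store.get() << "\n";
    return 0;
}
```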
@@ -1082,36 +1115,13 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
     const glm::vec3 bodyCapsuleEnd = bodyCapsuleCenter + glm::vec3(0, bodyCapsuleHalfHeight, 0);

     const float HAND_RADIUS = 0.05f;

-    const float RELAX_DURATION = 0.6f;
-    const float CONTROL_DURATION = 0.4f;
-    const bool TO_CONTROLLED = true;
-    const bool FROM_CONTROLLED = false;
-    const bool LEFT_HAND = true;
-    const bool RIGHT_HAND = false;
-
     const float ELBOW_POLE_VECTOR_BLEND_FACTOR = 0.95f;

     if (leftHandEnabled) {
-        if (!_isLeftHandControlled) {
-            _leftHandControlTimeRemaining = CONTROL_DURATION;
-            _isLeftHandControlled = true;
-        }
-
         glm::vec3 handPosition = leftHandPose.trans();
         glm::quat handRotation = leftHandPose.rot();

-        if (_leftHandControlTimeRemaining > 0.0f) {
-            // Move hand from non-controlled position to controlled position.
-            _leftHandControlTimeRemaining = std::max(_leftHandControlTimeRemaining - dt, 0.0f);
-            AnimPose handPose(Vectors::ONE, handRotation, handPosition);
-            if (transitionHandPose(_leftHandControlTimeRemaining, CONTROL_DURATION, handPose,
-                                   LEFT_HAND, TO_CONTROLLED, handPose)) {
-                handPosition = handPose.trans();
-                handRotation = handPose.rot();
-            }
-        }
-
         if (!hipsEnabled) {
             // prevent the hand IK targets from intersecting the body capsule
             glm::vec3 displacement;

@@ -1124,9 +1134,6 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
         _animVars.set("leftHandRotation", handRotation);
         _animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);

-        _lastLeftHandControlledPose = AnimPose(Vectors::ONE, handRotation, handPosition);
-        _isLeftHandControlled = true;
-
         // compute pole vector
         int handJointIndex = _animSkeleton->nameToJointIndex("LeftHand");
         int armJointIndex = _animSkeleton->nameToJointIndex("LeftArm");

@@ -1154,47 +1161,17 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
         _prevLeftHandPoleVectorValid = false;
         _animVars.set("leftHandPoleVectorEnabled", false);

-        if (_isLeftHandControlled) {
-            _leftHandRelaxTimeRemaining = RELAX_DURATION;
-            _isLeftHandControlled = false;
-        }
+        _animVars.unset("leftHandPosition");
+        _animVars.unset("leftHandRotation");
+        _animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);

-        if (_leftHandRelaxTimeRemaining > 0.0f) {
-            // Move hand from controlled position to non-controlled position.
-            _leftHandRelaxTimeRemaining = std::max(_leftHandRelaxTimeRemaining - dt, 0.0f);
-            AnimPose handPose;
-            if (transitionHandPose(_leftHandRelaxTimeRemaining, RELAX_DURATION, _lastLeftHandControlledPose,
-                                   LEFT_HAND, FROM_CONTROLLED, handPose)) {
-                _animVars.set("leftHandPosition", handPose.trans());
-                _animVars.set("leftHandRotation", handPose.rot());
-                _animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
-            }
-        } else {
-            _animVars.unset("leftHandPosition");
-            _animVars.unset("leftHandRotation");
-            _animVars.set("leftHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
-        }
     }

     if (rightHandEnabled) {
-        if (!_isRightHandControlled) {
-            _rightHandControlTimeRemaining = CONTROL_DURATION;
-            _isRightHandControlled = true;
-        }
-
         glm::vec3 handPosition = rightHandPose.trans();
         glm::quat handRotation = rightHandPose.rot();

-        if (_rightHandControlTimeRemaining > 0.0f) {
-            // Move hand from non-controlled position to controlled position.
-            _rightHandControlTimeRemaining = std::max(_rightHandControlTimeRemaining - dt, 0.0f);
-            AnimPose handPose(Vectors::ONE, handRotation, handPosition);
-            if (transitionHandPose(_rightHandControlTimeRemaining, CONTROL_DURATION, handPose, RIGHT_HAND, TO_CONTROLLED, handPose)) {
-                handPosition = handPose.trans();
-                handRotation = handPose.rot();
-            }
-        }
-
         if (!hipsEnabled) {
             // prevent the hand IK targets from intersecting the body capsule
             glm::vec3 displacement;

@@ -1207,9 +1184,6 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
         _animVars.set("rightHandRotation", handRotation);
         _animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);

-        _lastRightHandControlledPose = AnimPose(Vectors::ONE, handRotation, handPosition);
-        _isRightHandControlled = true;
-
         // compute pole vector
         int handJointIndex = _animSkeleton->nameToJointIndex("RightHand");
         int armJointIndex = _animSkeleton->nameToJointIndex("RightArm");

@@ -1237,25 +1211,9 @@ void Rig::updateHands(bool leftHandEnabled, bool rightHandEnabled, bool hipsEnab
         _prevRightHandPoleVectorValid = false;
         _animVars.set("rightHandPoleVectorEnabled", false);

-        if (_isRightHandControlled) {
-            _rightHandRelaxTimeRemaining = RELAX_DURATION;
-            _isRightHandControlled = false;
-        }
-
-        if (_rightHandRelaxTimeRemaining > 0.0f) {
-            // Move hand from controlled position to non-controlled position.
-            _rightHandRelaxTimeRemaining = std::max(_rightHandRelaxTimeRemaining - dt, 0.0f);
-            AnimPose handPose;
-            if (transitionHandPose(_rightHandRelaxTimeRemaining, RELAX_DURATION, _lastRightHandControlledPose, RIGHT_HAND, FROM_CONTROLLED, handPose)) {
-                _animVars.set("rightHandPosition", handPose.trans());
-                _animVars.set("rightHandRotation", handPose.rot());
-                _animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);
-            }
-        } else {
-            _animVars.unset("rightHandPosition");
-            _animVars.unset("rightHandRotation");
-            _animVars.set("rightHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
-        }
+        _animVars.unset("rightHandPosition");
+        _animVars.unset("rightHandRotation");
+        _animVars.set("rightHandType", (int)IKTarget::Type::HipsRelativeRotationAndPosition);
     }
 }
@@ -1704,39 +1662,38 @@ void Rig::computeAvatarBoundingCapsule(
     ikNode.setTargetVars("RightFoot", "rightFootPosition", "rightFootRotation",
                          "rightFootType", "rightFootWeight", 1.0f, {},
                          QString(), QString(), QString());

-    AnimPose geometryToRig = _modelOffset * _geometryOffset;
-
-    AnimPose hips(glm::vec3(1), glm::quat(), glm::vec3());
+    glm::vec3 hipsPosition(0.0f);
     int hipsIndex = indexOfJoint("Hips");
     if (hipsIndex >= 0) {
-        hips = geometryToRig * _animSkeleton->getAbsoluteBindPose(hipsIndex);
+        hipsPosition = transformPoint(_geometryToRigTransform, _animSkeleton->getAbsoluteDefaultPose(hipsIndex).trans());
     }
     AnimVariantMap animVars;
+    animVars.setRigToGeometryTransform(_rigToGeometryTransform);
     glm::quat handRotation = glm::angleAxis(PI, Vectors::UNIT_X);
-    animVars.set("leftHandPosition", hips.trans());
+    animVars.set("leftHandPosition", hipsPosition);
     animVars.set("leftHandRotation", handRotation);
     animVars.set("leftHandType", (int)IKTarget::Type::RotationAndPosition);
-    animVars.set("rightHandPosition", hips.trans());
+    animVars.set("rightHandPosition", hipsPosition);
     animVars.set("rightHandRotation", handRotation);
     animVars.set("rightHandType", (int)IKTarget::Type::RotationAndPosition);

     int rightFootIndex = indexOfJoint("RightFoot");
     int leftFootIndex = indexOfJoint("LeftFoot");
     if (rightFootIndex != -1 && leftFootIndex != -1) {
-        glm::vec3 foot = Vectors::ZERO;
+        glm::vec3 geomFootPosition = glm::vec3(0.0f, _animSkeleton->getAbsoluteDefaultPose(rightFootIndex).trans().y, 0.0f);
+        glm::vec3 footPosition = transformPoint(_geometryToRigTransform, geomFootPosition);
         glm::quat footRotation = glm::angleAxis(0.5f * PI, Vectors::UNIT_X);
-        animVars.set("leftFootPosition", foot);
+        animVars.set("leftFootPosition", footPosition);
         animVars.set("leftFootRotation", footRotation);
         animVars.set("leftFootType", (int)IKTarget::Type::RotationAndPosition);
-        animVars.set("rightFootPosition", foot);
+        animVars.set("rightFootPosition", footPosition);
         animVars.set("rightFootRotation", footRotation);
         animVars.set("rightFootType", (int)IKTarget::Type::RotationAndPosition);
     }

     // call overlay twice: once to verify AnimPoseVec joints and again to do the IK
     AnimNode::Triggers triggersOut;
-    AnimContext context(false, false, false, glm::mat4(), glm::mat4());
+    AnimContext context(false, false, false, _geometryToRigTransform, _rigToGeometryTransform);
     float dt = 1.0f; // the value of this does not matter
     ikNode.overlay(animVars, context, dt, triggersOut, _animSkeleton->getRelativeBindPoses());
     AnimPoseVec finalPoses = ikNode.overlay(animVars, context, dt, triggersOut, _animSkeleton->getRelativeBindPoses());

@@ -1769,34 +1726,13 @@ void Rig::computeAvatarBoundingCapsule(

     // compute bounding shape parameters
     // NOTE: we assume that the longest side of totalExtents is the yAxis...
-    glm::vec3 diagonal = (geometryToRig * totalExtents.maximum) - (geometryToRig * totalExtents.minimum);
+    glm::vec3 diagonal = (transformPoint(_geometryToRigTransform, totalExtents.maximum) -
+                          transformPoint(_geometryToRigTransform, totalExtents.minimum));
     // ... and assume the radiusOut is half the RMS of the X and Z sides:
     radiusOut = 0.5f * sqrtf(0.5f * (diagonal.x * diagonal.x + diagonal.z * diagonal.z));
     heightOut = diagonal.y - 2.0f * radiusOut;

     glm::vec3 rootPosition = finalPoses[geometry.rootJointIndex].trans();
-    glm::vec3 rigCenter = (geometryToRig * (0.5f * (totalExtents.maximum + totalExtents.minimum)));
-    localOffsetOut = rigCenter - (geometryToRig * rootPosition);
-}
-
-bool Rig::transitionHandPose(float deltaTime, float totalDuration, AnimPose& controlledHandPose, bool isLeftHand,
-                             bool isToControlled, AnimPose& returnHandPose) {
-    auto ikNode = getAnimInverseKinematicsNode();
-    if (ikNode) {
-        float alpha = 1.0f - deltaTime / totalDuration;
-        const AnimPose geometryToRigTransform(_geometryToRigTransform);
-        AnimPose uncontrolledHandPose;
-        if (isLeftHand) {
-            uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledLeftHandPose();
-        } else {
-            uncontrolledHandPose = geometryToRigTransform * ikNode->getUncontrolledRightHandPose();
-        }
-        if (isToControlled) {
-            ::blend(1, &uncontrolledHandPose, &controlledHandPose, alpha, &returnHandPose);
-        } else {
-            ::blend(1, &controlledHandPose, &uncontrolledHandPose, alpha, &returnHandPose);
-        }
-        return true;
-    }
-    return false;
+    glm::vec3 rigCenter = transformPoint(_geometryToRigTransform, (0.5f * (totalExtents.maximum + totalExtents.minimum)));
+    localOffsetOut = rigCenter - transformPoint(_geometryToRigTransform, rootPosition);
 }
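The bounding-capsule math above takes the rig-space extents diagonal, assumes its Y side is the capsule's long axis, derives the radius from the RMS of the X and Z sides, and subtracts the two hemispherical caps from the height. The same arithmetic with made-up extents:

```cpp
// Capsule radius and height from an extents diagonal, as in computeAvatarBoundingCapsule().
#include <cmath>
#include <iostream>

int main() {
    float dx = 0.5f, dy = 1.8f, dz = 0.3f;                          // rig-space extents diagonal
    float radius = 0.5f * std::sqrt(0.5f * (dx * dx + dz * dz));    // half the RMS of X and Z
    float height = dy - 2.0f * radius;                              // cylindrical part only
    std::cout << "radius = " << radius << ", height = " << height << "\n";
    return 0;
}
```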
@@ -340,18 +340,6 @@ protected:
     int _nextStateHandlerId { 0 };
     QMutex _stateMutex;

-    bool transitionHandPose(float deltaTime, float totalDuration, AnimPose& controlledHandPose, bool isLeftHand,
-                            bool isToControlled, AnimPose& returnHandPose);
-
-    bool _isLeftHandControlled { false };
-    bool _isRightHandControlled { false };
-    float _leftHandControlTimeRemaining { 0.0f };
-    float _rightHandControlTimeRemaining { 0.0f };
-    float _leftHandRelaxTimeRemaining { 0.0f };
-    float _rightHandRelaxTimeRemaining { 0.0f };
-    AnimPose _lastLeftHandControlledPose;
-    AnimPose _lastRightHandControlledPose;
-
     glm::vec3 _prevRightFootPoleVector { Vectors::UNIT_Z };
     bool _prevRightFootPoleVectorValid { false };